Diffstat (limited to 'Master/bin')
-rwxr-xr-x  Master/bin/alpha-linux/mtxrun       3992
-rwxr-xr-x  Master/bin/amd64-freebsd/mtxrun     3992
-rwxr-xr-x  Master/bin/amd64-kfreebsd/mtxrun    3992
-rwxr-xr-x  Master/bin/i386-cygwin/mtxrun       3992
-rwxr-xr-x  Master/bin/i386-freebsd/mtxrun      3992
-rwxr-xr-x  Master/bin/i386-kfreebsd/mtxrun     3992
-rwxr-xr-x  Master/bin/i386-linux/mtxrun        3992
-rwxr-xr-x  Master/bin/i386-netbsd/mtxrun       3992
-rwxr-xr-x  Master/bin/i386-solaris/mtxrun      3992
-rwxr-xr-x  Master/bin/mips-irix/mtxrun         3992
-rwxr-xr-x  Master/bin/powerpc-aix/mtxrun       3992
-rwxr-xr-x  Master/bin/powerpc-linux/mtxrun     3992
-rwxr-xr-x  Master/bin/sparc-linux/mtxrun       3992
-rwxr-xr-x  Master/bin/sparc-solaris/mtxrun     3992
-rwxr-xr-x  Master/bin/universal-darwin/mtxrun  3992
-rw-r--r-- [-rwxr-xr-x]  Master/bin/win32/mtxrun.dll  bin 9216 -> 9216 bytes
-rw-r--r-- [-rwxr-xr-x]  Master/bin/win32/mtxrun.lua  3992
-rwxr-xr-x  Master/bin/x86_64-darwin/mtxrun     3992
-rwxr-xr-x  Master/bin/x86_64-linux/mtxrun      3992
-rwxr-xr-x  Master/bin/x86_64-solaris/mtxrun    3992
20 files changed, 50787 insertions, 25061 deletions
diff --git a/Master/bin/alpha-linux/mtxrun b/Master/bin/alpha-linux/mtxrun
index 7adcd3023a8..335c4fcb9e0 100755
--- a/Master/bin/alpha-linux/mtxrun
+++ b/Master/bin/alpha-linux/mtxrun
@@ -160,509 +160,6 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['l-lpeg'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local lpeg = require("lpeg")
-
-local type = type
-
--- Beware, we predefine a bunch of patterns here and one reason for doing so
--- is that we get consistent behaviour in some of the visualizers.
-
-lpeg.patterns = lpeg.patterns or { } -- so that we can share
-local patterns = lpeg.patterns
-
-local P, R, S, V, match = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.match
-local Ct, C, Cs, Cc = lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc
-local lpegtype = lpeg.type
-
-local utfcharacters = string.utfcharacters
-local utfgmatch = unicode and unicode.utf8.gmatch
-
-local anything = P(1)
-local endofstring = P(-1)
-local alwaysmatched = P(true)
-
-patterns.anything = anything
-patterns.endofstring = endofstring
-patterns.beginofstring = alwaysmatched
-patterns.alwaysmatched = alwaysmatched
-
-local digit, sign = R('09'), S('+-')
-local cr, lf, crlf = P("\r"), P("\n"), P("\r\n")
-local newline = crlf + cr + lf
-local escaped = P("\\") * anything
-local squote = P("'")
-local dquote = P('"')
-local space = P(" ")
-
-local utfbom_32_be = P('\000\000\254\255')
-local utfbom_32_le = P('\255\254\000\000')
-local utfbom_16_be = P('\255\254')
-local utfbom_16_le = P('\254\255')
-local utfbom_8 = P('\239\187\191')
-local utfbom = utfbom_32_be + utfbom_32_le
- + utfbom_16_be + utfbom_16_le
- + utfbom_8
-local utftype = utfbom_32_be / "utf-32-be" + utfbom_32_le / "utf-32-le"
- + utfbom_16_be / "utf-16-be" + utfbom_16_le / "utf-16-le"
- + utfbom_8 / "utf-8" + alwaysmatched / "unknown"
-
-local utf8next = R("\128\191")
-
-patterns.utf8one = R("\000\127")
-patterns.utf8two = R("\194\223") * utf8next
-patterns.utf8three = R("\224\239") * utf8next * utf8next
-patterns.utf8four = R("\240\244") * utf8next * utf8next * utf8next
-patterns.utfbom = utfbom
-patterns.utftype = utftype
-
-local utf8char = patterns.utf8one + patterns.utf8two + patterns.utf8three + patterns.utf8four
-local validutf8char = utf8char^0 * endofstring * Cc(true) + Cc(false)
-
-patterns.utf8 = utf8char
-patterns.utf8char = utf8char
-patterns.validutf8 = validutf8char
-patterns.validutf8char = validutf8char
-
-patterns.digit = digit
-patterns.sign = sign
-patterns.cardinal = sign^0 * digit^1
-patterns.integer = sign^0 * digit^1
-patterns.float = sign^0 * digit^0 * P('.') * digit^1
-patterns.cfloat = sign^0 * digit^0 * P(',') * digit^1
-patterns.number = patterns.float + patterns.integer
-patterns.cnumber = patterns.cfloat + patterns.integer
-patterns.oct = P("0") * R("07")^1
-patterns.octal = patterns.oct
-patterns.HEX = P("0x") * R("09","AF")^1
-patterns.hex = P("0x") * R("09","af")^1
-patterns.hexadecimal = P("0x") * R("09","AF","af")^1
-patterns.lowercase = R("az")
-patterns.uppercase = R("AZ")
-patterns.letter = patterns.lowercase + patterns.uppercase
-patterns.space = space
-patterns.tab = P("\t")
-patterns.spaceortab = patterns.space + patterns.tab
-patterns.eol = S("\n\r")
-patterns.spacer = S(" \t\f\v") -- + string.char(0xc2, 0xa0) if we want utf (cf mail roberto)
-patterns.newline = newline
-patterns.emptyline = newline^1
-patterns.nonspacer = 1 - patterns.spacer
-patterns.whitespace = patterns.eol + patterns.spacer
-patterns.nonwhitespace = 1 - patterns.whitespace
-patterns.equal = P("=")
-patterns.comma = P(",")
-patterns.commaspacer = P(",") * patterns.spacer^0
-patterns.period = P(".")
-patterns.colon = P(":")
-patterns.semicolon = P(";")
-patterns.underscore = P("_")
-patterns.escaped = escaped
-patterns.squote = squote
-patterns.dquote = dquote
-patterns.nosquote = (escaped + (1-squote))^0
-patterns.nodquote = (escaped + (1-dquote))^0
-patterns.unsingle = (squote/"") * patterns.nosquote * (squote/"")
-patterns.undouble = (dquote/"") * patterns.nodquote * (dquote/"")
-patterns.unquoted = patterns.undouble + patterns.unsingle -- more often undouble
-patterns.unspacer = ((patterns.spacer^1)/"")^0
-
-patterns.somecontent = (anything - newline - space)^1 -- (utf8char - newline - space)^1
-patterns.beginline = #(1-newline)
-
-local unquoted = Cs(patterns.unquoted * endofstring) -- not C
-
-function string.unquoted(str)
- return match(unquoted,str) or str
-end
-
-
-function lpeg.anywhere(pattern) --slightly adapted from website
- return P { P(pattern) + 1 * V(1) } -- why so complex?
-end
-
-function lpeg.splitter(pattern, action)
- return (((1-P(pattern))^1)/action+1)^0
-end
-
-local splitters_s, splitters_m = { }, { }
-
-local function splitat(separator,single)
- local splitter = (single and splitters_s[separator]) or splitters_m[separator]
- if not splitter then
- separator = P(separator)
- local other = C((1 - separator)^0)
- if single then
- local any = anything
- splitter = other * (separator * C(any^0) + "") -- ?
- splitters_s[separator] = splitter
- else
- splitter = other * (separator * other)^0
- splitters_m[separator] = splitter
- end
- end
- return splitter
-end
-
-lpeg.splitat = splitat
-
-
-local cache = { }
-
-function lpeg.split(separator,str)
- local c = cache[separator]
- if not c then
- c = Ct(splitat(separator))
- cache[separator] = c
- end
- return match(c,str)
-end
-
-function string.split(str,separator)
- local c = cache[separator]
- if not c then
- c = Ct(splitat(separator))
- cache[separator] = c
- end
- return match(c,str)
-end
-
-local spacing = patterns.spacer^0 * newline -- sort of strip
-local empty = spacing * Cc("")
-local nonempty = Cs((1-spacing)^1) * spacing^-1
-local content = (empty + nonempty)^1
-
-patterns.textline = content
-
-
-local linesplitter = Ct(splitat(newline))
-
-patterns.linesplitter = linesplitter
-
-function string.splitlines(str)
- return match(linesplitter,str)
-end
-
-local utflinesplitter = utfbom^-1 * Ct(splitat(newline))
-
-patterns.utflinesplitter = utflinesplitter
-
-function string.utfsplitlines(str)
- return match(utflinesplitter,str)
-end
-
-
-local cache = { }
-
-function lpeg.checkedsplit(separator,str)
- local c = cache[separator]
- if not c then
- separator = P(separator)
- local other = C((1 - separator)^1)
- c = Ct(separator^0 * other * (separator^1 * other)^0)
- cache[separator] = c
- end
- return match(c,str)
-end
-
-function string.checkedsplit(str,separator)
- local c = cache[separator]
- if not c then
- separator = P(separator)
- local other = C((1 - separator)^1)
- c = Ct(separator^0 * other * (separator^1 * other)^0)
- cache[separator] = c
- end
- return match(c,str)
-end
-
-
-local f1 = string.byte
-
-local function f2(s) local c1, c2 = f1(s,1,2) return c1 * 64 + c2 - 12416 end
-local function f3(s) local c1, c2, c3 = f1(s,1,3) return (c1 * 64 + c2) * 64 + c3 - 925824 end
-local function f4(s) local c1, c2, c3, c4 = f1(s,1,4) return ((c1 * 64 + c2) * 64 + c3) * 64 + c4 - 63447168 end
-
-local utf8byte = patterns.utf8one/f1 + patterns.utf8two/f2 + patterns.utf8three/f3 + patterns.utf8four/f4
-
-patterns.utf8byte = utf8byte
-
-
-
-local cache = { }
-
-function lpeg.stripper(str)
- if type(str) == "string" then
- local s = cache[str]
- if not s then
- s = Cs(((S(str)^1)/"" + 1)^0)
- cache[str] = s
- end
- return s
- else
- return Cs(((str^1)/"" + 1)^0)
- end
-end
-
-local cache = { }
-
-function lpeg.keeper(str)
- if type(str) == "string" then
- local s = cache[str]
- if not s then
- s = Cs((((1-S(str))^1)/"" + 1)^0)
- cache[str] = s
- end
- return s
- else
- return Cs((((1-str)^1)/"" + 1)^0)
- end
-end
-
-function lpeg.frontstripper(str) -- or pattern (yet undocumented)
- return (P(str) + P(true)) * Cs(P(1)^0)
-end
-
-function lpeg.endstripper(str) -- or pattern (yet undocumented)
- return Cs((1 - P(str) * P(-1))^0)
-end
-
--- Just for fun I looked at the used bytecode and
--- p = (p and p + pp) or pp gets one more (testset).
-
-function lpeg.replacer(one,two)
- if type(one) == "table" then
- local no = #one
- if no > 0 then
- local p
- for i=1,no do
- local o = one[i]
- local pp = P(o[1]) / o[2]
- if p then
- p = p + pp
- else
- p = pp
- end
- end
- return Cs((p + 1)^0)
- end
- else
- two = two or ""
- return Cs((P(one)/two + 1)^0)
- end
-end
-
-local splitters_f, splitters_s = { }, { }
-
-function lpeg.firstofsplit(separator) -- always return value
- local splitter = splitters_f[separator]
- if not splitter then
- separator = P(separator)
- splitter = C((1 - separator)^0)
- splitters_f[separator] = splitter
- end
- return splitter
-end
-
-function lpeg.secondofsplit(separator) -- nil if not split
- local splitter = splitters_s[separator]
- if not splitter then
- separator = P(separator)
- splitter = (1 - separator)^0 * separator * C(anything^0)
- splitters_s[separator] = splitter
- end
- return splitter
-end
-
-function lpeg.balancer(left,right)
- left, right = P(left), P(right)
- return P { left * ((1 - left - right) + V(1))^0 * right }
-end
-
-
-
-local nany = utf8char/""
-
-function lpeg.counter(pattern)
- pattern = Cs((P(pattern)/" " + nany)^0)
- return function(str)
- return #match(pattern,str)
- end
-end
-
-if utfgmatch then
-
- function lpeg.count(str,what) -- replaces string.count
- if type(what) == "string" then
- local n = 0
- for _ in utfgmatch(str,what) do
- n = n + 1
- end
- return n
- else -- 4 times slower but still faster than / function
- return #match(Cs((P(what)/" " + nany)^0),str)
- end
- end
-
-else
-
- local cache = { }
-
- function lpeg.count(str,what) -- replaces string.count
- if type(what) == "string" then
- local p = cache[what]
- if not p then
- p = Cs((P(what)/" " + nany)^0)
- cache[p] = p
- end
- return #match(p,str)
- else -- 4 times slower but still faster than / function
- return #match(Cs((P(what)/" " + nany)^0),str)
- end
- end
-
-end
-
-local patterns_escapes = { -- also defines in l-string
- ["%"] = "%%",
- ["."] = "%.",
- ["+"] = "%+", ["-"] = "%-", ["*"] = "%*",
- ["["] = "%[", ["]"] = "%]",
- ["("] = "%)", [")"] = "%)",
- -- ["{"] = "%{", ["}"] = "%}"
- -- ["^"] = "%^", ["$"] = "%$",
-}
-
-local simple_escapes = { -- also defines in l-string
- ["-"] = "%-",
- ["."] = "%.",
- ["?"] = ".",
- ["*"] = ".*",
-}
-
-local p = Cs((S("-.+*%()[]") / patterns_escapes + anything)^0)
-local s = Cs((S("-.+*%()[]") / simple_escapes + anything)^0)
-
-function string.escapedpattern(str,simple)
- return match(simple and s or p,str)
-end
-
--- utf extensies
-
-lpeg.UP = lpeg.P
-
-if utfcharacters then
-
- function lpeg.US(str)
- local p
- for uc in utfcharacters(str) do
- if p then
- p = p + P(uc)
- else
- p = P(uc)
- end
- end
- return p
- end
-
-
-elseif utfgmatch then
-
- function lpeg.US(str)
- local p
- for uc in utfgmatch(str,".") do
- if p then
- p = p + P(uc)
- else
- p = P(uc)
- end
- end
- return p
- end
-
-else
-
- function lpeg.US(str)
- local p
- local f = function(uc)
- if p then
- p = p + P(uc)
- else
- p = P(uc)
- end
- end
- match((utf8char/f)^0,str)
- return p
- end
-
-end
-
-local range = Cs(utf8byte) * (Cs(utf8byte) + Cc(false))
-
-local utfchar = unicode and unicode.utf8 and unicode.utf8.char
-
-function lpeg.UR(str,more)
- local first, last
- if type(str) == "number" then
- first = str
- last = more or first
- else
- first, last = match(range,str)
- if not last then
- return P(str)
- end
- end
- if first == last then
- return P(str)
- elseif utfchar and last - first < 8 then -- a somewhat arbitrary criterium
- local p
- for i=first,last do
- if p then
- p = p + P(utfchar(i))
- else
- p = P(utfchar(i))
- end
- end
- return p -- nil when invalid range
- else
- local f = function(b)
- return b >= first and b <= last
- end
- return utf8byte / f -- nil when invalid range
- end
-end
-
-
-
-function lpeg.oneof(list,...) -- lpeg.oneof("elseif","else","if","then")
- if type(list) ~= "table" then
- list = { list, ... }
- end
- -- sort(list) -- longest match first
- local p = P(list[1])
- for l=2,#list do
- p = p + P(list[l])
- end
- return p
-end
-
-function lpeg.is_lpeg(p)
- return p and lpegtype(p) == "pattern"
-end
-
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
if not modules then modules = { } end modules ['l-table'] = {
version = 1.001,
comment = "companion to luat-lib.mkiv",
@@ -675,11 +172,12 @@ local type, next, tostring, tonumber, ipairs, table, string = type, next, tostri
local concat, sort, insert, remove = table.concat, table.sort, table.insert, table.remove
local format, find, gsub, lower, dump, match = string.format, string.find, string.gsub, string.lower, string.dump, string.match
local getmetatable, setmetatable = getmetatable, setmetatable
+local getinfo = debug.getinfo
-- Starting with version 5.2 Lua no longer provide ipairs, which makes
-- sense. As we already used the for loop and # in most places the
-- impact on ConTeXt was not that large; the remaining ipairs already
--- have been replaced. In a similar fashio we also hardly used pairs.
+-- have been replaced. In a similar fashion we also hardly used pairs.
--
-- Just in case, we provide the fallbacks as discussed in Programming
-- in Lua (http://www.lua.org/pil/7.3.html):
@@ -1022,6 +520,8 @@ end
-- problem: there no good number_to_string converter with the best resolution
+local function dummy() end
+
local function do_serialize(root,name,depth,level,indexed)
if level > 0 then
depth = depth .. " "
@@ -1214,19 +714,20 @@ local function do_serialize(root,name,depth,level,indexed)
end
elseif t == "function" then
if functions then
+ local f = getinfo(v).what == "C" and dump(dummy) or dump(v)
+ -- local f = getinfo(v).what == "C" and dump(function(...) return v(...) end) or dump(v)
if tk == "number" then -- or find(k,"^%d+$") then
if hexify then
- handle(format("%s [0x%04X]=loadstring(%q),",depth,k,dump(v)))
+ handle(format("%s [0x%04X]=loadstring(%q),",depth,k,f))
else
- handle(format("%s [%s]=loadstring(%q),",depth,k,dump(v)))
+ handle(format("%s [%s]=loadstring(%q),",depth,k,f))
end
elseif tk == "boolean" then
- handle(format("%s [%s]=loadstring(%q),",depth,tostring(k),dump(v)))
+ handle(format("%s [%s]=loadstring(%q),",depth,tostring(k),f))
elseif noquotes and not reserved[k] and find(k,"^%a[%w%_]*$") then
- handle(format("%s %s=loadstring(%q),",depth,k,dump(v)))
+ handle(format("%s %s=loadstring(%q),",depth,k,f))
else
- -- handle(format("%s [%q]=loadstring(%q),",depth,k,dump(v)))
- handle(format("%s [%q]=loadstring(%q),",depth,k,debug.getinfo(v).what == "C" and "C code" or dump(v)))
+ handle(format("%s [%q]=loadstring(%q),",depth,k,f))
end
end
else
@@ -1571,6 +1072,730 @@ function table.has_one_entry(t)
return t and not next(t,next(t))
end
+-- new
+
+function table.loweredkeys(t) -- maybe utf
+ local l = { }
+ for k, v in next, t do
+ l[lower(k)] = v
+ end
+ return l
+end
+
+-- new, might move (maybe duplicate)
+
+function table.unique(old)
+ local hash = { }
+ local new = { }
+ local n = 0
+ for i=1,#old do
+ local oi = old[i]
+ if not hash[oi] then
+ n = n + 1
+ new[n] = oi
+ hash[oi] = true
+ end
+ end
+ return new
+end
+
+-- function table.sorted(t,...)
+-- table.sort(t,...)
+-- return t -- still sorts in-place
+-- end
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+if not modules then modules = { } end modules ['l-lpeg'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+
+-- a new lpeg fails on a #(1-P(":")) test and really needs a + P(-1)
+
+local lpeg = require("lpeg")
+
+-- tracing (only used when we encounter a problem in integration of lpeg in luatex)
+
+local report = texio and texio.write_nl or print
+
+
+
+
+local type = type
+local byte, char, gmatch = string.byte, string.char, string.gmatch
+
+-- Beware, we predefine a bunch of patterns here and one reason for doing so
+-- is that we get consistent behaviour in some of the visualizers.
+
+lpeg.patterns = lpeg.patterns or { } -- so that we can share
+local patterns = lpeg.patterns
+
+local P, R, S, V, match = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.match
+local Ct, C, Cs, Cc = lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc
+local lpegtype = lpeg.type
+
+local utfcharacters = string.utfcharacters
+local utfgmatch = unicode and unicode.utf8.gmatch
+
+local anything = P(1)
+local endofstring = P(-1)
+local alwaysmatched = P(true)
+
+patterns.anything = anything
+patterns.endofstring = endofstring
+patterns.beginofstring = alwaysmatched
+patterns.alwaysmatched = alwaysmatched
+
+local digit, sign = R('09'), S('+-')
+local cr, lf, crlf = P("\r"), P("\n"), P("\r\n")
+local newline = crlf + S("\r\n") -- cr + lf
+local escaped = P("\\") * anything
+local squote = P("'")
+local dquote = P('"')
+local space = P(" ")
+
+local utfbom_32_be = P('\000\000\254\255')
+local utfbom_32_le = P('\255\254\000\000')
+local utfbom_16_be = P('\255\254')
+local utfbom_16_le = P('\254\255')
+local utfbom_8 = P('\239\187\191')
+local utfbom = utfbom_32_be + utfbom_32_le
+ + utfbom_16_be + utfbom_16_le
+ + utfbom_8
+local utftype = utfbom_32_be / "utf-32-be" + utfbom_32_le / "utf-32-le"
+ + utfbom_16_be / "utf-16-be" + utfbom_16_le / "utf-16-le"
+ + utfbom_8 / "utf-8" + alwaysmatched / "unknown"
+
+local utf8next = R("\128\191")
+
+patterns.utf8one = R("\000\127")
+patterns.utf8two = R("\194\223") * utf8next
+patterns.utf8three = R("\224\239") * utf8next * utf8next
+patterns.utf8four = R("\240\244") * utf8next * utf8next * utf8next
+patterns.utfbom = utfbom
+patterns.utftype = utftype
+
+local utf8char = patterns.utf8one + patterns.utf8two + patterns.utf8three + patterns.utf8four
+local validutf8char = utf8char^0 * endofstring * Cc(true) + Cc(false)
+
+patterns.utf8 = utf8char
+patterns.utf8char = utf8char
+patterns.validutf8 = validutf8char
+patterns.validutf8char = validutf8char
+
+patterns.digit = digit
+patterns.sign = sign
+patterns.cardinal = sign^0 * digit^1
+patterns.integer = sign^0 * digit^1
+patterns.float = sign^0 * digit^0 * P('.') * digit^1
+patterns.cfloat = sign^0 * digit^0 * P(',') * digit^1
+patterns.number = patterns.float + patterns.integer
+patterns.cnumber = patterns.cfloat + patterns.integer
+patterns.oct = P("0") * R("07")^1
+patterns.octal = patterns.oct
+patterns.HEX = P("0x") * R("09","AF")^1
+patterns.hex = P("0x") * R("09","af")^1
+patterns.hexadecimal = P("0x") * R("09","AF","af")^1
+patterns.lowercase = R("az")
+patterns.uppercase = R("AZ")
+patterns.letter = patterns.lowercase + patterns.uppercase
+patterns.space = space
+patterns.tab = P("\t")
+patterns.spaceortab = patterns.space + patterns.tab
+patterns.eol = S("\n\r")
+patterns.spacer = S(" \t\f\v") -- + char(0xc2, 0xa0) if we want utf (cf mail roberto)
+patterns.newline = newline
+patterns.emptyline = newline^1
+patterns.nonspacer = 1 - patterns.spacer
+patterns.whitespace = patterns.eol + patterns.spacer
+patterns.nonwhitespace = 1 - patterns.whitespace
+patterns.equal = P("=")
+patterns.comma = P(",")
+patterns.commaspacer = P(",") * patterns.spacer^0
+patterns.period = P(".")
+patterns.colon = P(":")
+patterns.semicolon = P(";")
+patterns.underscore = P("_")
+patterns.escaped = escaped
+patterns.squote = squote
+patterns.dquote = dquote
+patterns.nosquote = (escaped + (1-squote))^0
+patterns.nodquote = (escaped + (1-dquote))^0
+patterns.unsingle = (squote/"") * patterns.nosquote * (squote/"")
+patterns.undouble = (dquote/"") * patterns.nodquote * (dquote/"")
+patterns.unquoted = patterns.undouble + patterns.unsingle -- more often undouble
+patterns.unspacer = ((patterns.spacer^1)/"")^0
+
+patterns.somecontent = (anything - newline - space)^1 -- (utf8char - newline - space)^1
+patterns.beginline = #(1-newline)
+
+-- print(string.unquoted("test"))
+-- print(string.unquoted([["t\"est"]]))
+-- print(string.unquoted([["t\"est"x]]))
+-- print(string.unquoted("\'test\'"))
+-- print(string.unquoted('"test"'))
+-- print(string.unquoted('"test"'))
+
+function lpeg.anywhere(pattern) --slightly adapted from website
+ return P { P(pattern) + 1 * V(1) } -- why so complex?
+end
+
+function lpeg.splitter(pattern, action)
+ return (((1-P(pattern))^1)/action+1)^0
+end
+
+function lpeg.tsplitter(pattern, action)
+ return Ct((((1-P(pattern))^1)/action+1)^0)
+end
+
+-- problem: separator can be lpeg and that does not hash too well, but
+-- it's quite okay as the key is then not garbage collected
+
+local splitters_s, splitters_m, splitters_t = { }, { }, { }
+
+local function splitat(separator,single)
+ local splitter = (single and splitters_s[separator]) or splitters_m[separator]
+ if not splitter then
+ separator = P(separator)
+ local other = C((1 - separator)^0)
+ if single then
+ local any = anything
+ splitter = other * (separator * C(any^0) + "") -- ?
+ splitters_s[separator] = splitter
+ else
+ splitter = other * (separator * other)^0
+ splitters_m[separator] = splitter
+ end
+ end
+ return splitter
+end
+
+local function tsplitat(separator)
+ local splitter = splitters_t[separator]
+ if not splitter then
+ splitter = Ct(splitat(separator))
+ splitters_t[separator] = splitter
+ end
+ return splitter
+end
+
+lpeg.splitat = splitat
+lpeg.tsplitat = tsplitat
+
+function string.splitup(str,separator)
+ if not separator then
+ separator = ","
+ end
+ return match(splitters_m[separator] or splitat(separator),str)
+end
+
+
+local cache = { }
+
+function lpeg.split(separator,str)
+ local c = cache[separator]
+ if not c then
+ c = tsplitat(separator)
+ cache[separator] = c
+ end
+ return match(c,str)
+end
+
+function string.split(str,separator)
+ local c = cache[separator]
+ if not c then
+ c = tsplitat(separator)
+ cache[separator] = c
+ end
+ return match(c,str)
+end
+
+local spacing = patterns.spacer^0 * newline -- sort of strip
+local empty = spacing * Cc("")
+local nonempty = Cs((1-spacing)^1) * spacing^-1
+local content = (empty + nonempty)^1
+
+patterns.textline = content
+
+
+local linesplitter = tsplitat(newline)
+
+patterns.linesplitter = linesplitter
+
+function string.splitlines(str)
+ return match(linesplitter,str)
+end
+
+local utflinesplitter = utfbom^-1 * tsplitat(newline)
+
+patterns.utflinesplitter = utflinesplitter
+
+function string.utfsplitlines(str)
+ return match(utflinesplitter,str or "")
+end
+
+
+local cache = { }
+
+function lpeg.checkedsplit(separator,str)
+ local c = cache[separator]
+ if not c then
+ separator = P(separator)
+ local other = C((1 - separator)^1)
+ c = Ct(separator^0 * other * (separator^1 * other)^0)
+ cache[separator] = c
+ end
+ return match(c,str)
+end
+
+function string.checkedsplit(str,separator)
+ local c = cache[separator]
+ if not c then
+ separator = P(separator)
+ local other = C((1 - separator)^1)
+ c = Ct(separator^0 * other * (separator^1 * other)^0)
+ cache[separator] = c
+ end
+ return match(c,str)
+end
+
+
+local function f2(s) local c1, c2 = byte(s,1,2) return c1 * 64 + c2 - 12416 end
+local function f3(s) local c1, c2, c3 = byte(s,1,3) return (c1 * 64 + c2) * 64 + c3 - 925824 end
+local function f4(s) local c1, c2, c3, c4 = byte(s,1,4) return ((c1 * 64 + c2) * 64 + c3) * 64 + c4 - 63447168 end
+
+local utf8byte = patterns.utf8one/byte + patterns.utf8two/f2 + patterns.utf8three/f3 + patterns.utf8four/f4
+
+patterns.utf8byte = utf8byte
+
+
+
+local cache = { }
+
+function lpeg.stripper(str)
+ if type(str) == "string" then
+ local s = cache[str]
+ if not s then
+ s = Cs(((S(str)^1)/"" + 1)^0)
+ cache[str] = s
+ end
+ return s
+ else
+ return Cs(((str^1)/"" + 1)^0)
+ end
+end
+
+local cache = { }
+
+function lpeg.keeper(str)
+ if type(str) == "string" then
+ local s = cache[str]
+ if not s then
+ s = Cs((((1-S(str))^1)/"" + 1)^0)
+ cache[str] = s
+ end
+ return s
+ else
+ return Cs((((1-str)^1)/"" + 1)^0)
+ end
+end
+
+function lpeg.frontstripper(str) -- or pattern (yet undocumented)
+ return (P(str) + P(true)) * Cs(P(1)^0)
+end
+
+function lpeg.endstripper(str) -- or pattern (yet undocumented)
+ return Cs((1 - P(str) * P(-1))^0)
+end
+
+-- Just for fun I looked at the used bytecode and
+-- p = (p and p + pp) or pp gets one more (testset).
+
+function lpeg.replacer(one,two)
+ if type(one) == "table" then
+ local no = #one
+ if no > 0 then
+ local p
+ for i=1,no do
+ local o = one[i]
+ local pp = P(o[1]) / o[2]
+ if p then
+ p = p + pp
+ else
+ p = pp
+ end
+ end
+ return Cs((p + 1)^0)
+ end
+ else
+ two = two or ""
+ return Cs((P(one)/two + 1)^0)
+ end
+end
+
+local splitters_f, splitters_s = { }, { }
+
+function lpeg.firstofsplit(separator) -- always return value
+ local splitter = splitters_f[separator]
+ if not splitter then
+ separator = P(separator)
+ splitter = C((1 - separator)^0)
+ splitters_f[separator] = splitter
+ end
+ return splitter
+end
+
+function lpeg.secondofsplit(separator) -- nil if not split
+ local splitter = splitters_s[separator]
+ if not splitter then
+ separator = P(separator)
+ splitter = (1 - separator)^0 * separator * C(anything^0)
+ splitters_s[separator] = splitter
+ end
+ return splitter
+end
+
+function lpeg.balancer(left,right)
+ left, right = P(left), P(right)
+ return P { left * ((1 - left - right) + V(1))^0 * right }
+end
+
+
+
+local nany = utf8char/""
+
+function lpeg.counter(pattern)
+ pattern = Cs((P(pattern)/" " + nany)^0)
+ return function(str)
+ return #match(pattern,str)
+ end
+end
+
+if utfgmatch then
+
+ function lpeg.count(str,what) -- replaces string.count
+ if type(what) == "string" then
+ local n = 0
+ for _ in utfgmatch(str,what) do
+ n = n + 1
+ end
+ return n
+ else -- 4 times slower but still faster than / function
+ return #match(Cs((P(what)/" " + nany)^0),str)
+ end
+ end
+
+else
+
+ local cache = { }
+
+ function lpeg.count(str,what) -- replaces string.count
+ if type(what) == "string" then
+ local p = cache[what]
+ if not p then
+ p = Cs((P(what)/" " + nany)^0)
+ cache[p] = p
+ end
+ return #match(p,str)
+ else -- 4 times slower but still faster than / function
+ return #match(Cs((P(what)/" " + nany)^0),str)
+ end
+ end
+
+end
+
+local patterns_escapes = { -- also defines in l-string
+ ["%"] = "%%",
+ ["."] = "%.",
+ ["+"] = "%+", ["-"] = "%-", ["*"] = "%*",
+ ["["] = "%[", ["]"] = "%]",
+ ["("] = "%)", [")"] = "%)",
+ -- ["{"] = "%{", ["}"] = "%}"
+ -- ["^"] = "%^", ["$"] = "%$",
+}
+
+local simple_escapes = { -- also defines in l-string
+ ["-"] = "%-",
+ ["."] = "%.",
+ ["?"] = ".",
+ ["*"] = ".*",
+}
+
+local p = Cs((S("-.+*%()[]") / patterns_escapes + anything)^0)
+local s = Cs((S("-.+*%()[]") / simple_escapes + anything)^0)
+
+function string.escapedpattern(str,simple)
+ return match(simple and s or p,str)
+end
+
+-- utf extensies
+
+lpeg.UP = lpeg.P
+
+if utfcharacters then
+
+ function lpeg.US(str)
+ local p
+ for uc in utfcharacters(str) do
+ if p then
+ p = p + P(uc)
+ else
+ p = P(uc)
+ end
+ end
+ return p
+ end
+
+
+elseif utfgmatch then
+
+ function lpeg.US(str)
+ local p
+ for uc in utfgmatch(str,".") do
+ if p then
+ p = p + P(uc)
+ else
+ p = P(uc)
+ end
+ end
+ return p
+ end
+
+else
+
+ function lpeg.US(str)
+ local p
+ local f = function(uc)
+ if p then
+ p = p + P(uc)
+ else
+ p = P(uc)
+ end
+ end
+ match((utf8char/f)^0,str)
+ return p
+ end
+
+end
+
+local range = Cs(utf8byte) * (Cs(utf8byte) + Cc(false))
+
+local utfchar = unicode and unicode.utf8 and unicode.utf8.char
+
+function lpeg.UR(str,more)
+ local first, last
+ if type(str) == "number" then
+ first = str
+ last = more or first
+ else
+ first, last = match(range,str)
+ if not last then
+ return P(str)
+ end
+ end
+ if first == last then
+ return P(str)
+ elseif utfchar and last - first < 8 then -- a somewhat arbitrary criterium
+ local p
+ for i=first,last do
+ if p then
+ p = p + P(utfchar(i))
+ else
+ p = P(utfchar(i))
+ end
+ end
+ return p -- nil when invalid range
+ else
+ local f = function(b)
+ return b >= first and b <= last
+ end
+ return utf8byte / f -- nil when invalid range
+ end
+end
+
+
+
+function lpeg.oneof(list,...) -- lpeg.oneof("elseif","else","if","then")
+ if type(list) ~= "table" then
+ list = { list, ... }
+ end
+ -- sort(list) -- longest match first
+ local p = P(list[1])
+ for l=2,#list do
+ p = p + P(list[l])
+ end
+ return p
+end
+
+function lpeg.is_lpeg(p)
+ return p and lpegtype(p) == "pattern"
+end
+
+-- For the moment here, but it might move to utilities. Beware, we need to
+-- have the longest keyword first, so 'aaa' comes before 'aa', which is why we
+-- loop back from the end cq. prepend.
+
+local sort, fastcopy, sortedkeys = table.sort, table.fastcopy, table.sortedkeys -- dependency!
+
+function lpeg.append(list,pp,delayed,checked)
+ local p = pp
+ if #list > 0 then
+ local keys = fastcopy(list)
+ sort(keys)
+ for i=#keys,1,-1 do
+ local k = keys[i]
+ if p then
+ p = P(k) + p
+ else
+ p = P(k)
+ end
+ end
+ elseif delayed then -- hm, it looks like the lpeg parser resolves anyway
+ local keys = sortedkeys(list)
+ if p then
+ for i=1,#keys,1 do
+ local k = keys[i]
+ local v = list[k]
+ p = P(k)/list + p
+ end
+ else
+ for i=1,#keys do
+ local k = keys[i]
+ local v = list[k]
+ if p then
+ p = P(k) + p
+ else
+ p = P(k)
+ end
+ end
+ if p then
+ p = p / list
+ end
+ end
+ elseif checked then
+ -- problem: substitution gives a capture
+ local keys = sortedkeys(list)
+ for i=1,#keys do
+ local k = keys[i]
+ local v = list[k]
+ if p then
+ if k == v then
+ p = P(k) + p
+ else
+ p = P(k)/v + p
+ end
+ else
+ if k == v then
+ p = P(k)
+ else
+ p = P(k)/v
+ end
+ end
+ end
+ else
+ local keys = sortedkeys(list)
+ for i=1,#keys do
+ local k = keys[i]
+ local v = list[k]
+ if p then
+ p = P(k)/v + p
+ else
+ p = P(k)/v
+ end
+ end
+ end
+ return p
+end
+
+-- inspect(lpeg.append({ a = "1", aa = "1", aaa = "1" } ,nil,true))
+-- inspect(lpeg.append({ ["degree celsius"] = "1", celsius = "1", degree = "1" } ,nil,true))
+
+-- function lpeg.exact_match(words,case_insensitive)
+-- local pattern = concat(words)
+-- if case_insensitive then
+-- local pattern = S(upper(characters)) + S(lower(characters))
+-- local list = { }
+-- for i=1,#words do
+-- list[lower(words[i])] = true
+-- end
+-- return Cmt(pattern^1, function(_,i,s)
+-- return list[lower(s)] and i
+-- end)
+-- else
+-- local pattern = S(concat(words))
+-- local list = { }
+-- for i=1,#words do
+-- list[words[i]] = true
+-- end
+-- return Cmt(pattern^1, function(_,i,s)
+-- return list[s] and i
+-- end)
+-- end
+-- end
+
+-- experiment:
+
+local function make(t)
+ local p
+-- for k, v in next, t do
+ for k, v in table.sortedhash(t) do
+ if not p then
+ if next(v) then
+ p = P(k) * make(v)
+ else
+ p = P(k)
+ end
+ else
+ if next(v) then
+ p = p + P(k) * make(v)
+ else
+ p = p + P(k)
+ end
+ end
+ end
+ return p
+end
+
+function lpeg.utfchartabletopattern(list)
+ local tree = { }
+ for i=1,#list do
+ local t = tree
+ for c in gmatch(list[i],".") do
+ if not t[c] then
+ t[c] = { }
+ end
+ t = t[c]
+ end
+ end
+ return make(tree)
+end
+
+-- inspect ( lpeg.utfchartabletopattern {
+-- utfchar(0x00A0), -- nbsp
+-- utfchar(0x2000), -- enquad
+-- utfchar(0x2001), -- emquad
+-- utfchar(0x2002), -- enspace
+-- utfchar(0x2003), -- emspace
+-- utfchar(0x2004), -- threeperemspace
+-- utfchar(0x2005), -- fourperemspace
+-- utfchar(0x2006), -- sixperemspace
+-- utfchar(0x2007), -- figurespace
+-- utfchar(0x2008), -- punctuationspace
+-- utfchar(0x2009), -- breakablethinspace
+-- utfchar(0x200A), -- hairspace
+-- utfchar(0x200B), -- zerowidthspace
+-- utfchar(0x202F), -- narrownobreakspace
+-- utfchar(0x205F), -- math thinspace
+-- } )
+
end -- of closure
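
A minimal usage sketch of the splitter helpers defined in the l-lpeg block above (illustrative only, assuming that code is loaded; the expected results follow from the patterns, not from running this revision):

local lpegmatch  = lpeg.match
local commasplit = lpeg.tsplitat(",")               -- cached, table-producing splitter
local fields     = lpegmatch(commasplit,"a,b,,c")   -- expected: { "a", "b", "", "c" }
local words      = string.split("one two three"," ")   -- same machinery, string-side entry point
local lines      = string.splitlines("first\nsecond")  -- uses patterns.linesplitter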
@@ -1831,6 +2056,11 @@ function io.readstring(f,n,m)
return str
end
+--
+
+if not io.i_limiter then function io.i_limiter() end end -- dummy so we can test safely
+if not io.o_limiter then function io.o_limiter() end end -- dummy so we can test safely
+
end -- of closure
@@ -2061,7 +2291,7 @@ if not modules then modules = { } end modules ['l-os'] = {
-- maybe build io.flush in os.execute
local os = os
-local find, format, gsub, upper = string.find, string.format, string.gsub, string.upper
+local find, format, gsub, upper, gmatch = string.find, string.format, string.gsub, string.upper, string.gmatch
local concat = table.concat
local random, ceil = math.random, math.ceil
local rawget, rawset, type, getmetatable, setmetatable, tonumber = rawget, rawset, type, getmetatable, setmetatable, tonumber
@@ -2167,9 +2397,9 @@ os.type = os.type or (io.pathseparator == ";" and "windows") or "unix"
os.name = os.name or (os.type == "windows" and "mswin" ) or "linux"
if os.type == "windows" then
- os.libsuffix, os.binsuffix = 'dll', 'exe'
+ os.libsuffix, os.binsuffix, os.binsuffixes = 'dll', 'exe', { 'exe', 'cmd', 'bat' }
else
- os.libsuffix, os.binsuffix = 'so', ''
+ os.libsuffix, os.binsuffix, os.binsuffixes = 'so', '', { '' }
end
function os.launch(str)
@@ -2400,6 +2630,39 @@ function os.timezone(delta)
end
end
+local memory = { }
+
+local function which(filename)
+ local fullname = memory[filename]
+ if fullname == nil then
+ local suffix = file.suffix(filename)
+ local suffixes = suffix == "" and os.binsuffixes or { suffix }
+ for directory in gmatch(os.getenv("PATH"),"[^" .. io.pathseparator .."]+") do
+ local df = file.join(directory,filename)
+ for i=1,#suffixes do
+ local dfs = file.addsuffix(df,suffixes[i])
+ if io.exists(dfs) then
+ fullname = dfs
+ break
+ end
+ end
+ end
+ if not fullname then
+ fullname = false
+ end
+ memory[filename] = fullname
+ end
+ return fullname
+end
+
+os.which = which
+os.where = which
+
+-- print(os.which("inkscape.exe"))
+-- print(os.which("inkscape"))
+-- print(os.which("gs.exe"))
+-- print(os.which("ps2pdf"))
+
end -- of closure
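
One note on the os.which helper added above, as a hedged sketch rather than part of the diff: misses are memoized as false (not nil), so looking up a missing binary only scans PATH once, and a bare name is tried with each entry of os.binsuffixes on Windows.

local missing = os.which("some-missing-tool")  -- hypothetical name; scans PATH once, caches false
local again   = os.which("some-missing-tool")  -- answered from the memo table, still false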
@@ -2499,10 +2762,10 @@ end
local trick_1 = char(1)
local trick_2 = "^" .. trick_1 .. "/+"
-function file.join(...)
+function file.join(...) -- rather dirty
local lst = { ... }
local a, b = lst[1], lst[2]
- if a == "" then
+ if not a or a == "" then -- not a added
lst[1] = trick_1
elseif b and find(a,"^/+$") and find(b,"^/") then
lst[1] = ""
@@ -2523,6 +2786,15 @@ function file.join(...)
end
+-- We should be able to use:
+--
+-- function file.is_writable(name)
+-- local a = attributes(name) or attributes(dirname(name,"."))
+-- return a and sub(a.permissions,2,2) == "w"
+-- end
+--
+-- But after some testing Taco and I came up with:
+
function file.is_writable(name)
if lfs.isdir(name) then
name = name .. "/m_t_x_t_e_s_t.tmp"
@@ -2532,12 +2804,17 @@ function file.is_writable(name)
os.remove(name)
return true
end
- else
- local existing = lfs.isfile(name)
- f = io.open(name,"ab")
+ elseif lfs.isfile(name) then
+ local f = io.open(name,"ab")
if f then
f:close()
- if not existing then os.remove(name) end
+ return true
+ end
+ else
+ local f = io.open(name,"ab")
+ if f then
+ f:close()
+ os.remove(name)
return true
end
end
@@ -2594,7 +2871,7 @@ function file.collapsepath(str,anchor)
if element == '.' then
-- do nothing
elseif element == '..' then
- local n = i -1
+ local n = i - 1
while n > 0 do
local element = oldelements[n]
if element ~= '..' and element ~= '.' then
@@ -2690,10 +2967,42 @@ local path = C(((1-slash)^0 * slash)^0)
local suffix = period * C(P(1-period)^0 * P(-1))
local base = C((1-suffix)^0)
-local pattern = (drive + Cc("")) * (path + Cc("")) * (base + Cc("")) * (suffix + Cc(""))
+drive = drive + Cc("")
+path = path + Cc("")
+base = base + Cc("")
+suffix = suffix + Cc("")
-function file.splitname(str) -- returns drive, path, base, suffix
- return lpegmatch(pattern,str)
+local pattern_a = drive * path * base * suffix
+local pattern_b = path * base * suffix
+local pattern_c = C(drive * path) * C(base * suffix)
+
+function file.splitname(str,splitdrive)
+ if splitdrive then
+ return lpegmatch(pattern_a,str) -- returns drive, path, base, suffix
+ else
+ return lpegmatch(pattern_b,str) -- returns path, base, suffix
+ end
+end
+
+function file.nametotable(str,splitdrive) -- returns table
+ local path, drive, subpath, name, base, suffix = lpegmatch(pattern_c,str)
+ if splitdrive then
+ return {
+ path = path,
+ drive = drive,
+ subpath = subpath,
+ name = name,
+ base = base,
+ suffix = suffix,
+ }
+ else
+ return {
+ path = path,
+ name = name,
+ base = base,
+ suffix = suffix,
+ }
+ end
end
-- function test(t) for k, v in next, t do print(v, "=>", file.splitname(v)) end end
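
A hedged sketch of the new splitname/nametotable interface above (the exact drive handling depends on patterns defined earlier in this file, so only the plain-path case is shown):

local path, base, suffix = file.splitname("tex/context/base/core-uti.lua")
-- expected: path "tex/context/base/", base "core-uti", suffix "lua"
local t = file.nametotable("core-uti.lua")
-- expected: t.name "core-uti.lua", t.base "core-uti", t.suffix "lua"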
@@ -2828,12 +3137,19 @@ local nothing = Cc("")
local escaped = (plus / " ") + (percent * C(hexdigit * hexdigit) / tochar)
-- we assume schemes with more than 1 character (in order to avoid problems with windows disks)
+-- we also assume that when we have a scheme, we also have an authority
+
+local schemestr = Cs((escaped+(1-colon-slash-qmark-hash))^2)
+local authoritystr = Cs((escaped+(1- slash-qmark-hash))^0)
+local pathstr = Cs((escaped+(1- qmark-hash))^0)
+local querystr = Cs((escaped+(1- hash))^0)
+local fragmentstr = Cs((escaped+(1- endofstring))^0)
-local scheme = Cs((escaped+(1-colon-slash-qmark-hash))^2) * colon * slash * slash + nothing
-local authority = Cs((escaped+(1- slash-qmark-hash))^0) + nothing
-local path = slash * Cs((escaped+(1- qmark-hash))^0) + nothing
-local query = qmark * Cs((escaped+(1- hash))^0) + nothing
-local fragment = hash * Cs((escaped+(1- endofstring))^0) + nothing
+local scheme = schemestr * colon + nothing
+local authority = slash * slash * authoritystr + nothing
+local path = slash * pathstr + nothing
+local query = qmark * querystr + nothing
+local fragment = hash * fragmentstr + nothing
local validurl = scheme * authority * path * query * fragment
local parser = Ct(validurl)
@@ -2854,11 +3170,14 @@ local function split(str)
return (type(str) == "string" and lpegmatch(parser,str)) or str
end
+local isscheme = schemestr * colon * slash * slash -- this test also assumes authority
+
local function hasscheme(str)
- local scheme = lpegmatch(scheme,str) -- at least one character
- return scheme and scheme ~= ""
+ local scheme = lpegmatch(isscheme,str) -- at least one character
+ return scheme ~= "" and scheme or false
end
+
-- todo: cache them
local rootletter = R("az","AZ")
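
An illustrative sketch for the rewritten URL splitting above (assuming the split and hasscheme locals are exported on the url table, as in other revisions of l-url; results are what the patterns imply, not verified output):

print(url.hasscheme("http://www.pragma-ade.com/index.html"))  -- expected: "http"
print(url.hasscheme("tex/context/base"))                      -- expected: false
-- url.split("http://host/path?query#frag") should capture, in order:
-- "http", "host", "path", "query", "frag"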
@@ -3007,8 +3326,6 @@ local attributes = lfs.attributes
local walkdir = lfs.dir
local isdir = lfs.isdir
local isfile = lfs.isfile
-local mkdir = lfs.mkdir
-local chdir = lfs.chdir
local currentdir = lfs.currentdir
-- handy
@@ -3056,13 +3373,13 @@ local function collectpattern(path,patt,recurse,result)
local ok, scanner
result = result or { }
if path == "/" then
- ok, scanner = xpcall(function() return walkdir(path..".") end, function() end) -- kepler safe
+ ok, scanner, first = xpcall(function() return walkdir(path..".") end, function() end) -- kepler safe
else
- ok, scanner = xpcall(function() return walkdir(path) end, function() end) -- kepler safe
+ ok, scanner, first = xpcall(function() return walkdir(path) end, function() end) -- kepler safe
end
if ok and type(scanner) == "function" then
if not find(path,"/$") then path = path .. '/' end
- for name in scanner do
+ for name in scanner, first do
local full = path .. name
local attr = attributes(full)
local mode = attr.mode
@@ -3245,7 +3562,7 @@ if onwindows then
pth = pth .. "/" .. s
end
if make_indeed and not isdir(pth) then
- mkdir(pth)
+ lfs.mkdir(pth)
end
end
return pth, (isdir(pth) == true)
@@ -3277,7 +3594,7 @@ else
pth = pth .. "/" .. s
end
if make_indeed and not first and not isdir(pth) then
- mkdir(pth)
+ lfs.mkdir(pth)
end
end
else
@@ -3285,7 +3602,7 @@ else
for s in gmatch(str,"[^/]+") do
pth = pth .. "/" .. s
if make_indeed and not isdir(pth) then
- mkdir(pth)
+ lfs.mkdir(pth)
end
end
end
@@ -3313,10 +3630,10 @@ if onwindows then
first, last = match(str,"^([a-zA-Z]:)(.*)$")
if first and not find(last,"^/") then
local d = currentdir()
- if chdir(first) then
+ if lfs.chdir(first) then
first = dir.current()
end
- chdir(d)
+ lfs.chdir(d)
end
end
if not first then
@@ -3381,10 +3698,6 @@ local type, tonumber = type, tonumber
boolean = boolean or { }
local boolean = boolean
--- function boolean.tonumber(b)
--- return b and 1 or 0 -- test and test and return or return
--- end
-
function boolean.tonumber(b)
if b then return 1 else return 0 end -- test and return or return
end
@@ -3545,7 +3858,7 @@ local function utf16_to_utf8_be(t)
if right then
local now = 256*left + right
if more > 0 then
- now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000
+ now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000 -- the 0x10000 smells wrong
more = 0
r = r + 1
result[r] = utfchar(now)
@@ -3573,7 +3886,7 @@ local function utf16_to_utf8_le(t)
if right then
local now = 256*right + left
if more > 0 then
- now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000
+ now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000 -- the 0x10000 smells wrong
more = 0
r = r + 1
result[r] = utfchar(now)
@@ -3590,14 +3903,14 @@ local function utf16_to_utf8_le(t)
return t
end
-local function utf32_to_utf8_be(str)
+local function utf32_to_utf8_be(t)
if type(t) == "string" then
- t = utfsplitlines(str)
+ t = utfsplitlines(t)
end
local result = { } -- we reuse result
for i=1,#t do
local r, more = 0, -1
- for a,b in bytepairs(str) do
+ for a,b in bytepairs(t[i]) do
if a and b then
if more < 0 then
more = 256*256*256*a + 256*256*b
@@ -3612,17 +3925,17 @@ local function utf32_to_utf8_be(str)
end
t[i] = concat(result,"",1,r)
end
- return result
+ return t
end
-local function utf32_to_utf8_le(str)
+local function utf32_to_utf8_le(t)
if type(t) == "string" then
- t = utfsplitlines(str)
+ t = utfsplitlines(t)
end
local result = { } -- we reuse result
for i=1,#t do
local r, more = 0, -1
- for a,b in bytepairs(str) do
+ for a,b in bytepairs(t[i]) do
if a and b then
if more < 0 then
more = 256*b + a
@@ -3637,7 +3950,7 @@ local function utf32_to_utf8_le(str)
end
t[i] = concat(result,"",1,r)
end
- return result
+ return t
end
unicode.utf32_to_utf8_be = utf32_to_utf8_be
@@ -3706,15 +4019,67 @@ end
local lpegmatch = lpeg.match
-local utftype = lpeg.patterns.utftype
+local patterns = lpeg.patterns
+local utftype = patterns.utftype
function unicode.filetype(data)
return data and lpegmatch(utftype,data) or "unknown"
end
+local toentities = lpeg.Cs (
+ (
+ patterns.utf8one
+ + (
+ patterns.utf8two
+ + patterns.utf8three
+ + patterns.utf8four
+ ) / function(s) local b = utfbyte(s) if b < 127 then return s else return format("&#%X;",b) end end
+ )^0
+)
+
+patterns.toentities = toentities
+
+function utf.toentities(str)
+ return lpegmatch(toentities,str)
+end
+
+local P, C, R, Cs = lpeg.P, lpeg.C, lpeg.R, lpeg.Cs
+
+local one = P(1)
+local two = C(1) * C(1)
+local four = C(R(utfchar(0xD8),utfchar(0xFF))) * C(1) * C(1) * C(1)
+
+local pattern = P("\254\255") * Cs( (
+ four / function(a,b,c,d)
+ local ab = 0xFF * byte(a) + byte(b)
+ local cd = 0xFF * byte(c) + byte(d)
+ return utfchar((ab-0xD800)*0x400 + (cd-0xDC00) + 0x10000)
+ end
+ + two / function(a,b)
+ return utfchar(byte(a)*256 + byte(b))
+ end
+ + one
+ )^1 )
+ + P("\255\254") * Cs( (
+ four / function(b,a,d,c)
+ local ab = 0xFF * byte(a) + byte(b)
+ local cd = 0xFF * byte(c) + byte(d)
+ return utfchar((ab-0xD800)*0x400 + (cd-0xDC00) + 0x10000)
+ end
+ + two / function(b,a)
+ return utfchar(byte(a)*256 + byte(b))
+ end
+ + one
+ )^1 )
+
+function string.toutf(s)
+ return lpegmatch(pattern,s) or s -- todo: utf32
+end
+
+
end -- of closure
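
Two hedged examples for the additions in the block above, utf.toentities and string.toutf (byte strings written out explicitly; expected values inferred from the patterns, not taken from the repository):

-- non-ascii utf-8 characters become numeric entities:
-- utf.toentities("année")  -- expected: "ann&#E9;e"
-- utf-16 input starting with a big-endian BOM (FE FF) is folded back to utf-8:
local s = string.toutf("\254\255\000\065\000\066")  -- expected: "AB"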
do -- create closure to overcome 200 locals limit
@@ -3750,8 +4115,8 @@ if not math.sind then
end
if not math.odd then
- function math.odd (n) return n % 2 == 0 end
- function math.even(n) return n % 2 ~= 0 end
+ function math.odd (n) return n % 2 ~= 0 end
+ function math.even(n) return n % 2 == 0 end
end
@@ -3771,9 +4136,10 @@ utilities = utilities or {}
utilities.tables = utilities.tables or { }
local tables = utilities.tables
-local format, gmatch = string.format, string.gmatch
+local format, gmatch, rep = string.format, string.gmatch, string.rep
local concat, insert, remove = table.concat, table.insert, table.remove
local setmetatable, getmetatable, tonumber, tostring = setmetatable, getmetatable, tonumber, tostring
+local type, next, rawset, tonumber = type, next, rawset, tonumber
function tables.definetable(target) -- defines undefined tables
local composed, t, n = nil, { }, 0
@@ -3789,14 +4155,31 @@ function tables.definetable(target) -- defines undefined tables
return concat(t,"\n")
end
-function tables.accesstable(target)
- local t = _G
+function tables.accesstable(target,root)
+ local t = root or _G
for name in gmatch(target,"([^%.]+)") do
t = t[name]
+ if not t then
+ return
+ end
end
return t
end
+function tables.migratetable(target,v,root)
+ local t = root or _G
+ local names = string.split(target,".")
+ for i=1,#names-1 do
+ local name = names[i]
+ t[name] = t[name] or { }
+ t = t[name]
+ if not t then
+ return
+ end
+ end
+ t[names[#names]] = v
+end
+
function tables.removevalue(t,value) -- todo: n
if value then
for i=1,#t do
@@ -3840,13 +4223,19 @@ end
-- experimental
-local function toxml(t,d,result)
+local function toxml(t,d,result,step)
for k, v in table.sortedpairs(t) do
if type(v) == "table" then
- result[#result+1] = format("%s<%s>",d,k)
- toxml(v,d.." ",result)
- result[#result+1] = format("%s</%s>",d,k)
- elseif tonumber(k) then
+ if type(k) == "number" then
+ result[#result+1] = format("%s<entry n='%s'>",d,k)
+ toxml(v,d..step,result,step)
+ result[#result+1] = format("%s</entry>",d,k)
+ else
+ result[#result+1] = format("%s<%s>",d,k)
+ toxml(v,d..step,result,step)
+ result[#result+1] = format("%s</%s>",d,k)
+ end
+ elseif type(k) == "number" then
result[#result+1] = format("%s<entry n='%s'>%s</entry>",d,k,v,k)
else
result[#result+1] = format("%s<%s>%s</%s>",d,k,tostring(v),k)
@@ -3854,17 +4243,56 @@ local function toxml(t,d,result)
end
end
-function table.toxml(t,name,nobanner)
+function table.toxml(t,name,nobanner,indent,spaces)
local noroot = name == false
local result = (nobanner or noroot) and { } or { "<?xml version='1.0' standalone='yes' ?>" }
+ local indent = rep(" ",indent or 0)
+ local spaces = rep(" ",spaces or 1)
if noroot then
- toxml( t, "", result)
+ toxml( t, inndent, result, spaces)
else
- toxml( { [name or "root"] = t }, "", result)
+ toxml( { [name or "root"] = t }, indent, result, spaces)
end
return concat(result,"\n")
end
+-- also experimental
+
+-- encapsulate(table,utilities.tables)
+-- encapsulate(table,utilities.tables,true)
+-- encapsulate(table,true)
+
+function tables.encapsulate(core,capsule,protect)
+ if type(capsule) ~= "table" then
+ protect = true
+ capsule = { }
+ end
+ for key, value in next, core do
+ if capsule[key] then
+ print(format("\ninvalid inheritance '%s' in '%s': %s",key,tostring(core)))
+ os.exit()
+ else
+ capsule[key] = value
+ end
+ end
+ if protect then
+ for key, value in next, core do
+ core[key] = nil
+ end
+ setmetatable(core, {
+ __index = capsule,
+ __newindex = function(t,key,value)
+ if capsule[key] then
+ print(format("\ninvalid overload '%s' in '%s'",key,tostring(core)))
+ os.exit()
+ else
+ rawset(t,key,value)
+ end
+ end
+ } )
+ end
+end
+
end -- of closure
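
A hedged sketch of the extended table.toxml above (indent is the initial number of spaces, spaces the per-level step; the noroot branch still passes the misspelled inndent, so the sketch uses a named root):

local t = { title = "test", authors = { "hans", "ton" } }
print(table.toxml(t,"data",true,0,2))
-- expected shape (banner suppressed by nobanner):
-- <data>
--   <authors>
--     <entry n='1'>hans</entry>
--     <entry n='2'>ton</entry>
--   </authors>
--   <title>test</title>
-- </data>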
@@ -3886,8 +4314,8 @@ local storage = utilities.storage
function storage.mark(t)
if not t then
- texio.write_nl("fatal error: storage '%s' cannot be marked",t)
- os.exit()
+ texio.write_nl("fatal error: storage cannot be marked")
+ return -- os.exit()
end
local m = getmetatable(t)
if not m then
@@ -3916,8 +4344,8 @@ end
function storage.checked(t)
if not t then
- texio.write_nl("fatal error: storage '%s' has not been allocated",t)
- os.exit()
+ texio.write_nl("fatal error: storage has not been allocated")
+ return -- os.exit()
end
return t
end
@@ -3946,13 +4374,13 @@ end
-- table namespace ?
-local function f_empty () return "" end -- t,k
-local function f_self (t,k) t[k] = k return k end
-local function f_ignore() end -- t,k,v
+local function f_empty () return "" end -- t,k
+local function f_self (t,k) t[k] = k return k end
+local function f_ignore() end -- t,k,v
-local t_empty = { __index = empty }
-local t_self = { __index = self }
-local t_ignore = { __newindex = ignore }
+local t_empty = { __index = f_empty }
+local t_self = { __index = f_self }
+local t_ignore = { __newindex = f_ignore }
function table.setmetatableindex(t,f)
local m = getmetatable(t)
@@ -3973,6 +4401,7 @@ function table.setmetatableindex(t,f)
setmetatable(t,{ __index = f })
end
end
+ return t
end
function table.setmetatablenewindex(t,f)
@@ -3990,6 +4419,7 @@ function table.setmetatablenewindex(t,f)
setmetatable(t,{ __newindex = f })
end
end
+ return t
end
function table.setmetatablecall(t,f)
@@ -3999,6 +4429,7 @@ function table.setmetatablecall(t,f)
else
setmetatable(t,{ __call = f })
end
+ return t
end
function table.setmetatablekey(t,key,value)
@@ -4008,6 +4439,7 @@ function table.setmetatablekey(t,key,value)
setmetatable(t,m)
end
m[key] = value
+ return t
end
function table.getmetatablekey(t,key,value)
@@ -4176,14 +4608,29 @@ utilities = utilities or {}
utilities.lua = utilities.lua or { }
utilities.report = logs and logs.reporter("system") or print
-function utilities.lua.compile(luafile,lucfile,cleanup,strip) -- defaults: cleanup=false strip=true
+local function stupidcompile(luafile,lucfile)
+ local data = io.loaddata(luafile)
+ if data and data ~= "" then
+ data = string.dump(data)
+ if data and data ~= "" then
+ io.savedata(lucfile,data)
+ end
+ end
+end
+
+function utilities.lua.compile(luafile,lucfile,cleanup,strip,fallback) -- defaults: cleanup=false strip=true
utilities.report("lua: compiling %s into %s",luafile,lucfile)
os.remove(lucfile)
local command = "-o " .. string.quoted(lucfile) .. " " .. string.quoted(luafile)
if strip ~= false then
command = "-s " .. command
end
- local done = os.spawn("texluac " .. command) == 0 or os.spawn("luac " .. command) == 0
+ local done = os.spawn("texluac " .. command) == 0 -- or os.spawn("luac " .. command) == 0
+ if not done and fallback then
+ utilities.report("lua: dumping %s into %s (unstripped)",luafile,lucfile)
+ stupidcompile(luafile,lucfile) -- maybe use the stripper we have elsewhere
+ cleanup = false -- better see how worse it is
+ end
if done and cleanup == true and lfs.isfile(lucfile) and lfs.isfile(luafile) then
utilities.report("lua: removing %s",luafile)
os.remove(luafile)
@@ -4210,9 +4657,9 @@ if not modules then modules = { } end modules ['util-prs'] = {
license = "see context related readme files"
}
-local P, R, V, C, Ct, Carg = lpeg.P, lpeg.R, lpeg.V, lpeg.C, lpeg.Ct, lpeg.Carg
+local P, R, V, C, Ct, Cs, Carg = lpeg.P, lpeg.R, lpeg.V, lpeg.C, lpeg.Ct, lpeg.Cs, lpeg.Carg
local lpegmatch = lpeg.match
-local concat, format, gmatch = table.concat, string.format, string.gmatch
+local concat, format, gmatch, find = table.concat, string.format, string.gmatch, string.find
local tostring, type, next = tostring, type, next
utilities = utilities or {}
@@ -4240,8 +4687,12 @@ local rbrace = P("}")
local nobrace = 1 - (lbrace+rbrace)
local nested = P { lbrace * (nobrace + V(1))^0 * rbrace }
local spaces = space^0
+local argument = Cs((lbrace/"") * ((nobrace + nested)^0) * (rbrace/""))
+local content = (1-P(-1))^0
-lpeg.patterns.nested = nested
+lpeg.patterns.nested = nested -- no capture
+lpeg.patterns.argument = argument -- argument after e.g. =
+lpeg.patterns.content = content -- rest after e.g =
local value = P(lbrace * C((nobrace + nested)^0) * rbrace) + C((nested + (1-comma))^0)
@@ -4322,9 +4773,15 @@ parsers.patterns.settings_to_array = pattern
-- we could use a weak table as cache
-function parsers.settings_to_array(str)
+function parsers.settings_to_array(str,strict)
if not str or str == "" then
return { }
+ elseif strict then
+ if find(str,"{") then
+ return lpegmatch(pattern,str)
+ else
+ return { str }
+ end
else
return lpegmatch(pattern,str)
end
@@ -4562,61 +5019,55 @@ local names = { }
-- one
local function hook()
- local f = getinfo(2,"f").func
- local n = getinfo(2,"Sn")
--- if n.what == "C" and n.name then print (n.namewhat .. ': ' .. n.name) end
+ local f = getinfo(2) -- "nS"
if f then
- local cf = counters[f]
- if cf == nil then
- counters[f] = 1
- names[f] = n
- else
- counters[f] = cf + 1
- end
- end
-end
-
-local function getname(func)
- local n = names[func]
- if n then
- if n.what == "C" then
- return n.name or '<anonymous>'
+ local n = "unknown"
+ if f.what == "C" then
+ n = f.name or '<anonymous>'
+ if not names[n] then
+ names[n] = format("%42s",n)
+ end
else
-- source short_src linedefined what name namewhat nups func
- local name = n.name or n.namewhat or n.what
- if not name or name == "" then name = "?" end
- return format("%s : %s : %s", n.short_src or "unknown source", n.linedefined or "--", name)
+ n = f.name or f.namewhat or f.what
+ if not n or n == "" then
+ n = "?"
+ end
+ if not names[n] then
+ names[n] = format("%42s : % 5i : %s",n,f.linedefined or 0,f.short_src or "unknown source")
+ end
end
- else
- return "unknown"
+ counters[n] = (counters[n] or 0) + 1
end
end
-function debugger.showstats(printer,threshold)
+function debugger.showstats(printer,threshold) -- hm, something has changed, rubbish now
printer = printer or texio.write or print
threshold = threshold or 0
local total, grandtotal, functions = 0, 0, 0
- printer("\n") -- ugly but ok
- -- table.sort(counters)
- for func, count in next, counters do
- if count > threshold then
- local name = getname(func)
- if not find(name,"for generator") then
- printer(format("%8i %s", count, name))
- total = total + count
- end
+ local dataset = { }
+ for name, count in next, counters do
+ dataset[#dataset+1] = { name, count }
+ end
+ table.sort(dataset,function(a,b) return a[2] == b[2] and b[1] > a[1] or a[2] > b[2] end)
+ for i=1,#dataset do
+ local d = dataset[i]
+ local name = d[1]
+ local count = d[2]
+ if count > threshold and not find(name,"for generator") then -- move up
+ printer(format("%8i %s\n", count, names[name]))
+ total = total + count
end
grandtotal = grandtotal + count
functions = functions + 1
end
- printer(format("functions: %s, total: %s, grand total: %s, threshold: %s\n", functions, total, grandtotal, threshold))
+ printer("\n")
+ printer(format("functions : % 10i\n", functions))
+ printer(format("total : % 10i\n", total))
+ printer(format("grand total: % 10i\n", grandtotal))
+ printer(format("threshold : % 10i\n", threshold))
end
--- two
-
-
--- rest
-
function debugger.savestats(filename,threshold)
local f = io.open(filename,'w')
if f then
@@ -4638,6 +5089,7 @@ end
local is_node = node and node.is_node
+local is_lpeg = lpeg and lpeg.type
function inspect(i) -- global function
local ti = type(i)
@@ -4645,11 +5097,30 @@ function inspect(i) -- global function
table.print(i,"table")
elseif is_node and is_node(i) then
table.print(nodes.astable(i),tostring(i))
+ elseif is_lpeg and is_lpeg(i) then
+ lpeg.print(i)
else
print(tostring(i))
end
end
+-- from the lua book:
+
+function traceback()
+ local level = 1
+ while true do
+ local info = debug.getinfo(level, "Sl")
+ if not info then
+ break
+ elseif info.what == "C" then
+ print(format("%3i : C function",level))
+ else
+ print(format("%3i : [%s]:%d",level,info.short_src,info.currentline))
+ end
+ level = level + 1
+ end
+end
+
end -- of closure
@@ -4668,7 +5139,7 @@ if not modules then modules = { } end modules ['trac-inf'] = {
-- get warnings about assignments. This is more efficient than using rawset
-- and rawget.
-local format = string.format
+local format, lower = string.format, string.lower
local clock = os.gettimeofday or os.clock -- should go in environment
local write_nl = texio.write_nl
@@ -4770,10 +5241,10 @@ function statistics.show(reporter)
-- this code will move
local register = statistics.register
register("luatex banner", function()
- return string.lower(status.banner)
+ return lower(status.banner)
end)
register("control sequences", function()
- return format("%s of %s", status.cs_count, status.hash_size+status.hash_extra)
+ return format("%s of %s + %s", status.cs_count, status.hash_size,status.hash_extra)
end)
register("callbacks", function()
local total, indirect = status.callbacks or 0, status.indirect_callbacks or 0
@@ -4847,7 +5318,7 @@ end
function commands.elapsedtime(name)
stoptiming(name or "whatever")
- tex.sprint(elapsedtime(name or "whatever"))
+ context(elapsedtime(name or "whatever"))
end
@@ -4931,6 +5402,10 @@ local function set(t,what,newvalue)
if type(what) ~= "table" then
return
end
+ if not done then -- catch ... why not set?
+ done = { }
+ t.done = done
+ end
for w, value in next, what do
if value == "" then
value = newvalue
@@ -5167,6 +5642,8 @@ end
if texconfig then
+ -- this happens too late in ini mode but that is no problem
+
local function set(k,v)
v = tonumber(v)
if v then
@@ -5237,7 +5714,7 @@ local report, subreport, status, settarget, setformats, settranslations
local direct, subdirect, writer, pushtarget, poptarget
-if tex and tex.jobname or tex.formatname then
+if tex and (tex.jobname or tex.formatname) then
local valueiskey = { __index = function(t,k) t[k] = k return k end } -- will be helper
@@ -5252,6 +5729,10 @@ if tex and tex.jobname or tex.formatname then
write_nl(target,...)
end
+ newline = function()
+ write_nl(target,"\n")
+ end
+
report = function(a,b,c,...)
if c then
write_nl(target,format("%-15s > %s\n",translations[a],format(formats[b],c,...)))
@@ -5357,6 +5838,10 @@ else
writer = write_nl
+ newline = function()
+ write_nl("\n")
+ end
+
report = function(a,b,c,...)
if c then
write_nl(format("%-15s | %s",a,format(b,c,...)))
@@ -5416,6 +5901,7 @@ logs.settranslations = settranslations
logs.direct = direct
logs.subdirect = subdirect
logs.writer = writer
+logs.newline = newline
-- installer
@@ -5585,21 +6071,58 @@ local real, user, sub
function logs.start_page_number()
real, user, sub = texcount.realpageno, texcount.userpageno, texcount.subpageno
+-- real, user, sub = 0, 0, 0
end
-function logs.stop_page_number()
- if real > 0 then
- if user > 0 then
- if sub > 0 then
- report_pages("flushing realpage %s, userpage %s, subpage %s",real,user,sub)
+local timing = false
+local starttime = nil
+local lasttime = nil
+
+trackers.register("pages.timing", function(v) -- only for myself (diagnostics)
+ starttime = os.clock()
+ timing = true
+end)
+
+function logs.stop_page_number() -- the first page can include the initialization, so we omit it from the average
+ if timing then
+ local elapsed, average
+ local stoptime = os.clock()
+ if not lasttime or real < 2 then
+ elapsed = stoptime
+ average = stoptime
+ starttime = stoptime
+ else
+ elapsed = stoptime - lasttime
+ average = (stoptime - starttime) / (real - 1)
+ end
+ lasttime = stoptime
+ if real > 0 then
+ if user > 0 then
+ if sub > 0 then
+ report_pages("flushing realpage %s, userpage %s, subpage %s, time %0.04f / %0.04f",real,user,sub,elapsed,average)
+ else
+ report_pages("flushing realpage %s, userpage %s, time %0.04f / %0.04f",real,user,elapsed,average)
+ end
else
- report_pages("flushing realpage %s, userpage %s",real,user)
+ report_pages("flushing realpage %s, time %0.04f / %0.04f",real,elapsed,average)
end
else
- report_pages("flushing realpage %s",real)
+ report_pages("flushing page, time %0.04f / %0.04f",elapsed,average)
end
else
- report_pages("flushing page")
+ if real > 0 then
+ if user > 0 then
+ if sub > 0 then
+ report_pages("flushing realpage %s, userpage %s, subpage %s",real,user,sub)
+ else
+ report_pages("flushing realpage %s, userpage %s",real,user)
+ end
+ else
+ report_pages("flushing realpage %s",real)
+ end
+ else
+ report_pages("flushing page")
+ end
end
logs.flush()
end
@@ -5681,6 +6204,13 @@ local function reportbanner(t)
end
end
+local function reportversion(t)
+ local banner = t.banner
+ if banner then
+ t.report(banner)
+ end
+end
+
local function reporthelp(t,...)
local helpinfo = t.helpinfo
if type(helpinfo) == "string" then
@@ -5707,6 +6237,7 @@ function logs.application(t)
t.report = logs.reporter(t.name)
t.help = function(...) reportbanner(t) ; reporthelp(t,...) ; reportinfo(t) end
t.identify = function() reportbanner(t) end
+ t.version = function() reportversion(t) end
return t
end
@@ -5772,6 +6303,11 @@ else
end
end
+-- do we still need io.flush then?
+
+io.stdout:setvbuf('no')
+io.stderr:setvbuf('no')
+
end -- of closure
@@ -6277,6 +6813,10 @@ if not modules then modules = { } end modules ['lxml-tab'] = {
-- stripping spaces from e.g. cont-en.xml saves only .2 sec runtime so it's not worth the
-- trouble
+-- todo: when serializing optionally remap named entities to hex (if known in char-ent.lua)
+-- maybe when letter -> utf, else name .. then we need an option to the serializer .. a bit
+-- of work so we delay this until we clean up
+
local trace_entities = false trackers.register("xml.entities", function(v) trace_entities = v end)
local report_xml = logs and logs.reporter("xml","core") or function(...) print(format(...)) end
@@ -6301,10 +6841,11 @@ xml = xml or { }
local xml = xml
+local utf = unicode.utf8
local concat, remove, insert = table.concat, table.remove, table.insert
local type, next, setmetatable, getmetatable, tonumber = type, next, setmetatable, getmetatable, tonumber
local format, lower, find, match, gsub = string.format, string.lower, string.find, string.match, string.gsub
-local utfchar = unicode.utf8.char
+local utfchar, utffind, utfgsub = utf.char, utf.find, utf.gsub
local lpegmatch = lpeg.match
local P, S, R, C, V, C, Cs = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.V, lpeg.C, lpeg.Cs
@@ -6417,9 +6958,22 @@ element.</p>
local nsremap, resolvens = xml.xmlns, xml.resolvens
-local stack, top, dt, at, xmlns, errorstr, entities = { }, { }, { }, { }, { }, nil, { }
-local strip, cleanup, utfize, resolve, resolve_predefined, unify_predefined = false, false, false, false, false, false
-local dcache, hcache, acache = { }, { }, { }
+local stack = { }
+local top = { }
+local dt = { }
+local at = { }
+local xmlns = { }
+local errorstr = nil
+local entities = { }
+local strip = false
+local cleanup = false
+local utfize = false
+local resolve_predefined = false
+local unify_predefined = false
+
+local dcache = { }
+local hcache = { }
+local acache = { }
local mt = { }
@@ -6585,7 +7139,7 @@ local predefined_unified = {
[42] = "&quot;",
[47] = "&apos;",
[74] = "&lt;",
- [76] = "&gr;",
+ [76] = "&gt;",
}
local predefined_simplified = {
@@ -6596,6 +7150,58 @@ local predefined_simplified = {
[76] = ">", gt = ">",
}
+local nofprivates = 0xF0000 -- shared but seldom used
+
+local privates_u = { -- unescaped
+ [ [[&]] ] = "&amp;",
+ [ [["]] ] = "&quot;",
+ [ [[']] ] = "&apos;",
+ [ [[<]] ] = "&lt;",
+ [ [[>]] ] = "&gt;",
+}
+
+local privates_p = {
+}
+
+local privates_n = {
+ -- keeps track of defined ones
+}
+
+local function escaped(s)
+ if s == "" then
+ return ""
+ else -- if utffind(s,privates_u) then
+ return (utfgsub(s,".",privates_u))
+ -- else
+ -- return s
+ end
+end
+
+local function unescaped(s)
+ local p = privates_n[s]
+ if not p then
+ nofprivates = nofprivates + 1
+ p = utfchar(nofprivates)
+ privates_n[s] = p
+ s = "&" .. s .. ";" -- todo: use char-ent to map to hex
+ privates_u[p] = s
+ privates_p[p] = s
+ end
+ return p
+end
+
+local function unprivatized(s,resolve)
+ if s == "" then
+ return ""
+ else
+ return (utfgsub(s,".",privates_p))
+ end
+end
+
+xml.privatetoken = unescaped
+xml.unprivatized = unprivatized
+xml.privatecodes = privates_n
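+
+-- a rough round-trip sketch (illustrative only): an unknown entity name gets a
+-- private code point via xml.privatetoken and is written back as "&name;" by
+-- xml.unprivatized:
+--
+-- local p = xml.privatetoken("foo") -- some character in the private area
+-- print(xml.unprivatized(p)) -- prints: &foo;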
+
local function handle_hex_entity(str)
local h = hcache[str]
if not h then
@@ -6637,7 +7243,7 @@ local function handle_dec_entity(str)
if not n then
report_xml("utfize, ignoring dec entity &#%s;",str)
elseif trace_entities then
- report_xml("utfize, converting dec entity &#%s; into %s",str,h)
+ report_xml("utfize, converting dec entity &#%s; into %s",str,d)
end
else
if trace_entities then
@@ -6658,34 +7264,44 @@ local function handle_any_entity(str)
if not a then
a = resolve_predefined and predefined_simplified[str]
if a then
- -- one of the predefined
- elseif type(resolve) == "function" then
- a = resolve(str) or entities[str]
- else
- a = entities[str]
- end
- if a then
if trace_entities then
- report_xml("resolved entity &%s; -> %s (internal)",str,a)
+ report_xml("resolved entity &%s; -> %s (predefined)",str,a)
end
- a = lpegmatch(parsedentity,a) or a
else
- local unknown_any_entity = placeholders.unknown_any_entity
- if unknown_any_entity then
- a = unknown_any_entity(str) or ""
+ if type(resolve) == "function" then
+ a = resolve(str) or entities[str]
+ else
+ a = entities[str]
end
if a then
+ if type(a) == "function" then
+ if trace_entities then
+ report_xml("expanding entity &%s; (function)",str)
+ end
+ a = a(str) or ""
+ end
+ a = lpegmatch(parsedentity,a) or a -- for nested
if trace_entities then
- report_xml("resolved entity &%s; -> %s (external)",str,a)
+ report_xml("resolved entity &%s; -> %s (internal)",str,a)
end
else
- if trace_entities then
- report_xml("keeping entity &%s;",str)
+ local unknown_any_entity = placeholders.unknown_any_entity
+ if unknown_any_entity then
+ a = unknown_any_entity(str) or ""
end
- if str == "" then
- a = "&error;"
+ if a then
+ if trace_entities then
+ report_xml("resolved entity &%s; -> %s (external)",str,a)
+ end
else
- a = "&" .. str .. ";"
+ if trace_entities then
+ report_xml("keeping entity &%s;",str)
+ end
+ if str == "" then
+ a = "&error;"
+ else
+ a = "&" .. str .. ";"
+ end
end
end
end
@@ -6700,18 +7316,25 @@ local function handle_any_entity(str)
else
local a = acache[str]
if not a then
- if trace_entities then
- report_xml("found entity &%s;",str)
- end
a = resolve_predefined and predefined_simplified[str]
if a then
-- one of the predefined
acache[str] = a
+ if trace_entities then
+ report_xml("entity &%s; becomes %s",str,tostring(a))
+ end
elseif str == "" then
+ if trace_entities then
+ report_xml("invalid entity &%s;",str)
+ end
a = "&error;"
acache[str] = a
else
- a = "&" .. str .. ";"
+ if trace_entities then
+ report_xml("entity &%s; is made private",str)
+ end
+ -- a = "&" .. str .. ";"
+ a = unescaped(str)
acache[str] = a
end
end
@@ -6761,7 +7384,7 @@ local value = (squote * Cs((entity + (1 - squote))^0) * squote) + (dq
local endofattributes = slash * close + close -- recovery of flaky html
local whatever = space * name * optionalspace * equal
-local wrongvalue = C(P(1-whatever-close)^1 + P(1-close)^1) / attribute_value_error
+----- wrongvalue = C(P(1-whatever-close)^1 + P(1-close)^1) / attribute_value_error
----- wrongvalue = C(P(1-whatever-endofattributes)^1 + P(1-endofattributes)^1) / attribute_value_error
----- wrongvalue = C(P(1-space-endofattributes)^1) / attribute_value_error
local wrongvalue = Cs(P(entity + (1-space-endofattributes))^1) / attribute_value_error
@@ -6796,6 +7419,8 @@ local function normalentity(k,v ) entities[k] = v end
local function systementity(k,v,n) entities[k] = v end
local function publicentity(k,v,n) entities[k] = v end
+-- todo: separate dtd parser
+
local begindoctype = open * P("!DOCTYPE")
local enddoctype = close
local beginset = P("[")
@@ -6803,17 +7428,22 @@ local endset = P("]")
local doctypename = C((1-somespace-close)^0)
local elementdoctype = optionalspace * P("<!ELEMENT") * (1-close)^0 * close
+local basiccomment = begincomment * ((1 - endcomment)^0) * endcomment
+
local normalentitytype = (doctypename * somespace * value)/normalentity
local publicentitytype = (doctypename * somespace * P("PUBLIC") * somespace * value)/publicentity
local systementitytype = (doctypename * somespace * P("SYSTEM") * somespace * value * somespace * P("NDATA") * somespace * doctypename)/systementity
local entitydoctype = optionalspace * P("<!ENTITY") * somespace * (systementitytype + publicentitytype + normalentitytype) * optionalspace * close
-local doctypeset = beginset * optionalspace * P(elementdoctype + entitydoctype + space)^0 * optionalspace * endset
+-- we accept comments in doctypes
+
+local doctypeset = beginset * optionalspace * P(elementdoctype + entitydoctype + basiccomment + space)^0 * optionalspace * endset
local definitiondoctype= doctypename * somespace * doctypeset
local publicdoctype = doctypename * somespace * P("PUBLIC") * somespace * value * somespace * value * somespace * doctypeset
local systemdoctype = doctypename * somespace * P("SYSTEM") * somespace * value * somespace * doctypeset
local simpledoctype = (1-close)^1 -- * balanced^0
local somedoctype = C((somespace * (publicdoctype + systemdoctype + definitiondoctype + simpledoctype) * optionalspace)^0)
+local somedoctype = C((somespace * (publicdoctype + systemdoctype + definitiondoctype + simpledoctype) * optionalspace)^0)
local instruction = (spacing * begininstruction * someinstruction * endinstruction) / function(...) add_special("@pi@",...) end
local comment = (spacing * begincomment * somecomment * endcomment ) / function(...) add_special("@cm@",...) end
@@ -6845,17 +7475,30 @@ local grammar_unparsed_text = P { "preamble",
children = unparsedtext + V("parent") + emptyelement + comment + cdata + instruction,
}
--- maybe we will add settinsg to result as well
+-- maybe we will add settings to result as well
-local function xmlconvert(data, settings)
- settings = settings or { } -- no_root strip_cm_and_dt given_entities parent_root error_handler
- strip = settings.strip_cm_and_dt
- utfize = settings.utfize_entities
- resolve = settings.resolve_entities
+local function _xmlconvert_(data, settings)
+ settings = settings or { } -- no_root strip_cm_and_dt given_entities parent_root error_handler
+ --
+ strip = settings.strip_cm_and_dt
+ utfize = settings.utfize_entities
+ resolve = settings.resolve_entities
resolve_predefined = settings.resolve_predefined_entities -- in case we have escaped entities
- unify_predefined = settings.unify_predefined_entities -- &#038; -> &amp;
- cleanup = settings.text_cleanup
- stack, top, at, xmlns, errorstr, entities = { }, { }, { }, { }, nil, settings.entities or { }
+ unify_predefined = settings.unify_predefined_entities -- &#038; -> &amp;
+ cleanup = settings.text_cleanup
+ entities = settings.entities or { }
+ --
+ if utfize == nil then
+ settings.utfize_entities = true
+ utfize = true
+ end
+ if resolve_predefined == nil then
+ settings.resolve_predefined_entities = true
+ resolve_predefined = true
+ end
+ --
+ --
+ stack, top, at, xmlns, errorstr = { }, { }, { }, { }, nil
acache, hcache, dcache = { }, { }, { } -- not stored
reported_attribute_errors = { }
if settings.parent_root then
@@ -6893,7 +7536,7 @@ local function xmlconvert(data, settings)
else
errorhandler = errorhandler or xml.errorhandler
if errorhandler then
- xml.errorhandler("load",errorstr)
+ xml.errorhandler(format("load error: %s",errorstr))
end
end
else
@@ -6907,7 +7550,7 @@ local function xmlconvert(data, settings)
local v = rdt[k]
if type(v) == "table" and not v.special then -- always table -)
result.ri = k -- rootindex
-v.__p__ = result -- new, experiment, else we cannot go back to settings, we need to test this !
+ v.__p__ = result -- new, experiment, else we cannot go back to settings, we need to test this !
break
end
end
@@ -6915,16 +7558,42 @@ v.__p__ = result -- new, experiment, else we cannot go back to settings, we nee
if errorstr and errorstr ~= "" then
result.error = true
end
+ result.statistics = {
+ entities = {
+ decimals = dcache,
+ hexadecimals = hcache,
+ names = acache,
+ }
+ }
+ strip, utfize, resolve, resolve_predefined = nil, nil, nil, nil
+ unify_predefined, cleanup, entities = nil, nil, nil
+ stack, top, at, xmlns, errorstr = nil, nil, nil, nil, nil
+ acache, hcache, dcache = nil, nil, nil
+ reported_attribute_errors, mt, errorhandler = nil, nil, nil
return result
end
+-- Because we can have a crash (stack issues) with faulty xml, we wrap this one
+-- in a protector:
+
+function xmlconvert(data,settings)
+ local ok, result = pcall(function() return _xmlconvert_(data,settings) end)
+ if ok then
+ return result
+ else
+ return _xmlconvert_("")
+ end
+end
+
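+-- a minimal usage sketch (illustrative): the pcall wrapper means a conversion
+-- never raises; if the protected call fails (e.g. a stack overflow on deeply
+-- nested faulty xml) the result of converting an empty string is returned:
+--
+-- local t = xml.convert([[<root><a>ok</a></root>]])
+-- print(xml.is_valid(t)) -- true for well formed input
+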
xml.convert = xmlconvert
-function xml.inheritedconvert(data,xmldata)
+function xml.inheritedconvert(data,xmldata) -- xmldata is parent
local settings = xmldata.settings
- settings.parent_root = xmldata -- to be tested
+ if settings then
+ settings.parent_root = xmldata -- to be tested
+ end
-- settings.no_root = true
- local xc = xmlconvert(data,settings)
+ local xc = xmlconvert(data,settings) -- hm, we might need to locate settings
-- xc.settings = nil
-- xc.entities = nil
-- xc.special = nil
@@ -6953,7 +7622,7 @@ function xml.is_valid(root)
return root and not root.error
end
-xml.errorhandler = report
+xml.errorhandler = report_xml
--[[ldx--
<p>We cannot load an <l n='lpeg'/> from a filehandle so we need to load
@@ -7032,18 +7701,15 @@ alternative.</p>
function xml.checkbom(root) -- can be made faster
if root.ri then
- local dt, found = root.dt, false
+ local dt = root.dt
for k=1,#dt do
local v = dt[k]
if type(v) == "table" and v.special and v.tg == "@pi@" and find(v.dt[1],"xml.*version=") then
- found = true
- break
+ return
end
end
- if not found then
- insert(dt, 1, { special=true, ns="", tg="@pi@", dt = { "xml version='1.0' standalone='yes'"} } )
- insert(dt, 2, "\n" )
- end
+ insert(dt, 1, { special=true, ns="", tg="@pi@", dt = { "xml version='1.0' standalone='yes'"} } )
+ insert(dt, 2, "\n" )
end
end
@@ -7054,14 +7720,14 @@ and then handle the lot.</p>
-- new experimental reorganized serialize
-local function verbose_element(e,handlers)
+local function verbose_element(e,handlers) -- options
local handle = handlers.handle
local serialize = handlers.serialize
local ens, etg, eat, edt, ern = e.ns, e.tg, e.at, e.dt, e.rn
local ats = eat and next(eat) and { }
if ats then
for k,v in next, eat do
- ats[#ats+1] = format('%s=%q',k,v)
+ ats[#ats+1] = format('%s=%q',k,escaped(v))
end
end
if ern and trace_entities and ern ~= ens then
@@ -7077,7 +7743,7 @@ local function verbose_element(e,handlers)
for i=1,#edt do
local e = edt[i]
if type(e) == "string" then
- handle(e)
+ handle(escaped(e))
else
serialize(e,handlers)
end
@@ -7098,11 +7764,11 @@ local function verbose_element(e,handlers)
handle("<",etg,">")
end
for i=1,#edt do
- local ei = edt[i]
- if type(ei) == "string" then
- handle(ei)
+ local e = edt[i]
+ if type(e) == "string" then
+ handle(escaped(e)) -- option: hexify escaped entities
else
- serialize(ei,handlers)
+ serialize(e,handlers)
end
end
handle("</",etg,">")
@@ -7137,7 +7803,7 @@ local function verbose_root(e,handlers)
end
local function verbose_text(e,handlers)
- handlers.handle(e)
+ handlers.handle(escaped(e))
end
local function verbose_document(e,handlers)
@@ -7265,20 +7931,33 @@ local result
local xmlfilehandler = newhandlers {
name = "file",
- initialize = function(name) result = io.open(name,"wb") return result end,
- finalize = function() result:close() return true end,
- handle = function(...) result:write(...) end,
+ initialize = function(name)
+ result = io.open(name,"wb")
+ return result
+ end,
+ finalize = function()
+ result:close()
+ return true
+ end,
+ handle = function(...)
+ result:write(...)
+ end,
}
-- no checking on writeability here but not faster either
--
-- local xmlfilehandler = newhandlers {
--- initialize = function(name) io.output(name,"wb") return true end,
--- finalize = function() io.close() return true end,
+-- initialize = function(name)
+-- io.output(name,"wb")
+-- return true
+-- end,
+-- finalize = function()
+-- io.close()
+-- return true
+-- end,
-- handle = io.write,
-- }
-
function xml.save(root,name)
serialize(root,xmlfilehandler,name)
end
@@ -7287,28 +7966,34 @@ local result
local xmlstringhandler = newhandlers {
name = "string",
- initialize = function() result = { } return result end,
- finalize = function() return concat(result) end,
- handle = function(...) result[#result+1] = concat { ... } end
+ initialize = function()
+ result = { }
+ return result
+ end,
+ finalize = function()
+ return concat(result)
+ end,
+ handle = function(...)
+ result[#result+1] = concat { ... }
+ end,
}
local function xmltostring(root) -- 25% overhead due to collecting
- if root then
- if type(root) == 'string' then
- return root
- else -- if next(root) then -- next is faster than type (and >0 test)
- return serialize(root,xmlstringhandler) or ""
- end
+ if not root then
+ return ""
+ elseif type(root) == 'string' then
+ return root
+ else -- if next(root) then -- next is faster than type (and >0 test)
+ return serialize(root,xmlstringhandler) or ""
end
- return ""
end
-local function xmltext(root) -- inline
+local function __tostring(root) -- inline
return (root and xmltostring(root)) or ""
end
initialize_mt = function(root) -- redefinition
- mt = { __tostring = xmltext, __index = root }
+ mt = { __tostring = __tostring, __index = root }
end
xml.defaulthandlers = handlers
@@ -7428,7 +8113,7 @@ xml.tocdata(e,"error")
--ldx]]--
function xml.tocdata(e,wrapper)
- local whatever = xmltostring(e.dt)
+ local whatever = type(e) == "table" and xmltostring(e.dt) or e or ""
if wrapper then
whatever = format("<%s>%s</%s>",wrapper,whatever,wrapper)
end
@@ -7620,8 +8305,8 @@ apply_axis['child'] = function(list)
c = c + 1
collected[c] = dk
dk.ni = k -- refresh
- en = en + 1
- dk.ei = en
+ en = en + 1
+ dk.ei = en
end
end
ll.en = en
@@ -7649,6 +8334,7 @@ local function collect(list,collected,c)
end
return c
end
+
apply_axis['descendant'] = function(list)
local collected, c = { }, 0
for l=1,#list do
@@ -7940,6 +8626,12 @@ local lp_or = P("|") / " or "
local lp_and = P("&") / " and "
local lp_builtin = P (
+ P("text") / "(ll.dt[1] or '')" + -- fragile
+ P("content") / "ll.dt" +
+ -- P("name") / "(ll.ns~='' and ll.ns..':'..ll.tg)" +
+ P("name") / "((ll.ns~='' and ll.ns..':'..ll.tg) or ll.tg)" +
+ P("tag") / "ll.tg" +
+ P("position") / "l" + -- is element in finalizer
P("firstindex") / "1" +
P("lastindex") / "(#ll.__p__.dt or 1)" +
P("firstelement") / "1" +
@@ -7947,15 +8639,11 @@ local lp_builtin = P (
P("first") / "1" +
P("last") / "#list" +
P("rootposition") / "order" +
- P("position") / "l" + -- is element in finalizer
P("order") / "order" +
P("element") / "(ll.ei or 1)" +
P("index") / "(ll.ni or 1)" +
P("match") / "(ll.mi or 1)" +
- P("text") / "(ll.dt[1] or '')" +
- -- P("name") / "(ll.ns~='' and ll.ns..':'..ll.tg)" +
- P("name") / "((ll.ns~='' and ll.ns..':'..ll.tg) or ll.tg)" +
- P("tag") / "ll.tg" +
+ -- P("namespace") / "ll.ns" +
P("ns") / "ll.ns"
) * ((spaces * P("(") * spaces * P(")"))/"")
@@ -7979,11 +8667,11 @@ local lp_function = C(R("az","AZ","__")^1) * P("(") / function(t) -- todo: bett
end
end
-local lparent = lpeg.P("(")
-local rparent = lpeg.P(")")
+local lparent = P("(")
+local rparent = P(")")
local noparent = 1 - (lparent+rparent)
-local nested = lpeg.P{lparent * (noparent + lpeg.V(1))^0 * rparent}
-local value = lpeg.P(lparent * lpeg.C((noparent + nested)^0) * rparent) -- lpeg.P{"("*C(((1-S("()"))+V(1))^0)*")"}
+local nested = P{lparent * (noparent + V(1))^0 * rparent}
+local value = P(lparent * C((noparent + nested)^0) * rparent) -- P{"("*C(((1-S("()"))+V(1))^0)*")"}
local lp_child = Cc("expr.child(ll,'") * R("az","AZ","--","__")^1 * Cc("')")
local lp_number = S("+-") * R("09")^1
@@ -8135,7 +8823,10 @@ local special_1 = P("*") * Cc(register_auto_descendant) * Cc(register_all_nodes
local special_2 = P("/") * Cc(register_auto_self)
local special_3 = P("") * Cc(register_auto_self)
-local pathparser = Ct { "patterns", -- can be made a bit faster by moving pattern outside
+local no_nextcolon = P(-1) + #(1-P(":")) -- newer lpeg needs the P(-1)
+local no_nextlparent = P(-1) + #(1-P("(")) -- newer lpeg needs the P(-1)
+
+local pathparser = Ct { "patterns", -- can be made a bit faster by moving some patterns outside
patterns = spaces * V("protocol") * spaces * (
( V("special") * spaces * P(-1) ) +
@@ -8164,10 +8855,8 @@ local pathparser = Ct { "patterns", -- can be made a bit faster by moving patter
shortcuts = V("shortcuts_a") * (spaces * "/" * spaces * V("shortcuts_a"))^0,
s_descendant_or_self = (P("***/") + P("/")) * Cc(register_descendant_or_self), --- *** is a bonus
- -- s_descendant_or_self = P("/") * Cc(register_descendant_or_self),
s_descendant = P("**") * Cc(register_descendant),
- s_child = P("*") * #(1-P(":")) * Cc(register_child ),
--- s_child = P("*") * #(P("/")+P(-1)) * Cc(register_child ),
+ s_child = P("*") * no_nextcolon * Cc(register_child ),
s_parent = P("..") * Cc(register_parent ),
s_self = P("." ) * Cc(register_self ),
s_root = P("^^") * Cc(register_root ),
@@ -8194,13 +8883,13 @@ local pathparser = Ct { "patterns", -- can be made a bit faster by moving patter
expressions = expression / register_expression,
letters = R("az")^1,
- name = (1-lpeg.S("/[]()|:*!"))^1,
+ name = (1-S("/[]()|:*!"))^1, -- make inline
negate = P("!") * Cc(false),
nodefunction = V("negate") + P("not") * Cc(false) + Cc(true),
nodetest = V("negate") + Cc(true),
nodename = (V("negate") + Cc(true)) * spaces * ((V("wildnodename") * P(":") * V("wildnodename")) + (Cc(false) * V("wildnodename"))),
- wildnodename = (C(V("name")) + P("*") * Cc(false)) * #(1-P("(")),
+ wildnodename = (C(V("name")) + P("*") * Cc(false)) * no_nextlparent,
nodeset = spaces * Ct(V("nodename") * (spaces * P("|") * spaces * V("nodename"))^0) * spaces,
finalizer = (Cb("protocol") * P("/")^-1 * C(V("name")) * arguments * P(-1)) / register_finalizer,
@@ -8364,7 +9053,7 @@ local function profiled_apply(list,parsed,nofparsed,order)
elseif kind == "expression" then
collected = apply_expression(collected,pi.evaluator,order)
elseif kind == "finalizer" then
- collected = pi.finalizer(collected)
+ collected = pi.finalizer(collected) -- no check on # here
p.matched = p.matched + 1
p.finalized = p.finalized + 1
return collected
@@ -8497,9 +9186,9 @@ end
expressions.child = function(e,pattern)
return applylpath(e,pattern) -- todo: cache
end
-expressions.count = function(e,pattern)
+expressions.count = function(e,pattern) -- what if pattern == empty or nil
local collected = applylpath(e,pattern) -- todo: cache
- return (collected and #collected) or 0
+ return pattern and (collected and #collected) or 0
end
-- external
@@ -8508,7 +9197,7 @@ expressions.oneof = function(s,...) -- slow
local t = {...} for i=1,#t do if s == t[i] then return true end end return false
end
expressions.error = function(str)
- xml.errorhandler("unknown function in lpath expression",tostring(str or "?"))
+ xml.errorhandler(format("unknown function in lpath expression: %s",tostring(str or "?")))
return false
end
expressions.undefined = function(s)
@@ -8534,6 +9223,23 @@ expressions.lower = lower
expressions.number = tonumber
expressions.boolean = toboolean
+function expressions.contains(str,pattern)
+ local t = type(str)
+ if t == "string" then
+ if find(str,pattern) then
+ return true
+ end
+ elseif t == "table" then
+ for i=1,#str do
+ local d = str[i]
+ if type(d) == "string" and find(d,pattern) then
+ return true
+ end
+ end
+ end
+ return false
+end
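+
+-- a rough sketch (illustrative): contains does a plain string.find on a string
+-- argument, or on every string item of a table:
+--
+-- print(expressions.contains("foobar","oba")) -- true
+-- print(expressions.contains({ "foo", "bar" },"ar")) -- true
+-- print(expressions.contains(123,"1")) -- false (neither string nor table)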
+
-- user interface
local function traverse(root,pattern,handle)
@@ -8856,10 +9562,12 @@ local xml = xml
local xmlconvert, xmlcopy, xmlname = xml.convert, xml.copy, xml.name
local xmlinheritedconvert = xml.inheritedconvert
local xmlapplylpath = xml.applylpath
+local xmlfilter = xml.filter
local type, setmetatable, getmetatable = type, setmetatable, getmetatable
-local insert, remove, fastcopy = table.insert, table.remove, table.fastcopy
-local gmatch, gsub = string.gmatch, string.gsub
+local insert, remove, fastcopy, concat = table.insert, table.remove, table.fastcopy, table.concat
+local gmatch, gsub, format = string.gmatch, string.gsub, string.format
+local utfbyte = utf.byte
local function report(what,pattern,c,e)
report_xml("%s element '%s' (root: '%s', position: %s, index: %s, pattern: %s)",what,xmlname(e),xmlname(e.__p__),c,e.ni,pattern)
@@ -9004,7 +9712,7 @@ local function xmltoelement(whatever,root)
end
local element
if type(whatever) == "string" then
- element = xmlinheritedconvert(whatever,root)
+ element = xmlinheritedconvert(whatever,root) -- beware, not really a root
else
element = whatever -- we assume a table
end
@@ -9031,18 +9739,30 @@ local function copiedelement(element,newparent)
end
function xml.delete(root,pattern)
- local collected = xmlapplylpath(root,pattern)
- if collected then
- for c=1,#collected do
- local e = collected[c]
- local p = e.__p__
- if p then
- if trace_manipulations then
- report('deleting',pattern,c,e)
+ if not pattern or pattern == "" then
+ local p = root.__p__
+ if p then
+ if trace_manipulations then
+ report('deleting',"--",c,root)
+ end
+ local d = p.dt
+ remove(d,root.ni)
+ redo_ni(d) -- can be made faster and inlined
+ end
+ else
+ local collected = xmlapplylpath(root,pattern)
+ if collected then
+ for c=1,#collected do
+ local e = collected[c]
+ local p = e.__p__
+ if p then
+ if trace_manipulations then
+ report('deleting',pattern,c,e)
+ end
+ local d = p.dt
+ remove(d,e.ni)
+ redo_ni(d) -- can be made faster and inlined
end
- local d = p.dt
- remove(d,e.ni)
- redo_ni(d) -- can be made faster and inlined
end
end
end
@@ -9105,32 +9825,39 @@ end
local function inject_element(root,pattern,whatever,prepend)
local element = root and xmltoelement(whatever,root)
local collected = element and xmlapplylpath(root,pattern)
- if collected then
- for c=1,#collected do
- local e = collected[c]
- local r = e.__p__
- local d, k, rri = r.dt, e.ni, r.ri
- local edt = (rri and d[rri].dt) or (d and d[k] and d[k].dt)
- if edt then
- local be, af
- local cp = copiedelement(element,e)
- if prepend then
- be, af = cp, edt
- else
- be, af = edt, cp
- end
- local bn = #be
- for i=1,#af do
- bn = bn + 1
- be[bn] = af[i]
- end
- if rri then
- r.dt[rri].dt = be
- else
- d[k].dt = be
- end
- redo_ni(d)
+ local function inject_e(e)
+ local r = e.__p__
+ local d, k, rri = r.dt, e.ni, r.ri
+ local edt = (rri and d[rri].dt) or (d and d[k] and d[k].dt)
+ if edt then
+ local be, af
+ local cp = copiedelement(element,e)
+ if prepend then
+ be, af = cp, edt
+ else
+ be, af = edt, cp
end
+ local bn = #be
+ for i=1,#af do
+ bn = bn + 1
+ be[bn] = af[i]
+ end
+ if rri then
+ r.dt[rri].dt = be
+ else
+ d[k].dt = be
+ end
+ redo_ni(d)
+ end
+ end
+ if not collected then
+ -- nothing
+ elseif collected.tg then
+ -- first or so
+ inject_e(collected)
+ else
+ for c=1,#collected do
+ inject_e(collected[c])
end
end
end
@@ -9138,16 +9865,23 @@ end
local function insert_element(root,pattern,whatever,before) -- todo: element als functie
local element = root and xmltoelement(whatever,root)
local collected = element and xmlapplylpath(root,pattern)
- if collected then
+ local function insert_e(e)
+ local r = e.__p__
+ local d, k = r.dt, e.ni
+ if not before then
+ k = k + 1
+ end
+ insert(d,k,copiedelement(element,r))
+ redo_ni(d)
+ end
+ if not collected then
+ -- nothing
+ elseif collected.tg then
+ -- first or so
+ insert_e(collected)
+ else
for c=1,#collected do
- local e = collected[c]
- local r = e.__p__
- local d, k = r.dt, e.ni
- if not before then
- k = k + 1
- end
- insert(d,k,copiedelement(element,r))
- redo_ni(d)
+ insert_e(collected[c])
end
end
end
@@ -9346,6 +10080,75 @@ function xml.remapname(root, pattern, newtg, newns, newrn)
end
--[[ldx--
+<p>Helper (for q2p).</p>
+--ldx]]--
+
+function xml.cdatatotext(e)
+ local dt = e.dt
+ if #dt == 1 then
+ local first = dt[1]
+ if first.tg == "@cd@" then
+ e.dt = first.dt
+ end
+ else
+ -- maybe option
+ end
+end
+
+xml.builtinentities = table.tohash { "amp", "quot", "apos", "lt", "gt" } -- used often so share
+
+local entities = characters and characters.entities or nil
+local builtinentities = xml.builtinentities
+
+function xml.addentitiesdoctype(root,option) -- we could also have a 'resolve' i.e. inline hex
+ if not entities then
+ require("char-ent")
+ entities = characters.entities
+ end
+ if entities and root and root.tg == "@rt@" and root.statistics then
+ local list = { }
+ local hexify = option == "hexadecimal"
+ for k, v in table.sortedhash(root.statistics.entities.names) do
+ if not builtinentities[k] then
+ local e = entities[k]
+ if not e then
+ e = format("[%s]",k)
+ elseif hexify then
+ e = format("&#%05X;",utfbyte(k))
+ end
+ list[#list+1] = format(" <!ENTITY %s %q >",k,e)
+ end
+ end
+ local dt = root.dt
+ local n = dt[1].tg == "@pi@" and 2 or 1
+ if #list > 0 then
+ insert(dt, n, { "\n" })
+ insert(dt, n, {
+ tg = "@dt@", -- beware, doctype is unparsed
+ dt = { format("Something [\n%s\n] ",concat(list)) },
+ ns = "",
+ special = true,
+ })
+ insert(dt, n, { "\n\n" })
+ else
+ -- insert(dt, n, { table.serialize(root.statistics) })
+ end
+ end
+end
+
+-- local str = [==[
+-- <?xml version='1.0' standalone='yes' ?>
+-- <root>
+-- <a>test &nbsp; test &#123; test</a>
+-- <b><![CDATA[oeps]]></b>
+-- </root>
+-- ]==]
+--
+-- local x = xml.convert(str)
+-- xml.addentitiesdoctype(x,"hexadecimal")
+-- print(x)
+
+--[[ldx--
<p>Here are a few synonyms.</p>
--ldx]]--
@@ -9378,6 +10181,53 @@ xml.remap_tag = xml.remaptag obsolete.remap_tag
xml.remap_name = xml.remapname obsolete.remap_name = xml.remapname
xml.remap_namespace = xml.remapnamespace obsolete.remap_namespace = xml.remapnamespace
+-- new (probably ok)
+
+function xml.cdata(e)
+ if e then
+ local dt = e.dt
+ if dt and #dt == 1 then
+ local first = dt[1]
+ return first.tg == "@cd@" and first.dt[1] or ""
+ end
+ end
+ return ""
+end
+
+function xml.finalizers.xml.cdata(collected)
+ if collected then
+ local e = collected[1]
+ if e then
+ local dt = e.dt
+ if dt and #dt == 1 then
+ local first = dt[1]
+ return first.tg == "@cd@" and first.dt[1] or ""
+ end
+ end
+ end
+ return ""
+end
+
+function xml.insertcomment(e,str,n) -- also insertcdata
+ table.insert(e.dt,n or 1,{
+ tg = "@cm@",
+ ns = "",
+ special = true,
+ at = { },
+ dt = { str },
+ })
+end
+
+function xml.setcdata(e,str) -- also setcomment
+ e.dt = { {
+ tg = "@cd@",
+ ns = "",
+ special = true,
+ at = { },
+ dt = { str },
+ } }
+end
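+
+-- a rough usage sketch (illustrative, e stands for some element table): wrap an
+-- element's content in cdata or prepend a comment node:
+--
+-- xml.setcdata(e,"raw <stuff> kept as-is")
+-- xml.insertcomment(e,"generated",1)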
+
end -- of closure
@@ -9391,15 +10241,17 @@ if not modules then modules = { } end modules ['lxml-xml'] = {
license = "see context related readme files"
}
-local concat = string.concat
+local concat = table.concat
+local find = string.find
local xml = xml
-local finalizers = xml.finalizers.xml
-local xmlfilter = xml.filter -- we could inline this one for speed
-local xmltostring = xml.tostring
-local xmlserialize = xml.serialize
-local xmlcollected = xml.collected
+local finalizers = xml.finalizers.xml
+local xmlfilter = xml.filter -- we could inline this one for speed
+local xmltostring = xml.tostring
+local xmlserialize = xml.serialize
+local xmlcollected = xml.collected
+local xmlnewhandlers = xml.newhandlers
local function first(collected) -- wrong ?
return collected and collected[1]
@@ -9413,6 +10265,21 @@ local function all(collected)
return collected
end
+-- local function reverse(collected)
+-- if collected then
+-- local nc = #collected
+-- if nc > 0 then
+-- local reversed, r = { }, 0
+-- for c=nc,1,-1 do
+-- r = r + 1
+-- reversed[r] = collected[c]
+-- end
+-- return reversed
+-- else
+-- return collected
+-- end
+-- end
+-- end
local reverse = table.reversed
@@ -9429,34 +10296,37 @@ local function att(id,name)
end
local function count(collected)
- return (collected and #collected) or 0
+ return collected and #collected or 0
end
local function position(collected,n)
- if collected then
- n = tonumber(n) or 0
- if n < 0 then
- return collected[#collected + n + 1]
- elseif n > 0 then
- return collected[n]
- else
- return collected[1].mi or 0
- end
+ if not collected then
+ return 0
+ end
+ local nc = #collected
+ if nc == 0 then
+ return 0
+ end
+ n = tonumber(n) or 0
+ if n < 0 then
+ return collected[nc + n + 1]
+ elseif n > 0 then
+ return collected[n]
+ else
+ return collected[1].mi or 0
end
end
local function match(collected)
- return (collected and collected[1].mi) or 0 -- match
+ return collected and #collected > 0 and collected[1].mi or 0 -- match
end
local function index(collected)
- if collected then
- return collected[1].ni
- end
+ return collected and #collected > 0 and collected[1].ni or 0 -- 0 is new
end
local function attributes(collected,arguments)
- if collected then
+ if collected and #collected > 0 then
local at = collected[1].at
if arguments then
return at[arguments]
@@ -9467,7 +10337,7 @@ local function attributes(collected,arguments)
end
local function chainattribute(collected,arguments) -- todo: optional levels
- if collected then
+ if collected and #collected > 0 then
local e = collected[1]
while e do
local at = e.at
@@ -9485,108 +10355,169 @@ local function chainattribute(collected,arguments) -- todo: optional levels
return ""
end
-local function raw(collected) -- hybrid
- if collected then
+local function raw(collected) -- hybrid (not much different from text so it might go)
+ if collected and #collected > 0 then
local e = collected[1] or collected
- return (e and xmlserialize(e)) or "" -- only first as we cannot concat function
+ return e and xmltostring(e) or "" -- only first as we cannot concat function
else
return ""
end
end
+--
+
+local xmltexthandler = xmlnewhandlers {
+ name = "string",
+ initialize = function()
+ result = { }
+ return result
+ end,
+ finalize = function()
+ return concat(result)
+ end,
+ handle = function(...)
+ result[#result+1] = concat { ... }
+ end,
+ escape = false,
+}
+
+local function xmltotext(root)
+ local dt = root.dt
+ if not dt then
+ return ""
+ end
+ local nt = #dt -- string or table
+ if nt == 0 then
+ return ""
+ elseif nt == 1 and type(dt[1]) == "string" then
+ return dt[1] -- no escaping of " ' < > &
+ else
+ return xmlserialize(root,xmltexthandler) or ""
+ end
+end
+
+--
+
local function text(collected) -- hybrid
- if collected then
- local e = collected[1] or collected
- return (e and xmltostring(e.dt)) or ""
+ if collected then -- no # test here !
+ local e = collected[1] or collected -- why fallback to element, how about cdata
+ return e and xmltotext(e) or ""
else
return ""
end
end
local function texts(collected)
- if collected then
- local t, n = { }, 0
- for c=1,#collected do
- local e = collected[c]
- if e and e.dt then
- n = n + 1
- t[n] = e.dt
- end
+ if not collected then
+ return { } -- why no nil
+ end
+ local nc = #collected
+ if nc == 0 then
+ return { } -- why no nil
+ end
+ local t, n = { }, 0
+ for c=1,nc do
+ local e = collected[c]
+ if e and e.dt then
+ n = n + 1
+ t[n] = e.dt
end
- return t
end
+ return t
end
local function tag(collected,n)
- if collected then
- local c
- if n == 0 or not n then
- c = collected[1]
- elseif n > 1 then
- c = collected[n]
- else
- c = collected[#collected-n+1]
- end
- return c and c.tg
+ if not collected then
+ return
+ end
+ local nc = #collected
+ if nc == 0 then
+ return
end
+ local c
+ if n == 0 or not n then
+ c = collected[1]
+ elseif n > 1 then
+ c = collected[n]
+ else
+ c = collected[nc-n+1]
+ end
+ return c and c.tg
end
local function name(collected,n)
- if collected then
- local c
- if n == 0 or not n then
- c = collected[1]
- elseif n > 1 then
- c = collected[n]
- else
- c = collected[#collected-n+1]
- end
- if c then
- if c.ns == "" then
- return c.tg
- else
- return c.ns .. ":" .. c.tg
- end
- end
+ if not collected then
+ return
+ end
+ local nc = #collected
+ if nc == 0 then
+ return
+ end
+ local c
+ if n == 0 or not n then
+ c = collected[1]
+ elseif n > 1 then
+ c = collected[n]
+ else
+ c = collected[nc-n+1]
+ end
+ if not c then
+ -- sorry
+ elseif c.ns == "" then
+ return c.tg
+ else
+ return c.ns .. ":" .. c.tg
end
end
local function tags(collected,nonamespace)
- if collected then
- local t, n = { }, 0
- for c=1,#collected do
- local e = collected[c]
- local ns, tg = e.ns, e.tg
- n = n + 1
- if nonamespace or ns == "" then
- t[n] = tg
- else
- t[n] = ns .. ":" .. tg
- end
+ if not collected then
+ return
+ end
+ local nc = #collected
+ if nc == 0 then
+ return
+ end
+ local t, n = { }, 0
+ for c=1,nc do
+ local e = collected[c]
+ local ns, tg = e.ns, e.tg
+ n = n + 1
+ if nonamespace or ns == "" then
+ t[n] = tg
+ else
+ t[n] = ns .. ":" .. tg
end
- return t
end
+ return t
end
-local function empty(collected)
- if collected then
- for c=1,#collected do
- local e = collected[c]
- if e then
- local edt = e.dt
- if edt then
- local n = #edt
- if n == 1 then
- local edk = edt[1]
- local typ = type(edk)
- if typ == "table" then
- return false
- elseif edk ~= "" then -- maybe an extra tester for spacing only
- return false
- end
- elseif n > 1 then
+local function empty(collected,spacesonly)
+ if not collected then
+ return true
+ end
+ local nc = #collected
+ if nc == 0 then
+ return true
+ end
+ for c=1,nc do
+ local e = collected[c]
+ if e then
+ local edt = e.dt
+ if edt then
+ local n = #edt
+ if n == 1 then
+ local edk = edt[1]
+ local typ = type(edk)
+ if typ == "table" then
+ return false
+ elseif edk ~= "" then
+ return false
+ elseif spacesonly and not find(edk,"%S") then
return false
end
+ elseif n > 1 then
+ return false
end
end
end
@@ -9641,14 +10572,14 @@ function xml.raw(id,pattern)
end
end
-function xml.text(id,pattern)
+function xml.text(id,pattern) -- brrr either content or element (when cdata)
if pattern then
-- return text(xmlfilter(id,pattern))
local collected = xmlfilter(id,pattern)
- return (collected and xmltostring(collected[1].dt)) or ""
+ return collected and #collected > 0 and xmltotext(collected[1]) or ""
elseif id then
-- return text(id)
- return xmltostring(id.dt) or ""
+ return xmltotext(id) or ""
else
return ""
end
@@ -9656,6 +10587,8 @@ end
xml.content = text
+--
+
function xml.position(id,pattern,n) -- element
return position(xmlfilter(id,pattern),n)
end
@@ -9664,8 +10597,8 @@ function xml.match(id,pattern) -- number
return match(xmlfilter(id,pattern))
end
-function xml.empty(id,pattern)
- return empty(xmlfilter(id,pattern))
+function xml.empty(id,pattern,spacesonly)
+ return empty(xmlfilter(id,pattern),spacesonly)
end
xml.all = xml.filter
@@ -9719,7 +10652,7 @@ if not modules then modules = { } end modules ['data-ini'] = {
license = "see context related readme files",
}
-local gsub, find, gmatch = string.gsub, string.find, string.gmatch
+local gsub, find, gmatch, char = string.gsub, string.find, string.gmatch, string.char
local concat = table.concat
local next, type = next, type
@@ -9781,7 +10714,7 @@ do
local homedir = osgetenv(ostype == "windows" and 'USERPROFILE' or 'HOME') or ''
if not homedir or homedir == "" then
-        homedir = string.char(127) -- we need a value, later we will trigger on it
+        homedir = char(127) -- we need a value, later we will trigger on it
end
homedir = file.collapsepath(homedir)
@@ -9954,7 +10887,7 @@ if not modules then modules = { } end modules ['data-exp'] = {
license = "see context related readme files",
}
-local format, find, gmatch, lower = string.format, string.find, string.gmatch, string.lower
+local format, find, gmatch, lower, char, sub = string.format, string.find, string.gmatch, string.lower, string.char, string.sub
local concat, sort = table.concat, table.sort
local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
local Ct, Cs, Cc, P, C, S = lpeg.Ct, lpeg.Cs, lpeg.Cc, lpeg.P, lpeg.C, lpeg.S
@@ -9974,19 +10907,6 @@ local resolvers = resolvers
-- all, when working on the main resolver code, I don't want to scroll
-- past this every time. See data-obs.lua for the gsub variant.
--- {a,b,c,d}
--- a,b,c/{p,q,r},d
--- a,b,c/{p,q,r}/d/{x,y,z}//
--- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
--- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
--- a{b,c}{d,e}f
--- {a,b,c,d}
--- {a,b,c/{p,q,r},d}
--- {a,b,c/{p,q,r}/d/{x,y,z}//}
--- {a,b,c/{p,q/{x,y,z}},d/{p,q,r}}
--- {a,b,c/{p,q/{x,y,z},w}v,d/{p,q,r}}
--- {$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,.local,}/web2c}
-
local function f_first(a,b)
local t, n = { }, 0
for s in gmatch(b,"[^,]+") do
@@ -10043,12 +10963,9 @@ local function splitpathexpr(str, newlist, validate) -- I couldn't resist lpeggi
str = lpegmatch(stripper_1,str)
if validate then
for s in gmatch(str,"[^,]+") do
- local restoreslashes = false
- if find(s, "//$") then restoreslashes = true end
s = validate(s)
if s then
n = n + 1 ; t[n] = s
- if restoreslashes and not find(s, "//$") then t[n] = t[n] .. '//' end
end
end
else
@@ -10081,41 +10998,56 @@ function resolvers.expandedpathfromlist(pathlist)
return newlist
end
+-- {a,b,c,d}
+-- a,b,c/{p,q,r},d
+-- a,b,c/{p,q,r}/d/{x,y,z}//
+-- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
+-- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
+-- a{b,c}{d,e}f
+-- {a,b,c,d}
+-- {a,b,c/{p,q,r},d}
+-- {a,b,c/{p,q,r}/d/{x,y,z}//}
+-- {a,b,c/{p,q/{x,y,z}},d/{p,q,r}}
+-- {a,b,c/{p,q/{x,y,z},w}v,d/{p,q,r}}
+-- {$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,.local,}/web2c}
+
local cleanup = lpeg.replacer {
{ "!" , "" },
{ "\\" , "/" },
}
-local homedir
-
-function resolvers.cleanpath(str)
- if not homedir then
- homedir = lpegmatch(cleanup,environment.homedir or "")
- if homedir == string.char(127) or homedir == "" or not lfs.isdir(homedir) then
- if trace_expansions then
- report_expansions("no home dir set, ignoring dependent paths")
- end
- function resolvers.cleanpath(str)
- if find(str,"~") then
- return "" -- special case
- else
- return str and lpegmatch(cleanup,str)
- end
- end
- else
- cleanup = lpeg.replacer {
- { "!" , "" },
- { "\\" , "/" },
- { "~" , homedir },
- }
- function resolvers.cleanpath(str)
- return str and lpegmatch(cleanup,str)
+function resolvers.cleanpath(str) -- tricky, maybe only simple paths
+ local doslashes = (P("\\")/"/" + 1)^0
+ local donegation = (P("!") /"" )^0
+ local homedir = lpegmatch(Cs(donegation * doslashes),environment.homedir or "")
+ if homedir == "~" or homedir == "" or not lfs.isdir(homedir) then
+ if trace_expansions then
+ report_expansions("no home dir set, ignoring dependent paths")
+ end
+ function resolvers.cleanpath(str)
+ if not str or find(str,"~") then
+ return "" -- special case
+ else
+ return lpegmatch(cleanup,str)
end
end
+ else
+ local dohome = ((P("~")+P("$HOME"))/homedir)^0
+ local cleanup = Cs(donegation * dohome * doslashes)
+ function resolvers.cleanpath(str)
+ return str and lpegmatch(cleanup,str) or ""
+ end
end
return resolvers.cleanpath(str)
end
+-- print(resolvers.cleanpath(""))
+-- print(resolvers.cleanpath("!"))
+-- print(resolvers.cleanpath("~"))
+-- print(resolvers.cleanpath("~/test"))
+-- print(resolvers.cleanpath("!~/test"))
+-- print(resolvers.cleanpath("~/test~test"))
+
-- This one strips quotes and funny tokens.
local expandhome = P("~") / "$HOME" -- environment.homedir
@@ -10140,8 +11072,8 @@ end
local cache = { }
----- splitter = Ct(lpeg.splitat(S(ostype == "windows" and ";" or ":;"))) -- maybe add ,
-local splitter = Ct(lpeg.splitat(";")) -- as we move towards urls, prefixes and use tables we no longer do :
+----- splitter = lpeg.tsplitat(S(ostype == "windows" and ";" or ":;")) -- maybe add ,
+local splitter = lpeg.tsplitat(";") -- as we move towards urls, prefixes and use tables we no longer do :
local backslashswapper = lpeg.replacer("\\","/")
@@ -10201,13 +11133,22 @@ end
-local weird = P(".")^1 + lpeg.anywhere(S("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t"))
+-- a lot of this caching can be stripped away when we have ssd's everywhere
+--
+-- we could cache all the (sub)paths here if needed
local attributes, directory = lfs.attributes, lfs.dir
+local weird = P(".")^1 + lpeg.anywhere(S("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t"))
+local timer = { }
+local scanned = { }
+local nofscans = 0
+local scancache = { }
+
local function scan(files,spec,path,n,m,r)
- local full = (path == "" and spec) or (spec .. path .. '/')
- local dirs, nofdirs = { }, 0
+ local full = (path == "" and spec) or (spec .. path .. '/')
+ local dirs = { }
+ local nofdirs = 0
for name in directory(full) do
if not lpegmatch(weird,name) then
local mode = attributes(full..name,'mode')
@@ -10245,25 +11186,122 @@ local function scan(files,spec,path,n,m,r)
files, n, m, r = scan(files,spec,dirs[i],n,m,r)
end
end
+ scancache[sub(full,1,-2)] = files
return files, n, m, r
end
-function resolvers.scanfiles(path,branch)
+local fullcache = { }
+
+function resolvers.scanfiles(path,branch,usecache)
+ statistics.starttiming(timer)
+ local realpath = resolvers.resolve(path) -- no shortcut
+ if usecache then
+ local files = fullcache[realpath]
+ if files then
+ if trace_locating then
+                report_expansions("using cached scan of path '%s', branch '%s'",path,branch or path)
+ end
+ return files
+ end
+ end
if trace_locating then
- report_expansions("scanning path '%s', branch '%s'",path, branch or path)
+ report_expansions("scanning path '%s', branch '%s'",path,branch or path)
end
- local realpath = resolvers.resolve(path) -- no shortcut
local files, n, m, r = scan({ },realpath .. '/',"",0,0,0)
- files.__path__ = path -- can be selfautoparent:texmf-whatever
- files.__files__ = n
- files.__directories__ = m
- files.__remappings__ = r
+ files.__path__ = path -- can be selfautoparent:texmf-whatever
+ files.__files__ = n
+ files.__directories__ = m
+ files.__remappings__ = r
if trace_locating then
report_expansions("%s files found on %s directories with %s uppercase remappings",n,m,r)
end
+ if usecache then
+ scanned[#scanned+1] = realpath
+ fullcache[realpath] = files
+ end
+ nofscans = nofscans + 1
+ statistics.stoptiming(timer)
+ return files
+end
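+
+-- a rough usage sketch (illustrative, the path is hypothetical): with usecache
+-- set, a second scan of the same resolved path reuses the stored table:
+--
+-- local a = resolvers.scanfiles("selfautoparent:texmf-local",nil,true) -- scans disk
+-- local b = resolvers.scanfiles("selfautoparent:texmf-local",nil,true) -- cache hit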
+
+local function simplescan(files,spec,path) -- first match only, no map and such
+ local full = (path == "" and spec) or (spec .. path .. '/')
+ local dirs = { }
+ local nofdirs = 0
+ for name in directory(full) do
+ if not lpegmatch(weird,name) then
+ local mode = attributes(full..name,'mode')
+ if mode == 'file' then
+ if not files[name] then
+ -- only first match
+ files[name] = path
+ end
+ elseif mode == 'directory' then
+ nofdirs = nofdirs + 1
+ if path ~= "" then
+ dirs[nofdirs] = path..'/'..name
+ else
+ dirs[nofdirs] = name
+ end
+ end
+ end
+ end
+ if nofdirs > 0 then
+ sort(dirs)
+ for i=1,nofdirs do
+ files = simplescan(files,spec,dirs[i])
+ end
+ end
+ return files
+end
+
+local simplecache = { }
+local nofsharedscans = 0
+
+function resolvers.simplescanfiles(path,branch,usecache)
+ statistics.starttiming(timer)
+ local realpath = resolvers.resolve(path) -- no shortcut
+ if usecache then
+ local files = simplecache[realpath]
+ if not files then
+ files = scancache[realpath]
+ if files then
+ nofsharedscans = nofsharedscans + 1
+ end
+ end
+ if files then
+ if trace_locating then
+                report_expansions("using cached scan of path '%s', branch '%s'",path,branch or path)
+ end
+ return files
+ end
+ end
+ if trace_locating then
+ report_expansions("scanning path '%s', branch '%s'",path,branch or path)
+ end
+ local files = simplescan({ },realpath .. '/',"")
+ if trace_locating then
+ report_expansions("%s files found",table.count(files))
+ end
+ if usecache then
+ scanned[#scanned+1] = realpath
+ simplecache[realpath] = files
+ end
+ nofscans = nofscans + 1
+ statistics.stoptiming(timer)
return files
end
+function resolvers.scandata()
+ table.sort(scanned)
+ return {
+ n = nofscans,
+ shared = nofsharedscans,
+ time = statistics.elapsedtime(timer),
+ paths = scanned,
+ }
+end
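+
+-- a quick inspection sketch (illustrative): scandata reports how many scans were
+-- done, how many were shared with the simple scanner, and the time spent:
+--
+-- local d = resolvers.scandata()
+-- print(d.n, d.shared, d.time)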
+
end -- of closure
@@ -10361,12 +11399,12 @@ local relations = allocate { -- todo: handlers also here
mp = {
names = { "mp" },
variable = 'MPINPUTS',
- suffixes = { 'mp' },
+ suffixes = { 'mp', 'mpvi', 'mpiv', 'mpii' },
},
tex = {
names = { "tex" },
variable = 'TEXINPUTS',
- suffixes = { 'tex', "mkiv", "mkiv", "mkii" },
+ suffixes = { 'tex', "mkvi", "mkiv", "mkii" },
},
icc = {
names = { "icc", "icc profile", "icc profiles" },
@@ -10503,7 +11541,7 @@ end
resolvers.updaterelations() -- push this in the metatable -> newindex
local function simplified(t,k)
- return rawget(t,lower(gsub(k," ","")))
+ return k and rawget(t,lower(gsub(k," ",""))) or nil
end
setmetatableindex(formats, simplified)
@@ -10589,6 +11627,7 @@ luatools with a recache feature.</p>
--ldx]]--
local format, lower, gsub, concat = string.format, string.lower, string.gsub, table.concat
+local serialize, serializetofile = table.serialize, table.tofile
local mkdirs, isdir = dir.mkdirs, lfs.isdir
local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
@@ -10651,11 +11690,7 @@ local function identify()
end
elseif not writable and caches.force then
local cacheparent = file.dirname(cachepath)
- -- TH: added 'or true' for deeply buried non-existent caches.
- -- file.is_writable() is not really important here, since this
- -- branch is only checked for as-yet non-existent paths, and
- -- it guards against mkdirs() failing, anyway.
- if file.is_writable(cacheparent) or true then
+ if file.is_writable(cacheparent) and true then -- we go on anyway (needed for mojca's kind of paths)
if not caches.ask or io.ask(format("\nShould I create the cache path %s?",cachepath), "no", { "yes", "no" }) == "yes" then
mkdirs(cachepath)
if isdir(cachepath) and file.is_writable(cachepath) then
@@ -10746,7 +11781,7 @@ function caches.usedpaths()
end
function caches.configfiles()
- return table.concat(resolvers.instance.specification,";")
+ return concat(resolvers.instance.specification,";")
end
function caches.hashed(tree)
@@ -10870,9 +11905,9 @@ function caches.savedata(filepath,filename,data,raw)
end
data.cache_uuid = os.uuid()
if caches.direct then
- file.savedata(tmaname,table.serialize(data,true,saveoptions))
+ file.savedata(tmaname,serialize(data,true,saveoptions))
else
- table.tofile(tmaname,data,true,saveoptions)
+ serializetofile(tmaname,data,true,saveoptions)
end
utilities.lua.compile(tmaname,tmcname)
end
@@ -10939,7 +11974,7 @@ function caches.savecontent(cachename,dataname,content)
content = content,
uuid = os.uuid(),
}
- local ok = io.savedata(luaname,table.serialize(data,true))
+ local ok = io.savedata(luaname,serialize(data,true))
if ok then
if trace_locating then
report_resolvers("category '%s', cachename '%s' saved in '%s'",dataname,cachename,luaname)
@@ -11112,7 +12147,7 @@ if not modules then modules = { } end modules ['data-res'] = {
-- instance but for practical purposes we now avoid this and use a
-- instance variable. We always have one instance active (sort of global).
--- todo: cache:/// home:///
+-- todo: cache:/// home:/// selfautoparent:/// (sometime end 2012)
local format, gsub, find, lower, upper, match, gmatch = string.format, string.gsub, string.find, string.lower, string.upper, string.match, string.gmatch
local concat, insert, sortedkeys = table.concat, table.insert, table.sortedkeys
@@ -11155,7 +12190,58 @@ resolvers.criticalvars = allocate { "SELFAUTOLOC", "SELFAUTODIR", "SELFAUTOPARE
resolvers.luacnfname = 'texmfcnf.lua'
resolvers.luacnfstate = "unknown"
-resolvers.luacnfspec = '{selfautodir:,selfautoparent:};{selfautodir:,selfautoparent:}{/share,}/texmf{-local,};{selfautodir:,selfautoparent:}{/share,}/texmf{-local,}/web2c'
+-- The web2c tex binaries as well as kpse have built in paths for the configuration
+-- files and there can be a depressing truckload of them. This is actually the weak
+-- spot of a distribution. So we don't want:
+--
+-- resolvers.luacnfspec = '{$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,}/web2c}'
+--
+-- but instead use:
+--
+-- resolvers.luacnfspec = 'selfautoparent:{/texmf{-local,}{,/web2c}}'
+--
+-- which does not make texlive happy as there is a texmf-local tree one level up
+-- (sigh), so we need this. (We can assume web2c as mkiv does not run on older
+-- texlives anyway.)
+--
+-- texlive:
+--
+-- selfautodir:
+-- selfautoparent:
+-- selfautodir:share/texmf-local/web2c
+-- selfautodir:share/texmf/web2c
+-- selfautodir:texmf-local/web2c
+-- selfautodir:texmf/web2c
+-- selfautoparent:share/texmf-local/web2c
+-- selfautoparent:share/texmf/web2c
+-- selfautoparent:texmf-local/web2c
+-- selfautoparent:texmf/web2c
+--
+-- minimals:
+--
+-- home:texmf/web2c
+-- selfautoparent:texmf-local/web2c
+-- selfautoparent:texmf-context/web2c
+-- selfautoparent:texmf/web2c
+
+if this_is_texlive then
+ -- resolvers.luacnfspec = '{selfautodir:,selfautoparent:}{,{/share,}/texmf{-local,}/web2c}'
+ -- resolvers.luacnfspec = '{selfautodir:{/share,}/texmf-local/web2c,selfautoparent:{/share,}/texmf{-local,}/web2c}'
+ -- resolvers.luacnfspec = 'selfautodir:/texmf-local/web2c;selfautoparent:/texmf{-local,}/web2c'
+ resolvers.luacnfspec = 'selfautodir:;selfautoparent:;{selfautodir:,selfautoparent:}{/share,}/texmf{-local,}/web2c'
+else
+ resolvers.luacnfspec = 'home:texmf/web2c;selfautoparent:texmf{-local,-context,}/web2c'
+end
+
+-- which (as we want users to use the web2c path) can be simplified to this:
+--
+-- if environment and environment.ownpath and string.find(environment.ownpath,"[\\/]texlive[\\/]") then
+-- resolvers.luacnfspec = 'selfautodir:/texmf-local/web2c,selfautoparent:/texmf-local/web2c,selfautoparent:/texmf/web2c'
+-- else
+-- resolvers.luacnfspec = 'selfautoparent:/texmf-local/web2c,selfautoparent:/texmf/web2c'
+-- end
+
+
local unset_variable = "unset"
@@ -11177,7 +12263,7 @@ local instance = resolvers.instance or nil -- the current one (fast access)
function resolvers.setenv(key,value,raw)
if instance then
-- this one will be consulted first when we stay inside
- -- the current environment
+ -- the current environment (prefixes are not resolved here)
instance.environment[key] = value
-- we feed back into the environment, and as this is used
-- by other applications (via os.execute) we need to make
@@ -11257,6 +12343,7 @@ function resolvers.newinstance() -- todo: all vars will become lowercase and alp
remember = true,
diskcache = true,
renewcache = false,
+ renewtree = false,
loaderror = false,
savelists = true,
pattern = nil, -- lists
@@ -11348,7 +12435,7 @@ local function makepathexpression(str)
end
end
-local function reportcriticalvariables()
+local function reportcriticalvariables(cnfspec)
if trace_locating then
for i=1,#resolvers.criticalvars do
local k = resolvers.criticalvars[i]
@@ -11356,6 +12443,14 @@ local function reportcriticalvariables()
report_resolving("variable '%s' set to '%s'",k,v)
end
report_resolving()
+ if cnfspec then
+ if type(cnfspec) == "table" then
+ report_resolving("using configuration specification '%s'",concat(cnfspec,","))
+ else
+ report_resolving("using configuration specification '%s'",cnfspec)
+ end
+ end
+ report_resolving()
end
reportcriticalvariables = function() end
end
@@ -11370,7 +12465,7 @@ local function identify_configuration_files()
else
resolvers.luacnfstate = "environment"
end
- reportcriticalvariables()
+ reportcriticalvariables(cnfspec)
local cnfpaths = expandedpathfromlist(resolvers.splitpath(cnfspec))
local luacnfname = resolvers.luacnfname
for i=1,#cnfpaths do
@@ -11406,6 +12501,19 @@ local function load_configuration_files()
if blob then
local setups = instance.setups
local data = blob()
+ local parent = data and data.parent
+ if parent then
+ local filename = filejoin(pathname,parent)
+ local realname = resolvers.resolve(filename) -- no shortcut
+ local blob = loadfile(realname)
+ if blob then
+ local parentdata = blob()
+ if parentdata then
+ report_resolving("loading configuration file '%s'",filename)
+ data = table.merged(parentdata,data)
+ end
+ end
+ end
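-- Illustrative sketch (not from the patch itself) of the parent mechanism
-- above, assuming that table.merged lets the child entries win over the
-- parent ones, which is what the argument order suggests; both tables are
-- invented examples of what a texmfcnf.lua chunk might return.

local parentcnf_sketch = {
    content = {
        TEXMF      = "!!selfautoparent:texmf",
        TEXMFCACHE = "selfautoparent:texmf-cache",
    }
}

local childcnf_sketch = {
    parent  = "../texmfcnf.lua", -- invented relative location
    content = {
        TEXMFCACHE = "home:.texmf-cache",
    }
}

local function shallowmerged_sketch(parent,child)
    local merged = { }
    for k, v in pairs(parent) do merged[k] = v end
    for k, v in pairs(child)  do merged[k] = v end -- child overrides parent
    return merged
end

local cnf_sketch = shallowmerged_sketch(parentcnf_sketch.content,childcnf_sketch.content)
-- cnf_sketch.TEXMF      : "!!selfautoparent:texmf" (inherited from the parent)
-- cnf_sketch.TEXMFCACHE : "home:.texmf-cache"      (overridden by the child)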
data = data and data.content
if data then
if trace_locating then
@@ -11495,11 +12603,6 @@ local function locate_file_databases()
local runtime = stripped == path
path = resolvers.cleanpath(path)
local spec = resolvers.splitmethod(stripped)
- -- TH Original did this the other way around (elseif and if tests
- -- reversed) but then the else branch was never reached, as 'runtime'
- -- would effectively be ignored. In turn, that meant that the wrong
- -- locator method was used, such that tree: and file: were treated
- -- identically (cached and no runtime search).
if runtime and (spec.noscheme or spec.scheme == "file") then
stripped = "tree:///" .. stripped
elseif spec.scheme == "cache" or spec.scheme == "file" then
@@ -11507,7 +12610,7 @@ local function locate_file_databases()
end
if trace_locating then
if runtime then
- report_resolving("locating list of '%s' (runtime)",path)
+ report_resolving("locating list of '%s' (runtime) (%s)",path,stripped)
else
report_resolving("locating list of '%s' (cached)",path)
end
@@ -11551,6 +12654,39 @@ local function save_file_databases() -- will become cachers
end
end
+function resolvers.renew(hashname)
+ if hashname and hashname ~= "" then
+ local expanded = resolvers.expansion(hashname) or ""
+ if expanded ~= "" then
+ if trace_locating then
+ report_resolving("identifying tree '%s' from '%s'",expanded,hashname)
+ end
+ hashname = expanded
+ else
+ if trace_locating then
+ report_resolving("identifying tree '%s'",hashname)
+ end
+ end
+ local realpath = resolvers.resolve(hashname)
+ if lfs.isdir(realpath) then
+ if trace_locating then
+ report_resolving("using path '%s'",realpath)
+ end
+ methodhandler('generators',hashname)
+ -- could be shared
+ local content = instance.files[hashname]
+ caches.collapsecontent(content)
+ if trace_locating then
+ report_resolving("saving tree '%s'",hashname)
+ end
+ caches.savecontent(hashname,"files",content)
+ -- till here
+ else
+ report_resolving("invalid path '%s'",realpath)
+ end
+ end
+end
+
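-- Note (not from the patch itself): resolvers.renew refreshes the file
-- database of a single tree instead of wiping the whole cache; the
-- "--generate <name>" handling further down drives it as
--
--   resolvers.load("nofiles")
--   trackers.enable("resolvers.locating")
--   resolvers.renew(filename)
--
-- so, for instance, "mtxrun --generate texmf-local" (tree name invented
-- here) only rescans and resaves that one hash.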
local function load_databases()
locate_file_databases()
if instance.diskcache and not instance.renewcache then
@@ -11838,6 +12974,7 @@ local function collect_files(names)
if dname == "" or find(dname,"^%.") then
dname = false
else
+dname = gsub(dname,"*","%.*")
dname = "/" .. dname .. "$"
end
local hashes = instance.hashes
@@ -11897,9 +13034,22 @@ local function collect_files(names)
return noffiles > 0 and filelist or nil
end
-function resolvers.registerintrees(name)
- if not find(name,"^%.") then
- instance.foundintrees[name] = (instance.foundintrees[name] or 0) + 1 -- maybe only one
+local fit = { }
+
+function resolvers.registerintrees(filename,format,filetype,usedmethod,foundname)
+ local foundintrees = instance.foundintrees
+ if usedmethod == "direct" and filename == foundname and fit[foundname] then
+ -- just an extra lookup after a test on presence
+ else
+ local t = {
+ filename = filename,
+ format = format ~= "" and format or nil,
+ filetype = filetype ~= "" and filetype or nil,
+ usedmethod = usedmethod,
+ foundname = foundname,
+ }
+ fit[foundname] = t
+ foundintrees[#foundintrees+1] = t
end
end
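-- Illustrative sketch (not from the patch itself): the shape of one record
-- that registerintrees now appends to instance.foundintrees; all values
-- below are invented.
local foundintrees_entry_sketch = {
    filename   = "context.mkiv",
    format     = "tex",
    filetype   = "tex",
    usedmethod = "database",
    foundname  = "/opt/texmf/tex/context/base/context.mkiv",
}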
@@ -11919,297 +13069,423 @@ end
local preparetreepattern = Cs((P(".")/"%%." + P("-")/"%%-" + P(1))^0 * Cc("$"))
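-- Illustrative sketch (not from the patch itself): the lpeg above escapes
-- "." and "-" and appends "$", so a (base)name can be matched against the
-- tail of candidate paths; a rough string equivalent is:

local function preparetreepattern_sketch(name)
    return (name:gsub("([%.%-])","%%%1")) .. "$"
end

-- preparetreepattern_sketch("t-foo.mkiv") --> "t%-foo%.mkiv$"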
--- this one is split in smaller functions but it needs testing
+-- -- -- begin of main file search routing -- -- -- needs checking as previous has been patched
-local function collect_instance_files(filename,askedformat,allresults) -- todo : plugin (scanners, checkers etc)
- local result = { }
- local stamp = nil
- askedformat = askedformat or ""
- filename = collapsepath(filename)
- -- speed up / beware: format problem
- if instance.remember and not allresults then
- stamp = filename .. "--" .. askedformat
- if instance.found[stamp] then
+local collect_instance_files
+
+local function find_analyze(filename,askedformat,allresults)
+ local filetype, wantedfiles, ext = '', { }, fileextname(filename)
+ -- too tricky as filename can be bla.1.2.3:
+ --
+ -- if not suffixmap[ext] then
+ -- wantedfiles[#wantedfiles+1] = filename
+ -- end
+ wantedfiles[#wantedfiles+1] = filename
+ if askedformat == "" then
+ if ext == "" or not suffixmap[ext] then
+ local defaultsuffixes = resolvers.defaultsuffixes
+ for i=1,#defaultsuffixes do
+ local forcedname = filename .. '.' .. defaultsuffixes[i]
+ wantedfiles[#wantedfiles+1] = forcedname
+ filetype = resolvers.formatofsuffix(forcedname)
+ if trace_locating then
+ report_resolving("forcing filetype '%s'",filetype)
+ end
+ end
+ else
+ filetype = resolvers.formatofsuffix(filename)
if trace_locating then
- report_resolving("remembered file '%s'",filename)
+ report_resolving("using suffix based filetype '%s'",filetype)
end
- resolvers.registerintrees(filename) -- for tracing used files
- return instance.found[stamp]
end
- end
- if not dangerous[askedformat] then
- if isreadable(filename) then
- if trace_detail then
- report_resolving("file '%s' found directly",filename)
- end
- if stamp then
- instance.found[stamp] = { filename }
+ else
+ if ext == "" or not suffixmap[ext] then
+ local format_suffixes = suffixes[askedformat]
+ if format_suffixes then
+ for i=1,#format_suffixes do
+ wantedfiles[#wantedfiles+1] = filename .. "." .. format_suffixes[i]
+ end
end
- return { filename }
+ end
+ filetype = askedformat
+ if trace_locating then
+ report_resolving("using given filetype '%s'",filetype)
end
end
+ return filetype, wantedfiles
+end
+
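-- Illustrative sketch (not from the patch itself) of the suffix handling
-- above for an empty askedformat; the suffix list is invented, the real
-- code uses resolvers.defaultsuffixes and also consults suffixmap.

local defaultsuffixes_sketch = { "tex", "mkiv", "lua" }

local function wantedfiles_sketch(filename)
    local wanted = { filename }
    if not filename:find("%.%a+$") then -- no usable suffix given
        for i=1,#defaultsuffixes_sketch do
            wanted[#wanted+1] = filename .. "." .. defaultsuffixes_sketch[i]
        end
    end
    return wanted
end

-- wantedfiles_sketch("context")     --> context, context.tex, context.mkiv, context.lua
-- wantedfiles_sketch("context.tex") --> context.tex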
+local function find_direct(filename,allresults)
+ if not dangerous[askedformat] and isreadable(filename) then
+ if trace_detail then
+ report_resolving("file '%s' found directly",filename)
+ end
+ return "direct", { filename }
+ end
+end
+
+local function find_wildcard(filename,allresults)
if find(filename,'%*') then
if trace_locating then
report_resolving("checking wildcard '%s'", filename)
end
- result = resolvers.findwildcardfiles(filename) -- we can use th elocal
- elseif file.is_qualified_path(filename) then
- if isreadable(filename) then
- if trace_locating then
- report_resolving("qualified name '%s'", filename)
- end
- result = { filename }
- else
- local forcedname, ok, suffix = "", false, fileextname(filename)
- if suffix == "" then -- why
- local format_suffixes = askedformat == "" and resolvers.defaultsuffixes or suffixes[askedformat]
- if format_suffixes then
- for i=1,#format_suffixes do
- local s = format_suffixes[i]
- forcedname = filename .. "." .. s
- if isreadable(forcedname) then
- if trace_locating then
- report_resolving("no suffix, forcing format filetype '%s'", s)
- end
- result, ok = { forcedname }, true
- break
- end
- end
- end
- end
- if not ok and suffix ~= "" then
- -- try to find in tree (no suffix manipulation), here we search for the
- -- matching last part of the name
- local basename = filebasename(filename)
- local pattern = lpegmatch(preparetreepattern,filename)
- -- messy .. to be sorted out
- local savedformat = askedformat
- local format = savedformat or ""
- if format == "" then
- askedformat = resolvers.formatofsuffix(suffix)
- end
- if not format then
- askedformat = "othertextfiles" -- kind of everything, maybe texinput is better
- end
- --
- if basename ~= filename then
- local resolved = collect_instance_files(basename,askedformat,allresults)
- if #result == 0 then -- shouldn't this be resolved ?
- local lowered = lower(basename)
- if filename ~= lowered then
- resolved = collect_instance_files(lowered,askedformat,allresults)
- end
- end
- resolvers.format = savedformat
- --
- for r=1,#resolved do
- local rr = resolved[r]
- if find(rr,pattern) then
- result[#result+1], ok = rr, true
- end
- end
- end
- -- a real wildcard:
- --
- -- if not ok then
- -- local filelist = collect_files({basename})
- -- for f=1,#filelist do
- -- local ff = filelist[f][3] or ""
- -- if find(ff,pattern) then
- -- result[#result+1], ok = ff, true
- -- end
- -- end
- -- end
- end
- if not ok and trace_locating then
- report_resolving("qualified name '%s'", filename)
- end
+ local method, result = resolvers.findwildcardfiles(filename)
+ if result then
+ return "wildcard", result
end
- else
- -- search spec
- local filetype, done, wantedfiles, ext = '', false, { }, fileextname(filename)
- -- -- tricky as filename can be bla.1.2.3
- -- if not suffixmap[ext] then --- probably needs to be done elsewhere too
- -- wantedfiles[#wantedfiles+1] = filename
- -- end
- wantedfiles[#wantedfiles+1] = filename
- if askedformat == "" then
- if ext == "" or not suffixmap[ext] then
- local defaultsuffixes = resolvers.defaultsuffixes
- for i=1,#defaultsuffixes do
- local forcedname = filename .. '.' .. defaultsuffixes[i]
- wantedfiles[#wantedfiles+1] = forcedname
- filetype = resolvers.formatofsuffix(forcedname)
+ end
+end
+
+local function find_qualified(filename,allresults) -- this one will be split too
+ if not file.is_qualified_path(filename) then
+ return
+ end
+ if trace_locating then
+ report_resolving("checking qualified name '%s'", filename)
+ end
+ if isreadable(filename) then
+ if trace_detail then
+ report_resolving("qualified file '%s' found", filename)
+ end
+ return "qualified", { filename }
+ end
+ if trace_detail then
+ report_resolving("locating qualified file '%s'", filename)
+ end
+ local forcedname, suffix = "", fileextname(filename)
+ if suffix == "" then -- why
+ local format_suffixes = askedformat == "" and resolvers.defaultsuffixes or suffixes[askedformat]
+ if format_suffixes then
+ for i=1,#format_suffixes do
+ local s = format_suffixes[i]
+ forcedname = filename .. "." .. s
+ if isreadable(forcedname) then
if trace_locating then
- report_resolving("forcing filetype '%s'",filetype)
+ report_resolving("no suffix, forcing format filetype '%s'", s)
end
+ return "qualified", { forcedname }
end
- else
- filetype = resolvers.formatofsuffix(filename)
- if trace_locating then
- report_resolving("using suffix based filetype '%s'",filetype)
+ end
+ end
+ end
+ if suffix and suffix ~= "" then
+ -- try to find in tree (no suffix manipulation), here we search for the
+ -- matching last part of the name
+ local basename = filebasename(filename)
+ local pattern = lpegmatch(preparetreepattern,filename)
+ -- messy .. to be sorted out
+ local savedformat = askedformat
+ local format = savedformat or ""
+ if format == "" then
+ askedformat = resolvers.formatofsuffix(suffix)
+ end
+ if not format then
+ askedformat = "othertextfiles" -- kind of everything, maybe all
+ end
+ --
+ if basename ~= filename then
+ local resolved = collect_instance_files(basename,askedformat,allresults)
+ if #resolved == 0 then
+ local lowered = lower(basename)
+ if filename ~= lowered then
+ resolved = collect_instance_files(lowered,askedformat,allresults)
end
end
- else
- if ext == "" or not suffixmap[ext] then
- local format_suffixes = suffixes[askedformat]
- if format_suffixes then
- for i=1,#format_suffixes do
- wantedfiles[#wantedfiles+1] = filename .. "." .. format_suffixes[i]
+ resolvers.format = savedformat
+ --
+ if #resolved > 0 then
+ local result = { }
+ for r=1,#resolved do
+ local rr = resolved[r]
+ if find(rr,pattern) then
+ result[#result+1] = rr
end
end
+ if #result > 0 then
+ return "qualified", result
+ end
end
- filetype = askedformat
- if trace_locating then
- report_resolving("using given filetype '%s'",filetype)
- end
end
- local typespec = resolvers.variableofformat(filetype)
- local pathlist = resolvers.expandedpathlist(typespec)
- if not pathlist or #pathlist == 0 then
- -- no pathlist, access check only / todo == wildcard
- if trace_detail then
- report_resolving("checking filename '%s', filetype '%s', wanted files '%s'",filename, filetype or '?',concat(wantedfiles," | "))
+ -- a real wildcard:
+ --
+ -- local filelist = collect_files({basename})
+ -- result = { }
+ -- for f=1,#filelist do
+ -- local ff = filelist[f][3] or ""
+ -- if find(ff,pattern) then
+ -- result[#result+1], ok = ff, true
+ -- end
+ -- end
+ -- if #result > 0 then
+ -- return "qualified", result
+ -- end
+ end
+end
+
+local function check_subpath(fname)
+ if isreadable(fname) then
+ if trace_detail then
+ report_resolving("found '%s' by deep scanning",fname)
+ end
+ return fname
+ end
+end
+
+local function find_intree(filename,filetype,wantedfiles,allresults)
+ local typespec = resolvers.variableofformat(filetype)
+ local pathlist = resolvers.expandedpathlist(typespec)
+ local method = "intree"
+ if pathlist and #pathlist > 0 then
+ -- list search
+ local filelist = collect_files(wantedfiles)
+ local dirlist = { }
+ if filelist then
+ for i=1,#filelist do
+ dirlist[i] = filedirname(filelist[i][3]) .. "/" -- was [2] .. gamble
end
- for k=1,#wantedfiles do
- local fname = wantedfiles[k]
- if fname and isreadable(fname) then
- filename, done = fname, true
- result[#result+1] = filejoin('.',fname)
- break
+ end
+ if trace_detail then
+ report_resolving("checking filename '%s'",filename)
+ end
+ local result = { }
+ for k=1,#pathlist do
+ local path = pathlist[k]
+ local pathname = lpegmatch(inhibitstripper,path)
+ local doscan = path == pathname -- no ^!!
+ if not find (pathname,'//$') then
+ doscan = false -- we check directly on the path
+ end
+ local done = false
+ -- using file list
+ if filelist then -- database
+ -- compare list entries with permitted pattern -- /xx /xx//
+ local expression = makepathexpression(pathname)
+ if trace_detail then
+ report_resolving("using pattern '%s' for path '%s'",expression,pathname)
end
- end
- -- this is actually 'other text files' or 'any' or 'whatever'
- local filelist = collect_files(wantedfiles)
- local fl = filelist and filelist[1]
- if fl then
- filename = fl[3] -- not local?
- result[#result+1] = resolvers.resolve(filename)
- done = true
- end
- else
- -- list search
- local filelist = collect_files(wantedfiles)
- local dirlist = { }
- if filelist then
- for i=1,#filelist do
- dirlist[i] = filedirname(filelist[i][3]) .. "/" -- was [2] .. gamble
- end
- end
- if trace_detail then
- report_resolving("checking filename '%s'",filename)
- end
- for k=1,#pathlist do
- local path = pathlist[k]
- local pathname = lpegmatch(inhibitstripper,path)
- local doscan = path == pathname -- no ^!!
- if not find (pathname, '//$') then doscan = false end
- done = false
- -- using file list
- if filelist then
- -- compare list entries with permitted pattern -- /xx /xx//
- local expression = makepathexpression(pathname)
- if trace_detail then
- report_resolving("using pattern '%s' for path '%s'",expression,pathname)
- end
- for k=1,#filelist do
- local fl = filelist[k]
- local f = fl[2]
- local d = dirlist[k]
- if find(d,expression) then
- -- todo, test for readable
- result[#result+1] = resolvers.resolve(fl[3]) -- no shortcut
- done = true
- if allresults then
- if trace_detail then
- report_resolving("match to '%s' in hash for file '%s' and path '%s', continue scanning",expression,f,d)
- end
- else
- if trace_detail then
- report_resolving("match to '%s' in hash for file '%s' and path '%s', quit scanning",expression,f,d)
- end
- break
+ for k=1,#filelist do
+ local fl = filelist[k]
+ local f = fl[2]
+ local d = dirlist[k]
+ if find(d,expression) then
+ -- todo, test for readable
+ result[#result+1] = resolvers.resolve(fl[3]) -- no shortcut
+ done = true
+ if allresults then
+ if trace_detail then
+ report_resolving("match to '%s' in hash for file '%s' and path '%s', continue scanning",expression,f,d)
end
- elseif trace_detail then
- report_resolving("no match to '%s' in hash for file '%s' and path '%s'",expression,f,d)
+ else
+ if trace_detail then
+ report_resolving("match to '%s' in hash for file '%s' and path '%s', quit scanning",expression,f,d)
+ end
+ break
end
+ elseif trace_detail then
+ report_resolving("no match to '%s' in hash for file '%s' and path '%s'",expression,f,d)
end
end
- if not done then
- -- check if on disk / unchecked / does not work at all / also zips
- -- TH perhaps it did not work because of missing resolvers.resolve()
- -- and resolvers.scanfiles() calls ...
- pathname = resolvers.resolve(pathname)
- local scheme = url.hasscheme(pathname)
- if not scheme or scheme == "file" then
- local pname = gsub(pathname,"%.%*$",'')
- if not find(pname,"%*") then
- local ppname = gsub(pname,"/+$","")
- if can_be_dir(ppname) then
- local files = {}
- if doscan then files = resolvers.scanfiles(ppname) end
+ end
+ if done then
+ method = "database"
+ else
+ method = "filesystem" -- bonus, even when !! is specified
+ pathname = gsub(pathname,"/+$","")
+ pathname = resolvers.resolve(pathname)
+ local scheme = url.hasscheme(pathname)
+ if not scheme or scheme == "file" then
+ local pname = gsub(pathname,"%.%*$",'')
+ if not find(pname,"%*") then
+ if can_be_dir(pname) then
+ -- quick root scan first
+ for k=1,#wantedfiles do
+ local w = wantedfiles[k]
+ local fname = check_subpath(filejoin(pname,w))
+ if fname then
+ result[#result+1] = fname
+ done = true
+ if not allresults then
+ break
+ end
+ end
+ end
+ if not done and doscan then
+ -- collect files in path (and cache the result)
+ local files = resolvers.simplescanfiles(pname,false,true)
for k=1,#wantedfiles do
local w = wantedfiles[k]
- local subpath = ''
- if files[w] then
- subpath = type(files[w]) == "table" and files[w][1] or files[w]
- end
- local fname = filejoin(ppname,subpath,w)
- if isreadable(fname) then
- if trace_detail then
- report_resolving("found '%s' by scanning",fname)
+ local subpath = files[w]
+ if not subpath or subpath == "" then
+ -- rootscan already done
+ elseif type(subpath) == "string" then
+ local fname = check_subpath(filejoin(pname,subpath,w))
+ if fname then
+ result[#result+1] = fname
+ done = true
+ if not allresults then
+ break
+ end
+ end
+ else
+ for i=1,#subpath do
+ local sp = subpath[i]
+ if sp == "" then
+ -- roottest already done
+ else
+ local fname = check_subpath(filejoin(pname,sp,w))
+ if fname then
+ result[#result+1] = fname
+ done = true
+ if not allresults then
+ break
+ end
+ end
+ end
+ end
+ if done and not allresults then
+ break
end
- result[#result+1] = fname
- done = true
- if not allresults then break end
end
end
- else
- -- no access needed for non existing path, speedup (esp in large tree with lots of fake)
end
end
+ else
+ -- no access needed for non existing path, speedup (esp in large tree with lots of fake)
end
end
- if not done and doscan then
- -- todo: slow path scanning ... although we now have tree:// supported in $TEXMF
- end
- if done and not allresults then break end
+ end
+ -- todo recursive scanning
+ if done and not allresults then
+ break
end
end
+ if #result > 0 then
+ return method, result
+ end
end
- for k=1,#result do
- local rk = collapsepath(result[k])
- result[k] = rk
- resolvers.registerintrees(rk) -- for tracing used files
+end
+
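-- Note (not from the patch itself, conventions as in kpathsea): a path
-- element prefixed with "!!" is meant to be resolved via the file database
-- only, which is what the inhibitstripper comparison above detects, while
-- a trailing "//" asks for a recursive scan; only in that case does the
-- simplescanfiles fallback come into play.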
+local function find_onpath(filename,filetype,wantedfiles,allresults)
+ if trace_detail then
+ report_resolving("checking filename '%s', filetype '%s', wanted files '%s'",filename, filetype or '?',concat(wantedfiles," | "))
+ end
+ local result = { }
+ for k=1,#wantedfiles do
+ local fname = wantedfiles[k]
+ if fname and isreadable(fname) then
+ filename = fname
+ result[#result+1] = filejoin('.',fname)
+ if not allresults then
+ break
+ end
+ end
end
- if stamp then
- instance.found[stamp] = result
+ if #result > 0 then
+ return "onpath", result
end
- return result
end
--- -- -- begin of main file search routing -- -- --
-
-
-
-
-
-
-
+local function find_otherwise(filename,filetype,wantedfiles,allresults) -- other text files | any | whatever
+ local filelist = collect_files(wantedfiles)
+ local fl = filelist and filelist[1]
+ if fl then
+ return "otherwise", { resolvers.resolve(fl[3]) } -- filename
+ end
+end
+-- we could have a loop over the 6 functions but then we'd have to
+-- always analyze
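-- Illustrative sketch (not from the patch itself): the same lookup order
-- written as a first-match-wins chain; the closures hide the differing
-- argument lists, and the unrolled version below avoids calling
-- find_analyze unless the cheaper direct/wildcard/qualified steps fail.

local function chain_sketch(filename,askedformat)
    local filetype, wantedfiles = find_analyze(filename,askedformat)
    local steps = {
        function() return find_direct   (filename)                      end,
        function() return find_wildcard (filename)                      end,
        function() return find_qualified(filename)                      end,
        function() return find_intree   (filename,filetype,wantedfiles) end,
        function() return find_onpath   (filename,filetype,wantedfiles) end,
        function() return find_otherwise(filename,filetype,wantedfiles) end,
    }
    for i=1,#steps do
        local method, result = steps[i]()
        if result then
            return method, result
        end
    end
end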
+collect_instance_files = function(filename,askedformat,allresults) -- uses nested
+ askedformat = askedformat or ""
+ filename = collapsepath(filename)
+ if allresults then
+ -- no need for caching, only used for tracing
+ local filetype, wantedfiles = find_analyze(filename,askedformat)
+ local results = {
+ { find_direct (filename,true) },
+ { find_wildcard (filename,true) },
+ { find_qualified(filename,true) },
+ { find_intree (filename,filetype,wantedfiles,true) },
+ { find_onpath (filename,filetype,wantedfiles,true) },
+ { find_otherwise(filename,filetype,wantedfiles,true) },
+ }
+ local result, status, done = { }, { }, { }
+ for k, r in next, results do
+ local method, list = r[1], r[2]
+ if method and list then
+ for i=1,#list do
+ local c = collapsepath(list[i])
+ if not done[c] then
+ result[#result+1] = c
+ done[c] = true
+ end
+ status[#status+1] = format("%-10s: %s",method,c)
+ end
+ end
+ end
+ if trace_detail then
+ report_resolving("lookup status: %s",table.serialize(status,filename))
+ end
+ return result, status
+ else
+ local method, result, stamp, filetype, wantedfiles
+ if instance.remember then
+ stamp = format("%s--%s", filename, askedformat)
+ result = stamp and instance.found[stamp]
+ if result then
+ if trace_locating then
+ report_resolving("remembered file '%s'",filename)
+ end
+ return result
+ end
+ end
+ method, result = find_direct(filename)
+ if not result then
+ method, result = find_wildcard(filename)
+ if not result then
+ method, result = find_qualified(filename)
+ if not result then
+ filetype, wantedfiles = find_analyze(filename,askedformat)
+ method, result = find_intree(filename,filetype,wantedfiles)
+ if not result then
+ method, result = find_onpath(filename,filetype,wantedfiles)
+ if not result then
+ method, result = find_otherwise(filename,filetype,wantedfiles)
+ end
+ end
+ end
+ end
+ end
+ if result and #result > 0 then
+ local foundname = collapsepath(result[1])
+ resolvers.registerintrees(filename,askedformat,filetype,method,foundname)
+ result = { foundname }
+ else
+ result = { } -- maybe false
+ end
+ if stamp then
+ if trace_locating then
+ report_resolving("remembering file '%s'",filename)
+ end
+ instance.found[stamp] = result
+ end
+ return result
+ end
+end
-- -- -- end of main file search routing -- -- --
+
local function findfiles(filename,filetype,allresults)
- local result = collect_instance_files(filename,filetype or "",allresults)
- if #result == 0 then
+ local result, status = collect_instance_files(filename,filetype or "",allresults)
+ if not result or #result == 0 then
local lowered = lower(filename)
if filename ~= lowered then
- return collect_instance_files(lowered,filetype or "",allresults)
+ result, status = collect_instance_files(lowered,filetype or "",allresults)
end
end
- return result
+ return result or { }, status
end
function resolvers.findfiles(filename,filetype)
@@ -12370,6 +13646,10 @@ function resolvers.load(option)
return files and next(files) and true
end
+function resolvers.loadtime()
+ return statistics.elapsedtime(instance)
+end
+
local function report(str)
if trace_locating then
report_resolving(str) -- has already verbose
@@ -12383,6 +13663,9 @@ function resolvers.dowithfilesandreport(command, files, ...) -- will move
if trace_locating then
report('') -- ?
end
+ if type(files) == "string" then
+ files = { files }
+ end
for f=1,#files do
local file = files[f]
local result = command(file,...)
@@ -12540,6 +13823,8 @@ local cleanpath, findgivenfile, expansion = resolvers.cleanpath, resolvers.findg
local getenv = resolvers.getenv -- we can probably also use resolvers.expansion
local P, Cs, lpegmatch = lpeg.P, lpeg.Cs, lpeg.match
+-- getenv = function(...) return resolvers.getenv(...) end -- needs checking (definitions changes later on)
+
prefixes.environment = function(str)
return cleanpath(expansion(str))
end
@@ -12578,7 +13863,7 @@ end
prefixes.filename = function(str)
local fullname = findgivenfile(str) or ""
- return cleanpath(file.basename((fullname ~= "" and fullname) or str))
+ return cleanpath(file.basename((fullname ~= "" and fullname) or str)) -- no cleanpath needed here
end
prefixes.pathname = function(str)
@@ -12621,8 +13906,9 @@ function resolvers.allprefixes(separator)
end
local function _resolve_(method,target)
- if prefixes[method] then
- return prefixes[method](target)
+ local action = prefixes[method]
+ if action then
+ return action(target)
else
return method .. ":" .. target
end
@@ -12637,7 +13923,7 @@ end
local function resolve(str) -- use schemes, this one is then for the commandline only
local res = resolved[str]
if not res then
- res = gsub(str,"([a-z][a-z]+):([^ \"\';]*)",_resolve_)
+ res = gsub(str,"([a-z][a-z]+):([^ \"\';]*)",_resolve_) -- home:xx;selfautoparent:xx; etc
resolved[str] = res
abstract[res] = str
end
@@ -12797,7 +14083,7 @@ end
function generators.file(specification)
local path = specification.filename
- local content = resolvers.scanfiles(path)
+ local content = resolvers.scanfiles(path,false,true) -- scan once
resolvers.registerfilehash(path,content,true)
end
@@ -13152,6 +14438,18 @@ local archives = zip.archives
zip.registeredfiles = zip.registeredfiles or { }
local registeredfiles = zip.registeredfiles
+local limited = false
+
+directives.register("system.inputmode", function(v)
+ if not limited then
+ local i_limiter = io.i_limiter(v)
+ if i_limiter then
+ zip.open = i_limiter.protect(zip.open)
+ limited = true
+ end
+ end
+end)
+
local function validzip(str) -- todo: use url splitter
if not find(str,"^zip://") then
return "zip:///" .. str
@@ -13419,11 +14717,11 @@ function resolvers.finders.tree(specification)
end
function resolvers.locators.tree(specification)
- -- TH: resolvers.resolve() call was missing here
- local name = resolvers.resolve(specification.filename)
- if name ~= '' and lfs.isdir(name) then
+ local name = specification.filename
+ local realname = resolvers.resolve(name) -- no shortcut
+ if realname and realname ~= '' and lfs.isdir(realname) then
if trace_locating then
- report_trees("locator '%s' found",name)
+ report_trees("locator '%s' found",realname)
end
resolvers.appendhash('tree',name,false) -- don't cache
elseif trace_locating then
@@ -13437,6 +14735,8 @@ function resolvers.hashers.tree(specification)
report_trees("analysing '%s'",name)
end
resolvers.methodhandler("hashers",name)
+
+ resolvers.generators.file(specification)
end
resolvers.concatinators.tree = resolvers.concatinators.file
@@ -13596,8 +14896,15 @@ local function loaded(libpaths,name,simple)
end
package.loaders[2] = function(name) -- was [#package.loaders+1]
- if trace_locating then -- mode detail
- report_libraries("! locating '%s'",name)
+ if file.suffix(name) == "" then
+ name = file.addsuffix(name,"lua") -- maybe a list
+ if trace_locating then -- mode detail
+ report_libraries("! locating '%s' with forced suffix",name)
+ end
+ else
+ if trace_locating then -- mode detail
+ report_libraries("! locating '%s'",name)
+ end
end
for i=1,#libformats do
local format = libformats[i]
@@ -13804,7 +15111,13 @@ function resolvers.load_tree(tree,resolve)
-- Beware, we need to obey the relocatable autoparent so we
-- set TEXMFCNF to its raw value. This is somewhat tricky when
-- we run a mkii job from within. Therefore, in mtxrun, there
- -- is a resolve applied when we're in mkii/kpse mode.
+ -- is a resolve applied when we're in mkii/kpse mode or when
+ -- --resolve is passed to mtxrun. Maybe we should also set the
+ -- local AUTOPARENT etc. although these are always set anew.
+
+ if resolve then
+ resolvers.luacnfspec = resolvers.resolve(resolvers.luacnfspec)
+ end
setenv('SELFAUTOPARENT', newroot)
setenv('SELFAUTODIR', newtree)
@@ -13812,11 +15125,12 @@ function resolvers.load_tree(tree,resolve)
setenv('TEXROOT', newroot)
setenv('TEXOS', texos)
setenv('TEXMFOS', texmfos)
- setenv('TEXMFCNF', resolvers.luacnfspec, not resolve)
- setenv("PATH", newpath .. io.pathseparator .. getenv("PATH"))
+ setenv('TEXMFCNF', resolvers.luacnfspec,true) -- already resolved
+ setenv('PATH', newpath .. io.pathseparator .. getenv('PATH'))
report_tds("changing from root '%s' to '%s'",oldroot,newroot)
- report_tds("prepending '%s' to binary path",newpath)
+ report_tds("prepending '%s' to PATH",newpath)
+ report_tds("setting TEXMFCNF to '%s'",resolvers.luacnfspec)
report_tds()
end
end
@@ -13837,6 +15151,7 @@ if not modules then modules = { } end modules ['data-lst'] = {
-- used in mtxrun, can be loaded later .. todo
local find, concat, upper, format = string.find, table.concat, string.upper, string.format
+local fastcopy, sortedpairs = table.fastcopy, table.sortedpairs
resolvers.listers = resolvers.listers or { }
@@ -13867,10 +15182,10 @@ function resolvers.listers.variables(pattern)
end
end
end
- local env = table.fastcopy(environment)
- local var = table.fastcopy(variables)
- local exp = table.fastcopy(expansions)
- for key, value in table.sortedpairs(configured) do
+ local env = fastcopy(environment)
+ local var = fastcopy(variables)
+ local exp = fastcopy(expansions)
+ for key, value in sortedpairs(configured) do
if key ~= "" and (pattern == "" or find(upper(key),pattern)) then
report_lists(key)
report_lists(" env: %s",tabstr(rawget(environment,key)) or "unset")
@@ -13879,9 +15194,9 @@ function resolvers.listers.variables(pattern)
report_lists(" res: %s",resolvers.resolve(expansions[key]) or "unset")
end
end
- instance.environment = table.fastcopy(env)
- instance.variables = table.fastcopy(var)
- instance.expansions = table.fastcopy(exp)
+ instance.environment = fastcopy(env)
+ instance.variables = fastcopy(var)
+ instance.expansions = fastcopy(exp)
end
function resolvers.listers.configurations(report)
@@ -14168,8 +15483,8 @@ own = { } -- not local, might change
own.libs = { -- order can be made better
'l-string.lua',
- 'l-lpeg.lua',
'l-table.lua',
+ 'l-lpeg.lua',
'l-io.lua',
'l-number.lua',
'l-set.lua',
@@ -14240,7 +15555,7 @@ end
-- End of hack.
-local format, gsub, gmatch, match = string.format, string.gsub, string.gmatch, string.match
+local format, gsub, gmatch, match, find = string.format, string.gsub, string.gmatch, string.match, string.find
local concat = table.concat
own.name = (environment and environment.ownname) or arg[0] or 'mtxrun.lua'
@@ -14362,7 +15677,7 @@ local helpinfo = [[
--resolve resolve prefixed arguments
--ctxlua run internally (using preloaded libs)
--internal run script using built in libraries (same as --ctxlua)
---locate locate given filename
+--locate locate given filename in database (default) or system (--first --all --detail)
--autotree use texmf tree cf. env 'texmfstart_tree' or 'texmfstarttree'
--tree=pathtotree use given texmf tree (default file: 'setuptex.tmf')
@@ -14410,7 +15725,7 @@ local helpinfo = [[
local application = logs.application {
name = "mtxrun",
- banner = "ConTeXt TDS Runner Tool 1.30",
+ banner = "ConTeXt TDS Runner Tool 1.31",
helpinfo = helpinfo,
}
@@ -14455,12 +15770,12 @@ runners.registered = {
xmltools = { 'xmltools.rb', true },
-- luatools = { 'luatools.lua', true },
mtxtools = { 'mtxtools.rb', true },
- pdftrimwhite = { 'pdftrimwhite.pl', false }
+ pdftrimwhite = { 'pdftrimwhite.pl', false },
}
runners.launchers = {
windows = { },
- unix = { }
+ unix = { },
}
-- like runners.libpath("framework"): looks on script's subpath
@@ -14487,7 +15802,7 @@ function runners.prepare()
end
local touchname = environment.argument("iftouched")
if type(touchname) == "string" and touchname ~= "" then
- local oldname, newname = string.split(touchname, ",")
+ local oldname, newname = string.splitup(touchname, ",")
if oldname and newname and oldname ~= "" and newname ~= "" then
if not file.needs_updating(oldname,newname) then
if e_verbose then
@@ -14521,11 +15836,11 @@ function runners.execute_script(fullname,internal,nosplit)
if path ~= "" then
result = fullname
elseif name then
- name = name:gsub("^int[%a]*:",function()
+ name = gsub(name,"^int[%a]*:",function()
internal = true
return ""
end )
- name = name:gsub("^script:","")
+ name = gsub(name,"^script:","")
if suffix == "" and runners.registered[name] and runners.registered[name][1] then
name = runners.registered[name][1]
suffix = file.extname(name)
@@ -14555,13 +15870,15 @@ function runners.execute_script(fullname,internal,nosplit)
environment.ownscript = result
dofile(result)
else
- local quotedresult = result
- if string.match(result,' ') and not string.match(result,"^\".*\"$") then quotedresult = '"' .. result .. '"' end
local binary = runners.applications[file.extname(result)]
+ result = string.quoted(string.unquoted(result))
+ -- if string.match(result,' ') and not string.match(result,"^\".*\"$") then
+ -- result = '"' .. result .. '"'
+ -- end
if binary and binary ~= "" then
- quotedresult = binary .. " " .. quotedresult
+ result = binary .. " " .. result
end
- local command = quotedresult .. " " .. environment.reconstructcommandline(environment.arguments_after,noquote)
+ local command = result .. " " .. environment.reconstructcommandline(environment.arguments_after,noquote)
if e_verbose then
report()
report("executing: %s",command)
@@ -14606,7 +15923,7 @@ function runners.execute_program(fullname)
local before, after = environment.splitarguments(fullname)
for k=1,#after do after[k] = resolvers.resolve(after[k]) end
environment.initializearguments(after)
- fullname = fullname:gsub("^bin:","")
+ fullname = gsub(fullname,"^bin:","")
local command = fullname .. " " .. (environment.reconstructcommandline(after or "",noquote) or "")
report()
report("executing: %s",command)
@@ -14667,10 +15984,24 @@ function runners.resolve_string(filename)
end
end
-function runners.locate_file(filename)
- -- differs from texmfstart where locate appends .com .exe .bat ... todo
+-- differs from texmfstart where locate appends .com .exe .bat ... todo
+
+function runners.locate_file(filename) -- was given file but only searches in tree
if filename and filename ~= "" then
- runners.report_location(resolvers.findgivenfile(filename))
+ if environment.argument("first") then
+ runners.report_location(resolvers.findfile(filename))
+ -- resolvers.dowithfilesandreport(resolvers.findfile,filename)
+ elseif environment.argument("all") then
+ local result, status = resolvers.findfiles(filename)
+ if status and environment.argument("detail") then
+ runners.report_location(status)
+ else
+ runners.report_location(result)
+ end
+ else
+ runners.report_location(resolvers.findgivenfile(filename))
+ -- resolvers.dowithfilesandreport(resolvers.findgivenfile,filename)
+ end
end
end
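-- Note (not from the patch itself): with the branches above, --locate maps
-- onto the resolvers as follows (file names are invented examples):
--
--   mtxrun --locate context.mkiv            -- findgivenfile (database)
--   mtxrun --locate --first context.mkiv    -- findfile
--   mtxrun --locate --all cont-en.mkii      -- findfiles, all hits
--   mtxrun --locate --all --detail foo.tex  -- also report the method used
--
-- which matches the updated --locate line in the help info earlier on.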
@@ -14679,12 +16010,12 @@ function runners.locate_platform()
end
function runners.report_location(result)
- if e_verbose then
- reportline()
- if result and result ~= "" then
- report(result)
- else
- report("not found")
+ if type(result) == "table" then
+ for i=1,#result do
+ if i > 1 then
+ io.write("\n")
+ end
+ io.write(result[i])
end
else
io.write(result)
@@ -14742,8 +16073,8 @@ function resolvers.launch(str)
end
function runners.launch_file(filename)
- instance.allresults = true
trackers.enable("resolvers.locating")
+ local allresults = environment.arguments["all"]
local pattern = environment.arguments["pattern"]
if not pattern or pattern == "" then
pattern = filename
@@ -14751,15 +16082,15 @@ function runners.launch_file(filename)
if not pattern or pattern == "" then
report("provide name or --pattern=")
else
- local t = resolvers.findfiles(pattern)
+ local t = resolvers.findfiles(pattern,nil,allresults)
if not t or #t == 0 then
- t = resolvers.findfiles("*/" .. pattern)
+ t = resolvers.findfiles("*/" .. pattern,nil,allresults)
end
if not t or #t == 0 then
- t = resolvers.findfiles("*/" .. pattern .. "*")
+ t = resolvers.findfiles("*/" .. pattern .. "*",nil,allresults)
end
if t and #t > 0 then
- if environment.arguments["all"] then
+ if allresults then
for _, v in pairs(t) do
report("launching %s", v)
resolvers.launch(v)
@@ -14774,6 +16105,11 @@ function runners.launch_file(filename)
end
end
+local mtxprefixes = {
+ { "^mtx%-", "mtx-" },
+ { "^mtx%-t%-", "mtx-t-" },
+}
+
function runners.find_mtx_script(filename)
local function found(name)
local path = file.dirname(name)
@@ -14799,24 +16135,27 @@ function runners.find_mtx_script(filename)
return fullname
end
-- mtx- prefix checking
- local mtxprefix = (filename:find("^mtx%-") and "") or "mtx-"
- -- context namespace, mtx-<filename>
- fullname = mtxprefix .. filename
- fullname = found(fullname) or resolvers.findfile(fullname)
- if fullname and fullname ~= "" then
- return fullname
- end
- -- context namespace, mtx-<filename>s
- fullname = mtxprefix .. basename .. "s" .. "." .. suffix
- fullname = found(fullname) or resolvers.findfile(fullname)
- if fullname and fullname ~= "" then
- return fullname
- end
- -- context namespace, mtx-<filename minus trailing s>
- fullname = mtxprefix .. basename:gsub("s$","") .. "." .. suffix
- fullname = found(fullname) or resolvers.findfile(fullname)
- if fullname and fullname ~= "" then
- return fullname
+ for i=1,#mtxprefixes do
+ local mtxprefix = mtxprefixes[i]
+ mtxprefix = find(filename,mtxprefix[1]) and "" or mtxprefix[2]
+ -- context namespace, mtx-<filename>
+ fullname = mtxprefix .. filename
+ fullname = found(fullname) or resolvers.findfile(fullname)
+ if fullname and fullname ~= "" then
+ return fullname
+ end
+ -- context namespace, mtx-<filename>s
+ fullname = mtxprefix .. basename .. "s" .. "." .. suffix
+ fullname = found(fullname) or resolvers.findfile(fullname)
+ if fullname and fullname ~= "" then
+ return fullname
+ end
+ -- context namespace, mtx-<filename minus trailing s>
+ fullname = mtxprefix .. gsub(basename,"s$","") .. "." .. suffix
+ fullname = found(fullname) or resolvers.findfile(fullname)
+ if fullname and fullname ~= "" then
+ return fullname
+ end
end
-- context namespace, just <filename>
fullname = resolvers.findfile(filename)
@@ -14963,9 +16302,11 @@ instance.lsrmode = environment.argument("lsr") or false
local is_mkii_stub = runners.registered[file.removesuffix(file.basename(filename))]
-if environment.argument("usekpse") or environment.argument("forcekpse") or is_mkii_stub then
+local e_argument = environment.argument
+
+if e_argument("usekpse") or e_argument("forcekpse") or is_mkii_stub then
- resolvers.load_tree(environment.argument('tree'),true) -- force resolve of TEXMFCNF
+ resolvers.load_tree(e_argument('tree'),true) -- force resolve of TEXMFCNF
os.setenv("engine","")
os.setenv("progname","")
@@ -14978,7 +16319,7 @@ if environment.argument("usekpse") or environment.argument("forcekpse") or is_mk
other = "other text files",
}
- local progname = environment.argument("progname") or 'context'
+ local progname = e_argument("progname") or 'context'
local function kpse_initialized()
texconfig.kpse_init = true
@@ -14993,7 +16334,7 @@ if environment.argument("usekpse") or environment.argument("forcekpse") or is_mk
local findfile = resolvers.findfile
local showpath = resolvers.showpath
- if environment.argument("forcekpse") then
+ if e_argument("forcekpse") then
function resolvers.findfile(name,kind)
return (kpse_initialized():find_file(resolvers.cleanpath(name),(kind ~= "" and (remapper[kind] or kind)) or "tex") or "") or ""
@@ -15002,7 +16343,7 @@ if environment.argument("usekpse") or environment.argument("forcekpse") or is_mk
return (kpse_initialized():show_path(name)) or ""
end
- elseif environment.argument("usekpse") or is_mkii_stub then
+ elseif e_argument("usekpse") or is_mkii_stub then
resolvers.load()
@@ -15041,12 +16382,12 @@ else
end
end
- resolvers.load_tree(environment.argument('tree'))
+ resolvers.load_tree(e_argument('tree'),e_argument("resolve"))
end
-if environment.argument("selfmerge") then
+if e_argument("selfmerge") then
-- embed used libraries
@@ -15056,27 +16397,27 @@ if environment.argument("selfmerge") then
utilities.merger.selfmerge(own.name,own.libs,{ found })
end
-elseif environment.argument("selfclean") then
+elseif e_argument("selfclean") then
-- remove embedded libraries
runners.loadbase()
utilities.merger.selfclean(own.name)
-elseif environment.argument("selfupdate") then
+elseif e_argument("selfupdate") then
runners.loadbase()
trackers.enable("resolvers.locating")
resolvers.updatescript(own.name,"mtxrun")
-elseif environment.argument("ctxlua") or environment.argument("internal") then
+elseif e_argument("ctxlua") or e_argument("internal") then
-- run a script by loading it (using libs)
runners.loadbase()
ok = runners.execute_script(filename,true)
-elseif environment.argument("script") or environment.argument("scripts") then
+elseif e_argument("script") or e_argument("scripts") then
-- run a script by loading it (using libs), pass args
@@ -15087,109 +16428,110 @@ elseif environment.argument("script") or environment.argument("scripts") then
ok = runners.execute_ctx_script(filename)
end
-elseif environment.argument("execute") then
+elseif e_argument("execute") then
-- execute script
runners.loadbase()
ok = runners.execute_script(filename)
-elseif environment.argument("direct") then
+elseif e_argument("direct") then
-- equals bin:
runners.loadbase()
ok = runners.execute_program(filename)
-elseif environment.argument("edit") then
+elseif e_argument("edit") then
-- edit file
runners.loadbase()
runners.edit_script(filename)
-elseif environment.argument("launch") then
+elseif e_argument("launch") then
runners.loadbase()
runners.launch_file(filename)
-elseif environment.argument("makestubs") then
+elseif e_argument("makestubs") then
 -- make stubs (deprecated)
runners.handle_stubs(true)
-elseif environment.argument("removestubs") then
+elseif e_argument("removestubs") then
 -- remove stub (deprecated)
runners.loadbase()
runners.handle_stubs(false)
-elseif environment.argument("resolve") then
+elseif e_argument("resolve") then
-- resolve string
runners.loadbase()
runners.resolve_string(filename)
-elseif environment.argument("locate") then
+elseif e_argument("locate") then
- -- locate file
+ -- locate file (only database)
runners.loadbase()
runners.locate_file(filename)
-elseif environment.argument("platform") or environment.argument("show-platform") then
+elseif e_argument("platform") or e_argument("show-platform") then
-- locate platform
runners.loadbase()
runners.locate_platform()
-elseif environment.argument("prefixes") then
+elseif e_argument("prefixes") then
runners.loadbase()
runners.prefixes()
-elseif environment.argument("timedrun") then
+elseif e_argument("timedrun") then
-- locate platform
runners.loadbase()
runners.timedrun(filename)
-elseif environment.argument("variables") or environment.argument("show-variables") or environment.argument("expansions") or environment.argument("show-expansions") then
+elseif e_argument("variables") or e_argument("show-variables") or e_argument("expansions") or e_argument("show-expansions") then
-- luatools: runners.execute_ctx_script("mtx-base","--expansions",filename)
resolvers.load("nofiles")
- resolvers.listers.variables(environment.argument("pattern"))
+ resolvers.listers.variables(e_argument("pattern"))
-elseif environment.argument("configurations") or environment.argument("show-configurations") then
+elseif e_argument("configurations") or e_argument("show-configurations") then
-- luatools: runners.execute_ctx_script("mtx-base","--configurations",filename)
resolvers.load("nofiles")
resolvers.listers.configurations()
-elseif environment.argument("find-file") then
+elseif e_argument("find-file") then
-- luatools: runners.execute_ctx_script("mtx-base","--find-file",filename)
resolvers.load()
- local e_pattern = environment.argument("pattern")
- local e_format = environment.argument("format")
+ local e_all = e_argument("all")
+ local e_pattern = e_argument("pattern")
+ local e_format = e_argument("format")
+ local finder = e_all and resolvers.findfiles or resolvers.findfile
if not e_pattern then
runners.register_arguments(filename)
environment.initializearguments(environment.arguments_after)
- resolvers.dowithfilesandreport(resolvers.findfiles,environment.files,e_format)
+ resolvers.dowithfilesandreport(finder,environment.files,e_format)
elseif type(e_pattern) == "string" then
- instance.allresults = true -- brrrr
- resolvers.dowithfilesandreport(resolvers.findfiles,{ e_pattern }, e_format)
+ resolvers.dowithfilesandreport(finder,{ e_pattern },e_format)
end
-elseif environment.argument("find-path") then
+elseif e_argument("find-path") then
-- luatools: runners.execute_ctx_script("mtx-base","--find-path",filename)
@@ -15201,7 +16543,7 @@ elseif environment.argument("find-path") then
print(path)
end
-elseif environment.argument("expand-braces") then
+elseif e_argument("expand-braces") then
-- luatools: runners.execute_ctx_script("mtx-base","--expand-braces",filename)
@@ -15210,7 +16552,7 @@ elseif environment.argument("expand-braces") then
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.expandbraces, environment.files)
-elseif environment.argument("expand-path") then
+elseif e_argument("expand-path") then
-- luatools: runners.execute_ctx_script("mtx-base","--expand-path",filename)
@@ -15219,7 +16561,7 @@ elseif environment.argument("expand-path") then
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.expandpath, environment.files)
-elseif environment.argument("expand-var") or environment.argument("expand-variable") then
+elseif e_argument("expand-var") or e_argument("expand-variable") then
-- luatools: runners.execute_ctx_script("mtx-base","--expand-var",filename)
@@ -15228,7 +16570,7 @@ elseif environment.argument("expand-var") or environment.argument("expand-variab
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.expansion, environment.files)
-elseif environment.argument("show-path") or environment.argument("path-value") then
+elseif e_argument("show-path") or e_argument("path-value") then
-- luatools: runners.execute_ctx_script("mtx-base","--show-path",filename)
@@ -15237,7 +16579,7 @@ elseif environment.argument("show-path") or environment.argument("path-value") t
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.showpath, environment.files)
-elseif environment.argument("var-value") or environment.argument("show-value") then
+elseif e_argument("var-value") or e_argument("show-value") then
-- luatools: runners.execute_ctx_script("mtx-base","--show-value",filename)
@@ -15246,28 +16588,36 @@ elseif environment.argument("var-value") or environment.argument("show-value") t
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.variable,environment.files)
-elseif environment.argument("format-path") then
+elseif e_argument("format-path") then
-- luatools: runners.execute_ctx_script("mtx-base","--format-path",filename)
resolvers.load()
report(caches.getwritablepath("format"))
-elseif environment.argument("pattern") then
+elseif e_argument("pattern") then
-- luatools
- runners.execute_ctx_script("mtx-base","--pattern='" .. environment.argument("pattern") .. "'",filename)
+ runners.execute_ctx_script("mtx-base","--pattern='" .. e_argument("pattern") .. "'",filename)
-elseif environment.argument("generate") then
+elseif e_argument("generate") then
-- luatools
- instance.renewcache = true
- trackers.enable("resolvers.locating")
- resolvers.load()
+ if filename and filename ~= "" then
+ resolvers.load("nofiles")
+ trackers.enable("resolvers.locating")
+ resolvers.renew(filename)
+ else
+ instance.renewcache = true
+ trackers.enable("resolvers.locating")
+ resolvers.load()
+ end
-elseif environment.argument("make") or environment.argument("ini") or environment.argument("compile") then
+ e_verbose = true
+
+elseif e_argument("make") or e_argument("ini") or e_argument("compile") then
-- luatools: runners.execute_ctx_script("mtx-base","--make",filename)
@@ -15275,29 +16625,33 @@ elseif environment.argument("make") or environment.argument("ini") or environmen
trackers.enable("resolvers.locating")
environment.make_format(filename)
-elseif environment.argument("run") then
+elseif e_argument("run") then
-- luatools
runners.execute_ctx_script("mtx-base","--run",filename)
-elseif environment.argument("fmt") then
+elseif e_argument("fmt") then
-- luatools
runners.execute_ctx_script("mtx-base","--fmt",filename)
-elseif environment.argument("help") and filename=='base' then
+elseif e_argument("help") and filename=='base' then
-- luatools
runners.execute_ctx_script("mtx-base","--help")
-elseif environment.argument("help") or filename=='help' or filename == "" then
+elseif e_argument("version") then
+
+ application.version()
+
+elseif e_argument("help") or filename=='help' or filename == "" then
application.help()
-elseif filename:find("^bin:") then
+elseif find(filename,"^bin:") then
runners.loadbase()
ok = runners.execute_program(filename)
diff --git a/Master/bin/amd64-freebsd/mtxrun b/Master/bin/amd64-freebsd/mtxrun
index 7adcd3023a8..335c4fcb9e0 100755
--- a/Master/bin/amd64-freebsd/mtxrun
+++ b/Master/bin/amd64-freebsd/mtxrun
@@ -160,509 +160,6 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['l-lpeg'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local lpeg = require("lpeg")
-
-local type = type
-
--- Beware, we predefine a bunch of patterns here and one reason for doing so
--- is that we get consistent behaviour in some of the visualizers.
-
-lpeg.patterns = lpeg.patterns or { } -- so that we can share
-local patterns = lpeg.patterns
-
-local P, R, S, V, match = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.match
-local Ct, C, Cs, Cc = lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc
-local lpegtype = lpeg.type
-
-local utfcharacters = string.utfcharacters
-local utfgmatch = unicode and unicode.utf8.gmatch
-
-local anything = P(1)
-local endofstring = P(-1)
-local alwaysmatched = P(true)
-
-patterns.anything = anything
-patterns.endofstring = endofstring
-patterns.beginofstring = alwaysmatched
-patterns.alwaysmatched = alwaysmatched
-
-local digit, sign = R('09'), S('+-')
-local cr, lf, crlf = P("\r"), P("\n"), P("\r\n")
-local newline = crlf + cr + lf
-local escaped = P("\\") * anything
-local squote = P("'")
-local dquote = P('"')
-local space = P(" ")
-
-local utfbom_32_be = P('\000\000\254\255')
-local utfbom_32_le = P('\255\254\000\000')
-local utfbom_16_be = P('\255\254')
-local utfbom_16_le = P('\254\255')
-local utfbom_8 = P('\239\187\191')
-local utfbom = utfbom_32_be + utfbom_32_le
- + utfbom_16_be + utfbom_16_le
- + utfbom_8
-local utftype = utfbom_32_be / "utf-32-be" + utfbom_32_le / "utf-32-le"
- + utfbom_16_be / "utf-16-be" + utfbom_16_le / "utf-16-le"
- + utfbom_8 / "utf-8" + alwaysmatched / "unknown"
-
-local utf8next = R("\128\191")
-
-patterns.utf8one = R("\000\127")
-patterns.utf8two = R("\194\223") * utf8next
-patterns.utf8three = R("\224\239") * utf8next * utf8next
-patterns.utf8four = R("\240\244") * utf8next * utf8next * utf8next
-patterns.utfbom = utfbom
-patterns.utftype = utftype
-
-local utf8char = patterns.utf8one + patterns.utf8two + patterns.utf8three + patterns.utf8four
-local validutf8char = utf8char^0 * endofstring * Cc(true) + Cc(false)
-
-patterns.utf8 = utf8char
-patterns.utf8char = utf8char
-patterns.validutf8 = validutf8char
-patterns.validutf8char = validutf8char
-
-patterns.digit = digit
-patterns.sign = sign
-patterns.cardinal = sign^0 * digit^1
-patterns.integer = sign^0 * digit^1
-patterns.float = sign^0 * digit^0 * P('.') * digit^1
-patterns.cfloat = sign^0 * digit^0 * P(',') * digit^1
-patterns.number = patterns.float + patterns.integer
-patterns.cnumber = patterns.cfloat + patterns.integer
-patterns.oct = P("0") * R("07")^1
-patterns.octal = patterns.oct
-patterns.HEX = P("0x") * R("09","AF")^1
-patterns.hex = P("0x") * R("09","af")^1
-patterns.hexadecimal = P("0x") * R("09","AF","af")^1
-patterns.lowercase = R("az")
-patterns.uppercase = R("AZ")
-patterns.letter = patterns.lowercase + patterns.uppercase
-patterns.space = space
-patterns.tab = P("\t")
-patterns.spaceortab = patterns.space + patterns.tab
-patterns.eol = S("\n\r")
-patterns.spacer = S(" \t\f\v") -- + string.char(0xc2, 0xa0) if we want utf (cf mail roberto)
-patterns.newline = newline
-patterns.emptyline = newline^1
-patterns.nonspacer = 1 - patterns.spacer
-patterns.whitespace = patterns.eol + patterns.spacer
-patterns.nonwhitespace = 1 - patterns.whitespace
-patterns.equal = P("=")
-patterns.comma = P(",")
-patterns.commaspacer = P(",") * patterns.spacer^0
-patterns.period = P(".")
-patterns.colon = P(":")
-patterns.semicolon = P(";")
-patterns.underscore = P("_")
-patterns.escaped = escaped
-patterns.squote = squote
-patterns.dquote = dquote
-patterns.nosquote = (escaped + (1-squote))^0
-patterns.nodquote = (escaped + (1-dquote))^0
-patterns.unsingle = (squote/"") * patterns.nosquote * (squote/"")
-patterns.undouble = (dquote/"") * patterns.nodquote * (dquote/"")
-patterns.unquoted = patterns.undouble + patterns.unsingle -- more often undouble
-patterns.unspacer = ((patterns.spacer^1)/"")^0
-
-patterns.somecontent = (anything - newline - space)^1 -- (utf8char - newline - space)^1
-patterns.beginline = #(1-newline)
-
-local unquoted = Cs(patterns.unquoted * endofstring) -- not C
-
-function string.unquoted(str)
- return match(unquoted,str) or str
-end
-
-
-function lpeg.anywhere(pattern) --slightly adapted from website
- return P { P(pattern) + 1 * V(1) } -- why so complex?
-end
-
-function lpeg.splitter(pattern, action)
- return (((1-P(pattern))^1)/action+1)^0
-end
-
-local splitters_s, splitters_m = { }, { }
-
-local function splitat(separator,single)
- local splitter = (single and splitters_s[separator]) or splitters_m[separator]
- if not splitter then
- separator = P(separator)
- local other = C((1 - separator)^0)
- if single then
- local any = anything
- splitter = other * (separator * C(any^0) + "") -- ?
- splitters_s[separator] = splitter
- else
- splitter = other * (separator * other)^0
- splitters_m[separator] = splitter
- end
- end
- return splitter
-end
-
-lpeg.splitat = splitat
-
-
-local cache = { }
-
-function lpeg.split(separator,str)
- local c = cache[separator]
- if not c then
- c = Ct(splitat(separator))
- cache[separator] = c
- end
- return match(c,str)
-end
-
-function string.split(str,separator)
- local c = cache[separator]
- if not c then
- c = Ct(splitat(separator))
- cache[separator] = c
- end
- return match(c,str)
-end
-
-local spacing = patterns.spacer^0 * newline -- sort of strip
-local empty = spacing * Cc("")
-local nonempty = Cs((1-spacing)^1) * spacing^-1
-local content = (empty + nonempty)^1
-
-patterns.textline = content
-
-
-local linesplitter = Ct(splitat(newline))
-
-patterns.linesplitter = linesplitter
-
-function string.splitlines(str)
- return match(linesplitter,str)
-end
-
-local utflinesplitter = utfbom^-1 * Ct(splitat(newline))
-
-patterns.utflinesplitter = utflinesplitter
-
-function string.utfsplitlines(str)
- return match(utflinesplitter,str)
-end
-
-
-local cache = { }
-
-function lpeg.checkedsplit(separator,str)
- local c = cache[separator]
- if not c then
- separator = P(separator)
- local other = C((1 - separator)^1)
- c = Ct(separator^0 * other * (separator^1 * other)^0)
- cache[separator] = c
- end
- return match(c,str)
-end
-
-function string.checkedsplit(str,separator)
- local c = cache[separator]
- if not c then
- separator = P(separator)
- local other = C((1 - separator)^1)
- c = Ct(separator^0 * other * (separator^1 * other)^0)
- cache[separator] = c
- end
- return match(c,str)
-end
-
-
-local f1 = string.byte
-
-local function f2(s) local c1, c2 = f1(s,1,2) return c1 * 64 + c2 - 12416 end
-local function f3(s) local c1, c2, c3 = f1(s,1,3) return (c1 * 64 + c2) * 64 + c3 - 925824 end
-local function f4(s) local c1, c2, c3, c4 = f1(s,1,4) return ((c1 * 64 + c2) * 64 + c3) * 64 + c4 - 63447168 end
-
-local utf8byte = patterns.utf8one/f1 + patterns.utf8two/f2 + patterns.utf8three/f3 + patterns.utf8four/f4
-
-patterns.utf8byte = utf8byte
-
-
-
-local cache = { }
-
-function lpeg.stripper(str)
- if type(str) == "string" then
- local s = cache[str]
- if not s then
- s = Cs(((S(str)^1)/"" + 1)^0)
- cache[str] = s
- end
- return s
- else
- return Cs(((str^1)/"" + 1)^0)
- end
-end
-
-local cache = { }
-
-function lpeg.keeper(str)
- if type(str) == "string" then
- local s = cache[str]
- if not s then
- s = Cs((((1-S(str))^1)/"" + 1)^0)
- cache[str] = s
- end
- return s
- else
- return Cs((((1-str)^1)/"" + 1)^0)
- end
-end
-
-function lpeg.frontstripper(str) -- or pattern (yet undocumented)
- return (P(str) + P(true)) * Cs(P(1)^0)
-end
-
-function lpeg.endstripper(str) -- or pattern (yet undocumented)
- return Cs((1 - P(str) * P(-1))^0)
-end
-
--- Just for fun I looked at the used bytecode and
--- p = (p and p + pp) or pp gets one more (testset).
-
-function lpeg.replacer(one,two)
- if type(one) == "table" then
- local no = #one
- if no > 0 then
- local p
- for i=1,no do
- local o = one[i]
- local pp = P(o[1]) / o[2]
- if p then
- p = p + pp
- else
- p = pp
- end
- end
- return Cs((p + 1)^0)
- end
- else
- two = two or ""
- return Cs((P(one)/two + 1)^0)
- end
-end
-
-local splitters_f, splitters_s = { }, { }
-
-function lpeg.firstofsplit(separator) -- always return value
- local splitter = splitters_f[separator]
- if not splitter then
- separator = P(separator)
- splitter = C((1 - separator)^0)
- splitters_f[separator] = splitter
- end
- return splitter
-end
-
-function lpeg.secondofsplit(separator) -- nil if not split
- local splitter = splitters_s[separator]
- if not splitter then
- separator = P(separator)
- splitter = (1 - separator)^0 * separator * C(anything^0)
- splitters_s[separator] = splitter
- end
- return splitter
-end
-
-function lpeg.balancer(left,right)
- left, right = P(left), P(right)
- return P { left * ((1 - left - right) + V(1))^0 * right }
-end
-
-
-
-local nany = utf8char/""
-
-function lpeg.counter(pattern)
- pattern = Cs((P(pattern)/" " + nany)^0)
- return function(str)
- return #match(pattern,str)
- end
-end
-
-if utfgmatch then
-
- function lpeg.count(str,what) -- replaces string.count
- if type(what) == "string" then
- local n = 0
- for _ in utfgmatch(str,what) do
- n = n + 1
- end
- return n
- else -- 4 times slower but still faster than / function
- return #match(Cs((P(what)/" " + nany)^0),str)
- end
- end
-
-else
-
- local cache = { }
-
- function lpeg.count(str,what) -- replaces string.count
- if type(what) == "string" then
- local p = cache[what]
- if not p then
- p = Cs((P(what)/" " + nany)^0)
- cache[p] = p
- end
- return #match(p,str)
- else -- 4 times slower but still faster than / function
- return #match(Cs((P(what)/" " + nany)^0),str)
- end
- end
-
-end
-
-local patterns_escapes = { -- also defines in l-string
- ["%"] = "%%",
- ["."] = "%.",
- ["+"] = "%+", ["-"] = "%-", ["*"] = "%*",
- ["["] = "%[", ["]"] = "%]",
- ["("] = "%)", [")"] = "%)",
- -- ["{"] = "%{", ["}"] = "%}"
- -- ["^"] = "%^", ["$"] = "%$",
-}
-
-local simple_escapes = { -- also defines in l-string
- ["-"] = "%-",
- ["."] = "%.",
- ["?"] = ".",
- ["*"] = ".*",
-}
-
-local p = Cs((S("-.+*%()[]") / patterns_escapes + anything)^0)
-local s = Cs((S("-.+*%()[]") / simple_escapes + anything)^0)
-
-function string.escapedpattern(str,simple)
- return match(simple and s or p,str)
-end
-
--- utf extensies
-
-lpeg.UP = lpeg.P
-
-if utfcharacters then
-
- function lpeg.US(str)
- local p
- for uc in utfcharacters(str) do
- if p then
- p = p + P(uc)
- else
- p = P(uc)
- end
- end
- return p
- end
-
-
-elseif utfgmatch then
-
- function lpeg.US(str)
- local p
- for uc in utfgmatch(str,".") do
- if p then
- p = p + P(uc)
- else
- p = P(uc)
- end
- end
- return p
- end
-
-else
-
- function lpeg.US(str)
- local p
- local f = function(uc)
- if p then
- p = p + P(uc)
- else
- p = P(uc)
- end
- end
- match((utf8char/f)^0,str)
- return p
- end
-
-end
-
-local range = Cs(utf8byte) * (Cs(utf8byte) + Cc(false))
-
-local utfchar = unicode and unicode.utf8 and unicode.utf8.char
-
-function lpeg.UR(str,more)
- local first, last
- if type(str) == "number" then
- first = str
- last = more or first
- else
- first, last = match(range,str)
- if not last then
- return P(str)
- end
- end
- if first == last then
- return P(str)
- elseif utfchar and last - first < 8 then -- a somewhat arbitrary criterium
- local p
- for i=first,last do
- if p then
- p = p + P(utfchar(i))
- else
- p = P(utfchar(i))
- end
- end
- return p -- nil when invalid range
- else
- local f = function(b)
- return b >= first and b <= last
- end
- return utf8byte / f -- nil when invalid range
- end
-end
-
-
-
-function lpeg.oneof(list,...) -- lpeg.oneof("elseif","else","if","then")
- if type(list) ~= "table" then
- list = { list, ... }
- end
- -- sort(list) -- longest match first
- local p = P(list[1])
- for l=2,#list do
- p = p + P(list[l])
- end
- return p
-end
-
-function lpeg.is_lpeg(p)
- return p and lpegtype(p) == "pattern"
-end
-
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
if not modules then modules = { } end modules ['l-table'] = {
version = 1.001,
comment = "companion to luat-lib.mkiv",
@@ -675,11 +172,12 @@ local type, next, tostring, tonumber, ipairs, table, string = type, next, tostri
local concat, sort, insert, remove = table.concat, table.sort, table.insert, table.remove
local format, find, gsub, lower, dump, match = string.format, string.find, string.gsub, string.lower, string.dump, string.match
local getmetatable, setmetatable = getmetatable, setmetatable
+local getinfo = debug.getinfo
-- Starting with version 5.2 Lua no longer provide ipairs, which makes
-- sense. As we already used the for loop and # in most places the
-- impact on ConTeXt was not that large; the remaining ipairs already
--- have been replaced. In a similar fashio we also hardly used pairs.
+-- have been replaced. In a similar fashion we also hardly used pairs.
--
-- Just in case, we provide the fallbacks as discussed in Programming
-- in Lua (http://www.lua.org/pil/7.3.html):
@@ -1022,6 +520,8 @@ end
-- problem: there no good number_to_string converter with the best resolution
+local function dummy() end
+
local function do_serialize(root,name,depth,level,indexed)
if level > 0 then
depth = depth .. " "
@@ -1214,19 +714,20 @@ local function do_serialize(root,name,depth,level,indexed)
end
elseif t == "function" then
if functions then
+ local f = getinfo(v).what == "C" and dump(dummy) or dump(v)
+ -- local f = getinfo(v).what == "C" and dump(function(...) return v(...) end) or dump(v)
if tk == "number" then -- or find(k,"^%d+$") then
if hexify then
- handle(format("%s [0x%04X]=loadstring(%q),",depth,k,dump(v)))
+ handle(format("%s [0x%04X]=loadstring(%q),",depth,k,f))
else
- handle(format("%s [%s]=loadstring(%q),",depth,k,dump(v)))
+ handle(format("%s [%s]=loadstring(%q),",depth,k,f))
end
elseif tk == "boolean" then
- handle(format("%s [%s]=loadstring(%q),",depth,tostring(k),dump(v)))
+ handle(format("%s [%s]=loadstring(%q),",depth,tostring(k),f))
elseif noquotes and not reserved[k] and find(k,"^%a[%w%_]*$") then
- handle(format("%s %s=loadstring(%q),",depth,k,dump(v)))
+ handle(format("%s %s=loadstring(%q),",depth,k,f))
else
- -- handle(format("%s [%q]=loadstring(%q),",depth,k,dump(v)))
- handle(format("%s [%q]=loadstring(%q),",depth,k,debug.getinfo(v).what == "C" and "C code" or dump(v)))
+ handle(format("%s [%q]=loadstring(%q),",depth,k,f))
end
end
else
@@ -1571,6 +1072,730 @@ function table.has_one_entry(t)
return t and not next(t,next(t))
end
+-- new
+
+function table.loweredkeys(t) -- maybe utf
+ local l = { }
+ for k, v in next, t do
+ l[lower(k)] = v
+ end
+ return l
+end
+
+-- new, might move (maybe duplicate)
+
+function table.unique(old)
+ local hash = { }
+ local new = { }
+ local n = 0
+ for i=1,#old do
+ local oi = old[i]
+ if not hash[oi] then
+ n = n + 1
+ new[n] = oi
+ hash[oi] = true
+ end
+ end
+ return new
+end
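+-- a quick usage sketch (results shown in the comments are illustrative):
+--
+-- inspect(table.unique { "a", "b", "a", "c" })      -- { "a", "b", "c" }
+-- inspect(table.loweredkeys { Foo = 1, Bar = 2 })   -- { foo = 1, bar = 2 }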
+
+-- function table.sorted(t,...)
+-- table.sort(t,...)
+-- return t -- still sorts in-place
+-- end
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+if not modules then modules = { } end modules ['l-lpeg'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+
+-- a new lpeg fails on a #(1-P(":")) test and really needs a + P(-1)
+
+local lpeg = require("lpeg")
+
+-- tracing (only used when we encounter a problem in integration of lpeg in luatex)
+
+local report = texio and texio.write_nl or print
+
+
+
+
+local type = type
+local byte, char, gmatch = string.byte, string.char, string.gmatch
+
+-- Beware, we predefine a bunch of patterns here and one reason for doing so
+-- is that we get consistent behaviour in some of the visualizers.
+
+lpeg.patterns = lpeg.patterns or { } -- so that we can share
+local patterns = lpeg.patterns
+
+local P, R, S, V, match = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.match
+local Ct, C, Cs, Cc = lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc
+local lpegtype = lpeg.type
+
+local utfcharacters = string.utfcharacters
+local utfgmatch = unicode and unicode.utf8.gmatch
+
+local anything = P(1)
+local endofstring = P(-1)
+local alwaysmatched = P(true)
+
+patterns.anything = anything
+patterns.endofstring = endofstring
+patterns.beginofstring = alwaysmatched
+patterns.alwaysmatched = alwaysmatched
+
+local digit, sign = R('09'), S('+-')
+local cr, lf, crlf = P("\r"), P("\n"), P("\r\n")
+local newline = crlf + S("\r\n") -- cr + lf
+local escaped = P("\\") * anything
+local squote = P("'")
+local dquote = P('"')
+local space = P(" ")
+
+local utfbom_32_be = P('\000\000\254\255')
+local utfbom_32_le = P('\255\254\000\000')
+local utfbom_16_be = P('\255\254')
+local utfbom_16_le = P('\254\255')
+local utfbom_8 = P('\239\187\191')
+local utfbom = utfbom_32_be + utfbom_32_le
+ + utfbom_16_be + utfbom_16_le
+ + utfbom_8
+local utftype = utfbom_32_be / "utf-32-be" + utfbom_32_le / "utf-32-le"
+ + utfbom_16_be / "utf-16-be" + utfbom_16_le / "utf-16-le"
+ + utfbom_8 / "utf-8" + alwaysmatched / "unknown"
+
+local utf8next = R("\128\191")
+
+patterns.utf8one = R("\000\127")
+patterns.utf8two = R("\194\223") * utf8next
+patterns.utf8three = R("\224\239") * utf8next * utf8next
+patterns.utf8four = R("\240\244") * utf8next * utf8next * utf8next
+patterns.utfbom = utfbom
+patterns.utftype = utftype
+
+local utf8char = patterns.utf8one + patterns.utf8two + patterns.utf8three + patterns.utf8four
+local validutf8char = utf8char^0 * endofstring * Cc(true) + Cc(false)
+
+patterns.utf8 = utf8char
+patterns.utf8char = utf8char
+patterns.validutf8 = validutf8char
+patterns.validutf8char = validutf8char
+
+patterns.digit = digit
+patterns.sign = sign
+patterns.cardinal = sign^0 * digit^1
+patterns.integer = sign^0 * digit^1
+patterns.float = sign^0 * digit^0 * P('.') * digit^1
+patterns.cfloat = sign^0 * digit^0 * P(',') * digit^1
+patterns.number = patterns.float + patterns.integer
+patterns.cnumber = patterns.cfloat + patterns.integer
+patterns.oct = P("0") * R("07")^1
+patterns.octal = patterns.oct
+patterns.HEX = P("0x") * R("09","AF")^1
+patterns.hex = P("0x") * R("09","af")^1
+patterns.hexadecimal = P("0x") * R("09","AF","af")^1
+patterns.lowercase = R("az")
+patterns.uppercase = R("AZ")
+patterns.letter = patterns.lowercase + patterns.uppercase
+patterns.space = space
+patterns.tab = P("\t")
+patterns.spaceortab = patterns.space + patterns.tab
+patterns.eol = S("\n\r")
+patterns.spacer = S(" \t\f\v") -- + char(0xc2, 0xa0) if we want utf (cf mail roberto)
+patterns.newline = newline
+patterns.emptyline = newline^1
+patterns.nonspacer = 1 - patterns.spacer
+patterns.whitespace = patterns.eol + patterns.spacer
+patterns.nonwhitespace = 1 - patterns.whitespace
+patterns.equal = P("=")
+patterns.comma = P(",")
+patterns.commaspacer = P(",") * patterns.spacer^0
+patterns.period = P(".")
+patterns.colon = P(":")
+patterns.semicolon = P(";")
+patterns.underscore = P("_")
+patterns.escaped = escaped
+patterns.squote = squote
+patterns.dquote = dquote
+patterns.nosquote = (escaped + (1-squote))^0
+patterns.nodquote = (escaped + (1-dquote))^0
+patterns.unsingle = (squote/"") * patterns.nosquote * (squote/"")
+patterns.undouble = (dquote/"") * patterns.nodquote * (dquote/"")
+patterns.unquoted = patterns.undouble + patterns.unsingle -- more often undouble
+patterns.unspacer = ((patterns.spacer^1)/"")^0
+
+patterns.somecontent = (anything - newline - space)^1 -- (utf8char - newline - space)^1
+patterns.beginline = #(1-newline)
+
+-- print(string.unquoted("test"))
+-- print(string.unquoted([["t\"est"]]))
+-- print(string.unquoted([["t\"est"x]]))
+-- print(string.unquoted("\'test\'"))
+-- print(string.unquoted('"test"'))
+-- print(string.unquoted('"test"'))
+
+function lpeg.anywhere(pattern) --slightly adapted from website
+ return P { P(pattern) + 1 * V(1) } -- why so complex?
+end
+
+function lpeg.splitter(pattern, action)
+ return (((1-P(pattern))^1)/action+1)^0
+end
+
+function lpeg.tsplitter(pattern, action)
+ return Ct((((1-P(pattern))^1)/action+1)^0)
+end
+
+-- problem: the separator can be an lpeg pattern and that does not hash too well,
+-- but it's quite okay as the key is then not garbage collected
+
+local splitters_s, splitters_m, splitters_t = { }, { }, { }
+
+local function splitat(separator,single)
+ local splitter = (single and splitters_s[separator]) or splitters_m[separator]
+ if not splitter then
+ separator = P(separator)
+ local other = C((1 - separator)^0)
+ if single then
+ local any = anything
+ splitter = other * (separator * C(any^0) + "") -- ?
+ splitters_s[separator] = splitter
+ else
+ splitter = other * (separator * other)^0
+ splitters_m[separator] = splitter
+ end
+ end
+ return splitter
+end
+
+local function tsplitat(separator)
+ local splitter = splitters_t[separator]
+ if not splitter then
+ splitter = Ct(splitat(separator))
+ splitters_t[separator] = splitter
+ end
+ return splitter
+end
+
+lpeg.splitat = splitat
+lpeg.tsplitat = tsplitat
+
+function string.splitup(str,separator)
+ if not separator then
+ separator = ","
+ end
+ return match(splitters_m[separator] or splitat(separator),str)
+end
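+-- usage sketches (values shown are illustrative):
+--
+-- string.splitup("key=value","=")              -- "key", "value"
+-- lpeg.match(lpeg.splitat("=",true),"a=b=c")   -- "a", "b=c" (single: split at the first separator only)
+-- lpeg.match(lpeg.tsplitat(","),"a,b,c")       -- { "a", "b", "c" }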
+
+
+local cache = { }
+
+function lpeg.split(separator,str)
+ local c = cache[separator]
+ if not c then
+ c = tsplitat(separator)
+ cache[separator] = c
+ end
+ return match(c,str)
+end
+
+function string.split(str,separator)
+ local c = cache[separator]
+ if not c then
+ c = tsplitat(separator)
+ cache[separator] = c
+ end
+ return match(c,str)
+end
+
+local spacing = patterns.spacer^0 * newline -- sort of strip
+local empty = spacing * Cc("")
+local nonempty = Cs((1-spacing)^1) * spacing^-1
+local content = (empty + nonempty)^1
+
+patterns.textline = content
+
+
+local linesplitter = tsplitat(newline)
+
+patterns.linesplitter = linesplitter
+
+function string.splitlines(str)
+ return match(linesplitter,str)
+end
+
+local utflinesplitter = utfbom^-1 * tsplitat(newline)
+
+patterns.utflinesplitter = utflinesplitter
+
+function string.utfsplitlines(str)
+ return match(utflinesplitter,str or "")
+end
+
+
+local cache = { }
+
+function lpeg.checkedsplit(separator,str)
+ local c = cache[separator]
+ if not c then
+ separator = P(separator)
+ local other = C((1 - separator)^1)
+ c = Ct(separator^0 * other * (separator^1 * other)^0)
+ cache[separator] = c
+ end
+ return match(c,str)
+end
+
+function string.checkedsplit(str,separator)
+ local c = cache[separator]
+ if not c then
+ separator = P(separator)
+ local other = C((1 - separator)^1)
+ c = Ct(separator^0 * other * (separator^1 * other)^0)
+ cache[separator] = c
+ end
+ return match(c,str)
+end
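+-- the difference between the two splitters (illustrative):
+--
+-- lpeg.split       (",","a,b,,c")   -- { "a", "b", "", "c" }   keeps empty strings
+-- lpeg.checkedsplit(",","a,b,,c")   -- { "a", "b", "c" }       drops empty strings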
+
+
+local function f2(s) local c1, c2 = byte(s,1,2) return c1 * 64 + c2 - 12416 end
+local function f3(s) local c1, c2, c3 = byte(s,1,3) return (c1 * 64 + c2) * 64 + c3 - 925824 end
+local function f4(s) local c1, c2, c3, c4 = byte(s,1,4) return ((c1 * 64 + c2) * 64 + c3) * 64 + c4 - 63447168 end
+
+local utf8byte = patterns.utf8one/byte + patterns.utf8two/f2 + patterns.utf8three/f3 + patterns.utf8four/f4
+
+patterns.utf8byte = utf8byte
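+-- utf8byte maps one utf-8 character onto its code point (illustrative):
+--
+-- lpeg.match(lpeg.patterns.utf8byte,"A")          -- 65
+-- lpeg.match(lpeg.patterns.utf8byte,"\195\169")   -- 233 (U+00E9)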
+
+
+
+local cache = { }
+
+function lpeg.stripper(str)
+ if type(str) == "string" then
+ local s = cache[str]
+ if not s then
+ s = Cs(((S(str)^1)/"" + 1)^0)
+ cache[str] = s
+ end
+ return s
+ else
+ return Cs(((str^1)/"" + 1)^0)
+ end
+end
+
+local cache = { }
+
+function lpeg.keeper(str)
+ if type(str) == "string" then
+ local s = cache[str]
+ if not s then
+ s = Cs((((1-S(str))^1)/"" + 1)^0)
+ cache[str] = s
+ end
+ return s
+ else
+ return Cs((((1-str)^1)/"" + 1)^0)
+ end
+end
+
+function lpeg.frontstripper(str) -- or pattern (yet undocumented)
+ return (P(str) + P(true)) * Cs(P(1)^0)
+end
+
+function lpeg.endstripper(str) -- or pattern (yet undocumented)
+ return Cs((1 - P(str) * P(-1))^0)
+end
+
+-- Just for fun I looked at the used bytecode and
+-- p = (p and p + pp) or pp gets one more (testset).
+
+function lpeg.replacer(one,two)
+ if type(one) == "table" then
+ local no = #one
+ if no > 0 then
+ local p
+ for i=1,no do
+ local o = one[i]
+ local pp = P(o[1]) / o[2]
+ if p then
+ p = p + pp
+ else
+ p = pp
+ end
+ end
+ return Cs((p + 1)^0)
+ end
+ else
+ two = two or ""
+ return Cs((P(one)/two + 1)^0)
+ end
+end
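+-- a usage sketch: a single replacement or a table of pairs (illustrative):
+--
+-- lpeg.match(lpeg.replacer("a","*"),"banana")                          -- "b*n*n*"
+-- lpeg.match(lpeg.replacer { { "<", "&lt;" }, { ">", "&gt;" } },"<x>") -- "&lt;x&gt;"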
+
+local splitters_f, splitters_s = { }, { }
+
+function lpeg.firstofsplit(separator) -- always return value
+ local splitter = splitters_f[separator]
+ if not splitter then
+ separator = P(separator)
+ splitter = C((1 - separator)^0)
+ splitters_f[separator] = splitter
+ end
+ return splitter
+end
+
+function lpeg.secondofsplit(separator) -- nil if not split
+ local splitter = splitters_s[separator]
+ if not splitter then
+ separator = P(separator)
+ splitter = (1 - separator)^0 * separator * C(anything^0)
+ splitters_s[separator] = splitter
+ end
+ return splitter
+end
+
+function lpeg.balancer(left,right)
+ left, right = P(left), P(right)
+ return P { left * ((1 - left - right) + V(1))^0 * right }
+end
+
+
+
+local nany = utf8char/""
+
+function lpeg.counter(pattern)
+ pattern = Cs((P(pattern)/" " + nany)^0)
+ return function(str)
+ return #match(pattern,str)
+ end
+end
+
+if utfgmatch then
+
+ function lpeg.count(str,what) -- replaces string.count
+ if type(what) == "string" then
+ local n = 0
+ for _ in utfgmatch(str,what) do
+ n = n + 1
+ end
+ return n
+ else -- 4 times slower but still faster than / function
+ return #match(Cs((P(what)/" " + nany)^0),str)
+ end
+ end
+
+else
+
+ local cache = { }
+
+ function lpeg.count(str,what) -- replaces string.count
+ if type(what) == "string" then
+ local p = cache[what]
+ if not p then
+ p = Cs((P(what)/" " + nany)^0)
+ cache[p] = p
+ end
+ return #match(p,str)
+ else -- 4 times slower but still faster than / function
+ return #match(Cs((P(what)/" " + nany)^0),str)
+ end
+ end
+
+end
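+-- counting occurrences in a (utf) string, by string or by pattern (illustrative):
+--
+-- lpeg.count("one two one","one")            -- 2
+-- lpeg.count("abc123",lpeg.patterns.digit)   -- 3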
+
+local patterns_escapes = { -- also defined in l-string
+ ["%"] = "%%",
+ ["."] = "%.",
+ ["+"] = "%+", ["-"] = "%-", ["*"] = "%*",
+ ["["] = "%[", ["]"] = "%]",
+ ["("] = "%(", [")"] = "%)",
+ -- ["{"] = "%{", ["}"] = "%}"
+ -- ["^"] = "%^", ["$"] = "%$",
+}
+
+local simple_escapes = { -- also defined in l-string
+ ["-"] = "%-",
+ ["."] = "%.",
+ ["?"] = ".",
+ ["*"] = ".*",
+}
+
+local p = Cs((S("-.+*%()[]") / patterns_escapes + anything)^0)
+local s = Cs((S("-.+*%()[]") / simple_escapes + anything)^0)
+
+function string.escapedpattern(str,simple)
+ return match(simple and s or p,str)
+end
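+-- turning a string into a (simple) lua pattern (illustrative):
+--
+-- string.escapedpattern("1.2-3")        -- "1%.2%-3"
+-- string.escapedpattern("*.lua",true)   -- ".*%.lua"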
+
+-- utf extensions
+
+lpeg.UP = lpeg.P
+
+if utfcharacters then
+
+ function lpeg.US(str)
+ local p
+ for uc in utfcharacters(str) do
+ if p then
+ p = p + P(uc)
+ else
+ p = P(uc)
+ end
+ end
+ return p
+ end
+
+
+elseif utfgmatch then
+
+ function lpeg.US(str)
+ local p
+ for uc in utfgmatch(str,".") do
+ if p then
+ p = p + P(uc)
+ else
+ p = P(uc)
+ end
+ end
+ return p
+ end
+
+else
+
+ function lpeg.US(str)
+ local p
+ local f = function(uc)
+ if p then
+ p = p + P(uc)
+ else
+ p = P(uc)
+ end
+ end
+ match((utf8char/f)^0,str)
+ return p
+ end
+
+end
+
+local range = Cs(utf8byte) * (Cs(utf8byte) + Cc(false))
+
+local utfchar = unicode and unicode.utf8 and unicode.utf8.char
+
+function lpeg.UR(str,more)
+ local first, last
+ if type(str) == "number" then
+ first = str
+ last = more or first
+ else
+ first, last = match(range,str)
+ if not last then
+ return P(str)
+ end
+ end
+ if first == last then
+ return P(str)
+ elseif utfchar and last - first < 8 then -- a somewhat arbitrary criterion
+ local p
+ for i=first,last do
+ if p then
+ p = p + P(utfchar(i))
+ else
+ p = P(utfchar(i))
+ end
+ end
+ return p -- nil when invalid range
+ else
+ local f = function(b)
+ return b >= first and b <= last
+ end
+ return utf8byte / f -- nil when invalid range
+ end
+end
+
+
+
+function lpeg.oneof(list,...) -- lpeg.oneof("elseif","else","if","then")
+ if type(list) ~= "table" then
+ list = { list, ... }
+ end
+ -- sort(list) -- longest match first
+ local p = P(list[1])
+ for l=2,#list do
+ p = p + P(list[l])
+ end
+ return p
+end
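+-- alternatives are tried in the given order, so put longer keywords first (illustrative):
+--
+-- local p = lpeg.oneof("elseif","else","if")
+-- lpeg.match(p,"elseif")   -- 7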
+
+function lpeg.is_lpeg(p)
+ return p and lpegtype(p) == "pattern"
+end
+
+-- For the moment here, but it might move to utilities. Beware, we need to
+-- have the longest keyword first, so 'aaa' comes before 'aa', which is why
+-- we loop back from the end (i.e. prepend).
+
+local sort, fastcopy, sortedkeys = table.sort, table.fastcopy, table.sortedkeys -- dependency!
+
+function lpeg.append(list,pp,delayed,checked)
+ local p = pp
+ if #list > 0 then
+ local keys = fastcopy(list)
+ sort(keys)
+ for i=#keys,1,-1 do
+ local k = keys[i]
+ if p then
+ p = P(k) + p
+ else
+ p = P(k)
+ end
+ end
+ elseif delayed then -- hm, it looks like the lpeg parser resolves anyway
+ local keys = sortedkeys(list)
+ if p then
+ for i=1,#keys,1 do
+ local k = keys[i]
+ local v = list[k]
+ p = P(k)/list + p
+ end
+ else
+ for i=1,#keys do
+ local k = keys[i]
+ local v = list[k]
+ if p then
+ p = P(k) + p
+ else
+ p = P(k)
+ end
+ end
+ if p then
+ p = p / list
+ end
+ end
+ elseif checked then
+ -- problem: substitution gives a capture
+ local keys = sortedkeys(list)
+ for i=1,#keys do
+ local k = keys[i]
+ local v = list[k]
+ if p then
+ if k == v then
+ p = P(k) + p
+ else
+ p = P(k)/v + p
+ end
+ else
+ if k == v then
+ p = P(k)
+ else
+ p = P(k)/v
+ end
+ end
+ end
+ else
+ local keys = sortedkeys(list)
+ for i=1,#keys do
+ local k = keys[i]
+ local v = list[k]
+ if p then
+ p = P(k)/v + p
+ else
+ p = P(k)/v
+ end
+ end
+ end
+ return p
+end
+
+-- inspect(lpeg.append({ a = "1", aa = "1", aaa = "1" } ,nil,true))
+-- inspect(lpeg.append({ ["degree celsius"] = "1", celsius = "1", degree = "1" } ,nil,true))
+
+-- function lpeg.exact_match(words,case_insensitive)
+-- local pattern = concat(words)
+-- if case_insensitive then
+-- local pattern = S(upper(characters)) + S(lower(characters))
+-- local list = { }
+-- for i=1,#words do
+-- list[lower(words[i])] = true
+-- end
+-- return Cmt(pattern^1, function(_,i,s)
+-- return list[lower(s)] and i
+-- end)
+-- else
+-- local pattern = S(concat(words))
+-- local list = { }
+-- for i=1,#words do
+-- list[words[i]] = true
+-- end
+-- return Cmt(pattern^1, function(_,i,s)
+-- return list[s] and i
+-- end)
+-- end
+-- end
+
+-- experiment:
+
+local function make(t)
+ local p
+-- for k, v in next, t do
+ for k, v in table.sortedhash(t) do
+ if not p then
+ if next(v) then
+ p = P(k) * make(v)
+ else
+ p = P(k)
+ end
+ else
+ if next(v) then
+ p = p + P(k) * make(v)
+ else
+ p = p + P(k)
+ end
+ end
+ end
+ return p
+end
+
+function lpeg.utfchartabletopattern(list)
+ local tree = { }
+ for i=1,#list do
+ local t = tree
+ for c in gmatch(list[i],".") do
+ if not t[c] then
+ t[c] = { }
+ end
+ t = t[c]
+ end
+ end
+ return make(tree)
+end
+
+-- inspect ( lpeg.utfchartabletopattern {
+-- utfchar(0x00A0), -- nbsp
+-- utfchar(0x2000), -- enquad
+-- utfchar(0x2001), -- emquad
+-- utfchar(0x2002), -- enspace
+-- utfchar(0x2003), -- emspace
+-- utfchar(0x2004), -- threeperemspace
+-- utfchar(0x2005), -- fourperemspace
+-- utfchar(0x2006), -- sixperemspace
+-- utfchar(0x2007), -- figurespace
+-- utfchar(0x2008), -- punctuationspace
+-- utfchar(0x2009), -- breakablethinspace
+-- utfchar(0x200A), -- hairspace
+-- utfchar(0x200B), -- zerowidthspace
+-- utfchar(0x202F), -- narrownobreakspace
+-- utfchar(0x205F), -- math thinspace
+-- } )
+
end -- of closure
@@ -1831,6 +2056,11 @@ function io.readstring(f,n,m)
return str
end
+--
+
+if not io.i_limiter then function io.i_limiter() end end -- dummy so we can test safely
+if not io.o_limiter then function io.o_limiter() end end -- dummy so we can test safely
+
end -- of closure
@@ -2061,7 +2291,7 @@ if not modules then modules = { } end modules ['l-os'] = {
-- maybe build io.flush in os.execute
local os = os
-local find, format, gsub, upper = string.find, string.format, string.gsub, string.upper
+local find, format, gsub, upper, gmatch = string.find, string.format, string.gsub, string.upper, string.gmatch
local concat = table.concat
local random, ceil = math.random, math.ceil
local rawget, rawset, type, getmetatable, setmetatable, tonumber = rawget, rawset, type, getmetatable, setmetatable, tonumber
@@ -2167,9 +2397,9 @@ os.type = os.type or (io.pathseparator == ";" and "windows") or "unix"
os.name = os.name or (os.type == "windows" and "mswin" ) or "linux"
if os.type == "windows" then
- os.libsuffix, os.binsuffix = 'dll', 'exe'
+ os.libsuffix, os.binsuffix, os.binsuffixes = 'dll', 'exe', { 'exe', 'cmd', 'bat' }
else
- os.libsuffix, os.binsuffix = 'so', ''
+ os.libsuffix, os.binsuffix, os.binsuffixes = 'so', '', { '' }
end
function os.launch(str)
@@ -2400,6 +2630,39 @@ function os.timezone(delta)
end
end
+local memory = { }
+
+local function which(filename)
+ local fullname = memory[filename]
+ if fullname == nil then
+ local suffix = file.suffix(filename)
+ local suffixes = suffix == "" and os.binsuffixes or { suffix }
+ for directory in gmatch(os.getenv("PATH"),"[^" .. io.pathseparator .."]+") do
+ local df = file.join(directory,filename)
+ for i=1,#suffixes do
+ local dfs = file.addsuffix(df,suffixes[i])
+ if io.exists(dfs) then
+ fullname = dfs
+ break
+ end
+ end
+ end
+ if not fullname then
+ fullname = false
+ end
+ memory[filename] = fullname
+ end
+ return fullname
+end
+
+os.which = which
+os.where = which
+
+-- print(os.which("inkscape.exe"))
+-- print(os.which("inkscape"))
+-- print(os.which("gs.exe"))
+-- print(os.which("ps2pdf"))
+
end -- of closure
@@ -2499,10 +2762,10 @@ end
local trick_1 = char(1)
local trick_2 = "^" .. trick_1 .. "/+"
-function file.join(...)
+function file.join(...) -- rather dirty
local lst = { ... }
local a, b = lst[1], lst[2]
- if a == "" then
+ if not a or a == "" then -- not a added
lst[1] = trick_1
elseif b and find(a,"^/+$") and find(b,"^/") then
lst[1] = ""
@@ -2523,6 +2786,15 @@ function file.join(...)
end
+-- We should be able to use:
+--
+-- function file.is_writable(name)
+-- local a = attributes(name) or attributes(dirname(name,"."))
+-- return a and sub(a.permissions,2,2) == "w"
+-- end
+--
+-- But after some testing Taco and I came up with:
+
function file.is_writable(name)
if lfs.isdir(name) then
name = name .. "/m_t_x_t_e_s_t.tmp"
@@ -2532,12 +2804,17 @@ function file.is_writable(name)
os.remove(name)
return true
end
- else
- local existing = lfs.isfile(name)
- f = io.open(name,"ab")
+ elseif lfs.isfile(name) then
+ local f = io.open(name,"ab")
if f then
f:close()
- if not existing then os.remove(name) end
+ return true
+ end
+ else
+ local f = io.open(name,"ab")
+ if f then
+ f:close()
+ os.remove(name)
return true
end
end
@@ -2594,7 +2871,7 @@ function file.collapsepath(str,anchor)
if element == '.' then
-- do nothing
elseif element == '..' then
- local n = i -1
+ local n = i - 1
while n > 0 do
local element = oldelements[n]
if element ~= '..' and element ~= '.' then
@@ -2690,10 +2967,42 @@ local path = C(((1-slash)^0 * slash)^0)
local suffix = period * C(P(1-period)^0 * P(-1))
local base = C((1-suffix)^0)
-local pattern = (drive + Cc("")) * (path + Cc("")) * (base + Cc("")) * (suffix + Cc(""))
+drive = drive + Cc("")
+path = path + Cc("")
+base = base + Cc("")
+suffix = suffix + Cc("")
-function file.splitname(str) -- returns drive, path, base, suffix
- return lpegmatch(pattern,str)
+local pattern_a = drive * path * base * suffix
+local pattern_b = path * base * suffix
+local pattern_c = C(drive * path) * C(base * suffix)
+
+function file.splitname(str,splitdrive)
+ if splitdrive then
+ return lpegmatch(pattern_a,str) -- returns drive, path, base, suffix
+ else
+ return lpegmatch(pattern_b,str) -- returns path, base, suffix
+ end
+end
+
+function file.nametotable(str,splitdrive) -- returns table
+ local path, drive, subpath, name, base, suffix = lpegmatch(pattern_c,str)
+ if splitdrive then
+ return {
+ path = path,
+ drive = drive,
+ subpath = subpath,
+ name = name,
+ base = base,
+ suffix = suffix,
+ }
+ else
+ return {
+ path = path,
+ name = name,
+ base = base,
+ suffix = suffix,
+ }
+ end
end
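+-- a usage sketch (values are illustrative):
+--
+-- file.splitname("a/b/c.lua")   -- "a/b/", "c", "lua"   (path, base, suffix)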
-- function test(t) for k, v in next, t do print(v, "=>", file.splitname(v)) end end
@@ -2828,12 +3137,19 @@ local nothing = Cc("")
local escaped = (plus / " ") + (percent * C(hexdigit * hexdigit) / tochar)
-- we assume schemes with more than 1 character (in order to avoid problems with windows disks)
+-- we also assume that when we have a scheme, we also have an authority
+
+local schemestr = Cs((escaped+(1-colon-slash-qmark-hash))^2)
+local authoritystr = Cs((escaped+(1- slash-qmark-hash))^0)
+local pathstr = Cs((escaped+(1- qmark-hash))^0)
+local querystr = Cs((escaped+(1- hash))^0)
+local fragmentstr = Cs((escaped+(1- endofstring))^0)
-local scheme = Cs((escaped+(1-colon-slash-qmark-hash))^2) * colon * slash * slash + nothing
-local authority = Cs((escaped+(1- slash-qmark-hash))^0) + nothing
-local path = slash * Cs((escaped+(1- qmark-hash))^0) + nothing
-local query = qmark * Cs((escaped+(1- hash))^0) + nothing
-local fragment = hash * Cs((escaped+(1- endofstring))^0) + nothing
+local scheme = schemestr * colon + nothing
+local authority = slash * slash * authoritystr + nothing
+local path = slash * pathstr + nothing
+local query = qmark * querystr + nothing
+local fragment = hash * fragmentstr + nothing
local validurl = scheme * authority * path * query * fragment
local parser = Ct(validurl)
@@ -2854,11 +3170,14 @@ local function split(str)
return (type(str) == "string" and lpegmatch(parser,str)) or str
end
+local isscheme = schemestr * colon * slash * slash -- this test also assumes authority
+
local function hasscheme(str)
- local scheme = lpegmatch(scheme,str) -- at least one character
- return scheme and scheme ~= ""
+ local scheme = lpegmatch(isscheme,str) -- at least one character
+ return scheme ~= "" and scheme or false
end
+
-- todo: cache them
local rootletter = R("az","AZ")
@@ -3007,8 +3326,6 @@ local attributes = lfs.attributes
local walkdir = lfs.dir
local isdir = lfs.isdir
local isfile = lfs.isfile
-local mkdir = lfs.mkdir
-local chdir = lfs.chdir
local currentdir = lfs.currentdir
-- handy
@@ -3056,13 +3373,13 @@ local function collectpattern(path,patt,recurse,result)
local ok, scanner
result = result or { }
if path == "/" then
- ok, scanner = xpcall(function() return walkdir(path..".") end, function() end) -- kepler safe
+ ok, scanner, first = xpcall(function() return walkdir(path..".") end, function() end) -- kepler safe
else
- ok, scanner = xpcall(function() return walkdir(path) end, function() end) -- kepler safe
+ ok, scanner, first = xpcall(function() return walkdir(path) end, function() end) -- kepler safe
end
if ok and type(scanner) == "function" then
if not find(path,"/$") then path = path .. '/' end
- for name in scanner do
+ for name in scanner, first do
local full = path .. name
local attr = attributes(full)
local mode = attr.mode
@@ -3245,7 +3562,7 @@ if onwindows then
pth = pth .. "/" .. s
end
if make_indeed and not isdir(pth) then
- mkdir(pth)
+ lfs.mkdir(pth)
end
end
return pth, (isdir(pth) == true)
@@ -3277,7 +3594,7 @@ else
pth = pth .. "/" .. s
end
if make_indeed and not first and not isdir(pth) then
- mkdir(pth)
+ lfs.mkdir(pth)
end
end
else
@@ -3285,7 +3602,7 @@ else
for s in gmatch(str,"[^/]+") do
pth = pth .. "/" .. s
if make_indeed and not isdir(pth) then
- mkdir(pth)
+ lfs.mkdir(pth)
end
end
end
@@ -3313,10 +3630,10 @@ if onwindows then
first, last = match(str,"^([a-zA-Z]:)(.*)$")
if first and not find(last,"^/") then
local d = currentdir()
- if chdir(first) then
+ if lfs.chdir(first) then
first = dir.current()
end
- chdir(d)
+ lfs.chdir(d)
end
end
if not first then
@@ -3381,10 +3698,6 @@ local type, tonumber = type, tonumber
boolean = boolean or { }
local boolean = boolean
--- function boolean.tonumber(b)
--- return b and 1 or 0 -- test and test and return or return
--- end
-
function boolean.tonumber(b)
if b then return 1 else return 0 end -- test and return or return
end
@@ -3545,7 +3858,7 @@ local function utf16_to_utf8_be(t)
if right then
local now = 256*left + right
if more > 0 then
- now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000
+ now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000 -- the 0x10000 smells wrong
more = 0
r = r + 1
result[r] = utfchar(now)
@@ -3573,7 +3886,7 @@ local function utf16_to_utf8_le(t)
if right then
local now = 256*right + left
if more > 0 then
- now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000
+ now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000 -- the 0x10000 smells wrong
more = 0
r = r + 1
result[r] = utfchar(now)
@@ -3590,14 +3903,14 @@ local function utf16_to_utf8_le(t)
return t
end
-local function utf32_to_utf8_be(str)
+local function utf32_to_utf8_be(t)
if type(t) == "string" then
- t = utfsplitlines(str)
+ t = utfsplitlines(t)
end
local result = { } -- we reuse result
for i=1,#t do
local r, more = 0, -1
- for a,b in bytepairs(str) do
+ for a,b in bytepairs(t[i]) do
if a and b then
if more < 0 then
more = 256*256*256*a + 256*256*b
@@ -3612,17 +3925,17 @@ local function utf32_to_utf8_be(str)
end
t[i] = concat(result,"",1,r)
end
- return result
+ return t
end
-local function utf32_to_utf8_le(str)
+local function utf32_to_utf8_le(t)
if type(t) == "string" then
- t = utfsplitlines(str)
+ t = utfsplitlines(t)
end
local result = { } -- we reuse result
for i=1,#t do
local r, more = 0, -1
- for a,b in bytepairs(str) do
+ for a,b in bytepairs(t[i]) do
if a and b then
if more < 0 then
more = 256*b + a
@@ -3637,7 +3950,7 @@ local function utf32_to_utf8_le(str)
end
t[i] = concat(result,"",1,r)
end
- return result
+ return t
end
unicode.utf32_to_utf8_be = utf32_to_utf8_be
@@ -3706,15 +4019,67 @@ end
local lpegmatch = lpeg.match
-local utftype = lpeg.patterns.utftype
+local patterns = lpeg.patterns
+local utftype = patterns.utftype
function unicode.filetype(data)
return data and lpegmatch(utftype,data) or "unknown"
end
+local toentities = lpeg.Cs (
+ (
+ patterns.utf8one
+ + (
+ patterns.utf8two
+ + patterns.utf8three
+ + patterns.utf8four
+ ) / function(s) local b = utfbyte(s) if b < 127 then return s else return format("&#%X;",b) end end
+ )^0
+)
+
+patterns.toentities = toentities
+
+function utf.toentities(str)
+ return lpegmatch(toentities,str)
+end
+
+local P, C, R, Cs = lpeg.P, lpeg.C, lpeg.R, lpeg.Cs
+
+local one = P(1)
+local two = C(1) * C(1)
+local four = C(R(utfchar(0xD8),utfchar(0xFF))) * C(1) * C(1) * C(1)
+
+local pattern = P("\254\255") * Cs( (
+ four / function(a,b,c,d)
+ local ab = 0xFF * byte(a) + byte(b)
+ local cd = 0xFF * byte(c) + byte(d)
+ return utfchar((ab-0xD800)*0x400 + (cd-0xDC00) + 0x10000)
+ end
+ + two / function(a,b)
+ return utfchar(byte(a)*256 + byte(b))
+ end
+ + one
+ )^1 )
+ + P("\255\254") * Cs( (
+ four / function(b,a,d,c)
+ local ab = 0xFF * byte(a) + byte(b)
+ local cd = 0xFF * byte(c) + byte(d)
+ return utfchar((ab-0xD800)*0x400 + (cd-0xDC00) + 0x10000)
+ end
+ + two / function(b,a)
+ return utfchar(byte(a)*256 + byte(b))
+ end
+ + one
+ )^1 )
+
+function string.toutf(s)
+ return lpegmatch(pattern,s) or s -- todo: utf32
+end
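+-- a usage sketch: utf-16 (with byte order mark) to utf-8 (illustrative):
+--
+-- string.toutf("\254\255\000A\000b")   -- "Ab"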
+
+
end -- of closure
do -- create closure to overcome 200 locals limit
@@ -3750,8 +4115,8 @@ if not math.sind then
end
if not math.odd then
- function math.odd (n) return n % 2 == 0 end
- function math.even(n) return n % 2 ~= 0 end
+ function math.odd (n) return n % 2 ~= 0 end
+ function math.even(n) return n % 2 == 0 end
end
@@ -3771,9 +4136,10 @@ utilities = utilities or {}
utilities.tables = utilities.tables or { }
local tables = utilities.tables
-local format, gmatch = string.format, string.gmatch
+local format, gmatch, rep = string.format, string.gmatch, string.rep
local concat, insert, remove = table.concat, table.insert, table.remove
local setmetatable, getmetatable, tonumber, tostring = setmetatable, getmetatable, tonumber, tostring
+local type, next, rawset, tonumber = type, next, rawset, tonumber
function tables.definetable(target) -- defines undefined tables
local composed, t, n = nil, { }, 0
@@ -3789,14 +4155,31 @@ function tables.definetable(target) -- defines undefined tables
return concat(t,"\n")
end
-function tables.accesstable(target)
- local t = _G
+function tables.accesstable(target,root)
+ local t = root or _G
for name in gmatch(target,"([^%.]+)") do
t = t[name]
+ if not t then
+ return
+ end
end
return t
end
+function tables.migratetable(target,v,root)
+ local t = root or _G
+ local names = string.split(target,".")
+ for i=1,#names-1 do
+ local name = names[i]
+ t[name] = t[name] or { }
+ t = t[name]
+ if not t then
+ return
+ end
+ end
+ t[names[#names]] = v
+end
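+-- a usage sketch (illustrative):
+--
+-- local root = { }
+-- utilities.tables.migratetable("a.b.c",123,root)   -- creates root.a.b and sets the c field
+-- utilities.tables.accesstable ("a.b.c",root)       -- 123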
+
function tables.removevalue(t,value) -- todo: n
if value then
for i=1,#t do
@@ -3840,13 +4223,19 @@ end
-- experimental
-local function toxml(t,d,result)
+local function toxml(t,d,result,step)
for k, v in table.sortedpairs(t) do
if type(v) == "table" then
- result[#result+1] = format("%s<%s>",d,k)
- toxml(v,d.." ",result)
- result[#result+1] = format("%s</%s>",d,k)
- elseif tonumber(k) then
+ if type(k) == "number" then
+ result[#result+1] = format("%s<entry n='%s'>",d,k)
+ toxml(v,d..step,result,step)
+ result[#result+1] = format("%s</entry>",d,k)
+ else
+ result[#result+1] = format("%s<%s>",d,k)
+ toxml(v,d..step,result,step)
+ result[#result+1] = format("%s</%s>",d,k)
+ end
+ elseif type(k) == "number" then
result[#result+1] = format("%s<entry n='%s'>%s</entry>",d,k,v,k)
else
result[#result+1] = format("%s<%s>%s</%s>",d,k,tostring(v),k)
@@ -3854,17 +4243,56 @@ local function toxml(t,d,result)
end
end
-function table.toxml(t,name,nobanner)
+function table.toxml(t,name,nobanner,indent,spaces)
local noroot = name == false
local result = (nobanner or noroot) and { } or { "<?xml version='1.0' standalone='yes' ?>" }
+ local indent = rep(" ",indent or 0)
+ local spaces = rep(" ",spaces or 1)
if noroot then
- toxml( t, "", result)
+ toxml( t, indent, result, spaces)
else
- toxml( { [name or "root"] = t }, "", result)
+ toxml( { [name or "root"] = t }, indent, result, spaces)
end
return concat(result,"\n")
end
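+-- a usage sketch (output is illustrative):
+--
+-- print(table.toxml({ title = "test" },"data"))
+--
+-- <?xml version='1.0' standalone='yes' ?>
+-- <data>
+--  <title>test</title>
+-- </data>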
+-- also experimental
+
+-- encapsulate(table,utilities.tables)
+-- encapsulate(table,utilities.tables,true)
+-- encapsulate(table,true)
+
+function tables.encapsulate(core,capsule,protect)
+ if type(capsule) ~= "table" then
+ protect = true
+ capsule = { }
+ end
+ for key, value in next, core do
+ if capsule[key] then
+ print(format("\ninvalid inheritance '%s' in '%s'",key,tostring(core)))
+ os.exit()
+ else
+ capsule[key] = value
+ end
+ end
+ if protect then
+ for key, value in next, core do
+ core[key] = nil
+ end
+ setmetatable(core, {
+ __index = capsule,
+ __newindex = function(t,key,value)
+ if capsule[key] then
+ print(format("\ninvalid overload '%s' in '%s'",key,tostring(core)))
+ os.exit()
+ else
+ rawset(t,key,value)
+ end
+ end
+ } )
+ end
+end
+
end -- of closure
@@ -3886,8 +4314,8 @@ local storage = utilities.storage
function storage.mark(t)
if not t then
- texio.write_nl("fatal error: storage '%s' cannot be marked",t)
- os.exit()
+ texio.write_nl("fatal error: storage cannot be marked")
+ return -- os.exit()
end
local m = getmetatable(t)
if not m then
@@ -3916,8 +4344,8 @@ end
function storage.checked(t)
if not t then
- texio.write_nl("fatal error: storage '%s' has not been allocated",t)
- os.exit()
+ texio.write_nl("fatal error: storage has not been allocated")
+ return -- os.exit()
end
return t
end
@@ -3946,13 +4374,13 @@ end
-- table namespace ?
-local function f_empty () return "" end -- t,k
-local function f_self (t,k) t[k] = k return k end
-local function f_ignore() end -- t,k,v
+local function f_empty () return "" end -- t,k
+local function f_self (t,k) t[k] = k return k end
+local function f_ignore() end -- t,k,v
-local t_empty = { __index = empty }
-local t_self = { __index = self }
-local t_ignore = { __newindex = ignore }
+local t_empty = { __index = f_empty }
+local t_self = { __index = f_self }
+local t_ignore = { __newindex = f_ignore }
function table.setmetatableindex(t,f)
local m = getmetatable(t)
@@ -3973,6 +4401,7 @@ function table.setmetatableindex(t,f)
setmetatable(t,{ __index = f })
end
end
+ return t
end
function table.setmetatablenewindex(t,f)
@@ -3990,6 +4419,7 @@ function table.setmetatablenewindex(t,f)
setmetatable(t,{ __newindex = f })
end
end
+ return t
end
function table.setmetatablecall(t,f)
@@ -3999,6 +4429,7 @@ function table.setmetatablecall(t,f)
else
setmetatable(t,{ __call = f })
end
+ return t
end
function table.setmetatablekey(t,key,value)
@@ -4008,6 +4439,7 @@ function table.setmetatablekey(t,key,value)
setmetatable(t,m)
end
m[key] = value
+ return t
end
function table.getmetatablekey(t,key,value)
@@ -4176,14 +4608,29 @@ utilities = utilities or {}
utilities.lua = utilities.lua or { }
utilities.report = logs and logs.reporter("system") or print
-function utilities.lua.compile(luafile,lucfile,cleanup,strip) -- defaults: cleanup=false strip=true
+local function stupidcompile(luafile,lucfile)
+ local data = io.loaddata(luafile)
+ if data and data ~= "" then
+ data = string.dump(loadstring(data)) -- dump wants a function, so load the chunk first
+ if data and data ~= "" then
+ io.savedata(lucfile,data)
+ end
+ end
+end
+
+function utilities.lua.compile(luafile,lucfile,cleanup,strip,fallback) -- defaults: cleanup=false strip=true
utilities.report("lua: compiling %s into %s",luafile,lucfile)
os.remove(lucfile)
local command = "-o " .. string.quoted(lucfile) .. " " .. string.quoted(luafile)
if strip ~= false then
command = "-s " .. command
end
- local done = os.spawn("texluac " .. command) == 0 or os.spawn("luac " .. command) == 0
+ local done = os.spawn("texluac " .. command) == 0 -- or os.spawn("luac " .. command) == 0
+ if not done and fallback then
+ utilities.report("lua: dumping %s into %s (unstripped)",luafile,lucfile)
+ stupidcompile(luafile,lucfile) -- maybe use the stripper we have elsewhere
+ cleanup = false -- better see how worse it is
+ end
if done and cleanup == true and lfs.isfile(lucfile) and lfs.isfile(luafile) then
utilities.report("lua: removing %s",luafile)
os.remove(luafile)
@@ -4210,9 +4657,9 @@ if not modules then modules = { } end modules ['util-prs'] = {
license = "see context related readme files"
}
-local P, R, V, C, Ct, Carg = lpeg.P, lpeg.R, lpeg.V, lpeg.C, lpeg.Ct, lpeg.Carg
+local P, R, V, C, Ct, Cs, Carg = lpeg.P, lpeg.R, lpeg.V, lpeg.C, lpeg.Ct, lpeg.Cs, lpeg.Carg
local lpegmatch = lpeg.match
-local concat, format, gmatch = table.concat, string.format, string.gmatch
+local concat, format, gmatch, find = table.concat, string.format, string.gmatch, string.find
local tostring, type, next = tostring, type, next
utilities = utilities or {}
@@ -4240,8 +4687,12 @@ local rbrace = P("}")
local nobrace = 1 - (lbrace+rbrace)
local nested = P { lbrace * (nobrace + V(1))^0 * rbrace }
local spaces = space^0
+local argument = Cs((lbrace/"") * ((nobrace + nested)^0) * (rbrace/""))
+local content = (1-P(-1))^0
-lpeg.patterns.nested = nested
+lpeg.patterns.nested = nested -- no capture
+lpeg.patterns.argument = argument -- argument after e.g. =
+lpeg.patterns.content = content -- rest after e.g =
local value = P(lbrace * C((nobrace + nested)^0) * rbrace) + C((nested + (1-comma))^0)
@@ -4322,9 +4773,15 @@ parsers.patterns.settings_to_array = pattern
-- we could use a weak table as cache
-function parsers.settings_to_array(str)
+function parsers.settings_to_array(str,strict)
if not str or str == "" then
return { }
+ elseif strict then
+ if find(str,"{") then
+ return lpegmatch(pattern,str)
+ else
+ return { str }
+ end
else
return lpegmatch(pattern,str)
end
@@ -4562,61 +5019,55 @@ local names = { }
-- one
local function hook()
- local f = getinfo(2,"f").func
- local n = getinfo(2,"Sn")
--- if n.what == "C" and n.name then print (n.namewhat .. ': ' .. n.name) end
+ local f = getinfo(2) -- "nS"
if f then
- local cf = counters[f]
- if cf == nil then
- counters[f] = 1
- names[f] = n
- else
- counters[f] = cf + 1
- end
- end
-end
-
-local function getname(func)
- local n = names[func]
- if n then
- if n.what == "C" then
- return n.name or '<anonymous>'
+ local n = "unknown"
+ if f.what == "C" then
+ n = f.name or '<anonymous>'
+ if not names[n] then
+ names[n] = format("%42s",n)
+ end
else
-- source short_src linedefined what name namewhat nups func
- local name = n.name or n.namewhat or n.what
- if not name or name == "" then name = "?" end
- return format("%s : %s : %s", n.short_src or "unknown source", n.linedefined or "--", name)
+ n = f.name or f.namewhat or f.what
+ if not n or n == "" then
+ n = "?"
+ end
+ if not names[n] then
+ names[n] = format("%42s : % 5i : %s",n,f.linedefined or 0,f.short_src or "unknown source")
+ end
end
- else
- return "unknown"
+ counters[n] = (counters[n] or 0) + 1
end
end
-function debugger.showstats(printer,threshold)
+function debugger.showstats(printer,threshold) -- hm, something has changed, rubbish now
printer = printer or texio.write or print
threshold = threshold or 0
local total, grandtotal, functions = 0, 0, 0
- printer("\n") -- ugly but ok
- -- table.sort(counters)
- for func, count in next, counters do
- if count > threshold then
- local name = getname(func)
- if not find(name,"for generator") then
- printer(format("%8i %s", count, name))
- total = total + count
- end
+ local dataset = { }
+ for name, count in next, counters do
+ dataset[#dataset+1] = { name, count }
+ end
+ table.sort(dataset,function(a,b) return a[2] == b[2] and b[1] > a[1] or a[2] > b[2] end)
+ for i=1,#dataset do
+ local d = dataset[i]
+ local name = d[1]
+ local count = d[2]
+ if count > threshold and not find(name,"for generator") then -- move up
+ printer(format("%8i %s\n", count, names[name]))
+ total = total + count
end
grandtotal = grandtotal + count
functions = functions + 1
end
- printer(format("functions: %s, total: %s, grand total: %s, threshold: %s\n", functions, total, grandtotal, threshold))
+ printer("\n")
+ printer(format("functions : % 10i\n", functions))
+ printer(format("total : % 10i\n", total))
+ printer(format("grand total: % 10i\n", grandtotal))
+ printer(format("threshold : % 10i\n", threshold))
end
--- two
-
-
--- rest
-
function debugger.savestats(filename,threshold)
local f = io.open(filename,'w')
if f then
@@ -4638,6 +5089,7 @@ end
local is_node = node and node.is_node
+local is_lpeg = lpeg and lpeg.type
function inspect(i) -- global function
local ti = type(i)
@@ -4645,11 +5097,30 @@ function inspect(i) -- global function
table.print(i,"table")
elseif is_node and is_node(i) then
table.print(nodes.astable(i),tostring(i))
+ elseif is_lpeg and is_lpeg(i) then
+ lpeg.print(i)
else
print(tostring(i))
end
end
+-- from the lua book:
+
+function traceback()
+ local level = 1
+ while true do
+ local info = debug.getinfo(level, "Sl")
+ if not info then
+ break
+ elseif info.what == "C" then
+ print(format("%3i : C function",level))
+ else
+ print(format("%3i : [%s]:%d",level,info.short_src,info.currentline))
+ end
+ level = level + 1
+ end
+end
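+-- calling traceback() prints one line per active stack level, e.g. (illustrative):
+--
+--   2 : [./somefile.lua]:12
+--   3 : C function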
+
end -- of closure
@@ -4668,7 +5139,7 @@ if not modules then modules = { } end modules ['trac-inf'] = {
-- get warnings about assignments. This is more efficient than using rawset
-- and rawget.
-local format = string.format
+local format, lower = string.format, string.lower
local clock = os.gettimeofday or os.clock -- should go in environment
local write_nl = texio.write_nl
@@ -4770,10 +5241,10 @@ function statistics.show(reporter)
-- this code will move
local register = statistics.register
register("luatex banner", function()
- return string.lower(status.banner)
+ return lower(status.banner)
end)
register("control sequences", function()
- return format("%s of %s", status.cs_count, status.hash_size+status.hash_extra)
+ return format("%s of %s + %s", status.cs_count, status.hash_size,status.hash_extra)
end)
register("callbacks", function()
local total, indirect = status.callbacks or 0, status.indirect_callbacks or 0
@@ -4847,7 +5318,7 @@ end
function commands.elapsedtime(name)
stoptiming(name or "whatever")
- tex.sprint(elapsedtime(name or "whatever"))
+ context(elapsedtime(name or "whatever"))
end
@@ -4931,6 +5402,10 @@ local function set(t,what,newvalue)
if type(what) ~= "table" then
return
end
+ if not done then -- catch ... why not set?
+ done = { }
+ t.done = done
+ end
for w, value in next, what do
if value == "" then
value = newvalue
@@ -5167,6 +5642,8 @@ end
if texconfig then
+ -- this happens too late in ini mode but that is no problem
+
local function set(k,v)
v = tonumber(v)
if v then
@@ -5237,7 +5714,7 @@ local report, subreport, status, settarget, setformats, settranslations
local direct, subdirect, writer, pushtarget, poptarget
-if tex and tex.jobname or tex.formatname then
+if tex and (tex.jobname or tex.formatname) then
local valueiskey = { __index = function(t,k) t[k] = k return k end } -- will be helper
@@ -5252,6 +5729,10 @@ if tex and tex.jobname or tex.formatname then
write_nl(target,...)
end
+ newline = function()
+ write_nl(target,"\n")
+ end
+
report = function(a,b,c,...)
if c then
write_nl(target,format("%-15s > %s\n",translations[a],format(formats[b],c,...)))
@@ -5357,6 +5838,10 @@ else
writer = write_nl
+ newline = function()
+ write_nl("\n")
+ end
+
report = function(a,b,c,...)
if c then
write_nl(format("%-15s | %s",a,format(b,c,...)))
@@ -5416,6 +5901,7 @@ logs.settranslations = settranslations
logs.direct = direct
logs.subdirect = subdirect
logs.writer = writer
+logs.newline = newline
-- installer
@@ -5585,21 +6071,58 @@ local real, user, sub
function logs.start_page_number()
real, user, sub = texcount.realpageno, texcount.userpageno, texcount.subpageno
+-- real, user, sub = 0, 0, 0
end
-function logs.stop_page_number()
- if real > 0 then
- if user > 0 then
- if sub > 0 then
- report_pages("flushing realpage %s, userpage %s, subpage %s",real,user,sub)
+local timing = false
+local starttime = nil
+local lasttime = nil
+
+trackers.register("pages.timing", function(v) -- only for myself (diagnostics)
+ starttime = os.clock()
+ timing = true
+end)
+
+function logs.stop_page_number() -- the first page can include the initialization so we omit it from the average
+ if timing then
+ local elapsed, average
+ local stoptime = os.clock()
+ if not lasttime or real < 2 then
+ elapsed = stoptime
+ average = stoptime
+ starttime = stoptime
+ else
+ elapsed = stoptime - lasttime
+ average = (stoptime - starttime) / (real - 1)
+ end
+ lasttime = stoptime
+ if real > 0 then
+ if user > 0 then
+ if sub > 0 then
+ report_pages("flushing realpage %s, userpage %s, subpage %s, time %0.04f / %0.04f",real,user,sub,elapsed,average)
+ else
+ report_pages("flushing realpage %s, userpage %s, time %0.04f / %0.04f",real,user,elapsed,average)
+ end
else
- report_pages("flushing realpage %s, userpage %s",real,user)
+ report_pages("flushing realpage %s, time %0.04f / %0.04f",real,elapsed,average)
end
else
- report_pages("flushing realpage %s",real)
+ report_pages("flushing page, time %0.04f / %0.04f",elapsed,average)
end
else
- report_pages("flushing page")
+ if real > 0 then
+ if user > 0 then
+ if sub > 0 then
+ report_pages("flushing realpage %s, userpage %s, subpage %s",real,user,sub)
+ else
+ report_pages("flushing realpage %s, userpage %s",real,user)
+ end
+ else
+ report_pages("flushing realpage %s",real)
+ end
+ else
+ report_pages("flushing page")
+ end
end
logs.flush()
end
@@ -5681,6 +6204,13 @@ local function reportbanner(t)
end
end
+local function reportversion(t)
+ local banner = t.banner
+ if banner then
+ t.report(banner)
+ end
+end
+
local function reporthelp(t,...)
local helpinfo = t.helpinfo
if type(helpinfo) == "string" then
@@ -5707,6 +6237,7 @@ function logs.application(t)
t.report = logs.reporter(t.name)
t.help = function(...) reportbanner(t) ; reporthelp(t,...) ; reportinfo(t) end
t.identify = function() reportbanner(t) end
+ t.version = function() reportversion(t) end
return t
end
@@ -5772,6 +6303,11 @@ else
end
end
+-- do we still need io.flush then?
+
+io.stdout:setvbuf('no')
+io.stderr:setvbuf('no')
+
end -- of closure
@@ -6277,6 +6813,10 @@ if not modules then modules = { } end modules ['lxml-tab'] = {
-- stripping spaces from e.g. cont-en.xml saves .2 sec runtime so it's not worth the
-- trouble
+-- todo: when serializing optionally remap named entities to hex (if known in char-ent.lua)
+-- maybe when letter -> utf, else name .. then we need an option to the serializer .. a bit
+-- of work so we delay this till we clean up
+
local trace_entities = false trackers.register("xml.entities", function(v) trace_entities = v end)
local report_xml = logs and logs.reporter("xml","core") or function(...) print(format(...)) end
@@ -6301,10 +6841,11 @@ xml = xml or { }
local xml = xml
+local utf = unicode.utf8
local concat, remove, insert = table.concat, table.remove, table.insert
local type, next, setmetatable, getmetatable, tonumber = type, next, setmetatable, getmetatable, tonumber
local format, lower, find, match, gsub = string.format, string.lower, string.find, string.match, string.gsub
-local utfchar = unicode.utf8.char
+local utfchar, utffind, utfgsub = utf.char, utf.find, utf.gsub
local lpegmatch = lpeg.match
local P, S, R, C, V, C, Cs = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.V, lpeg.C, lpeg.Cs
@@ -6417,9 +6958,22 @@ element.</p>
local nsremap, resolvens = xml.xmlns, xml.resolvens
-local stack, top, dt, at, xmlns, errorstr, entities = { }, { }, { }, { }, { }, nil, { }
-local strip, cleanup, utfize, resolve, resolve_predefined, unify_predefined = false, false, false, false, false, false
-local dcache, hcache, acache = { }, { }, { }
+local stack = { }
+local top = { }
+local dt = { }
+local at = { }
+local xmlns = { }
+local errorstr = nil
+local entities = { }
+local strip = false
+local cleanup = false
+local utfize = false
+local resolve_predefined = false
+local unify_predefined = false
+
+local dcache = { }
+local hcache = { }
+local acache = { }
local mt = { }
@@ -6585,7 +7139,7 @@ local predefined_unified = {
[42] = "&quot;",
[47] = "&apos;",
[74] = "&lt;",
- [76] = "&gr;",
+ [76] = "&gt;",
}
local predefined_simplified = {
@@ -6596,6 +7150,58 @@ local predefined_simplified = {
[76] = ">", gt = ">",
}
+local nofprivates = 0xF0000 -- shared but seldom used
+
+local privates_u = { -- unescaped
+ [ [[&]] ] = "&amp;",
+ [ [["]] ] = "&quot;",
+ [ [[']] ] = "&apos;",
+ [ [[<]] ] = "&lt;",
+ [ [[>]] ] = "&gt;",
+}
+
+local privates_p = {
+}
+
+local privates_n = {
+ -- keeps track of defined ones
+}
+
+local function escaped(s)
+ if s == "" then
+ return ""
+ else -- if utffind(s,privates_u) then
+ return (utfgsub(s,".",privates_u))
+ -- else
+ -- return s
+ end
+end
+
+local function unescaped(s)
+ local p = privates_n[s]
+ if not p then
+ nofprivates = nofprivates + 1
+ p = utfchar(nofprivates)
+ privates_n[s] = p
+ s = "&" .. s .. ";" -- todo: use char-ent to map to hex
+ privates_u[p] = s
+ privates_p[p] = s
+ end
+ return p
+end
+
+local function unprivatized(s,resolve)
+ if s == "" then
+ return ""
+ else
+ return (utfgsub(s,".",privates_p))
+ end
+end
+
+xml.privatetoken = unescaped
+xml.unprivatized = unprivatized
+xml.privatecodes = privates_n
+
local function handle_hex_entity(str)
local h = hcache[str]
if not h then
@@ -6637,7 +7243,7 @@ local function handle_dec_entity(str)
if not n then
report_xml("utfize, ignoring dec entity &#%s;",str)
elseif trace_entities then
- report_xml("utfize, converting dec entity &#%s; into %s",str,h)
+ report_xml("utfize, converting dec entity &#%s; into %s",str,d)
end
else
if trace_entities then
@@ -6658,34 +7264,44 @@ local function handle_any_entity(str)
if not a then
a = resolve_predefined and predefined_simplified[str]
if a then
- -- one of the predefined
- elseif type(resolve) == "function" then
- a = resolve(str) or entities[str]
- else
- a = entities[str]
- end
- if a then
if trace_entities then
- report_xml("resolved entity &%s; -> %s (internal)",str,a)
+ report_xml("resolved entity &%s; -> %s (predefined)",str,a)
end
- a = lpegmatch(parsedentity,a) or a
else
- local unknown_any_entity = placeholders.unknown_any_entity
- if unknown_any_entity then
- a = unknown_any_entity(str) or ""
+ if type(resolve) == "function" then
+ a = resolve(str) or entities[str]
+ else
+ a = entities[str]
end
if a then
+ if type(a) == "function" then
+ if trace_entities then
+ report_xml("expanding entity &%s; (function)",str)
+ end
+ a = a(str) or ""
+ end
+ a = lpegmatch(parsedentity,a) or a -- for nested
if trace_entities then
- report_xml("resolved entity &%s; -> %s (external)",str,a)
+ report_xml("resolved entity &%s; -> %s (internal)",str,a)
end
else
- if trace_entities then
- report_xml("keeping entity &%s;",str)
+ local unknown_any_entity = placeholders.unknown_any_entity
+ if unknown_any_entity then
+ a = unknown_any_entity(str) or ""
end
- if str == "" then
- a = "&error;"
+ if a then
+ if trace_entities then
+ report_xml("resolved entity &%s; -> %s (external)",str,a)
+ end
else
- a = "&" .. str .. ";"
+ if trace_entities then
+ report_xml("keeping entity &%s;",str)
+ end
+ if str == "" then
+ a = "&error;"
+ else
+ a = "&" .. str .. ";"
+ end
end
end
end
@@ -6700,18 +7316,25 @@ local function handle_any_entity(str)
else
local a = acache[str]
if not a then
- if trace_entities then
- report_xml("found entity &%s;",str)
- end
a = resolve_predefined and predefined_simplified[str]
if a then
-- one of the predefined
acache[str] = a
+ if trace_entities then
+ report_xml("entity &%s; becomes %s",str,tostring(a))
+ end
elseif str == "" then
+ if trace_entities then
+ report_xml("invalid entity &%s;",str)
+ end
a = "&error;"
acache[str] = a
else
- a = "&" .. str .. ";"
+ if trace_entities then
+ report_xml("entity &%s; is made private",str)
+ end
+ -- a = "&" .. str .. ";"
+ a = unescaped(str)
acache[str] = a
end
end
@@ -6761,7 +7384,7 @@ local value = (squote * Cs((entity + (1 - squote))^0) * squote) + (dq
local endofattributes = slash * close + close -- recovery of flaky html
local whatever = space * name * optionalspace * equal
-local wrongvalue = C(P(1-whatever-close)^1 + P(1-close)^1) / attribute_value_error
+----- wrongvalue = C(P(1-whatever-close)^1 + P(1-close)^1) / attribute_value_error
----- wrongvalue = C(P(1-whatever-endofattributes)^1 + P(1-endofattributes)^1) / attribute_value_error
----- wrongvalue = C(P(1-space-endofattributes)^1) / attribute_value_error
local wrongvalue = Cs(P(entity + (1-space-endofattributes))^1) / attribute_value_error
@@ -6796,6 +7419,8 @@ local function normalentity(k,v ) entities[k] = v end
local function systementity(k,v,n) entities[k] = v end
local function publicentity(k,v,n) entities[k] = v end
+-- todo: separate dtd parser
+
local begindoctype = open * P("!DOCTYPE")
local enddoctype = close
local beginset = P("[")
@@ -6803,17 +7428,22 @@ local endset = P("]")
local doctypename = C((1-somespace-close)^0)
local elementdoctype = optionalspace * P("<!ELEMENT") * (1-close)^0 * close
+local basiccomment = begincomment * ((1 - endcomment)^0) * endcomment
+
local normalentitytype = (doctypename * somespace * value)/normalentity
local publicentitytype = (doctypename * somespace * P("PUBLIC") * somespace * value)/publicentity
local systementitytype = (doctypename * somespace * P("SYSTEM") * somespace * value * somespace * P("NDATA") * somespace * doctypename)/systementity
local entitydoctype = optionalspace * P("<!ENTITY") * somespace * (systementitytype + publicentitytype + normalentitytype) * optionalspace * close
-local doctypeset = beginset * optionalspace * P(elementdoctype + entitydoctype + space)^0 * optionalspace * endset
+-- we accept comments in doctypes
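+--
+-- For instance (an illustration added here, not from the original source) a
+-- doctype such as the following one is now accepted, because basiccomment is
+-- part of the set:
+--
+-- <!DOCTYPE foo [
+--   <!-- internal note -->
+--   <!ENTITY bar "baz">
+-- ]>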
+
+local doctypeset = beginset * optionalspace * P(elementdoctype + entitydoctype + basiccomment + space)^0 * optionalspace * endset
local definitiondoctype= doctypename * somespace * doctypeset
local publicdoctype = doctypename * somespace * P("PUBLIC") * somespace * value * somespace * value * somespace * doctypeset
local systemdoctype = doctypename * somespace * P("SYSTEM") * somespace * value * somespace * doctypeset
local simpledoctype = (1-close)^1 -- * balanced^0
local somedoctype = C((somespace * (publicdoctype + systemdoctype + definitiondoctype + simpledoctype) * optionalspace)^0)
local instruction = (spacing * begininstruction * someinstruction * endinstruction) / function(...) add_special("@pi@",...) end
local comment = (spacing * begincomment * somecomment * endcomment ) / function(...) add_special("@cm@",...) end
@@ -6845,17 +7475,30 @@ local grammar_unparsed_text = P { "preamble",
children = unparsedtext + V("parent") + emptyelement + comment + cdata + instruction,
}
--- maybe we will add settinsg to result as well
+-- maybe we will add settings to result as well
-local function xmlconvert(data, settings)
- settings = settings or { } -- no_root strip_cm_and_dt given_entities parent_root error_handler
- strip = settings.strip_cm_and_dt
- utfize = settings.utfize_entities
- resolve = settings.resolve_entities
+local function _xmlconvert_(data, settings)
+ settings = settings or { } -- no_root strip_cm_and_dt given_entities parent_root error_handler
+ --
+ strip = settings.strip_cm_and_dt
+ utfize = settings.utfize_entities
+ resolve = settings.resolve_entities
resolve_predefined = settings.resolve_predefined_entities -- in case we have escaped entities
- unify_predefined = settings.unify_predefined_entities -- &#038; -> &amp;
- cleanup = settings.text_cleanup
- stack, top, at, xmlns, errorstr, entities = { }, { }, { }, { }, nil, settings.entities or { }
+ unify_predefined = settings.unify_predefined_entities -- &#038; -> &amp;
+ cleanup = settings.text_cleanup
+ entities = settings.entities or { }
+ --
+ if utfize == nil then
+ settings.utfize_entities = true
+ utfize = true
+ end
+ if resolve_predefined == nil then
+ settings.resolve_predefined_entities = true
+ resolve_predefined = true
+ end
+ --
+ --
+ stack, top, at, xmlns, errorstr = { }, { }, { }, { }, nil
acache, hcache, dcache = { }, { }, { } -- not stored
reported_attribute_errors = { }
if settings.parent_root then
@@ -6893,7 +7536,7 @@ local function xmlconvert(data, settings)
else
errorhandler = errorhandler or xml.errorhandler
if errorhandler then
- xml.errorhandler("load",errorstr)
+ xml.errorhandler(format("load error: %s",errorstr))
end
end
else
@@ -6907,7 +7550,7 @@ local function xmlconvert(data, settings)
local v = rdt[k]
if type(v) == "table" and not v.special then -- always table -)
result.ri = k -- rootindex
-v.__p__ = result -- new, experiment, else we cannot go back to settings, we need to test this !
+ v.__p__ = result -- new, experiment, else we cannot go back to settings, we need to test this !
break
end
end
@@ -6915,16 +7558,42 @@ v.__p__ = result -- new, experiment, else we cannot go back to settings, we nee
if errorstr and errorstr ~= "" then
result.error = true
end
+ result.statistics = {
+ entities = {
+ decimals = dcache,
+ hexadecimals = hcache,
+ names = acache,
+ }
+ }
+ strip, utfize, resolve, resolve_predefined = nil, nil, nil, nil
+ unify_predefined, cleanup, entities = nil, nil, nil
+ stack, top, at, xmlns, errorstr = nil, nil, nil, nil, nil
+ acache, hcache, dcache = nil, nil, nil
+ reported_attribute_errors, mt, errorhandler = nil, nil, nil
return result
end
+-- Because we can have a crash (stack issues) with faulty xml, we wrap this one
+-- in a protector:
+
+function xmlconvert(data,settings)
+ local ok, result = pcall(function() return _xmlconvert_(data,settings) end)
+ if ok then
+ return result
+ else
+ return _xmlconvert_("")
+ end
+end
+
xml.convert = xmlconvert
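+
+-- A minimal illustration (added, not from the original): faulty input no
+-- longer raises an error but yields a tree with the error flag set.
+--
+-- local t = xml.convert("<root><a>ok</a>") -- unbalanced on purpose
+-- print(xml.is_valid(t))                   -- false, t.error is true
+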
-function xml.inheritedconvert(data,xmldata)
+function xml.inheritedconvert(data,xmldata) -- xmldata is parent
local settings = xmldata.settings
- settings.parent_root = xmldata -- to be tested
+ if settings then
+ settings.parent_root = xmldata -- to be tested
+ end
-- settings.no_root = true
- local xc = xmlconvert(data,settings)
+ local xc = xmlconvert(data,settings) -- hm, we might need to locate settings
-- xc.settings = nil
-- xc.entities = nil
-- xc.special = nil
@@ -6953,7 +7622,7 @@ function xml.is_valid(root)
return root and not root.error
end
-xml.errorhandler = report
+xml.errorhandler = report_xml
--[[ldx--
<p>We cannot load an <l n='lpeg'/> from a filehandle so we need to load
@@ -7032,18 +7701,15 @@ alternative.</p>
function xml.checkbom(root) -- can be made faster
if root.ri then
- local dt, found = root.dt, false
+ local dt = root.dt
for k=1,#dt do
local v = dt[k]
if type(v) == "table" and v.special and v.tg == "@pi@" and find(v.dt[1],"xml.*version=") then
- found = true
- break
+ return
end
end
- if not found then
- insert(dt, 1, { special=true, ns="", tg="@pi@", dt = { "xml version='1.0' standalone='yes'"} } )
- insert(dt, 2, "\n" )
- end
+ insert(dt, 1, { special=true, ns="", tg="@pi@", dt = { "xml version='1.0' standalone='yes'"} } )
+ insert(dt, 2, "\n" )
end
end
@@ -7054,14 +7720,14 @@ and then handle the lot.</p>
-- new experimental reorganized serialize
-local function verbose_element(e,handlers)
+local function verbose_element(e,handlers) -- options
local handle = handlers.handle
local serialize = handlers.serialize
local ens, etg, eat, edt, ern = e.ns, e.tg, e.at, e.dt, e.rn
local ats = eat and next(eat) and { }
if ats then
for k,v in next, eat do
- ats[#ats+1] = format('%s=%q',k,v)
+ ats[#ats+1] = format('%s=%q',k,escaped(v))
end
end
if ern and trace_entities and ern ~= ens then
@@ -7077,7 +7743,7 @@ local function verbose_element(e,handlers)
for i=1,#edt do
local e = edt[i]
if type(e) == "string" then
- handle(e)
+ handle(escaped(e))
else
serialize(e,handlers)
end
@@ -7098,11 +7764,11 @@ local function verbose_element(e,handlers)
handle("<",etg,">")
end
for i=1,#edt do
- local ei = edt[i]
- if type(ei) == "string" then
- handle(ei)
+ local e = edt[i]
+ if type(e) == "string" then
+ handle(escaped(e)) -- option: hexify escaped entities
else
- serialize(ei,handlers)
+ serialize(e,handlers)
end
end
handle("</",etg,">")
@@ -7137,7 +7803,7 @@ local function verbose_root(e,handlers)
end
local function verbose_text(e,handlers)
- handlers.handle(e)
+ handlers.handle(escaped(e))
end
local function verbose_document(e,handlers)
@@ -7265,20 +7931,33 @@ local result
local xmlfilehandler = newhandlers {
name = "file",
- initialize = function(name) result = io.open(name,"wb") return result end,
- finalize = function() result:close() return true end,
- handle = function(...) result:write(...) end,
+ initialize = function(name)
+ result = io.open(name,"wb")
+ return result
+ end,
+ finalize = function()
+ result:close()
+ return true
+ end,
+ handle = function(...)
+ result:write(...)
+ end,
}
-- no checking on writeability here but not faster either
--
-- local xmlfilehandler = newhandlers {
--- initialize = function(name) io.output(name,"wb") return true end,
--- finalize = function() io.close() return true end,
+-- initialize = function(name)
+-- io.output(name,"wb")
+-- return true
+-- end,
+-- finalize = function()
+-- io.close()
+-- return true
+-- end,
-- handle = io.write,
-- }
-
function xml.save(root,name)
serialize(root,xmlfilehandler,name)
end
@@ -7287,28 +7966,34 @@ local result
local xmlstringhandler = newhandlers {
name = "string",
- initialize = function() result = { } return result end,
- finalize = function() return concat(result) end,
- handle = function(...) result[#result+1] = concat { ... } end
+ initialize = function()
+ result = { }
+ return result
+ end,
+ finalize = function()
+ return concat(result)
+ end,
+ handle = function(...)
+ result[#result+1] = concat { ... }
+ end,
}
local function xmltostring(root) -- 25% overhead due to collecting
- if root then
- if type(root) == 'string' then
- return root
- else -- if next(root) then -- next is faster than type (and >0 test)
- return serialize(root,xmlstringhandler) or ""
- end
+ if not root then
+ return ""
+ elseif type(root) == 'string' then
+ return root
+ else -- if next(root) then -- next is faster than type (and >0 test)
+ return serialize(root,xmlstringhandler) or ""
end
- return ""
end
-local function xmltext(root) -- inline
+local function __tostring(root) -- inline
return (root and xmltostring(root)) or ""
end
initialize_mt = function(root) -- redefinition
- mt = { __tostring = xmltext, __index = root }
+ mt = { __tostring = __tostring, __index = root }
end
xml.defaulthandlers = handlers
@@ -7428,7 +8113,7 @@ xml.tocdata(e,"error")
--ldx]]--
function xml.tocdata(e,wrapper)
- local whatever = xmltostring(e.dt)
+ local whatever = type(e) == "table" and xmltostring(e.dt) or e or ""
if wrapper then
whatever = format("<%s>%s</%s>",wrapper,whatever,wrapper)
end
@@ -7620,8 +8305,8 @@ apply_axis['child'] = function(list)
c = c + 1
collected[c] = dk
dk.ni = k -- refresh
- en = en + 1
- dk.ei = en
+ en = en + 1
+ dk.ei = en
end
end
ll.en = en
@@ -7649,6 +8334,7 @@ local function collect(list,collected,c)
end
return c
end
+
apply_axis['descendant'] = function(list)
local collected, c = { }, 0
for l=1,#list do
@@ -7940,6 +8626,12 @@ local lp_or = P("|") / " or "
local lp_and = P("&") / " and "
local lp_builtin = P (
+ P("text") / "(ll.dt[1] or '')" + -- fragile
+ P("content") / "ll.dt" +
+ -- P("name") / "(ll.ns~='' and ll.ns..':'..ll.tg)" +
+ P("name") / "((ll.ns~='' and ll.ns..':'..ll.tg) or ll.tg)" +
+ P("tag") / "ll.tg" +
+ P("position") / "l" + -- is element in finalizer
P("firstindex") / "1" +
P("lastindex") / "(#ll.__p__.dt or 1)" +
P("firstelement") / "1" +
@@ -7947,15 +8639,11 @@ local lp_builtin = P (
P("first") / "1" +
P("last") / "#list" +
P("rootposition") / "order" +
- P("position") / "l" + -- is element in finalizer
P("order") / "order" +
P("element") / "(ll.ei or 1)" +
P("index") / "(ll.ni or 1)" +
P("match") / "(ll.mi or 1)" +
- P("text") / "(ll.dt[1] or '')" +
- -- P("name") / "(ll.ns~='' and ll.ns..':'..ll.tg)" +
- P("name") / "((ll.ns~='' and ll.ns..':'..ll.tg) or ll.tg)" +
- P("tag") / "ll.tg" +
+ -- P("namespace") / "ll.ns" +
P("ns") / "ll.ns"
) * ((spaces * P("(") * spaces * P(")"))/"")
@@ -7979,11 +8667,11 @@ local lp_function = C(R("az","AZ","__")^1) * P("(") / function(t) -- todo: bett
end
end
-local lparent = lpeg.P("(")
-local rparent = lpeg.P(")")
+local lparent = P("(")
+local rparent = P(")")
local noparent = 1 - (lparent+rparent)
-local nested = lpeg.P{lparent * (noparent + lpeg.V(1))^0 * rparent}
-local value = lpeg.P(lparent * lpeg.C((noparent + nested)^0) * rparent) -- lpeg.P{"("*C(((1-S("()"))+V(1))^0)*")"}
+local nested = P{lparent * (noparent + V(1))^0 * rparent}
+local value = P(lparent * C((noparent + nested)^0) * rparent) -- P{"("*C(((1-S("()"))+V(1))^0)*")"}
local lp_child = Cc("expr.child(ll,'") * R("az","AZ","--","__")^1 * Cc("')")
local lp_number = S("+-") * R("09")^1
@@ -8135,7 +8823,10 @@ local special_1 = P("*") * Cc(register_auto_descendant) * Cc(register_all_nodes
local special_2 = P("/") * Cc(register_auto_self)
local special_3 = P("") * Cc(register_auto_self)
-local pathparser = Ct { "patterns", -- can be made a bit faster by moving pattern outside
+local no_nextcolon = P(-1) + #(1-P(":")) -- newer lpeg needs the P(-1)
+local no_nextlparent = P(-1) + #(1-P("(")) -- newer lpeg needs the P(-1)
+
+local pathparser = Ct { "patterns", -- can be made a bit faster by moving some patterns outside
patterns = spaces * V("protocol") * spaces * (
( V("special") * spaces * P(-1) ) +
@@ -8164,10 +8855,8 @@ local pathparser = Ct { "patterns", -- can be made a bit faster by moving patter
shortcuts = V("shortcuts_a") * (spaces * "/" * spaces * V("shortcuts_a"))^0,
s_descendant_or_self = (P("***/") + P("/")) * Cc(register_descendant_or_self), --- *** is a bonus
- -- s_descendant_or_self = P("/") * Cc(register_descendant_or_self),
s_descendant = P("**") * Cc(register_descendant),
- s_child = P("*") * #(1-P(":")) * Cc(register_child ),
--- s_child = P("*") * #(P("/")+P(-1)) * Cc(register_child ),
+ s_child = P("*") * no_nextcolon * Cc(register_child ),
s_parent = P("..") * Cc(register_parent ),
s_self = P("." ) * Cc(register_self ),
s_root = P("^^") * Cc(register_root ),
@@ -8194,13 +8883,13 @@ local pathparser = Ct { "patterns", -- can be made a bit faster by moving patter
expressions = expression / register_expression,
letters = R("az")^1,
- name = (1-lpeg.S("/[]()|:*!"))^1,
+ name = (1-S("/[]()|:*!"))^1, -- make inline
negate = P("!") * Cc(false),
nodefunction = V("negate") + P("not") * Cc(false) + Cc(true),
nodetest = V("negate") + Cc(true),
nodename = (V("negate") + Cc(true)) * spaces * ((V("wildnodename") * P(":") * V("wildnodename")) + (Cc(false) * V("wildnodename"))),
- wildnodename = (C(V("name")) + P("*") * Cc(false)) * #(1-P("(")),
+ wildnodename = (C(V("name")) + P("*") * Cc(false)) * no_nextlparent,
nodeset = spaces * Ct(V("nodename") * (spaces * P("|") * spaces * V("nodename"))^0) * spaces,
finalizer = (Cb("protocol") * P("/")^-1 * C(V("name")) * arguments * P(-1)) / register_finalizer,
@@ -8364,7 +9053,7 @@ local function profiled_apply(list,parsed,nofparsed,order)
elseif kind == "expression" then
collected = apply_expression(collected,pi.evaluator,order)
elseif kind == "finalizer" then
- collected = pi.finalizer(collected)
+ collected = pi.finalizer(collected) -- no check on # here
p.matched = p.matched + 1
p.finalized = p.finalized + 1
return collected
@@ -8497,9 +9186,9 @@ end
expressions.child = function(e,pattern)
return applylpath(e,pattern) -- todo: cache
end
-expressions.count = function(e,pattern)
+expressions.count = function(e,pattern) -- what if pattern == empty or nil
local collected = applylpath(e,pattern) -- todo: cache
- return (collected and #collected) or 0
+ return pattern and (collected and #collected) or 0
end
-- external
@@ -8508,7 +9197,7 @@ expressions.oneof = function(s,...) -- slow
local t = {...} for i=1,#t do if s == t[i] then return true end end return false
end
expressions.error = function(str)
- xml.errorhandler("unknown function in lpath expression",tostring(str or "?"))
+ xml.errorhandler(format("unknown function in lpath expression: %s",tostring(str or "?")))
return false
end
expressions.undefined = function(s)
@@ -8534,6 +9223,23 @@ expressions.lower = lower
expressions.number = tonumber
expressions.boolean = toboolean
+function expressions.contains(str,pattern)
+ local t = type(str)
+ if t == "string" then
+ if find(str,pattern) then
+ return true
+ end
+ elseif t == "table" then
+ for i=1,#str do
+ local d = str[i]
+ if type(d) == "string" and find(d,pattern) then
+ return true
+ end
+ end
+ end
+ return false
+end
+
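+-- A few illustrative calls (added, not in the original source):
+--
+-- expressions.contains("foobar","foo")           -- true
+-- expressions.contains({ "a", "foobar" },"oba")  -- true
+-- expressions.contains(123,"foo")                -- false
+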
-- user interface
local function traverse(root,pattern,handle)
@@ -8856,10 +9562,12 @@ local xml = xml
local xmlconvert, xmlcopy, xmlname = xml.convert, xml.copy, xml.name
local xmlinheritedconvert = xml.inheritedconvert
local xmlapplylpath = xml.applylpath
+local xmlfilter = xml.filter
local type, setmetatable, getmetatable = type, setmetatable, getmetatable
-local insert, remove, fastcopy = table.insert, table.remove, table.fastcopy
-local gmatch, gsub = string.gmatch, string.gsub
+local insert, remove, fastcopy, concat = table.insert, table.remove, table.fastcopy, table.concat
+local gmatch, gsub, format = string.gmatch, string.gsub, string.format
+local utfbyte = utf.byte
local function report(what,pattern,c,e)
report_xml("%s element '%s' (root: '%s', position: %s, index: %s, pattern: %s)",what,xmlname(e),xmlname(e.__p__),c,e.ni,pattern)
@@ -9004,7 +9712,7 @@ local function xmltoelement(whatever,root)
end
local element
if type(whatever) == "string" then
- element = xmlinheritedconvert(whatever,root)
+ element = xmlinheritedconvert(whatever,root) -- beware, not really a root
else
element = whatever -- we assume a table
end
@@ -9031,18 +9739,30 @@ local function copiedelement(element,newparent)
end
function xml.delete(root,pattern)
- local collected = xmlapplylpath(root,pattern)
- if collected then
- for c=1,#collected do
- local e = collected[c]
- local p = e.__p__
- if p then
- if trace_manipulations then
- report('deleting',pattern,c,e)
+ if not pattern or pattern == "" then
+ local p = root.__p__
+ if p then
+ if trace_manipulations then
+ report('deleting',"--",1,root) -- no collection index in this branch
+ end
+ local d = p.dt
+ remove(d,root.ni)
+ redo_ni(d) -- can be made faster and inlined
+ end
+ else
+ local collected = xmlapplylpath(root,pattern)
+ if collected then
+ for c=1,#collected do
+ local e = collected[c]
+ local p = e.__p__
+ if p then
+ if trace_manipulations then
+ report('deleting',pattern,c,e)
+ end
+ local d = p.dt
+ remove(d,e.ni)
+ redo_ni(d) -- can be made faster and inlined
end
- local d = p.dt
- remove(d,e.ni)
- redo_ni(d) -- can be made faster and inlined
end
end
end
@@ -9105,32 +9825,39 @@ end
local function inject_element(root,pattern,whatever,prepend)
local element = root and xmltoelement(whatever,root)
local collected = element and xmlapplylpath(root,pattern)
- if collected then
- for c=1,#collected do
- local e = collected[c]
- local r = e.__p__
- local d, k, rri = r.dt, e.ni, r.ri
- local edt = (rri and d[rri].dt) or (d and d[k] and d[k].dt)
- if edt then
- local be, af
- local cp = copiedelement(element,e)
- if prepend then
- be, af = cp, edt
- else
- be, af = edt, cp
- end
- local bn = #be
- for i=1,#af do
- bn = bn + 1
- be[bn] = af[i]
- end
- if rri then
- r.dt[rri].dt = be
- else
- d[k].dt = be
- end
- redo_ni(d)
+ local function inject_e(e)
+ local r = e.__p__
+ local d, k, rri = r.dt, e.ni, r.ri
+ local edt = (rri and d[rri].dt) or (d and d[k] and d[k].dt)
+ if edt then
+ local be, af
+ local cp = copiedelement(element,e)
+ if prepend then
+ be, af = cp, edt
+ else
+ be, af = edt, cp
end
+ local bn = #be
+ for i=1,#af do
+ bn = bn + 1
+ be[bn] = af[i]
+ end
+ if rri then
+ r.dt[rri].dt = be
+ else
+ d[k].dt = be
+ end
+ redo_ni(d)
+ end
+ end
+ if not collected then
+ -- nothing
+ elseif collected.tg then
+ -- first or so
+ inject_e(collected)
+ else
+ for c=1,#collected do
+ inject_e(collected[c])
end
end
end
@@ -9138,16 +9865,23 @@ end
local function insert_element(root,pattern,whatever,before) -- todo: element als functie
local element = root and xmltoelement(whatever,root)
local collected = element and xmlapplylpath(root,pattern)
- if collected then
+ local function insert_e(e)
+ local r = e.__p__
+ local d, k = r.dt, e.ni
+ if not before then
+ k = k + 1
+ end
+ insert(d,k,copiedelement(element,r))
+ redo_ni(d)
+ end
+ if not collected then
+ -- nothing
+ elseif collected.tg then
+ -- first or so
+ insert_e(collected)
+ else
for c=1,#collected do
- local e = collected[c]
- local r = e.__p__
- local d, k = r.dt, e.ni
- if not before then
- k = k + 1
- end
- insert(d,k,copiedelement(element,r))
- redo_ni(d)
+ insert_e(collected[c])
end
end
end
@@ -9346,6 +10080,75 @@ function xml.remapname(root, pattern, newtg, newns, newrn)
end
--[[ldx--
+<p>Helper (for q2p).</p>
+--ldx]]--
+
+function xml.cdatatotext(e)
+ local dt = e.dt
+ if #dt == 1 then
+ local first = dt[1]
+ if first.tg == "@cd@" then
+ e.dt = first.dt
+ end
+ else
+ -- maybe option
+ end
+end
+
+xml.builtinentities = table.tohash { "amp", "quot", "apos", "lt", "gt" } -- used often so share
+
+local entities = characters and characters.entities or nil
+local builtinentities = xml.builtinentities
+
+function xml.addentitiesdoctype(root,option) -- we could also have a 'resolve' i.e. inline hex
+ if not entities then
+ require("char-ent")
+ entities = characters.entities
+ end
+ if entities and root and root.tg == "@rt@" and root.statistics then
+ local list = { }
+ local hexify = option == "hexadecimal"
+ for k, v in table.sortedhash(root.statistics.entities.names) do
+ if not builtinentities[k] then
+ local e = entities[k]
+ if not e then
+ e = format("[%s]",k)
+ elseif hexify then
+ e = format("&#%05X;",utfbyte(k))
+ end
+ list[#list+1] = format(" <!ENTITY %s %q >",k,e)
+ end
+ end
+ local dt = root.dt
+ local n = dt[1].tg == "@pi@" and 2 or 1
+ if #list > 0 then
+ insert(dt, n, { "\n" })
+ insert(dt, n, {
+ tg = "@dt@", -- beware, doctype is unparsed
+ dt = { format("Something [\n%s\n] ",concat(list)) },
+ ns = "",
+ special = true,
+ })
+ insert(dt, n, { "\n\n" })
+ else
+ -- insert(dt, n, { table.serialize(root.statistics) })
+ end
+ end
+end
+
+-- local str = [==[
+-- <?xml version='1.0' standalone='yes' ?>
+-- <root>
+-- <a>test &nbsp; test &#123; test</a>
+-- <b><![CDATA[oeps]]></b>
+-- </root>
+-- ]==]
+--
+-- local x = xml.convert(str)
+-- xml.addentitiesdoctype(x,"hexadecimal")
+-- print(x)
+
+--[[ldx--
<p>Here are a few synonyms.</p>
--ldx]]--
@@ -9378,6 +10181,53 @@ xml.remap_tag = xml.remaptag obsolete.remap_tag
xml.remap_name = xml.remapname obsolete.remap_name = xml.remapname
xml.remap_namespace = xml.remapnamespace obsolete.remap_namespace = xml.remapnamespace
+-- new (probably ok)
+
+function xml.cdata(e)
+ if e then
+ local dt = e.dt
+ if dt and #dt == 1 then
+ local first = dt[1]
+ return first.tg == "@cd@" and first.dt[1] or ""
+ end
+ end
+ return ""
+end
+
+function xml.finalizers.xml.cdata(collected)
+ if collected then
+ local e = collected[1]
+ if e then
+ local dt = e.dt
+ if dt and #dt == 1 then
+ local first = dt[1]
+ return first.tg == "@cd@" and first.dt[1] or ""
+ end
+ end
+ end
+ return ""
+end
+
+function xml.insertcomment(e,str,n) -- also insertcdata
+ table.insert(e.dt,n or 1,{
+ tg = "@cm@",
+ ns = "",
+ special = true,
+ at = { },
+ dt = { str },
+ })
+end
+
+function xml.setcdata(e,str) -- also setcomment
+ e.dt = { {
+ tg = "@cd@",
+ ns = "",
+ special = true,
+ at = { },
+ dt = { str },
+ } }
+end
+
end -- of closure
@@ -9391,15 +10241,17 @@ if not modules then modules = { } end modules ['lxml-xml'] = {
license = "see context related readme files"
}
-local concat = string.concat
+local concat = table.concat
+local find = string.find
local xml = xml
-local finalizers = xml.finalizers.xml
-local xmlfilter = xml.filter -- we could inline this one for speed
-local xmltostring = xml.tostring
-local xmlserialize = xml.serialize
-local xmlcollected = xml.collected
+local finalizers = xml.finalizers.xml
+local xmlfilter = xml.filter -- we could inline this one for speed
+local xmltostring = xml.tostring
+local xmlserialize = xml.serialize
+local xmlcollected = xml.collected
+local xmlnewhandlers = xml.newhandlers
local function first(collected) -- wrong ?
return collected and collected[1]
@@ -9413,6 +10265,21 @@ local function all(collected)
return collected
end
+-- local function reverse(collected)
+-- if collected then
+-- local nc = #collected
+-- if nc > 0 then
+-- local reversed, r = { }, 0
+-- for c=nc,1,-1 do
+-- r = r + 1
+-- reversed[r] = collected[c]
+-- end
+-- return reversed
+-- else
+-- return collected
+-- end
+-- end
+-- end
local reverse = table.reversed
@@ -9429,34 +10296,37 @@ local function att(id,name)
end
local function count(collected)
- return (collected and #collected) or 0
+ return collected and #collected or 0
end
local function position(collected,n)
- if collected then
- n = tonumber(n) or 0
- if n < 0 then
- return collected[#collected + n + 1]
- elseif n > 0 then
- return collected[n]
- else
- return collected[1].mi or 0
- end
+ if not collected then
+ return 0
+ end
+ local nc = #collected
+ if nc == 0 then
+ return 0
+ end
+ n = tonumber(n) or 0
+ if n < 0 then
+ return collected[nc + n + 1]
+ elseif n > 0 then
+ return collected[n]
+ else
+ return collected[1].mi or 0
end
end
local function match(collected)
- return (collected and collected[1].mi) or 0 -- match
+ return collected and #collected > 0 and collected[1].mi or 0 -- match
end
local function index(collected)
- if collected then
- return collected[1].ni
- end
+ return collected and #collected > 0 and collected[1].ni or 0 -- 0 is new
end
local function attributes(collected,arguments)
- if collected then
+ if collected and #collected > 0 then
local at = collected[1].at
if arguments then
return at[arguments]
@@ -9467,7 +10337,7 @@ local function attributes(collected,arguments)
end
local function chainattribute(collected,arguments) -- todo: optional levels
- if collected then
+ if collected and #collected > 0 then
local e = collected[1]
while e do
local at = e.at
@@ -9485,108 +10355,169 @@ local function chainattribute(collected,arguments) -- todo: optional levels
return ""
end
-local function raw(collected) -- hybrid
- if collected then
+local function raw(collected) -- hybrid (not much different from text so it might go)
+ if collected and #collected > 0 then
local e = collected[1] or collected
- return (e and xmlserialize(e)) or "" -- only first as we cannot concat function
+ return e and xmltostring(e) or "" -- only first as we cannot concat function
else
return ""
end
end
+--
+
+local xmltexthandler = xmlnewhandlers {
+ name = "string",
+ initialize = function()
+ result = { }
+ return result
+ end,
+ finalize = function()
+ return concat(result)
+ end,
+ handle = function(...)
+ result[#result+1] = concat { ... }
+ end,
+ escape = false,
+}
+
+local function xmltotext(root)
+ local dt = root.dt
+ if not dt then
+ return ""
+ end
+ local nt = #dt -- string or table
+ if nt == 0 then
+ return ""
+ elseif nt == 1 and type(dt[1]) == "string" then
+ return dt[1] -- no escaping of " ' < > &
+ else
+ return xmlserialize(root,xmltexthandler) or ""
+ end
+end
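+
+-- Rough sketch of the effect (added for illustration): for a converted
+-- <a>x &amp; y</a> this helper returns the plain text "x & y" without
+-- re-escaping, whereas xml.raw() keeps the serialized, escaped form.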
+
+--
+
local function text(collected) -- hybrid
- if collected then
- local e = collected[1] or collected
- return (e and xmltostring(e.dt)) or ""
+ if collected then -- no # test here !
+ local e = collected[1] or collected -- why fallback to element, how about cdata
+ return e and xmltotext(e) or ""
else
return ""
end
end
local function texts(collected)
- if collected then
- local t, n = { }, 0
- for c=1,#collected do
- local e = collected[c]
- if e and e.dt then
- n = n + 1
- t[n] = e.dt
- end
+ if not collected then
+ return { } -- why no nil
+ end
+ local nc = #collected
+ if nc == 0 then
+ return { } -- why no nil
+ end
+ local t, n = { }, 0
+ for c=1,nc do
+ local e = collected[c]
+ if e and e.dt then
+ n = n + 1
+ t[n] = e.dt
end
- return t
end
+ return t
end
local function tag(collected,n)
- if collected then
- local c
- if n == 0 or not n then
- c = collected[1]
- elseif n > 1 then
- c = collected[n]
- else
- c = collected[#collected-n+1]
- end
- return c and c.tg
+ if not collected then
+ return
+ end
+ local nc = #collected
+ if nc == 0 then
+ return
end
+ local c
+ if n == 0 or not n then
+ c = collected[1]
+ elseif n > 1 then
+ c = collected[n]
+ else
+ c = collected[nc-n+1]
+ end
+ return c and c.tg
end
local function name(collected,n)
- if collected then
- local c
- if n == 0 or not n then
- c = collected[1]
- elseif n > 1 then
- c = collected[n]
- else
- c = collected[#collected-n+1]
- end
- if c then
- if c.ns == "" then
- return c.tg
- else
- return c.ns .. ":" .. c.tg
- end
- end
+ if not collected then
+ return
+ end
+ local nc = #collected
+ if nc == 0 then
+ return
+ end
+ local c
+ if n == 0 or not n then
+ c = collected[1]
+ elseif n > 1 then
+ c = collected[n]
+ else
+ c = collected[nc-n+1]
+ end
+ if not c then
+ -- sorry
+ elseif c.ns == "" then
+ return c.tg
+ else
+ return c.ns .. ":" .. c.tg
end
end
local function tags(collected,nonamespace)
- if collected then
- local t, n = { }, 0
- for c=1,#collected do
- local e = collected[c]
- local ns, tg = e.ns, e.tg
- n = n + 1
- if nonamespace or ns == "" then
- t[n] = tg
- else
- t[n] = ns .. ":" .. tg
- end
+ if not collected then
+ return
+ end
+ local nc = #collected
+ if nc == 0 then
+ return
+ end
+ local t, n = { }, 0
+ for c=1,nc do
+ local e = collected[c]
+ local ns, tg = e.ns, e.tg
+ n = n + 1
+ if nonamespace or ns == "" then
+ t[n] = tg
+ else
+ t[n] = ns .. ":" .. tg
end
- return t
end
+ return t
end
-local function empty(collected)
- if collected then
- for c=1,#collected do
- local e = collected[c]
- if e then
- local edt = e.dt
- if edt then
- local n = #edt
- if n == 1 then
- local edk = edt[1]
- local typ = type(edk)
- if typ == "table" then
- return false
- elseif edk ~= "" then -- maybe an extra tester for spacing only
- return false
- end
- elseif n > 1 then
+local function empty(collected,spacesonly)
+ if not collected then
+ return true
+ end
+ local nc = #collected
+ if nc == 0 then
+ return true
+ end
+ for c=1,nc do
+ local e = collected[c]
+ if e then
+ local edt = e.dt
+ if edt then
+ local n = #edt
+ if n == 1 then
+ local edk = edt[1]
+ local typ = type(edk)
+ if typ == "table" then
+ return false
+ elseif edk ~= "" then
+ return false
+ elseif spacesonly and not find(edk,"%S") then
return false
end
+ elseif n > 1 then
+ return false
end
end
end
@@ -9641,14 +10572,14 @@ function xml.raw(id,pattern)
end
end
-function xml.text(id,pattern)
+function xml.text(id,pattern) -- brrr either content or element (when cdata)
if pattern then
-- return text(xmlfilter(id,pattern))
local collected = xmlfilter(id,pattern)
- return (collected and xmltostring(collected[1].dt)) or ""
+ return collected and #collected > 0 and xmltotext(collected[1]) or ""
elseif id then
-- return text(id)
- return xmltostring(id.dt) or ""
+ return xmltotext(id) or ""
else
return ""
end
@@ -9656,6 +10587,8 @@ end
xml.content = text
+--
+
function xml.position(id,pattern,n) -- element
return position(xmlfilter(id,pattern),n)
end
@@ -9664,8 +10597,8 @@ function xml.match(id,pattern) -- number
return match(xmlfilter(id,pattern))
end
-function xml.empty(id,pattern)
- return empty(xmlfilter(id,pattern))
+function xml.empty(id,pattern,spacesonly)
+ return empty(xmlfilter(id,pattern),spacesonly)
end
xml.all = xml.filter
@@ -9719,7 +10652,7 @@ if not modules then modules = { } end modules ['data-ini'] = {
license = "see context related readme files",
}
-local gsub, find, gmatch = string.gsub, string.find, string.gmatch
+local gsub, find, gmatch, char = string.gsub, string.find, string.gmatch, string.char
local concat = table.concat
local next, type = next, type
@@ -9781,7 +10714,7 @@ do
local homedir = osgetenv(ostype == "windows" and 'USERPROFILE' or 'HOME') or ''
if not homedir or homedir == "" then
- homedir = string.char(127) -- we need a value, later we wil trigger on it
+ homedir = char(127) -- we need a value, later we will trigger on it
end
homedir = file.collapsepath(homedir)
@@ -9954,7 +10887,7 @@ if not modules then modules = { } end modules ['data-exp'] = {
license = "see context related readme files",
}
-local format, find, gmatch, lower = string.format, string.find, string.gmatch, string.lower
+local format, find, gmatch, lower, char, sub = string.format, string.find, string.gmatch, string.lower, string.char, string.sub
local concat, sort = table.concat, table.sort
local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
local Ct, Cs, Cc, P, C, S = lpeg.Ct, lpeg.Cs, lpeg.Cc, lpeg.P, lpeg.C, lpeg.S
@@ -9974,19 +10907,6 @@ local resolvers = resolvers
-- all, when working on the main resolver code, I don't want to scroll
-- past this every time. See data-obs.lua for the gsub variant.
--- {a,b,c,d}
--- a,b,c/{p,q,r},d
--- a,b,c/{p,q,r}/d/{x,y,z}//
--- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
--- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
--- a{b,c}{d,e}f
--- {a,b,c,d}
--- {a,b,c/{p,q,r},d}
--- {a,b,c/{p,q,r}/d/{x,y,z}//}
--- {a,b,c/{p,q/{x,y,z}},d/{p,q,r}}
--- {a,b,c/{p,q/{x,y,z},w}v,d/{p,q,r}}
--- {$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,.local,}/web2c}
-
local function f_first(a,b)
local t, n = { }, 0
for s in gmatch(b,"[^,]+") do
@@ -10043,12 +10963,9 @@ local function splitpathexpr(str, newlist, validate) -- I couldn't resist lpeggi
str = lpegmatch(stripper_1,str)
if validate then
for s in gmatch(str,"[^,]+") do
- local restoreslashes = false
- if find(s, "//$") then restoreslashes = true end
s = validate(s)
if s then
n = n + 1 ; t[n] = s
- if restoreslashes and not find(s, "//$") then t[n] = t[n] .. '//' end
end
end
else
@@ -10081,41 +10998,56 @@ function resolvers.expandedpathfromlist(pathlist)
return newlist
end
+-- {a,b,c,d}
+-- a,b,c/{p,q,r},d
+-- a,b,c/{p,q,r}/d/{x,y,z}//
+-- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
+-- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
+-- a{b,c}{d,e}f
+-- {a,b,c,d}
+-- {a,b,c/{p,q,r},d}
+-- {a,b,c/{p,q,r}/d/{x,y,z}//}
+-- {a,b,c/{p,q/{x,y,z}},d/{p,q,r}}
+-- {a,b,c/{p,q/{x,y,z},w}v,d/{p,q,r}}
+-- {$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,.local,}/web2c}
+
local cleanup = lpeg.replacer {
{ "!" , "" },
{ "\\" , "/" },
}
-local homedir
-
-function resolvers.cleanpath(str)
- if not homedir then
- homedir = lpegmatch(cleanup,environment.homedir or "")
- if homedir == string.char(127) or homedir == "" or not lfs.isdir(homedir) then
- if trace_expansions then
- report_expansions("no home dir set, ignoring dependent paths")
- end
- function resolvers.cleanpath(str)
- if find(str,"~") then
- return "" -- special case
- else
- return str and lpegmatch(cleanup,str)
- end
- end
- else
- cleanup = lpeg.replacer {
- { "!" , "" },
- { "\\" , "/" },
- { "~" , homedir },
- }
- function resolvers.cleanpath(str)
- return str and lpegmatch(cleanup,str)
+function resolvers.cleanpath(str) -- tricky, maybe only simple paths
+ local doslashes = (P("\\")/"/" + 1)^0
+ local donegation = (P("!") /"" )^0
+ local homedir = lpegmatch(Cs(donegation * doslashes),environment.homedir or "")
+ if homedir == "~" or homedir == "" or not lfs.isdir(homedir) then
+ if trace_expansions then
+ report_expansions("no home dir set, ignoring dependent paths")
+ end
+ function resolvers.cleanpath(str)
+ if not str or find(str,"~") then
+ return "" -- special case
+ else
+ return lpegmatch(cleanup,str)
end
end
+ else
+ local dohome = ((P("~")+P("$HOME"))/homedir)^0
+ local cleanup = Cs(donegation * dohome * doslashes)
+ function resolvers.cleanpath(str)
+ return str and lpegmatch(cleanup,str) or ""
+ end
end
return resolvers.cleanpath(str)
end
+-- print(resolvers.cleanpath(""))
+-- print(resolvers.cleanpath("!"))
+-- print(resolvers.cleanpath("~"))
+-- print(resolvers.cleanpath("~/test"))
+-- print(resolvers.cleanpath("!~/test"))
+-- print(resolvers.cleanpath("~/test~test"))
+
-- This one strips quotes and funny tokens.
local expandhome = P("~") / "$HOME" -- environment.homedir
@@ -10140,8 +11072,8 @@ end
local cache = { }
----- splitter = Ct(lpeg.splitat(S(ostype == "windows" and ";" or ":;"))) -- maybe add ,
-local splitter = Ct(lpeg.splitat(";")) -- as we move towards urls, prefixes and use tables we no longer do :
+----- splitter = lpeg.tsplitat(S(ostype == "windows" and ";" or ":;")) -- maybe add ,
+local splitter = lpeg.tsplitat(";") -- as we move towards urls, prefixes and use tables we no longer do :
local backslashswapper = lpeg.replacer("\\","/")
@@ -10201,13 +11133,22 @@ end
-local weird = P(".")^1 + lpeg.anywhere(S("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t"))
+-- a lot of this caching can be stripped away when we have ssd's everywhere
+--
+-- we could cache all the (sub)paths here if needed
local attributes, directory = lfs.attributes, lfs.dir
+local weird = P(".")^1 + lpeg.anywhere(S("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t"))
+local timer = { }
+local scanned = { }
+local nofscans = 0
+local scancache = { }
+
local function scan(files,spec,path,n,m,r)
- local full = (path == "" and spec) or (spec .. path .. '/')
- local dirs, nofdirs = { }, 0
+ local full = (path == "" and spec) or (spec .. path .. '/')
+ local dirs = { }
+ local nofdirs = 0
for name in directory(full) do
if not lpegmatch(weird,name) then
local mode = attributes(full..name,'mode')
@@ -10245,25 +11186,122 @@ local function scan(files,spec,path,n,m,r)
files, n, m, r = scan(files,spec,dirs[i],n,m,r)
end
end
+ scancache[sub(full,1,-2)] = files
return files, n, m, r
end
-function resolvers.scanfiles(path,branch)
+local fullcache = { }
+
+function resolvers.scanfiles(path,branch,usecache)
+ statistics.starttiming(timer)
+ local realpath = resolvers.resolve(path) -- no shortcut
+ if usecache then
+ local files = fullcache[realpath]
+ if files then
+ if trace_locating then
+ report_expansions("using cached scan of path '%s', branch '%s'",path,branch or path)
+ end
+ return files
+ end
+ end
if trace_locating then
- report_expansions("scanning path '%s', branch '%s'",path, branch or path)
+ report_expansions("scanning path '%s', branch '%s'",path,branch or path)
end
- local realpath = resolvers.resolve(path) -- no shortcut
local files, n, m, r = scan({ },realpath .. '/',"",0,0,0)
- files.__path__ = path -- can be selfautoparent:texmf-whatever
- files.__files__ = n
- files.__directories__ = m
- files.__remappings__ = r
+ files.__path__ = path -- can be selfautoparent:texmf-whatever
+ files.__files__ = n
+ files.__directories__ = m
+ files.__remappings__ = r
if trace_locating then
report_expansions("%s files found on %s directories with %s uppercase remappings",n,m,r)
end
+ if usecache then
+ scanned[#scanned+1] = realpath
+ fullcache[realpath] = files
+ end
+ nofscans = nofscans + 1
+ statistics.stoptiming(timer)
+ return files
+end
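+
+-- Hedged usage note (added; the path below is only an example): with the
+-- third argument set, repeated scans of the same resolved path are served
+-- from fullcache instead of hitting the filesystem again.
+--
+-- local files = resolvers.scanfiles("selfautoparent:texmf-context",nil,true)
+-- local again = resolvers.scanfiles("selfautoparent:texmf-context",nil,true) -- cached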
+
+local function simplescan(files,spec,path) -- first match only, no map and such
+ local full = (path == "" and spec) or (spec .. path .. '/')
+ local dirs = { }
+ local nofdirs = 0
+ for name in directory(full) do
+ if not lpegmatch(weird,name) then
+ local mode = attributes(full..name,'mode')
+ if mode == 'file' then
+ if not files[name] then
+ -- only first match
+ files[name] = path
+ end
+ elseif mode == 'directory' then
+ nofdirs = nofdirs + 1
+ if path ~= "" then
+ dirs[nofdirs] = path..'/'..name
+ else
+ dirs[nofdirs] = name
+ end
+ end
+ end
+ end
+ if nofdirs > 0 then
+ sort(dirs)
+ for i=1,nofdirs do
+ files = simplescan(files,spec,dirs[i])
+ end
+ end
+ return files
+end
+
+local simplecache = { }
+local nofsharedscans = 0
+
+function resolvers.simplescanfiles(path,branch,usecache)
+ statistics.starttiming(timer)
+ local realpath = resolvers.resolve(path) -- no shortcut
+ if usecache then
+ local files = simplecache[realpath]
+ if not files then
+ files = scancache[realpath]
+ if files then
+ nofsharedscans = nofsharedscans + 1
+ end
+ end
+ if files then
+ if trace_locating then
+ report_expansions("using cached scan of path '%s', branch '%s'",path,branch or path)
+ end
+ return files
+ end
+ end
+ if trace_locating then
+ report_expansions("scanning path '%s', branch '%s'",path,branch or path)
+ end
+ local files = simplescan({ },realpath .. '/',"")
+ if trace_locating then
+ report_expansions("%s files found",table.count(files))
+ end
+ if usecache then
+ scanned[#scanned+1] = realpath
+ simplecache[realpath] = files
+ end
+ nofscans = nofscans + 1
+ statistics.stoptiming(timer)
return files
end
+function resolvers.scandata()
+ table.sort(scanned)
+ return {
+ n = nofscans,
+ shared = nofsharedscans,
+ time = statistics.elapsedtime(timer),
+ paths = scanned,
+ }
+end
+
end -- of closure
@@ -10361,12 +11399,12 @@ local relations = allocate { -- todo: handlers also here
mp = {
names = { "mp" },
variable = 'MPINPUTS',
- suffixes = { 'mp' },
+ suffixes = { 'mp', 'mpvi', 'mpiv', 'mpii' },
},
tex = {
names = { "tex" },
variable = 'TEXINPUTS',
- suffixes = { 'tex', "mkiv", "mkiv", "mkii" },
+ suffixes = { 'tex', "mkvi", "mkiv", "mkii" },
},
icc = {
names = { "icc", "icc profile", "icc profiles" },
@@ -10503,7 +11541,7 @@ end
resolvers.updaterelations() -- push this in the metatable -> newindex
local function simplified(t,k)
- return rawget(t,lower(gsub(k," ","")))
+ return k and rawget(t,lower(gsub(k," ",""))) or nil
end
setmetatableindex(formats, simplified)
@@ -10589,6 +11627,7 @@ luatools with a recache feature.</p>
--ldx]]--
local format, lower, gsub, concat = string.format, string.lower, string.gsub, table.concat
+local serialize, serializetofile = table.serialize, table.tofile
local mkdirs, isdir = dir.mkdirs, lfs.isdir
local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
@@ -10651,11 +11690,7 @@ local function identify()
end
elseif not writable and caches.force then
local cacheparent = file.dirname(cachepath)
- -- TH: added 'or true' for deeply buried non-existent caches.
- -- file.is_writable() is not really important here, since this
- -- branch is only checked for as-yet non-existent paths, and
- -- it guards against mkdirs() failing, anyway.
- if file.is_writable(cacheparent) or true then
+ if file.is_writable(cacheparent) and true then -- we go on anyway (needed for mojca's kind of paths)
if not caches.ask or io.ask(format("\nShould I create the cache path %s?",cachepath), "no", { "yes", "no" }) == "yes" then
mkdirs(cachepath)
if isdir(cachepath) and file.is_writable(cachepath) then
@@ -10746,7 +11781,7 @@ function caches.usedpaths()
end
function caches.configfiles()
- return table.concat(resolvers.instance.specification,";")
+ return concat(resolvers.instance.specification,";")
end
function caches.hashed(tree)
@@ -10870,9 +11905,9 @@ function caches.savedata(filepath,filename,data,raw)
end
data.cache_uuid = os.uuid()
if caches.direct then
- file.savedata(tmaname,table.serialize(data,true,saveoptions))
+ file.savedata(tmaname,serialize(data,true,saveoptions))
else
- table.tofile(tmaname,data,true,saveoptions)
+ serializetofile(tmaname,data,true,saveoptions)
end
utilities.lua.compile(tmaname,tmcname)
end
@@ -10939,7 +11974,7 @@ function caches.savecontent(cachename,dataname,content)
content = content,
uuid = os.uuid(),
}
- local ok = io.savedata(luaname,table.serialize(data,true))
+ local ok = io.savedata(luaname,serialize(data,true))
if ok then
if trace_locating then
report_resolvers("category '%s', cachename '%s' saved in '%s'",dataname,cachename,luaname)
@@ -11112,7 +12147,7 @@ if not modules then modules = { } end modules ['data-res'] = {
-- instance but for practical purposes we now avoid this and use a
-- instance variable. We always have one instance active (sort of global).
--- todo: cache:/// home:///
+-- todo: cache:/// home:/// selfautoparent:/// (sometime end 2012)
local format, gsub, find, lower, upper, match, gmatch = string.format, string.gsub, string.find, string.lower, string.upper, string.match, string.gmatch
local concat, insert, sortedkeys = table.concat, table.insert, table.sortedkeys
@@ -11155,7 +12190,58 @@ resolvers.criticalvars = allocate { "SELFAUTOLOC", "SELFAUTODIR", "SELFAUTOPARE
resolvers.luacnfname = 'texmfcnf.lua'
resolvers.luacnfstate = "unknown"
-resolvers.luacnfspec = '{selfautodir:,selfautoparent:};{selfautodir:,selfautoparent:}{/share,}/texmf{-local,};{selfautodir:,selfautoparent:}{/share,}/texmf{-local,}/web2c'
+-- The web2c tex binaries as well as kpse have built-in paths for the configuration
+-- files and there can be a depressing truckload of them. This is actually the weak
+-- spot of a distribution. So we don't want:
+--
+-- resolvers.luacnfspec = '{$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,}/web2c}'
+--
+-- but instead use:
+--
+-- resolvers.luacnfspec = 'selfautoparent:{/texmf{-local,}{,/web2c}}'
+--
+-- which does not make texlive happy as there is a texmf-local tree one level up
+-- (sigh), so we need this. (We can assume web2c as mkiv does not run on older
+-- texlives anyway.)
+--
+-- texlive:
+--
+-- selfautodir:
+-- selfautoparent:
+-- selfautodir:share/texmf-local/web2c
+-- selfautodir:share/texmf/web2c
+-- selfautodir:texmf-local/web2c
+-- selfautodir:texmf/web2c
+-- selfautoparent:share/texmf-local/web2c
+-- selfautoparent:share/texmf/web2c
+-- selfautoparent:texmf-local/web2c
+-- selfautoparent:texmf/web2c
+--
+-- minimals:
+--
+-- home:texmf/web2c
+-- selfautoparent:texmf-local/web2c
+-- selfautoparent:texmf-context/web2c
+-- selfautoparent:texmf/web2c
+
+if this_is_texlive then
+ -- resolvers.luacnfspec = '{selfautodir:,selfautoparent:}{,{/share,}/texmf{-local,}/web2c}'
+ -- resolvers.luacnfspec = '{selfautodir:{/share,}/texmf-local/web2c,selfautoparent:{/share,}/texmf{-local,}/web2c}'
+ -- resolvers.luacnfspec = 'selfautodir:/texmf-local/web2c;selfautoparent:/texmf{-local,}/web2c'
+ resolvers.luacnfspec = 'selfautodir:;selfautoparent:;{selfautodir:,selfautoparent:}{/share,}/texmf{-local,}/web2c'
+else
+ resolvers.luacnfspec = 'home:texmf/web2c;selfautoparent:texmf{-local,-context,}/web2c'
+end
+
+-- which (as we want users to use the web2c path) can be simplified to this:
+--
+-- if environment and environment.ownpath and string.find(environment.ownpath,"[\\/]texlive[\\/]") then
+-- resolvers.luacnfspec = 'selfautodir:/texmf-local/web2c,selfautoparent:/texmf-local/web2c,selfautoparent:/texmf/web2c'
+-- else
+-- resolvers.luacnfspec = 'selfautoparent:/texmf-local/web2c,selfautoparent:/texmf/web2c'
+-- end
+
+
local unset_variable = "unset"
@@ -11177,7 +12263,7 @@ local instance = resolvers.instance or nil -- the current one (fast access)
function resolvers.setenv(key,value,raw)
if instance then
-- this one will be consulted first when we stay inside
- -- the current environment
+ -- the current environment (prefixes are not resolved here)
instance.environment[key] = value
-- we feed back into the environment, and as this is used
-- by other applications (via os.execute) we need to make
@@ -11257,6 +12343,7 @@ function resolvers.newinstance() -- todo: all vars will become lowercase and alp
remember = true,
diskcache = true,
renewcache = false,
+ renewtree = false,
loaderror = false,
savelists = true,
pattern = nil, -- lists
@@ -11348,7 +12435,7 @@ local function makepathexpression(str)
end
end
-local function reportcriticalvariables()
+local function reportcriticalvariables(cnfspec)
if trace_locating then
for i=1,#resolvers.criticalvars do
local k = resolvers.criticalvars[i]
@@ -11356,6 +12443,14 @@ local function reportcriticalvariables()
report_resolving("variable '%s' set to '%s'",k,v)
end
report_resolving()
+ if cnfspec then
+ if type(cnfspec) == "table" then
+ report_resolving("using configuration specification '%s'",concat(cnfspec,","))
+ else
+ report_resolving("using configuration specification '%s'",cnfspec)
+ end
+ end
+ report_resolving()
end
reportcriticalvariables = function() end
end
@@ -11370,7 +12465,7 @@ local function identify_configuration_files()
else
resolvers.luacnfstate = "environment"
end
- reportcriticalvariables()
+ reportcriticalvariables(cnfspec)
local cnfpaths = expandedpathfromlist(resolvers.splitpath(cnfspec))
local luacnfname = resolvers.luacnfname
for i=1,#cnfpaths do
@@ -11406,6 +12501,19 @@ local function load_configuration_files()
if blob then
local setups = instance.setups
local data = blob()
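+ -- A configuration file can point to a parent file whose settings act as
+ -- defaults for the local one. An illustrative (hypothetical) texmfcnf.lua
+ -- could look like:
+ --
+ -- return {
+ -- parent = "../web2c/texmfcnf.lua",
+ -- content = { TEXMFLOCAL = "selfautoparent:texmf-local" },
+ -- }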
+ local parent = data and data.parent
+ if parent then
+ local filename = filejoin(pathname,parent)
+ local realname = resolvers.resolve(filename) -- no shortcut
+ local blob = loadfile(realname)
+ if blob then
+ local parentdata = blob()
+ if parentdata then
+ report_resolving("loading configuration file '%s'",filename)
+ data = table.merged(parentdata,data)
+ end
+ end
+ end
data = data and data.content
if data then
if trace_locating then
@@ -11495,11 +12603,6 @@ local function locate_file_databases()
local runtime = stripped == path
path = resolvers.cleanpath(path)
local spec = resolvers.splitmethod(stripped)
- -- TH Original did this the other way around (elseif and if tests
- -- reversed) but then the else branch was never reached, as 'runtime'
- -- would effectively be ignored. In turn, that meant that the wrong
- -- locator method was used, such that tree: and file: were treated
- -- identically (cached and no runtime search).
if runtime and (spec.noscheme or spec.scheme == "file") then
stripped = "tree:///" .. stripped
elseif spec.scheme == "cache" or spec.scheme == "file" then
@@ -11507,7 +12610,7 @@ local function locate_file_databases()
end
if trace_locating then
if runtime then
- report_resolving("locating list of '%s' (runtime)",path)
+ report_resolving("locating list of '%s' (runtime) (%s)",path,stripped)
else
report_resolving("locating list of '%s' (cached)",path)
end
@@ -11551,6 +12654,39 @@ local function save_file_databases() -- will become cachers
end
end
+function resolvers.renew(hashname)
+ if hashname and hashname ~= "" then
+ local expanded = resolvers.expansion(hashname) or ""
+ if expanded ~= "" then
+ if trace_locating then
+ report_resolving("identifying tree '%s' from '%s'",expanded,hashname)
+ end
+ hashname = expanded
+ else
+ if trace_locating then
+ report_resolving("identifying tree '%s'",hashname)
+ end
+ end
+ local realpath = resolvers.resolve(hashname)
+ if lfs.isdir(realpath) then
+ if trace_locating then
+ report_resolving("using path '%s'",realpath)
+ end
+ methodhandler('generators',hashname)
+ -- could be shared
+ local content = instance.files[hashname]
+ caches.collapsecontent(content)
+ if trace_locating then
+ report_resolving("saving tree '%s'",hashname)
+ end
+ caches.savecontent(hashname,"files",content)
+ -- till here
+ else
+ report_resolving("invalid path '%s'",realpath)
+ end
+ end
+end
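+
+-- Hedged note (the argument shown is only an example): renew() rescans one
+-- tree and rewrites its "files" cache, for instance:
+--
+-- resolvers.renew("selfautoparent:texmf-local")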
+
local function load_databases()
locate_file_databases()
if instance.diskcache and not instance.renewcache then
@@ -11838,6 +12974,7 @@ local function collect_files(names)
if dname == "" or find(dname,"^%.") then
dname = false
else
+dname = gsub(dname,"*","%.*")
dname = "/" .. dname .. "$"
end
local hashes = instance.hashes
@@ -11897,9 +13034,22 @@ local function collect_files(names)
return noffiles > 0 and filelist or nil
end
-function resolvers.registerintrees(name)
- if not find(name,"^%.") then
- instance.foundintrees[name] = (instance.foundintrees[name] or 0) + 1 -- maybe only one
+local fit = { }
+
+function resolvers.registerintrees(filename,format,filetype,usedmethod,foundname)
+ local foundintrees = instance.foundintrees
+ if usedmethod == "direct" and filename == foundname and fit[foundname] then
+ -- just an extra lookup after a test on presence
+ else
+ local t = {
+ filename = filename,
+ format = format ~= "" and format or nil,
+ filetype = filetype ~= "" and filetype or nil,
+ usedmethod = usedmethod,
+ foundname = foundname,
+ }
+ fit[foundname] = t
+ foundintrees[#foundintrees+1] = t
end
end
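+
+-- Each entry in instance.foundintrees is now a small record instead of a
+-- counter; roughly (the values below are made up for illustration):
+--
+-- {
+-- filename = "context.mkiv",
+-- format = "tex",
+-- filetype = "tex",
+-- usedmethod = "qualified",
+-- foundname = "/some/tree/tex/context/base/context.mkiv",
+-- }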
@@ -11919,297 +13069,423 @@ end
local preparetreepattern = Cs((P(".")/"%%." + P("-")/"%%-" + P(1))^0 * Cc("$"))
--- this one is split in smaller functions but it needs testing
+-- -- -- begin of main file search routing -- -- -- needs checking as previous has been patched
-local function collect_instance_files(filename,askedformat,allresults) -- todo : plugin (scanners, checkers etc)
- local result = { }
- local stamp = nil
- askedformat = askedformat or ""
- filename = collapsepath(filename)
- -- speed up / beware: format problem
- if instance.remember and not allresults then
- stamp = filename .. "--" .. askedformat
- if instance.found[stamp] then
+local collect_instance_files
+
+local function find_analyze(filename,askedformat,allresults)
+ local filetype, wantedfiles, ext = '', { }, fileextname(filename)
+ -- too tricky as filename can be bla.1.2.3:
+ --
+ -- if not suffixmap[ext] then
+ -- wantedfiles[#wantedfiles+1] = filename
+ -- end
+ wantedfiles[#wantedfiles+1] = filename
+ if askedformat == "" then
+ if ext == "" or not suffixmap[ext] then
+ local defaultsuffixes = resolvers.defaultsuffixes
+ for i=1,#defaultsuffixes do
+ local forcedname = filename .. '.' .. defaultsuffixes[i]
+ wantedfiles[#wantedfiles+1] = forcedname
+ filetype = resolvers.formatofsuffix(forcedname)
+ if trace_locating then
+ report_resolving("forcing filetype '%s'",filetype)
+ end
+ end
+ else
+ filetype = resolvers.formatofsuffix(filename)
if trace_locating then
- report_resolving("remembered file '%s'",filename)
+ report_resolving("using suffix based filetype '%s'",filetype)
end
- resolvers.registerintrees(filename) -- for tracing used files
- return instance.found[stamp]
end
- end
- if not dangerous[askedformat] then
- if isreadable(filename) then
- if trace_detail then
- report_resolving("file '%s' found directly",filename)
- end
- if stamp then
- instance.found[stamp] = { filename }
+ else
+ if ext == "" or not suffixmap[ext] then
+ local format_suffixes = suffixes[askedformat]
+ if format_suffixes then
+ for i=1,#format_suffixes do
+ wantedfiles[#wantedfiles+1] = filename .. "." .. format_suffixes[i]
+ end
end
- return { filename }
+ end
+ filetype = askedformat
+ if trace_locating then
+ report_resolving("using given filetype '%s'",filetype)
end
end
+ return filetype, wantedfiles
+end
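+
+-- Hedged sketch of the behaviour (added): for find_analyze("foo","") the
+-- wantedfiles list starts with "foo" itself, followed by one candidate per
+-- entry in resolvers.defaultsuffixes (e.g. "foo.tex"), and filetype ends up
+-- as the format of the last forced suffix; with an explicit format the
+-- suffixes of that format are appended instead.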
+
+local function find_direct(filename,allresults)
+ if not dangerous[askedformat] and isreadable(filename) then
+ if trace_detail then
+ report_resolving("file '%s' found directly",filename)
+ end
+ return "direct", { filename }
+ end
+end
+
+local function find_wildcard(filename,allresults)
if find(filename,'%*') then
if trace_locating then
report_resolving("checking wildcard '%s'", filename)
end
- result = resolvers.findwildcardfiles(filename) -- we can use th elocal
- elseif file.is_qualified_path(filename) then
- if isreadable(filename) then
- if trace_locating then
- report_resolving("qualified name '%s'", filename)
- end
- result = { filename }
- else
- local forcedname, ok, suffix = "", false, fileextname(filename)
- if suffix == "" then -- why
- local format_suffixes = askedformat == "" and resolvers.defaultsuffixes or suffixes[askedformat]
- if format_suffixes then
- for i=1,#format_suffixes do
- local s = format_suffixes[i]
- forcedname = filename .. "." .. s
- if isreadable(forcedname) then
- if trace_locating then
- report_resolving("no suffix, forcing format filetype '%s'", s)
- end
- result, ok = { forcedname }, true
- break
- end
- end
- end
- end
- if not ok and suffix ~= "" then
- -- try to find in tree (no suffix manipulation), here we search for the
- -- matching last part of the name
- local basename = filebasename(filename)
- local pattern = lpegmatch(preparetreepattern,filename)
- -- messy .. to be sorted out
- local savedformat = askedformat
- local format = savedformat or ""
- if format == "" then
- askedformat = resolvers.formatofsuffix(suffix)
- end
- if not format then
- askedformat = "othertextfiles" -- kind of everything, maybe texinput is better
- end
- --
- if basename ~= filename then
- local resolved = collect_instance_files(basename,askedformat,allresults)
- if #result == 0 then -- shouldn't this be resolved ?
- local lowered = lower(basename)
- if filename ~= lowered then
- resolved = collect_instance_files(lowered,askedformat,allresults)
- end
- end
- resolvers.format = savedformat
- --
- for r=1,#resolved do
- local rr = resolved[r]
- if find(rr,pattern) then
- result[#result+1], ok = rr, true
- end
- end
- end
- -- a real wildcard:
- --
- -- if not ok then
- -- local filelist = collect_files({basename})
- -- for f=1,#filelist do
- -- local ff = filelist[f][3] or ""
- -- if find(ff,pattern) then
- -- result[#result+1], ok = ff, true
- -- end
- -- end
- -- end
- end
- if not ok and trace_locating then
- report_resolving("qualified name '%s'", filename)
- end
+ local method, result = resolvers.findwildcardfiles(filename)
+ if result then
+ return "wildcard", result
end
- else
- -- search spec
- local filetype, done, wantedfiles, ext = '', false, { }, fileextname(filename)
- -- -- tricky as filename can be bla.1.2.3
- -- if not suffixmap[ext] then --- probably needs to be done elsewhere too
- -- wantedfiles[#wantedfiles+1] = filename
- -- end
- wantedfiles[#wantedfiles+1] = filename
- if askedformat == "" then
- if ext == "" or not suffixmap[ext] then
- local defaultsuffixes = resolvers.defaultsuffixes
- for i=1,#defaultsuffixes do
- local forcedname = filename .. '.' .. defaultsuffixes[i]
- wantedfiles[#wantedfiles+1] = forcedname
- filetype = resolvers.formatofsuffix(forcedname)
+ end
+end
+
+local function find_qualified(filename,allresults) -- this one will be split too
+ if not file.is_qualified_path(filename) then
+ return
+ end
+ if trace_locating then
+ report_resolving("checking qualified name '%s'", filename)
+ end
+ if isreadable(filename) then
+ if trace_detail then
+ report_resolving("qualified file '%s' found", filename)
+ end
+ return "qualified", { filename }
+ end
+ if trace_detail then
+ report_resolving("locating qualified file '%s'", filename)
+ end
+ local forcedname, suffix = "", fileextname(filename)
+ if suffix == "" then -- why
+ local format_suffixes = askedformat == "" and resolvers.defaultsuffixes or suffixes[askedformat]
+ if format_suffixes then
+ for i=1,#format_suffixes do
+ local s = format_suffixes[i]
+ forcedname = filename .. "." .. s
+ if isreadable(forcedname) then
if trace_locating then
- report_resolving("forcing filetype '%s'",filetype)
+ report_resolving("no suffix, forcing format filetype '%s'", s)
end
+ return "qualified", { forcedname }
end
- else
- filetype = resolvers.formatofsuffix(filename)
- if trace_locating then
- report_resolving("using suffix based filetype '%s'",filetype)
+ end
+ end
+ end
+ if suffix and suffix ~= "" then
+ -- try to find in tree (no suffix manipulation), here we search for the
+ -- matching last part of the name
+ local basename = filebasename(filename)
+ local pattern = lpegmatch(preparetreepattern,filename)
+ -- messy .. to be sorted out
+ local savedformat = askedformat
+ local format = savedformat or ""
+ if format == "" then
+ askedformat = resolvers.formatofsuffix(suffix)
+ end
+ if not format then
+ askedformat = "othertextfiles" -- kind of everything, maybe all
+ end
+ --
+ if basename ~= filename then
+ local resolved = collect_instance_files(basename,askedformat,allresults)
+ if #resolved == 0 then
+ local lowered = lower(basename)
+ if filename ~= lowered then
+ resolved = collect_instance_files(lowered,askedformat,allresults)
end
end
- else
- if ext == "" or not suffixmap[ext] then
- local format_suffixes = suffixes[askedformat]
- if format_suffixes then
- for i=1,#format_suffixes do
- wantedfiles[#wantedfiles+1] = filename .. "." .. format_suffixes[i]
+ resolvers.format = savedformat
+ --
+ if #resolved > 0 then
+ local result = { }
+ for r=1,#resolved do
+ local rr = resolved[r]
+ if find(rr,pattern) then
+ result[#result+1] = rr
end
end
+ if #result > 0 then
+ return "qualified", result
+ end
end
- filetype = askedformat
- if trace_locating then
- report_resolving("using given filetype '%s'",filetype)
- end
end
- local typespec = resolvers.variableofformat(filetype)
- local pathlist = resolvers.expandedpathlist(typespec)
- if not pathlist or #pathlist == 0 then
- -- no pathlist, access check only / todo == wildcard
- if trace_detail then
- report_resolving("checking filename '%s', filetype '%s', wanted files '%s'",filename, filetype or '?',concat(wantedfiles," | "))
+ -- a real wildcard:
+ --
+ -- local filelist = collect_files({basename})
+ -- result = { }
+ -- for f=1,#filelist do
+ -- local ff = filelist[f][3] or ""
+ -- if find(ff,pattern) then
+ -- result[#result+1], ok = ff, true
+ -- end
+ -- end
+ -- if #result > 0 then
+ -- return "qualified", result
+ -- end
+ end
+end
+
+local function check_subpath(fname)
+ if isreadable(fname) then
+ if trace_detail then
+ report_resolving("found '%s' by deep scanning",fname)
+ end
+ return fname
+ end
+end
+
+local function find_intree(filename,filetype,wantedfiles,allresults)
+ local typespec = resolvers.variableofformat(filetype)
+ local pathlist = resolvers.expandedpathlist(typespec)
+ local method = "intree"
+ if pathlist and #pathlist > 0 then
+ -- list search
+ local filelist = collect_files(wantedfiles)
+ local dirlist = { }
+ if filelist then
+ for i=1,#filelist do
+ dirlist[i] = filedirname(filelist[i][3]) .. "/" -- was [2] .. gamble
end
- for k=1,#wantedfiles do
- local fname = wantedfiles[k]
- if fname and isreadable(fname) then
- filename, done = fname, true
- result[#result+1] = filejoin('.',fname)
- break
+ end
+ if trace_detail then
+ report_resolving("checking filename '%s'",filename)
+ end
+ local result = { }
+ for k=1,#pathlist do
+ local path = pathlist[k]
+ local pathname = lpegmatch(inhibitstripper,path)
+ local doscan = path == pathname -- no ^!!
+ if not find (pathname,'//$') then
+ doscan = false -- we check directly on the path
+ end
+ local done = false
+ -- using file list
+ if filelist then -- database
+ -- compare list entries with permitted pattern -- /xx /xx//
+ local expression = makepathexpression(pathname)
+ if trace_detail then
+ report_resolving("using pattern '%s' for path '%s'",expression,pathname)
end
- end
- -- this is actually 'other text files' or 'any' or 'whatever'
- local filelist = collect_files(wantedfiles)
- local fl = filelist and filelist[1]
- if fl then
- filename = fl[3] -- not local?
- result[#result+1] = resolvers.resolve(filename)
- done = true
- end
- else
- -- list search
- local filelist = collect_files(wantedfiles)
- local dirlist = { }
- if filelist then
- for i=1,#filelist do
- dirlist[i] = filedirname(filelist[i][3]) .. "/" -- was [2] .. gamble
- end
- end
- if trace_detail then
- report_resolving("checking filename '%s'",filename)
- end
- for k=1,#pathlist do
- local path = pathlist[k]
- local pathname = lpegmatch(inhibitstripper,path)
- local doscan = path == pathname -- no ^!!
- if not find (pathname, '//$') then doscan = false end
- done = false
- -- using file list
- if filelist then
- -- compare list entries with permitted pattern -- /xx /xx//
- local expression = makepathexpression(pathname)
- if trace_detail then
- report_resolving("using pattern '%s' for path '%s'",expression,pathname)
- end
- for k=1,#filelist do
- local fl = filelist[k]
- local f = fl[2]
- local d = dirlist[k]
- if find(d,expression) then
- -- todo, test for readable
- result[#result+1] = resolvers.resolve(fl[3]) -- no shortcut
- done = true
- if allresults then
- if trace_detail then
- report_resolving("match to '%s' in hash for file '%s' and path '%s', continue scanning",expression,f,d)
- end
- else
- if trace_detail then
- report_resolving("match to '%s' in hash for file '%s' and path '%s', quit scanning",expression,f,d)
- end
- break
+ for k=1,#filelist do
+ local fl = filelist[k]
+ local f = fl[2]
+ local d = dirlist[k]
+ if find(d,expression) then
+ -- todo, test for readable
+ result[#result+1] = resolvers.resolve(fl[3]) -- no shortcut
+ done = true
+ if allresults then
+ if trace_detail then
+ report_resolving("match to '%s' in hash for file '%s' and path '%s', continue scanning",expression,f,d)
end
- elseif trace_detail then
- report_resolving("no match to '%s' in hash for file '%s' and path '%s'",expression,f,d)
+ else
+ if trace_detail then
+ report_resolving("match to '%s' in hash for file '%s' and path '%s', quit scanning",expression,f,d)
+ end
+ break
end
+ elseif trace_detail then
+ report_resolving("no match to '%s' in hash for file '%s' and path '%s'",expression,f,d)
end
end
- if not done then
- -- check if on disk / unchecked / does not work at all / also zips
- -- TH perhaps it did not work because of missing resolvers.resolve()
- -- and resolvers.scanfiles() calls ...
- pathname = resolvers.resolve(pathname)
- local scheme = url.hasscheme(pathname)
- if not scheme or scheme == "file" then
- local pname = gsub(pathname,"%.%*$",'')
- if not find(pname,"%*") then
- local ppname = gsub(pname,"/+$","")
- if can_be_dir(ppname) then
- local files = {}
- if doscan then files = resolvers.scanfiles(ppname) end
+ end
+ if done then
+ method = "database"
+ else
+ method = "filesystem" -- bonus, even when !! is specified
+ pathname = gsub(pathname,"/+$","")
+ pathname = resolvers.resolve(pathname)
+ local scheme = url.hasscheme(pathname)
+ if not scheme or scheme == "file" then
+ local pname = gsub(pathname,"%.%*$",'')
+ if not find(pname,"%*") then
+ if can_be_dir(pname) then
+ -- quick root scan first
+ for k=1,#wantedfiles do
+ local w = wantedfiles[k]
+ local fname = check_subpath(filejoin(pname,w))
+ if fname then
+ result[#result+1] = fname
+ done = true
+ if not allresults then
+ break
+ end
+ end
+ end
+ if not done and doscan then
+ -- collect files in path (and cache the result)
+ local files = resolvers.simplescanfiles(pname,false,true)
for k=1,#wantedfiles do
local w = wantedfiles[k]
- local subpath = ''
- if files[w] then
- subpath = type(files[w]) == "table" and files[w][1] or files[w]
- end
- local fname = filejoin(ppname,subpath,w)
- if isreadable(fname) then
- if trace_detail then
- report_resolving("found '%s' by scanning",fname)
+ local subpath = files[w]
+ if not subpath or subpath == "" then
+ -- rootscan already done
+ elseif type(subpath) == "string" then
+ local fname = check_subpath(filejoin(pname,subpath,w))
+ if fname then
+ result[#result+1] = fname
+ done = true
+ if not allresults then
+ break
+ end
+ end
+ else
+ for i=1,#subpath do
+ local sp = subpath[i]
+ if sp == "" then
+ -- roottest already done
+ else
+ local fname = check_subpath(filejoin(pname,sp,w))
+ if fname then
+ result[#result+1] = fname
+ done = true
+ if not allresults then
+ break
+ end
+ end
+ end
+ end
+ if done and not allresults then
+ break
end
- result[#result+1] = fname
- done = true
- if not allresults then break end
end
end
- else
- -- no access needed for non existing path, speedup (esp in large tree with lots of fake)
end
end
+ else
+ -- no access needed for non existing path, speedup (esp in large tree with lots of fake)
end
end
- if not done and doscan then
- -- todo: slow path scanning ... although we now have tree:// supported in $TEXMF
- end
- if done and not allresults then break end
+ end
+ -- todo recursive scanning
+ if done and not allresults then
+ break
end
end
+ if #result > 0 then
+ return method, result
+ end
end
- for k=1,#result do
- local rk = collapsepath(result[k])
- result[k] = rk
- resolvers.registerintrees(rk) -- for tracing used files
+end
+
+local function find_onpath(filename,filetype,wantedfiles,allresults)
+ if trace_detail then
+ report_resolving("checking filename '%s', filetype '%s', wanted files '%s'",filename, filetype or '?',concat(wantedfiles," | "))
+ end
+ local result = { }
+ for k=1,#wantedfiles do
+ local fname = wantedfiles[k]
+ if fname and isreadable(fname) then
+ filename = fname
+ result[#result+1] = filejoin('.',fname)
+ if not allresults then
+ break
+ end
+ end
end
- if stamp then
- instance.found[stamp] = result
+ if #result > 0 then
+ return "onpath", result
end
- return result
end
--- -- -- begin of main file search routing -- -- --
-
-
-
-
-
-
-
+local function find_otherwise(filename,filetype,wantedfiles,allresults) -- other text files | any | whatever
+ local filelist = collect_files(wantedfiles)
+ local fl = filelist and filelist[1]
+ if fl then
+ return "otherwise", { resolvers.resolve(fl[3]) } -- filename
+ end
+end
+-- we could have a loop over the 6 functions but then we'd have to
+-- always analyze
+collect_instance_files = function(filename,askedformat,allresults) -- uses nested
+ askedformat = askedformat or ""
+ filename = collapsepath(filename)
+ if allresults then
+ -- no need for caching, only used for tracing
+ local filetype, wantedfiles = find_analyze(filename,askedformat)
+ local results = {
+ { find_direct (filename,true) },
+ { find_wildcard (filename,true) },
+ { find_qualified(filename,true) },
+ { find_intree (filename,filetype,wantedfiles,true) },
+ { find_onpath (filename,filetype,wantedfiles,true) },
+ { find_otherwise(filename,filetype,wantedfiles,true) },
+ }
+ local result, status, done = { }, { }, { }
+ for k, r in next, results do
+ local method, list = r[1], r[2]
+ if method and list then
+ for i=1,#list do
+ local c = collapsepath(list[i])
+ if not done[c] then
+ result[#result+1] = c
+ done[c] = true
+ end
+ status[#status+1] = format("%-10s: %s",method,c)
+ end
+ end
+ end
+ if trace_detail then
+ report_resolving("lookup status: %s",table.serialize(status,filename))
+ end
+ return result, status
+ else
+ local method, result, stamp, filetype, wantedfiles
+ if instance.remember then
+ stamp = format("%s--%s", filename, askedformat)
+ result = stamp and instance.found[stamp]
+ if result then
+ if trace_locating then
+ report_resolving("remembered file '%s'",filename)
+ end
+ return result
+ end
+ end
+ method, result = find_direct(filename)
+ if not result then
+ method, result = find_wildcard(filename)
+ if not result then
+ method, result = find_qualified(filename)
+ if not result then
+ filetype, wantedfiles = find_analyze(filename,askedformat)
+ method, result = find_intree(filename,filetype,wantedfiles)
+ if not result then
+ method, result = find_onpath(filename,filetype,wantedfiles)
+ if not result then
+ method, result = find_otherwise(filename,filetype,wantedfiles)
+ end
+ end
+ end
+ end
+ end
+ if result and #result > 0 then
+ local foundname = collapsepath(result[1])
+ resolvers.registerintrees(filename,askedformat,filetype,method,foundname)
+ result = { foundname }
+ else
+ result = { } -- maybe false
+ end
+ if stamp then
+ if trace_locating then
+ report_resolving("remembering file '%s'",filename)
+ end
+ instance.found[stamp] = result
+ end
+ return result
+ end
+end
-- -- -- end of main file search routing -- -- --
+
local function findfiles(filename,filetype,allresults)
- local result = collect_instance_files(filename,filetype or "",allresults)
- if #result == 0 then
+ local result, status = collect_instance_files(filename,filetype or "",allresults)
+ if not result or #result == 0 then
local lowered = lower(filename)
if filename ~= lowered then
- return collect_instance_files(lowered,filetype or "",allresults)
+ result, status = collect_instance_files(lowered,filetype or "",allresults)
end
end
- return result
+ return result or { }, status
end
function resolvers.findfiles(filename,filetype)
@@ -12370,6 +13646,10 @@ function resolvers.load(option)
return files and next(files) and true
end
+function resolvers.loadtime()
+ return statistics.elapsedtime(instance)
+end
+
local function report(str)
if trace_locating then
report_resolving(str) -- has already verbose
@@ -12383,6 +13663,9 @@ function resolvers.dowithfilesandreport(command, files, ...) -- will move
if trace_locating then
report('') -- ?
end
+ if type(files) == "string" then
+ files = { files }
+ end
for f=1,#files do
local file = files[f]
local result = command(file,...)
@@ -12540,6 +13823,8 @@ local cleanpath, findgivenfile, expansion = resolvers.cleanpath, resolvers.findg
local getenv = resolvers.getenv -- we can probably also use resolvers.expansion
local P, Cs, lpegmatch = lpeg.P, lpeg.Cs, lpeg.match
+-- getenv = function(...) return resolvers.getenv(...) end -- needs checking (definitions change later on)
+
prefixes.environment = function(str)
return cleanpath(expansion(str))
end
@@ -12578,7 +13863,7 @@ end
prefixes.filename = function(str)
local fullname = findgivenfile(str) or ""
- return cleanpath(file.basename((fullname ~= "" and fullname) or str))
+ return cleanpath(file.basename((fullname ~= "" and fullname) or str)) -- no cleanpath needed here
end
prefixes.pathname = function(str)
@@ -12621,8 +13906,9 @@ function resolvers.allprefixes(separator)
end
local function _resolve_(method,target)
- if prefixes[method] then
- return prefixes[method](target)
+ local action = prefixes[method]
+ if action then
+ return action(target)
else
return method .. ":" .. target
end
@@ -12637,7 +13923,7 @@ end
local function resolve(str) -- use schemes, this one is then for the commandline only
local res = resolved[str]
if not res then
- res = gsub(str,"([a-z][a-z]+):([^ \"\';]*)",_resolve_)
+ res = gsub(str,"([a-z][a-z]+):([^ \"\';]*)",_resolve_) -- home:xx;selfautoparent:xx; etc
resolved[str] = res
abstract[res] = str
end
@@ -12797,7 +14083,7 @@ end
function generators.file(specification)
local path = specification.filename
- local content = resolvers.scanfiles(path)
+ local content = resolvers.scanfiles(path,false,true) -- scan once
resolvers.registerfilehash(path,content,true)
end
@@ -13152,6 +14438,18 @@ local archives = zip.archives
zip.registeredfiles = zip.registeredfiles or { }
local registeredfiles = zip.registeredfiles
+local limited = false
+
+directives.register("system.inputmode", function(v)
+ if not limited then
+ local i_limiter = io.i_limiter(v)
+ if i_limiter then
+ zip.open = i_limiter.protect(zip.open)
+ limited = true
+ end
+ end
+end)
+
local function validzip(str) -- todo: use url splitter
if not find(str,"^zip://") then
return "zip:///" .. str
@@ -13419,11 +14717,11 @@ function resolvers.finders.tree(specification)
end
function resolvers.locators.tree(specification)
- -- TH: resolvers.resolve() call was missing here
- local name = resolvers.resolve(specification.filename)
- if name ~= '' and lfs.isdir(name) then
+ local name = specification.filename
+ local realname = resolvers.resolve(name) -- no shortcut
+ if realname and realname ~= '' and lfs.isdir(realname) then
if trace_locating then
- report_trees("locator '%s' found",name)
+ report_trees("locator '%s' found",realname)
end
resolvers.appendhash('tree',name,false) -- don't cache
elseif trace_locating then
@@ -13437,6 +14735,8 @@ function resolvers.hashers.tree(specification)
report_trees("analysing '%s'",name)
end
resolvers.methodhandler("hashers",name)
+
+ resolvers.generators.file(specification)
end
resolvers.concatinators.tree = resolvers.concatinators.file
@@ -13596,8 +14896,15 @@ local function loaded(libpaths,name,simple)
end
package.loaders[2] = function(name) -- was [#package.loaders+1]
- if trace_locating then -- mode detail
- report_libraries("! locating '%s'",name)
+ if file.suffix(name) == "" then
+ name = file.addsuffix(name,"lua") -- maybe a list
+ if trace_locating then -- mode detail
+ report_libraries("! locating '%s' with forced suffix",name)
+ end
+ else
+ if trace_locating then -- mode detail
+ report_libraries("! locating '%s'",name)
+ end
end
for i=1,#libformats do
local format = libformats[i]
@@ -13804,7 +15111,13 @@ function resolvers.load_tree(tree,resolve)
-- Beware, we need to obey the relocatable autoparent so we
-- set TEXMFCNF to its raw value. This is somewhat tricky when
-- we run a mkii job from within. Therefore, in mtxrun, there
- -- is a resolve applied when we're in mkii/kpse mode.
+ -- is a resolve applied when we're in mkii/kpse mode or when
+ -- --resolve is passed to mtxrun. Maybe we should also set the
+ -- local AUTOPARENT etc. although these are always set anew.
+
+ if resolve then
+ resolvers.luacnfspec = resolvers.resolve(resolvers.luacnfspec)
+ end
setenv('SELFAUTOPARENT', newroot)
setenv('SELFAUTODIR', newtree)
@@ -13812,11 +15125,12 @@ function resolvers.load_tree(tree,resolve)
setenv('TEXROOT', newroot)
setenv('TEXOS', texos)
setenv('TEXMFOS', texmfos)
- setenv('TEXMFCNF', resolvers.luacnfspec, not resolve)
- setenv("PATH", newpath .. io.pathseparator .. getenv("PATH"))
+ setenv('TEXMFCNF', resolvers.luacnfspec,true) -- already resolved
+ setenv('PATH', newpath .. io.pathseparator .. getenv('PATH'))
report_tds("changing from root '%s' to '%s'",oldroot,newroot)
- report_tds("prepending '%s' to binary path",newpath)
+ report_tds("prepending '%s' to PATH",newpath)
+ report_tds("setting TEXMFCNF to '%s'",resolvers.luacnfspec)
report_tds()
end
end
@@ -13837,6 +15151,7 @@ if not modules then modules = { } end modules ['data-lst'] = {
-- used in mtxrun, can be loaded later .. todo
local find, concat, upper, format = string.find, table.concat, string.upper, string.format
+local fastcopy, sortedpairs = table.fastcopy, table.sortedpairs
resolvers.listers = resolvers.listers or { }
@@ -13867,10 +15182,10 @@ function resolvers.listers.variables(pattern)
end
end
end
- local env = table.fastcopy(environment)
- local var = table.fastcopy(variables)
- local exp = table.fastcopy(expansions)
- for key, value in table.sortedpairs(configured) do
+ local env = fastcopy(environment)
+ local var = fastcopy(variables)
+ local exp = fastcopy(expansions)
+ for key, value in sortedpairs(configured) do
if key ~= "" and (pattern == "" or find(upper(key),pattern)) then
report_lists(key)
report_lists(" env: %s",tabstr(rawget(environment,key)) or "unset")
@@ -13879,9 +15194,9 @@ function resolvers.listers.variables(pattern)
report_lists(" res: %s",resolvers.resolve(expansions[key]) or "unset")
end
end
- instance.environment = table.fastcopy(env)
- instance.variables = table.fastcopy(var)
- instance.expansions = table.fastcopy(exp)
+ instance.environment = fastcopy(env)
+ instance.variables = fastcopy(var)
+ instance.expansions = fastcopy(exp)
end
function resolvers.listers.configurations(report)
@@ -14168,8 +15483,8 @@ own = { } -- not local, might change
own.libs = { -- order can be made better
'l-string.lua',
- 'l-lpeg.lua',
'l-table.lua',
+ 'l-lpeg.lua',
'l-io.lua',
'l-number.lua',
'l-set.lua',
@@ -14240,7 +15555,7 @@ end
-- End of hack.
-local format, gsub, gmatch, match = string.format, string.gsub, string.gmatch, string.match
+local format, gsub, gmatch, match, find = string.format, string.gsub, string.gmatch, string.match, string.find
local concat = table.concat
own.name = (environment and environment.ownname) or arg[0] or 'mtxrun.lua'
@@ -14362,7 +15677,7 @@ local helpinfo = [[
--resolve resolve prefixed arguments
--ctxlua run internally (using preloaded libs)
--internal run script using built in libraries (same as --ctxlua)
---locate locate given filename
+--locate locate given filename in database (default) or system (--first --all --detail)
--autotree use texmf tree cf. env 'texmfstart_tree' or 'texmfstarttree'
--tree=pathtotree use given texmf tree (default file: 'setuptex.tmf')
@@ -14410,7 +15725,7 @@ local helpinfo = [[
local application = logs.application {
name = "mtxrun",
- banner = "ConTeXt TDS Runner Tool 1.30",
+ banner = "ConTeXt TDS Runner Tool 1.31",
helpinfo = helpinfo,
}
@@ -14455,12 +15770,12 @@ runners.registered = {
xmltools = { 'xmltools.rb', true },
-- luatools = { 'luatools.lua', true },
mtxtools = { 'mtxtools.rb', true },
- pdftrimwhite = { 'pdftrimwhite.pl', false }
+ pdftrimwhite = { 'pdftrimwhite.pl', false },
}
runners.launchers = {
windows = { },
- unix = { }
+ unix = { },
}
-- like runners.libpath("framework"): looks on script's subpath
@@ -14487,7 +15802,7 @@ function runners.prepare()
end
local touchname = environment.argument("iftouched")
if type(touchname) == "string" and touchname ~= "" then
- local oldname, newname = string.split(touchname, ",")
+ local oldname, newname = string.splitup(touchname, ",")
if oldname and newname and oldname ~= "" and newname ~= "" then
if not file.needs_updating(oldname,newname) then
if e_verbose then
@@ -14521,11 +15836,11 @@ function runners.execute_script(fullname,internal,nosplit)
if path ~= "" then
result = fullname
elseif name then
- name = name:gsub("^int[%a]*:",function()
+ name = gsub(name,"^int[%a]*:",function()
internal = true
return ""
end )
- name = name:gsub("^script:","")
+ name = gsub(name,"^script:","")
if suffix == "" and runners.registered[name] and runners.registered[name][1] then
name = runners.registered[name][1]
suffix = file.extname(name)
@@ -14555,13 +15870,15 @@ function runners.execute_script(fullname,internal,nosplit)
environment.ownscript = result
dofile(result)
else
- local quotedresult = result
- if string.match(result,' ') and not string.match(result,"^\".*\"$") then quotedresult = '"' .. result .. '"' end
local binary = runners.applications[file.extname(result)]
+ result = string.quoted(string.unquoted(result))
+ -- if string.match(result,' ') and not string.match(result,"^\".*\"$") then
+ -- result = '"' .. result .. '"'
+ -- end
if binary and binary ~= "" then
- quotedresult = binary .. " " .. quotedresult
+ result = binary .. " " .. result
end
- local command = quotedresult .. " " .. environment.reconstructcommandline(environment.arguments_after,noquote)
+ local command = result .. " " .. environment.reconstructcommandline(environment.arguments_after,noquote)
if e_verbose then
report()
report("executing: %s",command)
@@ -14606,7 +15923,7 @@ function runners.execute_program(fullname)
local before, after = environment.splitarguments(fullname)
for k=1,#after do after[k] = resolvers.resolve(after[k]) end
environment.initializearguments(after)
- fullname = fullname:gsub("^bin:","")
+ fullname = gsub(fullname,"^bin:","")
local command = fullname .. " " .. (environment.reconstructcommandline(after or "",noquote) or "")
report()
report("executing: %s",command)
@@ -14667,10 +15984,24 @@ function runners.resolve_string(filename)
end
end
-function runners.locate_file(filename)
- -- differs from texmfstart where locate appends .com .exe .bat ... todo
+-- differs from texmfstart where locate appends .com .exe .bat ... todo
+
+function runners.locate_file(filename) -- was given file but only searches in tree
if filename and filename ~= "" then
- runners.report_location(resolvers.findgivenfile(filename))
+ if environment.argument("first") then
+ runners.report_location(resolvers.findfile(filename))
+ -- resolvers.dowithfilesandreport(resolvers.findfile,filename)
+ elseif environment.argument("all") then
+ local result, status = resolvers.findfiles(filename)
+ if status and environment.argument("detail") then
+ runners.report_location(status)
+ else
+ runners.report_location(result)
+ end
+ else
+ runners.report_location(resolvers.findgivenfile(filename))
+ -- resolvers.dowithfilesandreport(resolvers.findgivenfile,filename)
+ end
end
end
@@ -14679,12 +16010,12 @@ function runners.locate_platform()
end
function runners.report_location(result)
- if e_verbose then
- reportline()
- if result and result ~= "" then
- report(result)
- else
- report("not found")
+ if type(result) == "table" then
+ for i=1,#result do
+ if i > 1 then
+ io.write("\n")
+ end
+ io.write(result[i])
end
else
io.write(result)
@@ -14742,8 +16073,8 @@ function resolvers.launch(str)
end
function runners.launch_file(filename)
- instance.allresults = true
trackers.enable("resolvers.locating")
+ local allresults = environment.arguments["all"]
local pattern = environment.arguments["pattern"]
if not pattern or pattern == "" then
pattern = filename
@@ -14751,15 +16082,15 @@ function runners.launch_file(filename)
if not pattern or pattern == "" then
report("provide name or --pattern=")
else
- local t = resolvers.findfiles(pattern)
+ local t = resolvers.findfiles(pattern,nil,allresults)
if not t or #t == 0 then
- t = resolvers.findfiles("*/" .. pattern)
+ t = resolvers.findfiles("*/" .. pattern,nil,allresults)
end
if not t or #t == 0 then
- t = resolvers.findfiles("*/" .. pattern .. "*")
+ t = resolvers.findfiles("*/" .. pattern .. "*",nil,allresults)
end
if t and #t > 0 then
- if environment.arguments["all"] then
+ if allresults then
for _, v in pairs(t) do
report("launching %s", v)
resolvers.launch(v)
@@ -14774,6 +16105,11 @@ function runners.launch_file(filename)
end
end
+local mtxprefixes = {
+ { "^mtx%-", "mtx-" },
+ { "^mtx%-t%-", "mtx-t-" },
+}
+
function runners.find_mtx_script(filename)
local function found(name)
local path = file.dirname(name)
@@ -14799,24 +16135,27 @@ function runners.find_mtx_script(filename)
return fullname
end
-- mtx- prefix checking
- local mtxprefix = (filename:find("^mtx%-") and "") or "mtx-"
- -- context namespace, mtx-<filename>
- fullname = mtxprefix .. filename
- fullname = found(fullname) or resolvers.findfile(fullname)
- if fullname and fullname ~= "" then
- return fullname
- end
- -- context namespace, mtx-<filename>s
- fullname = mtxprefix .. basename .. "s" .. "." .. suffix
- fullname = found(fullname) or resolvers.findfile(fullname)
- if fullname and fullname ~= "" then
- return fullname
- end
- -- context namespace, mtx-<filename minus trailing s>
- fullname = mtxprefix .. basename:gsub("s$","") .. "." .. suffix
- fullname = found(fullname) or resolvers.findfile(fullname)
- if fullname and fullname ~= "" then
- return fullname
+ for i=1,#mtxprefixes do
+ local mtxprefix = mtxprefixes[i]
+ mtxprefix = find(filename,mtxprefix[1]) and "" or mtxprefix[2]
+ -- context namespace, mtx-<filename>
+ fullname = mtxprefix .. filename
+ fullname = found(fullname) or resolvers.findfile(fullname)
+ if fullname and fullname ~= "" then
+ return fullname
+ end
+ -- context namespace, mtx-<filename>s
+ fullname = mtxprefix .. basename .. "s" .. "." .. suffix
+ fullname = found(fullname) or resolvers.findfile(fullname)
+ if fullname and fullname ~= "" then
+ return fullname
+ end
+ -- context namespace, mtx-<filename minus trailing s>
+ fullname = mtxprefix .. gsub(basename,"s$","") .. "." .. suffix
+ fullname = found(fullname) or resolvers.findfile(fullname)
+ if fullname and fullname ~= "" then
+ return fullname
+ end
end
-- context namespace, just <filename>
fullname = resolvers.findfile(filename)
@@ -14963,9 +16302,11 @@ instance.lsrmode = environment.argument("lsr") or false
local is_mkii_stub = runners.registered[file.removesuffix(file.basename(filename))]
-if environment.argument("usekpse") or environment.argument("forcekpse") or is_mkii_stub then
+local e_argument = environment.argument
+
+if e_argument("usekpse") or e_argument("forcekpse") or is_mkii_stub then
- resolvers.load_tree(environment.argument('tree'),true) -- force resolve of TEXMFCNF
+ resolvers.load_tree(e_argument('tree'),true) -- force resolve of TEXMFCNF
os.setenv("engine","")
os.setenv("progname","")
@@ -14978,7 +16319,7 @@ if environment.argument("usekpse") or environment.argument("forcekpse") or is_mk
other = "other text files",
}
- local progname = environment.argument("progname") or 'context'
+ local progname = e_argument("progname") or 'context'
local function kpse_initialized()
texconfig.kpse_init = true
@@ -14993,7 +16334,7 @@ if environment.argument("usekpse") or environment.argument("forcekpse") or is_mk
local findfile = resolvers.findfile
local showpath = resolvers.showpath
- if environment.argument("forcekpse") then
+ if e_argument("forcekpse") then
function resolvers.findfile(name,kind)
return (kpse_initialized():find_file(resolvers.cleanpath(name),(kind ~= "" and (remapper[kind] or kind)) or "tex") or "") or ""
@@ -15002,7 +16343,7 @@ if environment.argument("usekpse") or environment.argument("forcekpse") or is_mk
return (kpse_initialized():show_path(name)) or ""
end
- elseif environment.argument("usekpse") or is_mkii_stub then
+ elseif e_argument("usekpse") or is_mkii_stub then
resolvers.load()
@@ -15041,12 +16382,12 @@ else
end
end
- resolvers.load_tree(environment.argument('tree'))
+ resolvers.load_tree(e_argument('tree'),e_argument("resolve"))
end
-if environment.argument("selfmerge") then
+if e_argument("selfmerge") then
-- embed used libraries
@@ -15056,27 +16397,27 @@ if environment.argument("selfmerge") then
utilities.merger.selfmerge(own.name,own.libs,{ found })
end
-elseif environment.argument("selfclean") then
+elseif e_argument("selfclean") then
-- remove embedded libraries
runners.loadbase()
utilities.merger.selfclean(own.name)
-elseif environment.argument("selfupdate") then
+elseif e_argument("selfupdate") then
runners.loadbase()
trackers.enable("resolvers.locating")
resolvers.updatescript(own.name,"mtxrun")
-elseif environment.argument("ctxlua") or environment.argument("internal") then
+elseif e_argument("ctxlua") or e_argument("internal") then
-- run a script by loading it (using libs)
runners.loadbase()
ok = runners.execute_script(filename,true)
-elseif environment.argument("script") or environment.argument("scripts") then
+elseif e_argument("script") or e_argument("scripts") then
-- run a script by loading it (using libs), pass args
@@ -15087,109 +16428,110 @@ elseif environment.argument("script") or environment.argument("scripts") then
ok = runners.execute_ctx_script(filename)
end
-elseif environment.argument("execute") then
+elseif e_argument("execute") then
-- execute script
runners.loadbase()
ok = runners.execute_script(filename)
-elseif environment.argument("direct") then
+elseif e_argument("direct") then
-- equals bin:
runners.loadbase()
ok = runners.execute_program(filename)
-elseif environment.argument("edit") then
+elseif e_argument("edit") then
-- edit file
runners.loadbase()
runners.edit_script(filename)
-elseif environment.argument("launch") then
+elseif e_argument("launch") then
runners.loadbase()
runners.launch_file(filename)
-elseif environment.argument("makestubs") then
+elseif e_argument("makestubs") then
   -- make stubs (deprecated)
runners.handle_stubs(true)
-elseif environment.argument("removestubs") then
+elseif e_argument("removestubs") then
   -- remove stub (deprecated)
runners.loadbase()
runners.handle_stubs(false)
-elseif environment.argument("resolve") then
+elseif e_argument("resolve") then
-- resolve string
runners.loadbase()
runners.resolve_string(filename)
-elseif environment.argument("locate") then
+elseif e_argument("locate") then
- -- locate file
+ -- locate file (only database)
runners.loadbase()
runners.locate_file(filename)
-elseif environment.argument("platform") or environment.argument("show-platform") then
+elseif e_argument("platform") or e_argument("show-platform") then
-- locate platform
runners.loadbase()
runners.locate_platform()
-elseif environment.argument("prefixes") then
+elseif e_argument("prefixes") then
runners.loadbase()
runners.prefixes()
-elseif environment.argument("timedrun") then
+elseif e_argument("timedrun") then
   -- timed run
runners.loadbase()
runners.timedrun(filename)
-elseif environment.argument("variables") or environment.argument("show-variables") or environment.argument("expansions") or environment.argument("show-expansions") then
+elseif e_argument("variables") or e_argument("show-variables") or e_argument("expansions") or e_argument("show-expansions") then
-- luatools: runners.execute_ctx_script("mtx-base","--expansions",filename)
resolvers.load("nofiles")
- resolvers.listers.variables(environment.argument("pattern"))
+ resolvers.listers.variables(e_argument("pattern"))
-elseif environment.argument("configurations") or environment.argument("show-configurations") then
+elseif e_argument("configurations") or e_argument("show-configurations") then
-- luatools: runners.execute_ctx_script("mtx-base","--configurations",filename)
resolvers.load("nofiles")
resolvers.listers.configurations()
-elseif environment.argument("find-file") then
+elseif e_argument("find-file") then
-- luatools: runners.execute_ctx_script("mtx-base","--find-file",filename)
resolvers.load()
- local e_pattern = environment.argument("pattern")
- local e_format = environment.argument("format")
+ local e_all = e_argument("all")
+ local e_pattern = e_argument("pattern")
+ local e_format = e_argument("format")
+ local finder = e_all and resolvers.findfiles or resolvers.findfile
if not e_pattern then
runners.register_arguments(filename)
environment.initializearguments(environment.arguments_after)
- resolvers.dowithfilesandreport(resolvers.findfiles,environment.files,e_format)
+ resolvers.dowithfilesandreport(finder,environment.files,e_format)
elseif type(e_pattern) == "string" then
- instance.allresults = true -- brrrr
- resolvers.dowithfilesandreport(resolvers.findfiles,{ e_pattern }, e_format)
+ resolvers.dowithfilesandreport(finder,{ e_pattern },e_format)
end
-elseif environment.argument("find-path") then
+elseif e_argument("find-path") then
-- luatools: runners.execute_ctx_script("mtx-base","--find-path",filename)
@@ -15201,7 +16543,7 @@ elseif environment.argument("find-path") then
print(path)
end
-elseif environment.argument("expand-braces") then
+elseif e_argument("expand-braces") then
-- luatools: runners.execute_ctx_script("mtx-base","--expand-braces",filename)
@@ -15210,7 +16552,7 @@ elseif environment.argument("expand-braces") then
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.expandbraces, environment.files)
-elseif environment.argument("expand-path") then
+elseif e_argument("expand-path") then
-- luatools: runners.execute_ctx_script("mtx-base","--expand-path",filename)
@@ -15219,7 +16561,7 @@ elseif environment.argument("expand-path") then
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.expandpath, environment.files)
-elseif environment.argument("expand-var") or environment.argument("expand-variable") then
+elseif e_argument("expand-var") or e_argument("expand-variable") then
-- luatools: runners.execute_ctx_script("mtx-base","--expand-var",filename)
@@ -15228,7 +16570,7 @@ elseif environment.argument("expand-var") or environment.argument("expand-variab
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.expansion, environment.files)
-elseif environment.argument("show-path") or environment.argument("path-value") then
+elseif e_argument("show-path") or e_argument("path-value") then
-- luatools: runners.execute_ctx_script("mtx-base","--show-path",filename)
@@ -15237,7 +16579,7 @@ elseif environment.argument("show-path") or environment.argument("path-value") t
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.showpath, environment.files)
-elseif environment.argument("var-value") or environment.argument("show-value") then
+elseif e_argument("var-value") or e_argument("show-value") then
-- luatools: runners.execute_ctx_script("mtx-base","--show-value",filename)
@@ -15246,28 +16588,36 @@ elseif environment.argument("var-value") or environment.argument("show-value") t
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.variable,environment.files)
-elseif environment.argument("format-path") then
+elseif e_argument("format-path") then
-- luatools: runners.execute_ctx_script("mtx-base","--format-path",filename)
resolvers.load()
report(caches.getwritablepath("format"))
-elseif environment.argument("pattern") then
+elseif e_argument("pattern") then
-- luatools
- runners.execute_ctx_script("mtx-base","--pattern='" .. environment.argument("pattern") .. "'",filename)
+ runners.execute_ctx_script("mtx-base","--pattern='" .. e_argument("pattern") .. "'",filename)
-elseif environment.argument("generate") then
+elseif e_argument("generate") then
-- luatools
- instance.renewcache = true
- trackers.enable("resolvers.locating")
- resolvers.load()
+ if filename and filename ~= "" then
+ resolvers.load("nofiles")
+ trackers.enable("resolvers.locating")
+ resolvers.renew(filename)
+ else
+ instance.renewcache = true
+ trackers.enable("resolvers.locating")
+ resolvers.load()
+ end
-elseif environment.argument("make") or environment.argument("ini") or environment.argument("compile") then
+ e_verbose = true
+
+elseif e_argument("make") or e_argument("ini") or e_argument("compile") then
-- luatools: runners.execute_ctx_script("mtx-base","--make",filename)
@@ -15275,29 +16625,33 @@ elseif environment.argument("make") or environment.argument("ini") or environmen
trackers.enable("resolvers.locating")
environment.make_format(filename)
-elseif environment.argument("run") then
+elseif e_argument("run") then
-- luatools
runners.execute_ctx_script("mtx-base","--run",filename)
-elseif environment.argument("fmt") then
+elseif e_argument("fmt") then
-- luatools
runners.execute_ctx_script("mtx-base","--fmt",filename)
-elseif environment.argument("help") and filename=='base' then
+elseif e_argument("help") and filename=='base' then
-- luatools
runners.execute_ctx_script("mtx-base","--help")
-elseif environment.argument("help") or filename=='help' or filename == "" then
+elseif e_argument("version") then
+
+ application.version()
+
+elseif e_argument("help") or filename=='help' or filename == "" then
application.help()
-elseif filename:find("^bin:") then
+elseif find(filename,"^bin:") then
runners.loadbase()
ok = runners.execute_program(filename)
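
For reference: the refactored lookup in this patch splits the old monolithic collect_instance_files into six dedicated finders (find_direct, find_wildcard, find_qualified, find_intree, find_onpath, find_otherwise) that are tried in order, while the "filename--format" stamp is still used to remember results. As the comment in collect_instance_files notes, a plain loop over the finders would force find_analyze to run on every lookup, which is why the patch unrolls the chain. The sketch below only illustrates the dispatch-and-remember pattern; the finder bodies, the found table, the collect function and the sample filenames are invented placeholders for illustration, not mtxrun's actual state or API.

-- minimal sketch of the finder chain with placeholder finders (not mtxrun code)

local found = { }   -- stands in for instance.found, the "remember" cache

-- each finder returns a method name and a result list, or nil when it does not apply
local function find_direct(filename)
  if filename == "direct.tex" then return "direct", { filename } end
end
local function find_wildcard(filename)
  if filename:find("%*") then return "wildcard", { "a.tex", "b.tex" } end
end
local function find_qualified(filename)
  if filename:find("^/") then return "qualified", { filename } end
end
local function find_intree()    end -- database / filesystem search omitted
local function find_onpath()    end -- current-path check omitted
local function find_otherwise() end -- catch-all lookup omitted

local finders = {
  find_direct, find_wildcard, find_qualified,
  find_intree, find_onpath, find_otherwise,
}

local function collect(filename, askedformat)
  askedformat = askedformat or ""
  local stamp  = filename .. "--" .. askedformat
  local result = found[stamp]
  if result then
    return result                -- remembered lookup, no searching needed
  end
  for i = 1, #finders do
    local method, list = finders[i](filename)
    if list and #list > 0 then
      result = { list[1] }       -- first hit wins, as in the non-allresults branch
      found[stamp] = result
      return result, method
    end
  end
  found[stamp] = { }             -- a miss is remembered as well
  return found[stamp]
end

local r, m = collect("/usr/local/texmf/whatever.tex")
print(m, r[1])                   -- qualified   /usr/local/texmf/whatever.tex
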
diff --git a/Master/bin/amd64-kfreebsd/mtxrun b/Master/bin/amd64-kfreebsd/mtxrun
index 7adcd3023a8..335c4fcb9e0 100755
--- a/Master/bin/amd64-kfreebsd/mtxrun
+++ b/Master/bin/amd64-kfreebsd/mtxrun
@@ -160,509 +160,6 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['l-lpeg'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local lpeg = require("lpeg")
-
-local type = type
-
--- Beware, we predefine a bunch of patterns here and one reason for doing so
--- is that we get consistent behaviour in some of the visualizers.
-
-lpeg.patterns = lpeg.patterns or { } -- so that we can share
-local patterns = lpeg.patterns
-
-local P, R, S, V, match = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.match
-local Ct, C, Cs, Cc = lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc
-local lpegtype = lpeg.type
-
-local utfcharacters = string.utfcharacters
-local utfgmatch = unicode and unicode.utf8.gmatch
-
-local anything = P(1)
-local endofstring = P(-1)
-local alwaysmatched = P(true)
-
-patterns.anything = anything
-patterns.endofstring = endofstring
-patterns.beginofstring = alwaysmatched
-patterns.alwaysmatched = alwaysmatched
-
-local digit, sign = R('09'), S('+-')
-local cr, lf, crlf = P("\r"), P("\n"), P("\r\n")
-local newline = crlf + cr + lf
-local escaped = P("\\") * anything
-local squote = P("'")
-local dquote = P('"')
-local space = P(" ")
-
-local utfbom_32_be = P('\000\000\254\255')
-local utfbom_32_le = P('\255\254\000\000')
-local utfbom_16_be = P('\255\254')
-local utfbom_16_le = P('\254\255')
-local utfbom_8 = P('\239\187\191')
-local utfbom = utfbom_32_be + utfbom_32_le
- + utfbom_16_be + utfbom_16_le
- + utfbom_8
-local utftype = utfbom_32_be / "utf-32-be" + utfbom_32_le / "utf-32-le"
- + utfbom_16_be / "utf-16-be" + utfbom_16_le / "utf-16-le"
- + utfbom_8 / "utf-8" + alwaysmatched / "unknown"
-
-local utf8next = R("\128\191")
-
-patterns.utf8one = R("\000\127")
-patterns.utf8two = R("\194\223") * utf8next
-patterns.utf8three = R("\224\239") * utf8next * utf8next
-patterns.utf8four = R("\240\244") * utf8next * utf8next * utf8next
-patterns.utfbom = utfbom
-patterns.utftype = utftype
-
-local utf8char = patterns.utf8one + patterns.utf8two + patterns.utf8three + patterns.utf8four
-local validutf8char = utf8char^0 * endofstring * Cc(true) + Cc(false)
-
-patterns.utf8 = utf8char
-patterns.utf8char = utf8char
-patterns.validutf8 = validutf8char
-patterns.validutf8char = validutf8char
-
-patterns.digit = digit
-patterns.sign = sign
-patterns.cardinal = sign^0 * digit^1
-patterns.integer = sign^0 * digit^1
-patterns.float = sign^0 * digit^0 * P('.') * digit^1
-patterns.cfloat = sign^0 * digit^0 * P(',') * digit^1
-patterns.number = patterns.float + patterns.integer
-patterns.cnumber = patterns.cfloat + patterns.integer
-patterns.oct = P("0") * R("07")^1
-patterns.octal = patterns.oct
-patterns.HEX = P("0x") * R("09","AF")^1
-patterns.hex = P("0x") * R("09","af")^1
-patterns.hexadecimal = P("0x") * R("09","AF","af")^1
-patterns.lowercase = R("az")
-patterns.uppercase = R("AZ")
-patterns.letter = patterns.lowercase + patterns.uppercase
-patterns.space = space
-patterns.tab = P("\t")
-patterns.spaceortab = patterns.space + patterns.tab
-patterns.eol = S("\n\r")
-patterns.spacer = S(" \t\f\v") -- + string.char(0xc2, 0xa0) if we want utf (cf mail roberto)
-patterns.newline = newline
-patterns.emptyline = newline^1
-patterns.nonspacer = 1 - patterns.spacer
-patterns.whitespace = patterns.eol + patterns.spacer
-patterns.nonwhitespace = 1 - patterns.whitespace
-patterns.equal = P("=")
-patterns.comma = P(",")
-patterns.commaspacer = P(",") * patterns.spacer^0
-patterns.period = P(".")
-patterns.colon = P(":")
-patterns.semicolon = P(";")
-patterns.underscore = P("_")
-patterns.escaped = escaped
-patterns.squote = squote
-patterns.dquote = dquote
-patterns.nosquote = (escaped + (1-squote))^0
-patterns.nodquote = (escaped + (1-dquote))^0
-patterns.unsingle = (squote/"") * patterns.nosquote * (squote/"")
-patterns.undouble = (dquote/"") * patterns.nodquote * (dquote/"")
-patterns.unquoted = patterns.undouble + patterns.unsingle -- more often undouble
-patterns.unspacer = ((patterns.spacer^1)/"")^0
-
-patterns.somecontent = (anything - newline - space)^1 -- (utf8char - newline - space)^1
-patterns.beginline = #(1-newline)
-
-local unquoted = Cs(patterns.unquoted * endofstring) -- not C
-
-function string.unquoted(str)
- return match(unquoted,str) or str
-end
-
-
-function lpeg.anywhere(pattern) --slightly adapted from website
- return P { P(pattern) + 1 * V(1) } -- why so complex?
-end
-
-function lpeg.splitter(pattern, action)
- return (((1-P(pattern))^1)/action+1)^0
-end
-
-local splitters_s, splitters_m = { }, { }
-
-local function splitat(separator,single)
- local splitter = (single and splitters_s[separator]) or splitters_m[separator]
- if not splitter then
- separator = P(separator)
- local other = C((1 - separator)^0)
- if single then
- local any = anything
- splitter = other * (separator * C(any^0) + "") -- ?
- splitters_s[separator] = splitter
- else
- splitter = other * (separator * other)^0
- splitters_m[separator] = splitter
- end
- end
- return splitter
-end
-
-lpeg.splitat = splitat
-
-
-local cache = { }
-
-function lpeg.split(separator,str)
- local c = cache[separator]
- if not c then
- c = Ct(splitat(separator))
- cache[separator] = c
- end
- return match(c,str)
-end
-
-function string.split(str,separator)
- local c = cache[separator]
- if not c then
- c = Ct(splitat(separator))
- cache[separator] = c
- end
- return match(c,str)
-end
-
-local spacing = patterns.spacer^0 * newline -- sort of strip
-local empty = spacing * Cc("")
-local nonempty = Cs((1-spacing)^1) * spacing^-1
-local content = (empty + nonempty)^1
-
-patterns.textline = content
-
-
-local linesplitter = Ct(splitat(newline))
-
-patterns.linesplitter = linesplitter
-
-function string.splitlines(str)
- return match(linesplitter,str)
-end
-
-local utflinesplitter = utfbom^-1 * Ct(splitat(newline))
-
-patterns.utflinesplitter = utflinesplitter
-
-function string.utfsplitlines(str)
- return match(utflinesplitter,str)
-end
-
-
-local cache = { }
-
-function lpeg.checkedsplit(separator,str)
- local c = cache[separator]
- if not c then
- separator = P(separator)
- local other = C((1 - separator)^1)
- c = Ct(separator^0 * other * (separator^1 * other)^0)
- cache[separator] = c
- end
- return match(c,str)
-end
-
-function string.checkedsplit(str,separator)
- local c = cache[separator]
- if not c then
- separator = P(separator)
- local other = C((1 - separator)^1)
- c = Ct(separator^0 * other * (separator^1 * other)^0)
- cache[separator] = c
- end
- return match(c,str)
-end
-
-
-local f1 = string.byte
-
-local function f2(s) local c1, c2 = f1(s,1,2) return c1 * 64 + c2 - 12416 end
-local function f3(s) local c1, c2, c3 = f1(s,1,3) return (c1 * 64 + c2) * 64 + c3 - 925824 end
-local function f4(s) local c1, c2, c3, c4 = f1(s,1,4) return ((c1 * 64 + c2) * 64 + c3) * 64 + c4 - 63447168 end
-
-local utf8byte = patterns.utf8one/f1 + patterns.utf8two/f2 + patterns.utf8three/f3 + patterns.utf8four/f4
-
-patterns.utf8byte = utf8byte
-
-
-
-local cache = { }
-
-function lpeg.stripper(str)
- if type(str) == "string" then
- local s = cache[str]
- if not s then
- s = Cs(((S(str)^1)/"" + 1)^0)
- cache[str] = s
- end
- return s
- else
- return Cs(((str^1)/"" + 1)^0)
- end
-end
-
-local cache = { }
-
-function lpeg.keeper(str)
- if type(str) == "string" then
- local s = cache[str]
- if not s then
- s = Cs((((1-S(str))^1)/"" + 1)^0)
- cache[str] = s
- end
- return s
- else
- return Cs((((1-str)^1)/"" + 1)^0)
- end
-end
-
-function lpeg.frontstripper(str) -- or pattern (yet undocumented)
- return (P(str) + P(true)) * Cs(P(1)^0)
-end
-
-function lpeg.endstripper(str) -- or pattern (yet undocumented)
- return Cs((1 - P(str) * P(-1))^0)
-end
-
--- Just for fun I looked at the used bytecode and
--- p = (p and p + pp) or pp gets one more (testset).
-
-function lpeg.replacer(one,two)
- if type(one) == "table" then
- local no = #one
- if no > 0 then
- local p
- for i=1,no do
- local o = one[i]
- local pp = P(o[1]) / o[2]
- if p then
- p = p + pp
- else
- p = pp
- end
- end
- return Cs((p + 1)^0)
- end
- else
- two = two or ""
- return Cs((P(one)/two + 1)^0)
- end
-end
-
-local splitters_f, splitters_s = { }, { }
-
-function lpeg.firstofsplit(separator) -- always return value
- local splitter = splitters_f[separator]
- if not splitter then
- separator = P(separator)
- splitter = C((1 - separator)^0)
- splitters_f[separator] = splitter
- end
- return splitter
-end
-
-function lpeg.secondofsplit(separator) -- nil if not split
- local splitter = splitters_s[separator]
- if not splitter then
- separator = P(separator)
- splitter = (1 - separator)^0 * separator * C(anything^0)
- splitters_s[separator] = splitter
- end
- return splitter
-end
-
-function lpeg.balancer(left,right)
- left, right = P(left), P(right)
- return P { left * ((1 - left - right) + V(1))^0 * right }
-end
-
-
-
-local nany = utf8char/""
-
-function lpeg.counter(pattern)
- pattern = Cs((P(pattern)/" " + nany)^0)
- return function(str)
- return #match(pattern,str)
- end
-end
-
-if utfgmatch then
-
- function lpeg.count(str,what) -- replaces string.count
- if type(what) == "string" then
- local n = 0
- for _ in utfgmatch(str,what) do
- n = n + 1
- end
- return n
- else -- 4 times slower but still faster than / function
- return #match(Cs((P(what)/" " + nany)^0),str)
- end
- end
-
-else
-
- local cache = { }
-
- function lpeg.count(str,what) -- replaces string.count
- if type(what) == "string" then
- local p = cache[what]
- if not p then
- p = Cs((P(what)/" " + nany)^0)
- cache[p] = p
- end
- return #match(p,str)
- else -- 4 times slower but still faster than / function
- return #match(Cs((P(what)/" " + nany)^0),str)
- end
- end
-
-end
-
-local patterns_escapes = { -- also defines in l-string
- ["%"] = "%%",
- ["."] = "%.",
- ["+"] = "%+", ["-"] = "%-", ["*"] = "%*",
- ["["] = "%[", ["]"] = "%]",
- ["("] = "%)", [")"] = "%)",
- -- ["{"] = "%{", ["}"] = "%}"
- -- ["^"] = "%^", ["$"] = "%$",
-}
-
-local simple_escapes = { -- also defines in l-string
- ["-"] = "%-",
- ["."] = "%.",
- ["?"] = ".",
- ["*"] = ".*",
-}
-
-local p = Cs((S("-.+*%()[]") / patterns_escapes + anything)^0)
-local s = Cs((S("-.+*%()[]") / simple_escapes + anything)^0)
-
-function string.escapedpattern(str,simple)
- return match(simple and s or p,str)
-end
-
--- utf extensies
-
-lpeg.UP = lpeg.P
-
-if utfcharacters then
-
- function lpeg.US(str)
- local p
- for uc in utfcharacters(str) do
- if p then
- p = p + P(uc)
- else
- p = P(uc)
- end
- end
- return p
- end
-
-
-elseif utfgmatch then
-
- function lpeg.US(str)
- local p
- for uc in utfgmatch(str,".") do
- if p then
- p = p + P(uc)
- else
- p = P(uc)
- end
- end
- return p
- end
-
-else
-
- function lpeg.US(str)
- local p
- local f = function(uc)
- if p then
- p = p + P(uc)
- else
- p = P(uc)
- end
- end
- match((utf8char/f)^0,str)
- return p
- end
-
-end
-
-local range = Cs(utf8byte) * (Cs(utf8byte) + Cc(false))
-
-local utfchar = unicode and unicode.utf8 and unicode.utf8.char
-
-function lpeg.UR(str,more)
- local first, last
- if type(str) == "number" then
- first = str
- last = more or first
- else
- first, last = match(range,str)
- if not last then
- return P(str)
- end
- end
- if first == last then
- return P(str)
- elseif utfchar and last - first < 8 then -- a somewhat arbitrary criterium
- local p
- for i=first,last do
- if p then
- p = p + P(utfchar(i))
- else
- p = P(utfchar(i))
- end
- end
- return p -- nil when invalid range
- else
- local f = function(b)
- return b >= first and b <= last
- end
- return utf8byte / f -- nil when invalid range
- end
-end
-
-
-
-function lpeg.oneof(list,...) -- lpeg.oneof("elseif","else","if","then")
- if type(list) ~= "table" then
- list = { list, ... }
- end
- -- sort(list) -- longest match first
- local p = P(list[1])
- for l=2,#list do
- p = p + P(list[l])
- end
- return p
-end
-
-function lpeg.is_lpeg(p)
- return p and lpegtype(p) == "pattern"
-end
-
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
if not modules then modules = { } end modules ['l-table'] = {
version = 1.001,
comment = "companion to luat-lib.mkiv",
@@ -675,11 +172,12 @@ local type, next, tostring, tonumber, ipairs, table, string = type, next, tostri
local concat, sort, insert, remove = table.concat, table.sort, table.insert, table.remove
local format, find, gsub, lower, dump, match = string.format, string.find, string.gsub, string.lower, string.dump, string.match
local getmetatable, setmetatable = getmetatable, setmetatable
+local getinfo = debug.getinfo
 -- Starting with version 5.2 Lua no longer provides ipairs, which makes
-- sense. As we already used the for loop and # in most places the
-- impact on ConTeXt was not that large; the remaining ipairs already
--- have been replaced. In a similar fashio we also hardly used pairs.
+-- have been replaced. In a similar fashion we also hardly used pairs.
--
-- Just in case, we provide the fallbacks as discussed in Programming
-- in Lua (http://www.lua.org/pil/7.3.html):
@@ -1022,6 +520,8 @@ end
 -- problem: there is no good number_to_string converter with the best resolution
+local function dummy() end
+
local function do_serialize(root,name,depth,level,indexed)
if level > 0 then
depth = depth .. " "
@@ -1214,19 +714,20 @@ local function do_serialize(root,name,depth,level,indexed)
end
elseif t == "function" then
if functions then
+ local f = getinfo(v).what == "C" and dump(dummy) or dump(v)
+ -- local f = getinfo(v).what == "C" and dump(function(...) return v(...) end) or dump(v)
if tk == "number" then -- or find(k,"^%d+$") then
if hexify then
- handle(format("%s [0x%04X]=loadstring(%q),",depth,k,dump(v)))
+ handle(format("%s [0x%04X]=loadstring(%q),",depth,k,f))
else
- handle(format("%s [%s]=loadstring(%q),",depth,k,dump(v)))
+ handle(format("%s [%s]=loadstring(%q),",depth,k,f))
end
elseif tk == "boolean" then
- handle(format("%s [%s]=loadstring(%q),",depth,tostring(k),dump(v)))
+ handle(format("%s [%s]=loadstring(%q),",depth,tostring(k),f))
elseif noquotes and not reserved[k] and find(k,"^%a[%w%_]*$") then
- handle(format("%s %s=loadstring(%q),",depth,k,dump(v)))
+ handle(format("%s %s=loadstring(%q),",depth,k,f))
else
- -- handle(format("%s [%q]=loadstring(%q),",depth,k,dump(v)))
- handle(format("%s [%q]=loadstring(%q),",depth,k,debug.getinfo(v).what == "C" and "C code" or dump(v)))
+ handle(format("%s [%q]=loadstring(%q),",depth,k,f))
end
end
else
@@ -1571,6 +1072,730 @@ function table.has_one_entry(t)
return t and not next(t,next(t))
end
+-- new
+
+function table.loweredkeys(t) -- maybe utf
+ local l = { }
+ for k, v in next, t do
+ l[lower(k)] = v
+ end
+ return l
+end
+
+-- new, might move (maybe duplicate)
+
+function table.unique(old)
+ local hash = { }
+ local new = { }
+ local n = 0
+ for i=1,#old do
+ local oi = old[i]
+ if not hash[oi] then
+ n = n + 1
+ new[n] = oi
+ hash[oi] = true
+ end
+ end
+ return new
+end
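+
+-- for illustration (results as implied by the definitions above):
+--
+-- table.loweredkeys { Foo = 1, BAR = 2 }  -- { foo = 1, bar = 2 }
+-- table.unique { "a", "b", "a", "c" }     -- { "a", "b", "c" }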
+
+-- function table.sorted(t,...)
+-- table.sort(t,...)
+-- return t -- still sorts in-place
+-- end
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+if not modules then modules = { } end modules ['l-lpeg'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+
+-- a new lpeg fails on a #(1-P(":")) test and really needs a + P(-1)
+
+local lpeg = require("lpeg")
+
+-- tracing (only used when we encounter a problem in integration of lpeg in luatex)
+
+local report = texio and texio.write_nl or print
+
+
+
+
+local type = type
+local byte, char, gmatch = string.byte, string.char, string.gmatch
+
+-- Beware, we predefine a bunch of patterns here and one reason for doing so
+-- is that we get consistent behaviour in some of the visualizers.
+
+lpeg.patterns = lpeg.patterns or { } -- so that we can share
+local patterns = lpeg.patterns
+
+local P, R, S, V, match = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.match
+local Ct, C, Cs, Cc = lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc
+local lpegtype = lpeg.type
+
+local utfcharacters = string.utfcharacters
+local utfgmatch = unicode and unicode.utf8.gmatch
+
+local anything = P(1)
+local endofstring = P(-1)
+local alwaysmatched = P(true)
+
+patterns.anything = anything
+patterns.endofstring = endofstring
+patterns.beginofstring = alwaysmatched
+patterns.alwaysmatched = alwaysmatched
+
+local digit, sign = R('09'), S('+-')
+local cr, lf, crlf = P("\r"), P("\n"), P("\r\n")
+local newline = crlf + S("\r\n") -- cr + lf
+local escaped = P("\\") * anything
+local squote = P("'")
+local dquote = P('"')
+local space = P(" ")
+
+local utfbom_32_be = P('\000\000\254\255')
+local utfbom_32_le = P('\255\254\000\000')
+local utfbom_16_be = P('\255\254')
+local utfbom_16_le = P('\254\255')
+local utfbom_8 = P('\239\187\191')
+local utfbom = utfbom_32_be + utfbom_32_le
+ + utfbom_16_be + utfbom_16_le
+ + utfbom_8
+local utftype = utfbom_32_be / "utf-32-be" + utfbom_32_le / "utf-32-le"
+ + utfbom_16_be / "utf-16-be" + utfbom_16_le / "utf-16-le"
+ + utfbom_8 / "utf-8" + alwaysmatched / "unknown"
+
+local utf8next = R("\128\191")
+
+patterns.utf8one = R("\000\127")
+patterns.utf8two = R("\194\223") * utf8next
+patterns.utf8three = R("\224\239") * utf8next * utf8next
+patterns.utf8four = R("\240\244") * utf8next * utf8next * utf8next
+patterns.utfbom = utfbom
+patterns.utftype = utftype
+
+local utf8char = patterns.utf8one + patterns.utf8two + patterns.utf8three + patterns.utf8four
+local validutf8char = utf8char^0 * endofstring * Cc(true) + Cc(false)
+
+patterns.utf8 = utf8char
+patterns.utf8char = utf8char
+patterns.validutf8 = validutf8char
+patterns.validutf8char = validutf8char
+
+patterns.digit = digit
+patterns.sign = sign
+patterns.cardinal = sign^0 * digit^1
+patterns.integer = sign^0 * digit^1
+patterns.float = sign^0 * digit^0 * P('.') * digit^1
+patterns.cfloat = sign^0 * digit^0 * P(',') * digit^1
+patterns.number = patterns.float + patterns.integer
+patterns.cnumber = patterns.cfloat + patterns.integer
+patterns.oct = P("0") * R("07")^1
+patterns.octal = patterns.oct
+patterns.HEX = P("0x") * R("09","AF")^1
+patterns.hex = P("0x") * R("09","af")^1
+patterns.hexadecimal = P("0x") * R("09","AF","af")^1
+patterns.lowercase = R("az")
+patterns.uppercase = R("AZ")
+patterns.letter = patterns.lowercase + patterns.uppercase
+patterns.space = space
+patterns.tab = P("\t")
+patterns.spaceortab = patterns.space + patterns.tab
+patterns.eol = S("\n\r")
+patterns.spacer = S(" \t\f\v") -- + char(0xc2, 0xa0) if we want utf (cf mail roberto)
+patterns.newline = newline
+patterns.emptyline = newline^1
+patterns.nonspacer = 1 - patterns.spacer
+patterns.whitespace = patterns.eol + patterns.spacer
+patterns.nonwhitespace = 1 - patterns.whitespace
+patterns.equal = P("=")
+patterns.comma = P(",")
+patterns.commaspacer = P(",") * patterns.spacer^0
+patterns.period = P(".")
+patterns.colon = P(":")
+patterns.semicolon = P(";")
+patterns.underscore = P("_")
+patterns.escaped = escaped
+patterns.squote = squote
+patterns.dquote = dquote
+patterns.nosquote = (escaped + (1-squote))^0
+patterns.nodquote = (escaped + (1-dquote))^0
+patterns.unsingle = (squote/"") * patterns.nosquote * (squote/"")
+patterns.undouble = (dquote/"") * patterns.nodquote * (dquote/"")
+patterns.unquoted = patterns.undouble + patterns.unsingle -- more often undouble
+patterns.unspacer = ((patterns.spacer^1)/"")^0
+
+patterns.somecontent = (anything - newline - space)^1 -- (utf8char - newline - space)^1
+patterns.beginline = #(1-newline)
+
+-- print(string.unquoted("test"))
+-- print(string.unquoted([["t\"est"]]))
+-- print(string.unquoted([["t\"est"x]]))
+-- print(string.unquoted("\'test\'"))
+-- print(string.unquoted('"test"'))
+-- print(string.unquoted('"test"'))
+
+function lpeg.anywhere(pattern) -- slightly adapted from the website
+ return P { P(pattern) + 1 * V(1) } -- why so complex?
+end
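+
+-- for illustration: match returns the position just after the first occurrence
+--
+-- lpeg.match(lpeg.anywhere("bar"),"foobarbaz") -- 7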
+
+function lpeg.splitter(pattern, action)
+ return (((1-P(pattern))^1)/action+1)^0
+end
+
+function lpeg.tsplitter(pattern, action)
+ return Ct((((1-P(pattern))^1)/action+1)^0)
+end
+
+-- problem: the separator can be an lpeg pattern and that does not hash too well, but
+-- it's quite okay as the key is then not garbage collected
+
+local splitters_s, splitters_m, splitters_t = { }, { }, { }
+
+local function splitat(separator,single)
+ local splitter = (single and splitters_s[separator]) or splitters_m[separator]
+ if not splitter then
+ separator = P(separator)
+ local other = C((1 - separator)^0)
+ if single then
+ local any = anything
+ splitter = other * (separator * C(any^0) + "") -- ?
+ splitters_s[separator] = splitter
+ else
+ splitter = other * (separator * other)^0
+ splitters_m[separator] = splitter
+ end
+ end
+ return splitter
+end
+
+local function tsplitat(separator)
+ local splitter = splitters_t[separator]
+ if not splitter then
+ splitter = Ct(splitat(separator))
+ splitters_t[separator] = splitter
+ end
+ return splitter
+end
+
+lpeg.splitat = splitat
+lpeg.tsplitat = tsplitat
+
+function string.splitup(str,separator)
+ if not separator then
+ separator = ","
+ end
+ return match(splitters_m[separator] or splitat(separator),str)
+end
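+
+-- the splitters are cached per separator; for illustration:
+--
+-- lpeg.match(lpeg.splitat("="),"key=value") -- "key", "value"
+-- string.splitup("a,b,c")                   -- "a", "b", "c"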
+
+
+local cache = { }
+
+function lpeg.split(separator,str)
+ local c = cache[separator]
+ if not c then
+ c = tsplitat(separator)
+ cache[separator] = c
+ end
+ return match(c,str)
+end
+
+function string.split(str,separator)
+ local c = cache[separator]
+ if not c then
+ c = tsplitat(separator)
+ cache[separator] = c
+ end
+ return match(c,str)
+end
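+
+-- these variants return tables instead of multiple values; for illustration:
+--
+-- lpeg.split(",","a,b,c")   -- { "a", "b", "c" }
+-- string.split("a;;b",";")  -- { "a", "", "b" } (empty fields are kept)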
+
+local spacing = patterns.spacer^0 * newline -- sort of strip
+local empty = spacing * Cc("")
+local nonempty = Cs((1-spacing)^1) * spacing^-1
+local content = (empty + nonempty)^1
+
+patterns.textline = content
+
+
+local linesplitter = tsplitat(newline)
+
+patterns.linesplitter = linesplitter
+
+function string.splitlines(str)
+ return match(linesplitter,str)
+end
+
+local utflinesplitter = utfbom^-1 * tsplitat(newline)
+
+patterns.utflinesplitter = utflinesplitter
+
+function string.utfsplitlines(str)
+ return match(utflinesplitter,str or "")
+end
+
+
+local cache = { }
+
+function lpeg.checkedsplit(separator,str)
+ local c = cache[separator]
+ if not c then
+ separator = P(separator)
+ local other = C((1 - separator)^1)
+ c = Ct(separator^0 * other * (separator^1 * other)^0)
+ cache[separator] = c
+ end
+ return match(c,str)
+end
+
+function string.checkedsplit(str,separator)
+ local c = cache[separator]
+ if not c then
+ separator = P(separator)
+ local other = C((1 - separator)^1)
+ c = Ct(separator^0 * other * (separator^1 * other)^0)
+ cache[separator] = c
+ end
+ return match(c,str)
+end
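+
+-- unlike the plain splitters above, the checked variants skip empty fields;
+-- for illustration:
+--
+-- lpeg.checkedsplit(",",",a,,b,")  -- { "a", "b" }
+-- string.checkedsplit("a;;b",";")  -- { "a", "b" }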
+
+
+local function f2(s) local c1, c2 = byte(s,1,2) return c1 * 64 + c2 - 12416 end
+local function f3(s) local c1, c2, c3 = byte(s,1,3) return (c1 * 64 + c2) * 64 + c3 - 925824 end
+local function f4(s) local c1, c2, c3, c4 = byte(s,1,4) return ((c1 * 64 + c2) * 64 + c3) * 64 + c4 - 63447168 end
+
+local utf8byte = patterns.utf8one/byte + patterns.utf8two/f2 + patterns.utf8three/f3 + patterns.utf8four/f4
+
+patterns.utf8byte = utf8byte
+
+
+
+local cache = { }
+
+function lpeg.stripper(str)
+ if type(str) == "string" then
+ local s = cache[str]
+ if not s then
+ s = Cs(((S(str)^1)/"" + 1)^0)
+ cache[str] = s
+ end
+ return s
+ else
+ return Cs(((str^1)/"" + 1)^0)
+ end
+end
+
+local cache = { }
+
+function lpeg.keeper(str)
+ if type(str) == "string" then
+ local s = cache[str]
+ if not s then
+ s = Cs((((1-S(str))^1)/"" + 1)^0)
+ cache[str] = s
+ end
+ return s
+ else
+ return Cs((((1-str)^1)/"" + 1)^0)
+ end
+end
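+
+-- strippers remove the given characters, keepers keep only those; for
+-- illustration:
+--
+-- lpeg.match(lpeg.stripper("- "),"foo - bar")           -- "foobar"
+-- lpeg.match(lpeg.keeper("0123456789"),"tel: 123-456")  -- "123456"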
+
+function lpeg.frontstripper(str) -- or pattern (yet undocumented)
+ return (P(str) + P(true)) * Cs(P(1)^0)
+end
+
+function lpeg.endstripper(str) -- or pattern (yet undocumented)
+ return Cs((1 - P(str) * P(-1))^0)
+end
+
+-- Just for fun I looked at the used bytecode and
+-- p = (p and p + pp) or pp gets one more (testset).
+
+function lpeg.replacer(one,two)
+ if type(one) == "table" then
+ local no = #one
+ if no > 0 then
+ local p
+ for i=1,no do
+ local o = one[i]
+ local pp = P(o[1]) / o[2]
+ if p then
+ p = p + pp
+ else
+ p = pp
+ end
+ end
+ return Cs((p + 1)^0)
+ end
+ else
+ two = two or ""
+ return Cs((P(one)/two + 1)^0)
+ end
+end
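+
+-- for illustration:
+--
+-- lpeg.match(lpeg.replacer("foo","bar"),"foo and foo")                   -- "bar and bar"
+-- lpeg.match(lpeg.replacer { { "<", "&lt;" }, { ">", "&gt;" } },"<tag>") -- "&lt;tag&gt;"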
+
+local splitters_f, splitters_s = { }, { }
+
+function lpeg.firstofsplit(separator) -- always return value
+ local splitter = splitters_f[separator]
+ if not splitter then
+ separator = P(separator)
+ splitter = C((1 - separator)^0)
+ splitters_f[separator] = splitter
+ end
+ return splitter
+end
+
+function lpeg.secondofsplit(separator) -- nil if not split
+ local splitter = splitters_s[separator]
+ if not splitter then
+ separator = P(separator)
+ splitter = (1 - separator)^0 * separator * C(anything^0)
+ splitters_s[separator] = splitter
+ end
+ return splitter
+end
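+
+-- handy for key=value pairs; for illustration:
+--
+-- lpeg.match(lpeg.firstofsplit("="),"key=value")  -- "key"
+-- lpeg.match(lpeg.secondofsplit("="),"key=value") -- "value"
+-- lpeg.match(lpeg.secondofsplit("="),"keyvalue")  -- nil (no separator found)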
+
+function lpeg.balancer(left,right)
+ left, right = P(left), P(right)
+ return P { left * ((1 - left - right) + V(1))^0 * right }
+end
+
+
+
+local nany = utf8char/""
+
+function lpeg.counter(pattern)
+ pattern = Cs((P(pattern)/" " + nany)^0)
+ return function(str)
+ return #match(pattern,str)
+ end
+end
+
+if utfgmatch then
+
+ function lpeg.count(str,what) -- replaces string.count
+ if type(what) == "string" then
+ local n = 0
+ for _ in utfgmatch(str,what) do
+ n = n + 1
+ end
+ return n
+ else -- 4 times slower but still faster than / function
+ return #match(Cs((P(what)/" " + nany)^0),str)
+ end
+ end
+
+else
+
+ local cache = { }
+
+ function lpeg.count(str,what) -- replaces string.count
+ if type(what) == "string" then
+ local p = cache[what]
+ if not p then
+ p = Cs((P(what)/" " + nany)^0)
+ cache[what] = p -- index the cache by the string, not by the pattern
+ end
+ return #match(p,str)
+ else -- 4 times slower but still faster than / function
+ return #match(Cs((P(what)/" " + nany)^0),str)
+ end
+ end
+
+end
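+
+-- for illustration: counter returns a counting function, count works directly
+--
+-- lpeg.counter(",")("a,b,c") -- 2
+-- lpeg.count("a,b,c",",")    -- 2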
+
+local patterns_escapes = { -- also defined in l-string
+ ["%"] = "%%",
+ ["."] = "%.",
+ ["+"] = "%+", ["-"] = "%-", ["*"] = "%*",
+ ["["] = "%[", ["]"] = "%]",
+ ["("] = "%(", [")"] = "%)",
+ -- ["{"] = "%{", ["}"] = "%}"
+ -- ["^"] = "%^", ["$"] = "%$",
+}
+
+local simple_escapes = { -- also defined in l-string
+ ["-"] = "%-",
+ ["."] = "%.",
+ ["?"] = ".",
+ ["*"] = ".*",
+}
+
+local p = Cs((S("-.+*%()[]") / patterns_escapes + anything)^0)
+local s = Cs((S("-.+*%()[]") / simple_escapes + anything)^0)
+
+function string.escapedpattern(str,simple)
+ return match(simple and s or p,str)
+end
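+
+-- for illustration:
+--
+-- string.escapedpattern("1.2-3")      -- "1%.2%-3"
+-- string.escapedpattern("*.tex",true) -- ".*%.tex" (simple mode keeps wildcards)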
+
+-- utf extensions
+
+lpeg.UP = lpeg.P
+
+if utfcharacters then
+
+ function lpeg.US(str)
+ local p
+ for uc in utfcharacters(str) do
+ if p then
+ p = p + P(uc)
+ else
+ p = P(uc)
+ end
+ end
+ return p
+ end
+
+
+elseif utfgmatch then
+
+ function lpeg.US(str)
+ local p
+ for uc in utfgmatch(str,".") do
+ if p then
+ p = p + P(uc)
+ else
+ p = P(uc)
+ end
+ end
+ return p
+ end
+
+else
+
+ function lpeg.US(str)
+ local p
+ local f = function(uc)
+ if p then
+ p = p + P(uc)
+ else
+ p = P(uc)
+ end
+ end
+ match((utf8char/f)^0,str)
+ return p
+ end
+
+end
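+
+-- lpeg.US builds a set pattern from a utf string; for illustration:
+--
+-- lpeg.match(lpeg.US("abc")^1,"cab") -- 4 (all three characters matched)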
+
+local range = Cs(utf8byte) * (Cs(utf8byte) + Cc(false))
+
+local utfchar = unicode and unicode.utf8 and unicode.utf8.char
+
+function lpeg.UR(str,more)
+ local first, last
+ if type(str) == "number" then
+ first = str
+ last = more or first
+ else
+ first, last = match(range,str)
+ if not last then
+ return P(str)
+ end
+ end
+ if first == last then
+ return P(str)
+ elseif utfchar and last - first < 8 then -- a somewhat arbitrary criterion
+ local p
+ for i=first,last do
+ if p then
+ p = p + P(utfchar(i))
+ else
+ p = P(utfchar(i))
+ end
+ end
+ return p -- nil when invalid range
+ else
+ local f = function(b)
+ return b >= first and b <= last
+ end
+ return utf8byte / f -- nil when invalid range
+ end
+end
+
+
+
+function lpeg.oneof(list,...) -- lpeg.oneof("elseif","else","if","then")
+ if type(list) ~= "table" then
+ list = { list, ... }
+ end
+ -- sort(list) -- longest match first
+ local p = P(list[1])
+ for l=2,#list do
+ p = p + P(list[l])
+ end
+ return p
+end
+
+function lpeg.is_lpeg(p)
+ return p and lpegtype(p) == "pattern"
+end
+
+-- For the moment here, but it might move to utilities. Beware, we need to
+-- have the longest keyword first, so 'aaa' comes before 'aa', which is why we
+-- loop back from the end, i.e. prepend.
+
+local sort, fastcopy, sortedkeys = table.sort, table.fastcopy, table.sortedkeys -- dependency!
+
+function lpeg.append(list,pp,delayed,checked)
+ local p = pp
+ if #list > 0 then
+ local keys = fastcopy(list)
+ sort(keys)
+ for i=#keys,1,-1 do
+ local k = keys[i]
+ if p then
+ p = P(k) + p
+ else
+ p = P(k)
+ end
+ end
+ elseif delayed then -- hm, it looks like the lpeg parser resolves anyway
+ local keys = sortedkeys(list)
+ if p then
+ for i=1,#keys,1 do
+ local k = keys[i]
+ local v = list[k]
+ p = P(k)/list + p
+ end
+ else
+ for i=1,#keys do
+ local k = keys[i]
+ local v = list[k]
+ if p then
+ p = P(k) + p
+ else
+ p = P(k)
+ end
+ end
+ if p then
+ p = p / list
+ end
+ end
+ elseif checked then
+ -- problem: substitution gives a capture
+ local keys = sortedkeys(list)
+ for i=1,#keys do
+ local k = keys[i]
+ local v = list[k]
+ if p then
+ if k == v then
+ p = P(k) + p
+ else
+ p = P(k)/v + p
+ end
+ else
+ if k == v then
+ p = P(k)
+ else
+ p = P(k)/v
+ end
+ end
+ end
+ else
+ local keys = sortedkeys(list)
+ for i=1,#keys do
+ local k = keys[i]
+ local v = list[k]
+ if p then
+ p = P(k)/v + p
+ else
+ p = P(k)/v
+ end
+ end
+ end
+ return p
+end
+
+-- inspect(lpeg.append({ a = "1", aa = "1", aaa = "1" } ,nil,true))
+-- inspect(lpeg.append({ ["degree celsius"] = "1", celsius = "1", degree = "1" } ,nil,true))
+
+-- function lpeg.exact_match(words,case_insensitive)
+-- local pattern = concat(words)
+-- if case_insensitive then
+-- local pattern = S(upper(characters)) + S(lower(characters))
+-- local list = { }
+-- for i=1,#words do
+-- list[lower(words[i])] = true
+-- end
+-- return Cmt(pattern^1, function(_,i,s)
+-- return list[lower(s)] and i
+-- end)
+-- else
+-- local pattern = S(concat(words))
+-- local list = { }
+-- for i=1,#words do
+-- list[words[i]] = true
+-- end
+-- return Cmt(pattern^1, function(_,i,s)
+-- return list[s] and i
+-- end)
+-- end
+-- end
+
+-- experiment:
+
+local function make(t)
+ local p
+-- for k, v in next, t do
+ for k, v in table.sortedhash(t) do
+ if not p then
+ if next(v) then
+ p = P(k) * make(v)
+ else
+ p = P(k)
+ end
+ else
+ if next(v) then
+ p = p + P(k) * make(v)
+ else
+ p = p + P(k)
+ end
+ end
+ end
+ return p
+end
+
+function lpeg.utfchartabletopattern(list)
+ local tree = { }
+ for i=1,#list do
+ local t = tree
+ for c in gmatch(list[i],".") do
+ if not t[c] then
+ t[c] = { }
+ end
+ t = t[c]
+ end
+ end
+ return make(tree)
+end
+
+-- inspect ( lpeg.utfchartabletopattern {
+-- utfchar(0x00A0), -- nbsp
+-- utfchar(0x2000), -- enquad
+-- utfchar(0x2001), -- emquad
+-- utfchar(0x2002), -- enspace
+-- utfchar(0x2003), -- emspace
+-- utfchar(0x2004), -- threeperemspace
+-- utfchar(0x2005), -- fourperemspace
+-- utfchar(0x2006), -- sixperemspace
+-- utfchar(0x2007), -- figurespace
+-- utfchar(0x2008), -- punctuationspace
+-- utfchar(0x2009), -- breakablethinspace
+-- utfchar(0x200A), -- hairspace
+-- utfchar(0x200B), -- zerowidthspace
+-- utfchar(0x202F), -- narrownobreakspace
+-- utfchar(0x205F), -- math thinspace
+-- } )
+
end -- of closure
@@ -1831,6 +2056,11 @@ function io.readstring(f,n,m)
return str
end
+--
+
+if not io.i_limiter then function io.i_limiter() end end -- dummy so we can test safely
+if not io.o_limiter then function io.o_limiter() end end -- dummy so we can test safely
+
end -- of closure
@@ -2061,7 +2291,7 @@ if not modules then modules = { } end modules ['l-os'] = {
-- maybe build io.flush in os.execute
local os = os
-local find, format, gsub, upper = string.find, string.format, string.gsub, string.upper
+local find, format, gsub, upper, gmatch = string.find, string.format, string.gsub, string.upper, string.gmatch
local concat = table.concat
local random, ceil = math.random, math.ceil
local rawget, rawset, type, getmetatable, setmetatable, tonumber = rawget, rawset, type, getmetatable, setmetatable, tonumber
@@ -2167,9 +2397,9 @@ os.type = os.type or (io.pathseparator == ";" and "windows") or "unix"
os.name = os.name or (os.type == "windows" and "mswin" ) or "linux"
if os.type == "windows" then
- os.libsuffix, os.binsuffix = 'dll', 'exe'
+ os.libsuffix, os.binsuffix, os.binsuffixes = 'dll', 'exe', { 'exe', 'cmd', 'bat' }
else
- os.libsuffix, os.binsuffix = 'so', ''
+ os.libsuffix, os.binsuffix, os.binsuffixes = 'so', '', { '' }
end
function os.launch(str)
@@ -2400,6 +2630,39 @@ function os.timezone(delta)
end
end
+local memory = { }
+
+local function which(filename)
+ local fullname = memory[filename]
+ if fullname == nil then
+ local suffix = file.suffix(filename)
+ local suffixes = suffix == "" and os.binsuffixes or { suffix }
+ for directory in gmatch(os.getenv("PATH"),"[^" .. io.pathseparator .."]+") do
+ local df = file.join(directory,filename)
+ for i=1,#suffixes do
+ local dfs = file.addsuffix(df,suffixes[i])
+ if io.exists(dfs) then
+ fullname = dfs
+ break
+ end
+ end
+ end
+ if not fullname then
+ fullname = false
+ end
+ memory[filename] = fullname
+ end
+ return fullname
+end
+
+os.which = which
+os.where = which
+
+-- print(os.which("inkscape.exe"))
+-- print(os.which("inkscape"))
+-- print(os.which("gs.exe"))
+-- print(os.which("ps2pdf"))
+
end -- of closure
@@ -2499,10 +2762,10 @@ end
local trick_1 = char(1)
local trick_2 = "^" .. trick_1 .. "/+"
-function file.join(...)
+function file.join(...) -- rather dirty
local lst = { ... }
local a, b = lst[1], lst[2]
- if a == "" then
+ if not a or a == "" then -- the 'not a' check is new
lst[1] = trick_1
elseif b and find(a,"^/+$") and find(b,"^/") then
lst[1] = ""
@@ -2523,6 +2786,15 @@ function file.join(...)
end
+-- We should be able to use:
+--
+-- function file.is_writable(name)
+-- local a = attributes(name) or attributes(dirname(name,"."))
+-- return a and sub(a.permissions,2,2) == "w"
+-- end
+--
+-- But after some testing Taco and I came up with:
+
function file.is_writable(name)
if lfs.isdir(name) then
name = name .. "/m_t_x_t_e_s_t.tmp"
@@ -2532,12 +2804,17 @@ function file.is_writable(name)
os.remove(name)
return true
end
- else
- local existing = lfs.isfile(name)
- f = io.open(name,"ab")
+ elseif lfs.isfile(name) then
+ local f = io.open(name,"ab")
if f then
f:close()
- if not existing then os.remove(name) end
+ return true
+ end
+ else
+ local f = io.open(name,"ab")
+ if f then
+ f:close()
+ os.remove(name)
return true
end
end
@@ -2594,7 +2871,7 @@ function file.collapsepath(str,anchor)
if element == '.' then
-- do nothing
elseif element == '..' then
- local n = i -1
+ local n = i - 1
while n > 0 do
local element = oldelements[n]
if element ~= '..' and element ~= '.' then
@@ -2690,10 +2967,42 @@ local path = C(((1-slash)^0 * slash)^0)
local suffix = period * C(P(1-period)^0 * P(-1))
local base = C((1-suffix)^0)
-local pattern = (drive + Cc("")) * (path + Cc("")) * (base + Cc("")) * (suffix + Cc(""))
+drive = drive + Cc("")
+path = path + Cc("")
+base = base + Cc("")
+suffix = suffix + Cc("")
-function file.splitname(str) -- returns drive, path, base, suffix
- return lpegmatch(pattern,str)
+local pattern_a = drive * path * base * suffix
+local pattern_b = path * base * suffix
+local pattern_c = C(drive * path) * C(base * suffix)
+
+function file.splitname(str,splitdrive)
+ if splitdrive then
+ return lpegmatch(pattern_a,str) -- returns drive, path, base, suffix
+ else
+ return lpegmatch(pattern_b,str) -- returns path, base, suffix
+ end
+end
+
+function file.nametotable(str,splitdrive) -- returns table
+ local path, drive, subpath, name, base, suffix = lpegmatch(pattern_c,str)
+ if splitdrive then
+ return {
+ path = path,
+ drive = drive,
+ subpath = subpath,
+ name = name,
+ base = base,
+ suffix = suffix,
+ }
+ else
+ return {
+ path = path,
+ name = name,
+ base = base,
+ suffix = suffix,
+ }
+ end
end
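+
+-- for illustration (made-up paths); without splitdrive the drive stays in the path:
+--
+-- file.splitname("/foo/bar.tex")          -- "/foo/", "bar", "tex"
+-- file.nametotable("/foo/bar.tex").suffix -- "tex"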
-- function test(t) for k, v in next, t do print(v, "=>", file.splitname(v)) end end
@@ -2828,12 +3137,19 @@ local nothing = Cc("")
local escaped = (plus / " ") + (percent * C(hexdigit * hexdigit) / tochar)
-- we assume schemes with more than 1 character (in order to avoid problems with windows disks)
+-- we also assume that when we have a scheme, we also have an authority
+
+local schemestr = Cs((escaped+(1-colon-slash-qmark-hash))^2)
+local authoritystr = Cs((escaped+(1- slash-qmark-hash))^0)
+local pathstr = Cs((escaped+(1- qmark-hash))^0)
+local querystr = Cs((escaped+(1- hash))^0)
+local fragmentstr = Cs((escaped+(1- endofstring))^0)
-local scheme = Cs((escaped+(1-colon-slash-qmark-hash))^2) * colon * slash * slash + nothing
-local authority = Cs((escaped+(1- slash-qmark-hash))^0) + nothing
-local path = slash * Cs((escaped+(1- qmark-hash))^0) + nothing
-local query = qmark * Cs((escaped+(1- hash))^0) + nothing
-local fragment = hash * Cs((escaped+(1- endofstring))^0) + nothing
+local scheme = schemestr * colon + nothing
+local authority = slash * slash * authoritystr + nothing
+local path = slash * pathstr + nothing
+local query = qmark * querystr + nothing
+local fragment = hash * fragmentstr + nothing
local validurl = scheme * authority * path * query * fragment
local parser = Ct(validurl)
@@ -2854,11 +3170,14 @@ local function split(str)
return (type(str) == "string" and lpegmatch(parser,str)) or str
end
+local isscheme = schemestr * colon * slash * slash -- this test also assumes authority
+
local function hasscheme(str)
- local scheme = lpegmatch(scheme,str) -- at least one character
- return scheme and scheme ~= ""
+ local scheme = lpegmatch(isscheme,str) -- at least one character
+ return scheme ~= "" and scheme or false
end
+
-- todo: cache them
local rootletter = R("az","AZ")
@@ -3007,8 +3326,6 @@ local attributes = lfs.attributes
local walkdir = lfs.dir
local isdir = lfs.isdir
local isfile = lfs.isfile
-local mkdir = lfs.mkdir
-local chdir = lfs.chdir
local currentdir = lfs.currentdir
-- handy
@@ -3056,13 +3373,13 @@ local function collectpattern(path,patt,recurse,result)
local ok, scanner
result = result or { }
if path == "/" then
- ok, scanner = xpcall(function() return walkdir(path..".") end, function() end) -- kepler safe
+ ok, scanner, first = xpcall(function() return walkdir(path..".") end, function() end) -- kepler safe
else
- ok, scanner = xpcall(function() return walkdir(path) end, function() end) -- kepler safe
+ ok, scanner, first = xpcall(function() return walkdir(path) end, function() end) -- kepler safe
end
if ok and type(scanner) == "function" then
if not find(path,"/$") then path = path .. '/' end
- for name in scanner do
+ for name in scanner, first do
local full = path .. name
local attr = attributes(full)
local mode = attr.mode
@@ -3245,7 +3562,7 @@ if onwindows then
pth = pth .. "/" .. s
end
if make_indeed and not isdir(pth) then
- mkdir(pth)
+ lfs.mkdir(pth)
end
end
return pth, (isdir(pth) == true)
@@ -3277,7 +3594,7 @@ else
pth = pth .. "/" .. s
end
if make_indeed and not first and not isdir(pth) then
- mkdir(pth)
+ lfs.mkdir(pth)
end
end
else
@@ -3285,7 +3602,7 @@ else
for s in gmatch(str,"[^/]+") do
pth = pth .. "/" .. s
if make_indeed and not isdir(pth) then
- mkdir(pth)
+ lfs.mkdir(pth)
end
end
end
@@ -3313,10 +3630,10 @@ if onwindows then
first, last = match(str,"^([a-zA-Z]:)(.*)$")
if first and not find(last,"^/") then
local d = currentdir()
- if chdir(first) then
+ if lfs.chdir(first) then
first = dir.current()
end
- chdir(d)
+ lfs.chdir(d)
end
end
if not first then
@@ -3381,10 +3698,6 @@ local type, tonumber = type, tonumber
boolean = boolean or { }
local boolean = boolean
--- function boolean.tonumber(b)
--- return b and 1 or 0 -- test and test and return or return
--- end
-
function boolean.tonumber(b)
if b then return 1 else return 0 end -- test and return or return
end
@@ -3545,7 +3858,7 @@ local function utf16_to_utf8_be(t)
if right then
local now = 256*left + right
if more > 0 then
- now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000
+ now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000 -- the 0x10000 smells wrong
more = 0
r = r + 1
result[r] = utfchar(now)
@@ -3573,7 +3886,7 @@ local function utf16_to_utf8_le(t)
if right then
local now = 256*right + left
if more > 0 then
- now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000
+ now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000 -- the 0x10000 smells wrong
more = 0
r = r + 1
result[r] = utfchar(now)
@@ -3590,14 +3903,14 @@ local function utf16_to_utf8_le(t)
return t
end
-local function utf32_to_utf8_be(str)
+local function utf32_to_utf8_be(t)
if type(t) == "string" then
- t = utfsplitlines(str)
+ t = utfsplitlines(t)
end
local result = { } -- we reuse result
for i=1,#t do
local r, more = 0, -1
- for a,b in bytepairs(str) do
+ for a,b in bytepairs(t[i]) do
if a and b then
if more < 0 then
more = 256*256*256*a + 256*256*b
@@ -3612,17 +3925,17 @@ local function utf32_to_utf8_be(str)
end
t[i] = concat(result,"",1,r)
end
- return result
+ return t
end
-local function utf32_to_utf8_le(str)
+local function utf32_to_utf8_le(t)
if type(t) == "string" then
- t = utfsplitlines(str)
+ t = utfsplitlines(t)
end
local result = { } -- we reuse result
for i=1,#t do
local r, more = 0, -1
- for a,b in bytepairs(str) do
+ for a,b in bytepairs(t[i]) do
if a and b then
if more < 0 then
more = 256*b + a
@@ -3637,7 +3950,7 @@ local function utf32_to_utf8_le(str)
end
t[i] = concat(result,"",1,r)
end
- return result
+ return t
end
unicode.utf32_to_utf8_be = utf32_to_utf8_be
@@ -3706,15 +4019,67 @@ end
local lpegmatch = lpeg.match
-local utftype = lpeg.patterns.utftype
+local patterns = lpeg.patterns
+local utftype = patterns.utftype
function unicode.filetype(data)
return data and lpegmatch(utftype,data) or "unknown"
end
+local toentities = lpeg.Cs (
+ (
+ patterns.utf8one
+ + (
+ patterns.utf8two
+ + patterns.utf8three
+ + patterns.utf8four
+ ) / function(s) local b = utfbyte(s) if b < 127 then return s else return format("&#x%X;",b) end end -- hex character reference needs the x
+ )^0
+)
+
+patterns.toentities = toentities
+
+function utf.toentities(str)
+ return lpegmatch(toentities,str)
+end
+
+local P, C, R, Cs = lpeg.P, lpeg.C, lpeg.R, lpeg.Cs
+
+local one = P(1)
+local two = C(1) * C(1)
+local four = C(R(utfchar(0xD8),utfchar(0xFF))) * C(1) * C(1) * C(1)
+
+local pattern = P("\254\255") * Cs( (
+ four / function(a,b,c,d)
+ local ab = 0xFF * byte(a) + byte(b)
+ local cd = 0xFF * byte(c) + byte(d)
+ return utfchar((ab-0xD800)*0x400 + (cd-0xDC00) + 0x10000)
+ end
+ + two / function(a,b)
+ return utfchar(byte(a)*256 + byte(b))
+ end
+ + one
+ )^1 )
+ + P("\255\254") * Cs( (
+ four / function(b,a,d,c)
+ local ab = 0xFF * byte(a) + byte(b)
+ local cd = 0xFF * byte(c) + byte(d)
+ return utfchar((ab-0xD800)*0x400 + (cd-0xDC00) + 0x10000)
+ end
+ + two / function(b,a)
+ return utfchar(byte(a)*256 + byte(b))
+ end
+ + one
+ )^1 )
+
+function string.toutf(s)
+ return lpegmatch(pattern,s) or s -- todo: utf32
+end
+
+
end -- of closure
do -- create closure to overcome 200 locals limit
@@ -3750,8 +4115,8 @@ if not math.sind then
end
if not math.odd then
- function math.odd (n) return n % 2 == 0 end
- function math.even(n) return n % 2 ~= 0 end
+ function math.odd (n) return n % 2 ~= 0 end
+ function math.even(n) return n % 2 == 0 end
end
@@ -3771,9 +4136,10 @@ utilities = utilities or {}
utilities.tables = utilities.tables or { }
local tables = utilities.tables
-local format, gmatch = string.format, string.gmatch
+local format, gmatch, rep = string.format, string.gmatch, string.rep
local concat, insert, remove = table.concat, table.insert, table.remove
local setmetatable, getmetatable, tonumber, tostring = setmetatable, getmetatable, tonumber, tostring
+local type, next, rawset, tonumber = type, next, rawset, tonumber
function tables.definetable(target) -- defines undefined tables
local composed, t, n = nil, { }, 0
@@ -3789,14 +4155,31 @@ function tables.definetable(target) -- defines undefined tables
return concat(t,"\n")
end
-function tables.accesstable(target)
- local t = _G
+function tables.accesstable(target,root)
+ local t = root or _G
for name in gmatch(target,"([^%.]+)") do
t = t[name]
+ if not t then
+ return
+ end
end
return t
end
+function tables.migratetable(target,v,root)
+ local t = root or _G
+ local names = string.split(target,".")
+ for i=1,#names-1 do
+ local name = names[i]
+ t[name] = t[name] or { }
+ t = t[name]
+ if not t then
+ return
+ end
+ end
+ t[names[#names]] = v
+end
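+
+-- for illustration: both walk a dotted path, starting at the global table by default
+--
+-- tables.accesstable("math.floor")        -- the function math.floor
+-- tables.migratetable("my.nested.key",42) -- creates my.nested and sets key = 42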
+
function tables.removevalue(t,value) -- todo: n
if value then
for i=1,#t do
@@ -3840,13 +4223,19 @@ end
-- experimental
-local function toxml(t,d,result)
+local function toxml(t,d,result,step)
for k, v in table.sortedpairs(t) do
if type(v) == "table" then
- result[#result+1] = format("%s<%s>",d,k)
- toxml(v,d.." ",result)
- result[#result+1] = format("%s</%s>",d,k)
- elseif tonumber(k) then
+ if type(k) == "number" then
+ result[#result+1] = format("%s<entry n='%s'>",d,k)
+ toxml(v,d..step,result,step)
+ result[#result+1] = format("%s</entry>",d,k)
+ else
+ result[#result+1] = format("%s<%s>",d,k)
+ toxml(v,d..step,result,step)
+ result[#result+1] = format("%s</%s>",d,k)
+ end
+ elseif type(k) == "number" then
result[#result+1] = format("%s<entry n='%s'>%s</entry>",d,k,v,k)
else
result[#result+1] = format("%s<%s>%s</%s>",d,k,tostring(v),k)
@@ -3854,17 +4243,56 @@ local function toxml(t,d,result)
end
end
-function table.toxml(t,name,nobanner)
+function table.toxml(t,name,nobanner,indent,spaces)
local noroot = name == false
local result = (nobanner or noroot) and { } or { "<?xml version='1.0' standalone='yes' ?>" }
+ local indent = rep(" ",indent or 0)
+ local spaces = rep(" ",spaces or 1)
if noroot then
- toxml( t, "", result)
+ toxml( t, indent, result, spaces)
else
- toxml( { [name or "root"] = t }, "", result)
+ toxml( { [name or "root"] = t }, indent, result, spaces)
end
return concat(result,"\n")
end
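+
+-- for illustration, with the banner suppressed and the default one-space indent
+-- per level, table.toxml({ n = 123, title = "test" },"data",true) should yield:
+--
+-- <data>
+--  <n>123</n>
+--  <title>test</title>
+-- </data>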
+-- also experimental
+
+-- encapsulate(table,utilities.tables)
+-- encapsulate(table,utilities.tables,true)
+-- encapsulate(table,true)
+
+function tables.encapsulate(core,capsule,protect)
+ if type(capsule) ~= "table" then
+ protect = true
+ capsule = { }
+ end
+ for key, value in next, core do
+ if capsule[key] then
+ print(format("\ninvalid inheritance '%s' in '%s'",key,tostring(core)))
+ os.exit()
+ else
+ capsule[key] = value
+ end
+ end
+ if protect then
+ for key, value in next, core do
+ core[key] = nil
+ end
+ setmetatable(core, {
+ __index = capsule,
+ __newindex = function(t,key,value)
+ if capsule[key] then
+ print(format("\ninvalid overload '%s' in '%s'",key,tostring(core)))
+ os.exit()
+ else
+ rawset(t,key,value)
+ end
+ end
+ } )
+ end
+end
+
end -- of closure
@@ -3886,8 +4314,8 @@ local storage = utilities.storage
function storage.mark(t)
if not t then
- texio.write_nl("fatal error: storage '%s' cannot be marked",t)
- os.exit()
+ texio.write_nl("fatal error: storage cannot be marked")
+ return -- os.exit()
end
local m = getmetatable(t)
if not m then
@@ -3916,8 +4344,8 @@ end
function storage.checked(t)
if not t then
- texio.write_nl("fatal error: storage '%s' has not been allocated",t)
- os.exit()
+ texio.write_nl("fatal error: storage has not been allocated")
+ return -- os.exit()
end
return t
end
@@ -3946,13 +4374,13 @@ end
-- table namespace ?
-local function f_empty () return "" end -- t,k
-local function f_self (t,k) t[k] = k return k end
-local function f_ignore() end -- t,k,v
+local function f_empty () return "" end -- t,k
+local function f_self (t,k) t[k] = k return k end
+local function f_ignore() end -- t,k,v
-local t_empty = { __index = empty }
-local t_self = { __index = self }
-local t_ignore = { __newindex = ignore }
+local t_empty = { __index = f_empty }
+local t_self = { __index = f_self }
+local t_ignore = { __newindex = f_ignore }
function table.setmetatableindex(t,f)
local m = getmetatable(t)
@@ -3973,6 +4401,7 @@ function table.setmetatableindex(t,f)
setmetatable(t,{ __index = f })
end
end
+ return t
end
function table.setmetatablenewindex(t,f)
@@ -3990,6 +4419,7 @@ function table.setmetatablenewindex(t,f)
setmetatable(t,{ __newindex = f })
end
end
+ return t
end
function table.setmetatablecall(t,f)
@@ -3999,6 +4429,7 @@ function table.setmetatablecall(t,f)
else
setmetatable(t,{ __call = f })
end
+ return t
end
function table.setmetatablekey(t,key,value)
@@ -4008,6 +4439,7 @@ function table.setmetatablekey(t,key,value)
setmetatable(t,m)
end
m[key] = value
+ return t
end
function table.getmetatablekey(t,key,value)
@@ -4176,14 +4608,29 @@ utilities = utilities or {}
utilities.lua = utilities.lua or { }
utilities.report = logs and logs.reporter("system") or print
-function utilities.lua.compile(luafile,lucfile,cleanup,strip) -- defaults: cleanup=false strip=true
+local function stupidcompile(luafile,lucfile)
+ local data = io.loaddata(luafile)
+ if data and data ~= "" then
+ data = string.dump(loadstring(data)) -- dump expects a function, not the source string
+ if data and data ~= "" then
+ io.savedata(lucfile,data)
+ end
+ end
+end
+
+function utilities.lua.compile(luafile,lucfile,cleanup,strip,fallback) -- defaults: cleanup=false strip=true
utilities.report("lua: compiling %s into %s",luafile,lucfile)
os.remove(lucfile)
local command = "-o " .. string.quoted(lucfile) .. " " .. string.quoted(luafile)
if strip ~= false then
command = "-s " .. command
end
- local done = os.spawn("texluac " .. command) == 0 or os.spawn("luac " .. command) == 0
+ local done = os.spawn("texluac " .. command) == 0 -- or os.spawn("luac " .. command) == 0
+ if not done and fallback then
+ utilities.report("lua: dumping %s into %s (unstripped)",luafile,lucfile)
+ stupidcompile(luafile,lucfile) -- maybe use the stripper we have elsewhere
+ cleanup = false -- better see how worse it is
+ end
if done and cleanup == true and lfs.isfile(lucfile) and lfs.isfile(luafile) then
utilities.report("lua: removing %s",luafile)
os.remove(luafile)
@@ -4210,9 +4657,9 @@ if not modules then modules = { } end modules ['util-prs'] = {
license = "see context related readme files"
}
-local P, R, V, C, Ct, Carg = lpeg.P, lpeg.R, lpeg.V, lpeg.C, lpeg.Ct, lpeg.Carg
+local P, R, V, C, Ct, Cs, Carg = lpeg.P, lpeg.R, lpeg.V, lpeg.C, lpeg.Ct, lpeg.Cs, lpeg.Carg
local lpegmatch = lpeg.match
-local concat, format, gmatch = table.concat, string.format, string.gmatch
+local concat, format, gmatch, find = table.concat, string.format, string.gmatch, string.find
local tostring, type, next = tostring, type, next
utilities = utilities or {}
@@ -4240,8 +4687,12 @@ local rbrace = P("}")
local nobrace = 1 - (lbrace+rbrace)
local nested = P { lbrace * (nobrace + V(1))^0 * rbrace }
local spaces = space^0
+local argument = Cs((lbrace/"") * ((nobrace + nested)^0) * (rbrace/""))
+local content = (1-P(-1))^0
-lpeg.patterns.nested = nested
+lpeg.patterns.nested = nested -- no capture
+lpeg.patterns.argument = argument -- argument after e.g. =
+lpeg.patterns.content = content -- rest after e.g =
local value = P(lbrace * C((nobrace + nested)^0) * rbrace) + C((nested + (1-comma))^0)
@@ -4322,9 +4773,15 @@ parsers.patterns.settings_to_array = pattern
-- we could use a weak table as cache
-function parsers.settings_to_array(str)
+function parsers.settings_to_array(str,strict)
if not str or str == "" then
return { }
+ elseif strict then
+ if find(str,"{") then
+ return lpegmatch(pattern,str)
+ else
+ return { str }
+ end
else
return lpegmatch(pattern,str)
end
@@ -4562,61 +5019,55 @@ local names = { }
-- one
local function hook()
- local f = getinfo(2,"f").func
- local n = getinfo(2,"Sn")
--- if n.what == "C" and n.name then print (n.namewhat .. ': ' .. n.name) end
+ local f = getinfo(2) -- "nS"
if f then
- local cf = counters[f]
- if cf == nil then
- counters[f] = 1
- names[f] = n
- else
- counters[f] = cf + 1
- end
- end
-end
-
-local function getname(func)
- local n = names[func]
- if n then
- if n.what == "C" then
- return n.name or '<anonymous>'
+ local n = "unknown"
+ if f.what == "C" then
+ n = f.name or '<anonymous>'
+ if not names[n] then
+ names[n] = format("%42s",n)
+ end
else
-- source short_src linedefined what name namewhat nups func
- local name = n.name or n.namewhat or n.what
- if not name or name == "" then name = "?" end
- return format("%s : %s : %s", n.short_src or "unknown source", n.linedefined or "--", name)
+ n = f.name or f.namewhat or f.what
+ if not n or n == "" then
+ n = "?"
+ end
+ if not names[n] then
+ names[n] = format("%42s : % 5i : %s",n,f.linedefined or 0,f.short_src or "unknown source")
+ end
end
- else
- return "unknown"
+ counters[n] = (counters[n] or 0) + 1
end
end
-function debugger.showstats(printer,threshold)
+function debugger.showstats(printer,threshold) -- hm, something has changed, rubbish now
printer = printer or texio.write or print
threshold = threshold or 0
local total, grandtotal, functions = 0, 0, 0
- printer("\n") -- ugly but ok
- -- table.sort(counters)
- for func, count in next, counters do
- if count > threshold then
- local name = getname(func)
- if not find(name,"for generator") then
- printer(format("%8i %s", count, name))
- total = total + count
- end
+ local dataset = { }
+ for name, count in next, counters do
+ dataset[#dataset+1] = { name, count }
+ end
+ table.sort(dataset,function(a,b) return a[2] == b[2] and b[1] > a[1] or a[2] > b[2] end)
+ for i=1,#dataset do
+ local d = dataset[i]
+ local name = d[1]
+ local count = d[2]
+ if count > threshold and not find(name,"for generator") then -- move up
+ printer(format("%8i %s\n", count, names[name]))
+ total = total + count
end
grandtotal = grandtotal + count
functions = functions + 1
end
- printer(format("functions: %s, total: %s, grand total: %s, threshold: %s\n", functions, total, grandtotal, threshold))
+ printer("\n")
+ printer(format("functions : % 10i\n", functions))
+ printer(format("total : % 10i\n", total))
+ printer(format("grand total: % 10i\n", grandtotal))
+ printer(format("threshold : % 10i\n", threshold))
end
--- two
-
-
--- rest
-
function debugger.savestats(filename,threshold)
local f = io.open(filename,'w')
if f then
@@ -4638,6 +5089,7 @@ end
local is_node = node and node.is_node
+local is_lpeg = lpeg and lpeg.type
function inspect(i) -- global function
local ti = type(i)
@@ -4645,11 +5097,30 @@ function inspect(i) -- global function
table.print(i,"table")
elseif is_node and is_node(i) then
table.print(nodes.astable(i),tostring(i))
+ elseif is_lpeg and is_lpeg(i) then
+ lpeg.print(i)
else
print(tostring(i))
end
end
+-- from the lua book:
+
+function traceback()
+ local level = 1
+ while true do
+ local info = debug.getinfo(level, "Sl")
+ if not info then
+ break
+ elseif info.what == "C" then
+ print(format("%3i : C function",level))
+ else
+ print(format("%3i : [%s]:%d",level,info.short_src,info.currentline))
+ end
+ level = level + 1
+ end
+end
+
end -- of closure
@@ -4668,7 +5139,7 @@ if not modules then modules = { } end modules ['trac-inf'] = {
-- get warnings about assignments. This is more efficient than using rawset
-- and rawget.
-local format = string.format
+local format, lower = string.format, string.lower
local clock = os.gettimeofday or os.clock -- should go in environment
local write_nl = texio.write_nl
@@ -4770,10 +5241,10 @@ function statistics.show(reporter)
-- this code will move
local register = statistics.register
register("luatex banner", function()
- return string.lower(status.banner)
+ return lower(status.banner)
end)
register("control sequences", function()
- return format("%s of %s", status.cs_count, status.hash_size+status.hash_extra)
+ return format("%s of %s + %s", status.cs_count, status.hash_size,status.hash_extra)
end)
register("callbacks", function()
local total, indirect = status.callbacks or 0, status.indirect_callbacks or 0
@@ -4847,7 +5318,7 @@ end
function commands.elapsedtime(name)
stoptiming(name or "whatever")
- tex.sprint(elapsedtime(name or "whatever"))
+ context(elapsedtime(name or "whatever"))
end
@@ -4931,6 +5402,10 @@ local function set(t,what,newvalue)
if type(what) ~= "table" then
return
end
+ if not done then -- catch ... why not set?
+ done = { }
+ t.done = done
+ end
for w, value in next, what do
if value == "" then
value = newvalue
@@ -5167,6 +5642,8 @@ end
if texconfig then
+ -- this happens too late in ini mode but that is no problem
+
local function set(k,v)
v = tonumber(v)
if v then
@@ -5237,7 +5714,7 @@ local report, subreport, status, settarget, setformats, settranslations
local direct, subdirect, writer, pushtarget, poptarget
-if tex and tex.jobname or tex.formatname then
+if tex and (tex.jobname or tex.formatname) then
local valueiskey = { __index = function(t,k) t[k] = k return k end } -- will be helper
@@ -5252,6 +5729,10 @@ if tex and tex.jobname or tex.formatname then
write_nl(target,...)
end
+ newline = function()
+ write_nl(target,"\n")
+ end
+
report = function(a,b,c,...)
if c then
write_nl(target,format("%-15s > %s\n",translations[a],format(formats[b],c,...)))
@@ -5357,6 +5838,10 @@ else
writer = write_nl
+ newline = function()
+ write_nl("\n")
+ end
+
report = function(a,b,c,...)
if c then
write_nl(format("%-15s | %s",a,format(b,c,...)))
@@ -5416,6 +5901,7 @@ logs.settranslations = settranslations
logs.direct = direct
logs.subdirect = subdirect
logs.writer = writer
+logs.newline = newline
-- installer
@@ -5585,21 +6071,58 @@ local real, user, sub
function logs.start_page_number()
real, user, sub = texcount.realpageno, texcount.userpageno, texcount.subpageno
+-- real, user, sub = 0, 0, 0
end
-function logs.stop_page_number()
- if real > 0 then
- if user > 0 then
- if sub > 0 then
- report_pages("flushing realpage %s, userpage %s, subpage %s",real,user,sub)
+local timing = false
+local starttime = nil
+local lasttime = nil
+
+trackers.register("pages.timing", function(v) -- only for myself (diagnostics)
+ starttime = os.clock()
+ timing = true
+end)
+
+function logs.stop_page_number() -- the first page can include the initialization so we omit it from the average
+ if timing then
+ local elapsed, average
+ local stoptime = os.clock()
+ if not lasttime or real < 2 then
+ elapsed = stoptime
+ average = stoptime
+ starttime = stoptime
+ else
+ elapsed = stoptime - lasttime
+ average = (stoptime - starttime) / (real - 1)
+ end
+ lasttime = stoptime
+ if real > 0 then
+ if user > 0 then
+ if sub > 0 then
+ report_pages("flushing realpage %s, userpage %s, subpage %s, time %0.04f / %0.04f",real,user,sub,elapsed,average)
+ else
+ report_pages("flushing realpage %s, userpage %s, time %0.04f / %0.04f",real,user,elapsed,average)
+ end
else
- report_pages("flushing realpage %s, userpage %s",real,user)
+ report_pages("flushing realpage %s, time %0.04f / %0.04f",real,elapsed,average)
end
else
- report_pages("flushing realpage %s",real)
+ report_pages("flushing page, time %0.04f / %0.04f",elapsed,average)
end
else
- report_pages("flushing page")
+ if real > 0 then
+ if user > 0 then
+ if sub > 0 then
+ report_pages("flushing realpage %s, userpage %s, subpage %s",real,user,sub)
+ else
+ report_pages("flushing realpage %s, userpage %s",real,user)
+ end
+ else
+ report_pages("flushing realpage %s",real)
+ end
+ else
+ report_pages("flushing page")
+ end
end
logs.flush()
end
@@ -5681,6 +6204,13 @@ local function reportbanner(t)
end
end
+local function reportversion(t)
+ local banner = t.banner
+ if banner then
+ t.report(banner)
+ end
+end
+
local function reporthelp(t,...)
local helpinfo = t.helpinfo
if type(helpinfo) == "string" then
@@ -5707,6 +6237,7 @@ function logs.application(t)
t.report = logs.reporter(t.name)
t.help = function(...) reportbanner(t) ; reporthelp(t,...) ; reportinfo(t) end
t.identify = function() reportbanner(t) end
+ t.version = function() reportversion(t) end
return t
end
@@ -5772,6 +6303,11 @@ else
end
end
+-- do we still need io.flush then?
+
+io.stdout:setvbuf('no')
+io.stderr:setvbuf('no')
+
end -- of closure
@@ -6277,6 +6813,10 @@ if not modules then modules = { } end modules ['lxml-tab'] = {
-- stripping spaces from e.g. cont-en.xml saves .2 sec runtime so it's not worth the
-- trouble
+-- todo: when serializing optionally remap named entities to hex (if known in char-ent.lua)
+-- maybe when letter -> utf, else name .. then we need an option to the serializer .. a bit
+-- of work so we delay this until we clean up
+
local trace_entities = false trackers.register("xml.entities", function(v) trace_entities = v end)
local report_xml = logs and logs.reporter("xml","core") or function(...) print(format(...)) end
@@ -6301,10 +6841,11 @@ xml = xml or { }
local xml = xml
+local utf = unicode.utf8
local concat, remove, insert = table.concat, table.remove, table.insert
local type, next, setmetatable, getmetatable, tonumber = type, next, setmetatable, getmetatable, tonumber
local format, lower, find, match, gsub = string.format, string.lower, string.find, string.match, string.gsub
-local utfchar = unicode.utf8.char
+local utfchar, utffind, utfgsub = utf.char, utf.find, utf.gsub
local lpegmatch = lpeg.match
local P, S, R, C, V, C, Cs = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.V, lpeg.C, lpeg.Cs
@@ -6417,9 +6958,22 @@ element.</p>
local nsremap, resolvens = xml.xmlns, xml.resolvens
-local stack, top, dt, at, xmlns, errorstr, entities = { }, { }, { }, { }, { }, nil, { }
-local strip, cleanup, utfize, resolve, resolve_predefined, unify_predefined = false, false, false, false, false, false
-local dcache, hcache, acache = { }, { }, { }
+local stack = { }
+local top = { }
+local dt = { }
+local at = { }
+local xmlns = { }
+local errorstr = nil
+local entities = { }
+local strip = false
+local cleanup = false
+local utfize = false
+local resolve_predefined = false
+local unify_predefined = false
+
+local dcache = { }
+local hcache = { }
+local acache = { }
local mt = { }
@@ -6585,7 +7139,7 @@ local predefined_unified = {
[42] = "&quot;",
[47] = "&apos;",
[74] = "&lt;",
- [76] = "&gr;",
+ [76] = "&gt;",
}
local predefined_simplified = {
@@ -6596,6 +7150,58 @@ local predefined_simplified = {
[76] = ">", gt = ">",
}
+local nofprivates = 0xF0000 -- shared but seldom used
+
+local privates_u = { -- unescaped
+ [ [[&]] ] = "&amp;",
+ [ [["]] ] = "&quot;",
+ [ [[']] ] = "&apos;",
+ [ [[<]] ] = "&lt;",
+ [ [[>]] ] = "&gt;",
+}
+
+local privates_p = {
+}
+
+local privates_n = {
+ -- keeps track of defined ones
+}
+
+local function escaped(s)
+ if s == "" then
+ return ""
+ else -- if utffind(s,privates_u) then
+ return (utfgsub(s,".",privates_u))
+ -- else
+ -- return s
+ end
+end
+
+local function unescaped(s)
+ local p = privates_n[s]
+ if not p then
+ nofprivates = nofprivates + 1
+ p = utfchar(nofprivates)
+ privates_n[s] = p
+ s = "&" .. s .. ";" -- todo: use char-ent to map to hex
+ privates_u[p] = s
+ privates_p[p] = s
+ end
+ return p
+end
+
+local function unprivatized(s,resolve)
+ if s == "" then
+ return ""
+ else
+ return (utfgsub(s,".",privates_p))
+ end
+end
+
+xml.privatetoken = unescaped
+xml.unprivatized = unprivatized
+xml.privatecodes = privates_n
+
local function handle_hex_entity(str)
local h = hcache[str]
if not h then
@@ -6637,7 +7243,7 @@ local function handle_dec_entity(str)
if not n then
report_xml("utfize, ignoring dec entity &#%s;",str)
elseif trace_entities then
- report_xml("utfize, converting dec entity &#%s; into %s",str,h)
+ report_xml("utfize, converting dec entity &#%s; into %s",str,d)
end
else
if trace_entities then
@@ -6658,34 +7264,44 @@ local function handle_any_entity(str)
if not a then
a = resolve_predefined and predefined_simplified[str]
if a then
- -- one of the predefined
- elseif type(resolve) == "function" then
- a = resolve(str) or entities[str]
- else
- a = entities[str]
- end
- if a then
if trace_entities then
- report_xml("resolved entity &%s; -> %s (internal)",str,a)
+ report_xml("resolved entity &%s; -> %s (predefined)",str,a)
end
- a = lpegmatch(parsedentity,a) or a
else
- local unknown_any_entity = placeholders.unknown_any_entity
- if unknown_any_entity then
- a = unknown_any_entity(str) or ""
+ if type(resolve) == "function" then
+ a = resolve(str) or entities[str]
+ else
+ a = entities[str]
end
if a then
+ if type(a) == "function" then
+ if trace_entities then
+ report_xml("expanding entity &%s; (function)",str)
+ end
+ a = a(str) or ""
+ end
+ a = lpegmatch(parsedentity,a) or a -- for nested
if trace_entities then
- report_xml("resolved entity &%s; -> %s (external)",str,a)
+ report_xml("resolved entity &%s; -> %s (internal)",str,a)
end
else
- if trace_entities then
- report_xml("keeping entity &%s;",str)
+ local unknown_any_entity = placeholders.unknown_any_entity
+ if unknown_any_entity then
+ a = unknown_any_entity(str) or ""
end
- if str == "" then
- a = "&error;"
+ if a then
+ if trace_entities then
+ report_xml("resolved entity &%s; -> %s (external)",str,a)
+ end
else
- a = "&" .. str .. ";"
+ if trace_entities then
+ report_xml("keeping entity &%s;",str)
+ end
+ if str == "" then
+ a = "&error;"
+ else
+ a = "&" .. str .. ";"
+ end
end
end
end
@@ -6700,18 +7316,25 @@ local function handle_any_entity(str)
else
local a = acache[str]
if not a then
- if trace_entities then
- report_xml("found entity &%s;",str)
- end
a = resolve_predefined and predefined_simplified[str]
if a then
-- one of the predefined
acache[str] = a
+ if trace_entities then
+ report_xml("entity &%s; becomes %s",str,tostring(a))
+ end
elseif str == "" then
+ if trace_entities then
+ report_xml("invalid entity &%s;",str)
+ end
a = "&error;"
acache[str] = a
else
- a = "&" .. str .. ";"
+ if trace_entities then
+ report_xml("entity &%s; is made private",str)
+ end
+ -- a = "&" .. str .. ";"
+ a = unescaped(str)
acache[str] = a
end
end
@@ -6761,7 +7384,7 @@ local value = (squote * Cs((entity + (1 - squote))^0) * squote) + (dq
local endofattributes = slash * close + close -- recovery of flacky html
local whatever = space * name * optionalspace * equal
-local wrongvalue = C(P(1-whatever-close)^1 + P(1-close)^1) / attribute_value_error
+----- wrongvalue = C(P(1-whatever-close)^1 + P(1-close)^1) / attribute_value_error
----- wrongvalue = C(P(1-whatever-endofattributes)^1 + P(1-endofattributes)^1) / attribute_value_error
----- wrongvalue = C(P(1-space-endofattributes)^1) / attribute_value_error
local wrongvalue = Cs(P(entity + (1-space-endofattributes))^1) / attribute_value_error
@@ -6796,6 +7419,8 @@ local function normalentity(k,v ) entities[k] = v end
local function systementity(k,v,n) entities[k] = v end
local function publicentity(k,v,n) entities[k] = v end
+-- todo: separate dtd parser
+
local begindoctype = open * P("!DOCTYPE")
local enddoctype = close
local beginset = P("[")
@@ -6803,17 +7428,22 @@ local endset = P("]")
local doctypename = C((1-somespace-close)^0)
local elementdoctype = optionalspace * P("<!ELEMENT") * (1-close)^0 * close
+local basiccomment = begincomment * ((1 - endcomment)^0) * endcomment
+
local normalentitytype = (doctypename * somespace * value)/normalentity
local publicentitytype = (doctypename * somespace * P("PUBLIC") * somespace * value)/publicentity
local systementitytype = (doctypename * somespace * P("SYSTEM") * somespace * value * somespace * P("NDATA") * somespace * doctypename)/systementity
local entitydoctype = optionalspace * P("<!ENTITY") * somespace * (systementitytype + publicentitytype + normalentitytype) * optionalspace * close
-local doctypeset = beginset * optionalspace * P(elementdoctype + entitydoctype + space)^0 * optionalspace * endset
+-- we accept comments in doctypes
+
+local doctypeset = beginset * optionalspace * P(elementdoctype + entitydoctype + basiccomment + space)^0 * optionalspace * endset
local definitiondoctype= doctypename * somespace * doctypeset
local publicdoctype = doctypename * somespace * P("PUBLIC") * somespace * value * somespace * value * somespace * doctypeset
local systemdoctype = doctypename * somespace * P("SYSTEM") * somespace * value * somespace * doctypeset
local simpledoctype = (1-close)^1 -- * balanced^0
local somedoctype = C((somespace * (publicdoctype + systemdoctype + definitiondoctype + simpledoctype) * optionalspace)^0)
+local somedoctype = C((somespace * (publicdoctype + systemdoctype + definitiondoctype + simpledoctype) * optionalspace)^0)
local instruction = (spacing * begininstruction * someinstruction * endinstruction) / function(...) add_special("@pi@",...) end
local comment = (spacing * begincomment * somecomment * endcomment ) / function(...) add_special("@cm@",...) end
@@ -6845,17 +7475,30 @@ local grammar_unparsed_text = P { "preamble",
children = unparsedtext + V("parent") + emptyelement + comment + cdata + instruction,
}
--- maybe we will add settinsg to result as well
+-- maybe we will add settings to result as well
-local function xmlconvert(data, settings)
- settings = settings or { } -- no_root strip_cm_and_dt given_entities parent_root error_handler
- strip = settings.strip_cm_and_dt
- utfize = settings.utfize_entities
- resolve = settings.resolve_entities
+local function _xmlconvert_(data, settings)
+ settings = settings or { } -- no_root strip_cm_and_dt given_entities parent_root error_handler
+ --
+ strip = settings.strip_cm_and_dt
+ utfize = settings.utfize_entities
+ resolve = settings.resolve_entities
resolve_predefined = settings.resolve_predefined_entities -- in case we have escaped entities
- unify_predefined = settings.unify_predefined_entities -- &#038; -> &amp;
- cleanup = settings.text_cleanup
- stack, top, at, xmlns, errorstr, entities = { }, { }, { }, { }, nil, settings.entities or { }
+ unify_predefined = settings.unify_predefined_entities -- &#038; -> &amp;
+ cleanup = settings.text_cleanup
+ entities = settings.entities or { }
+ --
+ if utfize == nil then
+ settings.utfize_entities = true
+ utfize = true
+ end
+ if resolve_predefined == nil then
+ settings.resolve_predefined_entities = true
+ resolve_predefined = true
+ end
+ --
+ --
+ stack, top, at, xmlns, errorstr = { }, { }, { }, { }, nil
acache, hcache, dcache = { }, { }, { } -- not stored
reported_attribute_errors = { }
if settings.parent_root then
@@ -6893,7 +7536,7 @@ local function xmlconvert(data, settings)
else
errorhandler = errorhandler or xml.errorhandler
if errorhandler then
- xml.errorhandler("load",errorstr)
+ xml.errorhandler(format("load error: %s",errorstr))
end
end
else
@@ -6907,7 +7550,7 @@ local function xmlconvert(data, settings)
local v = rdt[k]
if type(v) == "table" and not v.special then -- always table -)
result.ri = k -- rootindex
-v.__p__ = result -- new, experiment, else we cannot go back to settings, we need to test this !
+ v.__p__ = result -- new, experiment, else we cannot go back to settings, we need to test this !
break
end
end
@@ -6915,16 +7558,42 @@ v.__p__ = result -- new, experiment, else we cannot go back to settings, we nee
if errorstr and errorstr ~= "" then
result.error = true
end
+ result.statistics = {
+ entities = {
+ decimals = dcache,
+ hexadecimals = hcache,
+ names = acache,
+ }
+ }
+ strip, utfize, resolve, resolve_predefined = nil, nil, nil, nil
+ unify_predefined, cleanup, entities = nil, nil, nil
+ stack, top, at, xmlns, errorstr = nil, nil, nil, nil, nil
+ acache, hcache, dcache = nil, nil, nil
+ reported_attribute_errors, mt, errorhandler = nil, nil, nil
return result
end
+-- Because we can have a crash (stack issues) with faulty xml, we wrap this one
+-- in a protector:
+
+function xmlconvert(data,settings)
+ local ok, result = pcall(function() return _xmlconvert_(data,settings) end)
+ if ok then
+ return result
+ else
+ return _xmlconvert_("")
+ end
+end
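+
+-- Minimal usage sketch (assuming this module is loaded, as in mtxrun itself):
+-- malformed input no longer crashes the converter; at worst an empty tree is
+-- returned and, when the parser reported a problem, result.error is set.
+--
+-- local good = xml.convert("<a>ok</a>")
+-- local bad  = xml.convert("<a><b></a>")
+-- print(xml.is_valid(good),xml.is_valid(bad)) -- typically: true  false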
+
xml.convert = xmlconvert
-function xml.inheritedconvert(data,xmldata)
+function xml.inheritedconvert(data,xmldata) -- xmldata is parent
local settings = xmldata.settings
- settings.parent_root = xmldata -- to be tested
+ if settings then
+ settings.parent_root = xmldata -- to be tested
+ end
-- settings.no_root = true
- local xc = xmlconvert(data,settings)
+ local xc = xmlconvert(data,settings) -- hm, we might need to locate settings
-- xc.settings = nil
-- xc.entities = nil
-- xc.special = nil
@@ -6953,7 +7622,7 @@ function xml.is_valid(root)
return root and not root.error
end
-xml.errorhandler = report
+xml.errorhandler = report_xml
--[[ldx--
<p>We cannot load an <l n='lpeg'/> from a filehandle so we need to load
@@ -7032,18 +7701,15 @@ alternative.</p>
function xml.checkbom(root) -- can be made faster
if root.ri then
- local dt, found = root.dt, false
+ local dt = root.dt
for k=1,#dt do
local v = dt[k]
if type(v) == "table" and v.special and v.tg == "@pi@" and find(v.dt[1],"xml.*version=") then
- found = true
- break
+ return
end
end
- if not found then
- insert(dt, 1, { special=true, ns="", tg="@pi@", dt = { "xml version='1.0' standalone='yes'"} } )
- insert(dt, 2, "\n" )
- end
+ insert(dt, 1, { special=true, ns="", tg="@pi@", dt = { "xml version='1.0' standalone='yes'"} } )
+ insert(dt, 2, "\n" )
end
end
@@ -7054,14 +7720,14 @@ and then handle the lot.</p>
-- new experimental reorganized serialize
-local function verbose_element(e,handlers)
+local function verbose_element(e,handlers) -- options
local handle = handlers.handle
local serialize = handlers.serialize
local ens, etg, eat, edt, ern = e.ns, e.tg, e.at, e.dt, e.rn
local ats = eat and next(eat) and { }
if ats then
for k,v in next, eat do
- ats[#ats+1] = format('%s=%q',k,v)
+ ats[#ats+1] = format('%s=%q',k,escaped(v))
end
end
if ern and trace_entities and ern ~= ens then
@@ -7077,7 +7743,7 @@ local function verbose_element(e,handlers)
for i=1,#edt do
local e = edt[i]
if type(e) == "string" then
- handle(e)
+ handle(escaped(e))
else
serialize(e,handlers)
end
@@ -7098,11 +7764,11 @@ local function verbose_element(e,handlers)
handle("<",etg,">")
end
for i=1,#edt do
- local ei = edt[i]
- if type(ei) == "string" then
- handle(ei)
+ local e = edt[i]
+ if type(e) == "string" then
+ handle(escaped(e)) -- option: hexify escaped entities
else
- serialize(ei,handlers)
+ serialize(e,handlers)
end
end
handle("</",etg,">")
@@ -7137,7 +7803,7 @@ local function verbose_root(e,handlers)
end
local function verbose_text(e,handlers)
- handlers.handle(e)
+ handlers.handle(escaped(e))
end
local function verbose_document(e,handlers)
@@ -7265,20 +7931,33 @@ local result
local xmlfilehandler = newhandlers {
name = "file",
- initialize = function(name) result = io.open(name,"wb") return result end,
- finalize = function() result:close() return true end,
- handle = function(...) result:write(...) end,
+ initialize = function(name)
+ result = io.open(name,"wb")
+ return result
+ end,
+ finalize = function()
+ result:close()
+ return true
+ end,
+ handle = function(...)
+ result:write(...)
+ end,
}
-- no checking on writeability here but not faster either
--
-- local xmlfilehandler = newhandlers {
--- initialize = function(name) io.output(name,"wb") return true end,
--- finalize = function() io.close() return true end,
+-- initialize = function(name)
+-- io.output(name,"wb")
+-- return true
+-- end,
+-- finalize = function()
+-- io.close()
+-- return true
+-- end,
-- handle = io.write,
-- }
-
function xml.save(root,name)
serialize(root,xmlfilehandler,name)
end
@@ -7287,28 +7966,34 @@ local result
local xmlstringhandler = newhandlers {
name = "string",
- initialize = function() result = { } return result end,
- finalize = function() return concat(result) end,
- handle = function(...) result[#result+1] = concat { ... } end
+ initialize = function()
+ result = { }
+ return result
+ end,
+ finalize = function()
+ return concat(result)
+ end,
+ handle = function(...)
+ result[#result+1] = concat { ... }
+ end,
}
local function xmltostring(root) -- 25% overhead due to collecting
- if root then
- if type(root) == 'string' then
- return root
- else -- if next(root) then -- next is faster than type (and >0 test)
- return serialize(root,xmlstringhandler) or ""
- end
+ if not root then
+ return ""
+ elseif type(root) == 'string' then
+ return root
+ else -- if next(root) then -- next is faster than type (and >0 test)
+ return serialize(root,xmlstringhandler) or ""
end
- return ""
end
-local function xmltext(root) -- inline
+local function __tostring(root) -- inline
return (root and xmltostring(root)) or ""
end
initialize_mt = function(root) -- redefinition
- mt = { __tostring = xmltext, __index = root }
+ mt = { __tostring = __tostring, __index = root }
end
xml.defaulthandlers = handlers
@@ -7428,7 +8113,7 @@ xml.tocdata(e,"error")
--ldx]]--
function xml.tocdata(e,wrapper)
- local whatever = xmltostring(e.dt)
+ local whatever = type(e) == "table" and xmltostring(e.dt) or e or ""
if wrapper then
whatever = format("<%s>%s</%s>",wrapper,whatever,wrapper)
end
@@ -7620,8 +8305,8 @@ apply_axis['child'] = function(list)
c = c + 1
collected[c] = dk
dk.ni = k -- refresh
- en = en + 1
- dk.ei = en
+ en = en + 1
+ dk.ei = en
end
end
ll.en = en
@@ -7649,6 +8334,7 @@ local function collect(list,collected,c)
end
return c
end
+
apply_axis['descendant'] = function(list)
local collected, c = { }, 0
for l=1,#list do
@@ -7940,6 +8626,12 @@ local lp_or = P("|") / " or "
local lp_and = P("&") / " and "
local lp_builtin = P (
+ P("text") / "(ll.dt[1] or '')" + -- fragile
+ P("content") / "ll.dt" +
+ -- P("name") / "(ll.ns~='' and ll.ns..':'..ll.tg)" +
+ P("name") / "((ll.ns~='' and ll.ns..':'..ll.tg) or ll.tg)" +
+ P("tag") / "ll.tg" +
+ P("position") / "l" + -- is element in finalizer
P("firstindex") / "1" +
P("lastindex") / "(#ll.__p__.dt or 1)" +
P("firstelement") / "1" +
@@ -7947,15 +8639,11 @@ local lp_builtin = P (
P("first") / "1" +
P("last") / "#list" +
P("rootposition") / "order" +
- P("position") / "l" + -- is element in finalizer
P("order") / "order" +
P("element") / "(ll.ei or 1)" +
P("index") / "(ll.ni or 1)" +
P("match") / "(ll.mi or 1)" +
- P("text") / "(ll.dt[1] or '')" +
- -- P("name") / "(ll.ns~='' and ll.ns..':'..ll.tg)" +
- P("name") / "((ll.ns~='' and ll.ns..':'..ll.tg) or ll.tg)" +
- P("tag") / "ll.tg" +
+ -- P("namespace") / "ll.ns" +
P("ns") / "ll.ns"
) * ((spaces * P("(") * spaces * P(")"))/"")
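
-- These builtins can appear in lpath expressions; a couple of sketches with
-- made-up element names, assuming x holds a converted tree:
--
-- xml.filter(x,"/document/item[position()>2]")      -- items after the second one
-- xml.filter(x,"/document/item[text()='keyword']")  -- match on textual content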
@@ -7979,11 +8667,11 @@ local lp_function = C(R("az","AZ","__")^1) * P("(") / function(t) -- todo: bett
end
end
-local lparent = lpeg.P("(")
-local rparent = lpeg.P(")")
+local lparent = P("(")
+local rparent = P(")")
local noparent = 1 - (lparent+rparent)
-local nested = lpeg.P{lparent * (noparent + lpeg.V(1))^0 * rparent}
-local value = lpeg.P(lparent * lpeg.C((noparent + nested)^0) * rparent) -- lpeg.P{"("*C(((1-S("()"))+V(1))^0)*")"}
+local nested = P{lparent * (noparent + V(1))^0 * rparent}
+local value = P(lparent * C((noparent + nested)^0) * rparent) -- P{"("*C(((1-S("()"))+V(1))^0)*")"}
local lp_child = Cc("expr.child(ll,'") * R("az","AZ","--","__")^1 * Cc("')")
local lp_number = S("+-") * R("09")^1
@@ -8135,7 +8823,10 @@ local special_1 = P("*") * Cc(register_auto_descendant) * Cc(register_all_nodes
local special_2 = P("/") * Cc(register_auto_self)
local special_3 = P("") * Cc(register_auto_self)
-local pathparser = Ct { "patterns", -- can be made a bit faster by moving pattern outside
+local no_nextcolon = P(-1) + #(1-P(":")) -- newer lpeg needs the P(-1)
+local no_nextlparent = P(-1) + #(1-P("(")) -- newer lpeg needs the P(-1)
+
+local pathparser = Ct { "patterns", -- can be made a bit faster by moving some patterns outside
patterns = spaces * V("protocol") * spaces * (
( V("special") * spaces * P(-1) ) +
@@ -8164,10 +8855,8 @@ local pathparser = Ct { "patterns", -- can be made a bit faster by moving patter
shortcuts = V("shortcuts_a") * (spaces * "/" * spaces * V("shortcuts_a"))^0,
s_descendant_or_self = (P("***/") + P("/")) * Cc(register_descendant_or_self), --- *** is a bonus
- -- s_descendant_or_self = P("/") * Cc(register_descendant_or_self),
s_descendant = P("**") * Cc(register_descendant),
- s_child = P("*") * #(1-P(":")) * Cc(register_child ),
--- s_child = P("*") * #(P("/")+P(-1)) * Cc(register_child ),
+ s_child = P("*") * no_nextcolon * Cc(register_child ),
s_parent = P("..") * Cc(register_parent ),
s_self = P("." ) * Cc(register_self ),
s_root = P("^^") * Cc(register_root ),
@@ -8194,13 +8883,13 @@ local pathparser = Ct { "patterns", -- can be made a bit faster by moving patter
expressions = expression / register_expression,
letters = R("az")^1,
- name = (1-lpeg.S("/[]()|:*!"))^1,
+ name = (1-S("/[]()|:*!"))^1, -- make inline
negate = P("!") * Cc(false),
nodefunction = V("negate") + P("not") * Cc(false) + Cc(true),
nodetest = V("negate") + Cc(true),
nodename = (V("negate") + Cc(true)) * spaces * ((V("wildnodename") * P(":") * V("wildnodename")) + (Cc(false) * V("wildnodename"))),
- wildnodename = (C(V("name")) + P("*") * Cc(false)) * #(1-P("(")),
+ wildnodename = (C(V("name")) + P("*") * Cc(false)) * no_nextlparent,
nodeset = spaces * Ct(V("nodename") * (spaces * P("|") * spaces * V("nodename"))^0) * spaces,
finalizer = (Cb("protocol") * P("/")^-1 * C(V("name")) * arguments * P(-1)) / register_finalizer,
@@ -8364,7 +9053,7 @@ local function profiled_apply(list,parsed,nofparsed,order)
elseif kind == "expression" then
collected = apply_expression(collected,pi.evaluator,order)
elseif kind == "finalizer" then
- collected = pi.finalizer(collected)
+ collected = pi.finalizer(collected) -- no check on # here
p.matched = p.matched + 1
p.finalized = p.finalized + 1
return collected
@@ -8497,9 +9186,9 @@ end
expressions.child = function(e,pattern)
return applylpath(e,pattern) -- todo: cache
end
-expressions.count = function(e,pattern)
+expressions.count = function(e,pattern) -- what if pattern == empty or nil
local collected = applylpath(e,pattern) -- todo: cache
- return (collected and #collected) or 0
+ return pattern and (collected and #collected) or 0
end
-- external
@@ -8508,7 +9197,7 @@ expressions.oneof = function(s,...) -- slow
local t = {...} for i=1,#t do if s == t[i] then return true end end return false
end
expressions.error = function(str)
- xml.errorhandler("unknown function in lpath expression",tostring(str or "?"))
+ xml.errorhandler(format("unknown function in lpath expression: %s",tostring(str or "?")))
return false
end
expressions.undefined = function(s)
@@ -8534,6 +9223,23 @@ expressions.lower = lower
expressions.number = tonumber
expressions.boolean = toboolean
+function expressions.contains(str,pattern)
+ local t = type(str)
+ if t == "string" then
+ if find(str,pattern) then
+ return true
+ end
+ elseif t == "table" then
+ for i=1,#str do
+ local d = str[i]
+ if type(d) == "string" and find(d,pattern) then
+ return true
+ end
+ end
+ end
+ return false
+end
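+
+-- Sketch of how this can be used in an lpath predicate (element and pattern
+-- names are made up):
+--
+-- xml.filter(x,"/catalog/item[contains(text(),'keyword')]")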
+
-- user interface
local function traverse(root,pattern,handle)
@@ -8856,10 +9562,12 @@ local xml = xml
local xmlconvert, xmlcopy, xmlname = xml.convert, xml.copy, xml.name
local xmlinheritedconvert = xml.inheritedconvert
local xmlapplylpath = xml.applylpath
+local xmlfilter = xml.filter
local type, setmetatable, getmetatable = type, setmetatable, getmetatable
-local insert, remove, fastcopy = table.insert, table.remove, table.fastcopy
-local gmatch, gsub = string.gmatch, string.gsub
+local insert, remove, fastcopy, concat = table.insert, table.remove, table.fastcopy, table.concat
+local gmatch, gsub, format = string.gmatch, string.gsub, string.format
+local utfbyte = utf.byte
local function report(what,pattern,c,e)
report_xml("%s element '%s' (root: '%s', position: %s, index: %s, pattern: %s)",what,xmlname(e),xmlname(e.__p__),c,e.ni,pattern)
@@ -9004,7 +9712,7 @@ local function xmltoelement(whatever,root)
end
local element
if type(whatever) == "string" then
- element = xmlinheritedconvert(whatever,root)
+ element = xmlinheritedconvert(whatever,root) -- beware, not really a root
else
element = whatever -- we assume a table
end
@@ -9031,18 +9739,30 @@ local function copiedelement(element,newparent)
end
function xml.delete(root,pattern)
- local collected = xmlapplylpath(root,pattern)
- if collected then
- for c=1,#collected do
- local e = collected[c]
- local p = e.__p__
- if p then
- if trace_manipulations then
- report('deleting',pattern,c,e)
+ if not pattern or pattern == "" then
+ local p = root.__p__
+ if p then
+ if trace_manipulations then
+ report('deleting',"--",c,root)
+ end
+ local d = p.dt
+ remove(d,root.ni)
+ redo_ni(d) -- can be made faster and inlined
+ end
+ else
+ local collected = xmlapplylpath(root,pattern)
+ if collected then
+ for c=1,#collected do
+ local e = collected[c]
+ local p = e.__p__
+ if p then
+ if trace_manipulations then
+ report('deleting',pattern,c,e)
+ end
+ local d = p.dt
+ remove(d,e.ni)
+ redo_ni(d) -- can be made faster and inlined
end
- local d = p.dt
- remove(d,e.ni)
- redo_ni(d) -- can be made faster and inlined
end
end
end
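
-- Usage sketch (pattern and names made up): with a pattern every match gets
-- removed, without one the element itself is detached from its parent:
--
-- xml.delete(x,"metadata/author") -- delete all matches of the pattern
-- xml.delete(e)                   -- delete element e from its parent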
@@ -9105,32 +9825,39 @@ end
local function inject_element(root,pattern,whatever,prepend)
local element = root and xmltoelement(whatever,root)
local collected = element and xmlapplylpath(root,pattern)
- if collected then
- for c=1,#collected do
- local e = collected[c]
- local r = e.__p__
- local d, k, rri = r.dt, e.ni, r.ri
- local edt = (rri and d[rri].dt) or (d and d[k] and d[k].dt)
- if edt then
- local be, af
- local cp = copiedelement(element,e)
- if prepend then
- be, af = cp, edt
- else
- be, af = edt, cp
- end
- local bn = #be
- for i=1,#af do
- bn = bn + 1
- be[bn] = af[i]
- end
- if rri then
- r.dt[rri].dt = be
- else
- d[k].dt = be
- end
- redo_ni(d)
+ local function inject_e(e)
+ local r = e.__p__
+ local d, k, rri = r.dt, e.ni, r.ri
+ local edt = (rri and d[rri].dt) or (d and d[k] and d[k].dt)
+ if edt then
+ local be, af
+ local cp = copiedelement(element,e)
+ if prepend then
+ be, af = cp, edt
+ else
+ be, af = edt, cp
end
+ local bn = #be
+ for i=1,#af do
+ bn = bn + 1
+ be[bn] = af[i]
+ end
+ if rri then
+ r.dt[rri].dt = be
+ else
+ d[k].dt = be
+ end
+ redo_ni(d)
+ end
+ end
+ if not collected then
+ -- nothing
+ elseif collected.tg then
+ -- first or so
+ inject_e(collected)
+ else
+ for c=1,#collected do
+ inject_e(collected[c])
end
end
end
@@ -9138,16 +9865,23 @@ end
local function insert_element(root,pattern,whatever,before) -- todo: element als functie
local element = root and xmltoelement(whatever,root)
local collected = element and xmlapplylpath(root,pattern)
- if collected then
+ local function insert_e(e)
+ local r = e.__p__
+ local d, k = r.dt, e.ni
+ if not before then
+ k = k + 1
+ end
+ insert(d,k,copiedelement(element,r))
+ redo_ni(d)
+ end
+ if not collected then
+ -- nothing
+ elseif collected.tg then
+ -- first or so
+ insert_e(collected)
+ else
for c=1,#collected do
- local e = collected[c]
- local r = e.__p__
- local d, k = r.dt, e.ni
- if not before then
- k = k + 1
- end
- insert(d,k,copiedelement(element,r))
- redo_ni(d)
+ insert_e(collected[c])
end
end
end
@@ -9346,6 +10080,75 @@ function xml.remapname(root, pattern, newtg, newns, newrn)
end
--[[ldx--
+<p>Helper (for q2p).</p>
+--ldx]]--
+
+function xml.cdatatotext(e)
+ local dt = e.dt
+ if #dt == 1 then
+ local first = dt[1]
+ if first.tg == "@cd@" then
+ e.dt = first.dt
+ end
+ else
+ -- maybe option
+ end
+end
+
+xml.builtinentities = table.tohash { "amp", "quot", "apos", "lt", "gt" } -- used often so share
+
+local entities = characters and characters.entities or nil
+local builtinentities = xml.builtinentities
+
+function xml.addentitiesdoctype(root,option) -- we could also have a 'resolve' i.e. inline hex
+ if not entities then
+ require("char-ent")
+ entities = characters.entities
+ end
+ if entities and root and root.tg == "@rt@" and root.statistics then
+ local list = { }
+ local hexify = option == "hexadecimal"
+ for k, v in table.sortedhash(root.statistics.entities.names) do
+ if not builtinentities[k] then
+ local e = entities[k]
+ if not e then
+ e = format("[%s]",k)
+ elseif hexify then
+ e = format("&#%05X;",utfbyte(k))
+ end
+ list[#list+1] = format(" <!ENTITY %s %q >",k,e)
+ end
+ end
+ local dt = root.dt
+ local n = dt[1].tg == "@pi@" and 2 or 1
+ if #list > 0 then
+ insert(dt, n, { "\n" })
+ insert(dt, n, {
+ tg = "@dt@", -- beware, doctype is unparsed
+ dt = { format("Something [\n%s\n] ",concat(list)) },
+ ns = "",
+ special = true,
+ })
+ insert(dt, n, { "\n\n" })
+ else
+ -- insert(dt, n, { table.serialize(root.statistics) })
+ end
+ end
+end
+
+-- local str = [==[
+-- <?xml version='1.0' standalone='yes' ?>
+-- <root>
+-- <a>test &nbsp; test &#123; test</a>
+-- <b><![CDATA[oeps]]></b>
+-- </root>
+-- ]==]
+--
+-- local x = xml.convert(str)
+-- xml.addentitiesdoctype(x,"hexadecimal")
+-- print(x)
+
+--[[ldx--
<p>Here are a few synonyms.</p>
--ldx]]--
@@ -9378,6 +10181,53 @@ xml.remap_tag = xml.remaptag obsolete.remap_tag
xml.remap_name = xml.remapname obsolete.remap_name = xml.remapname
xml.remap_namespace = xml.remapnamespace obsolete.remap_namespace = xml.remapnamespace
+-- new (probably ok)
+
+function xml.cdata(e)
+ if e then
+ local dt = e.dt
+ if dt and #dt == 1 then
+ local first = dt[1]
+ return first.tg == "@cd@" and first.dt[1] or ""
+ end
+ end
+ return ""
+end
+
+function xml.finalizers.xml.cdata(collected)
+ if collected then
+ local e = collected[1]
+ if e then
+ local dt = e.dt
+ if dt and #dt == 1 then
+ local first = dt[1]
+ return first.tg == "@cd@" and first.dt[1] or ""
+ end
+ end
+ end
+ return ""
+end
+
+function xml.insertcomment(e,str,n) -- also insertcdata
+ table.insert(e.dt,n or 1,{
+ tg = "@cm@",
+ ns = "",
+ special = true,
+ at = { },
+ dt = { str },
+ })
+end
+
+function xml.setcdata(e,str) -- also setcomment
+ e.dt = { {
+ tg = "@cd@",
+ ns = "",
+ special = true,
+ at = { },
+ dt = { str },
+ } }
+end
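+
+-- Quick sketch of the two helpers above (element e and the strings are made up):
+--
+-- xml.insertcomment(e,"generated entry")  -- prepends a comment node to e.dt
+-- xml.setcdata(e,"raw <markup> & text")   -- replaces the content by one CDATA node
+-- print(xml.cdata(e))                     -- -> raw <markup> & text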
+
end -- of closure
@@ -9391,15 +10241,17 @@ if not modules then modules = { } end modules ['lxml-xml'] = {
license = "see context related readme files"
}
-local concat = string.concat
+local concat = table.concat
+local find = string.find
local xml = xml
-local finalizers = xml.finalizers.xml
-local xmlfilter = xml.filter -- we could inline this one for speed
-local xmltostring = xml.tostring
-local xmlserialize = xml.serialize
-local xmlcollected = xml.collected
+local finalizers = xml.finalizers.xml
+local xmlfilter = xml.filter -- we could inline this one for speed
+local xmltostring = xml.tostring
+local xmlserialize = xml.serialize
+local xmlcollected = xml.collected
+local xmlnewhandlers = xml.newhandlers
local function first(collected) -- wrong ?
return collected and collected[1]
@@ -9413,6 +10265,21 @@ local function all(collected)
return collected
end
+-- local function reverse(collected)
+-- if collected then
+-- local nc = #collected
+-- if nc > 0 then
+-- local reversed, r = { }, 0
+-- for c=nc,1,-1 do
+-- r = r + 1
+-- reversed[r] = collected[c]
+-- end
+-- return reversed
+-- else
+-- return collected
+-- end
+-- end
+-- end
local reverse = table.reversed
@@ -9429,34 +10296,37 @@ local function att(id,name)
end
local function count(collected)
- return (collected and #collected) or 0
+ return collected and #collected or 0
end
local function position(collected,n)
- if collected then
- n = tonumber(n) or 0
- if n < 0 then
- return collected[#collected + n + 1]
- elseif n > 0 then
- return collected[n]
- else
- return collected[1].mi or 0
- end
+ if not collected then
+ return 0
+ end
+ local nc = #collected
+ if nc == 0 then
+ return 0
+ end
+ n = tonumber(n) or 0
+ if n < 0 then
+ return collected[nc + n + 1]
+ elseif n > 0 then
+ return collected[n]
+ else
+ return collected[1].mi or 0
end
end
local function match(collected)
- return (collected and collected[1].mi) or 0 -- match
+ return collected and #collected > 0 and collected[1].mi or 0 -- match
end
local function index(collected)
- if collected then
- return collected[1].ni
- end
+ return collected and #collected > 0 and collected[1].ni or 0 -- 0 is new
end
local function attributes(collected,arguments)
- if collected then
+ if collected and #collected > 0 then
local at = collected[1].at
if arguments then
return at[arguments]
@@ -9467,7 +10337,7 @@ local function attributes(collected,arguments)
end
local function chainattribute(collected,arguments) -- todo: optional levels
- if collected then
+ if collected and #collected > 0 then
local e = collected[1]
while e do
local at = e.at
@@ -9485,108 +10355,169 @@ local function chainattribute(collected,arguments) -- todo: optional levels
return ""
end
-local function raw(collected) -- hybrid
- if collected then
+local function raw(collected) -- hybrid (not much different from text so it might go)
+ if collected and #collected > 0 then
local e = collected[1] or collected
- return (e and xmlserialize(e)) or "" -- only first as we cannot concat function
+ return e and xmltostring(e) or "" -- only first as we cannot concat function
else
return ""
end
end
+--
+
+local xmltexthandler = xmlnewhandlers {
+ name = "string",
+ initialize = function()
+ result = { }
+ return result
+ end,
+ finalize = function()
+ return concat(result)
+ end,
+ handle = function(...)
+ result[#result+1] = concat { ... }
+ end,
+ escape = false,
+}
+
+local function xmltotext(root)
+ local dt = root.dt
+ if not dt then
+ return ""
+ end
+ local nt = #dt -- string or table
+ if nt == 0 then
+ return ""
+ elseif nt == 1 and type(dt[1]) == "string" then
+ return dt[1] -- no escaping of " ' < > &
+ else
+ return xmlserialize(root,xmltexthandler) or ""
+ end
+end
+
+--
+
local function text(collected) -- hybrid
- if collected then
- local e = collected[1] or collected
- return (e and xmltostring(e.dt)) or ""
+ if collected then -- no # test here !
+ local e = collected[1] or collected -- why fallback to element, how about cdata
+ return e and xmltotext(e) or ""
else
return ""
end
end
local function texts(collected)
- if collected then
- local t, n = { }, 0
- for c=1,#collected do
- local e = collected[c]
- if e and e.dt then
- n = n + 1
- t[n] = e.dt
- end
+ if not collected then
+ return { } -- why no nil
+ end
+ local nc = #collected
+ if nc == 0 then
+ return { } -- why no nil
+ end
+ local t, n = { }, 0
+ for c=1,nc do
+ local e = collected[c]
+ if e and e.dt then
+ n = n + 1
+ t[n] = e.dt
end
- return t
end
+ return t
end
local function tag(collected,n)
- if collected then
- local c
- if n == 0 or not n then
- c = collected[1]
- elseif n > 1 then
- c = collected[n]
- else
- c = collected[#collected-n+1]
- end
- return c and c.tg
+ if not collected then
+ return
+ end
+ local nc = #collected
+ if nc == 0 then
+ return
end
+ local c
+ if n == 0 or not n then
+ c = collected[1]
+ elseif n > 1 then
+ c = collected[n]
+ else
+ c = collected[nc-n+1]
+ end
+ return c and c.tg
end
local function name(collected,n)
- if collected then
- local c
- if n == 0 or not n then
- c = collected[1]
- elseif n > 1 then
- c = collected[n]
- else
- c = collected[#collected-n+1]
- end
- if c then
- if c.ns == "" then
- return c.tg
- else
- return c.ns .. ":" .. c.tg
- end
- end
+ if not collected then
+ return
+ end
+ local nc = #collected
+ if nc == 0 then
+ return
+ end
+ local c
+ if n == 0 or not n then
+ c = collected[1]
+ elseif n > 1 then
+ c = collected[n]
+ else
+ c = collected[nc-n+1]
+ end
+ if not c then
+ -- sorry
+ elseif c.ns == "" then
+ return c.tg
+ else
+ return c.ns .. ":" .. c.tg
end
end
local function tags(collected,nonamespace)
- if collected then
- local t, n = { }, 0
- for c=1,#collected do
- local e = collected[c]
- local ns, tg = e.ns, e.tg
- n = n + 1
- if nonamespace or ns == "" then
- t[n] = tg
- else
- t[n] = ns .. ":" .. tg
- end
+ if not collected then
+ return
+ end
+ local nc = #collected
+ if nc == 0 then
+ return
+ end
+ local t, n = { }, 0
+ for c=1,nc do
+ local e = collected[c]
+ local ns, tg = e.ns, e.tg
+ n = n + 1
+ if nonamespace or ns == "" then
+ t[n] = tg
+ else
+ t[n] = ns .. ":" .. tg
end
- return t
end
+ return t
end
-local function empty(collected)
- if collected then
- for c=1,#collected do
- local e = collected[c]
- if e then
- local edt = e.dt
- if edt then
- local n = #edt
- if n == 1 then
- local edk = edt[1]
- local typ = type(edk)
- if typ == "table" then
- return false
- elseif edk ~= "" then -- maybe an extra tester for spacing only
- return false
- end
- elseif n > 1 then
+local function empty(collected,spacesonly)
+ if not collected then
+ return true
+ end
+ local nc = #collected
+ if nc == 0 then
+ return true
+ end
+ for c=1,nc do
+ local e = collected[c]
+ if e then
+ local edt = e.dt
+ if edt then
+ local n = #edt
+ if n == 1 then
+ local edk = edt[1]
+ local typ = type(edk)
+ if typ == "table" then
+ return false
+ elseif edk ~= "" then
+ return false
+ elseif spacesonly and not find(edk,"%S") then
return false
end
+ elseif n > 1 then
+ return false
end
end
end
@@ -9641,14 +10572,14 @@ function xml.raw(id,pattern)
end
end
-function xml.text(id,pattern)
+function xml.text(id,pattern) -- brrr either content or element (when cdata)
if pattern then
-- return text(xmlfilter(id,pattern))
local collected = xmlfilter(id,pattern)
- return (collected and xmltostring(collected[1].dt)) or ""
+ return collected and #collected > 0 and xmltotext(collected[1]) or ""
elseif id then
-- return text(id)
- return xmltostring(id.dt) or ""
+ return xmltotext(id) or ""
else
return ""
end
@@ -9656,6 +10587,8 @@ end
xml.content = text
+--
+
function xml.position(id,pattern,n) -- element
return position(xmlfilter(id,pattern),n)
end
@@ -9664,8 +10597,8 @@ function xml.match(id,pattern) -- number
return match(xmlfilter(id,pattern))
end
-function xml.empty(id,pattern)
- return empty(xmlfilter(id,pattern))
+function xml.empty(id,pattern,spacesonly)
+ return empty(xmlfilter(id,pattern),spacesonly)
end
xml.all = xml.filter
@@ -9719,7 +10652,7 @@ if not modules then modules = { } end modules ['data-ini'] = {
license = "see context related readme files",
}
-local gsub, find, gmatch = string.gsub, string.find, string.gmatch
+local gsub, find, gmatch, char = string.gsub, string.find, string.gmatch, string.char
local concat = table.concat
local next, type = next, type
@@ -9781,7 +10714,7 @@ do
local homedir = osgetenv(ostype == "windows" and 'USERPROFILE' or 'HOME') or ''
if not homedir or homedir == "" then
- homedir = string.char(127) -- we need a value, later we wil trigger on it
+ homedir = char(127) -- we need a value, later we will trigger on it
end
homedir = file.collapsepath(homedir)
@@ -9954,7 +10887,7 @@ if not modules then modules = { } end modules ['data-exp'] = {
license = "see context related readme files",
}
-local format, find, gmatch, lower = string.format, string.find, string.gmatch, string.lower
+local format, find, gmatch, lower, char, sub = string.format, string.find, string.gmatch, string.lower, string.char, string.sub
local concat, sort = table.concat, table.sort
local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
local Ct, Cs, Cc, P, C, S = lpeg.Ct, lpeg.Cs, lpeg.Cc, lpeg.P, lpeg.C, lpeg.S
@@ -9974,19 +10907,6 @@ local resolvers = resolvers
-- all, when working on the main resolver code, I don't want to scroll
-- past this every time. See data-obs.lua for the gsub variant.
--- {a,b,c,d}
--- a,b,c/{p,q,r},d
--- a,b,c/{p,q,r}/d/{x,y,z}//
--- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
--- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
--- a{b,c}{d,e}f
--- {a,b,c,d}
--- {a,b,c/{p,q,r},d}
--- {a,b,c/{p,q,r}/d/{x,y,z}//}
--- {a,b,c/{p,q/{x,y,z}},d/{p,q,r}}
--- {a,b,c/{p,q/{x,y,z},w}v,d/{p,q,r}}
--- {$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,.local,}/web2c}
-
local function f_first(a,b)
local t, n = { }, 0
for s in gmatch(b,"[^,]+") do
@@ -10043,12 +10963,9 @@ local function splitpathexpr(str, newlist, validate) -- I couldn't resist lpeggi
str = lpegmatch(stripper_1,str)
if validate then
for s in gmatch(str,"[^,]+") do
- local restoreslashes = false
- if find(s, "//$") then restoreslashes = true end
s = validate(s)
if s then
n = n + 1 ; t[n] = s
- if restoreslashes and not find(s, "//$") then t[n] = t[n] .. '//' end
end
end
else
@@ -10081,41 +10998,56 @@ function resolvers.expandedpathfromlist(pathlist)
return newlist
end
+-- {a,b,c,d}
+-- a,b,c/{p,q,r},d
+-- a,b,c/{p,q,r}/d/{x,y,z}//
+-- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
+-- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
+-- a{b,c}{d,e}f
+-- {a,b,c,d}
+-- {a,b,c/{p,q,r},d}
+-- {a,b,c/{p,q,r}/d/{x,y,z}//}
+-- {a,b,c/{p,q/{x,y,z}},d/{p,q,r}}
+-- {a,b,c/{p,q/{x,y,z},w}v,d/{p,q,r}}
+-- {$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,.local,}/web2c}
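+--
+-- A small sketch of what such specs expand to (the functions are the ones
+-- defined in this module; the input string is made up):
+--
+-- local list = resolvers.expandedpathfromlist(resolvers.splitpath("a{b,c}{d,e}f"))
+-- -- list now holds abdf, abef, acdf, acef (one entry per combination)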
+
local cleanup = lpeg.replacer {
{ "!" , "" },
{ "\\" , "/" },
}
-local homedir
-
-function resolvers.cleanpath(str)
- if not homedir then
- homedir = lpegmatch(cleanup,environment.homedir or "")
- if homedir == string.char(127) or homedir == "" or not lfs.isdir(homedir) then
- if trace_expansions then
- report_expansions("no home dir set, ignoring dependent paths")
- end
- function resolvers.cleanpath(str)
- if find(str,"~") then
- return "" -- special case
- else
- return str and lpegmatch(cleanup,str)
- end
- end
- else
- cleanup = lpeg.replacer {
- { "!" , "" },
- { "\\" , "/" },
- { "~" , homedir },
- }
- function resolvers.cleanpath(str)
- return str and lpegmatch(cleanup,str)
+function resolvers.cleanpath(str) -- tricky, maybe only simple paths
+ local doslashes = (P("\\")/"/" + 1)^0
+ local donegation = (P("!") /"" )^0
+ local homedir = lpegmatch(Cs(donegation * doslashes),environment.homedir or "")
+ if homedir == "~" or homedir == "" or not lfs.isdir(homedir) then
+ if trace_expansions then
+ report_expansions("no home dir set, ignoring dependent paths")
+ end
+ function resolvers.cleanpath(str)
+ if not str or find(str,"~") then
+ return "" -- special case
+ else
+ return lpegmatch(cleanup,str)
end
end
+ else
+ local dohome = ((P("~")+P("$HOME"))/homedir)^0
+ local cleanup = Cs(donegation * dohome * doslashes)
+ function resolvers.cleanpath(str)
+ return str and lpegmatch(cleanup,str) or ""
+ end
end
return resolvers.cleanpath(str)
end
+-- print(resolvers.cleanpath(""))
+-- print(resolvers.cleanpath("!"))
+-- print(resolvers.cleanpath("~"))
+-- print(resolvers.cleanpath("~/test"))
+-- print(resolvers.cleanpath("!~/test"))
+-- print(resolvers.cleanpath("~/test~test"))
+
-- This one strips quotes and funny tokens.
local expandhome = P("~") / "$HOME" -- environment.homedir
@@ -10140,8 +11072,8 @@ end
local cache = { }
----- splitter = Ct(lpeg.splitat(S(ostype == "windows" and ";" or ":;"))) -- maybe add ,
-local splitter = Ct(lpeg.splitat(";")) -- as we move towards urls, prefixes and use tables we no longer do :
+----- splitter = lpeg.tsplitat(S(ostype == "windows" and ";" or ":;")) -- maybe add ,
+local splitter = lpeg.tsplitat(";") -- as we move towards urls, prefixes and use tables we no longer do :
local backslashswapper = lpeg.replacer("\\","/")
@@ -10201,13 +11133,22 @@ end
-local weird = P(".")^1 + lpeg.anywhere(S("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t"))
+-- a lot of this caching can be stripped away when we have SSDs everywhere
+--
+-- we could cache all the (sub)paths here if needed
local attributes, directory = lfs.attributes, lfs.dir
+local weird = P(".")^1 + lpeg.anywhere(S("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t"))
+local timer = { }
+local scanned = { }
+local nofscans = 0
+local scancache = { }
+
local function scan(files,spec,path,n,m,r)
- local full = (path == "" and spec) or (spec .. path .. '/')
- local dirs, nofdirs = { }, 0
+ local full = (path == "" and spec) or (spec .. path .. '/')
+ local dirs = { }
+ local nofdirs = 0
for name in directory(full) do
if not lpegmatch(weird,name) then
local mode = attributes(full..name,'mode')
@@ -10245,25 +11186,122 @@ local function scan(files,spec,path,n,m,r)
files, n, m, r = scan(files,spec,dirs[i],n,m,r)
end
end
+ scancache[sub(full,1,-2)] = files
return files, n, m, r
end
-function resolvers.scanfiles(path,branch)
+local fullcache = { }
+
+function resolvers.scanfiles(path,branch,usecache)
+ statistics.starttiming(timer)
+ local realpath = resolvers.resolve(path) -- no shortcut
+ if usecache then
+ local files = fullcache[realpath]
+ if files then
+ if trace_locating then
+ report_expansions("using caches scan of path '%s', branch '%s'",path,branch or path)
+ end
+ return files
+ end
+ end
if trace_locating then
- report_expansions("scanning path '%s', branch '%s'",path, branch or path)
+ report_expansions("scanning path '%s', branch '%s'",path,branch or path)
end
- local realpath = resolvers.resolve(path) -- no shortcut
local files, n, m, r = scan({ },realpath .. '/',"",0,0,0)
- files.__path__ = path -- can be selfautoparent:texmf-whatever
- files.__files__ = n
- files.__directories__ = m
- files.__remappings__ = r
+ files.__path__ = path -- can be selfautoparent:texmf-whatever
+ files.__files__ = n
+ files.__directories__ = m
+ files.__remappings__ = r
if trace_locating then
report_expansions("%s files found on %s directories with %s uppercase remappings",n,m,r)
end
+ if usecache then
+ scanned[#scanned+1] = realpath
+ fullcache[realpath] = files
+ end
+ nofscans = nofscans + 1
+ statistics.stoptiming(timer)
+ return files
+end
+
+local function simplescan(files,spec,path) -- first match only, no map and such
+ local full = (path == "" and spec) or (spec .. path .. '/')
+ local dirs = { }
+ local nofdirs = 0
+ for name in directory(full) do
+ if not lpegmatch(weird,name) then
+ local mode = attributes(full..name,'mode')
+ if mode == 'file' then
+ if not files[name] then
+ -- only first match
+ files[name] = path
+ end
+ elseif mode == 'directory' then
+ nofdirs = nofdirs + 1
+ if path ~= "" then
+ dirs[nofdirs] = path..'/'..name
+ else
+ dirs[nofdirs] = name
+ end
+ end
+ end
+ end
+ if nofdirs > 0 then
+ sort(dirs)
+ for i=1,nofdirs do
+ files = simplescan(files,spec,dirs[i])
+ end
+ end
+ return files
+end
+
+local simplecache = { }
+local nofsharedscans = 0
+
+function resolvers.simplescanfiles(path,branch,usecache)
+ statistics.starttiming(timer)
+ local realpath = resolvers.resolve(path) -- no shortcut
+ if usecache then
+ local files = simplecache[realpath]
+ if not files then
+ files = scancache[realpath]
+ if files then
+ nofsharedscans = nofsharedscans + 1
+ end
+ end
+ if files then
+ if trace_locating then
+ report_expansions("using caches scan of path '%s', branch '%s'",path,branch or path)
+ end
+ return files
+ end
+ end
+ if trace_locating then
+ report_expansions("scanning path '%s', branch '%s'",path,branch or path)
+ end
+ local files = simplescan({ },realpath .. '/',"")
+ if trace_locating then
+ report_expansions("%s files found",table.count(files))
+ end
+ if usecache then
+ scanned[#scanned+1] = realpath
+ simplecache[realpath] = files
+ end
+ nofscans = nofscans + 1
+ statistics.stoptiming(timer)
return files
end
+function resolvers.scandata()
+ table.sort(scanned)
+ return {
+ n = nofscans,
+ shared = nofsharedscans,
+ time = statistics.elapsedtime(timer),
+ paths = scanned,
+ }
+end
+
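+-- Sketch of the cached scanning (the path is made up):
+--
+-- local files = resolvers.scanfiles("/opt/texmf-project",nil,true) -- reused on the next call
+-- print(files.__files__,files.__directories__,files.__remappings__)
+-- local stats = resolvers.scandata() -- { n = ..., shared = ..., time = ..., paths = { ... } }
+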
end -- of closure
@@ -10361,12 +11399,12 @@ local relations = allocate { -- todo: handlers also here
mp = {
names = { "mp" },
variable = 'MPINPUTS',
- suffixes = { 'mp' },
+ suffixes = { 'mp', 'mpvi', 'mpiv', 'mpii' },
},
tex = {
names = { "tex" },
variable = 'TEXINPUTS',
- suffixes = { 'tex', "mkiv", "mkiv", "mkii" },
+ suffixes = { 'tex', "mkvi", "mkiv", "mkii" },
},
icc = {
names = { "icc", "icc profile", "icc profiles" },
@@ -10503,7 +11541,7 @@ end
resolvers.updaterelations() -- push this in the metatable -> newindex
local function simplified(t,k)
- return rawget(t,lower(gsub(k," ","")))
+ return k and rawget(t,lower(gsub(k," ",""))) or nil
end
setmetatableindex(formats, simplified)
@@ -10589,6 +11627,7 @@ luatools with a recache feature.</p>
--ldx]]--
local format, lower, gsub, concat = string.format, string.lower, string.gsub, table.concat
+local serialize, serializetofile = table.serialize, table.tofile
local mkdirs, isdir = dir.mkdirs, lfs.isdir
local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
@@ -10651,11 +11690,7 @@ local function identify()
end
elseif not writable and caches.force then
local cacheparent = file.dirname(cachepath)
- -- TH: added 'or true' for deeply buried non-existent caches.
- -- file.is_writable() is not really important here, since this
- -- branch is only checked for as-yet non-existent paths, and
- -- it guards against mkdirs() failing, anyway.
- if file.is_writable(cacheparent) or true then
+ if file.is_writable(cacheparent) and true then -- we go on anyway (needed for mojca's kind of paths)
if not caches.ask or io.ask(format("\nShould I create the cache path %s?",cachepath), "no", { "yes", "no" }) == "yes" then
mkdirs(cachepath)
if isdir(cachepath) and file.is_writable(cachepath) then
@@ -10746,7 +11781,7 @@ function caches.usedpaths()
end
function caches.configfiles()
- return table.concat(resolvers.instance.specification,";")
+ return concat(resolvers.instance.specification,";")
end
function caches.hashed(tree)
@@ -10870,9 +11905,9 @@ function caches.savedata(filepath,filename,data,raw)
end
data.cache_uuid = os.uuid()
if caches.direct then
- file.savedata(tmaname,table.serialize(data,true,saveoptions))
+ file.savedata(tmaname,serialize(data,true,saveoptions))
else
- table.tofile(tmaname,data,true,saveoptions)
+ serializetofile(tmaname,data,true,saveoptions)
end
utilities.lua.compile(tmaname,tmcname)
end
@@ -10939,7 +11974,7 @@ function caches.savecontent(cachename,dataname,content)
content = content,
uuid = os.uuid(),
}
- local ok = io.savedata(luaname,table.serialize(data,true))
+ local ok = io.savedata(luaname,serialize(data,true))
if ok then
if trace_locating then
report_resolvers("category '%s', cachename '%s' saved in '%s'",dataname,cachename,luaname)
@@ -11112,7 +12147,7 @@ if not modules then modules = { } end modules ['data-res'] = {
-- instance but for practical purposes we now avoid this and use a
-- instance variable. We always have one instance active (sort of global).
--- todo: cache:/// home:///
+-- todo: cache:/// home:/// selfautoparent:/// (sometime end 2012)
local format, gsub, find, lower, upper, match, gmatch = string.format, string.gsub, string.find, string.lower, string.upper, string.match, string.gmatch
local concat, insert, sortedkeys = table.concat, table.insert, table.sortedkeys
@@ -11155,7 +12190,58 @@ resolvers.criticalvars = allocate { "SELFAUTOLOC", "SELFAUTODIR", "SELFAUTOPARE
resolvers.luacnfname = 'texmfcnf.lua'
resolvers.luacnfstate = "unknown"
-resolvers.luacnfspec = '{selfautodir:,selfautoparent:};{selfautodir:,selfautoparent:}{/share,}/texmf{-local,};{selfautodir:,selfautoparent:}{/share,}/texmf{-local,}/web2c'
+-- The web2c tex binaries as well as kpse have built in paths for the configuration
+-- files and there can be a depressing truckload of them. This is actually the weak
+-- spot of a distribution. So we don't want:
+--
+-- resolvers.luacnfspec = '{$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,}/web2c}'
+--
+-- but instead use:
+--
+-- resolvers.luacnfspec = 'selfautoparent:{/texmf{-local,}{,/web2c}}'
+--
+-- which does not make texlive happy as there is a texmf-local tree one level up
+-- (sigh), so we need this. (We can assume web2c as mkiv does not run on older
+-- texlives anyway.)
+--
+-- texlive:
+--
+-- selfautodir:
+-- selfautoparent:
+-- selfautodir:share/texmf-local/web2c
+-- selfautodir:share/texmf/web2c
+-- selfautodir:texmf-local/web2c
+-- selfautodir:texmf/web2c
+-- selfautoparent:share/texmf-local/web2c
+-- selfautoparent:share/texmf/web2c
+-- selfautoparent:texmf-local/web2c
+-- selfautoparent:texmf/web2c
+--
+-- minimals:
+--
+-- home:texmf/web2c
+-- selfautoparent:texmf-local/web2c
+-- selfautoparent:texmf-context/web2c
+-- selfautoparent:texmf/web2c
+
+if this_is_texlive then
+ -- resolvers.luacnfspec = '{selfautodir:,selfautoparent:}{,{/share,}/texmf{-local,}/web2c}'
+ -- resolvers.luacnfspec = '{selfautodir:{/share,}/texmf-local/web2c,selfautoparent:{/share,}/texmf{-local,}/web2c}'
+ -- resolvers.luacnfspec = 'selfautodir:/texmf-local/web2c;selfautoparent:/texmf{-local,}/web2c'
+ resolvers.luacnfspec = 'selfautodir:;selfautoparent:;{selfautodir:,selfautoparent:}{/share,}/texmf{-local,}/web2c'
+else
+ resolvers.luacnfspec = 'home:texmf/web2c;selfautoparent:texmf{-local,-context,}/web2c'
+end
+
+-- which (as we want users to use the web2c path) can be simplified to this:
+--
+-- if environment and environment.ownpath and string.find(environment.ownpath,"[\\/]texlive[\\/]") then
+-- resolvers.luacnfspec = 'selfautodir:/texmf-local/web2c,selfautoparent:/texmf-local/web2c,selfautoparent:/texmf/web2c'
+-- else
+-- resolvers.luacnfspec = 'selfautoparent:/texmf-local/web2c,selfautoparent:/texmf/web2c'
+-- end
+
+
local unset_variable = "unset"
@@ -11177,7 +12263,7 @@ local instance = resolvers.instance or nil -- the current one (fast access)
function resolvers.setenv(key,value,raw)
if instance then
-- this one will be consulted first when we stay inside
- -- the current environment
+ -- the current environment (prefixes are not resolved here)
instance.environment[key] = value
-- we feed back into the environment, and as this is used
-- by other applications (via os.execute) we need to make
@@ -11257,6 +12343,7 @@ function resolvers.newinstance() -- todo: all vars will become lowercase and alp
remember = true,
diskcache = true,
renewcache = false,
+ renewtree = false,
loaderror = false,
savelists = true,
pattern = nil, -- lists
@@ -11348,7 +12435,7 @@ local function makepathexpression(str)
end
end
-local function reportcriticalvariables()
+local function reportcriticalvariables(cnfspec)
if trace_locating then
for i=1,#resolvers.criticalvars do
local k = resolvers.criticalvars[i]
@@ -11356,6 +12443,14 @@ local function reportcriticalvariables()
report_resolving("variable '%s' set to '%s'",k,v)
end
report_resolving()
+ if cnfspec then
+ if type(cnfspec) == "table" then
+ report_resolving("using configuration specification '%s'",concat(cnfspec,","))
+ else
+ report_resolving("using configuration specification '%s'",cnfspec)
+ end
+ end
+ report_resolving()
end
reportcriticalvariables = function() end
end
@@ -11370,7 +12465,7 @@ local function identify_configuration_files()
else
resolvers.luacnfstate = "environment"
end
- reportcriticalvariables()
+ reportcriticalvariables(cnfspec)
local cnfpaths = expandedpathfromlist(resolvers.splitpath(cnfspec))
local luacnfname = resolvers.luacnfname
for i=1,#cnfpaths do
@@ -11406,6 +12501,19 @@ local function load_configuration_files()
if blob then
local setups = instance.setups
local data = blob()
+ local parent = data and data.parent
+ if parent then
+ local filename = filejoin(pathname,parent)
+ local realname = resolvers.resolve(filename) -- no shortcut
+ local blob = loadfile(realname)
+ if blob then
+ local parentdata = blob()
+ if parentdata then
+ report_resolving("loading configuration file '%s'",filename)
+ data = table.merged(parentdata,data)
+ end
+ end
+ end
data = data and data.content
if data then
if trace_locating then
@@ -11495,11 +12603,6 @@ local function locate_file_databases()
local runtime = stripped == path
path = resolvers.cleanpath(path)
local spec = resolvers.splitmethod(stripped)
- -- TH Original did this the other way around (elseif and if tests
- -- reversed) but then the else branch was never reached, as 'runtime'
- -- would effectively be ignored. In turn, that meant that the wrong
- -- locator method was used, such that tree: and file: were treated
- -- identically (cached and no runtime search).
if runtime and (spec.noscheme or spec.scheme == "file") then
stripped = "tree:///" .. stripped
elseif spec.scheme == "cache" or spec.scheme == "file" then
@@ -11507,7 +12610,7 @@ local function locate_file_databases()
end
if trace_locating then
if runtime then
- report_resolving("locating list of '%s' (runtime)",path)
+ report_resolving("locating list of '%s' (runtime) (%s)",path,stripped)
else
report_resolving("locating list of '%s' (cached)",path)
end
@@ -11551,6 +12654,39 @@ local function save_file_databases() -- will become cachers
end
end
+function resolvers.renew(hashname)
+ if hashname and hashname ~= "" then
+ local expanded = resolvers.expansion(hashname) or ""
+ if expanded ~= "" then
+ if trace_locating then
+ report_resolving("identifying tree '%s' from '%s'",expanded,hashname)
+ end
+ hashname = expanded
+ else
+ if trace_locating then
+ report_resolving("identifying tree '%s'",hashname)
+ end
+ end
+ local realpath = resolvers.resolve(hashname)
+ if lfs.isdir(realpath) then
+ if trace_locating then
+ report_resolving("using path '%s'",realpath)
+ end
+ methodhandler('generators',hashname)
+ -- could be shared
+ local content = instance.files[hashname]
+ caches.collapsecontent(content)
+ if trace_locating then
+ report_resolving("saving tree '%s'",hashname)
+ end
+ caches.savecontent(hashname,"files",content)
+ -- till here
+ else
+ report_resolving("invalid path '%s'",realpath)
+ end
+ end
+end
+
local function load_databases()
locate_file_databases()
if instance.diskcache and not instance.renewcache then
@@ -11838,6 +12974,7 @@ local function collect_files(names)
if dname == "" or find(dname,"^%.") then
dname = false
else
+dname = gsub(dname,"*","%.*")
dname = "/" .. dname .. "$"
end
local hashes = instance.hashes
@@ -11897,9 +13034,22 @@ local function collect_files(names)
return noffiles > 0 and filelist or nil
end
-function resolvers.registerintrees(name)
- if not find(name,"^%.") then
- instance.foundintrees[name] = (instance.foundintrees[name] or 0) + 1 -- maybe only one
+local fit = { }
+
+function resolvers.registerintrees(filename,format,filetype,usedmethod,foundname)
+ local foundintrees = instance.foundintrees
+ if usedmethod == "direct" and filename == foundname and fit[foundname] then
+ -- just an extra lookup after a test on presence
+ else
+ local t = {
+ filename = filename,
+ format = format ~= "" and format or nil,
+ filetype = filetype ~= "" and filetype or nil,
+ usedmethod = usedmethod,
+ foundname = foundname,
+ }
+ fit[foundname] = t
+ foundintrees[#foundintrees+1] = t
end
end
@@ -11919,297 +13069,423 @@ end
local preparetreepattern = Cs((P(".")/"%%." + P("-")/"%%-" + P(1))^0 * Cc("$"))
--- this one is split in smaller functions but it needs testing
+-- -- -- begin of main file search routing -- -- -- needs checking as previous has been patched
-local function collect_instance_files(filename,askedformat,allresults) -- todo : plugin (scanners, checkers etc)
- local result = { }
- local stamp = nil
- askedformat = askedformat or ""
- filename = collapsepath(filename)
- -- speed up / beware: format problem
- if instance.remember and not allresults then
- stamp = filename .. "--" .. askedformat
- if instance.found[stamp] then
+local collect_instance_files
+
+local function find_analyze(filename,askedformat,allresults)
+ local filetype, wantedfiles, ext = '', { }, fileextname(filename)
+ -- too tricky as filename can be bla.1.2.3:
+ --
+ -- if not suffixmap[ext] then
+ -- wantedfiles[#wantedfiles+1] = filename
+ -- end
+ wantedfiles[#wantedfiles+1] = filename
+ if askedformat == "" then
+ if ext == "" or not suffixmap[ext] then
+ local defaultsuffixes = resolvers.defaultsuffixes
+ for i=1,#defaultsuffixes do
+ local forcedname = filename .. '.' .. defaultsuffixes[i]
+ wantedfiles[#wantedfiles+1] = forcedname
+ filetype = resolvers.formatofsuffix(forcedname)
+ if trace_locating then
+ report_resolving("forcing filetype '%s'",filetype)
+ end
+ end
+ else
+ filetype = resolvers.formatofsuffix(filename)
if trace_locating then
- report_resolving("remembered file '%s'",filename)
+ report_resolving("using suffix based filetype '%s'",filetype)
end
- resolvers.registerintrees(filename) -- for tracing used files
- return instance.found[stamp]
end
- end
- if not dangerous[askedformat] then
- if isreadable(filename) then
- if trace_detail then
- report_resolving("file '%s' found directly",filename)
- end
- if stamp then
- instance.found[stamp] = { filename }
+ else
+ if ext == "" or not suffixmap[ext] then
+ local format_suffixes = suffixes[askedformat]
+ if format_suffixes then
+ for i=1,#format_suffixes do
+ wantedfiles[#wantedfiles+1] = filename .. "." .. format_suffixes[i]
+ end
end
- return { filename }
+ end
+ filetype = askedformat
+ if trace_locating then
+ report_resolving("using given filetype '%s'",filetype)
end
end
+ return filetype, wantedfiles
+end
+
+local function find_direct(filename,allresults)
+ if not dangerous[askedformat] and isreadable(filename) then
+ if trace_detail then
+ report_resolving("file '%s' found directly",filename)
+ end
+ return "direct", { filename }
+ end
+end
+
+local function find_wildcard(filename,allresults)
if find(filename,'%*') then
if trace_locating then
report_resolving("checking wildcard '%s'", filename)
end
- result = resolvers.findwildcardfiles(filename) -- we can use th elocal
- elseif file.is_qualified_path(filename) then
- if isreadable(filename) then
- if trace_locating then
- report_resolving("qualified name '%s'", filename)
- end
- result = { filename }
- else
- local forcedname, ok, suffix = "", false, fileextname(filename)
- if suffix == "" then -- why
- local format_suffixes = askedformat == "" and resolvers.defaultsuffixes or suffixes[askedformat]
- if format_suffixes then
- for i=1,#format_suffixes do
- local s = format_suffixes[i]
- forcedname = filename .. "." .. s
- if isreadable(forcedname) then
- if trace_locating then
- report_resolving("no suffix, forcing format filetype '%s'", s)
- end
- result, ok = { forcedname }, true
- break
- end
- end
- end
- end
- if not ok and suffix ~= "" then
- -- try to find in tree (no suffix manipulation), here we search for the
- -- matching last part of the name
- local basename = filebasename(filename)
- local pattern = lpegmatch(preparetreepattern,filename)
- -- messy .. to be sorted out
- local savedformat = askedformat
- local format = savedformat or ""
- if format == "" then
- askedformat = resolvers.formatofsuffix(suffix)
- end
- if not format then
- askedformat = "othertextfiles" -- kind of everything, maybe texinput is better
- end
- --
- if basename ~= filename then
- local resolved = collect_instance_files(basename,askedformat,allresults)
- if #result == 0 then -- shouldn't this be resolved ?
- local lowered = lower(basename)
- if filename ~= lowered then
- resolved = collect_instance_files(lowered,askedformat,allresults)
- end
- end
- resolvers.format = savedformat
- --
- for r=1,#resolved do
- local rr = resolved[r]
- if find(rr,pattern) then
- result[#result+1], ok = rr, true
- end
- end
- end
- -- a real wildcard:
- --
- -- if not ok then
- -- local filelist = collect_files({basename})
- -- for f=1,#filelist do
- -- local ff = filelist[f][3] or ""
- -- if find(ff,pattern) then
- -- result[#result+1], ok = ff, true
- -- end
- -- end
- -- end
- end
- if not ok and trace_locating then
- report_resolving("qualified name '%s'", filename)
- end
+ local method, result = resolvers.findwildcardfiles(filename)
+ if result then
+ return "wildcard", result
end
- else
- -- search spec
- local filetype, done, wantedfiles, ext = '', false, { }, fileextname(filename)
- -- -- tricky as filename can be bla.1.2.3
- -- if not suffixmap[ext] then --- probably needs to be done elsewhere too
- -- wantedfiles[#wantedfiles+1] = filename
- -- end
- wantedfiles[#wantedfiles+1] = filename
- if askedformat == "" then
- if ext == "" or not suffixmap[ext] then
- local defaultsuffixes = resolvers.defaultsuffixes
- for i=1,#defaultsuffixes do
- local forcedname = filename .. '.' .. defaultsuffixes[i]
- wantedfiles[#wantedfiles+1] = forcedname
- filetype = resolvers.formatofsuffix(forcedname)
+ end
+end
+
+local function find_qualified(filename,allresults) -- this one will be split too
+ if not file.is_qualified_path(filename) then
+ return
+ end
+ if trace_locating then
+ report_resolving("checking qualified name '%s'", filename)
+ end
+ if isreadable(filename) then
+ if trace_detail then
+ report_resolving("qualified file '%s' found", filename)
+ end
+ return "qualified", { filename }
+ end
+ if trace_detail then
+ report_resolving("locating qualified file '%s'", filename)
+ end
+ local forcedname, suffix = "", fileextname(filename)
+ if suffix == "" then -- why
+ local format_suffixes = askedformat == "" and resolvers.defaultsuffixes or suffixes[askedformat]
+ if format_suffixes then
+ for i=1,#format_suffixes do
+ local s = format_suffixes[i]
+ forcedname = filename .. "." .. s
+ if isreadable(forcedname) then
if trace_locating then
- report_resolving("forcing filetype '%s'",filetype)
+ report_resolving("no suffix, forcing format filetype '%s'", s)
end
+ return "qualified", { forcedname }
end
- else
- filetype = resolvers.formatofsuffix(filename)
- if trace_locating then
- report_resolving("using suffix based filetype '%s'",filetype)
+ end
+ end
+ end
+ if suffix and suffix ~= "" then
+ -- try to find in tree (no suffix manipulation), here we search for the
+ -- matching last part of the name
+ local basename = filebasename(filename)
+ local pattern = lpegmatch(preparetreepattern,filename)
+ -- messy .. to be sorted out
+ local savedformat = askedformat
+ local format = savedformat or ""
+ if format == "" then
+ askedformat = resolvers.formatofsuffix(suffix)
+ end
+ if not format then
+ askedformat = "othertextfiles" -- kind of everything, maybe all
+ end
+ --
+ if basename ~= filename then
+ local resolved = collect_instance_files(basename,askedformat,allresults)
+ if #resolved == 0 then
+ local lowered = lower(basename)
+ if filename ~= lowered then
+ resolved = collect_instance_files(lowered,askedformat,allresults)
end
end
- else
- if ext == "" or not suffixmap[ext] then
- local format_suffixes = suffixes[askedformat]
- if format_suffixes then
- for i=1,#format_suffixes do
- wantedfiles[#wantedfiles+1] = filename .. "." .. format_suffixes[i]
+ resolvers.format = savedformat
+ --
+ if #resolved > 0 then
+ local result = { }
+ for r=1,#resolved do
+ local rr = resolved[r]
+ if find(rr,pattern) then
+ result[#result+1] = rr
end
end
+ if #result > 0 then
+ return "qualified", result
+ end
end
- filetype = askedformat
- if trace_locating then
- report_resolving("using given filetype '%s'",filetype)
- end
end
- local typespec = resolvers.variableofformat(filetype)
- local pathlist = resolvers.expandedpathlist(typespec)
- if not pathlist or #pathlist == 0 then
- -- no pathlist, access check only / todo == wildcard
- if trace_detail then
- report_resolving("checking filename '%s', filetype '%s', wanted files '%s'",filename, filetype or '?',concat(wantedfiles," | "))
+ -- a real wildcard:
+ --
+ -- local filelist = collect_files({basename})
+ -- result = { }
+ -- for f=1,#filelist do
+ -- local ff = filelist[f][3] or ""
+ -- if find(ff,pattern) then
+ -- result[#result+1], ok = ff, true
+ -- end
+ -- end
+ -- if #result > 0 then
+ -- return "qualified", result
+ -- end
+ end
+end
+
+local function check_subpath(fname)
+ if isreadable(fname) then
+ if trace_detail then
+ report_resolving("found '%s' by deep scanning",fname)
+ end
+ return fname
+ end
+end
+
+local function find_intree(filename,filetype,wantedfiles,allresults)
+ local typespec = resolvers.variableofformat(filetype)
+ local pathlist = resolvers.expandedpathlist(typespec)
+ local method = "intree"
+ if pathlist and #pathlist > 0 then
+ -- list search
+ local filelist = collect_files(wantedfiles)
+ local dirlist = { }
+ if filelist then
+ for i=1,#filelist do
+ dirlist[i] = filedirname(filelist[i][3]) .. "/" -- was [2] .. gamble
end
- for k=1,#wantedfiles do
- local fname = wantedfiles[k]
- if fname and isreadable(fname) then
- filename, done = fname, true
- result[#result+1] = filejoin('.',fname)
- break
+ end
+ if trace_detail then
+ report_resolving("checking filename '%s'",filename)
+ end
+ local result = { }
+ for k=1,#pathlist do
+ local path = pathlist[k]
+ local pathname = lpegmatch(inhibitstripper,path)
+ local doscan = path == pathname -- no ^!!
+ if not find (pathname,'//$') then
+ doscan = false -- we check directly on the path
+ end
+ local done = false
+ -- using file list
+ if filelist then -- database
+ -- compare list entries with permitted pattern -- /xx /xx//
+ local expression = makepathexpression(pathname)
+ if trace_detail then
+ report_resolving("using pattern '%s' for path '%s'",expression,pathname)
end
- end
- -- this is actually 'other text files' or 'any' or 'whatever'
- local filelist = collect_files(wantedfiles)
- local fl = filelist and filelist[1]
- if fl then
- filename = fl[3] -- not local?
- result[#result+1] = resolvers.resolve(filename)
- done = true
- end
- else
- -- list search
- local filelist = collect_files(wantedfiles)
- local dirlist = { }
- if filelist then
- for i=1,#filelist do
- dirlist[i] = filedirname(filelist[i][3]) .. "/" -- was [2] .. gamble
- end
- end
- if trace_detail then
- report_resolving("checking filename '%s'",filename)
- end
- for k=1,#pathlist do
- local path = pathlist[k]
- local pathname = lpegmatch(inhibitstripper,path)
- local doscan = path == pathname -- no ^!!
- if not find (pathname, '//$') then doscan = false end
- done = false
- -- using file list
- if filelist then
- -- compare list entries with permitted pattern -- /xx /xx//
- local expression = makepathexpression(pathname)
- if trace_detail then
- report_resolving("using pattern '%s' for path '%s'",expression,pathname)
- end
- for k=1,#filelist do
- local fl = filelist[k]
- local f = fl[2]
- local d = dirlist[k]
- if find(d,expression) then
- -- todo, test for readable
- result[#result+1] = resolvers.resolve(fl[3]) -- no shortcut
- done = true
- if allresults then
- if trace_detail then
- report_resolving("match to '%s' in hash for file '%s' and path '%s', continue scanning",expression,f,d)
- end
- else
- if trace_detail then
- report_resolving("match to '%s' in hash for file '%s' and path '%s', quit scanning",expression,f,d)
- end
- break
+ for k=1,#filelist do
+ local fl = filelist[k]
+ local f = fl[2]
+ local d = dirlist[k]
+ if find(d,expression) then
+ -- todo, test for readable
+ result[#result+1] = resolvers.resolve(fl[3]) -- no shortcut
+ done = true
+ if allresults then
+ if trace_detail then
+ report_resolving("match to '%s' in hash for file '%s' and path '%s', continue scanning",expression,f,d)
end
- elseif trace_detail then
- report_resolving("no match to '%s' in hash for file '%s' and path '%s'",expression,f,d)
+ else
+ if trace_detail then
+ report_resolving("match to '%s' in hash for file '%s' and path '%s', quit scanning",expression,f,d)
+ end
+ break
end
+ elseif trace_detail then
+ report_resolving("no match to '%s' in hash for file '%s' and path '%s'",expression,f,d)
end
end
- if not done then
- -- check if on disk / unchecked / does not work at all / also zips
- -- TH perhaps it did not work because of missing resolvers.resolve()
- -- and resolvers.scanfiles() calls ...
- pathname = resolvers.resolve(pathname)
- local scheme = url.hasscheme(pathname)
- if not scheme or scheme == "file" then
- local pname = gsub(pathname,"%.%*$",'')
- if not find(pname,"%*") then
- local ppname = gsub(pname,"/+$","")
- if can_be_dir(ppname) then
- local files = {}
- if doscan then files = resolvers.scanfiles(ppname) end
+ end
+ if done then
+ method = "database"
+ else
+ method = "filesystem" -- bonus, even when !! is specified
+ pathname = gsub(pathname,"/+$","")
+ pathname = resolvers.resolve(pathname)
+ local scheme = url.hasscheme(pathname)
+ if not scheme or scheme == "file" then
+ local pname = gsub(pathname,"%.%*$",'')
+ if not find(pname,"%*") then
+ if can_be_dir(pname) then
+ -- quick root scan first
+ for k=1,#wantedfiles do
+ local w = wantedfiles[k]
+ local fname = check_subpath(filejoin(pname,w))
+ if fname then
+ result[#result+1] = fname
+ done = true
+ if not allresults then
+ break
+ end
+ end
+ end
+ if not done and doscan then
+ -- collect files in path (and cache the result)
+ local files = resolvers.simplescanfiles(pname,false,true)
for k=1,#wantedfiles do
local w = wantedfiles[k]
- local subpath = ''
- if files[w] then
- subpath = type(files[w]) == "table" and files[w][1] or files[w]
- end
- local fname = filejoin(ppname,subpath,w)
- if isreadable(fname) then
- if trace_detail then
- report_resolving("found '%s' by scanning",fname)
+ local subpath = files[w]
+ if not subpath or subpath == "" then
+ -- rootscan already done
+ elseif type(subpath) == "string" then
+ local fname = check_subpath(filejoin(pname,subpath,w))
+ if fname then
+ result[#result+1] = fname
+ done = true
+ if not allresults then
+ break
+ end
+ end
+ else
+ for i=1,#subpath do
+ local sp = subpath[i]
+ if sp == "" then
+ -- roottest already done
+ else
+ local fname = check_subpath(filejoin(pname,sp,w))
+ if fname then
+ result[#result+1] = fname
+ done = true
+ if not allresults then
+ break
+ end
+ end
+ end
+ end
+ if done and not allresults then
+ break
end
- result[#result+1] = fname
- done = true
- if not allresults then break end
end
end
- else
- -- no access needed for non existing path, speedup (esp in large tree with lots of fake)
end
end
+ else
+ -- no access needed for non existing path, speedup (esp in large tree with lots of fake)
end
end
- if not done and doscan then
- -- todo: slow path scanning ... although we now have tree:// supported in $TEXMF
- end
- if done and not allresults then break end
+ end
+ -- todo recursive scanning
+ if done and not allresults then
+ break
end
end
+ if #result > 0 then
+ return method, result
+ end
end
- for k=1,#result do
- local rk = collapsepath(result[k])
- result[k] = rk
- resolvers.registerintrees(rk) -- for tracing used files
+end
+
+local function find_onpath(filename,filetype,wantedfiles,allresults)
+ if trace_detail then
+ report_resolving("checking filename '%s', filetype '%s', wanted files '%s'",filename, filetype or '?',concat(wantedfiles," | "))
+ end
+ local result = { }
+ for k=1,#wantedfiles do
+ local fname = wantedfiles[k]
+ if fname and isreadable(fname) then
+ filename = fname
+ result[#result+1] = filejoin('.',fname)
+ if not allresults then
+ break
+ end
+ end
end
- if stamp then
- instance.found[stamp] = result
+ if #result > 0 then
+ return "onpath", result
end
- return result
end
--- -- -- begin of main file search routing -- -- --
-
-
-
-
-
-
-
+local function find_otherwise(filename,filetype,wantedfiles,allresults) -- other text files | any | whatever
+ local filelist = collect_files(wantedfiles)
+ local fl = filelist and filelist[1]
+ if fl then
+ return "otherwise", { resolvers.resolve(fl[3]) } -- filename
+ end
+end
+-- we could have a loop over the 6 find functions but then we'd always
+-- have to run the analyze step first
+collect_instance_files = function(filename,askedformat,allresults) -- uses nested
+ askedformat = askedformat or ""
+ filename = collapsepath(filename)
+ if allresults then
+ -- no need for caching, only used for tracing
+ local filetype, wantedfiles = find_analyze(filename,askedformat)
+ local results = {
+ { find_direct (filename,true) },
+ { find_wildcard (filename,true) },
+ { find_qualified(filename,true) },
+ { find_intree (filename,filetype,wantedfiles,true) },
+ { find_onpath (filename,filetype,wantedfiles,true) },
+ { find_otherwise(filename,filetype,wantedfiles,true) },
+ }
+ local result, status, done = { }, { }, { }
+ for k, r in next, results do
+ local method, list = r[1], r[2]
+ if method and list then
+ for i=1,#list do
+ local c = collapsepath(list[i])
+ if not done[c] then
+ result[#result+1] = c
+ done[c] = true
+ end
+ status[#status+1] = format("%-10s: %s",method,c)
+ end
+ end
+ end
+ if trace_detail then
+ report_resolving("lookup status: %s",table.serialize(status,filename))
+ end
+ return result, status
+ else
+ local method, result, stamp, filetype, wantedfiles
+ if instance.remember then
+ stamp = format("%s--%s", filename, askedformat)
+ result = stamp and instance.found[stamp]
+ if result then
+ if trace_locating then
+ report_resolving("remembered file '%s'",filename)
+ end
+ return result
+ end
+ end
+ method, result = find_direct(filename)
+ if not result then
+ method, result = find_wildcard(filename)
+ if not result then
+ method, result = find_qualified(filename)
+ if not result then
+ filetype, wantedfiles = find_analyze(filename,askedformat)
+ method, result = find_intree(filename,filetype,wantedfiles)
+ if not result then
+ method, result = find_onpath(filename,filetype,wantedfiles)
+ if not result then
+ method, result = find_otherwise(filename,filetype,wantedfiles)
+ end
+ end
+ end
+ end
+ end
+ if result and #result > 0 then
+ local foundname = collapsepath(result[1])
+ resolvers.registerintrees(filename,askedformat,filetype,method,foundname)
+ result = { foundname }
+ else
+ result = { } -- maybe false
+ end
+ if stamp then
+ if trace_locating then
+ report_resolving("remembering file '%s'",filename)
+ end
+ instance.found[stamp] = result
+ end
+ return result
+ end
+end
-- -- -- end of main file search routing -- -- --
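-- [editorial sketch, not part of the patch] The rewritten routing above tries the
-- six finders in a fixed order (direct, wildcard, qualified, intree, onpath,
-- otherwise) and, when instance.remember is set, memoizes the outcome under a
-- "<filename>--<askedformat>" key in instance.found. A standalone model of that
-- control flow; the names (cascade, remembered) and the finder list are
-- illustrative, not taken from mtxrun:

local remembered = { } -- plays the role of instance.found

local function cascade(finders,filename,askedformat)
    local stamp  = filename .. "--" .. (askedformat or "")
    local result = remembered[stamp]
    if result then
        return result -- hit from an earlier lookup
    end
    result = { }
    for i=1,#finders do
        local method, list = finders[i](filename,askedformat)
        if list and #list > 0 then
            result = { list[1] } -- only the first hit is kept, as above
            break
        end
    end
    remembered[stamp] = result
    return result
end

-- cascade({ function(name) return "direct", { name } end }, "oeps.tex", "tex")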
+
local function findfiles(filename,filetype,allresults)
- local result = collect_instance_files(filename,filetype or "",allresults)
- if #result == 0 then
+ local result, status = collect_instance_files(filename,filetype or "",allresults)
+ if not result or #result == 0 then
local lowered = lower(filename)
if filename ~= lowered then
- return collect_instance_files(lowered,filetype or "",allresults)
+ result, status = collect_instance_files(lowered,filetype or "",allresults)
end
end
- return result
+ return result or { }, status
end
function resolvers.findfiles(filename,filetype)
@@ -12370,6 +13646,10 @@ function resolvers.load(option)
return files and next(files) and true
end
+function resolvers.loadtime()
+ return statistics.elapsedtime(instance)
+end
+
local function report(str)
if trace_locating then
report_resolving(str) -- has already verbose
@@ -12383,6 +13663,9 @@ function resolvers.dowithfilesandreport(command, files, ...) -- will move
if trace_locating then
report('') -- ?
end
+ if type(files) == "string" then
+ files = { files }
+ end
for f=1,#files do
local file = files[f]
local result = command(file,...)
@@ -12540,6 +13823,8 @@ local cleanpath, findgivenfile, expansion = resolvers.cleanpath, resolvers.findg
local getenv = resolvers.getenv -- we can probably also use resolvers.expansion
local P, Cs, lpegmatch = lpeg.P, lpeg.Cs, lpeg.match
+-- getenv = function(...) return resolvers.getenv(...) end -- needs checking (definitions changes later on)
+
prefixes.environment = function(str)
return cleanpath(expansion(str))
end
@@ -12578,7 +13863,7 @@ end
prefixes.filename = function(str)
local fullname = findgivenfile(str) or ""
- return cleanpath(file.basename((fullname ~= "" and fullname) or str))
+ return cleanpath(file.basename((fullname ~= "" and fullname) or str)) -- no cleanpath needed here
end
prefixes.pathname = function(str)
@@ -12621,8 +13906,9 @@ function resolvers.allprefixes(separator)
end
local function _resolve_(method,target)
- if prefixes[method] then
- return prefixes[method](target)
+ local action = prefixes[method]
+ if action then
+ return action(target)
else
return method .. ":" .. target
end
@@ -12637,7 +13923,7 @@ end
local function resolve(str) -- use schemes, this one is then for the commandline only
local res = resolved[str]
if not res then
- res = gsub(str,"([a-z][a-z]+):([^ \"\';]*)",_resolve_)
+ res = gsub(str,"([a-z][a-z]+):([^ \"\';]*)",_resolve_) -- home:xx;selfautoparent:xx; etc
resolved[str] = res
abstract[res] = str
end
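-- [editorial sketch] resolve() above rewrites every "prefix:value" pair on the
-- command line through the prefixes table (home:xx, selfautoparent:xx, ...) and
-- leaves unknown prefixes untouched. The same dispatch in miniature, with a toy
-- prefix table (toyprefixes, toyresolve are illustrative names, not mtxrun's):

local toyprefixes = {
    home = function(str) return (os.getenv("HOME") or "~") .. "/" .. str end,
}

local function toyresolve(str)
    return (string.gsub(str,"([a-z][a-z]+):([^ \"\';]*)",function(method,target)
        local action = toyprefixes[method]
        if action then
            return action(target)
        end
        return method .. ":" .. target -- unknown prefix: leave it alone
    end))
end

-- toyresolve("home:texmf/tex") --> e.g. "/home/you/texmf/tex"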
@@ -12797,7 +14083,7 @@ end
function generators.file(specification)
local path = specification.filename
- local content = resolvers.scanfiles(path)
+ local content = resolvers.scanfiles(path,false,true) -- scan once
resolvers.registerfilehash(path,content,true)
end
@@ -13152,6 +14438,18 @@ local archives = zip.archives
zip.registeredfiles = zip.registeredfiles or { }
local registeredfiles = zip.registeredfiles
+local limited = false
+
+directives.register("system.inputmode", function(v)
+ if not limited then
+ local i_limiter = io.i_limiter(v)
+ if i_limiter then
+ zip.open = i_limiter.protect(zip.open)
+ limited = true
+ end
+ end
+end)
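-- [editorial sketch] the directive above installs the io limiter only once, by
-- replacing zip.open with a protected wrapper. The wrapping pattern itself, shown
-- with a toy guard instead of the ConTeXt-specific io.i_limiter:

local function protect(fn,guard)
    return function(...)
        guard(...)          -- e.g. refuse suspicious names before opening
        return fn(...)
    end
end

-- zip.open = protect(zip.open, function(name) assert(not string.find(name,"%.%.")) end)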
+
local function validzip(str) -- todo: use url splitter
if not find(str,"^zip://") then
return "zip:///" .. str
@@ -13419,11 +14717,11 @@ function resolvers.finders.tree(specification)
end
function resolvers.locators.tree(specification)
- -- TH: resolvers.resolve() call was missing here
- local name = resolvers.resolve(specification.filename)
- if name ~= '' and lfs.isdir(name) then
+ local name = specification.filename
+ local realname = resolvers.resolve(name) -- no shortcut
+ if realname and realname ~= '' and lfs.isdir(realname) then
if trace_locating then
- report_trees("locator '%s' found",name)
+ report_trees("locator '%s' found",realname)
end
resolvers.appendhash('tree',name,false) -- don't cache
elseif trace_locating then
@@ -13437,6 +14735,8 @@ function resolvers.hashers.tree(specification)
report_trees("analysing '%s'",name)
end
resolvers.methodhandler("hashers",name)
+
+ resolvers.generators.file(specification)
end
resolvers.concatinators.tree = resolvers.concatinators.file
@@ -13596,8 +14896,15 @@ local function loaded(libpaths,name,simple)
end
package.loaders[2] = function(name) -- was [#package.loaders+1]
- if trace_locating then -- mode detail
- report_libraries("! locating '%s'",name)
+ if file.suffix(name) == "" then
+ name = file.addsuffix(name,"lua") -- maybe a list
+ if trace_locating then -- mode detail
+ report_libraries("! locating '%s' with forced suffix",name)
+ end
+ else
+ if trace_locating then -- mode detail
+ report_libraries("! locating '%s'",name)
+ end
end
for i=1,#libformats do
local format = libformats[i]
@@ -13804,7 +15111,13 @@ function resolvers.load_tree(tree,resolve)
-- Beware, we need to obey the relocatable autoparent so we
-- set TEXMFCNF to its raw value. This is somewhat tricky when
-- we run a mkii job from within. Therefore, in mtxrun, there
- -- is a resolve applied when we're in mkii/kpse mode.
+ -- is a resolve applied when we're in mkii/kpse mode or when
+ -- --resolve is passed to mtxrun. Maybe we should also set the
+    -- local AUTOPARENT etc. although these are always set anew.
+
+ if resolve then
+ resolvers.luacnfspec = resolvers.resolve(resolvers.luacnfspec)
+ end
setenv('SELFAUTOPARENT', newroot)
setenv('SELFAUTODIR', newtree)
@@ -13812,11 +15125,12 @@ function resolvers.load_tree(tree,resolve)
setenv('TEXROOT', newroot)
setenv('TEXOS', texos)
setenv('TEXMFOS', texmfos)
- setenv('TEXMFCNF', resolvers.luacnfspec, not resolve)
- setenv("PATH", newpath .. io.pathseparator .. getenv("PATH"))
+ setenv('TEXMFCNF', resolvers.luacnfspec,true) -- already resolved
+ setenv('PATH', newpath .. io.pathseparator .. getenv('PATH'))
report_tds("changing from root '%s' to '%s'",oldroot,newroot)
- report_tds("prepending '%s' to binary path",newpath)
+ report_tds("prepending '%s' to PATH",newpath)
+ report_tds("setting TEXMFCNF to '%s'",resolvers.luacnfspec)
report_tds()
end
end
@@ -13837,6 +15151,7 @@ if not modules then modules = { } end modules ['data-lst'] = {
-- used in mtxrun, can be loaded later .. todo
local find, concat, upper, format = string.find, table.concat, string.upper, string.format
+local fastcopy, sortedpairs = table.fastcopy, table.sortedpairs
resolvers.listers = resolvers.listers or { }
@@ -13867,10 +15182,10 @@ function resolvers.listers.variables(pattern)
end
end
end
- local env = table.fastcopy(environment)
- local var = table.fastcopy(variables)
- local exp = table.fastcopy(expansions)
- for key, value in table.sortedpairs(configured) do
+ local env = fastcopy(environment)
+ local var = fastcopy(variables)
+ local exp = fastcopy(expansions)
+ for key, value in sortedpairs(configured) do
if key ~= "" and (pattern == "" or find(upper(key),pattern)) then
report_lists(key)
report_lists(" env: %s",tabstr(rawget(environment,key)) or "unset")
@@ -13879,9 +15194,9 @@ function resolvers.listers.variables(pattern)
report_lists(" res: %s",resolvers.resolve(expansions[key]) or "unset")
end
end
- instance.environment = table.fastcopy(env)
- instance.variables = table.fastcopy(var)
- instance.expansions = table.fastcopy(exp)
+ instance.environment = fastcopy(env)
+ instance.variables = fastcopy(var)
+ instance.expansions = fastcopy(exp)
end
function resolvers.listers.configurations(report)
@@ -14168,8 +15483,8 @@ own = { } -- not local, might change
own.libs = { -- order can be made better
'l-string.lua',
- 'l-lpeg.lua',
'l-table.lua',
+ 'l-lpeg.lua',
'l-io.lua',
'l-number.lua',
'l-set.lua',
@@ -14240,7 +15555,7 @@ end
-- End of hack.
-local format, gsub, gmatch, match = string.format, string.gsub, string.gmatch, string.match
+local format, gsub, gmatch, match, find = string.format, string.gsub, string.gmatch, string.match, string.find
local concat = table.concat
own.name = (environment and environment.ownname) or arg[0] or 'mtxrun.lua'
@@ -14362,7 +15677,7 @@ local helpinfo = [[
--resolve resolve prefixed arguments
--ctxlua run internally (using preloaded libs)
--internal run script using built in libraries (same as --ctxlua)
---locate locate given filename
+--locate locate given filename in database (default) or system (--first --all --detail)
--autotree use texmf tree cf. env 'texmfstart_tree' or 'texmfstarttree'
--tree=pathtotree use given texmf tree (default file: 'setuptex.tmf')
@@ -14410,7 +15725,7 @@ local helpinfo = [[
local application = logs.application {
name = "mtxrun",
- banner = "ConTeXt TDS Runner Tool 1.30",
+ banner = "ConTeXt TDS Runner Tool 1.31",
helpinfo = helpinfo,
}
@@ -14455,12 +15770,12 @@ runners.registered = {
xmltools = { 'xmltools.rb', true },
-- luatools = { 'luatools.lua', true },
mtxtools = { 'mtxtools.rb', true },
- pdftrimwhite = { 'pdftrimwhite.pl', false }
+ pdftrimwhite = { 'pdftrimwhite.pl', false },
}
runners.launchers = {
windows = { },
- unix = { }
+ unix = { },
}
-- like runners.libpath("framework"): looks on script's subpath
@@ -14487,7 +15802,7 @@ function runners.prepare()
end
local touchname = environment.argument("iftouched")
if type(touchname) == "string" and touchname ~= "" then
- local oldname, newname = string.split(touchname, ",")
+ local oldname, newname = string.splitup(touchname, ",")
if oldname and newname and oldname ~= "" and newname ~= "" then
if not file.needs_updating(oldname,newname) then
if e_verbose then
@@ -14521,11 +15836,11 @@ function runners.execute_script(fullname,internal,nosplit)
if path ~= "" then
result = fullname
elseif name then
- name = name:gsub("^int[%a]*:",function()
+ name = gsub(name,"^int[%a]*:",function()
internal = true
return ""
end )
- name = name:gsub("^script:","")
+ name = gsub(name,"^script:","")
if suffix == "" and runners.registered[name] and runners.registered[name][1] then
name = runners.registered[name][1]
suffix = file.extname(name)
@@ -14555,13 +15870,15 @@ function runners.execute_script(fullname,internal,nosplit)
environment.ownscript = result
dofile(result)
else
- local quotedresult = result
- if string.match(result,' ') and not string.match(result,"^\".*\"$") then quotedresult = '"' .. result .. '"' end
local binary = runners.applications[file.extname(result)]
+ result = string.quoted(string.unquoted(result))
+ -- if string.match(result,' ') and not string.match(result,"^\".*\"$") then
+ -- result = '"' .. result .. '"'
+ -- end
if binary and binary ~= "" then
- quotedresult = binary .. " " .. quotedresult
+ result = binary .. " " .. result
end
- local command = quotedresult .. " " .. environment.reconstructcommandline(environment.arguments_after,noquote)
+ local command = result .. " " .. environment.reconstructcommandline(environment.arguments_after,noquote)
if e_verbose then
report()
report("executing: %s",command)
@@ -14606,7 +15923,7 @@ function runners.execute_program(fullname)
local before, after = environment.splitarguments(fullname)
for k=1,#after do after[k] = resolvers.resolve(after[k]) end
environment.initializearguments(after)
- fullname = fullname:gsub("^bin:","")
+ fullname = gsub(fullname,"^bin:","")
local command = fullname .. " " .. (environment.reconstructcommandline(after or "",noquote) or "")
report()
report("executing: %s",command)
@@ -14667,10 +15984,24 @@ function runners.resolve_string(filename)
end
end
-function runners.locate_file(filename)
- -- differs from texmfstart where locate appends .com .exe .bat ... todo
+-- differs from texmfstart where locate appends .com .exe .bat ... todo
+
+function runners.locate_file(filename) -- was given file but only searches in tree
if filename and filename ~= "" then
- runners.report_location(resolvers.findgivenfile(filename))
+ if environment.argument("first") then
+ runners.report_location(resolvers.findfile(filename))
+ -- resolvers.dowithfilesandreport(resolvers.findfile,filename)
+ elseif environment.argument("all") then
+ local result, status = resolvers.findfiles(filename)
+ if status and environment.argument("detail") then
+ runners.report_location(status)
+ else
+ runners.report_location(result)
+ end
+ else
+ runners.report_location(resolvers.findgivenfile(filename))
+ -- resolvers.dowithfilesandreport(resolvers.findgivenfile,filename)
+ end
end
end
@@ -14679,12 +16010,12 @@ function runners.locate_platform()
end
function runners.report_location(result)
- if e_verbose then
- reportline()
- if result and result ~= "" then
- report(result)
- else
- report("not found")
+ if type(result) == "table" then
+ for i=1,#result do
+ if i > 1 then
+ io.write("\n")
+ end
+ io.write(result[i])
end
else
io.write(result)
@@ -14742,8 +16073,8 @@ function resolvers.launch(str)
end
function runners.launch_file(filename)
- instance.allresults = true
trackers.enable("resolvers.locating")
+ local allresults = environment.arguments["all"]
local pattern = environment.arguments["pattern"]
if not pattern or pattern == "" then
pattern = filename
@@ -14751,15 +16082,15 @@ function runners.launch_file(filename)
if not pattern or pattern == "" then
report("provide name or --pattern=")
else
- local t = resolvers.findfiles(pattern)
+ local t = resolvers.findfiles(pattern,nil,allresults)
if not t or #t == 0 then
- t = resolvers.findfiles("*/" .. pattern)
+ t = resolvers.findfiles("*/" .. pattern,nil,allresults)
end
if not t or #t == 0 then
- t = resolvers.findfiles("*/" .. pattern .. "*")
+ t = resolvers.findfiles("*/" .. pattern .. "*",nil,allresults)
end
if t and #t > 0 then
- if environment.arguments["all"] then
+ if allresults then
for _, v in pairs(t) do
report("launching %s", v)
resolvers.launch(v)
@@ -14774,6 +16105,11 @@ function runners.launch_file(filename)
end
end
+local mtxprefixes = {
+ { "^mtx%-", "mtx-" },
+ { "^mtx%-t%-", "mtx-t-" },
+}
+
function runners.find_mtx_script(filename)
local function found(name)
local path = file.dirname(name)
@@ -14799,24 +16135,27 @@ function runners.find_mtx_script(filename)
return fullname
end
-- mtx- prefix checking
- local mtxprefix = (filename:find("^mtx%-") and "") or "mtx-"
- -- context namespace, mtx-<filename>
- fullname = mtxprefix .. filename
- fullname = found(fullname) or resolvers.findfile(fullname)
- if fullname and fullname ~= "" then
- return fullname
- end
- -- context namespace, mtx-<filename>s
- fullname = mtxprefix .. basename .. "s" .. "." .. suffix
- fullname = found(fullname) or resolvers.findfile(fullname)
- if fullname and fullname ~= "" then
- return fullname
- end
- -- context namespace, mtx-<filename minus trailing s>
- fullname = mtxprefix .. basename:gsub("s$","") .. "." .. suffix
- fullname = found(fullname) or resolvers.findfile(fullname)
- if fullname and fullname ~= "" then
- return fullname
+ for i=1,#mtxprefixes do
+ local mtxprefix = mtxprefixes[i]
+ mtxprefix = find(filename,mtxprefix[1]) and "" or mtxprefix[2]
+ -- context namespace, mtx-<filename>
+ fullname = mtxprefix .. filename
+ fullname = found(fullname) or resolvers.findfile(fullname)
+ if fullname and fullname ~= "" then
+ return fullname
+ end
+ -- context namespace, mtx-<filename>s
+ fullname = mtxprefix .. basename .. "s" .. "." .. suffix
+ fullname = found(fullname) or resolvers.findfile(fullname)
+ if fullname and fullname ~= "" then
+ return fullname
+ end
+ -- context namespace, mtx-<filename minus trailing s>
+ fullname = mtxprefix .. gsub(basename,"s$","") .. "." .. suffix
+ fullname = found(fullname) or resolvers.findfile(fullname)
+ if fullname and fullname ~= "" then
+ return fullname
+ end
end
-- context namespace, just <filename>
fullname = resolvers.findfile(filename)
@@ -14963,9 +16302,11 @@ instance.lsrmode = environment.argument("lsr") or false
local is_mkii_stub = runners.registered[file.removesuffix(file.basename(filename))]
-if environment.argument("usekpse") or environment.argument("forcekpse") or is_mkii_stub then
+local e_argument = environment.argument
+
+if e_argument("usekpse") or e_argument("forcekpse") or is_mkii_stub then
- resolvers.load_tree(environment.argument('tree'),true) -- force resolve of TEXMFCNF
+ resolvers.load_tree(e_argument('tree'),true) -- force resolve of TEXMFCNF
os.setenv("engine","")
os.setenv("progname","")
@@ -14978,7 +16319,7 @@ if environment.argument("usekpse") or environment.argument("forcekpse") or is_mk
other = "other text files",
}
- local progname = environment.argument("progname") or 'context'
+ local progname = e_argument("progname") or 'context'
local function kpse_initialized()
texconfig.kpse_init = true
@@ -14993,7 +16334,7 @@ if environment.argument("usekpse") or environment.argument("forcekpse") or is_mk
local findfile = resolvers.findfile
local showpath = resolvers.showpath
- if environment.argument("forcekpse") then
+ if e_argument("forcekpse") then
function resolvers.findfile(name,kind)
return (kpse_initialized():find_file(resolvers.cleanpath(name),(kind ~= "" and (remapper[kind] or kind)) or "tex") or "") or ""
@@ -15002,7 +16343,7 @@ if environment.argument("usekpse") or environment.argument("forcekpse") or is_mk
return (kpse_initialized():show_path(name)) or ""
end
- elseif environment.argument("usekpse") or is_mkii_stub then
+ elseif e_argument("usekpse") or is_mkii_stub then
resolvers.load()
@@ -15041,12 +16382,12 @@ else
end
end
- resolvers.load_tree(environment.argument('tree'))
+ resolvers.load_tree(e_argument('tree'),e_argument("resolve"))
end
-if environment.argument("selfmerge") then
+if e_argument("selfmerge") then
-- embed used libraries
@@ -15056,27 +16397,27 @@ if environment.argument("selfmerge") then
utilities.merger.selfmerge(own.name,own.libs,{ found })
end
-elseif environment.argument("selfclean") then
+elseif e_argument("selfclean") then
-- remove embedded libraries
runners.loadbase()
utilities.merger.selfclean(own.name)
-elseif environment.argument("selfupdate") then
+elseif e_argument("selfupdate") then
runners.loadbase()
trackers.enable("resolvers.locating")
resolvers.updatescript(own.name,"mtxrun")
-elseif environment.argument("ctxlua") or environment.argument("internal") then
+elseif e_argument("ctxlua") or e_argument("internal") then
-- run a script by loading it (using libs)
runners.loadbase()
ok = runners.execute_script(filename,true)
-elseif environment.argument("script") or environment.argument("scripts") then
+elseif e_argument("script") or e_argument("scripts") then
-- run a script by loading it (using libs), pass args
@@ -15087,109 +16428,110 @@ elseif environment.argument("script") or environment.argument("scripts") then
ok = runners.execute_ctx_script(filename)
end
-elseif environment.argument("execute") then
+elseif e_argument("execute") then
-- execute script
runners.loadbase()
ok = runners.execute_script(filename)
-elseif environment.argument("direct") then
+elseif e_argument("direct") then
-- equals bin:
runners.loadbase()
ok = runners.execute_program(filename)
-elseif environment.argument("edit") then
+elseif e_argument("edit") then
-- edit file
runners.loadbase()
runners.edit_script(filename)
-elseif environment.argument("launch") then
+elseif e_argument("launch") then
runners.loadbase()
runners.launch_file(filename)
-elseif environment.argument("makestubs") then
+elseif e_argument("makestubs") then
    -- make stubs (deprecated)
runners.handle_stubs(true)
-elseif environment.argument("removestubs") then
+elseif e_argument("removestubs") then
    -- remove stubs (deprecated)
runners.loadbase()
runners.handle_stubs(false)
-elseif environment.argument("resolve") then
+elseif e_argument("resolve") then
-- resolve string
runners.loadbase()
runners.resolve_string(filename)
-elseif environment.argument("locate") then
+elseif e_argument("locate") then
- -- locate file
+ -- locate file (only database)
runners.loadbase()
runners.locate_file(filename)
-elseif environment.argument("platform") or environment.argument("show-platform") then
+elseif e_argument("platform") or e_argument("show-platform") then
-- locate platform
runners.loadbase()
runners.locate_platform()
-elseif environment.argument("prefixes") then
+elseif e_argument("prefixes") then
runners.loadbase()
runners.prefixes()
-elseif environment.argument("timedrun") then
+elseif e_argument("timedrun") then
-- locate platform
runners.loadbase()
runners.timedrun(filename)
-elseif environment.argument("variables") or environment.argument("show-variables") or environment.argument("expansions") or environment.argument("show-expansions") then
+elseif e_argument("variables") or e_argument("show-variables") or e_argument("expansions") or e_argument("show-expansions") then
-- luatools: runners.execute_ctx_script("mtx-base","--expansions",filename)
resolvers.load("nofiles")
- resolvers.listers.variables(environment.argument("pattern"))
+ resolvers.listers.variables(e_argument("pattern"))
-elseif environment.argument("configurations") or environment.argument("show-configurations") then
+elseif e_argument("configurations") or e_argument("show-configurations") then
-- luatools: runners.execute_ctx_script("mtx-base","--configurations",filename)
resolvers.load("nofiles")
resolvers.listers.configurations()
-elseif environment.argument("find-file") then
+elseif e_argument("find-file") then
-- luatools: runners.execute_ctx_script("mtx-base","--find-file",filename)
resolvers.load()
- local e_pattern = environment.argument("pattern")
- local e_format = environment.argument("format")
+ local e_all = e_argument("all")
+ local e_pattern = e_argument("pattern")
+ local e_format = e_argument("format")
+ local finder = e_all and resolvers.findfiles or resolvers.findfile
if not e_pattern then
runners.register_arguments(filename)
environment.initializearguments(environment.arguments_after)
- resolvers.dowithfilesandreport(resolvers.findfiles,environment.files,e_format)
+ resolvers.dowithfilesandreport(finder,environment.files,e_format)
elseif type(e_pattern) == "string" then
- instance.allresults = true -- brrrr
- resolvers.dowithfilesandreport(resolvers.findfiles,{ e_pattern }, e_format)
+ resolvers.dowithfilesandreport(finder,{ e_pattern },e_format)
end
-elseif environment.argument("find-path") then
+elseif e_argument("find-path") then
-- luatools: runners.execute_ctx_script("mtx-base","--find-path",filename)
@@ -15201,7 +16543,7 @@ elseif environment.argument("find-path") then
print(path)
end
-elseif environment.argument("expand-braces") then
+elseif e_argument("expand-braces") then
-- luatools: runners.execute_ctx_script("mtx-base","--expand-braces",filename)
@@ -15210,7 +16552,7 @@ elseif environment.argument("expand-braces") then
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.expandbraces, environment.files)
-elseif environment.argument("expand-path") then
+elseif e_argument("expand-path") then
-- luatools: runners.execute_ctx_script("mtx-base","--expand-path",filename)
@@ -15219,7 +16561,7 @@ elseif environment.argument("expand-path") then
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.expandpath, environment.files)
-elseif environment.argument("expand-var") or environment.argument("expand-variable") then
+elseif e_argument("expand-var") or e_argument("expand-variable") then
-- luatools: runners.execute_ctx_script("mtx-base","--expand-var",filename)
@@ -15228,7 +16570,7 @@ elseif environment.argument("expand-var") or environment.argument("expand-variab
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.expansion, environment.files)
-elseif environment.argument("show-path") or environment.argument("path-value") then
+elseif e_argument("show-path") or e_argument("path-value") then
-- luatools: runners.execute_ctx_script("mtx-base","--show-path",filename)
@@ -15237,7 +16579,7 @@ elseif environment.argument("show-path") or environment.argument("path-value") t
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.showpath, environment.files)
-elseif environment.argument("var-value") or environment.argument("show-value") then
+elseif e_argument("var-value") or e_argument("show-value") then
-- luatools: runners.execute_ctx_script("mtx-base","--show-value",filename)
@@ -15246,28 +16588,36 @@ elseif environment.argument("var-value") or environment.argument("show-value") t
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.variable,environment.files)
-elseif environment.argument("format-path") then
+elseif e_argument("format-path") then
-- luatools: runners.execute_ctx_script("mtx-base","--format-path",filename)
resolvers.load()
report(caches.getwritablepath("format"))
-elseif environment.argument("pattern") then
+elseif e_argument("pattern") then
-- luatools
- runners.execute_ctx_script("mtx-base","--pattern='" .. environment.argument("pattern") .. "'",filename)
+ runners.execute_ctx_script("mtx-base","--pattern='" .. e_argument("pattern") .. "'",filename)
-elseif environment.argument("generate") then
+elseif e_argument("generate") then
-- luatools
- instance.renewcache = true
- trackers.enable("resolvers.locating")
- resolvers.load()
+ if filename and filename ~= "" then
+ resolvers.load("nofiles")
+ trackers.enable("resolvers.locating")
+ resolvers.renew(filename)
+ else
+ instance.renewcache = true
+ trackers.enable("resolvers.locating")
+ resolvers.load()
+ end
-elseif environment.argument("make") or environment.argument("ini") or environment.argument("compile") then
+ e_verbose = true
+
+elseif e_argument("make") or e_argument("ini") or e_argument("compile") then
-- luatools: runners.execute_ctx_script("mtx-base","--make",filename)
@@ -15275,29 +16625,33 @@ elseif environment.argument("make") or environment.argument("ini") or environmen
trackers.enable("resolvers.locating")
environment.make_format(filename)
-elseif environment.argument("run") then
+elseif e_argument("run") then
-- luatools
runners.execute_ctx_script("mtx-base","--run",filename)
-elseif environment.argument("fmt") then
+elseif e_argument("fmt") then
-- luatools
runners.execute_ctx_script("mtx-base","--fmt",filename)
-elseif environment.argument("help") and filename=='base' then
+elseif e_argument("help") and filename=='base' then
-- luatools
runners.execute_ctx_script("mtx-base","--help")
-elseif environment.argument("help") or filename=='help' or filename == "" then
+elseif e_argument("version") then
+
+ application.version()
+
+elseif e_argument("help") or filename=='help' or filename == "" then
application.help()
-elseif filename:find("^bin:") then
+elseif find(filename,"^bin:") then
runners.loadbase()
ok = runners.execute_program(filename)
diff --git a/Master/bin/i386-cygwin/mtxrun b/Master/bin/i386-cygwin/mtxrun
index 7adcd3023a8..335c4fcb9e0 100755
--- a/Master/bin/i386-cygwin/mtxrun
+++ b/Master/bin/i386-cygwin/mtxrun
@@ -160,509 +160,6 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['l-lpeg'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local lpeg = require("lpeg")
-
-local type = type
-
--- Beware, we predefine a bunch of patterns here and one reason for doing so
--- is that we get consistent behaviour in some of the visualizers.
-
-lpeg.patterns = lpeg.patterns or { } -- so that we can share
-local patterns = lpeg.patterns
-
-local P, R, S, V, match = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.match
-local Ct, C, Cs, Cc = lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc
-local lpegtype = lpeg.type
-
-local utfcharacters = string.utfcharacters
-local utfgmatch = unicode and unicode.utf8.gmatch
-
-local anything = P(1)
-local endofstring = P(-1)
-local alwaysmatched = P(true)
-
-patterns.anything = anything
-patterns.endofstring = endofstring
-patterns.beginofstring = alwaysmatched
-patterns.alwaysmatched = alwaysmatched
-
-local digit, sign = R('09'), S('+-')
-local cr, lf, crlf = P("\r"), P("\n"), P("\r\n")
-local newline = crlf + cr + lf
-local escaped = P("\\") * anything
-local squote = P("'")
-local dquote = P('"')
-local space = P(" ")
-
-local utfbom_32_be = P('\000\000\254\255')
-local utfbom_32_le = P('\255\254\000\000')
-local utfbom_16_be = P('\255\254')
-local utfbom_16_le = P('\254\255')
-local utfbom_8 = P('\239\187\191')
-local utfbom = utfbom_32_be + utfbom_32_le
- + utfbom_16_be + utfbom_16_le
- + utfbom_8
-local utftype = utfbom_32_be / "utf-32-be" + utfbom_32_le / "utf-32-le"
- + utfbom_16_be / "utf-16-be" + utfbom_16_le / "utf-16-le"
- + utfbom_8 / "utf-8" + alwaysmatched / "unknown"
-
-local utf8next = R("\128\191")
-
-patterns.utf8one = R("\000\127")
-patterns.utf8two = R("\194\223") * utf8next
-patterns.utf8three = R("\224\239") * utf8next * utf8next
-patterns.utf8four = R("\240\244") * utf8next * utf8next * utf8next
-patterns.utfbom = utfbom
-patterns.utftype = utftype
-
-local utf8char = patterns.utf8one + patterns.utf8two + patterns.utf8three + patterns.utf8four
-local validutf8char = utf8char^0 * endofstring * Cc(true) + Cc(false)
-
-patterns.utf8 = utf8char
-patterns.utf8char = utf8char
-patterns.validutf8 = validutf8char
-patterns.validutf8char = validutf8char
-
-patterns.digit = digit
-patterns.sign = sign
-patterns.cardinal = sign^0 * digit^1
-patterns.integer = sign^0 * digit^1
-patterns.float = sign^0 * digit^0 * P('.') * digit^1
-patterns.cfloat = sign^0 * digit^0 * P(',') * digit^1
-patterns.number = patterns.float + patterns.integer
-patterns.cnumber = patterns.cfloat + patterns.integer
-patterns.oct = P("0") * R("07")^1
-patterns.octal = patterns.oct
-patterns.HEX = P("0x") * R("09","AF")^1
-patterns.hex = P("0x") * R("09","af")^1
-patterns.hexadecimal = P("0x") * R("09","AF","af")^1
-patterns.lowercase = R("az")
-patterns.uppercase = R("AZ")
-patterns.letter = patterns.lowercase + patterns.uppercase
-patterns.space = space
-patterns.tab = P("\t")
-patterns.spaceortab = patterns.space + patterns.tab
-patterns.eol = S("\n\r")
-patterns.spacer = S(" \t\f\v") -- + string.char(0xc2, 0xa0) if we want utf (cf mail roberto)
-patterns.newline = newline
-patterns.emptyline = newline^1
-patterns.nonspacer = 1 - patterns.spacer
-patterns.whitespace = patterns.eol + patterns.spacer
-patterns.nonwhitespace = 1 - patterns.whitespace
-patterns.equal = P("=")
-patterns.comma = P(",")
-patterns.commaspacer = P(",") * patterns.spacer^0
-patterns.period = P(".")
-patterns.colon = P(":")
-patterns.semicolon = P(";")
-patterns.underscore = P("_")
-patterns.escaped = escaped
-patterns.squote = squote
-patterns.dquote = dquote
-patterns.nosquote = (escaped + (1-squote))^0
-patterns.nodquote = (escaped + (1-dquote))^0
-patterns.unsingle = (squote/"") * patterns.nosquote * (squote/"")
-patterns.undouble = (dquote/"") * patterns.nodquote * (dquote/"")
-patterns.unquoted = patterns.undouble + patterns.unsingle -- more often undouble
-patterns.unspacer = ((patterns.spacer^1)/"")^0
-
-patterns.somecontent = (anything - newline - space)^1 -- (utf8char - newline - space)^1
-patterns.beginline = #(1-newline)
-
-local unquoted = Cs(patterns.unquoted * endofstring) -- not C
-
-function string.unquoted(str)
- return match(unquoted,str) or str
-end
-
-
-function lpeg.anywhere(pattern) --slightly adapted from website
- return P { P(pattern) + 1 * V(1) } -- why so complex?
-end
-
-function lpeg.splitter(pattern, action)
- return (((1-P(pattern))^1)/action+1)^0
-end
-
-local splitters_s, splitters_m = { }, { }
-
-local function splitat(separator,single)
- local splitter = (single and splitters_s[separator]) or splitters_m[separator]
- if not splitter then
- separator = P(separator)
- local other = C((1 - separator)^0)
- if single then
- local any = anything
- splitter = other * (separator * C(any^0) + "") -- ?
- splitters_s[separator] = splitter
- else
- splitter = other * (separator * other)^0
- splitters_m[separator] = splitter
- end
- end
- return splitter
-end
-
-lpeg.splitat = splitat
-
-
-local cache = { }
-
-function lpeg.split(separator,str)
- local c = cache[separator]
- if not c then
- c = Ct(splitat(separator))
- cache[separator] = c
- end
- return match(c,str)
-end
-
-function string.split(str,separator)
- local c = cache[separator]
- if not c then
- c = Ct(splitat(separator))
- cache[separator] = c
- end
- return match(c,str)
-end
-
-local spacing = patterns.spacer^0 * newline -- sort of strip
-local empty = spacing * Cc("")
-local nonempty = Cs((1-spacing)^1) * spacing^-1
-local content = (empty + nonempty)^1
-
-patterns.textline = content
-
-
-local linesplitter = Ct(splitat(newline))
-
-patterns.linesplitter = linesplitter
-
-function string.splitlines(str)
- return match(linesplitter,str)
-end
-
-local utflinesplitter = utfbom^-1 * Ct(splitat(newline))
-
-patterns.utflinesplitter = utflinesplitter
-
-function string.utfsplitlines(str)
- return match(utflinesplitter,str)
-end
-
-
-local cache = { }
-
-function lpeg.checkedsplit(separator,str)
- local c = cache[separator]
- if not c then
- separator = P(separator)
- local other = C((1 - separator)^1)
- c = Ct(separator^0 * other * (separator^1 * other)^0)
- cache[separator] = c
- end
- return match(c,str)
-end
-
-function string.checkedsplit(str,separator)
- local c = cache[separator]
- if not c then
- separator = P(separator)
- local other = C((1 - separator)^1)
- c = Ct(separator^0 * other * (separator^1 * other)^0)
- cache[separator] = c
- end
- return match(c,str)
-end
-
-
-local f1 = string.byte
-
-local function f2(s) local c1, c2 = f1(s,1,2) return c1 * 64 + c2 - 12416 end
-local function f3(s) local c1, c2, c3 = f1(s,1,3) return (c1 * 64 + c2) * 64 + c3 - 925824 end
-local function f4(s) local c1, c2, c3, c4 = f1(s,1,4) return ((c1 * 64 + c2) * 64 + c3) * 64 + c4 - 63447168 end
-
-local utf8byte = patterns.utf8one/f1 + patterns.utf8two/f2 + patterns.utf8three/f3 + patterns.utf8four/f4
-
-patterns.utf8byte = utf8byte
-
-
-
-local cache = { }
-
-function lpeg.stripper(str)
- if type(str) == "string" then
- local s = cache[str]
- if not s then
- s = Cs(((S(str)^1)/"" + 1)^0)
- cache[str] = s
- end
- return s
- else
- return Cs(((str^1)/"" + 1)^0)
- end
-end
-
-local cache = { }
-
-function lpeg.keeper(str)
- if type(str) == "string" then
- local s = cache[str]
- if not s then
- s = Cs((((1-S(str))^1)/"" + 1)^0)
- cache[str] = s
- end
- return s
- else
- return Cs((((1-str)^1)/"" + 1)^0)
- end
-end
-
-function lpeg.frontstripper(str) -- or pattern (yet undocumented)
- return (P(str) + P(true)) * Cs(P(1)^0)
-end
-
-function lpeg.endstripper(str) -- or pattern (yet undocumented)
- return Cs((1 - P(str) * P(-1))^0)
-end
-
--- Just for fun I looked at the used bytecode and
--- p = (p and p + pp) or pp gets one more (testset).
-
-function lpeg.replacer(one,two)
- if type(one) == "table" then
- local no = #one
- if no > 0 then
- local p
- for i=1,no do
- local o = one[i]
- local pp = P(o[1]) / o[2]
- if p then
- p = p + pp
- else
- p = pp
- end
- end
- return Cs((p + 1)^0)
- end
- else
- two = two or ""
- return Cs((P(one)/two + 1)^0)
- end
-end
-
-local splitters_f, splitters_s = { }, { }
-
-function lpeg.firstofsplit(separator) -- always return value
- local splitter = splitters_f[separator]
- if not splitter then
- separator = P(separator)
- splitter = C((1 - separator)^0)
- splitters_f[separator] = splitter
- end
- return splitter
-end
-
-function lpeg.secondofsplit(separator) -- nil if not split
- local splitter = splitters_s[separator]
- if not splitter then
- separator = P(separator)
- splitter = (1 - separator)^0 * separator * C(anything^0)
- splitters_s[separator] = splitter
- end
- return splitter
-end
-
-function lpeg.balancer(left,right)
- left, right = P(left), P(right)
- return P { left * ((1 - left - right) + V(1))^0 * right }
-end
-
-
-
-local nany = utf8char/""
-
-function lpeg.counter(pattern)
- pattern = Cs((P(pattern)/" " + nany)^0)
- return function(str)
- return #match(pattern,str)
- end
-end
-
-if utfgmatch then
-
- function lpeg.count(str,what) -- replaces string.count
- if type(what) == "string" then
- local n = 0
- for _ in utfgmatch(str,what) do
- n = n + 1
- end
- return n
- else -- 4 times slower but still faster than / function
- return #match(Cs((P(what)/" " + nany)^0),str)
- end
- end
-
-else
-
- local cache = { }
-
- function lpeg.count(str,what) -- replaces string.count
- if type(what) == "string" then
- local p = cache[what]
- if not p then
- p = Cs((P(what)/" " + nany)^0)
- cache[p] = p
- end
- return #match(p,str)
- else -- 4 times slower but still faster than / function
- return #match(Cs((P(what)/" " + nany)^0),str)
- end
- end
-
-end
-
-local patterns_escapes = { -- also defines in l-string
- ["%"] = "%%",
- ["."] = "%.",
- ["+"] = "%+", ["-"] = "%-", ["*"] = "%*",
- ["["] = "%[", ["]"] = "%]",
- ["("] = "%)", [")"] = "%)",
- -- ["{"] = "%{", ["}"] = "%}"
- -- ["^"] = "%^", ["$"] = "%$",
-}
-
-local simple_escapes = { -- also defines in l-string
- ["-"] = "%-",
- ["."] = "%.",
- ["?"] = ".",
- ["*"] = ".*",
-}
-
-local p = Cs((S("-.+*%()[]") / patterns_escapes + anything)^0)
-local s = Cs((S("-.+*%()[]") / simple_escapes + anything)^0)
-
-function string.escapedpattern(str,simple)
- return match(simple and s or p,str)
-end
-
--- utf extensies
-
-lpeg.UP = lpeg.P
-
-if utfcharacters then
-
- function lpeg.US(str)
- local p
- for uc in utfcharacters(str) do
- if p then
- p = p + P(uc)
- else
- p = P(uc)
- end
- end
- return p
- end
-
-
-elseif utfgmatch then
-
- function lpeg.US(str)
- local p
- for uc in utfgmatch(str,".") do
- if p then
- p = p + P(uc)
- else
- p = P(uc)
- end
- end
- return p
- end
-
-else
-
- function lpeg.US(str)
- local p
- local f = function(uc)
- if p then
- p = p + P(uc)
- else
- p = P(uc)
- end
- end
- match((utf8char/f)^0,str)
- return p
- end
-
-end
-
-local range = Cs(utf8byte) * (Cs(utf8byte) + Cc(false))
-
-local utfchar = unicode and unicode.utf8 and unicode.utf8.char
-
-function lpeg.UR(str,more)
- local first, last
- if type(str) == "number" then
- first = str
- last = more or first
- else
- first, last = match(range,str)
- if not last then
- return P(str)
- end
- end
- if first == last then
- return P(str)
- elseif utfchar and last - first < 8 then -- a somewhat arbitrary criterium
- local p
- for i=first,last do
- if p then
- p = p + P(utfchar(i))
- else
- p = P(utfchar(i))
- end
- end
- return p -- nil when invalid range
- else
- local f = function(b)
- return b >= first and b <= last
- end
- return utf8byte / f -- nil when invalid range
- end
-end
-
-
-
-function lpeg.oneof(list,...) -- lpeg.oneof("elseif","else","if","then")
- if type(list) ~= "table" then
- list = { list, ... }
- end
- -- sort(list) -- longest match first
- local p = P(list[1])
- for l=2,#list do
- p = p + P(list[l])
- end
- return p
-end
-
-function lpeg.is_lpeg(p)
- return p and lpegtype(p) == "pattern"
-end
-
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
if not modules then modules = { } end modules ['l-table'] = {
version = 1.001,
comment = "companion to luat-lib.mkiv",
@@ -675,11 +172,12 @@ local type, next, tostring, tonumber, ipairs, table, string = type, next, tostri
local concat, sort, insert, remove = table.concat, table.sort, table.insert, table.remove
local format, find, gsub, lower, dump, match = string.format, string.find, string.gsub, string.lower, string.dump, string.match
local getmetatable, setmetatable = getmetatable, setmetatable
+local getinfo = debug.getinfo
-- Starting with version 5.2 Lua no longer provides ipairs, which makes
-- sense. As we already used the for loop and # in most places the
-- impact on ConTeXt was not that large; the remaining ipairs already
--- have been replaced. In a similar fashio we also hardly used pairs.
+-- have been replaced. In a similar fashion we also hardly used pairs.
--
-- Just in case, we provide the fallbacks as discussed in Programming
-- in Lua (http://www.lua.org/pil/7.3.html):
@@ -1022,6 +520,8 @@ end
-- problem: there no good number_to_string converter with the best resolution
+local function dummy() end
+
local function do_serialize(root,name,depth,level,indexed)
if level > 0 then
depth = depth .. " "
@@ -1214,19 +714,20 @@ local function do_serialize(root,name,depth,level,indexed)
end
elseif t == "function" then
if functions then
+ local f = getinfo(v).what == "C" and dump(dummy) or dump(v)
+ -- local f = getinfo(v).what == "C" and dump(function(...) return v(...) end) or dump(v)
if tk == "number" then -- or find(k,"^%d+$") then
if hexify then
- handle(format("%s [0x%04X]=loadstring(%q),",depth,k,dump(v)))
+ handle(format("%s [0x%04X]=loadstring(%q),",depth,k,f))
else
- handle(format("%s [%s]=loadstring(%q),",depth,k,dump(v)))
+ handle(format("%s [%s]=loadstring(%q),",depth,k,f))
end
elseif tk == "boolean" then
- handle(format("%s [%s]=loadstring(%q),",depth,tostring(k),dump(v)))
+ handle(format("%s [%s]=loadstring(%q),",depth,tostring(k),f))
elseif noquotes and not reserved[k] and find(k,"^%a[%w%_]*$") then
- handle(format("%s %s=loadstring(%q),",depth,k,dump(v)))
+ handle(format("%s %s=loadstring(%q),",depth,k,f))
else
- -- handle(format("%s [%q]=loadstring(%q),",depth,k,dump(v)))
- handle(format("%s [%q]=loadstring(%q),",depth,k,debug.getinfo(v).what == "C" and "C code" or dump(v)))
+ handle(format("%s [%q]=loadstring(%q),",depth,k,f))
end
end
else
@@ -1571,6 +1072,730 @@ function table.has_one_entry(t)
return t and not next(t,next(t))
end
+-- new
+
+function table.loweredkeys(t) -- maybe utf
+ local l = { }
+ for k, v in next, t do
+ l[lower(k)] = v
+ end
+ return l
+end
+
+-- new, might move (maybe duplicate)
+
+function table.unique(old)
+ local hash = { }
+ local new = { }
+ local n = 0
+ for i=1,#old do
+ local oi = old[i]
+ if not hash[oi] then
+ n = n + 1
+ new[n] = oi
+ hash[oi] = true
+ end
+ end
+ return new
+end
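-- [editorial note] quick illustration of the two new helpers above:
--
--   table.loweredkeys { Foo = 1, BAR = 2 }      -- { foo = 1, bar = 2 }
--   table.unique { "a", "b", "a", "c", "b" }    -- { "a", "b", "c" } (order kept)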
+
+-- function table.sorted(t,...)
+-- table.sort(t,...)
+-- return t -- still sorts in-place
+-- end
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+if not modules then modules = { } end modules ['l-lpeg'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+
+-- a new lpeg fails on a #(1-P(":")) test and really needs a + P(-1)
+
+local lpeg = require("lpeg")
+
+-- tracing (only used when we encounter a problem in integration of lpeg in luatex)
+
+local report = texio and texio.write_nl or print
+
+
+
+
+local type = type
+local byte, char, gmatch = string.byte, string.char, string.gmatch
+
+-- Beware, we predefine a bunch of patterns here and one reason for doing so
+-- is that we get consistent behaviour in some of the visualizers.
+
+lpeg.patterns = lpeg.patterns or { } -- so that we can share
+local patterns = lpeg.patterns
+
+local P, R, S, V, match = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.match
+local Ct, C, Cs, Cc = lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc
+local lpegtype = lpeg.type
+
+local utfcharacters = string.utfcharacters
+local utfgmatch = unicode and unicode.utf8.gmatch
+
+local anything = P(1)
+local endofstring = P(-1)
+local alwaysmatched = P(true)
+
+patterns.anything = anything
+patterns.endofstring = endofstring
+patterns.beginofstring = alwaysmatched
+patterns.alwaysmatched = alwaysmatched
+
+local digit, sign = R('09'), S('+-')
+local cr, lf, crlf = P("\r"), P("\n"), P("\r\n")
+local newline = crlf + S("\r\n") -- cr + lf
+local escaped = P("\\") * anything
+local squote = P("'")
+local dquote = P('"')
+local space = P(" ")
+
+local utfbom_32_be = P('\000\000\254\255')
+local utfbom_32_le = P('\255\254\000\000')
+local utfbom_16_be = P('\255\254')
+local utfbom_16_le = P('\254\255')
+local utfbom_8 = P('\239\187\191')
+local utfbom = utfbom_32_be + utfbom_32_le
+ + utfbom_16_be + utfbom_16_le
+ + utfbom_8
+local utftype = utfbom_32_be / "utf-32-be" + utfbom_32_le / "utf-32-le"
+ + utfbom_16_be / "utf-16-be" + utfbom_16_le / "utf-16-le"
+ + utfbom_8 / "utf-8" + alwaysmatched / "unknown"
+
+local utf8next = R("\128\191")
+
+patterns.utf8one = R("\000\127")
+patterns.utf8two = R("\194\223") * utf8next
+patterns.utf8three = R("\224\239") * utf8next * utf8next
+patterns.utf8four = R("\240\244") * utf8next * utf8next * utf8next
+patterns.utfbom = utfbom
+patterns.utftype = utftype
+
+local utf8char = patterns.utf8one + patterns.utf8two + patterns.utf8three + patterns.utf8four
+local validutf8char = utf8char^0 * endofstring * Cc(true) + Cc(false)
+
+patterns.utf8 = utf8char
+patterns.utf8char = utf8char
+patterns.validutf8 = validutf8char
+patterns.validutf8char = validutf8char
+
+patterns.digit = digit
+patterns.sign = sign
+patterns.cardinal = sign^0 * digit^1
+patterns.integer = sign^0 * digit^1
+patterns.float = sign^0 * digit^0 * P('.') * digit^1
+patterns.cfloat = sign^0 * digit^0 * P(',') * digit^1
+patterns.number = patterns.float + patterns.integer
+patterns.cnumber = patterns.cfloat + patterns.integer
+patterns.oct = P("0") * R("07")^1
+patterns.octal = patterns.oct
+patterns.HEX = P("0x") * R("09","AF")^1
+patterns.hex = P("0x") * R("09","af")^1
+patterns.hexadecimal = P("0x") * R("09","AF","af")^1
+patterns.lowercase = R("az")
+patterns.uppercase = R("AZ")
+patterns.letter = patterns.lowercase + patterns.uppercase
+patterns.space = space
+patterns.tab = P("\t")
+patterns.spaceortab = patterns.space + patterns.tab
+patterns.eol = S("\n\r")
+patterns.spacer = S(" \t\f\v") -- + char(0xc2, 0xa0) if we want utf (cf mail roberto)
+patterns.newline = newline
+patterns.emptyline = newline^1
+patterns.nonspacer = 1 - patterns.spacer
+patterns.whitespace = patterns.eol + patterns.spacer
+patterns.nonwhitespace = 1 - patterns.whitespace
+patterns.equal = P("=")
+patterns.comma = P(",")
+patterns.commaspacer = P(",") * patterns.spacer^0
+patterns.period = P(".")
+patterns.colon = P(":")
+patterns.semicolon = P(";")
+patterns.underscore = P("_")
+patterns.escaped = escaped
+patterns.squote = squote
+patterns.dquote = dquote
+patterns.nosquote = (escaped + (1-squote))^0
+patterns.nodquote = (escaped + (1-dquote))^0
+patterns.unsingle = (squote/"") * patterns.nosquote * (squote/"")
+patterns.undouble = (dquote/"") * patterns.nodquote * (dquote/"")
+patterns.unquoted = patterns.undouble + patterns.unsingle -- more often undouble
+patterns.unspacer = ((patterns.spacer^1)/"")^0
+
+patterns.somecontent = (anything - newline - space)^1 -- (utf8char - newline - space)^1
+patterns.beginline = #(1-newline)
+
+-- print(string.unquoted("test"))
+-- print(string.unquoted([["t\"est"]]))
+-- print(string.unquoted([["t\"est"x]]))
+-- print(string.unquoted("\'test\'"))
+-- print(string.unquoted('"test"'))
+-- print(string.unquoted('"test"'))
+
+function lpeg.anywhere(pattern) --slightly adapted from website
+ return P { P(pattern) + 1 * V(1) } -- why so complex?
+end
+
+function lpeg.splitter(pattern, action)
+ return (((1-P(pattern))^1)/action+1)^0
+end
+
+function lpeg.tsplitter(pattern, action)
+ return Ct((((1-P(pattern))^1)/action+1)^0)
+end
+
+-- problem: separator can be lpeg and that does not hash too well, but
+-- it's quite okay as the key is then not garbage collected
+
+local splitters_s, splitters_m, splitters_t = { }, { }, { }
+
+local function splitat(separator,single)
+ local splitter = (single and splitters_s[separator]) or splitters_m[separator]
+ if not splitter then
+ separator = P(separator)
+ local other = C((1 - separator)^0)
+ if single then
+ local any = anything
+ splitter = other * (separator * C(any^0) + "") -- ?
+ splitters_s[separator] = splitter
+ else
+ splitter = other * (separator * other)^0
+ splitters_m[separator] = splitter
+ end
+ end
+ return splitter
+end
+
+local function tsplitat(separator)
+ local splitter = splitters_t[separator]
+ if not splitter then
+ splitter = Ct(splitat(separator))
+ splitters_t[separator] = splitter
+ end
+ return splitter
+end
+
+lpeg.splitat = splitat
+lpeg.tsplitat = tsplitat
+
+function string.splitup(str,separator)
+ if not separator then
+ separator = ","
+ end
+ return match(splitters_m[separator] or splitat(separator),str)
+end
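-- [editorial note] splitat/tsplitat return the separate captures resp. a table,
-- while string.splitup (new here) maps onto multiple return values; that is why
-- runners.prepare further down now uses it to unpack "--iftouched=old,new":
--
--   lpeg.match(lpeg.splitat(","),"a,b,c")   -- "a", "b", "c"
--   lpeg.match(lpeg.tsplitat(","),"a,b,c")  -- { "a", "b", "c" }
--   string.splitup("old.tex,new.tex",",")   -- "old.tex", "new.tex"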
+
+
+local cache = { }
+
+function lpeg.split(separator,str)
+ local c = cache[separator]
+ if not c then
+ c = tsplitat(separator)
+ cache[separator] = c
+ end
+ return match(c,str)
+end
+
+function string.split(str,separator)
+ local c = cache[separator]
+ if not c then
+ c = tsplitat(separator)
+ cache[separator] = c
+ end
+ return match(c,str)
+end
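+
+-- both variants cache the splitter, for instance:
+--
+-- inspect(lpeg.split(",","a,b,c"))    -- { "a", "b", "c" }
+-- inspect(string.split("a,b,c",","))  -- { "a", "b", "c" }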
+
+local spacing = patterns.spacer^0 * newline -- sort of strip
+local empty = spacing * Cc("")
+local nonempty = Cs((1-spacing)^1) * spacing^-1
+local content = (empty + nonempty)^1
+
+patterns.textline = content
+
+
+local linesplitter = tsplitat(newline)
+
+patterns.linesplitter = linesplitter
+
+function string.splitlines(str)
+ return match(linesplitter,str)
+end
+
+local utflinesplitter = utfbom^-1 * tsplitat(newline)
+
+patterns.utflinesplitter = utflinesplitter
+
+function string.utfsplitlines(str)
+ return match(utflinesplitter,str or "")
+end
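+
+-- for instance:
+--
+-- inspect(string.splitlines("one\ntwo")) -- { "one", "two" }
+--
+-- the utf variant additionally skips an optional leading bom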
+
+
+local cache = { }
+
+function lpeg.checkedsplit(separator,str)
+ local c = cache[separator]
+ if not c then
+ separator = P(separator)
+ local other = C((1 - separator)^1)
+ c = Ct(separator^0 * other * (separator^1 * other)^0)
+ cache[separator] = c
+ end
+ return match(c,str)
+end
+
+function string.checkedsplit(str,separator)
+ local c = cache[separator]
+ if not c then
+ separator = P(separator)
+ local other = C((1 - separator)^1)
+ c = Ct(separator^0 * other * (separator^1 * other)^0)
+ cache[separator] = c
+ end
+ return match(c,str)
+end
+
+
+local function f2(s) local c1, c2 = byte(s,1,2) return c1 * 64 + c2 - 12416 end
+local function f3(s) local c1, c2, c3 = byte(s,1,3) return (c1 * 64 + c2) * 64 + c3 - 925824 end
+local function f4(s) local c1, c2, c3, c4 = byte(s,1,4) return ((c1 * 64 + c2) * 64 + c3) * 64 + c4 - 63447168 end
+
+local utf8byte = patterns.utf8one/byte + patterns.utf8two/f2 + patterns.utf8three/f3 + patterns.utf8four/f4
+
+patterns.utf8byte = utf8byte
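+
+-- this gives the numeric value of one (possibly multibyte) utf character:
+--
+-- print(match(patterns.utf8byte,"a")) -- 97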
+
+
+
+local cache = { }
+
+function lpeg.stripper(str)
+ if type(str) == "string" then
+ local s = cache[str]
+ if not s then
+ s = Cs(((S(str)^1)/"" + 1)^0)
+ cache[str] = s
+ end
+ return s
+ else
+ return Cs(((str^1)/"" + 1)^0)
+ end
+end
+
+local cache = { }
+
+function lpeg.keeper(str)
+ if type(str) == "string" then
+ local s = cache[str]
+ if not s then
+ s = Cs((((1-S(str))^1)/"" + 1)^0)
+ cache[str] = s
+ end
+ return s
+ else
+ return Cs((((1-str)^1)/"" + 1)^0)
+ end
+end
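+
+-- stripper removes the given characters, keeper keeps only those:
+--
+-- print(match(lpeg.stripper("ab"),"aXbYab")) -- XY
+-- print(match(lpeg.keeper ("ab"),"aXbYab")) -- abab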
+
+function lpeg.frontstripper(str) -- or pattern (yet undocumented)
+ return (P(str) + P(true)) * Cs(P(1)^0)
+end
+
+function lpeg.endstripper(str) -- or pattern (yet undocumented)
+ return Cs((1 - P(str) * P(-1))^0)
+end
+
+-- Just for fun I looked at the used bytecode and
+-- p = (p and p + pp) or pp gets one more (testset).
+
+function lpeg.replacer(one,two)
+ if type(one) == "table" then
+ local no = #one
+ if no > 0 then
+ local p
+ for i=1,no do
+ local o = one[i]
+ local pp = P(o[1]) / o[2]
+ if p then
+ p = p + pp
+ else
+ p = pp
+ end
+ end
+ return Cs((p + 1)^0)
+ end
+ else
+ two = two or ""
+ return Cs((P(one)/two + 1)^0)
+ end
+end
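+
+-- a single pair or a list of pairs, for instance:
+--
+-- print(match(lpeg.replacer("o","0"),"foo"))                       -- f00
+-- print(match(lpeg.replacer { { "a","x" }, { "b","y" } },"aabb"))  -- xxyy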
+
+local splitters_f, splitters_s = { }, { }
+
+function lpeg.firstofsplit(separator) -- always return value
+ local splitter = splitters_f[separator]
+ if not splitter then
+ separator = P(separator)
+ splitter = C((1 - separator)^0)
+ splitters_f[separator] = splitter
+ end
+ return splitter
+end
+
+function lpeg.secondofsplit(separator) -- nil if not split
+ local splitter = splitters_s[separator]
+ if not splitter then
+ separator = P(separator)
+ splitter = (1 - separator)^0 * separator * C(anything^0)
+ splitters_s[separator] = splitter
+ end
+ return splitter
+end
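+
+-- for instance:
+--
+-- print(match(lpeg.firstofsplit (","),"a,b,c")) -- a
+-- print(match(lpeg.secondofsplit(","),"a,b,c")) -- b,c
+-- print(match(lpeg.secondofsplit(","),"abc"))   -- nil (no separator found)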
+
+function lpeg.balancer(left,right)
+ left, right = P(left), P(right)
+ return P { left * ((1 - left - right) + V(1))^0 * right }
+end
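+
+-- matches a balanced pair, for instance:
+--
+-- print(match(lpeg.balancer("(",")"),"(a(b)c)")) -- 8 (the whole string matches)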
+
+
+
+local nany = utf8char/""
+
+function lpeg.counter(pattern)
+ pattern = Cs((P(pattern)/" " + nany)^0)
+ return function(str)
+ return #match(pattern,str)
+ end
+end
+
+if utfgmatch then
+
+ function lpeg.count(str,what) -- replaces string.count
+ if type(what) == "string" then
+ local n = 0
+ for _ in utfgmatch(str,what) do
+ n = n + 1
+ end
+ return n
+ else -- 4 times slower but still faster than / function
+ return #match(Cs((P(what)/" " + nany)^0),str)
+ end
+ end
+
+else
+
+ local cache = { }
+
+ function lpeg.count(str,what) -- replaces string.count
+ if type(what) == "string" then
+ local p = cache[what]
+ if not p then
+ p = Cs((P(what)/" " + nany)^0)
+ cache[what] = p -- index by the string, otherwise the cache is never hit
+ end
+ return #match(p,str)
+ else -- 4 times slower but still faster than / function
+ return #match(Cs((P(what)/" " + nany)^0),str)
+ end
+ end
+
+end
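+
+-- for instance:
+--
+-- print(lpeg.count("cow says moo","o"))    -- 3
+-- print(lpeg.counter("o")("cow says moo")) -- 3 (reusable closure)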
+
+local patterns_escapes = { -- also defined in l-string
+ ["%"] = "%%",
+ ["."] = "%.",
+ ["+"] = "%+", ["-"] = "%-", ["*"] = "%*",
+ ["["] = "%[", ["]"] = "%]",
+ ["("] = "%)", [")"] = "%)",
+ -- ["{"] = "%{", ["}"] = "%}"
+ -- ["^"] = "%^", ["$"] = "%$",
+}
+
+local simple_escapes = { -- also defined in l-string
+ ["-"] = "%-",
+ ["."] = "%.",
+ ["?"] = ".",
+ ["*"] = ".*",
+}
+
+local p = Cs((S("-.+*%()[]") / patterns_escapes + anything)^0)
+local s = Cs((S("-.+*%()[]") / simple_escapes + anything)^0)
+
+function string.escapedpattern(str,simple)
+ return match(simple and s or p,str)
+end
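+
+-- escapes lua pattern magic characters, for instance:
+--
+-- print(string.escapedpattern("1.2-3"))      -- 1%.2%-3
+-- print(string.escapedpattern("*.lua",true)) -- .*%.lua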
+
+-- utf extensions
+
+lpeg.UP = lpeg.P
+
+if utfcharacters then
+
+ function lpeg.US(str)
+ local p
+ for uc in utfcharacters(str) do
+ if p then
+ p = p + P(uc)
+ else
+ p = P(uc)
+ end
+ end
+ return p
+ end
+
+
+elseif utfgmatch then
+
+ function lpeg.US(str)
+ local p
+ for uc in utfgmatch(str,".") do
+ if p then
+ p = p + P(uc)
+ else
+ p = P(uc)
+ end
+ end
+ return p
+ end
+
+else
+
+ function lpeg.US(str)
+ local p
+ local f = function(uc)
+ if p then
+ p = p + P(uc)
+ else
+ p = P(uc)
+ end
+ end
+ match((utf8char/f)^0,str)
+ return p
+ end
+
+end
+
+local range = Cs(utf8byte) * (Cs(utf8byte) + Cc(false))
+
+local utfchar = unicode and unicode.utf8 and unicode.utf8.char
+
+function lpeg.UR(str,more)
+ local first, last
+ if type(str) == "number" then
+ first = str
+ last = more or first
+ else
+ first, last = match(range,str)
+ if not last then
+ return P(str)
+ end
+ end
+ if first == last then
+ return P(str)
+ elseif utfchar and last - first < 8 then -- a somewhat arbitrary criterion
+ local p
+ for i=first,last do
+ if p then
+ p = p + P(utfchar(i))
+ else
+ p = P(utfchar(i))
+ end
+ end
+ return p -- nil when invalid range
+ else
+ local f = function(b)
+ return b >= first and b <= last
+ end
+ return utf8byte / f -- nil when invalid range
+ end
+end
+
+
+
+function lpeg.oneof(list,...) -- lpeg.oneof("elseif","else","if","then")
+ if type(list) ~= "table" then
+ list = { list, ... }
+ end
+ -- sort(list) -- longest match first
+ local p = P(list[1])
+ for l=2,#list do
+ p = p + P(list[l])
+ end
+ return p
+end
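+
+-- for instance:
+--
+-- print(match(lpeg.oneof("elseif","else","if","then"),"elseif")) -- 7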
+
+function lpeg.is_lpeg(p)
+ return p and lpegtype(p) == "pattern"
+end
+
+-- For the moment here, but it might move to utilities. Beware, we need to
+-- have the longest keyword first, so 'aaa' comes before 'aa', which is why we
+-- loop back from the end, i.e. prepend.
+
+local sort, fastcopy, sortedkeys = table.sort, table.fastcopy, table.sortedkeys -- dependency!
+
+function lpeg.append(list,pp,delayed,checked)
+ local p = pp
+ if #list > 0 then
+ local keys = fastcopy(list)
+ sort(keys)
+ for i=#keys,1,-1 do
+ local k = keys[i]
+ if p then
+ p = P(k) + p
+ else
+ p = P(k)
+ end
+ end
+ elseif delayed then -- hm, it looks like the lpeg parser resolves anyway
+ local keys = sortedkeys(list)
+ if p then
+ for i=1,#keys,1 do
+ local k = keys[i]
+ local v = list[k]
+ p = P(k)/list + p
+ end
+ else
+ for i=1,#keys do
+ local k = keys[i]
+ local v = list[k]
+ if p then
+ p = P(k) + p
+ else
+ p = P(k)
+ end
+ end
+ if p then
+ p = p / list
+ end
+ end
+ elseif checked then
+ -- problem: substitution gives a capture
+ local keys = sortedkeys(list)
+ for i=1,#keys do
+ local k = keys[i]
+ local v = list[k]
+ if p then
+ if k == v then
+ p = P(k) + p
+ else
+ p = P(k)/v + p
+ end
+ else
+ if k == v then
+ p = P(k)
+ else
+ p = P(k)/v
+ end
+ end
+ end
+ else
+ local keys = sortedkeys(list)
+ for i=1,#keys do
+ local k = keys[i]
+ local v = list[k]
+ if p then
+ p = P(k)/v + p
+ else
+ p = P(k)/v
+ end
+ end
+ end
+ return p
+end
+
+-- inspect(lpeg.append({ a = "1", aa = "1", aaa = "1" } ,nil,true))
+-- inspect(lpeg.append({ ["degree celsius"] = "1", celsius = "1", degree = "1" } ,nil,true))
+
+-- function lpeg.exact_match(words,case_insensitive)
+-- local pattern = concat(words)
+-- if case_insensitive then
+-- local pattern = S(upper(characters)) + S(lower(characters))
+-- local list = { }
+-- for i=1,#words do
+-- list[lower(words[i])] = true
+-- end
+-- return Cmt(pattern^1, function(_,i,s)
+-- return list[lower(s)] and i
+-- end)
+-- else
+-- local pattern = S(concat(words))
+-- local list = { }
+-- for i=1,#words do
+-- list[words[i]] = true
+-- end
+-- return Cmt(pattern^1, function(_,i,s)
+-- return list[s] and i
+-- end)
+-- end
+-- end
+
+-- experiment:
+
+local function make(t)
+ local p
+-- for k, v in next, t do
+ for k, v in table.sortedhash(t) do
+ if not p then
+ if next(v) then
+ p = P(k) * make(v)
+ else
+ p = P(k)
+ end
+ else
+ if next(v) then
+ p = p + P(k) * make(v)
+ else
+ p = p + P(k)
+ end
+ end
+ end
+ return p
+end
+
+function lpeg.utfchartabletopattern(list)
+ local tree = { }
+ for i=1,#list do
+ local t = tree
+ for c in gmatch(list[i],".") do
+ if not t[c] then
+ t[c] = { }
+ end
+ t = t[c]
+ end
+ end
+ return make(tree)
+end
+
+-- inspect ( lpeg.utfchartabletopattern {
+-- utfchar(0x00A0), -- nbsp
+-- utfchar(0x2000), -- enquad
+-- utfchar(0x2001), -- emquad
+-- utfchar(0x2002), -- enspace
+-- utfchar(0x2003), -- emspace
+-- utfchar(0x2004), -- threeperemspace
+-- utfchar(0x2005), -- fourperemspace
+-- utfchar(0x2006), -- sixperemspace
+-- utfchar(0x2007), -- figurespace
+-- utfchar(0x2008), -- punctuationspace
+-- utfchar(0x2009), -- breakablethinspace
+-- utfchar(0x200A), -- hairspace
+-- utfchar(0x200B), -- zerowidthspace
+-- utfchar(0x202F), -- narrownobreakspace
+-- utfchar(0x205F), -- math thinspace
+-- } )
+
end -- of closure
@@ -1831,6 +2056,11 @@ function io.readstring(f,n,m)
return str
end
+--
+
+if not io.i_limiter then function io.i_limiter() end end -- dummy so we can test safely
+if not io.o_limiter then function io.o_limiter() end end -- dummy so we can test safely
+
end -- of closure
@@ -2061,7 +2291,7 @@ if not modules then modules = { } end modules ['l-os'] = {
-- maybe build io.flush in os.execute
local os = os
-local find, format, gsub, upper = string.find, string.format, string.gsub, string.upper
+local find, format, gsub, upper, gmatch = string.find, string.format, string.gsub, string.upper, string.gmatch
local concat = table.concat
local random, ceil = math.random, math.ceil
local rawget, rawset, type, getmetatable, setmetatable, tonumber = rawget, rawset, type, getmetatable, setmetatable, tonumber
@@ -2167,9 +2397,9 @@ os.type = os.type or (io.pathseparator == ";" and "windows") or "unix"
os.name = os.name or (os.type == "windows" and "mswin" ) or "linux"
if os.type == "windows" then
- os.libsuffix, os.binsuffix = 'dll', 'exe'
+ os.libsuffix, os.binsuffix, os.binsuffixes = 'dll', 'exe', { 'exe', 'cmd', 'bat' }
else
- os.libsuffix, os.binsuffix = 'so', ''
+ os.libsuffix, os.binsuffix, os.binsuffixes = 'so', '', { '' }
end
function os.launch(str)
@@ -2400,6 +2630,39 @@ function os.timezone(delta)
end
end
+local memory = { }
+
+local function which(filename)
+ local fullname = memory[filename]
+ if fullname == nil then
+ local suffix = file.suffix(filename)
+ local suffixes = suffix == "" and os.binsuffixes or { suffix }
+ for directory in gmatch(os.getenv("PATH"),"[^" .. io.pathseparator .."]+") do
+ local df = file.join(directory,filename)
+ for i=1,#suffixes do
+ local dfs = file.addsuffix(df,suffixes[i])
+ if io.exists(dfs) then
+ fullname = dfs
+ break
+ end
+ end
+ end
+ if not fullname then
+ fullname = false
+ end
+ memory[filename] = fullname
+ end
+ return fullname
+end
+
+os.which = which
+os.where = which
+
+-- print(os.which("inkscape.exe"))
+-- print(os.which("inkscape"))
+-- print(os.which("gs.exe"))
+-- print(os.which("ps2pdf"))
+
end -- of closure
@@ -2499,10 +2762,10 @@ end
local trick_1 = char(1)
local trick_2 = "^" .. trick_1 .. "/+"
-function file.join(...)
+function file.join(...) -- rather dirty
local lst = { ... }
local a, b = lst[1], lst[2]
- if a == "" then
+ if not a or a == "" then -- the 'not a' check is new
lst[1] = trick_1
elseif b and find(a,"^/+$") and find(b,"^/") then
lst[1] = ""
@@ -2523,6 +2786,15 @@ function file.join(...)
end
+-- We should be able to use:
+--
+-- function file.is_writable(name)
+-- local a = attributes(name) or attributes(dirname(name,"."))
+-- return a and sub(a.permissions,2,2) == "w"
+-- end
+--
+-- But after some testing Taco and I came up with:
+
function file.is_writable(name)
if lfs.isdir(name) then
name = name .. "/m_t_x_t_e_s_t.tmp"
@@ -2532,12 +2804,17 @@ function file.is_writable(name)
os.remove(name)
return true
end
- else
- local existing = lfs.isfile(name)
- f = io.open(name,"ab")
+ elseif lfs.isfile(name) then
+ local f = io.open(name,"ab")
if f then
f:close()
- if not existing then os.remove(name) end
+ return true
+ end
+ else
+ local f = io.open(name,"ab")
+ if f then
+ f:close()
+ os.remove(name)
return true
end
end
@@ -2594,7 +2871,7 @@ function file.collapsepath(str,anchor)
if element == '.' then
-- do nothing
elseif element == '..' then
- local n = i -1
+ local n = i - 1
while n > 0 do
local element = oldelements[n]
if element ~= '..' and element ~= '.' then
@@ -2690,10 +2967,42 @@ local path = C(((1-slash)^0 * slash)^0)
local suffix = period * C(P(1-period)^0 * P(-1))
local base = C((1-suffix)^0)
-local pattern = (drive + Cc("")) * (path + Cc("")) * (base + Cc("")) * (suffix + Cc(""))
+drive = drive + Cc("")
+path = path + Cc("")
+base = base + Cc("")
+suffix = suffix + Cc("")
-function file.splitname(str) -- returns drive, path, base, suffix
- return lpegmatch(pattern,str)
+local pattern_a = drive * path * base * suffix
+local pattern_b = path * base * suffix
+local pattern_c = C(drive * path) * C(base * suffix)
+
+function file.splitname(str,splitdrive)
+ if splitdrive then
+ return lpegmatch(pattern_a,str) -- returns drive, path, base, suffix
+ else
+ return lpegmatch(pattern_b,str) -- returns path, base, suffix
+ end
+end
+
+function file.nametotable(str,splitdrive) -- returns table
+ local path, drive, subpath, name, base, suffix = lpegmatch(pattern_c,str)
+ if splitdrive then
+ return {
+ path = path,
+ drive = drive,
+ subpath = subpath,
+ name = name,
+ base = base,
+ suffix = suffix,
+ }
+ else
+ return {
+ path = path,
+ name = name,
+ base = base,
+ suffix = suffix,
+ }
+ end
end
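+
+-- for instance (without drive splitting):
+--
+-- print(file.splitname("foo/bar.tex")) -- foo/  bar  tex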
-- function test(t) for k, v in next, t do print(v, "=>", file.splitname(v)) end end
@@ -2828,12 +3137,19 @@ local nothing = Cc("")
local escaped = (plus / " ") + (percent * C(hexdigit * hexdigit) / tochar)
-- we assume schemes with more than 1 character (in order to avoid problems with windows disks)
+-- we also assume that when we have a scheme, we also have an authority
+
+local schemestr = Cs((escaped+(1-colon-slash-qmark-hash))^2)
+local authoritystr = Cs((escaped+(1- slash-qmark-hash))^0)
+local pathstr = Cs((escaped+(1- qmark-hash))^0)
+local querystr = Cs((escaped+(1- hash))^0)
+local fragmentstr = Cs((escaped+(1- endofstring))^0)
-local scheme = Cs((escaped+(1-colon-slash-qmark-hash))^2) * colon * slash * slash + nothing
-local authority = Cs((escaped+(1- slash-qmark-hash))^0) + nothing
-local path = slash * Cs((escaped+(1- qmark-hash))^0) + nothing
-local query = qmark * Cs((escaped+(1- hash))^0) + nothing
-local fragment = hash * Cs((escaped+(1- endofstring))^0) + nothing
+local scheme = schemestr * colon + nothing
+local authority = slash * slash * authoritystr + nothing
+local path = slash * pathstr + nothing
+local query = qmark * querystr + nothing
+local fragment = hash * fragmentstr + nothing
local validurl = scheme * authority * path * query * fragment
local parser = Ct(validurl)
@@ -2854,11 +3170,14 @@ local function split(str)
return (type(str) == "string" and lpegmatch(parser,str)) or str
end
+local isscheme = schemestr * colon * slash * slash -- this test also assumes authority
+
local function hasscheme(str)
- local scheme = lpegmatch(scheme,str) -- at least one character
- return scheme and scheme ~= ""
+ local scheme = lpegmatch(isscheme,str) -- at least one character
+ return scheme ~= "" and scheme or false
end
+
-- todo: cache them
local rootletter = R("az","AZ")
@@ -3007,8 +3326,6 @@ local attributes = lfs.attributes
local walkdir = lfs.dir
local isdir = lfs.isdir
local isfile = lfs.isfile
-local mkdir = lfs.mkdir
-local chdir = lfs.chdir
local currentdir = lfs.currentdir
-- handy
@@ -3056,13 +3373,13 @@ local function collectpattern(path,patt,recurse,result)
local ok, scanner
result = result or { }
if path == "/" then
- ok, scanner = xpcall(function() return walkdir(path..".") end, function() end) -- kepler safe
+ ok, scanner, first = xpcall(function() return walkdir(path..".") end, function() end) -- kepler safe
else
- ok, scanner = xpcall(function() return walkdir(path) end, function() end) -- kepler safe
+ ok, scanner, first = xpcall(function() return walkdir(path) end, function() end) -- kepler safe
end
if ok and type(scanner) == "function" then
if not find(path,"/$") then path = path .. '/' end
- for name in scanner do
+ for name in scanner, first do
local full = path .. name
local attr = attributes(full)
local mode = attr.mode
@@ -3245,7 +3562,7 @@ if onwindows then
pth = pth .. "/" .. s
end
if make_indeed and not isdir(pth) then
- mkdir(pth)
+ lfs.mkdir(pth)
end
end
return pth, (isdir(pth) == true)
@@ -3277,7 +3594,7 @@ else
pth = pth .. "/" .. s
end
if make_indeed and not first and not isdir(pth) then
- mkdir(pth)
+ lfs.mkdir(pth)
end
end
else
@@ -3285,7 +3602,7 @@ else
for s in gmatch(str,"[^/]+") do
pth = pth .. "/" .. s
if make_indeed and not isdir(pth) then
- mkdir(pth)
+ lfs.mkdir(pth)
end
end
end
@@ -3313,10 +3630,10 @@ if onwindows then
first, last = match(str,"^([a-zA-Z]:)(.*)$")
if first and not find(last,"^/") then
local d = currentdir()
- if chdir(first) then
+ if lfs.chdir(first) then
first = dir.current()
end
- chdir(d)
+ lfs.chdir(d)
end
end
if not first then
@@ -3381,10 +3698,6 @@ local type, tonumber = type, tonumber
boolean = boolean or { }
local boolean = boolean
--- function boolean.tonumber(b)
--- return b and 1 or 0 -- test and test and return or return
--- end
-
function boolean.tonumber(b)
if b then return 1 else return 0 end -- test and return or return
end
@@ -3545,7 +3858,7 @@ local function utf16_to_utf8_be(t)
if right then
local now = 256*left + right
if more > 0 then
- now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000
+ now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000 -- the 0x10000 smells wrong
more = 0
r = r + 1
result[r] = utfchar(now)
@@ -3573,7 +3886,7 @@ local function utf16_to_utf8_le(t)
if right then
local now = 256*right + left
if more > 0 then
- now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000
+ now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000 -- the 0x10000 smells wrong
more = 0
r = r + 1
result[r] = utfchar(now)
@@ -3590,14 +3903,14 @@ local function utf16_to_utf8_le(t)
return t
end
-local function utf32_to_utf8_be(str)
+local function utf32_to_utf8_be(t)
if type(t) == "string" then
- t = utfsplitlines(str)
+ t = utfsplitlines(t)
end
local result = { } -- we reuse result
for i=1,#t do
local r, more = 0, -1
- for a,b in bytepairs(str) do
+ for a,b in bytepairs(t[i]) do
if a and b then
if more < 0 then
more = 256*256*256*a + 256*256*b
@@ -3612,17 +3925,17 @@ local function utf32_to_utf8_be(str)
end
t[i] = concat(result,"",1,r)
end
- return result
+ return t
end
-local function utf32_to_utf8_le(str)
+local function utf32_to_utf8_le(t)
if type(t) == "string" then
- t = utfsplitlines(str)
+ t = utfsplitlines(t)
end
local result = { } -- we reuse result
for i=1,#t do
local r, more = 0, -1
- for a,b in bytepairs(str) do
+ for a,b in bytepairs(t[i]) do
if a and b then
if more < 0 then
more = 256*b + a
@@ -3637,7 +3950,7 @@ local function utf32_to_utf8_le(str)
end
t[i] = concat(result,"",1,r)
end
- return result
+ return t
end
unicode.utf32_to_utf8_be = utf32_to_utf8_be
@@ -3706,15 +4019,67 @@ end
local lpegmatch = lpeg.match
-local utftype = lpeg.patterns.utftype
+local patterns = lpeg.patterns
+local utftype = patterns.utftype
function unicode.filetype(data)
return data and lpegmatch(utftype,data) or "unknown"
end
+local toentities = lpeg.Cs (
+ (
+ patterns.utf8one
+ + (
+ patterns.utf8two
+ + patterns.utf8three
+ + patterns.utf8four
+ ) / function(s) local b = utfbyte(s) if b < 127 then return s else return format("&#%X;",b) end end
+ )^0
+)
+
+patterns.toentities = toentities
+
+function utf.toentities(str)
+ return lpegmatch(toentities,str)
+end
+
+local P, C, R, Cs = lpeg.P, lpeg.C, lpeg.R, lpeg.Cs
+
+local one = P(1)
+local two = C(1) * C(1)
+local four = C(R(utfchar(0xD8),utfchar(0xFF))) * C(1) * C(1) * C(1)
+
+local pattern = P("\254\255") * Cs( (
+ four / function(a,b,c,d)
+ local ab = 0xFF * byte(a) + byte(b)
+ local cd = 0xFF * byte(c) + byte(d)
+ return utfchar((ab-0xD800)*0x400 + (cd-0xDC00) + 0x10000)
+ end
+ + two / function(a,b)
+ return utfchar(byte(a)*256 + byte(b))
+ end
+ + one
+ )^1 )
+ + P("\255\254") * Cs( (
+ four / function(b,a,d,c)
+ local ab = 0xFF * byte(a) + byte(b)
+ local cd = 0xFF * byte(c) + byte(d)
+ return utfchar((ab-0xD800)*0x400 + (cd-0xDC00) + 0x10000)
+ end
+ + two / function(b,a)
+ return utfchar(byte(a)*256 + byte(b))
+ end
+ + one
+ )^1 )
+
+function string.toutf(s)
+ return lpegmatch(pattern,s) or s -- todo: utf32
+end
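+
+-- converts utf-16 with a leading byte order mark (either byte order) into utf-8:
+--
+-- print(string.toutf("\254\255\000A\000B")) -- AB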
+
+
end -- of closure
do -- create closure to overcome 200 locals limit
@@ -3750,8 +4115,8 @@ if not math.sind then
end
if not math.odd then
- function math.odd (n) return n % 2 == 0 end
- function math.even(n) return n % 2 ~= 0 end
+ function math.odd (n) return n % 2 ~= 0 end
+ function math.even(n) return n % 2 == 0 end
end
@@ -3771,9 +4136,10 @@ utilities = utilities or {}
utilities.tables = utilities.tables or { }
local tables = utilities.tables
-local format, gmatch = string.format, string.gmatch
+local format, gmatch, rep = string.format, string.gmatch, string.rep
local concat, insert, remove = table.concat, table.insert, table.remove
local setmetatable, getmetatable, tonumber, tostring = setmetatable, getmetatable, tonumber, tostring
+local type, next, rawset, tonumber = type, next, rawset, tonumber
function tables.definetable(target) -- defines undefined tables
local composed, t, n = nil, { }, 0
@@ -3789,14 +4155,31 @@ function tables.definetable(target) -- defines undefined tables
return concat(t,"\n")
end
-function tables.accesstable(target)
- local t = _G
+function tables.accesstable(target,root)
+ local t = root or _G
for name in gmatch(target,"([^%.]+)") do
t = t[name]
+ if not t then
+ return
+ end
end
return t
end
+function tables.migratetable(target,v,root)
+ local t = root or _G
+ local names = string.split(target,".")
+ for i=1,#names-1 do
+ local name = names[i]
+ t[name] = t[name] or { }
+ t = t[name]
+ if not t then
+ return
+ end
+ end
+ t[names[#names]] = v
+end
+
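+-- for instance (intermediate tables are created on the fly):
+--
+-- local t = { }
+-- tables.migratetable("a.b.c",42,t)
+-- print(tables.accesstable("a.b.c",t)) -- 42
+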
function tables.removevalue(t,value) -- todo: n
if value then
for i=1,#t do
@@ -3840,13 +4223,19 @@ end
-- experimental
-local function toxml(t,d,result)
+local function toxml(t,d,result,step)
for k, v in table.sortedpairs(t) do
if type(v) == "table" then
- result[#result+1] = format("%s<%s>",d,k)
- toxml(v,d.." ",result)
- result[#result+1] = format("%s</%s>",d,k)
- elseif tonumber(k) then
+ if type(k) == "number" then
+ result[#result+1] = format("%s<entry n='%s'>",d,k)
+ toxml(v,d..step,result,step)
+ result[#result+1] = format("%s</entry>",d,k)
+ else
+ result[#result+1] = format("%s<%s>",d,k)
+ toxml(v,d..step,result,step)
+ result[#result+1] = format("%s</%s>",d,k)
+ end
+ elseif type(k) == "number" then
result[#result+1] = format("%s<entry n='%s'>%s</entry>",d,k,v,k)
else
result[#result+1] = format("%s<%s>%s</%s>",d,k,tostring(v),k)
@@ -3854,17 +4243,56 @@ local function toxml(t,d,result)
end
end
-function table.toxml(t,name,nobanner)
+function table.toxml(t,name,nobanner,indent,spaces)
local noroot = name == false
local result = (nobanner or noroot) and { } or { "<?xml version='1.0' standalone='yes' ?>" }
+ local indent = rep(" ",indent or 0)
+ local spaces = rep(" ",spaces or 1)
if noroot then
- toxml( t, "", result)
+ toxml( t, indent, result, spaces)
else
- toxml( { [name or "root"] = t }, "", result)
+ toxml( { [name or "root"] = t }, indent, result, spaces)
end
return concat(result,"\n")
end
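+
+-- a small example:
+--
+-- print(table.toxml({ title = "test" },"doc",true))
+--
+-- <doc>
+--  <title>test</title>
+-- </doc>
+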
+-- also experimental
+
+-- encapsulate(table,utilities.tables)
+-- encapsulate(table,utilities.tables,true)
+-- encapsulate(table,true)
+
+function tables.encapsulate(core,capsule,protect)
+ if type(capsule) ~= "table" then
+ protect = true
+ capsule = { }
+ end
+ for key, value in next, core do
+ if capsule[key] then
+ print(format("\ninvalid inheritance '%s' in '%s': %s",key,tostring(core)))
+ os.exit()
+ else
+ capsule[key] = value
+ end
+ end
+ if protect then
+ for key, value in next, core do
+ core[key] = nil
+ end
+ setmetatable(core, {
+ __index = capsule,
+ __newindex = function(t,key,value)
+ if capsule[key] then
+ print(format("\ninvalid overload '%s' in '%s'",key,tostring(core)))
+ os.exit()
+ else
+ rawset(t,key,value)
+ end
+ end
+ } )
+ end
+end
+
end -- of closure
@@ -3886,8 +4314,8 @@ local storage = utilities.storage
function storage.mark(t)
if not t then
- texio.write_nl("fatal error: storage '%s' cannot be marked",t)
- os.exit()
+ texio.write_nl("fatal error: storage cannot be marked")
+ return -- os.exit()
end
local m = getmetatable(t)
if not m then
@@ -3916,8 +4344,8 @@ end
function storage.checked(t)
if not t then
- texio.write_nl("fatal error: storage '%s' has not been allocated",t)
- os.exit()
+ texio.write_nl("fatal error: storage has not been allocated")
+ return -- os.exit()
end
return t
end
@@ -3946,13 +4374,13 @@ end
-- table namespace ?
-local function f_empty () return "" end -- t,k
-local function f_self (t,k) t[k] = k return k end
-local function f_ignore() end -- t,k,v
+local function f_empty () return "" end -- t,k
+local function f_self (t,k) t[k] = k return k end
+local function f_ignore() end -- t,k,v
-local t_empty = { __index = empty }
-local t_self = { __index = self }
-local t_ignore = { __newindex = ignore }
+local t_empty = { __index = f_empty }
+local t_self = { __index = f_self }
+local t_ignore = { __newindex = f_ignore }
function table.setmetatableindex(t,f)
local m = getmetatable(t)
@@ -3973,6 +4401,7 @@ function table.setmetatableindex(t,f)
setmetatable(t,{ __index = f })
end
end
+ return t
end
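+
+-- for instance:
+--
+-- local t = table.setmetatableindex({ },function(t,k) return k .. "!" end)
+-- print(t.foo) -- foo!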
function table.setmetatablenewindex(t,f)
@@ -3990,6 +4419,7 @@ function table.setmetatablenewindex(t,f)
setmetatable(t,{ __newindex = f })
end
end
+ return t
end
function table.setmetatablecall(t,f)
@@ -3999,6 +4429,7 @@ function table.setmetatablecall(t,f)
else
setmetatable(t,{ __call = f })
end
+ return t
end
function table.setmetatablekey(t,key,value)
@@ -4008,6 +4439,7 @@ function table.setmetatablekey(t,key,value)
setmetatable(t,m)
end
m[key] = value
+ return t
end
function table.getmetatablekey(t,key,value)
@@ -4176,14 +4608,29 @@ utilities = utilities or {}
utilities.lua = utilities.lua or { }
utilities.report = logs and logs.reporter("system") or print
-function utilities.lua.compile(luafile,lucfile,cleanup,strip) -- defaults: cleanup=false strip=true
+local function stupidcompile(luafile,lucfile)
+ local data = io.loaddata(luafile)
+ if data and data ~= "" then
+ data = string.dump(data)
+ if data and data ~= "" then
+ io.savedata(lucfile,data)
+ end
+ end
+end
+
+function utilities.lua.compile(luafile,lucfile,cleanup,strip,fallback) -- defaults: cleanup=false strip=true
utilities.report("lua: compiling %s into %s",luafile,lucfile)
os.remove(lucfile)
local command = "-o " .. string.quoted(lucfile) .. " " .. string.quoted(luafile)
if strip ~= false then
command = "-s " .. command
end
- local done = os.spawn("texluac " .. command) == 0 or os.spawn("luac " .. command) == 0
+ local done = os.spawn("texluac " .. command) == 0 -- or os.spawn("luac " .. command) == 0
+ if not done and fallback then
+ utilities.report("lua: dumping %s into %s (unstripped)",luafile,lucfile)
+ stupidcompile(luafile,lucfile) -- maybe use the stripper we have elsewhere
+ cleanup = false -- keep the original, better to first see how much worse it is
+ end
if done and cleanup == true and lfs.isfile(lucfile) and lfs.isfile(luafile) then
utilities.report("lua: removing %s",luafile)
os.remove(luafile)
@@ -4210,9 +4657,9 @@ if not modules then modules = { } end modules ['util-prs'] = {
license = "see context related readme files"
}
-local P, R, V, C, Ct, Carg = lpeg.P, lpeg.R, lpeg.V, lpeg.C, lpeg.Ct, lpeg.Carg
+local P, R, V, C, Ct, Cs, Carg = lpeg.P, lpeg.R, lpeg.V, lpeg.C, lpeg.Ct, lpeg.Cs, lpeg.Carg
local lpegmatch = lpeg.match
-local concat, format, gmatch = table.concat, string.format, string.gmatch
+local concat, format, gmatch, find = table.concat, string.format, string.gmatch, string.find
local tostring, type, next = tostring, type, next
utilities = utilities or {}
@@ -4240,8 +4687,12 @@ local rbrace = P("}")
local nobrace = 1 - (lbrace+rbrace)
local nested = P { lbrace * (nobrace + V(1))^0 * rbrace }
local spaces = space^0
+local argument = Cs((lbrace/"") * ((nobrace + nested)^0) * (rbrace/""))
+local content = (1-P(-1))^0
-lpeg.patterns.nested = nested
+lpeg.patterns.nested = nested -- no capture
+lpeg.patterns.argument = argument -- argument after e.g. =
+lpeg.patterns.content = content -- rest after e.g =
local value = P(lbrace * C((nobrace + nested)^0) * rbrace) + C((nested + (1-comma))^0)
@@ -4322,9 +4773,15 @@ parsers.patterns.settings_to_array = pattern
-- we could use a weak table as cache
-function parsers.settings_to_array(str)
+function parsers.settings_to_array(str,strict)
if not str or str == "" then
return { }
+ elseif strict then
+ if find(str,"{") then
+ return lpegmatch(pattern,str)
+ else
+ return { str }
+ end
else
return lpegmatch(pattern,str)
end
@@ -4562,61 +5019,55 @@ local names = { }
-- one
local function hook()
- local f = getinfo(2,"f").func
- local n = getinfo(2,"Sn")
--- if n.what == "C" and n.name then print (n.namewhat .. ': ' .. n.name) end
+ local f = getinfo(2) -- "nS"
if f then
- local cf = counters[f]
- if cf == nil then
- counters[f] = 1
- names[f] = n
- else
- counters[f] = cf + 1
- end
- end
-end
-
-local function getname(func)
- local n = names[func]
- if n then
- if n.what == "C" then
- return n.name or '<anonymous>'
+ local n = "unknown"
+ if f.what == "C" then
+ n = f.name or '<anonymous>'
+ if not names[n] then
+ names[n] = format("%42s",n)
+ end
else
-- source short_src linedefined what name namewhat nups func
- local name = n.name or n.namewhat or n.what
- if not name or name == "" then name = "?" end
- return format("%s : %s : %s", n.short_src or "unknown source", n.linedefined or "--", name)
+ n = f.name or f.namewhat or f.what
+ if not n or n == "" then
+ n = "?"
+ end
+ if not names[n] then
+ names[n] = format("%42s : % 5i : %s",n,f.linedefined or 0,f.short_src or "unknown source")
+ end
end
- else
- return "unknown"
+ counters[n] = (counters[n] or 0) + 1
end
end
-function debugger.showstats(printer,threshold)
+function debugger.showstats(printer,threshold) -- hm, something has changed, rubbish now
printer = printer or texio.write or print
threshold = threshold or 0
local total, grandtotal, functions = 0, 0, 0
- printer("\n") -- ugly but ok
- -- table.sort(counters)
- for func, count in next, counters do
- if count > threshold then
- local name = getname(func)
- if not find(name,"for generator") then
- printer(format("%8i %s", count, name))
- total = total + count
- end
+ local dataset = { }
+ for name, count in next, counters do
+ dataset[#dataset+1] = { name, count }
+ end
+ table.sort(dataset,function(a,b) return a[2] == b[2] and b[1] > a[1] or a[2] > b[2] end)
+ for i=1,#dataset do
+ local d = dataset[i]
+ local name = d[1]
+ local count = d[2]
+ if count > threshold and not find(name,"for generator") then -- move up
+ printer(format("%8i %s\n", count, names[name]))
+ total = total + count
end
grandtotal = grandtotal + count
functions = functions + 1
end
- printer(format("functions: %s, total: %s, grand total: %s, threshold: %s\n", functions, total, grandtotal, threshold))
+ printer("\n")
+ printer(format("functions : % 10i\n", functions))
+ printer(format("total : % 10i\n", total))
+ printer(format("grand total: % 10i\n", grandtotal))
+ printer(format("threshold : % 10i\n", threshold))
end
--- two
-
-
--- rest
-
function debugger.savestats(filename,threshold)
local f = io.open(filename,'w')
if f then
@@ -4638,6 +5089,7 @@ end
local is_node = node and node.is_node
+local is_lpeg = lpeg and lpeg.type
function inspect(i) -- global function
local ti = type(i)
@@ -4645,11 +5097,30 @@ function inspect(i) -- global function
table.print(i,"table")
elseif is_node and is_node(i) then
table.print(nodes.astable(i),tostring(i))
+ elseif is_lpeg and is_lpeg(i) then
+ lpeg.print(i)
else
print(tostring(i))
end
end
+-- from the lua book:
+
+function traceback()
+ local level = 1
+ while true do
+ local info = debug.getinfo(level, "Sl")
+ if not info then
+ break
+ elseif info.what == "C" then
+ print(format("%3i : C function",level))
+ else
+ print(format("%3i : [%s]:%d",level,info.short_src,info.currentline))
+ end
+ level = level + 1
+ end
+end
+
end -- of closure
@@ -4668,7 +5139,7 @@ if not modules then modules = { } end modules ['trac-inf'] = {
-- get warnings about assignments. This is more efficient than using rawset
-- and rawget.
-local format = string.format
+local format, lower = string.format, string.lower
local clock = os.gettimeofday or os.clock -- should go in environment
local write_nl = texio.write_nl
@@ -4770,10 +5241,10 @@ function statistics.show(reporter)
-- this code will move
local register = statistics.register
register("luatex banner", function()
- return string.lower(status.banner)
+ return lower(status.banner)
end)
register("control sequences", function()
- return format("%s of %s", status.cs_count, status.hash_size+status.hash_extra)
+ return format("%s of %s + %s", status.cs_count, status.hash_size,status.hash_extra)
end)
register("callbacks", function()
local total, indirect = status.callbacks or 0, status.indirect_callbacks or 0
@@ -4847,7 +5318,7 @@ end
function commands.elapsedtime(name)
stoptiming(name or "whatever")
- tex.sprint(elapsedtime(name or "whatever"))
+ context(elapsedtime(name or "whatever"))
end
@@ -4931,6 +5402,10 @@ local function set(t,what,newvalue)
if type(what) ~= "table" then
return
end
+ if not done then -- catch ... why not set?
+ done = { }
+ t.done = done
+ end
for w, value in next, what do
if value == "" then
value = newvalue
@@ -5167,6 +5642,8 @@ end
if texconfig then
+ -- this happens too late in ini mode but that is no problem
+
local function set(k,v)
v = tonumber(v)
if v then
@@ -5237,7 +5714,7 @@ local report, subreport, status, settarget, setformats, settranslations
local direct, subdirect, writer, pushtarget, poptarget
-if tex and tex.jobname or tex.formatname then
+if tex and (tex.jobname or tex.formatname) then
local valueiskey = { __index = function(t,k) t[k] = k return k end } -- will be helper
@@ -5252,6 +5729,10 @@ if tex and tex.jobname or tex.formatname then
write_nl(target,...)
end
+ newline = function()
+ write_nl(target,"\n")
+ end
+
report = function(a,b,c,...)
if c then
write_nl(target,format("%-15s > %s\n",translations[a],format(formats[b],c,...)))
@@ -5357,6 +5838,10 @@ else
writer = write_nl
+ newline = function()
+ write_nl("\n")
+ end
+
report = function(a,b,c,...)
if c then
write_nl(format("%-15s | %s",a,format(b,c,...)))
@@ -5416,6 +5901,7 @@ logs.settranslations = settranslations
logs.direct = direct
logs.subdirect = subdirect
logs.writer = writer
+logs.newline = newline
-- installer
@@ -5585,21 +6071,58 @@ local real, user, sub
function logs.start_page_number()
real, user, sub = texcount.realpageno, texcount.userpageno, texcount.subpageno
+-- real, user, sub = 0, 0, 0
end
-function logs.stop_page_number()
- if real > 0 then
- if user > 0 then
- if sub > 0 then
- report_pages("flushing realpage %s, userpage %s, subpage %s",real,user,sub)
+local timing = false
+local starttime = nil
+local lasttime = nil
+
+trackers.register("pages.timing", function(v) -- only for myself (diagnostics)
+ starttime = os.clock()
+ timing = true
+end)
+
+function logs.stop_page_number() -- the first page can include the initialization so we omit it from the average
+ if timing then
+ local elapsed, average
+ local stoptime = os.clock()
+ if not lasttime or real < 2 then
+ elapsed = stoptime
+ average = stoptime
+ starttime = stoptime
+ else
+ elapsed = stoptime - lasttime
+ average = (stoptime - starttime) / (real - 1)
+ end
+ lasttime = stoptime
+ if real > 0 then
+ if user > 0 then
+ if sub > 0 then
+ report_pages("flushing realpage %s, userpage %s, subpage %s, time %0.04f / %0.04f",real,user,sub,elapsed,average)
+ else
+ report_pages("flushing realpage %s, userpage %s, time %0.04f / %0.04f",real,user,elapsed,average)
+ end
else
- report_pages("flushing realpage %s, userpage %s",real,user)
+ report_pages("flushing realpage %s, time %0.04f / %0.04f",real,elapsed,average)
end
else
- report_pages("flushing realpage %s",real)
+ report_pages("flushing page, time %0.04f / %0.04f",elapsed,average)
end
else
- report_pages("flushing page")
+ if real > 0 then
+ if user > 0 then
+ if sub > 0 then
+ report_pages("flushing realpage %s, userpage %s, subpage %s",real,user,sub)
+ else
+ report_pages("flushing realpage %s, userpage %s",real,user)
+ end
+ else
+ report_pages("flushing realpage %s",real)
+ end
+ else
+ report_pages("flushing page")
+ end
end
logs.flush()
end
@@ -5681,6 +6204,13 @@ local function reportbanner(t)
end
end
+local function reportversion(t)
+ local banner = t.banner
+ if banner then
+ t.report(banner)
+ end
+end
+
local function reporthelp(t,...)
local helpinfo = t.helpinfo
if type(helpinfo) == "string" then
@@ -5707,6 +6237,7 @@ function logs.application(t)
t.report = logs.reporter(t.name)
t.help = function(...) reportbanner(t) ; reporthelp(t,...) ; reportinfo(t) end
t.identify = function() reportbanner(t) end
+ t.version = function() reportversion(t) end
return t
end
@@ -5772,6 +6303,11 @@ else
end
end
+-- do we still need io.flush then?
+
+io.stdout:setvbuf('no')
+io.stderr:setvbuf('no')
+
end -- of closure
@@ -6277,6 +6813,10 @@ if not modules then modules = { } end modules ['lxml-tab'] = {
-- stripping spaces from e.g. cont-en.xml saves .2 sec runtime so it's not worth the
-- trouble
+-- todo: when serializing optionally remap named entities to hex (if known in char-ent.lua)
+-- maybe when letter -> utf, else name .. then we need an option to the serializer .. a bit
+-- of work so we delay this until we clean up
+
local trace_entities = false trackers.register("xml.entities", function(v) trace_entities = v end)
local report_xml = logs and logs.reporter("xml","core") or function(...) print(format(...)) end
@@ -6301,10 +6841,11 @@ xml = xml or { }
local xml = xml
+local utf = unicode.utf8
local concat, remove, insert = table.concat, table.remove, table.insert
local type, next, setmetatable, getmetatable, tonumber = type, next, setmetatable, getmetatable, tonumber
local format, lower, find, match, gsub = string.format, string.lower, string.find, string.match, string.gsub
-local utfchar = unicode.utf8.char
+local utfchar, utffind, utfgsub = utf.char, utf.find, utf.gsub
local lpegmatch = lpeg.match
local P, S, R, C, V, C, Cs = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.V, lpeg.C, lpeg.Cs
@@ -6417,9 +6958,22 @@ element.</p>
local nsremap, resolvens = xml.xmlns, xml.resolvens
-local stack, top, dt, at, xmlns, errorstr, entities = { }, { }, { }, { }, { }, nil, { }
-local strip, cleanup, utfize, resolve, resolve_predefined, unify_predefined = false, false, false, false, false, false
-local dcache, hcache, acache = { }, { }, { }
+local stack = { }
+local top = { }
+local dt = { }
+local at = { }
+local xmlns = { }
+local errorstr = nil
+local entities = { }
+local strip = false
+local cleanup = false
+local utfize = false
+local resolve_predefined = false
+local unify_predefined = false
+
+local dcache = { }
+local hcache = { }
+local acache = { }
local mt = { }
@@ -6585,7 +7139,7 @@ local predefined_unified = {
[42] = "&quot;",
[47] = "&apos;",
[74] = "&lt;",
- [76] = "&gr;",
+ [76] = "&gt;",
}
local predefined_simplified = {
@@ -6596,6 +7150,58 @@ local predefined_simplified = {
[76] = ">", gt = ">",
}
+local nofprivates = 0xF0000 -- shared but seldom used
+
+local privates_u = { -- unescaped
+ [ [[&]] ] = "&amp;",
+ [ [["]] ] = "&quot;",
+ [ [[']] ] = "&apos;",
+ [ [[<]] ] = "&lt;",
+ [ [[>]] ] = "&gt;",
+}
+
+local privates_p = {
+}
+
+local privates_n = {
+ -- keeps track of defined ones
+}
+
+local function escaped(s)
+ if s == "" then
+ return ""
+ else -- if utffind(s,privates_u) then
+ return (utfgsub(s,".",privates_u))
+ -- else
+ -- return s
+ end
+end
+
+local function unescaped(s)
+ local p = privates_n[s]
+ if not p then
+ nofprivates = nofprivates + 1
+ p = utfchar(nofprivates)
+ privates_n[s] = p
+ s = "&" .. s .. ";" -- todo: use char-ent to map to hex
+ privates_u[p] = s
+ privates_p[p] = s
+ end
+ return p
+end
+
+local function unprivatized(s,resolve)
+ if s == "" then
+ return ""
+ else
+ return (utfgsub(s,".",privates_p))
+ end
+end
+
+xml.privatetoken = unescaped
+xml.unprivatized = unprivatized
+xml.privatecodes = privates_n
+
local function handle_hex_entity(str)
local h = hcache[str]
if not h then
@@ -6637,7 +7243,7 @@ local function handle_dec_entity(str)
if not n then
report_xml("utfize, ignoring dec entity &#%s;",str)
elseif trace_entities then
- report_xml("utfize, converting dec entity &#%s; into %s",str,h)
+ report_xml("utfize, converting dec entity &#%s; into %s",str,d)
end
else
if trace_entities then
@@ -6658,34 +7264,44 @@ local function handle_any_entity(str)
if not a then
a = resolve_predefined and predefined_simplified[str]
if a then
- -- one of the predefined
- elseif type(resolve) == "function" then
- a = resolve(str) or entities[str]
- else
- a = entities[str]
- end
- if a then
if trace_entities then
- report_xml("resolved entity &%s; -> %s (internal)",str,a)
+ report_xml("resolved entity &%s; -> %s (predefined)",str,a)
end
- a = lpegmatch(parsedentity,a) or a
else
- local unknown_any_entity = placeholders.unknown_any_entity
- if unknown_any_entity then
- a = unknown_any_entity(str) or ""
+ if type(resolve) == "function" then
+ a = resolve(str) or entities[str]
+ else
+ a = entities[str]
end
if a then
+ if type(a) == "function" then
+ if trace_entities then
+ report_xml("expanding entity &%s; (function)",str)
+ end
+ a = a(str) or ""
+ end
+ a = lpegmatch(parsedentity,a) or a -- for nested
if trace_entities then
- report_xml("resolved entity &%s; -> %s (external)",str,a)
+ report_xml("resolved entity &%s; -> %s (internal)",str,a)
end
else
- if trace_entities then
- report_xml("keeping entity &%s;",str)
+ local unknown_any_entity = placeholders.unknown_any_entity
+ if unknown_any_entity then
+ a = unknown_any_entity(str) or ""
end
- if str == "" then
- a = "&error;"
+ if a then
+ if trace_entities then
+ report_xml("resolved entity &%s; -> %s (external)",str,a)
+ end
else
- a = "&" .. str .. ";"
+ if trace_entities then
+ report_xml("keeping entity &%s;",str)
+ end
+ if str == "" then
+ a = "&error;"
+ else
+ a = "&" .. str .. ";"
+ end
end
end
end
@@ -6700,18 +7316,25 @@ local function handle_any_entity(str)
else
local a = acache[str]
if not a then
- if trace_entities then
- report_xml("found entity &%s;",str)
- end
a = resolve_predefined and predefined_simplified[str]
if a then
-- one of the predefined
acache[str] = a
+ if trace_entities then
+ report_xml("entity &%s; becomes %s",str,tostring(a))
+ end
elseif str == "" then
+ if trace_entities then
+ report_xml("invalid entity &%s;",str)
+ end
a = "&error;"
acache[str] = a
else
- a = "&" .. str .. ";"
+ if trace_entities then
+ report_xml("entity &%s; is made private",str)
+ end
+ -- a = "&" .. str .. ";"
+ a = unescaped(str)
acache[str] = a
end
end
@@ -6761,7 +7384,7 @@ local value = (squote * Cs((entity + (1 - squote))^0) * squote) + (dq
local endofattributes = slash * close + close -- recovery of flacky html
local whatever = space * name * optionalspace * equal
-local wrongvalue = C(P(1-whatever-close)^1 + P(1-close)^1) / attribute_value_error
+----- wrongvalue = C(P(1-whatever-close)^1 + P(1-close)^1) / attribute_value_error
----- wrongvalue = C(P(1-whatever-endofattributes)^1 + P(1-endofattributes)^1) / attribute_value_error
----- wrongvalue = C(P(1-space-endofattributes)^1) / attribute_value_error
local wrongvalue = Cs(P(entity + (1-space-endofattributes))^1) / attribute_value_error
@@ -6796,6 +7419,8 @@ local function normalentity(k,v ) entities[k] = v end
local function systementity(k,v,n) entities[k] = v end
local function publicentity(k,v,n) entities[k] = v end
+-- todo: separate dtd parser
+
local begindoctype = open * P("!DOCTYPE")
local enddoctype = close
local beginset = P("[")
@@ -6803,17 +7428,22 @@ local endset = P("]")
local doctypename = C((1-somespace-close)^0)
local elementdoctype = optionalspace * P("<!ELEMENT") * (1-close)^0 * close
+local basiccomment = begincomment * ((1 - endcomment)^0) * endcomment
+
local normalentitytype = (doctypename * somespace * value)/normalentity
local publicentitytype = (doctypename * somespace * P("PUBLIC") * somespace * value)/publicentity
local systementitytype = (doctypename * somespace * P("SYSTEM") * somespace * value * somespace * P("NDATA") * somespace * doctypename)/systementity
local entitydoctype = optionalspace * P("<!ENTITY") * somespace * (systementitytype + publicentitytype + normalentitytype) * optionalspace * close
-local doctypeset = beginset * optionalspace * P(elementdoctype + entitydoctype + space)^0 * optionalspace * endset
+-- we accept comments in doctypes
+
+local doctypeset = beginset * optionalspace * P(elementdoctype + entitydoctype + basiccomment + space)^0 * optionalspace * endset
local definitiondoctype= doctypename * somespace * doctypeset
local publicdoctype = doctypename * somespace * P("PUBLIC") * somespace * value * somespace * value * somespace * doctypeset
local systemdoctype = doctypename * somespace * P("SYSTEM") * somespace * value * somespace * doctypeset
local simpledoctype = (1-close)^1 -- * balanced^0
local somedoctype = C((somespace * (publicdoctype + systemdoctype + definitiondoctype + simpledoctype) * optionalspace)^0)
+local somedoctype = C((somespace * (publicdoctype + systemdoctype + definitiondoctype + simpledoctype) * optionalspace)^0)
local instruction = (spacing * begininstruction * someinstruction * endinstruction) / function(...) add_special("@pi@",...) end
local comment = (spacing * begincomment * somecomment * endcomment ) / function(...) add_special("@cm@",...) end
@@ -6845,17 +7475,30 @@ local grammar_unparsed_text = P { "preamble",
children = unparsedtext + V("parent") + emptyelement + comment + cdata + instruction,
}
--- maybe we will add settinsg to result as well
+-- maybe we will add settings to result as well
-local function xmlconvert(data, settings)
- settings = settings or { } -- no_root strip_cm_and_dt given_entities parent_root error_handler
- strip = settings.strip_cm_and_dt
- utfize = settings.utfize_entities
- resolve = settings.resolve_entities
+local function _xmlconvert_(data, settings)
+ settings = settings or { } -- no_root strip_cm_and_dt given_entities parent_root error_handler
+ --
+ strip = settings.strip_cm_and_dt
+ utfize = settings.utfize_entities
+ resolve = settings.resolve_entities
resolve_predefined = settings.resolve_predefined_entities -- in case we have escaped entities
- unify_predefined = settings.unify_predefined_entities -- &#038; -> &amp;
- cleanup = settings.text_cleanup
- stack, top, at, xmlns, errorstr, entities = { }, { }, { }, { }, nil, settings.entities or { }
+ unify_predefined = settings.unify_predefined_entities -- &#038; -> &amp;
+ cleanup = settings.text_cleanup
+ entities = settings.entities or { }
+ --
+ if utfize == nil then
+ settings.utfize_entities = true
+ utfize = true
+ end
+ if resolve_predefined == nil then
+ settings.resolve_predefined_entities = true
+ resolve_predefined = true
+ end
+ --
+ --
+ stack, top, at, xmlns, errorstr = { }, { }, { }, { }, nil
acache, hcache, dcache = { }, { }, { } -- not stored
reported_attribute_errors = { }
if settings.parent_root then
@@ -6893,7 +7536,7 @@ local function xmlconvert(data, settings)
else
errorhandler = errorhandler or xml.errorhandler
if errorhandler then
- xml.errorhandler("load",errorstr)
+ xml.errorhandler(format("load error: %s",errorstr))
end
end
else
@@ -6907,7 +7550,7 @@ local function xmlconvert(data, settings)
local v = rdt[k]
if type(v) == "table" and not v.special then -- always table -)
result.ri = k -- rootindex
-v.__p__ = result -- new, experiment, else we cannot go back to settings, we need to test this !
+ v.__p__ = result -- new, experiment, else we cannot go back to settings, we need to test this !
break
end
end
@@ -6915,16 +7558,42 @@ v.__p__ = result -- new, experiment, else we cannot go back to settings, we nee
if errorstr and errorstr ~= "" then
result.error = true
end
+ result.statistics = {
+ entities = {
+ decimals = dcache,
+ hexadecimals = hcache,
+ names = acache,
+ }
+ }
+ strip, utfize, resolve, resolve_predefined = nil, nil, nil, nil
+ unify_predefined, cleanup, entities = nil, nil, nil
+ stack, top, at, xmlns, errorstr = nil, nil, nil, nil, nil
+ acache, hcache, dcache = nil, nil, nil
+ reported_attribute_errors, mt, errorhandler = nil, nil, nil
return result
end
+-- Because we can have a crash (stack issues) with faulty xml, we wrap this one
+-- in a protector:
+
+function xmlconvert(data,settings)
+ local ok, result = pcall(function() return _xmlconvert_(data,settings) end)
+ if ok then
+ return result
+ else
+ return _xmlconvert_("")
+ end
+end
+
xml.convert = xmlconvert
-function xml.inheritedconvert(data,xmldata)
+function xml.inheritedconvert(data,xmldata) -- xmldata is parent
local settings = xmldata.settings
- settings.parent_root = xmldata -- to be tested
+ if settings then
+ settings.parent_root = xmldata -- to be tested
+ end
-- settings.no_root = true
- local xc = xmlconvert(data,settings)
+ local xc = xmlconvert(data,settings) -- hm, we might need to locate settings
-- xc.settings = nil
-- xc.entities = nil
-- xc.special = nil
@@ -6953,7 +7622,7 @@ function xml.is_valid(root)
return root and not root.error
end
-xml.errorhandler = report
+xml.errorhandler = report_xml
--[[ldx--
<p>We cannot load an <l n='lpeg'/> from a filehandle so we need to load
@@ -7032,18 +7701,15 @@ alternative.</p>
function xml.checkbom(root) -- can be made faster
if root.ri then
- local dt, found = root.dt, false
+ local dt = root.dt
for k=1,#dt do
local v = dt[k]
if type(v) == "table" and v.special and v.tg == "@pi@" and find(v.dt[1],"xml.*version=") then
- found = true
- break
+ return
end
end
- if not found then
- insert(dt, 1, { special=true, ns="", tg="@pi@", dt = { "xml version='1.0' standalone='yes'"} } )
- insert(dt, 2, "\n" )
- end
+ insert(dt, 1, { special=true, ns="", tg="@pi@", dt = { "xml version='1.0' standalone='yes'"} } )
+ insert(dt, 2, "\n" )
end
end
@@ -7054,14 +7720,14 @@ and then handle the lot.</p>
-- new experimental reorganized serialize
-local function verbose_element(e,handlers)
+local function verbose_element(e,handlers) -- options
local handle = handlers.handle
local serialize = handlers.serialize
local ens, etg, eat, edt, ern = e.ns, e.tg, e.at, e.dt, e.rn
local ats = eat and next(eat) and { }
if ats then
for k,v in next, eat do
- ats[#ats+1] = format('%s=%q',k,v)
+ ats[#ats+1] = format('%s=%q',k,escaped(v))
end
end
if ern and trace_entities and ern ~= ens then
@@ -7077,7 +7743,7 @@ local function verbose_element(e,handlers)
for i=1,#edt do
local e = edt[i]
if type(e) == "string" then
- handle(e)
+ handle(escaped(e))
else
serialize(e,handlers)
end
@@ -7098,11 +7764,11 @@ local function verbose_element(e,handlers)
handle("<",etg,">")
end
for i=1,#edt do
- local ei = edt[i]
- if type(ei) == "string" then
- handle(ei)
+ local e = edt[i]
+ if type(e) == "string" then
+ handle(escaped(e)) -- option: hexify escaped entities
else
- serialize(ei,handlers)
+ serialize(e,handlers)
end
end
handle("</",etg,">")
@@ -7137,7 +7803,7 @@ local function verbose_root(e,handlers)
end
local function verbose_text(e,handlers)
- handlers.handle(e)
+ handlers.handle(escaped(e))
end
local function verbose_document(e,handlers)
@@ -7265,20 +7931,33 @@ local result
local xmlfilehandler = newhandlers {
name = "file",
- initialize = function(name) result = io.open(name,"wb") return result end,
- finalize = function() result:close() return true end,
- handle = function(...) result:write(...) end,
+ initialize = function(name)
+ result = io.open(name,"wb")
+ return result
+ end,
+ finalize = function()
+ result:close()
+ return true
+ end,
+ handle = function(...)
+ result:write(...)
+ end,
}
-- no checking on writeability here but not faster either
--
-- local xmlfilehandler = newhandlers {
--- initialize = function(name) io.output(name,"wb") return true end,
--- finalize = function() io.close() return true end,
+-- initialize = function(name)
+-- io.output(name,"wb")
+-- return true
+-- end,
+-- finalize = function()
+-- io.close()
+-- return true
+-- end,
-- handle = io.write,
-- }
-
function xml.save(root,name)
serialize(root,xmlfilehandler,name)
end
@@ -7287,28 +7966,34 @@ local result
local xmlstringhandler = newhandlers {
name = "string",
- initialize = function() result = { } return result end,
- finalize = function() return concat(result) end,
- handle = function(...) result[#result+1] = concat { ... } end
+ initialize = function()
+ result = { }
+ return result
+ end,
+ finalize = function()
+ return concat(result)
+ end,
+ handle = function(...)
+ result[#result+1] = concat { ... }
+ end,
}
local function xmltostring(root) -- 25% overhead due to collecting
- if root then
- if type(root) == 'string' then
- return root
- else -- if next(root) then -- next is faster than type (and >0 test)
- return serialize(root,xmlstringhandler) or ""
- end
+ if not root then
+ return ""
+ elseif type(root) == 'string' then
+ return root
+ else -- if next(root) then -- next is faster than type (and >0 test)
+ return serialize(root,xmlstringhandler) or ""
end
- return ""
end
-local function xmltext(root) -- inline
+local function __tostring(root) -- inline
return (root and xmltostring(root)) or ""
end
initialize_mt = function(root) -- redefinition
- mt = { __tostring = xmltext, __index = root }
+ mt = { __tostring = __tostring, __index = root }
end
xml.defaulthandlers = handlers
@@ -7428,7 +8113,7 @@ xml.tocdata(e,"error")
--ldx]]--
function xml.tocdata(e,wrapper)
- local whatever = xmltostring(e.dt)
+ local whatever = type(e) == "table" and xmltostring(e.dt) or e or ""
if wrapper then
whatever = format("<%s>%s</%s>",wrapper,whatever,wrapper)
end
@@ -7620,8 +8305,8 @@ apply_axis['child'] = function(list)
c = c + 1
collected[c] = dk
dk.ni = k -- refresh
- en = en + 1
- dk.ei = en
+ en = en + 1
+ dk.ei = en
end
end
ll.en = en
@@ -7649,6 +8334,7 @@ local function collect(list,collected,c)
end
return c
end
+
apply_axis['descendant'] = function(list)
local collected, c = { }, 0
for l=1,#list do
@@ -7940,6 +8626,12 @@ local lp_or = P("|") / " or "
local lp_and = P("&") / " and "
local lp_builtin = P (
+ P("text") / "(ll.dt[1] or '')" + -- fragile
+ P("content") / "ll.dt" +
+ -- P("name") / "(ll.ns~='' and ll.ns..':'..ll.tg)" +
+ P("name") / "((ll.ns~='' and ll.ns..':'..ll.tg) or ll.tg)" +
+ P("tag") / "ll.tg" +
+ P("position") / "l" + -- is element in finalizer
P("firstindex") / "1" +
P("lastindex") / "(#ll.__p__.dt or 1)" +
P("firstelement") / "1" +
@@ -7947,15 +8639,11 @@ local lp_builtin = P (
P("first") / "1" +
P("last") / "#list" +
P("rootposition") / "order" +
- P("position") / "l" + -- is element in finalizer
P("order") / "order" +
P("element") / "(ll.ei or 1)" +
P("index") / "(ll.ni or 1)" +
P("match") / "(ll.mi or 1)" +
- P("text") / "(ll.dt[1] or '')" +
- -- P("name") / "(ll.ns~='' and ll.ns..':'..ll.tg)" +
- P("name") / "((ll.ns~='' and ll.ns..':'..ll.tg) or ll.tg)" +
- P("tag") / "ll.tg" +
+ -- P("namespace") / "ll.ns" +
P("ns") / "ll.ns"
) * ((spaces * P("(") * spaces * P(")"))/"")
@@ -7979,11 +8667,11 @@ local lp_function = C(R("az","AZ","__")^1) * P("(") / function(t) -- todo: bett
end
end
-local lparent = lpeg.P("(")
-local rparent = lpeg.P(")")
+local lparent = P("(")
+local rparent = P(")")
local noparent = 1 - (lparent+rparent)
-local nested = lpeg.P{lparent * (noparent + lpeg.V(1))^0 * rparent}
-local value = lpeg.P(lparent * lpeg.C((noparent + nested)^0) * rparent) -- lpeg.P{"("*C(((1-S("()"))+V(1))^0)*")"}
+local nested = P{lparent * (noparent + V(1))^0 * rparent}
+local value = P(lparent * C((noparent + nested)^0) * rparent) -- P{"("*C(((1-S("()"))+V(1))^0)*")"}
local lp_child = Cc("expr.child(ll,'") * R("az","AZ","--","__")^1 * Cc("')")
local lp_number = S("+-") * R("09")^1
@@ -8135,7 +8823,10 @@ local special_1 = P("*") * Cc(register_auto_descendant) * Cc(register_all_nodes
local special_2 = P("/") * Cc(register_auto_self)
local special_3 = P("") * Cc(register_auto_self)
-local pathparser = Ct { "patterns", -- can be made a bit faster by moving pattern outside
+local no_nextcolon = P(-1) + #(1-P(":")) -- newer lpeg needs the P(-1)
+local no_nextlparent = P(-1) + #(1-P("(")) -- newer lpeg needs the P(-1)
+
+local pathparser = Ct { "patterns", -- can be made a bit faster by moving some patterns outside
patterns = spaces * V("protocol") * spaces * (
( V("special") * spaces * P(-1) ) +
@@ -8164,10 +8855,8 @@ local pathparser = Ct { "patterns", -- can be made a bit faster by moving patter
shortcuts = V("shortcuts_a") * (spaces * "/" * spaces * V("shortcuts_a"))^0,
s_descendant_or_self = (P("***/") + P("/")) * Cc(register_descendant_or_self), --- *** is a bonus
- -- s_descendant_or_self = P("/") * Cc(register_descendant_or_self),
s_descendant = P("**") * Cc(register_descendant),
- s_child = P("*") * #(1-P(":")) * Cc(register_child ),
--- s_child = P("*") * #(P("/")+P(-1)) * Cc(register_child ),
+ s_child = P("*") * no_nextcolon * Cc(register_child ),
s_parent = P("..") * Cc(register_parent ),
s_self = P("." ) * Cc(register_self ),
s_root = P("^^") * Cc(register_root ),
@@ -8194,13 +8883,13 @@ local pathparser = Ct { "patterns", -- can be made a bit faster by moving patter
expressions = expression / register_expression,
letters = R("az")^1,
- name = (1-lpeg.S("/[]()|:*!"))^1,
+ name = (1-S("/[]()|:*!"))^1, -- make inline
negate = P("!") * Cc(false),
nodefunction = V("negate") + P("not") * Cc(false) + Cc(true),
nodetest = V("negate") + Cc(true),
nodename = (V("negate") + Cc(true)) * spaces * ((V("wildnodename") * P(":") * V("wildnodename")) + (Cc(false) * V("wildnodename"))),
- wildnodename = (C(V("name")) + P("*") * Cc(false)) * #(1-P("(")),
+ wildnodename = (C(V("name")) + P("*") * Cc(false)) * no_nextlparent,
nodeset = spaces * Ct(V("nodename") * (spaces * P("|") * spaces * V("nodename"))^0) * spaces,
finalizer = (Cb("protocol") * P("/")^-1 * C(V("name")) * arguments * P(-1)) / register_finalizer,
@@ -8364,7 +9053,7 @@ local function profiled_apply(list,parsed,nofparsed,order)
elseif kind == "expression" then
collected = apply_expression(collected,pi.evaluator,order)
elseif kind == "finalizer" then
- collected = pi.finalizer(collected)
+ collected = pi.finalizer(collected) -- no check on # here
p.matched = p.matched + 1
p.finalized = p.finalized + 1
return collected
@@ -8497,9 +9186,9 @@ end
expressions.child = function(e,pattern)
return applylpath(e,pattern) -- todo: cache
end
-expressions.count = function(e,pattern)
+expressions.count = function(e,pattern) -- what if pattern == empty or nil
local collected = applylpath(e,pattern) -- todo: cache
- return (collected and #collected) or 0
+ return pattern and (collected and #collected) or 0
end
-- external
@@ -8508,7 +9197,7 @@ expressions.oneof = function(s,...) -- slow
local t = {...} for i=1,#t do if s == t[i] then return true end end return false
end
expressions.error = function(str)
- xml.errorhandler("unknown function in lpath expression",tostring(str or "?"))
+ xml.errorhandler(format("unknown function in lpath expression: %s",tostring(str or "?")))
return false
end
expressions.undefined = function(s)
@@ -8534,6 +9223,23 @@ expressions.lower = lower
expressions.number = tonumber
expressions.boolean = toboolean
+function expressions.contains(str,pattern)
+ local t = type(str)
+ if t == "string" then
+ if find(str,pattern) then
+ return true
+ end
+ elseif t == "table" then
+ for i=1,#str do
+ local d = str[i]
+ if type(d) == "string" and find(d,pattern) then
+ return true
+ end
+ end
+ end
+ return false
+end
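-- a minimal sketch of what the new helper accepts (the values below are made up,
-- not taken from an actual document):
--
-- expressions.contains("foobar","foo")             -- true, plain string.find match
-- expressions.contains({ "bar", "foobar" },"foo")  -- true, any matching string entry
-- expressions.contains(123,"foo")                  -- false, neither string nor table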
+
-- user interface
local function traverse(root,pattern,handle)
@@ -8856,10 +9562,12 @@ local xml = xml
local xmlconvert, xmlcopy, xmlname = xml.convert, xml.copy, xml.name
local xmlinheritedconvert = xml.inheritedconvert
local xmlapplylpath = xml.applylpath
+local xmlfilter = xml.filter
local type, setmetatable, getmetatable = type, setmetatable, getmetatable
-local insert, remove, fastcopy = table.insert, table.remove, table.fastcopy
-local gmatch, gsub = string.gmatch, string.gsub
+local insert, remove, fastcopy, concat = table.insert, table.remove, table.fastcopy, table.concat
+local gmatch, gsub, format = string.gmatch, string.gsub, string.format
+local utfbyte = utf.byte
local function report(what,pattern,c,e)
report_xml("%s element '%s' (root: '%s', position: %s, index: %s, pattern: %s)",what,xmlname(e),xmlname(e.__p__),c,e.ni,pattern)
@@ -9004,7 +9712,7 @@ local function xmltoelement(whatever,root)
end
local element
if type(whatever) == "string" then
- element = xmlinheritedconvert(whatever,root)
+ element = xmlinheritedconvert(whatever,root) -- beware, not really a root
else
element = whatever -- we assume a table
end
@@ -9031,18 +9739,30 @@ local function copiedelement(element,newparent)
end
function xml.delete(root,pattern)
- local collected = xmlapplylpath(root,pattern)
- if collected then
- for c=1,#collected do
- local e = collected[c]
- local p = e.__p__
- if p then
- if trace_manipulations then
- report('deleting',pattern,c,e)
+ if not pattern or pattern == "" then
+ local p = root.__p__
+ if p then
+ if trace_manipulations then
+ report('deleting',"--",c,root) -- beware: c is not defined in this branch
+ end
+ local d = p.dt
+ remove(d,root.ni)
+ redo_ni(d) -- can be made faster and inlined
+ end
+ else
+ local collected = xmlapplylpath(root,pattern)
+ if collected then
+ for c=1,#collected do
+ local e = collected[c]
+ local p = e.__p__
+ if p then
+ if trace_manipulations then
+ report('deleting',pattern,c,e)
+ end
+ local d = p.dt
+ remove(d,e.ni)
+ redo_ni(d) -- can be made faster and inlined
end
- local d = p.dt
- remove(d,e.ni)
- redo_ni(d) -- can be made faster and inlined
end
end
end
@@ -9105,32 +9825,39 @@ end
local function inject_element(root,pattern,whatever,prepend)
local element = root and xmltoelement(whatever,root)
local collected = element and xmlapplylpath(root,pattern)
- if collected then
- for c=1,#collected do
- local e = collected[c]
- local r = e.__p__
- local d, k, rri = r.dt, e.ni, r.ri
- local edt = (rri and d[rri].dt) or (d and d[k] and d[k].dt)
- if edt then
- local be, af
- local cp = copiedelement(element,e)
- if prepend then
- be, af = cp, edt
- else
- be, af = edt, cp
- end
- local bn = #be
- for i=1,#af do
- bn = bn + 1
- be[bn] = af[i]
- end
- if rri then
- r.dt[rri].dt = be
- else
- d[k].dt = be
- end
- redo_ni(d)
+ local function inject_e(e)
+ local r = e.__p__
+ local d, k, rri = r.dt, e.ni, r.ri
+ local edt = (rri and d[rri].dt) or (d and d[k] and d[k].dt)
+ if edt then
+ local be, af
+ local cp = copiedelement(element,e)
+ if prepend then
+ be, af = cp, edt
+ else
+ be, af = edt, cp
end
+ local bn = #be
+ for i=1,#af do
+ bn = bn + 1
+ be[bn] = af[i]
+ end
+ if rri then
+ r.dt[rri].dt = be
+ else
+ d[k].dt = be
+ end
+ redo_ni(d)
+ end
+ end
+ if not collected then
+ -- nothing
+ elseif collected.tg then
+ -- first or so
+ inject_e(collected)
+ else
+ for c=1,#collected do
+ inject_e(collected[c])
end
end
end
@@ -9138,16 +9865,23 @@ end
local function insert_element(root,pattern,whatever,before) -- todo: element als functie
local element = root and xmltoelement(whatever,root)
local collected = element and xmlapplylpath(root,pattern)
- if collected then
+ local function insert_e(e)
+ local r = e.__p__
+ local d, k = r.dt, e.ni
+ if not before then
+ k = k + 1
+ end
+ insert(d,k,copiedelement(element,r))
+ redo_ni(d)
+ end
+ if not collected then
+ -- nothing
+ elseif collected.tg then
+ -- first or so
+ insert_e(collected)
+ else
for c=1,#collected do
- local e = collected[c]
- local r = e.__p__
- local d, k = r.dt, e.ni
- if not before then
- k = k + 1
- end
- insert(d,k,copiedelement(element,r))
- redo_ni(d)
+ insert_e(collected[c])
end
end
end
@@ -9346,6 +10080,75 @@ function xml.remapname(root, pattern, newtg, newns, newrn)
end
--[[ldx--
+<p>Helper (for q2p).</p>
+--ldx]]--
+
+function xml.cdatatotext(e)
+ local dt = e.dt
+ if #dt == 1 then
+ local first = dt[1]
+ if first.tg == "@cd@" then
+ e.dt = first.dt
+ end
+ else
+ -- maybe option
+ end
+end
+
+xml.builtinentities = table.tohash { "amp", "quot", "apos", "lt", "gt" } -- used often so share
+
+local entities = characters and characters.entities or nil
+local builtinentities = xml.builtinentities
+
+function xml.addentitiesdoctype(root,option) -- we could also have a 'resolve' i.e. inline hex
+ if not entities then
+ require("char-ent")
+ entities = characters.entities
+ end
+ if entities and root and root.tg == "@rt@" and root.statistics then
+ local list = { }
+ local hexify = option == "hexadecimal"
+ for k, v in table.sortedhash(root.statistics.entities.names) do
+ if not builtinentities[k] then
+ local e = entities[k]
+ if not e then
+ e = format("[%s]",k)
+ elseif hexify then
+ e = format("&#%05X;",utfbyte(k))
+ end
+ list[#list+1] = format(" <!ENTITY %s %q >",k,e)
+ end
+ end
+ local dt = root.dt
+ local n = dt[1].tg == "@pi@" and 2 or 1
+ if #list > 0 then
+ insert(dt, n, { "\n" })
+ insert(dt, n, {
+ tg = "@dt@", -- beware, doctype is unparsed
+ dt = { format("Something [\n%s\n] ",concat(list)) },
+ ns = "",
+ special = true,
+ })
+ insert(dt, n, { "\n\n" })
+ else
+ -- insert(dt, n, { table.serialize(root.statistics) })
+ end
+ end
+end
+
+-- local str = [==[
+-- <?xml version='1.0' standalone='yes' ?>
+-- <root>
+-- <a>test &nbsp; test &#123; test</a>
+-- <b><![CDATA[oeps]]></b>
+-- </root>
+-- ]==]
+--
+-- local x = xml.convert(str)
+-- xml.addentitiesdoctype(x,"hexadecimal")
+-- print(x)
+
+--[[ldx--
<p>Here are a few synonyms.</p>
--ldx]]--
@@ -9378,6 +10181,53 @@ xml.remap_tag = xml.remaptag obsolete.remap_tag
xml.remap_name = xml.remapname obsolete.remap_name = xml.remapname
xml.remap_namespace = xml.remapnamespace obsolete.remap_namespace = xml.remapnamespace
+-- new (probably ok)
+
+function xml.cdata(e)
+ if e then
+ local dt = e.dt
+ if dt and #dt == 1 then
+ local first = dt[1]
+ return first.tg == "@cd@" and first.dt[1] or ""
+ end
+ end
+ return ""
+end
+
+function xml.finalizers.xml.cdata(collected)
+ if collected then
+ local e = collected[1]
+ if e then
+ local dt = e.dt
+ if dt and #dt == 1 then
+ local first = dt[1]
+ return first.tg == "@cd@" and first.dt[1] or ""
+ end
+ end
+ end
+ return ""
+end
+
+function xml.insertcomment(e,str,n) -- also insertcdata
+ table.insert(e.dt,n or 1,{
+ tg = "@cm@",
+ ns = "",
+ special = true,
+ at = { },
+ dt = { str },
+ })
+end
+
+function xml.setcdata(e,str) -- also setcomment
+ e.dt = { {
+ tg = "@cd@",
+ ns = "",
+ special = true,
+ at = { },
+ dt = { str },
+ } }
+end
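-- a minimal usage sketch of the new helpers (assuming e is an element coming from
-- xml.convert or a finalizer; the strings are made-up values):
--
-- xml.setcdata(e,"raw <payload>")       -- e now holds a single @cd@ child
-- print(xml.cdata(e))                   -- prints "raw <payload>"
-- xml.insertcomment(e,"generated",1)    -- prepends a @cm@ node to e.dt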
+
end -- of closure
@@ -9391,15 +10241,17 @@ if not modules then modules = { } end modules ['lxml-xml'] = {
license = "see context related readme files"
}
-local concat = string.concat
+local concat = table.concat
+local find = string.find
local xml = xml
-local finalizers = xml.finalizers.xml
-local xmlfilter = xml.filter -- we could inline this one for speed
-local xmltostring = xml.tostring
-local xmlserialize = xml.serialize
-local xmlcollected = xml.collected
+local finalizers = xml.finalizers.xml
+local xmlfilter = xml.filter -- we could inline this one for speed
+local xmltostring = xml.tostring
+local xmlserialize = xml.serialize
+local xmlcollected = xml.collected
+local xmlnewhandlers = xml.newhandlers
local function first(collected) -- wrong ?
return collected and collected[1]
@@ -9413,6 +10265,21 @@ local function all(collected)
return collected
end
+-- local function reverse(collected)
+-- if collected then
+-- local nc = #collected
+-- if nc > 0 then
+-- local reversed, r = { }, 0
+-- for c=nc,1,-1 do
+-- r = r + 1
+-- reversed[r] = collected[c]
+-- end
+-- return reversed
+-- else
+-- return collected
+-- end
+-- end
+-- end
local reverse = table.reversed
@@ -9429,34 +10296,37 @@ local function att(id,name)
end
local function count(collected)
- return (collected and #collected) or 0
+ return collected and #collected or 0
end
local function position(collected,n)
- if collected then
- n = tonumber(n) or 0
- if n < 0 then
- return collected[#collected + n + 1]
- elseif n > 0 then
- return collected[n]
- else
- return collected[1].mi or 0
- end
+ if not collected then
+ return 0
+ end
+ local nc = #collected
+ if nc == 0 then
+ return 0
+ end
+ n = tonumber(n) or 0
+ if n < 0 then
+ return collected[nc + n + 1]
+ elseif n > 0 then
+ return collected[n]
+ else
+ return collected[1].mi or 0
end
end
local function match(collected)
- return (collected and collected[1].mi) or 0 -- match
+ return collected and #collected > 0 and collected[1].mi or 0 -- match
end
local function index(collected)
- if collected then
- return collected[1].ni
- end
+ return collected and #collected > 0 and collected[1].ni or 0 -- 0 is new
end
local function attributes(collected,arguments)
- if collected then
+ if collected and #collected > 0 then
local at = collected[1].at
if arguments then
return at[arguments]
@@ -9467,7 +10337,7 @@ local function attributes(collected,arguments)
end
local function chainattribute(collected,arguments) -- todo: optional levels
- if collected then
+ if collected and #collected > 0 then
local e = collected[1]
while e do
local at = e.at
@@ -9485,108 +10355,169 @@ local function chainattribute(collected,arguments) -- todo: optional levels
return ""
end
-local function raw(collected) -- hybrid
- if collected then
+local function raw(collected) -- hybrid (not much different from text so it might go)
+ if collected and #collected > 0 then
local e = collected[1] or collected
- return (e and xmlserialize(e)) or "" -- only first as we cannot concat function
+ return e and xmltostring(e) or "" -- only first as we cannot concat function
else
return ""
end
end
+--
+
+local xmltexthandler = xmlnewhandlers {
+ name = "string",
+ initialize = function()
+ result = { }
+ return result
+ end,
+ finalize = function()
+ return concat(result)
+ end,
+ handle = function(...)
+ result[#result+1] = concat { ... }
+ end,
+ escape = false,
+}
+
+local function xmltotext(root)
+ local dt = root.dt
+ if not dt then
+ return ""
+ end
+ local nt = #dt -- string or table
+ if nt == 0 then
+ return ""
+ elseif nt == 1 and type(dt[1]) == "string" then
+ return dt[1] -- no escaping of " ' < > &
+ else
+ return xmlserialize(root,xmltexthandler) or ""
+ end
+end
+
+--
+
local function text(collected) -- hybrid
- if collected then
- local e = collected[1] or collected
- return (e and xmltostring(e.dt)) or ""
+ if collected then -- no # test here !
+ local e = collected[1] or collected -- why fallback to element, how about cdata
+ return e and xmltotext(e) or ""
else
return ""
end
end
local function texts(collected)
- if collected then
- local t, n = { }, 0
- for c=1,#collected do
- local e = collected[c]
- if e and e.dt then
- n = n + 1
- t[n] = e.dt
- end
+ if not collected then
+ return { } -- why no nil
+ end
+ local nc = #collected
+ if nc == 0 then
+ return { } -- why no nil
+ end
+ local t, n = { }, 0
+ for c=1,nc do
+ local e = collected[c]
+ if e and e.dt then
+ n = n + 1
+ t[n] = e.dt
end
- return t
end
+ return t
end
local function tag(collected,n)
- if collected then
- local c
- if n == 0 or not n then
- c = collected[1]
- elseif n > 1 then
- c = collected[n]
- else
- c = collected[#collected-n+1]
- end
- return c and c.tg
+ if not collected then
+ return
+ end
+ local nc = #collected
+ if nc == 0 then
+ return
end
+ local c
+ if n == 0 or not n then
+ c = collected[1]
+ elseif n > 1 then
+ c = collected[n]
+ else
+ c = collected[nc-n+1]
+ end
+ return c and c.tg
end
local function name(collected,n)
- if collected then
- local c
- if n == 0 or not n then
- c = collected[1]
- elseif n > 1 then
- c = collected[n]
- else
- c = collected[#collected-n+1]
- end
- if c then
- if c.ns == "" then
- return c.tg
- else
- return c.ns .. ":" .. c.tg
- end
- end
+ if not collected then
+ return
+ end
+ local nc = #collected
+ if nc == 0 then
+ return
+ end
+ local c
+ if n == 0 or not n then
+ c = collected[1]
+ elseif n > 1 then
+ c = collected[n]
+ else
+ c = collected[nc-n+1]
+ end
+ if not c then
+ -- sorry
+ elseif c.ns == "" then
+ return c.tg
+ else
+ return c.ns .. ":" .. c.tg
end
end
local function tags(collected,nonamespace)
- if collected then
- local t, n = { }, 0
- for c=1,#collected do
- local e = collected[c]
- local ns, tg = e.ns, e.tg
- n = n + 1
- if nonamespace or ns == "" then
- t[n] = tg
- else
- t[n] = ns .. ":" .. tg
- end
+ if not collected then
+ return
+ end
+ local nc = #collected
+ if nc == 0 then
+ return
+ end
+ local t, n = { }, 0
+ for c=1,nc do
+ local e = collected[c]
+ local ns, tg = e.ns, e.tg
+ n = n + 1
+ if nonamespace or ns == "" then
+ t[n] = tg
+ else
+ t[n] = ns .. ":" .. tg
end
- return t
end
+ return t
end
-local function empty(collected)
- if collected then
- for c=1,#collected do
- local e = collected[c]
- if e then
- local edt = e.dt
- if edt then
- local n = #edt
- if n == 1 then
- local edk = edt[1]
- local typ = type(edk)
- if typ == "table" then
- return false
- elseif edk ~= "" then -- maybe an extra tester for spacing only
- return false
- end
- elseif n > 1 then
+local function empty(collected,spacesonly)
+ if not collected then
+ return true
+ end
+ local nc = #collected
+ if nc == 0 then
+ return true
+ end
+ for c=1,nc do
+ local e = collected[c]
+ if e then
+ local edt = e.dt
+ if edt then
+ local n = #edt
+ if n == 1 then
+ local edk = edt[1]
+ local typ = type(edk)
+ if typ == "table" then
+ return false
+ elseif edk ~= "" then
+ return false
+ elseif spacesonly and not find(edk,"%S") then
return false
end
+ elseif n > 1 then
+ return false
end
end
end
@@ -9641,14 +10572,14 @@ function xml.raw(id,pattern)
end
end
-function xml.text(id,pattern)
+function xml.text(id,pattern) -- brrr either content or element (when cdata)
if pattern then
-- return text(xmlfilter(id,pattern))
local collected = xmlfilter(id,pattern)
- return (collected and xmltostring(collected[1].dt)) or ""
+ return collected and #collected > 0 and xmltotext(collected[1]) or ""
elseif id then
-- return text(id)
- return xmltostring(id.dt) or ""
+ return xmltotext(id) or ""
else
return ""
end
@@ -9656,6 +10587,8 @@ end
xml.content = text
+--
+
function xml.position(id,pattern,n) -- element
return position(xmlfilter(id,pattern),n)
end
@@ -9664,8 +10597,8 @@ function xml.match(id,pattern) -- number
return match(xmlfilter(id,pattern))
end
-function xml.empty(id,pattern)
- return empty(xmlfilter(id,pattern))
+function xml.empty(id,pattern,spacesonly)
+ return empty(xmlfilter(id,pattern),spacesonly)
end
xml.all = xml.filter
@@ -9719,7 +10652,7 @@ if not modules then modules = { } end modules ['data-ini'] = {
license = "see context related readme files",
}
-local gsub, find, gmatch = string.gsub, string.find, string.gmatch
+local gsub, find, gmatch, char = string.gsub, string.find, string.gmatch, string.char
local concat = table.concat
local next, type = next, type
@@ -9781,7 +10714,7 @@ do
local homedir = osgetenv(ostype == "windows" and 'USERPROFILE' or 'HOME') or ''
if not homedir or homedir == "" then
- homedir = string.char(127) -- we need a value, later we wil trigger on it
+ homedir = char(127) -- we need a value, later we will trigger on it
end
homedir = file.collapsepath(homedir)
@@ -9954,7 +10887,7 @@ if not modules then modules = { } end modules ['data-exp'] = {
license = "see context related readme files",
}
-local format, find, gmatch, lower = string.format, string.find, string.gmatch, string.lower
+local format, find, gmatch, lower, char, sub = string.format, string.find, string.gmatch, string.lower, string.char, string.sub
local concat, sort = table.concat, table.sort
local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
local Ct, Cs, Cc, P, C, S = lpeg.Ct, lpeg.Cs, lpeg.Cc, lpeg.P, lpeg.C, lpeg.S
@@ -9974,19 +10907,6 @@ local resolvers = resolvers
-- all, when working on the main resolver code, I don't want to scroll
-- past this every time. See data-obs.lua for the gsub variant.
--- {a,b,c,d}
--- a,b,c/{p,q,r},d
--- a,b,c/{p,q,r}/d/{x,y,z}//
--- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
--- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
--- a{b,c}{d,e}f
--- {a,b,c,d}
--- {a,b,c/{p,q,r},d}
--- {a,b,c/{p,q,r}/d/{x,y,z}//}
--- {a,b,c/{p,q/{x,y,z}},d/{p,q,r}}
--- {a,b,c/{p,q/{x,y,z},w}v,d/{p,q,r}}
--- {$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,.local,}/web2c}
-
local function f_first(a,b)
local t, n = { }, 0
for s in gmatch(b,"[^,]+") do
@@ -10043,12 +10963,9 @@ local function splitpathexpr(str, newlist, validate) -- I couldn't resist lpeggi
str = lpegmatch(stripper_1,str)
if validate then
for s in gmatch(str,"[^,]+") do
- local restoreslashes = false
- if find(s, "//$") then restoreslashes = true end
s = validate(s)
if s then
n = n + 1 ; t[n] = s
- if restoreslashes and not find(s, "//$") then t[n] = t[n] .. '//' end
end
end
else
@@ -10081,41 +10998,56 @@ function resolvers.expandedpathfromlist(pathlist)
return newlist
end
+-- {a,b,c,d}
+-- a,b,c/{p,q,r},d
+-- a,b,c/{p,q,r}/d/{x,y,z}//
+-- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
+-- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
+-- a{b,c}{d,e}f
+-- {a,b,c,d}
+-- {a,b,c/{p,q,r},d}
+-- {a,b,c/{p,q,r}/d/{x,y,z}//}
+-- {a,b,c/{p,q/{x,y,z}},d/{p,q,r}}
+-- {a,b,c/{p,q/{x,y,z},w}v,d/{p,q,r}}
+-- {$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,.local,}/web2c}
+
local cleanup = lpeg.replacer {
{ "!" , "" },
{ "\\" , "/" },
}
-local homedir
-
-function resolvers.cleanpath(str)
- if not homedir then
- homedir = lpegmatch(cleanup,environment.homedir or "")
- if homedir == string.char(127) or homedir == "" or not lfs.isdir(homedir) then
- if trace_expansions then
- report_expansions("no home dir set, ignoring dependent paths")
- end
- function resolvers.cleanpath(str)
- if find(str,"~") then
- return "" -- special case
- else
- return str and lpegmatch(cleanup,str)
- end
- end
- else
- cleanup = lpeg.replacer {
- { "!" , "" },
- { "\\" , "/" },
- { "~" , homedir },
- }
- function resolvers.cleanpath(str)
- return str and lpegmatch(cleanup,str)
+function resolvers.cleanpath(str) -- tricky, maybe only simple paths
+ local doslashes = (P("\\")/"/" + 1)^0
+ local donegation = (P("!") /"" )^0
+ local homedir = lpegmatch(Cs(donegation * doslashes),environment.homedir or "")
+ if homedir == "~" or homedir == "" or not lfs.isdir(homedir) then
+ if trace_expansions then
+ report_expansions("no home dir set, ignoring dependent paths")
+ end
+ function resolvers.cleanpath(str)
+ if not str or find(str,"~") then
+ return "" -- special case
+ else
+ return lpegmatch(cleanup,str)
end
end
+ else
+ local dohome = ((P("~")+P("$HOME"))/homedir)^0
+ local cleanup = Cs(donegation * dohome * doslashes)
+ function resolvers.cleanpath(str)
+ return str and lpegmatch(cleanup,str) or ""
+ end
end
return resolvers.cleanpath(str)
end
+-- print(resolvers.cleanpath(""))
+-- print(resolvers.cleanpath("!"))
+-- print(resolvers.cleanpath("~"))
+-- print(resolvers.cleanpath("~/test"))
+-- print(resolvers.cleanpath("!~/test"))
+-- print(resolvers.cleanpath("~/test~test"))
+
-- This one strips quotes and funny tokens.
local expandhome = P("~") / "$HOME" -- environment.homedir
@@ -10140,8 +11072,8 @@ end
local cache = { }
----- splitter = Ct(lpeg.splitat(S(ostype == "windows" and ";" or ":;"))) -- maybe add ,
-local splitter = Ct(lpeg.splitat(";")) -- as we move towards urls, prefixes and use tables we no longer do :
+----- splitter = lpeg.tsplitat(S(ostype == "windows" and ";" or ":;")) -- maybe add ,
+local splitter = lpeg.tsplitat(";") -- as we move towards urls, prefixes and use tables we no longer do :
local backslashswapper = lpeg.replacer("\\","/")
@@ -10201,13 +11133,22 @@ end
-local weird = P(".")^1 + lpeg.anywhere(S("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t"))
+-- a lot of this caching can be stripped away when we have ssd's everywhere
+--
+-- we could cache all the (sub)paths here if needed
local attributes, directory = lfs.attributes, lfs.dir
+local weird = P(".")^1 + lpeg.anywhere(S("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t"))
+local timer = { }
+local scanned = { }
+local nofscans = 0
+local scancache = { }
+
local function scan(files,spec,path,n,m,r)
- local full = (path == "" and spec) or (spec .. path .. '/')
- local dirs, nofdirs = { }, 0
+ local full = (path == "" and spec) or (spec .. path .. '/')
+ local dirs = { }
+ local nofdirs = 0
for name in directory(full) do
if not lpegmatch(weird,name) then
local mode = attributes(full..name,'mode')
@@ -10245,25 +11186,122 @@ local function scan(files,spec,path,n,m,r)
files, n, m, r = scan(files,spec,dirs[i],n,m,r)
end
end
+ scancache[sub(full,1,-2)] = files
return files, n, m, r
end
-function resolvers.scanfiles(path,branch)
+local fullcache = { }
+
+function resolvers.scanfiles(path,branch,usecache)
+ statistics.starttiming(timer)
+ local realpath = resolvers.resolve(path) -- no shortcut
+ if usecache then
+ local files = fullcache[realpath]
+ if files then
+ if trace_locating then
+ report_expansions("using caches scan of path '%s', branch '%s'",path,branch or path)
+ end
+ return files
+ end
+ end
if trace_locating then
- report_expansions("scanning path '%s', branch '%s'",path, branch or path)
+ report_expansions("scanning path '%s', branch '%s'",path,branch or path)
end
- local realpath = resolvers.resolve(path) -- no shortcut
local files, n, m, r = scan({ },realpath .. '/',"",0,0,0)
- files.__path__ = path -- can be selfautoparent:texmf-whatever
- files.__files__ = n
- files.__directories__ = m
- files.__remappings__ = r
+ files.__path__ = path -- can be selfautoparent:texmf-whatever
+ files.__files__ = n
+ files.__directories__ = m
+ files.__remappings__ = r
if trace_locating then
report_expansions("%s files found on %s directories with %s uppercase remappings",n,m,r)
end
+ if usecache then
+ scanned[#scanned+1] = realpath
+ fullcache[realpath] = files
+ end
+ nofscans = nofscans + 1
+ statistics.stoptiming(timer)
+ return files
+end
+
+local function simplescan(files,spec,path) -- first match only, no map and such
+ local full = (path == "" and spec) or (spec .. path .. '/')
+ local dirs = { }
+ local nofdirs = 0
+ for name in directory(full) do
+ if not lpegmatch(weird,name) then
+ local mode = attributes(full..name,'mode')
+ if mode == 'file' then
+ if not files[name] then
+ -- only first match
+ files[name] = path
+ end
+ elseif mode == 'directory' then
+ nofdirs = nofdirs + 1
+ if path ~= "" then
+ dirs[nofdirs] = path..'/'..name
+ else
+ dirs[nofdirs] = name
+ end
+ end
+ end
+ end
+ if nofdirs > 0 then
+ sort(dirs)
+ for i=1,nofdirs do
+ files = simplescan(files,spec,dirs[i])
+ end
+ end
+ return files
+end
+
+local simplecache = { }
+local nofsharedscans = 0
+
+function resolvers.simplescanfiles(path,branch,usecache)
+ statistics.starttiming(timer)
+ local realpath = resolvers.resolve(path) -- no shortcut
+ if usecache then
+ local files = simplecache[realpath]
+ if not files then
+ files = scancache[realpath]
+ if files then
+ nofsharedscans = nofsharedscans + 1
+ end
+ end
+ if files then
+ if trace_locating then
+ report_expansions("using caches scan of path '%s', branch '%s'",path,branch or path)
+ end
+ return files
+ end
+ end
+ if trace_locating then
+ report_expansions("scanning path '%s', branch '%s'",path,branch or path)
+ end
+ local files = simplescan({ },realpath .. '/',"")
+ if trace_locating then
+ report_expansions("%s files found",table.count(files))
+ end
+ if usecache then
+ scanned[#scanned+1] = realpath
+ simplecache[realpath] = files
+ end
+ nofscans = nofscans + 1
+ statistics.stoptiming(timer)
return files
end
+function resolvers.scandata()
+ table.sort(scanned)
+ return {
+ n = nofscans,
+ shared = nofsharedscans,
+ time = statistics.elapsedtime(timer),
+ paths = scanned,
+ }
+end
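-- a minimal usage sketch of the cached variants (the path is hypothetical; passing
-- usecache=true keeps the result in the local caches shown above):
--
-- local files = resolvers.scanfiles("/data/texmf-project",nil,true)       -- full scan
-- local first = resolvers.simplescanfiles("/data/texmf-project",nil,true) -- first-match only
-- local stats = resolvers.scandata() -- { n = ..., shared = ..., time = ..., paths = { ... } }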
+
end -- of closure
@@ -10361,12 +11399,12 @@ local relations = allocate { -- todo: handlers also here
mp = {
names = { "mp" },
variable = 'MPINPUTS',
- suffixes = { 'mp' },
+ suffixes = { 'mp', 'mpvi', 'mpiv', 'mpii' },
},
tex = {
names = { "tex" },
variable = 'TEXINPUTS',
- suffixes = { 'tex', "mkiv", "mkiv", "mkii" },
+ suffixes = { 'tex', "mkvi", "mkiv", "mkii" },
},
icc = {
names = { "icc", "icc profile", "icc profiles" },
@@ -10503,7 +11541,7 @@ end
resolvers.updaterelations() -- push this in the metatable -> newindex
local function simplified(t,k)
- return rawget(t,lower(gsub(k," ","")))
+ return k and rawget(t,lower(gsub(k," ",""))) or nil
end
setmetatableindex(formats, simplified)
@@ -10589,6 +11627,7 @@ luatools with a recache feature.</p>
--ldx]]--
local format, lower, gsub, concat = string.format, string.lower, string.gsub, table.concat
+local serialize, serializetofile = table.serialize, table.tofile
local mkdirs, isdir = dir.mkdirs, lfs.isdir
local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
@@ -10651,11 +11690,7 @@ local function identify()
end
elseif not writable and caches.force then
local cacheparent = file.dirname(cachepath)
- -- TH: added 'or true' for deeply buried non-existent caches.
- -- file.is_writable() is not really important here, since this
- -- branch is only checked for as-yet non-existent paths, and
- -- it guards against mkdirs() failing, anyway.
- if file.is_writable(cacheparent) or true then
+ if file.is_writable(cacheparent) and true then -- we go on anyway (needed for mojca's kind of paths)
if not caches.ask or io.ask(format("\nShould I create the cache path %s?",cachepath), "no", { "yes", "no" }) == "yes" then
mkdirs(cachepath)
if isdir(cachepath) and file.is_writable(cachepath) then
@@ -10746,7 +11781,7 @@ function caches.usedpaths()
end
function caches.configfiles()
- return table.concat(resolvers.instance.specification,";")
+ return concat(resolvers.instance.specification,";")
end
function caches.hashed(tree)
@@ -10870,9 +11905,9 @@ function caches.savedata(filepath,filename,data,raw)
end
data.cache_uuid = os.uuid()
if caches.direct then
- file.savedata(tmaname,table.serialize(data,true,saveoptions))
+ file.savedata(tmaname,serialize(data,true,saveoptions))
else
- table.tofile(tmaname,data,true,saveoptions)
+ serializetofile(tmaname,data,true,saveoptions)
end
utilities.lua.compile(tmaname,tmcname)
end
@@ -10939,7 +11974,7 @@ function caches.savecontent(cachename,dataname,content)
content = content,
uuid = os.uuid(),
}
- local ok = io.savedata(luaname,table.serialize(data,true))
+ local ok = io.savedata(luaname,serialize(data,true))
if ok then
if trace_locating then
report_resolvers("category '%s', cachename '%s' saved in '%s'",dataname,cachename,luaname)
@@ -11112,7 +12147,7 @@ if not modules then modules = { } end modules ['data-res'] = {
-- instance but for practical purposes we now avoid this and use a
-- instance variable. We always have one instance active (sort of global).
--- todo: cache:/// home:///
+-- todo: cache:/// home:/// selfautoparent:/// (sometime end 2012)
local format, gsub, find, lower, upper, match, gmatch = string.format, string.gsub, string.find, string.lower, string.upper, string.match, string.gmatch
local concat, insert, sortedkeys = table.concat, table.insert, table.sortedkeys
@@ -11155,7 +12190,58 @@ resolvers.criticalvars = allocate { "SELFAUTOLOC", "SELFAUTODIR", "SELFAUTOPARE
resolvers.luacnfname = 'texmfcnf.lua'
resolvers.luacnfstate = "unknown"
-resolvers.luacnfspec = '{selfautodir:,selfautoparent:};{selfautodir:,selfautoparent:}{/share,}/texmf{-local,};{selfautodir:,selfautoparent:}{/share,}/texmf{-local,}/web2c'
+-- The web2c tex binaries as well as kpse have built in paths for the configuration
+-- files and there can be a depressing truckload of them. This is actually the weak
+-- spot of a distribution. So we don't want:
+--
+-- resolvers.luacnfspec = '{$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,}/web2c}'
+--
+-- but instead use:
+--
+-- resolvers.luacnfspec = 'selfautoparent:{/texmf{-local,}{,/web2c}}'
+--
+-- which does not make texlive happy as there is a texmf-local tree one level up
+-- (sigh), so we need this. (We can assume web2c as mkiv does not run on older
+-- texlives anyway.)
+--
+-- texlive:
+--
+-- selfautodir:
+-- selfautoparent:
+-- selfautodir:share/texmf-local/web2c
+-- selfautodir:share/texmf/web2c
+-- selfautodir:texmf-local/web2c
+-- selfautodir:texmf/web2c
+-- selfautoparent:share/texmf-local/web2c
+-- selfautoparent:share/texmf/web2c
+-- selfautoparent:texmf-local/web2c
+-- selfautoparent:texmf/web2c
+--
+-- minimals:
+--
+-- home:texmf/web2c
+-- selfautoparent:texmf-local/web2c
+-- selfautoparent:texmf-context/web2c
+-- selfautoparent:texmf/web2c
+
+if this_is_texlive then
+ -- resolvers.luacnfspec = '{selfautodir:,selfautoparent:}{,{/share,}/texmf{-local,}/web2c}'
+ -- resolvers.luacnfspec = '{selfautodir:{/share,}/texmf-local/web2c,selfautoparent:{/share,}/texmf{-local,}/web2c}'
+ -- resolvers.luacnfspec = 'selfautodir:/texmf-local/web2c;selfautoparent:/texmf{-local,}/web2c'
+ resolvers.luacnfspec = 'selfautodir:;selfautoparent:;{selfautodir:,selfautoparent:}{/share,}/texmf{-local,}/web2c'
+else
+ resolvers.luacnfspec = 'home:texmf/web2c;selfautoparent:texmf{-local,-context,}/web2c'
+end
+
+-- which (as we want users to use the web2c path) can be simplified to this:
+--
+-- if environment and environment.ownpath and string.find(environment.ownpath,"[\\/]texlive[\\/]") then
+-- resolvers.luacnfspec = 'selfautodir:/texmf-local/web2c,selfautoparent:/texmf-local/web2c,selfautoparent:/texmf/web2c'
+-- else
+-- resolvers.luacnfspec = 'selfautoparent:/texmf-local/web2c,selfautoparent:/texmf/web2c'
+-- end
+
+
local unset_variable = "unset"
@@ -11177,7 +12263,7 @@ local instance = resolvers.instance or nil -- the current one (fast access)
function resolvers.setenv(key,value,raw)
if instance then
-- this one will be consulted first when we stay inside
- -- the current environment
+ -- the current environment (prefixes are not resolved here)
instance.environment[key] = value
-- we feed back into the environment, and as this is used
-- by other applications (via os.execute) we need to make
@@ -11257,6 +12343,7 @@ function resolvers.newinstance() -- todo: all vars will become lowercase and alp
remember = true,
diskcache = true,
renewcache = false,
+ renewtree = false,
loaderror = false,
savelists = true,
pattern = nil, -- lists
@@ -11348,7 +12435,7 @@ local function makepathexpression(str)
end
end
-local function reportcriticalvariables()
+local function reportcriticalvariables(cnfspec)
if trace_locating then
for i=1,#resolvers.criticalvars do
local k = resolvers.criticalvars[i]
@@ -11356,6 +12443,14 @@ local function reportcriticalvariables()
report_resolving("variable '%s' set to '%s'",k,v)
end
report_resolving()
+ if cnfspec then
+ if type(cnfspec) == "table" then
+ report_resolving("using configuration specification '%s'",concat(cnfspec,","))
+ else
+ report_resolving("using configuration specification '%s'",cnfspec)
+ end
+ end
+ report_resolving()
end
reportcriticalvariables = function() end
end
@@ -11370,7 +12465,7 @@ local function identify_configuration_files()
else
resolvers.luacnfstate = "environment"
end
- reportcriticalvariables()
+ reportcriticalvariables(cnfspec)
local cnfpaths = expandedpathfromlist(resolvers.splitpath(cnfspec))
local luacnfname = resolvers.luacnfname
for i=1,#cnfpaths do
@@ -11406,6 +12501,19 @@ local function load_configuration_files()
if blob then
local setups = instance.setups
local data = blob()
+ local parent = data and data.parent
+ if parent then
+ local filename = filejoin(pathname,parent)
+ local realname = resolvers.resolve(filename) -- no shortcut
+ local blob = loadfile(realname)
+ if blob then
+ local parentdata = blob()
+ if parentdata then
+ report_resolving("loading configuration file '%s'",filename)
+ data = table.merged(parentdata,data)
+ end
+ end
+ end
data = data and data.content
if data then
if trace_locating then
@@ -11495,11 +12603,6 @@ local function locate_file_databases()
local runtime = stripped == path
path = resolvers.cleanpath(path)
local spec = resolvers.splitmethod(stripped)
- -- TH Original did this the other way around (elseif and if tests
- -- reversed) but then the else branch was never reached, as 'runtime'
- -- would effectively be ignored. In turn, that meant that the wrong
- -- locator method was used, such that tree: and file: were treated
- -- identically (cached and no runtime search).
if runtime and (spec.noscheme or spec.scheme == "file") then
stripped = "tree:///" .. stripped
elseif spec.scheme == "cache" or spec.scheme == "file" then
@@ -11507,7 +12610,7 @@ local function locate_file_databases()
end
if trace_locating then
if runtime then
- report_resolving("locating list of '%s' (runtime)",path)
+ report_resolving("locating list of '%s' (runtime) (%s)",path,stripped)
else
report_resolving("locating list of '%s' (cached)",path)
end
@@ -11551,6 +12654,39 @@ local function save_file_databases() -- will become cachers
end
end
+function resolvers.renew(hashname)
+ if hashname and hashname ~= "" then
+ local expanded = resolvers.expansion(hashname) or ""
+ if expanded ~= "" then
+ if trace_locating then
+ report_resolving("identifying tree '%s' from '%s'",expanded,hashname)
+ end
+ hashname = expanded
+ else
+ if trace_locating then
+ report_resolving("identifying tree '%s'",hashname)
+ end
+ end
+ local realpath = resolvers.resolve(hashname)
+ if lfs.isdir(realpath) then
+ if trace_locating then
+ report_resolving("using path '%s'",realpath)
+ end
+ methodhandler('generators',hashname)
+ -- could be shared
+ local content = instance.files[hashname]
+ caches.collapsecontent(content)
+ if trace_locating then
+ report_resolving("saving tree '%s'",hashname)
+ end
+ caches.savecontent(hashname,"files",content)
+ -- till here
+ else
+ report_resolving("invalid path '%s'",realpath)
+ end
+ end
+end
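-- a minimal usage sketch (the tree name is hypothetical; anything that expands or
-- resolves to an existing directory should work):
--
-- resolvers.renew("selfautoparent:texmf-local") -- rescan the tree and resave its file database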
+
local function load_databases()
locate_file_databases()
if instance.diskcache and not instance.renewcache then
@@ -11838,6 +12974,7 @@ local function collect_files(names)
if dname == "" or find(dname,"^%.") then
dname = false
else
+dname = gsub(dname,"*","%.*")
dname = "/" .. dname .. "$"
end
local hashes = instance.hashes
@@ -11897,9 +13034,22 @@ local function collect_files(names)
return noffiles > 0 and filelist or nil
end
-function resolvers.registerintrees(name)
- if not find(name,"^%.") then
- instance.foundintrees[name] = (instance.foundintrees[name] or 0) + 1 -- maybe only one
+local fit = { }
+
+function resolvers.registerintrees(filename,format,filetype,usedmethod,foundname)
+ local foundintrees = instance.foundintrees
+ if usedmethod == "direct" and filename == foundname and fit[foundname] then
+ -- just an extra lookup after a test on presence
+ else
+ local t = {
+ filename = filename,
+ format = format ~= "" and format or nil,
+ filetype = filetype ~= "" and filetype or nil,
+ usedmethod = usedmethod,
+ foundname = foundname,
+ }
+ fit[foundname] = t
+ foundintrees[#foundintrees+1] = t
end
end
@@ -11919,297 +13069,423 @@ end
local preparetreepattern = Cs((P(".")/"%%." + P("-")/"%%-" + P(1))^0 * Cc("$"))
--- this one is split in smaller functions but it needs testing
+-- -- -- begin of main file search routing -- -- -- needs checking as previous has been patched
-local function collect_instance_files(filename,askedformat,allresults) -- todo : plugin (scanners, checkers etc)
- local result = { }
- local stamp = nil
- askedformat = askedformat or ""
- filename = collapsepath(filename)
- -- speed up / beware: format problem
- if instance.remember and not allresults then
- stamp = filename .. "--" .. askedformat
- if instance.found[stamp] then
+local collect_instance_files
+
+local function find_analyze(filename,askedformat,allresults)
+ local filetype, wantedfiles, ext = '', { }, fileextname(filename)
+ -- too tricky as filename can be bla.1.2.3:
+ --
+ -- if not suffixmap[ext] then
+ -- wantedfiles[#wantedfiles+1] = filename
+ -- end
+ wantedfiles[#wantedfiles+1] = filename
+ if askedformat == "" then
+ if ext == "" or not suffixmap[ext] then
+ local defaultsuffixes = resolvers.defaultsuffixes
+ for i=1,#defaultsuffixes do
+ local forcedname = filename .. '.' .. defaultsuffixes[i]
+ wantedfiles[#wantedfiles+1] = forcedname
+ filetype = resolvers.formatofsuffix(forcedname)
+ if trace_locating then
+ report_resolving("forcing filetype '%s'",filetype)
+ end
+ end
+ else
+ filetype = resolvers.formatofsuffix(filename)
if trace_locating then
- report_resolving("remembered file '%s'",filename)
+ report_resolving("using suffix based filetype '%s'",filetype)
end
- resolvers.registerintrees(filename) -- for tracing used files
- return instance.found[stamp]
end
- end
- if not dangerous[askedformat] then
- if isreadable(filename) then
- if trace_detail then
- report_resolving("file '%s' found directly",filename)
- end
- if stamp then
- instance.found[stamp] = { filename }
+ else
+ if ext == "" or not suffixmap[ext] then
+ local format_suffixes = suffixes[askedformat]
+ if format_suffixes then
+ for i=1,#format_suffixes do
+ wantedfiles[#wantedfiles+1] = filename .. "." .. format_suffixes[i]
+ end
end
- return { filename }
+ end
+ filetype = askedformat
+ if trace_locating then
+ report_resolving("using given filetype '%s'",filetype)
end
end
+ return filetype, wantedfiles
+end
+
+local function find_direct(filename,allresults)
+ if not dangerous[askedformat] and isreadable(filename) then
+ if trace_detail then
+ report_resolving("file '%s' found directly",filename)
+ end
+ return "direct", { filename }
+ end
+end
+
+local function find_wildcard(filename,allresults)
if find(filename,'%*') then
if trace_locating then
report_resolving("checking wildcard '%s'", filename)
end
- result = resolvers.findwildcardfiles(filename) -- we can use th elocal
- elseif file.is_qualified_path(filename) then
- if isreadable(filename) then
- if trace_locating then
- report_resolving("qualified name '%s'", filename)
- end
- result = { filename }
- else
- local forcedname, ok, suffix = "", false, fileextname(filename)
- if suffix == "" then -- why
- local format_suffixes = askedformat == "" and resolvers.defaultsuffixes or suffixes[askedformat]
- if format_suffixes then
- for i=1,#format_suffixes do
- local s = format_suffixes[i]
- forcedname = filename .. "." .. s
- if isreadable(forcedname) then
- if trace_locating then
- report_resolving("no suffix, forcing format filetype '%s'", s)
- end
- result, ok = { forcedname }, true
- break
- end
- end
- end
- end
- if not ok and suffix ~= "" then
- -- try to find in tree (no suffix manipulation), here we search for the
- -- matching last part of the name
- local basename = filebasename(filename)
- local pattern = lpegmatch(preparetreepattern,filename)
- -- messy .. to be sorted out
- local savedformat = askedformat
- local format = savedformat or ""
- if format == "" then
- askedformat = resolvers.formatofsuffix(suffix)
- end
- if not format then
- askedformat = "othertextfiles" -- kind of everything, maybe texinput is better
- end
- --
- if basename ~= filename then
- local resolved = collect_instance_files(basename,askedformat,allresults)
- if #result == 0 then -- shouldn't this be resolved ?
- local lowered = lower(basename)
- if filename ~= lowered then
- resolved = collect_instance_files(lowered,askedformat,allresults)
- end
- end
- resolvers.format = savedformat
- --
- for r=1,#resolved do
- local rr = resolved[r]
- if find(rr,pattern) then
- result[#result+1], ok = rr, true
- end
- end
- end
- -- a real wildcard:
- --
- -- if not ok then
- -- local filelist = collect_files({basename})
- -- for f=1,#filelist do
- -- local ff = filelist[f][3] or ""
- -- if find(ff,pattern) then
- -- result[#result+1], ok = ff, true
- -- end
- -- end
- -- end
- end
- if not ok and trace_locating then
- report_resolving("qualified name '%s'", filename)
- end
+ local method, result = resolvers.findwildcardfiles(filename)
+ if result then
+ return "wildcard", result
end
- else
- -- search spec
- local filetype, done, wantedfiles, ext = '', false, { }, fileextname(filename)
- -- -- tricky as filename can be bla.1.2.3
- -- if not suffixmap[ext] then --- probably needs to be done elsewhere too
- -- wantedfiles[#wantedfiles+1] = filename
- -- end
- wantedfiles[#wantedfiles+1] = filename
- if askedformat == "" then
- if ext == "" or not suffixmap[ext] then
- local defaultsuffixes = resolvers.defaultsuffixes
- for i=1,#defaultsuffixes do
- local forcedname = filename .. '.' .. defaultsuffixes[i]
- wantedfiles[#wantedfiles+1] = forcedname
- filetype = resolvers.formatofsuffix(forcedname)
+ end
+end
+
+local function find_qualified(filename,allresults) -- this one will be split too
+ if not file.is_qualified_path(filename) then
+ return
+ end
+ if trace_locating then
+ report_resolving("checking qualified name '%s'", filename)
+ end
+ if isreadable(filename) then
+ if trace_detail then
+ report_resolving("qualified file '%s' found", filename)
+ end
+ return "qualified", { filename }
+ end
+ if trace_detail then
+ report_resolving("locating qualified file '%s'", filename)
+ end
+ local forcedname, suffix = "", fileextname(filename)
+ if suffix == "" then -- why
+ local format_suffixes = askedformat == "" and resolvers.defaultsuffixes or suffixes[askedformat]
+ if format_suffixes then
+ for i=1,#format_suffixes do
+ local s = format_suffixes[i]
+ forcedname = filename .. "." .. s
+ if isreadable(forcedname) then
if trace_locating then
- report_resolving("forcing filetype '%s'",filetype)
+ report_resolving("no suffix, forcing format filetype '%s'", s)
end
+ return "qualified", { forcedname }
end
- else
- filetype = resolvers.formatofsuffix(filename)
- if trace_locating then
- report_resolving("using suffix based filetype '%s'",filetype)
+ end
+ end
+ end
+ if suffix and suffix ~= "" then
+ -- try to find in tree (no suffix manipulation), here we search for the
+ -- matching last part of the name
+ local basename = filebasename(filename)
+ local pattern = lpegmatch(preparetreepattern,filename)
+ -- messy .. to be sorted out
+ local savedformat = askedformat
+ local format = savedformat or ""
+ if format == "" then
+ askedformat = resolvers.formatofsuffix(suffix)
+ end
+ if not format then
+ askedformat = "othertextfiles" -- kind of everything, maybe all
+ end
+ --
+ if basename ~= filename then
+ local resolved = collect_instance_files(basename,askedformat,allresults)
+ if #resolved == 0 then
+ local lowered = lower(basename)
+ if filename ~= lowered then
+ resolved = collect_instance_files(lowered,askedformat,allresults)
end
end
- else
- if ext == "" or not suffixmap[ext] then
- local format_suffixes = suffixes[askedformat]
- if format_suffixes then
- for i=1,#format_suffixes do
- wantedfiles[#wantedfiles+1] = filename .. "." .. format_suffixes[i]
+ resolvers.format = savedformat
+ --
+ if #resolved > 0 then
+ local result = { }
+ for r=1,#resolved do
+ local rr = resolved[r]
+ if find(rr,pattern) then
+ result[#result+1] = rr
end
end
+ if #result > 0 then
+ return "qualified", result
+ end
end
- filetype = askedformat
- if trace_locating then
- report_resolving("using given filetype '%s'",filetype)
- end
end
- local typespec = resolvers.variableofformat(filetype)
- local pathlist = resolvers.expandedpathlist(typespec)
- if not pathlist or #pathlist == 0 then
- -- no pathlist, access check only / todo == wildcard
- if trace_detail then
- report_resolving("checking filename '%s', filetype '%s', wanted files '%s'",filename, filetype or '?',concat(wantedfiles," | "))
+ -- a real wildcard:
+ --
+ -- local filelist = collect_files({basename})
+ -- result = { }
+ -- for f=1,#filelist do
+ -- local ff = filelist[f][3] or ""
+ -- if find(ff,pattern) then
+ -- result[#result+1], ok = ff, true
+ -- end
+ -- end
+ -- if #result > 0 then
+ -- return "qualified", result
+ -- end
+ end
+end
+
+local function check_subpath(fname)
+ if isreadable(fname) then
+ if trace_detail then
+ report_resolving("found '%s' by deep scanning",fname)
+ end
+ return fname
+ end
+end
+
+local function find_intree(filename,filetype,wantedfiles,allresults)
+ local typespec = resolvers.variableofformat(filetype)
+ local pathlist = resolvers.expandedpathlist(typespec)
+ local method = "intree"
+ if pathlist and #pathlist > 0 then
+ -- list search
+ local filelist = collect_files(wantedfiles)
+ local dirlist = { }
+ if filelist then
+ for i=1,#filelist do
+ dirlist[i] = filedirname(filelist[i][3]) .. "/" -- was [2] .. gamble
end
- for k=1,#wantedfiles do
- local fname = wantedfiles[k]
- if fname and isreadable(fname) then
- filename, done = fname, true
- result[#result+1] = filejoin('.',fname)
- break
+ end
+ if trace_detail then
+ report_resolving("checking filename '%s'",filename)
+ end
+ local result = { }
+ for k=1,#pathlist do
+ local path = pathlist[k]
+ local pathname = lpegmatch(inhibitstripper,path)
+ local doscan = path == pathname -- no ^!!
+ if not find (pathname,'//$') then
+ doscan = false -- we check directly on the path
+ end
+ local done = false
+ -- using file list
+ if filelist then -- database
+ -- compare list entries with permitted pattern -- /xx /xx//
+ local expression = makepathexpression(pathname)
+ if trace_detail then
+ report_resolving("using pattern '%s' for path '%s'",expression,pathname)
end
- end
- -- this is actually 'other text files' or 'any' or 'whatever'
- local filelist = collect_files(wantedfiles)
- local fl = filelist and filelist[1]
- if fl then
- filename = fl[3] -- not local?
- result[#result+1] = resolvers.resolve(filename)
- done = true
- end
- else
- -- list search
- local filelist = collect_files(wantedfiles)
- local dirlist = { }
- if filelist then
- for i=1,#filelist do
- dirlist[i] = filedirname(filelist[i][3]) .. "/" -- was [2] .. gamble
- end
- end
- if trace_detail then
- report_resolving("checking filename '%s'",filename)
- end
- for k=1,#pathlist do
- local path = pathlist[k]
- local pathname = lpegmatch(inhibitstripper,path)
- local doscan = path == pathname -- no ^!!
- if not find (pathname, '//$') then doscan = false end
- done = false
- -- using file list
- if filelist then
- -- compare list entries with permitted pattern -- /xx /xx//
- local expression = makepathexpression(pathname)
- if trace_detail then
- report_resolving("using pattern '%s' for path '%s'",expression,pathname)
- end
- for k=1,#filelist do
- local fl = filelist[k]
- local f = fl[2]
- local d = dirlist[k]
- if find(d,expression) then
- -- todo, test for readable
- result[#result+1] = resolvers.resolve(fl[3]) -- no shortcut
- done = true
- if allresults then
- if trace_detail then
- report_resolving("match to '%s' in hash for file '%s' and path '%s', continue scanning",expression,f,d)
- end
- else
- if trace_detail then
- report_resolving("match to '%s' in hash for file '%s' and path '%s', quit scanning",expression,f,d)
- end
- break
+ for k=1,#filelist do
+ local fl = filelist[k]
+ local f = fl[2]
+ local d = dirlist[k]
+ if find(d,expression) then
+ -- todo, test for readable
+ result[#result+1] = resolvers.resolve(fl[3]) -- no shortcut
+ done = true
+ if allresults then
+ if trace_detail then
+ report_resolving("match to '%s' in hash for file '%s' and path '%s', continue scanning",expression,f,d)
end
- elseif trace_detail then
- report_resolving("no match to '%s' in hash for file '%s' and path '%s'",expression,f,d)
+ else
+ if trace_detail then
+ report_resolving("match to '%s' in hash for file '%s' and path '%s', quit scanning",expression,f,d)
+ end
+ break
end
+ elseif trace_detail then
+ report_resolving("no match to '%s' in hash for file '%s' and path '%s'",expression,f,d)
end
end
- if not done then
- -- check if on disk / unchecked / does not work at all / also zips
- -- TH perhaps it did not work because of missing resolvers.resolve()
- -- and resolvers.scanfiles() calls ...
- pathname = resolvers.resolve(pathname)
- local scheme = url.hasscheme(pathname)
- if not scheme or scheme == "file" then
- local pname = gsub(pathname,"%.%*$",'')
- if not find(pname,"%*") then
- local ppname = gsub(pname,"/+$","")
- if can_be_dir(ppname) then
- local files = {}
- if doscan then files = resolvers.scanfiles(ppname) end
+ end
+ if done then
+ method = "database"
+ else
+ method = "filesystem" -- bonus, even when !! is specified
+ pathname = gsub(pathname,"/+$","")
+ pathname = resolvers.resolve(pathname)
+ local scheme = url.hasscheme(pathname)
+ if not scheme or scheme == "file" then
+ local pname = gsub(pathname,"%.%*$",'')
+ if not find(pname,"%*") then
+ if can_be_dir(pname) then
+ -- quick root scan first
+ for k=1,#wantedfiles do
+ local w = wantedfiles[k]
+ local fname = check_subpath(filejoin(pname,w))
+ if fname then
+ result[#result+1] = fname
+ done = true
+ if not allresults then
+ break
+ end
+ end
+ end
+ if not done and doscan then
+ -- collect files in path (and cache the result)
+ local files = resolvers.simplescanfiles(pname,false,true)
for k=1,#wantedfiles do
local w = wantedfiles[k]
- local subpath = ''
- if files[w] then
- subpath = type(files[w]) == "table" and files[w][1] or files[w]
- end
- local fname = filejoin(ppname,subpath,w)
- if isreadable(fname) then
- if trace_detail then
- report_resolving("found '%s' by scanning",fname)
+ local subpath = files[w]
+ if not subpath or subpath == "" then
+ -- rootscan already done
+ elseif type(subpath) == "string" then
+ local fname = check_subpath(filejoin(pname,subpath,w))
+ if fname then
+ result[#result+1] = fname
+ done = true
+ if not allresults then
+ break
+ end
+ end
+ else
+ for i=1,#subpath do
+ local sp = subpath[i]
+ if sp == "" then
+ -- roottest already done
+ else
+ local fname = check_subpath(filejoin(pname,sp,w))
+ if fname then
+ result[#result+1] = fname
+ done = true
+ if not allresults then
+ break
+ end
+ end
+ end
+ end
+ if done and not allresults then
+ break
end
- result[#result+1] = fname
- done = true
- if not allresults then break end
end
end
- else
- -- no access needed for non existing path, speedup (esp in large tree with lots of fake)
end
end
+ else
+ -- no access needed for non existing path, speedup (esp in large tree with lots of fake)
end
end
- if not done and doscan then
- -- todo: slow path scanning ... although we now have tree:// supported in $TEXMF
- end
- if done and not allresults then break end
+ end
+ -- todo recursive scanning
+ if done and not allresults then
+ break
end
end
+ if #result > 0 then
+ return method, result
+ end
end
- for k=1,#result do
- local rk = collapsepath(result[k])
- result[k] = rk
- resolvers.registerintrees(rk) -- for tracing used files
+end
+
+local function find_onpath(filename,filetype,wantedfiles,allresults)
+ if trace_detail then
+ report_resolving("checking filename '%s', filetype '%s', wanted files '%s'",filename, filetype or '?',concat(wantedfiles," | "))
+ end
+ local result = { }
+ for k=1,#wantedfiles do
+ local fname = wantedfiles[k]
+ if fname and isreadable(fname) then
+ filename = fname
+ result[#result+1] = filejoin('.',fname)
+ if not allresults then
+ break
+ end
+ end
end
- if stamp then
- instance.found[stamp] = result
+ if #result > 0 then
+ return "onpath", result
end
- return result
end
--- -- -- begin of main file search routing -- -- --
-
-
-
-
-
-
-
+local function find_otherwise(filename,filetype,wantedfiles,allresults) -- other text files | any | whatever
+ local filelist = collect_files(wantedfiles)
+ local fl = filelist and filelist[1]
+ if fl then
+ return "otherwise", { resolvers.resolve(fl[3]) } -- filename
+ end
+end
+-- we could have a loop over the 6 functions but then we'd have to
+-- always analyze
+collect_instance_files = function(filename,askedformat,allresults) -- uses nested
+ askedformat = askedformat or ""
+ filename = collapsepath(filename)
+ if allresults then
+ -- no need for caching, only used for tracing
+ local filetype, wantedfiles = find_analyze(filename,askedformat)
+ local results = {
+ { find_direct (filename,true) },
+ { find_wildcard (filename,true) },
+ { find_qualified(filename,true) },
+ { find_intree (filename,filetype,wantedfiles,true) },
+ { find_onpath (filename,filetype,wantedfiles,true) },
+ { find_otherwise(filename,filetype,wantedfiles,true) },
+ }
+ local result, status, done = { }, { }, { }
+ for k, r in next, results do
+ local method, list = r[1], r[2]
+ if method and list then
+ for i=1,#list do
+ local c = collapsepath(list[i])
+ if not done[c] then
+ result[#result+1] = c
+ done[c] = true
+ end
+ status[#status+1] = format("%-10s: %s",method,c)
+ end
+ end
+ end
+ if trace_detail then
+ report_resolving("lookup status: %s",table.serialize(status,filename))
+ end
+ return result, status
+ else
+ local method, result, stamp, filetype, wantedfiles
+ if instance.remember then
+ stamp = format("%s--%s", filename, askedformat)
+ result = stamp and instance.found[stamp]
+ if result then
+ if trace_locating then
+ report_resolving("remembered file '%s'",filename)
+ end
+ return result
+ end
+ end
+ method, result = find_direct(filename)
+ if not result then
+ method, result = find_wildcard(filename)
+ if not result then
+ method, result = find_qualified(filename)
+ if not result then
+ filetype, wantedfiles = find_analyze(filename,askedformat)
+ method, result = find_intree(filename,filetype,wantedfiles)
+ if not result then
+ method, result = find_onpath(filename,filetype,wantedfiles)
+ if not result then
+ method, result = find_otherwise(filename,filetype,wantedfiles)
+ end
+ end
+ end
+ end
+ end
+ if result and #result > 0 then
+ local foundname = collapsepath(result[1])
+ resolvers.registerintrees(filename,askedformat,filetype,method,foundname)
+ result = { foundname }
+ else
+ result = { } -- maybe false
+ end
+ if stamp then
+ if trace_locating then
+ report_resolving("remembering file '%s'",filename)
+ end
+ instance.found[stamp] = result
+ end
+ return result
+ end
+end
-- -- -- end of main file search routing -- -- --
+
local function findfiles(filename,filetype,allresults)
- local result = collect_instance_files(filename,filetype or "",allresults)
- if #result == 0 then
+ local result, status = collect_instance_files(filename,filetype or "",allresults)
+ if not result or #result == 0 then
local lowered = lower(filename)
if filename ~= lowered then
- return collect_instance_files(lowered,filetype or "",allresults)
+ result, status = collect_instance_files(lowered,filetype or "",allresults)
end
end
- return result
+ return result or { }, status
end
function resolvers.findfiles(filename,filetype)
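
As an illustrative sketch (not part of the patch, with hypothetical stand-in finders), the single-result branch of collect_instance_files above amounts to running the six finders in order until one returns a hit and, when instance.remember is set, memoizing the outcome:

    -- minimal cascade sketch; each finder is assumed to return (method, resultlist) or nil
    local function first_hit(filename, finders, cache)
        local cached = cache[filename]
        if cached then
            return cached
        end
        local found = { }
        for i = 1, #finders do
            local _, list = finders[i](filename)
            if list and #list > 0 then
                found = { list[1] } -- only the first (collapsed) name is kept
                break
            end
        end
        cache[filename] = found -- misses are remembered too, like instance.found
        return found
    end
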
@@ -12370,6 +13646,10 @@ function resolvers.load(option)
return files and next(files) and true
end
+function resolvers.loadtime()
+ return statistics.elapsedtime(instance)
+end
+
local function report(str)
if trace_locating then
report_resolving(str) -- has already verbose
@@ -12383,6 +13663,9 @@ function resolvers.dowithfilesandreport(command, files, ...) -- will move
if trace_locating then
report('') -- ?
end
+ if type(files) == "string" then
+ files = { files }
+ end
for f=1,#files do
local file = files[f]
local result = command(file,...)
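
For illustration only (the file name is assumed), the type guard added above lets callers pass a single name where previously a table of names was required:

    -- both forms now behave the same:
    -- resolvers.dowithfilesandreport(resolvers.findfile, { "context.mkiv" })
    -- resolvers.dowithfilesandreport(resolvers.findfile, "context.mkiv")
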
@@ -12540,6 +13823,8 @@ local cleanpath, findgivenfile, expansion = resolvers.cleanpath, resolvers.findg
local getenv = resolvers.getenv -- we can probably also use resolvers.expansion
local P, Cs, lpegmatch = lpeg.P, lpeg.Cs, lpeg.match
+-- getenv = function(...) return resolvers.getenv(...) end -- needs checking (definitions changes later on)
+
prefixes.environment = function(str)
return cleanpath(expansion(str))
end
@@ -12578,7 +13863,7 @@ end
prefixes.filename = function(str)
local fullname = findgivenfile(str) or ""
- return cleanpath(file.basename((fullname ~= "" and fullname) or str))
+ return cleanpath(file.basename((fullname ~= "" and fullname) or str)) -- no cleanpath needed here
end
prefixes.pathname = function(str)
@@ -12621,8 +13906,9 @@ function resolvers.allprefixes(separator)
end
local function _resolve_(method,target)
- if prefixes[method] then
- return prefixes[method](target)
+ local action = prefixes[method]
+ if action then
+ return action(target)
else
return method .. ":" .. target
end
@@ -12637,7 +13923,7 @@ end
local function resolve(str) -- use schemes, this one is then for the commandline only
local res = resolved[str]
if not res then
- res = gsub(str,"([a-z][a-z]+):([^ \"\';]*)",_resolve_)
+ res = gsub(str,"([a-z][a-z]+):([^ \"\';]*)",_resolve_) -- home:xx;selfautoparent:xx; etc
resolved[str] = res
abstract[res] = str
end
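
A usage sketch for the prefix handling above (sample strings assumed): every lowercase prefix:target pair matched by the pattern is dispatched through the prefixes table, and unregistered prefixes are simply glued back together:

    -- "environment:TEXMFHOME"  --> expanded via prefixes.environment
    -- "foo:bar"                --> no handler registered, stays "foo:bar"
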
@@ -12797,7 +14083,7 @@ end
function generators.file(specification)
local path = specification.filename
- local content = resolvers.scanfiles(path)
+ local content = resolvers.scanfiles(path,false,true) -- scan once
resolvers.registerfilehash(path,content,true)
end
@@ -13152,6 +14438,18 @@ local archives = zip.archives
zip.registeredfiles = zip.registeredfiles or { }
local registeredfiles = zip.registeredfiles
+local limited = false
+
+directives.register("system.inputmode", function(v)
+ if not limited then
+ local i_limiter = io.i_limiter(v)
+ if i_limiter then
+ zip.open = i_limiter.protect(zip.open)
+ limited = true
+ end
+ end
+end)
+
local function validzip(str) -- todo: use url splitter
if not find(str,"^zip://") then
return "zip:///" .. str
@@ -13419,11 +14717,11 @@ function resolvers.finders.tree(specification)
end
function resolvers.locators.tree(specification)
- -- TH: resolvers.resolve() call was missing here
- local name = resolvers.resolve(specification.filename)
- if name ~= '' and lfs.isdir(name) then
+ local name = specification.filename
+ local realname = resolvers.resolve(name) -- no shortcut
+ if realname and realname ~= '' and lfs.isdir(realname) then
if trace_locating then
- report_trees("locator '%s' found",name)
+ report_trees("locator '%s' found",realname)
end
resolvers.appendhash('tree',name,false) -- don't cache
elseif trace_locating then
@@ -13437,6 +14735,8 @@ function resolvers.hashers.tree(specification)
report_trees("analysing '%s'",name)
end
resolvers.methodhandler("hashers",name)
+
+ resolvers.generators.file(specification)
end
resolvers.concatinators.tree = resolvers.concatinators.file
@@ -13596,8 +14896,15 @@ local function loaded(libpaths,name,simple)
end
package.loaders[2] = function(name) -- was [#package.loaders+1]
- if trace_locating then -- mode detail
- report_libraries("! locating '%s'",name)
+ if file.suffix(name) == "" then
+ name = file.addsuffix(name,"lua") -- maybe a list
+ if trace_locating then -- mode detail
+ report_libraries("! locating '%s' with forced suffix",name)
+ end
+ else
+ if trace_locating then -- mode detail
+ report_libraries("! locating '%s'",name)
+ end
end
for i=1,#libformats do
local format = libformats[i]
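
Illustrative effect of the suffix guard above (module names assumed): a require name without a suffix is now searched with a forced .lua suffix before the library formats are tried, while names that already carry one are left alone:

    -- require("trac-lmx")  -- no suffix, located as "trac-lmx.lua"
    -- require("foo.lua")   -- suffix present, name kept as is
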
@@ -13804,7 +15111,13 @@ function resolvers.load_tree(tree,resolve)
-- Beware, we need to obey the relocatable autoparent so we
-- set TEXMFCNF to its raw value. This is somewhat tricky when
-- we run a mkii job from within. Therefore, in mtxrun, there
- -- is a resolve applied when we're in mkii/kpse mode.
+ -- is a resolve applied when we're in mkii/kpse mode or when
+ -- --resolve is passed to mtxrun. Maybe we should also set the
+    -- local AUTOPARENT etc. although these are always set anew.
+
+ if resolve then
+ resolvers.luacnfspec = resolvers.resolve(resolvers.luacnfspec)
+ end
setenv('SELFAUTOPARENT', newroot)
setenv('SELFAUTODIR', newtree)
@@ -13812,11 +15125,12 @@ function resolvers.load_tree(tree,resolve)
setenv('TEXROOT', newroot)
setenv('TEXOS', texos)
setenv('TEXMFOS', texmfos)
- setenv('TEXMFCNF', resolvers.luacnfspec, not resolve)
- setenv("PATH", newpath .. io.pathseparator .. getenv("PATH"))
+ setenv('TEXMFCNF', resolvers.luacnfspec,true) -- already resolved
+ setenv('PATH', newpath .. io.pathseparator .. getenv('PATH'))
report_tds("changing from root '%s' to '%s'",oldroot,newroot)
- report_tds("prepending '%s' to binary path",newpath)
+ report_tds("prepending '%s' to PATH",newpath)
+ report_tds("setting TEXMFCNF to '%s'",resolvers.luacnfspec)
report_tds()
end
end
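
An assumed invocation matching the change above (tree path and job hypothetical): passing --resolve now also resolves the prefixes in resolvers.luacnfspec before TEXMFCNF is exported:

    mtxrun --tree=pathtotree --resolve --script context somefile.tex
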
@@ -13837,6 +15151,7 @@ if not modules then modules = { } end modules ['data-lst'] = {
-- used in mtxrun, can be loaded later .. todo
local find, concat, upper, format = string.find, table.concat, string.upper, string.format
+local fastcopy, sortedpairs = table.fastcopy, table.sortedpairs
resolvers.listers = resolvers.listers or { }
@@ -13867,10 +15182,10 @@ function resolvers.listers.variables(pattern)
end
end
end
- local env = table.fastcopy(environment)
- local var = table.fastcopy(variables)
- local exp = table.fastcopy(expansions)
- for key, value in table.sortedpairs(configured) do
+ local env = fastcopy(environment)
+ local var = fastcopy(variables)
+ local exp = fastcopy(expansions)
+ for key, value in sortedpairs(configured) do
if key ~= "" and (pattern == "" or find(upper(key),pattern)) then
report_lists(key)
report_lists(" env: %s",tabstr(rawget(environment,key)) or "unset")
@@ -13879,9 +15194,9 @@ function resolvers.listers.variables(pattern)
report_lists(" res: %s",resolvers.resolve(expansions[key]) or "unset")
end
end
- instance.environment = table.fastcopy(env)
- instance.variables = table.fastcopy(var)
- instance.expansions = table.fastcopy(exp)
+ instance.environment = fastcopy(env)
+ instance.variables = fastcopy(var)
+ instance.expansions = fastcopy(exp)
end
function resolvers.listers.configurations(report)
@@ -14168,8 +15483,8 @@ own = { } -- not local, might change
own.libs = { -- order can be made better
'l-string.lua',
- 'l-lpeg.lua',
'l-table.lua',
+ 'l-lpeg.lua',
'l-io.lua',
'l-number.lua',
'l-set.lua',
@@ -14240,7 +15555,7 @@ end
-- End of hack.
-local format, gsub, gmatch, match = string.format, string.gsub, string.gmatch, string.match
+local format, gsub, gmatch, match, find = string.format, string.gsub, string.gmatch, string.match, string.find
local concat = table.concat
own.name = (environment and environment.ownname) or arg[0] or 'mtxrun.lua'
@@ -14362,7 +15677,7 @@ local helpinfo = [[
--resolve resolve prefixed arguments
--ctxlua run internally (using preloaded libs)
--internal run script using built in libraries (same as --ctxlua)
---locate locate given filename
+--locate locate given filename in database (default) or system (--first --all --detail)
--autotree use texmf tree cf. env 'texmfstart_tree' or 'texmfstarttree'
--tree=pathtotree use given texmf tree (default file: 'setuptex.tmf')
@@ -14410,7 +15725,7 @@ local helpinfo = [[
local application = logs.application {
name = "mtxrun",
- banner = "ConTeXt TDS Runner Tool 1.30",
+ banner = "ConTeXt TDS Runner Tool 1.31",
helpinfo = helpinfo,
}
@@ -14455,12 +15770,12 @@ runners.registered = {
xmltools = { 'xmltools.rb', true },
-- luatools = { 'luatools.lua', true },
mtxtools = { 'mtxtools.rb', true },
- pdftrimwhite = { 'pdftrimwhite.pl', false }
+ pdftrimwhite = { 'pdftrimwhite.pl', false },
}
runners.launchers = {
windows = { },
- unix = { }
+ unix = { },
}
-- like runners.libpath("framework"): looks on script's subpath
@@ -14487,7 +15802,7 @@ function runners.prepare()
end
local touchname = environment.argument("iftouched")
if type(touchname) == "string" and touchname ~= "" then
- local oldname, newname = string.split(touchname, ",")
+ local oldname, newname = string.splitup(touchname, ",")
if oldname and newname and oldname ~= "" and newname ~= "" then
if not file.needs_updating(oldname,newname) then
if e_verbose then
@@ -14521,11 +15836,11 @@ function runners.execute_script(fullname,internal,nosplit)
if path ~= "" then
result = fullname
elseif name then
- name = name:gsub("^int[%a]*:",function()
+ name = gsub(name,"^int[%a]*:",function()
internal = true
return ""
end )
- name = name:gsub("^script:","")
+ name = gsub(name,"^script:","")
if suffix == "" and runners.registered[name] and runners.registered[name][1] then
name = runners.registered[name][1]
suffix = file.extname(name)
@@ -14555,13 +15870,15 @@ function runners.execute_script(fullname,internal,nosplit)
environment.ownscript = result
dofile(result)
else
- local quotedresult = result
- if string.match(result,' ') and not string.match(result,"^\".*\"$") then quotedresult = '"' .. result .. '"' end
local binary = runners.applications[file.extname(result)]
+ result = string.quoted(string.unquoted(result))
+ -- if string.match(result,' ') and not string.match(result,"^\".*\"$") then
+ -- result = '"' .. result .. '"'
+ -- end
if binary and binary ~= "" then
- quotedresult = binary .. " " .. quotedresult
+ result = binary .. " " .. result
end
- local command = quotedresult .. " " .. environment.reconstructcommandline(environment.arguments_after,noquote)
+ local command = result .. " " .. environment.reconstructcommandline(environment.arguments_after,noquote)
if e_verbose then
report()
report("executing: %s",command)
@@ -14606,7 +15923,7 @@ function runners.execute_program(fullname)
local before, after = environment.splitarguments(fullname)
for k=1,#after do after[k] = resolvers.resolve(after[k]) end
environment.initializearguments(after)
- fullname = fullname:gsub("^bin:","")
+ fullname = gsub(fullname,"^bin:","")
local command = fullname .. " " .. (environment.reconstructcommandline(after or "",noquote) or "")
report()
report("executing: %s",command)
@@ -14667,10 +15984,24 @@ function runners.resolve_string(filename)
end
end
-function runners.locate_file(filename)
- -- differs from texmfstart where locate appends .com .exe .bat ... todo
+-- differs from texmfstart where locate appends .com .exe .bat ... todo
+
+function runners.locate_file(filename) -- was given file but only searches in tree
if filename and filename ~= "" then
- runners.report_location(resolvers.findgivenfile(filename))
+ if environment.argument("first") then
+ runners.report_location(resolvers.findfile(filename))
+ -- resolvers.dowithfilesandreport(resolvers.findfile,filename)
+ elseif environment.argument("all") then
+ local result, status = resolvers.findfiles(filename)
+ if status and environment.argument("detail") then
+ runners.report_location(status)
+ else
+ runners.report_location(result)
+ end
+ else
+ runners.report_location(resolvers.findgivenfile(filename))
+ -- resolvers.dowithfilesandreport(resolvers.findgivenfile,filename)
+ end
end
end
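
Illustrative command lines for the extended locate behaviour above (file name assumed):

    mtxrun --locate context.mkiv                  (database lookup via findgivenfile, the default)
    mtxrun --locate --first context.mkiv          (first match via resolvers.findfile)
    mtxrun --locate --all context.mkiv            (all matches via resolvers.findfiles)
    mtxrun --locate --all --detail context.mkiv   (per-method status lines instead of plain names)
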
@@ -14679,12 +16010,12 @@ function runners.locate_platform()
end
function runners.report_location(result)
- if e_verbose then
- reportline()
- if result and result ~= "" then
- report(result)
- else
- report("not found")
+ if type(result) == "table" then
+ for i=1,#result do
+ if i > 1 then
+ io.write("\n")
+ end
+ io.write(result[i])
end
else
io.write(result)
@@ -14742,8 +16073,8 @@ function resolvers.launch(str)
end
function runners.launch_file(filename)
- instance.allresults = true
trackers.enable("resolvers.locating")
+ local allresults = environment.arguments["all"]
local pattern = environment.arguments["pattern"]
if not pattern or pattern == "" then
pattern = filename
@@ -14751,15 +16082,15 @@ function runners.launch_file(filename)
if not pattern or pattern == "" then
report("provide name or --pattern=")
else
- local t = resolvers.findfiles(pattern)
+ local t = resolvers.findfiles(pattern,nil,allresults)
if not t or #t == 0 then
- t = resolvers.findfiles("*/" .. pattern)
+ t = resolvers.findfiles("*/" .. pattern,nil,allresults)
end
if not t or #t == 0 then
- t = resolvers.findfiles("*/" .. pattern .. "*")
+ t = resolvers.findfiles("*/" .. pattern .. "*",nil,allresults)
end
if t and #t > 0 then
- if environment.arguments["all"] then
+ if allresults then
for _, v in pairs(t) do
report("launching %s", v)
resolvers.launch(v)
@@ -14774,6 +16105,11 @@ function runners.launch_file(filename)
end
end
+local mtxprefixes = {
+ { "^mtx%-", "mtx-" },
+ { "^mtx%-t%-", "mtx-t-" },
+}
+
function runners.find_mtx_script(filename)
local function found(name)
local path = file.dirname(name)
@@ -14799,24 +16135,27 @@ function runners.find_mtx_script(filename)
return fullname
end
-- mtx- prefix checking
- local mtxprefix = (filename:find("^mtx%-") and "") or "mtx-"
- -- context namespace, mtx-<filename>
- fullname = mtxprefix .. filename
- fullname = found(fullname) or resolvers.findfile(fullname)
- if fullname and fullname ~= "" then
- return fullname
- end
- -- context namespace, mtx-<filename>s
- fullname = mtxprefix .. basename .. "s" .. "." .. suffix
- fullname = found(fullname) or resolvers.findfile(fullname)
- if fullname and fullname ~= "" then
- return fullname
- end
- -- context namespace, mtx-<filename minus trailing s>
- fullname = mtxprefix .. basename:gsub("s$","") .. "." .. suffix
- fullname = found(fullname) or resolvers.findfile(fullname)
- if fullname and fullname ~= "" then
- return fullname
+ for i=1,#mtxprefixes do
+ local mtxprefix = mtxprefixes[i]
+ mtxprefix = find(filename,mtxprefix[1]) and "" or mtxprefix[2]
+ -- context namespace, mtx-<filename>
+ fullname = mtxprefix .. filename
+ fullname = found(fullname) or resolvers.findfile(fullname)
+ if fullname and fullname ~= "" then
+ return fullname
+ end
+ -- context namespace, mtx-<filename>s
+ fullname = mtxprefix .. basename .. "s" .. "." .. suffix
+ fullname = found(fullname) or resolvers.findfile(fullname)
+ if fullname and fullname ~= "" then
+ return fullname
+ end
+ -- context namespace, mtx-<filename minus trailing s>
+ fullname = mtxprefix .. gsub(basename,"s$","") .. "." .. suffix
+ fullname = found(fullname) or resolvers.findfile(fullname)
+ if fullname and fullname ~= "" then
+ return fullname
+ end
end
-- context namespace, just <filename>
fullname = resolvers.findfile(filename)
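
A hedged sketch of the candidate names the loop above tries for a hypothetical runners.find_mtx_script("font.lua") call, before falling back to the plain name below:

    -- with prefix "mtx-"   : "mtx-font.lua", "mtx-fonts.lua", "mtx-font.lua"
    -- with prefix "mtx-t-" : "mtx-t-font.lua", "mtx-t-fonts.lua", "mtx-t-font.lua"
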
@@ -14963,9 +16302,11 @@ instance.lsrmode = environment.argument("lsr") or false
local is_mkii_stub = runners.registered[file.removesuffix(file.basename(filename))]
-if environment.argument("usekpse") or environment.argument("forcekpse") or is_mkii_stub then
+local e_argument = environment.argument
+
+if e_argument("usekpse") or e_argument("forcekpse") or is_mkii_stub then
- resolvers.load_tree(environment.argument('tree'),true) -- force resolve of TEXMFCNF
+ resolvers.load_tree(e_argument('tree'),true) -- force resolve of TEXMFCNF
os.setenv("engine","")
os.setenv("progname","")
@@ -14978,7 +16319,7 @@ if environment.argument("usekpse") or environment.argument("forcekpse") or is_mk
other = "other text files",
}
- local progname = environment.argument("progname") or 'context'
+ local progname = e_argument("progname") or 'context'
local function kpse_initialized()
texconfig.kpse_init = true
@@ -14993,7 +16334,7 @@ if environment.argument("usekpse") or environment.argument("forcekpse") or is_mk
local findfile = resolvers.findfile
local showpath = resolvers.showpath
- if environment.argument("forcekpse") then
+ if e_argument("forcekpse") then
function resolvers.findfile(name,kind)
return (kpse_initialized():find_file(resolvers.cleanpath(name),(kind ~= "" and (remapper[kind] or kind)) or "tex") or "") or ""
@@ -15002,7 +16343,7 @@ if environment.argument("usekpse") or environment.argument("forcekpse") or is_mk
return (kpse_initialized():show_path(name)) or ""
end
- elseif environment.argument("usekpse") or is_mkii_stub then
+ elseif e_argument("usekpse") or is_mkii_stub then
resolvers.load()
@@ -15041,12 +16382,12 @@ else
end
end
- resolvers.load_tree(environment.argument('tree'))
+ resolvers.load_tree(e_argument('tree'),e_argument("resolve"))
end
-if environment.argument("selfmerge") then
+if e_argument("selfmerge") then
-- embed used libraries
@@ -15056,27 +16397,27 @@ if environment.argument("selfmerge") then
utilities.merger.selfmerge(own.name,own.libs,{ found })
end
-elseif environment.argument("selfclean") then
+elseif e_argument("selfclean") then
-- remove embedded libraries
runners.loadbase()
utilities.merger.selfclean(own.name)
-elseif environment.argument("selfupdate") then
+elseif e_argument("selfupdate") then
runners.loadbase()
trackers.enable("resolvers.locating")
resolvers.updatescript(own.name,"mtxrun")
-elseif environment.argument("ctxlua") or environment.argument("internal") then
+elseif e_argument("ctxlua") or e_argument("internal") then
-- run a script by loading it (using libs)
runners.loadbase()
ok = runners.execute_script(filename,true)
-elseif environment.argument("script") or environment.argument("scripts") then
+elseif e_argument("script") or e_argument("scripts") then
-- run a script by loading it (using libs), pass args
@@ -15087,109 +16428,110 @@ elseif environment.argument("script") or environment.argument("scripts") then
ok = runners.execute_ctx_script(filename)
end
-elseif environment.argument("execute") then
+elseif e_argument("execute") then
-- execute script
runners.loadbase()
ok = runners.execute_script(filename)
-elseif environment.argument("direct") then
+elseif e_argument("direct") then
-- equals bin:
runners.loadbase()
ok = runners.execute_program(filename)
-elseif environment.argument("edit") then
+elseif e_argument("edit") then
-- edit file
runners.loadbase()
runners.edit_script(filename)
-elseif environment.argument("launch") then
+elseif e_argument("launch") then
runners.loadbase()
runners.launch_file(filename)
-elseif environment.argument("makestubs") then
+elseif e_argument("makestubs") then
   -- make stubs (deprecated)
runners.handle_stubs(true)
-elseif environment.argument("removestubs") then
+elseif e_argument("removestubs") then
   -- remove stub (deprecated)
runners.loadbase()
runners.handle_stubs(false)
-elseif environment.argument("resolve") then
+elseif e_argument("resolve") then
-- resolve string
runners.loadbase()
runners.resolve_string(filename)
-elseif environment.argument("locate") then
+elseif e_argument("locate") then
- -- locate file
+ -- locate file (only database)
runners.loadbase()
runners.locate_file(filename)
-elseif environment.argument("platform") or environment.argument("show-platform") then
+elseif e_argument("platform") or e_argument("show-platform") then
-- locate platform
runners.loadbase()
runners.locate_platform()
-elseif environment.argument("prefixes") then
+elseif e_argument("prefixes") then
runners.loadbase()
runners.prefixes()
-elseif environment.argument("timedrun") then
+elseif e_argument("timedrun") then
-- locate platform
runners.loadbase()
runners.timedrun(filename)
-elseif environment.argument("variables") or environment.argument("show-variables") or environment.argument("expansions") or environment.argument("show-expansions") then
+elseif e_argument("variables") or e_argument("show-variables") or e_argument("expansions") or e_argument("show-expansions") then
-- luatools: runners.execute_ctx_script("mtx-base","--expansions",filename)
resolvers.load("nofiles")
- resolvers.listers.variables(environment.argument("pattern"))
+ resolvers.listers.variables(e_argument("pattern"))
-elseif environment.argument("configurations") or environment.argument("show-configurations") then
+elseif e_argument("configurations") or e_argument("show-configurations") then
-- luatools: runners.execute_ctx_script("mtx-base","--configurations",filename)
resolvers.load("nofiles")
resolvers.listers.configurations()
-elseif environment.argument("find-file") then
+elseif e_argument("find-file") then
-- luatools: runners.execute_ctx_script("mtx-base","--find-file",filename)
resolvers.load()
- local e_pattern = environment.argument("pattern")
- local e_format = environment.argument("format")
+ local e_all = e_argument("all")
+ local e_pattern = e_argument("pattern")
+ local e_format = e_argument("format")
+ local finder = e_all and resolvers.findfiles or resolvers.findfile
if not e_pattern then
runners.register_arguments(filename)
environment.initializearguments(environment.arguments_after)
- resolvers.dowithfilesandreport(resolvers.findfiles,environment.files,e_format)
+ resolvers.dowithfilesandreport(finder,environment.files,e_format)
elseif type(e_pattern) == "string" then
- instance.allresults = true -- brrrr
- resolvers.dowithfilesandreport(resolvers.findfiles,{ e_pattern }, e_format)
+ resolvers.dowithfilesandreport(finder,{ e_pattern },e_format)
end
-elseif environment.argument("find-path") then
+elseif e_argument("find-path") then
-- luatools: runners.execute_ctx_script("mtx-base","--find-path",filename)
@@ -15201,7 +16543,7 @@ elseif environment.argument("find-path") then
print(path)
end
-elseif environment.argument("expand-braces") then
+elseif e_argument("expand-braces") then
-- luatools: runners.execute_ctx_script("mtx-base","--expand-braces",filename)
@@ -15210,7 +16552,7 @@ elseif environment.argument("expand-braces") then
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.expandbraces, environment.files)
-elseif environment.argument("expand-path") then
+elseif e_argument("expand-path") then
-- luatools: runners.execute_ctx_script("mtx-base","--expand-path",filename)
@@ -15219,7 +16561,7 @@ elseif environment.argument("expand-path") then
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.expandpath, environment.files)
-elseif environment.argument("expand-var") or environment.argument("expand-variable") then
+elseif e_argument("expand-var") or e_argument("expand-variable") then
-- luatools: runners.execute_ctx_script("mtx-base","--expand-var",filename)
@@ -15228,7 +16570,7 @@ elseif environment.argument("expand-var") or environment.argument("expand-variab
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.expansion, environment.files)
-elseif environment.argument("show-path") or environment.argument("path-value") then
+elseif e_argument("show-path") or e_argument("path-value") then
-- luatools: runners.execute_ctx_script("mtx-base","--show-path",filename)
@@ -15237,7 +16579,7 @@ elseif environment.argument("show-path") or environment.argument("path-value") t
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.showpath, environment.files)
-elseif environment.argument("var-value") or environment.argument("show-value") then
+elseif e_argument("var-value") or e_argument("show-value") then
-- luatools: runners.execute_ctx_script("mtx-base","--show-value",filename)
@@ -15246,28 +16588,36 @@ elseif environment.argument("var-value") or environment.argument("show-value") t
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.variable,environment.files)
-elseif environment.argument("format-path") then
+elseif e_argument("format-path") then
-- luatools: runners.execute_ctx_script("mtx-base","--format-path",filename)
resolvers.load()
report(caches.getwritablepath("format"))
-elseif environment.argument("pattern") then
+elseif e_argument("pattern") then
-- luatools
- runners.execute_ctx_script("mtx-base","--pattern='" .. environment.argument("pattern") .. "'",filename)
+ runners.execute_ctx_script("mtx-base","--pattern='" .. e_argument("pattern") .. "'",filename)
-elseif environment.argument("generate") then
+elseif e_argument("generate") then
-- luatools
- instance.renewcache = true
- trackers.enable("resolvers.locating")
- resolvers.load()
+ if filename and filename ~= "" then
+ resolvers.load("nofiles")
+ trackers.enable("resolvers.locating")
+ resolvers.renew(filename)
+ else
+ instance.renewcache = true
+ trackers.enable("resolvers.locating")
+ resolvers.load()
+ end
-elseif environment.argument("make") or environment.argument("ini") or environment.argument("compile") then
+ e_verbose = true
+
+elseif e_argument("make") or e_argument("ini") or e_argument("compile") then
-- luatools: runners.execute_ctx_script("mtx-base","--make",filename)
@@ -15275,29 +16625,33 @@ elseif environment.argument("make") or environment.argument("ini") or environmen
trackers.enable("resolvers.locating")
environment.make_format(filename)
-elseif environment.argument("run") then
+elseif e_argument("run") then
-- luatools
runners.execute_ctx_script("mtx-base","--run",filename)
-elseif environment.argument("fmt") then
+elseif e_argument("fmt") then
-- luatools
runners.execute_ctx_script("mtx-base","--fmt",filename)
-elseif environment.argument("help") and filename=='base' then
+elseif e_argument("help") and filename=='base' then
-- luatools
runners.execute_ctx_script("mtx-base","--help")
-elseif environment.argument("help") or filename=='help' or filename == "" then
+elseif e_argument("version") then
+
+ application.version()
+
+elseif e_argument("help") or filename=='help' or filename == "" then
application.help()
-elseif filename:find("^bin:") then
+elseif find(filename,"^bin:") then
runners.loadbase()
ok = runners.execute_program(filename)
diff --git a/Master/bin/i386-freebsd/mtxrun b/Master/bin/i386-freebsd/mtxrun
index 7adcd3023a8..335c4fcb9e0 100755
--- a/Master/bin/i386-freebsd/mtxrun
+++ b/Master/bin/i386-freebsd/mtxrun
@@ -160,509 +160,6 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['l-lpeg'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local lpeg = require("lpeg")
-
-local type = type
-
--- Beware, we predefine a bunch of patterns here and one reason for doing so
--- is that we get consistent behaviour in some of the visualizers.
-
-lpeg.patterns = lpeg.patterns or { } -- so that we can share
-local patterns = lpeg.patterns
-
-local P, R, S, V, match = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.match
-local Ct, C, Cs, Cc = lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc
-local lpegtype = lpeg.type
-
-local utfcharacters = string.utfcharacters
-local utfgmatch = unicode and unicode.utf8.gmatch
-
-local anything = P(1)
-local endofstring = P(-1)
-local alwaysmatched = P(true)
-
-patterns.anything = anything
-patterns.endofstring = endofstring
-patterns.beginofstring = alwaysmatched
-patterns.alwaysmatched = alwaysmatched
-
-local digit, sign = R('09'), S('+-')
-local cr, lf, crlf = P("\r"), P("\n"), P("\r\n")
-local newline = crlf + cr + lf
-local escaped = P("\\") * anything
-local squote = P("'")
-local dquote = P('"')
-local space = P(" ")
-
-local utfbom_32_be = P('\000\000\254\255')
-local utfbom_32_le = P('\255\254\000\000')
-local utfbom_16_be = P('\255\254')
-local utfbom_16_le = P('\254\255')
-local utfbom_8 = P('\239\187\191')
-local utfbom = utfbom_32_be + utfbom_32_le
- + utfbom_16_be + utfbom_16_le
- + utfbom_8
-local utftype = utfbom_32_be / "utf-32-be" + utfbom_32_le / "utf-32-le"
- + utfbom_16_be / "utf-16-be" + utfbom_16_le / "utf-16-le"
- + utfbom_8 / "utf-8" + alwaysmatched / "unknown"
-
-local utf8next = R("\128\191")
-
-patterns.utf8one = R("\000\127")
-patterns.utf8two = R("\194\223") * utf8next
-patterns.utf8three = R("\224\239") * utf8next * utf8next
-patterns.utf8four = R("\240\244") * utf8next * utf8next * utf8next
-patterns.utfbom = utfbom
-patterns.utftype = utftype
-
-local utf8char = patterns.utf8one + patterns.utf8two + patterns.utf8three + patterns.utf8four
-local validutf8char = utf8char^0 * endofstring * Cc(true) + Cc(false)
-
-patterns.utf8 = utf8char
-patterns.utf8char = utf8char
-patterns.validutf8 = validutf8char
-patterns.validutf8char = validutf8char
-
-patterns.digit = digit
-patterns.sign = sign
-patterns.cardinal = sign^0 * digit^1
-patterns.integer = sign^0 * digit^1
-patterns.float = sign^0 * digit^0 * P('.') * digit^1
-patterns.cfloat = sign^0 * digit^0 * P(',') * digit^1
-patterns.number = patterns.float + patterns.integer
-patterns.cnumber = patterns.cfloat + patterns.integer
-patterns.oct = P("0") * R("07")^1
-patterns.octal = patterns.oct
-patterns.HEX = P("0x") * R("09","AF")^1
-patterns.hex = P("0x") * R("09","af")^1
-patterns.hexadecimal = P("0x") * R("09","AF","af")^1
-patterns.lowercase = R("az")
-patterns.uppercase = R("AZ")
-patterns.letter = patterns.lowercase + patterns.uppercase
-patterns.space = space
-patterns.tab = P("\t")
-patterns.spaceortab = patterns.space + patterns.tab
-patterns.eol = S("\n\r")
-patterns.spacer = S(" \t\f\v") -- + string.char(0xc2, 0xa0) if we want utf (cf mail roberto)
-patterns.newline = newline
-patterns.emptyline = newline^1
-patterns.nonspacer = 1 - patterns.spacer
-patterns.whitespace = patterns.eol + patterns.spacer
-patterns.nonwhitespace = 1 - patterns.whitespace
-patterns.equal = P("=")
-patterns.comma = P(",")
-patterns.commaspacer = P(",") * patterns.spacer^0
-patterns.period = P(".")
-patterns.colon = P(":")
-patterns.semicolon = P(";")
-patterns.underscore = P("_")
-patterns.escaped = escaped
-patterns.squote = squote
-patterns.dquote = dquote
-patterns.nosquote = (escaped + (1-squote))^0
-patterns.nodquote = (escaped + (1-dquote))^0
-patterns.unsingle = (squote/"") * patterns.nosquote * (squote/"")
-patterns.undouble = (dquote/"") * patterns.nodquote * (dquote/"")
-patterns.unquoted = patterns.undouble + patterns.unsingle -- more often undouble
-patterns.unspacer = ((patterns.spacer^1)/"")^0
-
-patterns.somecontent = (anything - newline - space)^1 -- (utf8char - newline - space)^1
-patterns.beginline = #(1-newline)
-
-local unquoted = Cs(patterns.unquoted * endofstring) -- not C
-
-function string.unquoted(str)
- return match(unquoted,str) or str
-end
-
-
-function lpeg.anywhere(pattern) --slightly adapted from website
- return P { P(pattern) + 1 * V(1) } -- why so complex?
-end
-
-function lpeg.splitter(pattern, action)
- return (((1-P(pattern))^1)/action+1)^0
-end
-
-local splitters_s, splitters_m = { }, { }
-
-local function splitat(separator,single)
- local splitter = (single and splitters_s[separator]) or splitters_m[separator]
- if not splitter then
- separator = P(separator)
- local other = C((1 - separator)^0)
- if single then
- local any = anything
- splitter = other * (separator * C(any^0) + "") -- ?
- splitters_s[separator] = splitter
- else
- splitter = other * (separator * other)^0
- splitters_m[separator] = splitter
- end
- end
- return splitter
-end
-
-lpeg.splitat = splitat
-
-
-local cache = { }
-
-function lpeg.split(separator,str)
- local c = cache[separator]
- if not c then
- c = Ct(splitat(separator))
- cache[separator] = c
- end
- return match(c,str)
-end
-
-function string.split(str,separator)
- local c = cache[separator]
- if not c then
- c = Ct(splitat(separator))
- cache[separator] = c
- end
- return match(c,str)
-end
-
-local spacing = patterns.spacer^0 * newline -- sort of strip
-local empty = spacing * Cc("")
-local nonempty = Cs((1-spacing)^1) * spacing^-1
-local content = (empty + nonempty)^1
-
-patterns.textline = content
-
-
-local linesplitter = Ct(splitat(newline))
-
-patterns.linesplitter = linesplitter
-
-function string.splitlines(str)
- return match(linesplitter,str)
-end
-
-local utflinesplitter = utfbom^-1 * Ct(splitat(newline))
-
-patterns.utflinesplitter = utflinesplitter
-
-function string.utfsplitlines(str)
- return match(utflinesplitter,str)
-end
-
-
-local cache = { }
-
-function lpeg.checkedsplit(separator,str)
- local c = cache[separator]
- if not c then
- separator = P(separator)
- local other = C((1 - separator)^1)
- c = Ct(separator^0 * other * (separator^1 * other)^0)
- cache[separator] = c
- end
- return match(c,str)
-end
-
-function string.checkedsplit(str,separator)
- local c = cache[separator]
- if not c then
- separator = P(separator)
- local other = C((1 - separator)^1)
- c = Ct(separator^0 * other * (separator^1 * other)^0)
- cache[separator] = c
- end
- return match(c,str)
-end
-
-
-local f1 = string.byte
-
-local function f2(s) local c1, c2 = f1(s,1,2) return c1 * 64 + c2 - 12416 end
-local function f3(s) local c1, c2, c3 = f1(s,1,3) return (c1 * 64 + c2) * 64 + c3 - 925824 end
-local function f4(s) local c1, c2, c3, c4 = f1(s,1,4) return ((c1 * 64 + c2) * 64 + c3) * 64 + c4 - 63447168 end
-
-local utf8byte = patterns.utf8one/f1 + patterns.utf8two/f2 + patterns.utf8three/f3 + patterns.utf8four/f4
-
-patterns.utf8byte = utf8byte
-
-
-
-local cache = { }
-
-function lpeg.stripper(str)
- if type(str) == "string" then
- local s = cache[str]
- if not s then
- s = Cs(((S(str)^1)/"" + 1)^0)
- cache[str] = s
- end
- return s
- else
- return Cs(((str^1)/"" + 1)^0)
- end
-end
-
-local cache = { }
-
-function lpeg.keeper(str)
- if type(str) == "string" then
- local s = cache[str]
- if not s then
- s = Cs((((1-S(str))^1)/"" + 1)^0)
- cache[str] = s
- end
- return s
- else
- return Cs((((1-str)^1)/"" + 1)^0)
- end
-end
-
-function lpeg.frontstripper(str) -- or pattern (yet undocumented)
- return (P(str) + P(true)) * Cs(P(1)^0)
-end
-
-function lpeg.endstripper(str) -- or pattern (yet undocumented)
- return Cs((1 - P(str) * P(-1))^0)
-end
-
--- Just for fun I looked at the used bytecode and
--- p = (p and p + pp) or pp gets one more (testset).
-
-function lpeg.replacer(one,two)
- if type(one) == "table" then
- local no = #one
- if no > 0 then
- local p
- for i=1,no do
- local o = one[i]
- local pp = P(o[1]) / o[2]
- if p then
- p = p + pp
- else
- p = pp
- end
- end
- return Cs((p + 1)^0)
- end
- else
- two = two or ""
- return Cs((P(one)/two + 1)^0)
- end
-end
-
-local splitters_f, splitters_s = { }, { }
-
-function lpeg.firstofsplit(separator) -- always return value
- local splitter = splitters_f[separator]
- if not splitter then
- separator = P(separator)
- splitter = C((1 - separator)^0)
- splitters_f[separator] = splitter
- end
- return splitter
-end
-
-function lpeg.secondofsplit(separator) -- nil if not split
- local splitter = splitters_s[separator]
- if not splitter then
- separator = P(separator)
- splitter = (1 - separator)^0 * separator * C(anything^0)
- splitters_s[separator] = splitter
- end
- return splitter
-end
-
-function lpeg.balancer(left,right)
- left, right = P(left), P(right)
- return P { left * ((1 - left - right) + V(1))^0 * right }
-end
-
-
-
-local nany = utf8char/""
-
-function lpeg.counter(pattern)
- pattern = Cs((P(pattern)/" " + nany)^0)
- return function(str)
- return #match(pattern,str)
- end
-end
-
-if utfgmatch then
-
- function lpeg.count(str,what) -- replaces string.count
- if type(what) == "string" then
- local n = 0
- for _ in utfgmatch(str,what) do
- n = n + 1
- end
- return n
- else -- 4 times slower but still faster than / function
- return #match(Cs((P(what)/" " + nany)^0),str)
- end
- end
-
-else
-
- local cache = { }
-
- function lpeg.count(str,what) -- replaces string.count
- if type(what) == "string" then
- local p = cache[what]
- if not p then
- p = Cs((P(what)/" " + nany)^0)
- cache[p] = p
- end
- return #match(p,str)
- else -- 4 times slower but still faster than / function
- return #match(Cs((P(what)/" " + nany)^0),str)
- end
- end
-
-end
-
-local patterns_escapes = { -- also defines in l-string
- ["%"] = "%%",
- ["."] = "%.",
- ["+"] = "%+", ["-"] = "%-", ["*"] = "%*",
- ["["] = "%[", ["]"] = "%]",
- ["("] = "%)", [")"] = "%)",
- -- ["{"] = "%{", ["}"] = "%}"
- -- ["^"] = "%^", ["$"] = "%$",
-}
-
-local simple_escapes = { -- also defines in l-string
- ["-"] = "%-",
- ["."] = "%.",
- ["?"] = ".",
- ["*"] = ".*",
-}
-
-local p = Cs((S("-.+*%()[]") / patterns_escapes + anything)^0)
-local s = Cs((S("-.+*%()[]") / simple_escapes + anything)^0)
-
-function string.escapedpattern(str,simple)
- return match(simple and s or p,str)
-end
-
--- utf extensies
-
-lpeg.UP = lpeg.P
-
-if utfcharacters then
-
- function lpeg.US(str)
- local p
- for uc in utfcharacters(str) do
- if p then
- p = p + P(uc)
- else
- p = P(uc)
- end
- end
- return p
- end
-
-
-elseif utfgmatch then
-
- function lpeg.US(str)
- local p
- for uc in utfgmatch(str,".") do
- if p then
- p = p + P(uc)
- else
- p = P(uc)
- end
- end
- return p
- end
-
-else
-
- function lpeg.US(str)
- local p
- local f = function(uc)
- if p then
- p = p + P(uc)
- else
- p = P(uc)
- end
- end
- match((utf8char/f)^0,str)
- return p
- end
-
-end
-
-local range = Cs(utf8byte) * (Cs(utf8byte) + Cc(false))
-
-local utfchar = unicode and unicode.utf8 and unicode.utf8.char
-
-function lpeg.UR(str,more)
- local first, last
- if type(str) == "number" then
- first = str
- last = more or first
- else
- first, last = match(range,str)
- if not last then
- return P(str)
- end
- end
- if first == last then
- return P(str)
- elseif utfchar and last - first < 8 then -- a somewhat arbitrary criterium
- local p
- for i=first,last do
- if p then
- p = p + P(utfchar(i))
- else
- p = P(utfchar(i))
- end
- end
- return p -- nil when invalid range
- else
- local f = function(b)
- return b >= first and b <= last
- end
- return utf8byte / f -- nil when invalid range
- end
-end
-
-
-
-function lpeg.oneof(list,...) -- lpeg.oneof("elseif","else","if","then")
- if type(list) ~= "table" then
- list = { list, ... }
- end
- -- sort(list) -- longest match first
- local p = P(list[1])
- for l=2,#list do
- p = p + P(list[l])
- end
- return p
-end
-
-function lpeg.is_lpeg(p)
- return p and lpegtype(p) == "pattern"
-end
-
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
if not modules then modules = { } end modules ['l-table'] = {
version = 1.001,
comment = "companion to luat-lib.mkiv",
@@ -675,11 +172,12 @@ local type, next, tostring, tonumber, ipairs, table, string = type, next, tostri
local concat, sort, insert, remove = table.concat, table.sort, table.insert, table.remove
local format, find, gsub, lower, dump, match = string.format, string.find, string.gsub, string.lower, string.dump, string.match
local getmetatable, setmetatable = getmetatable, setmetatable
+local getinfo = debug.getinfo
-- Starting with version 5.2 Lua no longer provides ipairs, which makes
-- sense. As we already used the for loop and # in most places the
-- impact on ConTeXt was not that large; the remaining ipairs already
--- have been replaced. In a similar fashio we also hardly used pairs.
+-- have been replaced. In a similar fashion we also hardly used pairs.
--
-- Just in case, we provide the fallbacks as discussed in Programming
-- in Lua (http://www.lua.org/pil/7.3.html):
@@ -1022,6 +520,8 @@ end
-- problem: there is no good number_to_string converter with the best resolution
+local function dummy() end
+
local function do_serialize(root,name,depth,level,indexed)
if level > 0 then
depth = depth .. " "
@@ -1214,19 +714,20 @@ local function do_serialize(root,name,depth,level,indexed)
end
elseif t == "function" then
if functions then
+ local f = getinfo(v).what == "C" and dump(dummy) or dump(v)
+ -- local f = getinfo(v).what == "C" and dump(function(...) return v(...) end) or dump(v)
if tk == "number" then -- or find(k,"^%d+$") then
if hexify then
- handle(format("%s [0x%04X]=loadstring(%q),",depth,k,dump(v)))
+ handle(format("%s [0x%04X]=loadstring(%q),",depth,k,f))
else
- handle(format("%s [%s]=loadstring(%q),",depth,k,dump(v)))
+ handle(format("%s [%s]=loadstring(%q),",depth,k,f))
end
elseif tk == "boolean" then
- handle(format("%s [%s]=loadstring(%q),",depth,tostring(k),dump(v)))
+ handle(format("%s [%s]=loadstring(%q),",depth,tostring(k),f))
elseif noquotes and not reserved[k] and find(k,"^%a[%w%_]*$") then
- handle(format("%s %s=loadstring(%q),",depth,k,dump(v)))
+ handle(format("%s %s=loadstring(%q),",depth,k,f))
else
- -- handle(format("%s [%q]=loadstring(%q),",depth,k,dump(v)))
- handle(format("%s [%q]=loadstring(%q),",depth,k,debug.getinfo(v).what == "C" and "C code" or dump(v)))
+ handle(format("%s [%q]=loadstring(%q),",depth,k,f))
end
end
else
@@ -1571,6 +1072,730 @@ function table.has_one_entry(t)
return t and not next(t,next(t))
end
+-- new
+
+function table.loweredkeys(t) -- maybe utf
+ local l = { }
+ for k, v in next, t do
+ l[lower(k)] = v
+ end
+ return l
+end
+
+-- new, might move (maybe duplicate)
+
+function table.unique(old)
+ local hash = { }
+ local new = { }
+ local n = 0
+ for i=1,#old do
+ local oi = old[i]
+ if not hash[oi] then
+ n = n + 1
+ new[n] = oi
+ hash[oi] = true
+ end
+ end
+ return new
+end
+
+-- function table.sorted(t,...)
+-- table.sort(t,...)
+-- return t -- still sorts in-place
+-- end
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+if not modules then modules = { } end modules ['l-lpeg'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+
+-- a new lpeg fails on a #(1-P(":")) test and really needs a + P(-1)
+
+local lpeg = require("lpeg")
+
+-- tracing (only used when we encounter a problem in integration of lpeg in luatex)
+
+local report = texio and texio.write_nl or print
+
+
+
+
+local type = type
+local byte, char, gmatch = string.byte, string.char, string.gmatch
+
+-- Beware, we predefine a bunch of patterns here and one reason for doing so
+-- is that we get consistent behaviour in some of the visualizers.
+
+lpeg.patterns = lpeg.patterns or { } -- so that we can share
+local patterns = lpeg.patterns
+
+local P, R, S, V, match = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.match
+local Ct, C, Cs, Cc = lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc
+local lpegtype = lpeg.type
+
+local utfcharacters = string.utfcharacters
+local utfgmatch = unicode and unicode.utf8.gmatch
+
+local anything = P(1)
+local endofstring = P(-1)
+local alwaysmatched = P(true)
+
+patterns.anything = anything
+patterns.endofstring = endofstring
+patterns.beginofstring = alwaysmatched
+patterns.alwaysmatched = alwaysmatched
+
+local digit, sign = R('09'), S('+-')
+local cr, lf, crlf = P("\r"), P("\n"), P("\r\n")
+local newline = crlf + S("\r\n") -- cr + lf
+local escaped = P("\\") * anything
+local squote = P("'")
+local dquote = P('"')
+local space = P(" ")
+
+local utfbom_32_be = P('\000\000\254\255')
+local utfbom_32_le = P('\255\254\000\000')
+local utfbom_16_be = P('\255\254')
+local utfbom_16_le = P('\254\255')
+local utfbom_8 = P('\239\187\191')
+local utfbom = utfbom_32_be + utfbom_32_le
+ + utfbom_16_be + utfbom_16_le
+ + utfbom_8
+local utftype = utfbom_32_be / "utf-32-be" + utfbom_32_le / "utf-32-le"
+ + utfbom_16_be / "utf-16-be" + utfbom_16_le / "utf-16-le"
+ + utfbom_8 / "utf-8" + alwaysmatched / "unknown"
+
+local utf8next = R("\128\191")
+
+patterns.utf8one = R("\000\127")
+patterns.utf8two = R("\194\223") * utf8next
+patterns.utf8three = R("\224\239") * utf8next * utf8next
+patterns.utf8four = R("\240\244") * utf8next * utf8next * utf8next
+patterns.utfbom = utfbom
+patterns.utftype = utftype
+
+local utf8char = patterns.utf8one + patterns.utf8two + patterns.utf8three + patterns.utf8four
+local validutf8char = utf8char^0 * endofstring * Cc(true) + Cc(false)
+
+patterns.utf8 = utf8char
+patterns.utf8char = utf8char
+patterns.validutf8 = validutf8char
+patterns.validutf8char = validutf8char
+
+patterns.digit = digit
+patterns.sign = sign
+patterns.cardinal = sign^0 * digit^1
+patterns.integer = sign^0 * digit^1
+patterns.float = sign^0 * digit^0 * P('.') * digit^1
+patterns.cfloat = sign^0 * digit^0 * P(',') * digit^1
+patterns.number = patterns.float + patterns.integer
+patterns.cnumber = patterns.cfloat + patterns.integer
+patterns.oct = P("0") * R("07")^1
+patterns.octal = patterns.oct
+patterns.HEX = P("0x") * R("09","AF")^1
+patterns.hex = P("0x") * R("09","af")^1
+patterns.hexadecimal = P("0x") * R("09","AF","af")^1
+patterns.lowercase = R("az")
+patterns.uppercase = R("AZ")
+patterns.letter = patterns.lowercase + patterns.uppercase
+patterns.space = space
+patterns.tab = P("\t")
+patterns.spaceortab = patterns.space + patterns.tab
+patterns.eol = S("\n\r")
+patterns.spacer = S(" \t\f\v") -- + char(0xc2, 0xa0) if we want utf (cf mail roberto)
+patterns.newline = newline
+patterns.emptyline = newline^1
+patterns.nonspacer = 1 - patterns.spacer
+patterns.whitespace = patterns.eol + patterns.spacer
+patterns.nonwhitespace = 1 - patterns.whitespace
+patterns.equal = P("=")
+patterns.comma = P(",")
+patterns.commaspacer = P(",") * patterns.spacer^0
+patterns.period = P(".")
+patterns.colon = P(":")
+patterns.semicolon = P(";")
+patterns.underscore = P("_")
+patterns.escaped = escaped
+patterns.squote = squote
+patterns.dquote = dquote
+patterns.nosquote = (escaped + (1-squote))^0
+patterns.nodquote = (escaped + (1-dquote))^0
+patterns.unsingle = (squote/"") * patterns.nosquote * (squote/"")
+patterns.undouble = (dquote/"") * patterns.nodquote * (dquote/"")
+patterns.unquoted = patterns.undouble + patterns.unsingle -- more often undouble
+patterns.unspacer = ((patterns.spacer^1)/"")^0
+
+patterns.somecontent = (anything - newline - space)^1 -- (utf8char - newline - space)^1
+patterns.beginline = #(1-newline)
+
+-- print(string.unquoted("test"))
+-- print(string.unquoted([["t\"est"]]))
+-- print(string.unquoted([["t\"est"x]]))
+-- print(string.unquoted("\'test\'"))
+-- print(string.unquoted('"test"'))
+-- print(string.unquoted('"test"'))
+
+function lpeg.anywhere(pattern) --slightly adapted from website
+ return P { P(pattern) + 1 * V(1) } -- why so complex?
+end
+
+function lpeg.splitter(pattern, action)
+ return (((1-P(pattern))^1)/action+1)^0
+end
+
+function lpeg.tsplitter(pattern, action)
+ return Ct((((1-P(pattern))^1)/action+1)^0)
+end
+
+-- problem: the separator can be an lpeg pattern and that does not hash too well, but
+-- it's quite okay as the key is then not garbage collected
+
+local splitters_s, splitters_m, splitters_t = { }, { }, { }
+
+local function splitat(separator,single)
+ local splitter = (single and splitters_s[separator]) or splitters_m[separator]
+ if not splitter then
+ separator = P(separator)
+ local other = C((1 - separator)^0)
+ if single then
+ local any = anything
+ splitter = other * (separator * C(any^0) + "") -- ?
+ splitters_s[separator] = splitter
+ else
+ splitter = other * (separator * other)^0
+ splitters_m[separator] = splitter
+ end
+ end
+ return splitter
+end
+
+local function tsplitat(separator)
+ local splitter = splitters_t[separator]
+ if not splitter then
+ splitter = Ct(splitat(separator))
+ splitters_t[separator] = splitter
+ end
+ return splitter
+end
+
+lpeg.splitat = splitat
+lpeg.tsplitat = tsplitat
+
+function string.splitup(str,separator)
+ if not separator then
+ separator = ","
+ end
+ return match(splitters_m[separator] or splitat(separator),str)
+end
+
+
+local cache = { }
+
+function lpeg.split(separator,str)
+ local c = cache[separator]
+ if not c then
+ c = tsplitat(separator)
+ cache[separator] = c
+ end
+ return match(c,str)
+end
+
+function string.split(str,separator)
+ local c = cache[separator]
+ if not c then
+ c = tsplitat(separator)
+ cache[separator] = c
+ end
+ return match(c,str)
+end
+
+local spacing = patterns.spacer^0 * newline -- sort of strip
+local empty = spacing * Cc("")
+local nonempty = Cs((1-spacing)^1) * spacing^-1
+local content = (empty + nonempty)^1
+
+patterns.textline = content
+
+
+local linesplitter = tsplitat(newline)
+
+patterns.linesplitter = linesplitter
+
+function string.splitlines(str)
+ return match(linesplitter,str)
+end
+
+local utflinesplitter = utfbom^-1 * tsplitat(newline)
+
+patterns.utflinesplitter = utflinesplitter
+
+function string.utfsplitlines(str)
+ return match(utflinesplitter,str or "")
+end
+
+
+local cache = { }
+
+function lpeg.checkedsplit(separator,str)
+ local c = cache[separator]
+ if not c then
+ separator = P(separator)
+ local other = C((1 - separator)^1)
+ c = Ct(separator^0 * other * (separator^1 * other)^0)
+ cache[separator] = c
+ end
+ return match(c,str)
+end
+
+function string.checkedsplit(str,separator)
+ local c = cache[separator]
+ if not c then
+ separator = P(separator)
+ local other = C((1 - separator)^1)
+ c = Ct(separator^0 * other * (separator^1 * other)^0)
+ cache[separator] = c
+ end
+ return match(c,str)
+end
+
+
+local function f2(s) local c1, c2 = byte(s,1,2) return c1 * 64 + c2 - 12416 end
+local function f3(s) local c1, c2, c3 = byte(s,1,3) return (c1 * 64 + c2) * 64 + c3 - 925824 end
+local function f4(s) local c1, c2, c3, c4 = byte(s,1,4) return ((c1 * 64 + c2) * 64 + c3) * 64 + c4 - 63447168 end
+
+local utf8byte = patterns.utf8one/byte + patterns.utf8two/f2 + patterns.utf8three/f3 + patterns.utf8four/f4
+
+patterns.utf8byte = utf8byte
+
+
+
+local cache = { }
+
+function lpeg.stripper(str)
+ if type(str) == "string" then
+ local s = cache[str]
+ if not s then
+ s = Cs(((S(str)^1)/"" + 1)^0)
+ cache[str] = s
+ end
+ return s
+ else
+ return Cs(((str^1)/"" + 1)^0)
+ end
+end
+
+local cache = { }
+
+function lpeg.keeper(str)
+ if type(str) == "string" then
+ local s = cache[str]
+ if not s then
+ s = Cs((((1-S(str))^1)/"" + 1)^0)
+ cache[str] = s
+ end
+ return s
+ else
+ return Cs((((1-str)^1)/"" + 1)^0)
+ end
+end
+
+function lpeg.frontstripper(str) -- or pattern (yet undocumented)
+ return (P(str) + P(true)) * Cs(P(1)^0)
+end
+
+function lpeg.endstripper(str) -- or pattern (yet undocumented)
+ return Cs((1 - P(str) * P(-1))^0)
+end
+
+-- Just for fun I looked at the used bytecode and
+-- p = (p and p + pp) or pp gets one more (testset).
+
+function lpeg.replacer(one,two)
+ if type(one) == "table" then
+ local no = #one
+ if no > 0 then
+ local p
+ for i=1,no do
+ local o = one[i]
+ local pp = P(o[1]) / o[2]
+ if p then
+ p = p + pp
+ else
+ p = pp
+ end
+ end
+ return Cs((p + 1)^0)
+ end
+ else
+ two = two or ""
+ return Cs((P(one)/two + 1)^0)
+ end
+end
+
+local splitters_f, splitters_s = { }, { }
+
+function lpeg.firstofsplit(separator) -- always return value
+ local splitter = splitters_f[separator]
+ if not splitter then
+ separator = P(separator)
+ splitter = C((1 - separator)^0)
+ splitters_f[separator] = splitter
+ end
+ return splitter
+end
+
+function lpeg.secondofsplit(separator) -- nil if not split
+ local splitter = splitters_s[separator]
+ if not splitter then
+ separator = P(separator)
+ splitter = (1 - separator)^0 * separator * C(anything^0)
+ splitters_s[separator] = splitter
+ end
+ return splitter
+end
+
+function lpeg.balancer(left,right)
+ left, right = P(left), P(right)
+ return P { left * ((1 - left - right) + V(1))^0 * right }
+end
+
+
+
+local nany = utf8char/""
+
+function lpeg.counter(pattern)
+ pattern = Cs((P(pattern)/" " + nany)^0)
+ return function(str)
+ return #match(pattern,str)
+ end
+end
+
+if utfgmatch then
+
+ function lpeg.count(str,what) -- replaces string.count
+ if type(what) == "string" then
+ local n = 0
+ for _ in utfgmatch(str,what) do
+ n = n + 1
+ end
+ return n
+ else -- 4 times slower but still faster than / function
+ return #match(Cs((P(what)/" " + nany)^0),str)
+ end
+ end
+
+else
+
+ local cache = { }
+
+ function lpeg.count(str,what) -- replaces string.count
+ if type(what) == "string" then
+ local p = cache[what]
+ if not p then
+ p = Cs((P(what)/" " + nany)^0)
+ cache[p] = p
+ end
+ return #match(p,str)
+ else -- 4 times slower but still faster than / function
+ return #match(Cs((P(what)/" " + nany)^0),str)
+ end
+ end
+
+end
+
+local patterns_escapes = { -- also defined in l-string
+ ["%"] = "%%",
+ ["."] = "%.",
+ ["+"] = "%+", ["-"] = "%-", ["*"] = "%*",
+ ["["] = "%[", ["]"] = "%]",
+    ["("] = "%(", [")"] = "%)",
+ -- ["{"] = "%{", ["}"] = "%}"
+ -- ["^"] = "%^", ["$"] = "%$",
+}
+
+local simple_escapes = { -- also defined in l-string
+ ["-"] = "%-",
+ ["."] = "%.",
+ ["?"] = ".",
+ ["*"] = ".*",
+}
+
+local p = Cs((S("-.+*%()[]") / patterns_escapes + anything)^0)
+local s = Cs((S("-.+*%()[]") / simple_escapes + anything)^0)
+
+function string.escapedpattern(str,simple)
+ return match(simple and s or p,str)
+end
+
+-- utf extensions
+
+lpeg.UP = lpeg.P
+
+if utfcharacters then
+
+ function lpeg.US(str)
+ local p
+ for uc in utfcharacters(str) do
+ if p then
+ p = p + P(uc)
+ else
+ p = P(uc)
+ end
+ end
+ return p
+ end
+
+
+elseif utfgmatch then
+
+ function lpeg.US(str)
+ local p
+ for uc in utfgmatch(str,".") do
+ if p then
+ p = p + P(uc)
+ else
+ p = P(uc)
+ end
+ end
+ return p
+ end
+
+else
+
+ function lpeg.US(str)
+ local p
+ local f = function(uc)
+ if p then
+ p = p + P(uc)
+ else
+ p = P(uc)
+ end
+ end
+ match((utf8char/f)^0,str)
+ return p
+ end
+
+end
+
+local range = Cs(utf8byte) * (Cs(utf8byte) + Cc(false))
+
+local utfchar = unicode and unicode.utf8 and unicode.utf8.char
+
+function lpeg.UR(str,more)
+ local first, last
+ if type(str) == "number" then
+ first = str
+ last = more or first
+ else
+ first, last = match(range,str)
+ if not last then
+ return P(str)
+ end
+ end
+ if first == last then
+ return P(str)
+    elseif utfchar and last - first < 8 then -- a somewhat arbitrary criterion
+ local p
+ for i=first,last do
+ if p then
+ p = p + P(utfchar(i))
+ else
+ p = P(utfchar(i))
+ end
+ end
+ return p -- nil when invalid range
+ else
+ local f = function(b)
+ return b >= first and b <= last
+ end
+ return utf8byte / f -- nil when invalid range
+ end
+end
+
+
+
+function lpeg.oneof(list,...) -- lpeg.oneof("elseif","else","if","then")
+ if type(list) ~= "table" then
+ list = { list, ... }
+ end
+ -- sort(list) -- longest match first
+ local p = P(list[1])
+ for l=2,#list do
+ p = p + P(list[l])
+ end
+ return p
+end
+
+function lpeg.is_lpeg(p)
+ return p and lpegtype(p) == "pattern"
+end
+
+-- For the moment here, but it might move to utilities. Beware, we need to
+-- have the longest keyword first, so 'aaa' comes before 'aa', which is why we
+-- loop back from the end, i.e. prepend.
+
+local sort, fastcopy, sortedkeys = table.sort, table.fastcopy, table.sortedkeys -- dependency!
+
+function lpeg.append(list,pp,delayed,checked)
+ local p = pp
+ if #list > 0 then
+ local keys = fastcopy(list)
+ sort(keys)
+ for i=#keys,1,-1 do
+ local k = keys[i]
+ if p then
+ p = P(k) + p
+ else
+ p = P(k)
+ end
+ end
+ elseif delayed then -- hm, it looks like the lpeg parser resolves anyway
+ local keys = sortedkeys(list)
+ if p then
+ for i=1,#keys,1 do
+ local k = keys[i]
+ local v = list[k]
+ p = P(k)/list + p
+ end
+ else
+ for i=1,#keys do
+ local k = keys[i]
+ local v = list[k]
+ if p then
+ p = P(k) + p
+ else
+ p = P(k)
+ end
+ end
+ if p then
+ p = p / list
+ end
+ end
+ elseif checked then
+ -- problem: substitution gives a capture
+ local keys = sortedkeys(list)
+ for i=1,#keys do
+ local k = keys[i]
+ local v = list[k]
+ if p then
+ if k == v then
+ p = P(k) + p
+ else
+ p = P(k)/v + p
+ end
+ else
+ if k == v then
+ p = P(k)
+ else
+ p = P(k)/v
+ end
+ end
+ end
+ else
+ local keys = sortedkeys(list)
+ for i=1,#keys do
+ local k = keys[i]
+ local v = list[k]
+ if p then
+ p = P(k)/v + p
+ else
+ p = P(k)/v
+ end
+ end
+ end
+ return p
+end
+
+-- inspect(lpeg.append({ a = "1", aa = "1", aaa = "1" } ,nil,true))
+-- inspect(lpeg.append({ ["degree celsius"] = "1", celsius = "1", degree = "1" } ,nil,true))
+
+-- function lpeg.exact_match(words,case_insensitive)
+-- local pattern = concat(words)
+-- if case_insensitive then
+-- local pattern = S(upper(characters)) + S(lower(characters))
+-- local list = { }
+-- for i=1,#words do
+-- list[lower(words[i])] = true
+-- end
+-- return Cmt(pattern^1, function(_,i,s)
+-- return list[lower(s)] and i
+-- end)
+-- else
+-- local pattern = S(concat(words))
+-- local list = { }
+-- for i=1,#words do
+-- list[words[i]] = true
+-- end
+-- return Cmt(pattern^1, function(_,i,s)
+-- return list[s] and i
+-- end)
+-- end
+-- end
+
+-- experiment:
+
+local function make(t)
+ local p
+-- for k, v in next, t do
+ for k, v in table.sortedhash(t) do
+ if not p then
+ if next(v) then
+ p = P(k) * make(v)
+ else
+ p = P(k)
+ end
+ else
+ if next(v) then
+ p = p + P(k) * make(v)
+ else
+ p = p + P(k)
+ end
+ end
+ end
+ return p
+end
+
+function lpeg.utfchartabletopattern(list)
+ local tree = { }
+ for i=1,#list do
+ local t = tree
+ for c in gmatch(list[i],".") do
+ if not t[c] then
+ t[c] = { }
+ end
+ t = t[c]
+ end
+ end
+ return make(tree)
+end
+
+-- inspect ( lpeg.utfchartabletopattern {
+-- utfchar(0x00A0), -- nbsp
+-- utfchar(0x2000), -- enquad
+-- utfchar(0x2001), -- emquad
+-- utfchar(0x2002), -- enspace
+-- utfchar(0x2003), -- emspace
+-- utfchar(0x2004), -- threeperemspace
+-- utfchar(0x2005), -- fourperemspace
+-- utfchar(0x2006), -- sixperemspace
+-- utfchar(0x2007), -- figurespace
+-- utfchar(0x2008), -- punctuationspace
+-- utfchar(0x2009), -- breakablethinspace
+-- utfchar(0x200A), -- hairspace
+-- utfchar(0x200B), -- zerowidthspace
+-- utfchar(0x202F), -- narrownobreakspace
+-- utfchar(0x205F), -- math thinspace
+-- } )
+
end -- of closure
@@ -1831,6 +2056,11 @@ function io.readstring(f,n,m)
return str
end
+--
+
+if not io.i_limiter then function io.i_limiter() end end -- dummy so we can test safely
+if not io.o_limiter then function io.o_limiter() end end -- dummy so we can test safely
+
end -- of closure
@@ -2061,7 +2291,7 @@ if not modules then modules = { } end modules ['l-os'] = {
-- maybe build io.flush in os.execute
local os = os
-local find, format, gsub, upper = string.find, string.format, string.gsub, string.upper
+local find, format, gsub, upper, gmatch = string.find, string.format, string.gsub, string.upper, string.gmatch
local concat = table.concat
local random, ceil = math.random, math.ceil
local rawget, rawset, type, getmetatable, setmetatable, tonumber = rawget, rawset, type, getmetatable, setmetatable, tonumber
@@ -2167,9 +2397,9 @@ os.type = os.type or (io.pathseparator == ";" and "windows") or "unix"
os.name = os.name or (os.type == "windows" and "mswin" ) or "linux"
if os.type == "windows" then
- os.libsuffix, os.binsuffix = 'dll', 'exe'
+ os.libsuffix, os.binsuffix, os.binsuffixes = 'dll', 'exe', { 'exe', 'cmd', 'bat' }
else
- os.libsuffix, os.binsuffix = 'so', ''
+ os.libsuffix, os.binsuffix, os.binsuffixes = 'so', '', { '' }
end
function os.launch(str)
@@ -2400,6 +2630,39 @@ function os.timezone(delta)
end
end
+local memory = { }
+
+local function which(filename)
+ local fullname = memory[filename]
+ if fullname == nil then
+ local suffix = file.suffix(filename)
+ local suffixes = suffix == "" and os.binsuffixes or { suffix }
+ for directory in gmatch(os.getenv("PATH"),"[^" .. io.pathseparator .."]+") do
+ local df = file.join(directory,filename)
+ for i=1,#suffixes do
+ local dfs = file.addsuffix(df,suffixes[i])
+ if io.exists(dfs) then
+ fullname = dfs
+ break
+ end
+ end
+ if fullname then
+ break -- honour the order of PATH: the first match wins
+ end
+ end
+ if not fullname then
+ fullname = false
+ end
+ memory[filename] = fullname
+ end
+ return fullname
+end
+
+os.which = which
+os.where = which
+
+-- print(os.which("inkscape.exe"))
+-- print(os.which("inkscape"))
+-- print(os.which("gs.exe"))
+-- print(os.which("ps2pdf"))
+
end -- of closure
@@ -2499,10 +2762,10 @@ end
local trick_1 = char(1)
local trick_2 = "^" .. trick_1 .. "/+"
-function file.join(...)
+function file.join(...) -- rather dirty
local lst = { ... }
local a, b = lst[1], lst[2]
- if a == "" then
+ if not a or a == "" then -- the 'not a' check is new
lst[1] = trick_1
elseif b and find(a,"^/+$") and find(b,"^/") then
lst[1] = ""
@@ -2523,6 +2786,15 @@ function file.join(...)
end
+-- We should be able to use:
+--
+-- function file.is_writable(name)
+-- local a = attributes(name) or attributes(dirname(name,"."))
+-- return a and sub(a.permissions,2,2) == "w"
+-- end
+--
+-- But after some testing Taco and I came up with:
+
function file.is_writable(name)
if lfs.isdir(name) then
name = name .. "/m_t_x_t_e_s_t.tmp"
@@ -2532,12 +2804,17 @@ function file.is_writable(name)
os.remove(name)
return true
end
- else
- local existing = lfs.isfile(name)
- f = io.open(name,"ab")
+ elseif lfs.isfile(name) then
+ local f = io.open(name,"ab")
if f then
f:close()
- if not existing then os.remove(name) end
+ return true
+ end
+ else
+ local f = io.open(name,"ab")
+ if f then
+ f:close()
+ os.remove(name)
return true
end
end
@@ -2594,7 +2871,7 @@ function file.collapsepath(str,anchor)
if element == '.' then
-- do nothing
elseif element == '..' then
- local n = i -1
+ local n = i - 1
while n > 0 do
local element = oldelements[n]
if element ~= '..' and element ~= '.' then
@@ -2690,10 +2967,42 @@ local path = C(((1-slash)^0 * slash)^0)
local suffix = period * C(P(1-period)^0 * P(-1))
local base = C((1-suffix)^0)
-local pattern = (drive + Cc("")) * (path + Cc("")) * (base + Cc("")) * (suffix + Cc(""))
+drive = drive + Cc("")
+path = path + Cc("")
+base = base + Cc("")
+suffix = suffix + Cc("")
-function file.splitname(str) -- returns drive, path, base, suffix
- return lpegmatch(pattern,str)
+local pattern_a = drive * path * base * suffix
+local pattern_b = path * base * suffix
+local pattern_c = C(drive * path) * C(base * suffix)
+
+function file.splitname(str,splitdrive)
+ if splitdrive then
+ return lpegmatch(pattern_a,str) -- returns drive, path, base, suffix
+ else
+ return lpegmatch(pattern_b,str) -- returns path, base, suffix
+ end
+end
+
+function file.nametotable(str,splitdrive) -- returns table
+ local path, drive, subpath, name, base, suffix = lpegmatch(pattern_c,str)
+ if splitdrive then
+ return {
+ path = path,
+ drive = drive,
+ subpath = subpath,
+ name = name,
+ base = base,
+ suffix = suffix,
+ }
+ else
+ return {
+ path = path,
+ name = name,
+ base = base,
+ suffix = suffix,
+ }
+ end
end
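+
+-- for instance (made-up name):
+--
+-- file.splitname("a/b/c.tex")            -- "a/b/"  "c"  "tex"
+-- inspect(file.nametotable("a/b/c.tex")) -- { path = "a/b/", name = "c.tex", base = "c", suffix = "tex" }
+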
-- function test(t) for k, v in next, t do print(v, "=>", file.splitname(v)) end end
@@ -2828,12 +3137,19 @@ local nothing = Cc("")
local escaped = (plus / " ") + (percent * C(hexdigit * hexdigit) / tochar)
-- we assume schemes with more than 1 character (in order to avoid problems with windows disks)
+-- we also assume that when we have a scheme, we also have an authority
+
+local schemestr = Cs((escaped+(1-colon-slash-qmark-hash))^2)
+local authoritystr = Cs((escaped+(1- slash-qmark-hash))^0)
+local pathstr = Cs((escaped+(1- qmark-hash))^0)
+local querystr = Cs((escaped+(1- hash))^0)
+local fragmentstr = Cs((escaped+(1- endofstring))^0)
-local scheme = Cs((escaped+(1-colon-slash-qmark-hash))^2) * colon * slash * slash + nothing
-local authority = Cs((escaped+(1- slash-qmark-hash))^0) + nothing
-local path = slash * Cs((escaped+(1- qmark-hash))^0) + nothing
-local query = qmark * Cs((escaped+(1- hash))^0) + nothing
-local fragment = hash * Cs((escaped+(1- endofstring))^0) + nothing
+local scheme = schemestr * colon + nothing
+local authority = slash * slash * authoritystr + nothing
+local path = slash * pathstr + nothing
+local query = qmark * querystr + nothing
+local fragment = hash * fragmentstr + nothing
local validurl = scheme * authority * path * query * fragment
local parser = Ct(validurl)
@@ -2854,11 +3170,14 @@ local function split(str)
return (type(str) == "string" and lpegmatch(parser,str)) or str
end
+local isscheme = schemestr * colon * slash * slash -- this test also assumes authority
+
local function hasscheme(str)
- local scheme = lpegmatch(scheme,str) -- at least one character
- return scheme and scheme ~= ""
+ local scheme = lpegmatch(isscheme,str) -- at least one character
+ return scheme ~= "" and scheme or false
end
+
-- todo: cache them
local rootletter = R("az","AZ")
@@ -3007,8 +3326,6 @@ local attributes = lfs.attributes
local walkdir = lfs.dir
local isdir = lfs.isdir
local isfile = lfs.isfile
-local mkdir = lfs.mkdir
-local chdir = lfs.chdir
local currentdir = lfs.currentdir
-- handy
@@ -3056,13 +3373,13 @@ local function collectpattern(path,patt,recurse,result)
local ok, scanner
result = result or { }
if path == "/" then
- ok, scanner = xpcall(function() return walkdir(path..".") end, function() end) -- kepler safe
+ ok, scanner, first = xpcall(function() return walkdir(path..".") end, function() end) -- kepler safe
else
- ok, scanner = xpcall(function() return walkdir(path) end, function() end) -- kepler safe
+ ok, scanner, first = xpcall(function() return walkdir(path) end, function() end) -- kepler safe
end
if ok and type(scanner) == "function" then
if not find(path,"/$") then path = path .. '/' end
- for name in scanner do
+ for name in scanner, first do
local full = path .. name
local attr = attributes(full)
local mode = attr.mode
@@ -3245,7 +3562,7 @@ if onwindows then
pth = pth .. "/" .. s
end
if make_indeed and not isdir(pth) then
- mkdir(pth)
+ lfs.mkdir(pth)
end
end
return pth, (isdir(pth) == true)
@@ -3277,7 +3594,7 @@ else
pth = pth .. "/" .. s
end
if make_indeed and not first and not isdir(pth) then
- mkdir(pth)
+ lfs.mkdir(pth)
end
end
else
@@ -3285,7 +3602,7 @@ else
for s in gmatch(str,"[^/]+") do
pth = pth .. "/" .. s
if make_indeed and not isdir(pth) then
- mkdir(pth)
+ lfs.mkdir(pth)
end
end
end
@@ -3313,10 +3630,10 @@ if onwindows then
first, last = match(str,"^([a-zA-Z]:)(.*)$")
if first and not find(last,"^/") then
local d = currentdir()
- if chdir(first) then
+ if lfs.chdir(first) then
first = dir.current()
end
- chdir(d)
+ lfs.chdir(d)
end
end
if not first then
@@ -3381,10 +3698,6 @@ local type, tonumber = type, tonumber
boolean = boolean or { }
local boolean = boolean
--- function boolean.tonumber(b)
--- return b and 1 or 0 -- test and test and return or return
--- end
-
function boolean.tonumber(b)
if b then return 1 else return 0 end -- test and return or return
end
@@ -3545,7 +3858,7 @@ local function utf16_to_utf8_be(t)
if right then
local now = 256*left + right
if more > 0 then
- now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000
+ now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000 -- the 0x10000 smells wrong
more = 0
r = r + 1
result[r] = utfchar(now)
@@ -3573,7 +3886,7 @@ local function utf16_to_utf8_le(t)
if right then
local now = 256*right + left
if more > 0 then
- now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000
+ now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000 -- the 0x10000 smells wrong
more = 0
r = r + 1
result[r] = utfchar(now)
@@ -3590,14 +3903,14 @@ local function utf16_to_utf8_le(t)
return t
end
-local function utf32_to_utf8_be(str)
+local function utf32_to_utf8_be(t)
if type(t) == "string" then
- t = utfsplitlines(str)
+ t = utfsplitlines(t)
end
local result = { } -- we reuse result
for i=1,#t do
local r, more = 0, -1
- for a,b in bytepairs(str) do
+ for a,b in bytepairs(t[i]) do
if a and b then
if more < 0 then
more = 256*256*256*a + 256*256*b
@@ -3612,17 +3925,17 @@ local function utf32_to_utf8_be(str)
end
t[i] = concat(result,"",1,r)
end
- return result
+ return t
end
-local function utf32_to_utf8_le(str)
+local function utf32_to_utf8_le(t)
if type(t) == "string" then
- t = utfsplitlines(str)
+ t = utfsplitlines(t)
end
local result = { } -- we reuse result
for i=1,#t do
local r, more = 0, -1
- for a,b in bytepairs(str) do
+ for a,b in bytepairs(t[i]) do
if a and b then
if more < 0 then
more = 256*b + a
@@ -3637,7 +3950,7 @@ local function utf32_to_utf8_le(str)
end
t[i] = concat(result,"",1,r)
end
- return result
+ return t
end
unicode.utf32_to_utf8_be = utf32_to_utf8_be
@@ -3706,15 +4019,67 @@ end
local lpegmatch = lpeg.match
-local utftype = lpeg.patterns.utftype
+local patterns = lpeg.patterns
+local utftype = patterns.utftype
function unicode.filetype(data)
return data and lpegmatch(utftype,data) or "unknown"
end
+local toentities = lpeg.Cs (
+ (
+ patterns.utf8one
+ + (
+ patterns.utf8two
+ + patterns.utf8three
+ + patterns.utf8four
+ ) / function(s) local b = utfbyte(s) if b < 127 then return s else return format("&#%X;",b) end end
+ )^0
+)
+
+patterns.toentities = toentities
+
+function utf.toentities(str)
+ return lpegmatch(toentities,str)
+end
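+
+-- for instance (made-up value):
+--
+-- utf.toentities("café") -- "caf&#E9;"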
+
+local P, C, R, Cs = lpeg.P, lpeg.C, lpeg.R, lpeg.Cs
+
+local one = P(1)
+local two = C(1) * C(1)
+local four = C(R(utfchar(0xD8),utfchar(0xFF))) * C(1) * C(1) * C(1)
+
+local pattern = P("\254\255") * Cs( (
+ four / function(a,b,c,d)
+ local ab = 0xFF * byte(a) + byte(b)
+ local cd = 0xFF * byte(c) + byte(d)
+ return utfchar((ab-0xD800)*0x400 + (cd-0xDC00) + 0x10000)
+ end
+ + two / function(a,b)
+ return utfchar(byte(a)*256 + byte(b))
+ end
+ + one
+ )^1 )
+ + P("\255\254") * Cs( (
+ four / function(b,a,d,c)
+ local ab = 0xFF * byte(a) + byte(b)
+ local cd = 0xFF * byte(c) + byte(d)
+ return utfchar((ab-0xD800)*0x400 + (cd-0xDC00) + 0x10000)
+ end
+ + two / function(b,a)
+ return utfchar(byte(a)*256 + byte(b))
+ end
+ + one
+ )^1 )
+
+function string.toutf(s)
+ return lpegmatch(pattern,s) or s -- todo: utf32
+end
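+
+-- for instance (a made-up, big endian utf-16 string with bom):
+--
+-- string.toutf("\254\255\000A") -- "A"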
+
+
end -- of closure
do -- create closure to overcome 200 locals limit
@@ -3750,8 +4115,8 @@ if not math.sind then
end
if not math.odd then
- function math.odd (n) return n % 2 == 0 end
- function math.even(n) return n % 2 ~= 0 end
+ function math.odd (n) return n % 2 ~= 0 end
+ function math.even(n) return n % 2 == 0 end
end
@@ -3771,9 +4136,10 @@ utilities = utilities or {}
utilities.tables = utilities.tables or { }
local tables = utilities.tables
-local format, gmatch = string.format, string.gmatch
+local format, gmatch, rep = string.format, string.gmatch, string.rep
local concat, insert, remove = table.concat, table.insert, table.remove
local setmetatable, getmetatable, tonumber, tostring = setmetatable, getmetatable, tonumber, tostring
+local type, next, rawset, tonumber = type, next, rawset, tonumber
function tables.definetable(target) -- defines undefined tables
local composed, t, n = nil, { }, 0
@@ -3789,14 +4155,31 @@ function tables.definetable(target) -- defines undefined tables
return concat(t,"\n")
end
-function tables.accesstable(target)
- local t = _G
+function tables.accesstable(target,root)
+ local t = root or _G
for name in gmatch(target,"([^%.]+)") do
t = t[name]
+ if not t then
+ return
+ end
end
return t
end
+function tables.migratetable(target,v,root)
+ local t = root or _G
+ local names = string.split(target,".")
+ for i=1,#names-1 do
+ local name = names[i]
+ t[name] = t[name] or { }
+ t = t[name]
+ if not t then
+ return
+ end
+ end
+ t[names[#names]] = v
+end
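+
+-- for instance (made-up names):
+--
+-- tables.migratetable("a.b.c",23) -- creates a.b when needed and sets a.b.c to 23
+-- tables.accesstable("a.b.c")     -- 23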
+
function tables.removevalue(t,value) -- todo: n
if value then
for i=1,#t do
@@ -3840,13 +4223,19 @@ end
-- experimental
-local function toxml(t,d,result)
+local function toxml(t,d,result,step)
for k, v in table.sortedpairs(t) do
if type(v) == "table" then
- result[#result+1] = format("%s<%s>",d,k)
- toxml(v,d.." ",result)
- result[#result+1] = format("%s</%s>",d,k)
- elseif tonumber(k) then
+ if type(k) == "number" then
+ result[#result+1] = format("%s<entry n='%s'>",d,k)
+ toxml(v,d..step,result,step)
+ result[#result+1] = format("%s</entry>",d,k)
+ else
+ result[#result+1] = format("%s<%s>",d,k)
+ toxml(v,d..step,result,step)
+ result[#result+1] = format("%s</%s>",d,k)
+ end
+ elseif type(k) == "number" then
result[#result+1] = format("%s<entry n='%s'>%s</entry>",d,k,v,k)
else
result[#result+1] = format("%s<%s>%s</%s>",d,k,tostring(v),k)
@@ -3854,17 +4243,56 @@ local function toxml(t,d,result)
end
end
-function table.toxml(t,name,nobanner)
+function table.toxml(t,name,nobanner,indent,spaces)
local noroot = name == false
local result = (nobanner or noroot) and { } or { "<?xml version='1.0' standalone='yes' ?>" }
+ local indent = rep(" ",indent or 0)
+ local spaces = rep(" ",spaces or 1)
if noroot then
- toxml( t, "", result)
+ toxml( t, indent, result, spaces)
else
- toxml( { [name or "root"] = t }, "", result)
+ toxml( { [name or "root"] = t }, indent, result, spaces)
end
return concat(result,"\n")
end
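+
+-- for instance (made-up table):
+--
+-- print(table.toxml({ b = "x" },"test"))
+--
+-- <?xml version='1.0' standalone='yes' ?>
+-- <test>
+--  <b>x</b>
+-- </test>
+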
+-- also experimental
+
+-- encapsulate(table,utilities.tables)
+-- encapsulate(table,utilities.tables,true)
+-- encapsulate(table,true)
+
+function tables.encapsulate(core,capsule,protect)
+ if type(capsule) ~= "table" then
+ protect = true
+ capsule = { }
+ end
+ for key, value in next, core do
+ if capsule[key] then
+ print(format("\ninvalid inheritance '%s' in '%s': %s",key,tostring(core)))
+ os.exit()
+ else
+ capsule[key] = value
+ end
+ end
+ if protect then
+ for key, value in next, core do
+ core[key] = nil
+ end
+ setmetatable(core, {
+ __index = capsule,
+ __newindex = function(t,key,value)
+ if capsule[key] then
+ print(format("\ninvalid overload '%s' in '%s'",key,tostring(core)))
+ os.exit()
+ else
+ rawset(t,key,value)
+ end
+ end
+ } )
+ end
+end
+
end -- of closure
@@ -3886,8 +4314,8 @@ local storage = utilities.storage
function storage.mark(t)
if not t then
- texio.write_nl("fatal error: storage '%s' cannot be marked",t)
- os.exit()
+ texio.write_nl("fatal error: storage cannot be marked")
+ return -- os.exit()
end
local m = getmetatable(t)
if not m then
@@ -3916,8 +4344,8 @@ end
function storage.checked(t)
if not t then
- texio.write_nl("fatal error: storage '%s' has not been allocated",t)
- os.exit()
+ texio.write_nl("fatal error: storage has not been allocated")
+ return -- os.exit()
end
return t
end
@@ -3946,13 +4374,13 @@ end
-- table namespace ?
-local function f_empty () return "" end -- t,k
-local function f_self (t,k) t[k] = k return k end
-local function f_ignore() end -- t,k,v
+local function f_empty () return "" end -- t,k
+local function f_self (t,k) t[k] = k return k end
+local function f_ignore() end -- t,k,v
-local t_empty = { __index = empty }
-local t_self = { __index = self }
-local t_ignore = { __newindex = ignore }
+local t_empty = { __index = f_empty }
+local t_self = { __index = f_self }
+local t_ignore = { __newindex = f_ignore }
function table.setmetatableindex(t,f)
local m = getmetatable(t)
@@ -3973,6 +4401,7 @@ function table.setmetatableindex(t,f)
setmetatable(t,{ __index = f })
end
end
+ return t
end
function table.setmetatablenewindex(t,f)
@@ -3990,6 +4419,7 @@ function table.setmetatablenewindex(t,f)
setmetatable(t,{ __newindex = f })
end
end
+ return t
end
function table.setmetatablecall(t,f)
@@ -3999,6 +4429,7 @@ function table.setmetatablecall(t,f)
else
setmetatable(t,{ __call = f })
end
+ return t
end
function table.setmetatablekey(t,key,value)
@@ -4008,6 +4439,7 @@ function table.setmetatablekey(t,key,value)
setmetatable(t,m)
end
m[key] = value
+ return t
end
function table.getmetatablekey(t,key,value)
@@ -4176,14 +4608,29 @@ utilities = utilities or {}
utilities.lua = utilities.lua or { }
utilities.report = logs and logs.reporter("system") or print
-function utilities.lua.compile(luafile,lucfile,cleanup,strip) -- defaults: cleanup=false strip=true
+local function stupidcompile(luafile,lucfile)
+ local data = io.loaddata(luafile)
+ if data and data ~= "" then
+ local chunk = loadstring(data) -- string.dump expects a function, not the source string
+ data = chunk and string.dump(chunk)
+ if data and data ~= "" then
+ io.savedata(lucfile,data)
+ end
+ end
+end
+
+function utilities.lua.compile(luafile,lucfile,cleanup,strip,fallback) -- defaults: cleanup=false strip=true
utilities.report("lua: compiling %s into %s",luafile,lucfile)
os.remove(lucfile)
local command = "-o " .. string.quoted(lucfile) .. " " .. string.quoted(luafile)
if strip ~= false then
command = "-s " .. command
end
- local done = os.spawn("texluac " .. command) == 0 or os.spawn("luac " .. command) == 0
+ local done = os.spawn("texluac " .. command) == 0 -- or os.spawn("luac " .. command) == 0
+ if not done and fallback then
+ utilities.report("lua: dumping %s into %s (unstripped)",luafile,lucfile)
+ stupidcompile(luafile,lucfile) -- maybe use the stripper we have elsewhere
+ cleanup = false -- better to see how much worse it is
+ end
if done and cleanup == true and lfs.isfile(lucfile) and lfs.isfile(luafile) then
utilities.report("lua: removing %s",luafile)
os.remove(luafile)
@@ -4210,9 +4657,9 @@ if not modules then modules = { } end modules ['util-prs'] = {
license = "see context related readme files"
}
-local P, R, V, C, Ct, Carg = lpeg.P, lpeg.R, lpeg.V, lpeg.C, lpeg.Ct, lpeg.Carg
+local P, R, V, C, Ct, Cs, Carg = lpeg.P, lpeg.R, lpeg.V, lpeg.C, lpeg.Ct, lpeg.Cs, lpeg.Carg
local lpegmatch = lpeg.match
-local concat, format, gmatch = table.concat, string.format, string.gmatch
+local concat, format, gmatch, find = table.concat, string.format, string.gmatch, string.find
local tostring, type, next = tostring, type, next
utilities = utilities or {}
@@ -4240,8 +4687,12 @@ local rbrace = P("}")
local nobrace = 1 - (lbrace+rbrace)
local nested = P { lbrace * (nobrace + V(1))^0 * rbrace }
local spaces = space^0
+local argument = Cs((lbrace/"") * ((nobrace + nested)^0) * (rbrace/""))
+local content = (1-P(-1))^0
-lpeg.patterns.nested = nested
+lpeg.patterns.nested = nested -- no capture
+lpeg.patterns.argument = argument -- argument after e.g. =
+lpeg.patterns.content = content -- rest after e.g =
local value = P(lbrace * C((nobrace + nested)^0) * rbrace) + C((nested + (1-comma))^0)
@@ -4322,9 +4773,15 @@ parsers.patterns.settings_to_array = pattern
-- we could use a weak table as cache
-function parsers.settings_to_array(str)
+function parsers.settings_to_array(str,strict)
if not str or str == "" then
return { }
+ elseif strict then
+ if find(str,"{") then
+ return lpegmatch(pattern,str)
+ else
+ return { str }
+ end
else
return lpegmatch(pattern,str)
end
@@ -4562,61 +5019,55 @@ local names = { }
-- one
local function hook()
- local f = getinfo(2,"f").func
- local n = getinfo(2,"Sn")
--- if n.what == "C" and n.name then print (n.namewhat .. ': ' .. n.name) end
+ local f = getinfo(2) -- "nS"
if f then
- local cf = counters[f]
- if cf == nil then
- counters[f] = 1
- names[f] = n
- else
- counters[f] = cf + 1
- end
- end
-end
-
-local function getname(func)
- local n = names[func]
- if n then
- if n.what == "C" then
- return n.name or '<anonymous>'
+ local n = "unknown"
+ if f.what == "C" then
+ n = f.name or '<anonymous>'
+ if not names[n] then
+ names[n] = format("%42s",n)
+ end
else
-- source short_src linedefined what name namewhat nups func
- local name = n.name or n.namewhat or n.what
- if not name or name == "" then name = "?" end
- return format("%s : %s : %s", n.short_src or "unknown source", n.linedefined or "--", name)
+ n = f.name or f.namewhat or f.what
+ if not n or n == "" then
+ n = "?"
+ end
+ if not names[n] then
+ names[n] = format("%42s : % 5i : %s",n,f.linedefined or 0,f.short_src or "unknown source")
+ end
end
- else
- return "unknown"
+ counters[n] = (counters[n] or 0) + 1
end
end
-function debugger.showstats(printer,threshold)
+function debugger.showstats(printer,threshold) -- hm, something has changed, rubbish now
printer = printer or texio.write or print
threshold = threshold or 0
local total, grandtotal, functions = 0, 0, 0
- printer("\n") -- ugly but ok
- -- table.sort(counters)
- for func, count in next, counters do
- if count > threshold then
- local name = getname(func)
- if not find(name,"for generator") then
- printer(format("%8i %s", count, name))
- total = total + count
- end
+ local dataset = { }
+ for name, count in next, counters do
+ dataset[#dataset+1] = { name, count }
+ end
+ table.sort(dataset,function(a,b) return a[2] == b[2] and b[1] > a[1] or a[2] > b[2] end)
+ for i=1,#dataset do
+ local d = dataset[i]
+ local name = d[1]
+ local count = d[2]
+ if count > threshold and not find(name,"for generator") then -- move up
+ printer(format("%8i %s\n", count, names[name]))
+ total = total + count
end
grandtotal = grandtotal + count
functions = functions + 1
end
- printer(format("functions: %s, total: %s, grand total: %s, threshold: %s\n", functions, total, grandtotal, threshold))
+ printer("\n")
+ printer(format("functions : % 10i\n", functions))
+ printer(format("total : % 10i\n", total))
+ printer(format("grand total: % 10i\n", grandtotal))
+ printer(format("threshold : % 10i\n", threshold))
end
--- two
-
-
--- rest
-
function debugger.savestats(filename,threshold)
local f = io.open(filename,'w')
if f then
@@ -4638,6 +5089,7 @@ end
local is_node = node and node.is_node
+local is_lpeg = lpeg and lpeg.type
function inspect(i) -- global function
local ti = type(i)
@@ -4645,11 +5097,30 @@ function inspect(i) -- global function
table.print(i,"table")
elseif is_node and is_node(i) then
table.print(nodes.astable(i),tostring(i))
+ elseif is_lpeg and is_lpeg(i) then
+ lpeg.print(i)
else
print(tostring(i))
end
end
+-- from the lua book:
+
+function traceback()
+ local level = 1
+ while true do
+ local info = debug.getinfo(level, "Sl")
+ if not info then
+ break
+ elseif info.what == "C" then
+ print(format("%3i : C function",level))
+ else
+ print(format("%3i : [%s]:%d",level,info.short_src,info.currentline))
+ end
+ level = level + 1
+ end
+end
+
end -- of closure
@@ -4668,7 +5139,7 @@ if not modules then modules = { } end modules ['trac-inf'] = {
-- get warnings about assignments. This is more efficient than using rawset
-- and rawget.
-local format = string.format
+local format, lower = string.format, string.lower
local clock = os.gettimeofday or os.clock -- should go in environment
local write_nl = texio.write_nl
@@ -4770,10 +5241,10 @@ function statistics.show(reporter)
-- this code will move
local register = statistics.register
register("luatex banner", function()
- return string.lower(status.banner)
+ return lower(status.banner)
end)
register("control sequences", function()
- return format("%s of %s", status.cs_count, status.hash_size+status.hash_extra)
+ return format("%s of %s + %s", status.cs_count, status.hash_size,status.hash_extra)
end)
register("callbacks", function()
local total, indirect = status.callbacks or 0, status.indirect_callbacks or 0
@@ -4847,7 +5318,7 @@ end
function commands.elapsedtime(name)
stoptiming(name or "whatever")
- tex.sprint(elapsedtime(name or "whatever"))
+ context(elapsedtime(name or "whatever"))
end
@@ -4931,6 +5402,10 @@ local function set(t,what,newvalue)
if type(what) ~= "table" then
return
end
+ if not done then -- catch ... why not set?
+ done = { }
+ t.done = done
+ end
for w, value in next, what do
if value == "" then
value = newvalue
@@ -5167,6 +5642,8 @@ end
if texconfig then
+ -- this happens too late in ini mode but that is no problem
+
local function set(k,v)
v = tonumber(v)
if v then
@@ -5237,7 +5714,7 @@ local report, subreport, status, settarget, setformats, settranslations
local direct, subdirect, writer, pushtarget, poptarget
-if tex and tex.jobname or tex.formatname then
+if tex and (tex.jobname or tex.formatname) then
local valueiskey = { __index = function(t,k) t[k] = k return k end } -- will be helper
@@ -5252,6 +5729,10 @@ if tex and tex.jobname or tex.formatname then
write_nl(target,...)
end
+ newline = function()
+ write_nl(target,"\n")
+ end
+
report = function(a,b,c,...)
if c then
write_nl(target,format("%-15s > %s\n",translations[a],format(formats[b],c,...)))
@@ -5357,6 +5838,10 @@ else
writer = write_nl
+ newline = function()
+ write_nl("\n")
+ end
+
report = function(a,b,c,...)
if c then
write_nl(format("%-15s | %s",a,format(b,c,...)))
@@ -5416,6 +5901,7 @@ logs.settranslations = settranslations
logs.direct = direct
logs.subdirect = subdirect
logs.writer = writer
+logs.newline = newline
-- installer
@@ -5585,21 +6071,58 @@ local real, user, sub
function logs.start_page_number()
real, user, sub = texcount.realpageno, texcount.userpageno, texcount.subpageno
+-- real, user, sub = 0, 0, 0
end
-function logs.stop_page_number()
- if real > 0 then
- if user > 0 then
- if sub > 0 then
- report_pages("flushing realpage %s, userpage %s, subpage %s",real,user,sub)
+local timing = false
+local starttime = nil
+local lasttime = nil
+
+trackers.register("pages.timing", function(v) -- only for myself (diagnostics)
+ starttime = os.clock()
+ timing = true
+end)
+
+function logs.stop_page_number() -- the first page can include the initialization so we omit it from the average
+ if timing then
+ local elapsed, average
+ local stoptime = os.clock()
+ if not lasttime or real < 2 then
+ elapsed = stoptime
+ average = stoptime
+ starttime = stoptime
+ else
+ elapsed = stoptime - lasttime
+ average = (stoptime - starttime) / (real - 1)
+ end
+ lasttime = stoptime
+ if real > 0 then
+ if user > 0 then
+ if sub > 0 then
+ report_pages("flushing realpage %s, userpage %s, subpage %s, time %0.04f / %0.04f",real,user,sub,elapsed,average)
+ else
+ report_pages("flushing realpage %s, userpage %s, time %0.04f / %0.04f",real,user,elapsed,average)
+ end
else
- report_pages("flushing realpage %s, userpage %s",real,user)
+ report_pages("flushing realpage %s, time %0.04f / %0.04f",real,elapsed,average)
end
else
- report_pages("flushing realpage %s",real)
+ report_pages("flushing page, time %0.04f / %0.04f",elapsed,average)
end
else
- report_pages("flushing page")
+ if real > 0 then
+ if user > 0 then
+ if sub > 0 then
+ report_pages("flushing realpage %s, userpage %s, subpage %s",real,user,sub)
+ else
+ report_pages("flushing realpage %s, userpage %s",real,user)
+ end
+ else
+ report_pages("flushing realpage %s",real)
+ end
+ else
+ report_pages("flushing page")
+ end
end
logs.flush()
end
@@ -5681,6 +6204,13 @@ local function reportbanner(t)
end
end
+local function reportversion(t)
+ local banner = t.banner
+ if banner then
+ t.report(banner)
+ end
+end
+
local function reporthelp(t,...)
local helpinfo = t.helpinfo
if type(helpinfo) == "string" then
@@ -5707,6 +6237,7 @@ function logs.application(t)
t.report = logs.reporter(t.name)
t.help = function(...) reportbanner(t) ; reporthelp(t,...) ; reportinfo(t) end
t.identify = function() reportbanner(t) end
+ t.version = function() reportversion(t) end
return t
end
@@ -5772,6 +6303,11 @@ else
end
end
+-- do we still need io.flush then?
+
+io.stdout:setvbuf('no')
+io.stderr:setvbuf('no')
+
end -- of closure
@@ -6277,6 +6813,10 @@ if not modules then modules = { } end modules ['lxml-tab'] = {
-- stripping spaces from e.g. cont-en.xml saves .2 sec runtime so it's not worth the
-- trouble
+-- todo: when serializing optionally remap named entities to hex (if known in char-ent.lua)
+-- maybe when letter -> utf, else name .. then we need an option to the serializer .. a bit
+-- of work so we delay this till we cleanup
+
local trace_entities = false trackers.register("xml.entities", function(v) trace_entities = v end)
local report_xml = logs and logs.reporter("xml","core") or function(...) print(format(...)) end
@@ -6301,10 +6841,11 @@ xml = xml or { }
local xml = xml
+local utf = unicode.utf8
local concat, remove, insert = table.concat, table.remove, table.insert
local type, next, setmetatable, getmetatable, tonumber = type, next, setmetatable, getmetatable, tonumber
local format, lower, find, match, gsub = string.format, string.lower, string.find, string.match, string.gsub
-local utfchar = unicode.utf8.char
+local utfchar, utffind, utfgsub = utf.char, utf.find, utf.gsub
local lpegmatch = lpeg.match
local P, S, R, C, V, C, Cs = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.V, lpeg.C, lpeg.Cs
@@ -6417,9 +6958,22 @@ element.</p>
local nsremap, resolvens = xml.xmlns, xml.resolvens
-local stack, top, dt, at, xmlns, errorstr, entities = { }, { }, { }, { }, { }, nil, { }
-local strip, cleanup, utfize, resolve, resolve_predefined, unify_predefined = false, false, false, false, false, false
-local dcache, hcache, acache = { }, { }, { }
+local stack = { }
+local top = { }
+local dt = { }
+local at = { }
+local xmlns = { }
+local errorstr = nil
+local entities = { }
+local strip = false
+local cleanup = false
+local utfize = false
+local resolve = false
+local resolve_predefined = false
+local unify_predefined = false
+
+local dcache = { }
+local hcache = { }
+local acache = { }
local mt = { }
@@ -6585,7 +7139,7 @@ local predefined_unified = {
[42] = "&quot;",
[47] = "&apos;",
[74] = "&lt;",
- [76] = "&gr;",
+ [76] = "&gt;",
}
local predefined_simplified = {
@@ -6596,6 +7150,58 @@ local predefined_simplified = {
[76] = ">", gt = ">",
}
+local nofprivates = 0xF0000 -- shared but seldom used
+
+local privates_u = { -- unescaped
+ [ [[&]] ] = "&amp;",
+ [ [["]] ] = "&quot;",
+ [ [[']] ] = "&apos;",
+ [ [[<]] ] = "&lt;",
+ [ [[>]] ] = "&gt;",
+}
+
+local privates_p = {
+}
+
+local privates_n = {
+ -- keeps track of defined ones
+}
+
+local function escaped(s)
+ if s == "" then
+ return ""
+ else -- if utffind(s,privates_u) then
+ return (utfgsub(s,".",privates_u))
+ -- else
+ -- return s
+ end
+end
+
+local function unescaped(s)
+ local p = privates_n[s]
+ if not p then
+ nofprivates = nofprivates + 1
+ p = utfchar(nofprivates)
+ privates_n[s] = p
+ s = "&" .. s .. ";" -- todo: use char-ent to map to hex
+ privates_u[p] = s
+ privates_p[p] = s
+ end
+ return p
+end
+
+local function unprivatized(s,resolve)
+ if s == "" then
+ return ""
+ else
+ return (utfgsub(s,".",privates_p))
+ end
+end
+
+xml.privatetoken = unescaped
+xml.unprivatized = unprivatized
+xml.privatecodes = privates_n
+
local function handle_hex_entity(str)
local h = hcache[str]
if not h then
@@ -6637,7 +7243,7 @@ local function handle_dec_entity(str)
if not n then
report_xml("utfize, ignoring dec entity &#%s;",str)
elseif trace_entities then
- report_xml("utfize, converting dec entity &#%s; into %s",str,h)
+ report_xml("utfize, converting dec entity &#%s; into %s",str,d)
end
else
if trace_entities then
@@ -6658,34 +7264,44 @@ local function handle_any_entity(str)
if not a then
a = resolve_predefined and predefined_simplified[str]
if a then
- -- one of the predefined
- elseif type(resolve) == "function" then
- a = resolve(str) or entities[str]
- else
- a = entities[str]
- end
- if a then
if trace_entities then
- report_xml("resolved entity &%s; -> %s (internal)",str,a)
+ report_xml("resolved entity &%s; -> %s (predefined)",str,a)
end
- a = lpegmatch(parsedentity,a) or a
else
- local unknown_any_entity = placeholders.unknown_any_entity
- if unknown_any_entity then
- a = unknown_any_entity(str) or ""
+ if type(resolve) == "function" then
+ a = resolve(str) or entities[str]
+ else
+ a = entities[str]
end
if a then
+ if type(a) == "function" then
+ if trace_entities then
+ report_xml("expanding entity &%s; (function)",str)
+ end
+ a = a(str) or ""
+ end
+ a = lpegmatch(parsedentity,a) or a -- for nested
if trace_entities then
- report_xml("resolved entity &%s; -> %s (external)",str,a)
+ report_xml("resolved entity &%s; -> %s (internal)",str,a)
end
else
- if trace_entities then
- report_xml("keeping entity &%s;",str)
+ local unknown_any_entity = placeholders.unknown_any_entity
+ if unknown_any_entity then
+ a = unknown_any_entity(str) or ""
end
- if str == "" then
- a = "&error;"
+ if a then
+ if trace_entities then
+ report_xml("resolved entity &%s; -> %s (external)",str,a)
+ end
else
- a = "&" .. str .. ";"
+ if trace_entities then
+ report_xml("keeping entity &%s;",str)
+ end
+ if str == "" then
+ a = "&error;"
+ else
+ a = "&" .. str .. ";"
+ end
end
end
end
@@ -6700,18 +7316,25 @@ local function handle_any_entity(str)
else
local a = acache[str]
if not a then
- if trace_entities then
- report_xml("found entity &%s;",str)
- end
a = resolve_predefined and predefined_simplified[str]
if a then
-- one of the predefined
acache[str] = a
+ if trace_entities then
+ report_xml("entity &%s; becomes %s",str,tostring(a))
+ end
elseif str == "" then
+ if trace_entities then
+ report_xml("invalid entity &%s;",str)
+ end
a = "&error;"
acache[str] = a
else
- a = "&" .. str .. ";"
+ if trace_entities then
+ report_xml("entity &%s; is made private",str)
+ end
+ -- a = "&" .. str .. ";"
+ a = unescaped(str)
acache[str] = a
end
end
@@ -6761,7 +7384,7 @@ local value = (squote * Cs((entity + (1 - squote))^0) * squote) + (dq
local endofattributes = slash * close + close -- recovery of flacky html
local whatever = space * name * optionalspace * equal
-local wrongvalue = C(P(1-whatever-close)^1 + P(1-close)^1) / attribute_value_error
+----- wrongvalue = C(P(1-whatever-close)^1 + P(1-close)^1) / attribute_value_error
----- wrongvalue = C(P(1-whatever-endofattributes)^1 + P(1-endofattributes)^1) / attribute_value_error
----- wrongvalue = C(P(1-space-endofattributes)^1) / attribute_value_error
local wrongvalue = Cs(P(entity + (1-space-endofattributes))^1) / attribute_value_error
@@ -6796,6 +7419,8 @@ local function normalentity(k,v ) entities[k] = v end
local function systementity(k,v,n) entities[k] = v end
local function publicentity(k,v,n) entities[k] = v end
+-- todo: separate dtd parser
+
local begindoctype = open * P("!DOCTYPE")
local enddoctype = close
local beginset = P("[")
@@ -6803,17 +7428,22 @@ local endset = P("]")
local doctypename = C((1-somespace-close)^0)
local elementdoctype = optionalspace * P("<!ELEMENT") * (1-close)^0 * close
+local basiccomment = begincomment * ((1 - endcomment)^0) * endcomment
+
local normalentitytype = (doctypename * somespace * value)/normalentity
local publicentitytype = (doctypename * somespace * P("PUBLIC") * somespace * value)/publicentity
local systementitytype = (doctypename * somespace * P("SYSTEM") * somespace * value * somespace * P("NDATA") * somespace * doctypename)/systementity
local entitydoctype = optionalspace * P("<!ENTITY") * somespace * (systementitytype + publicentitytype + normalentitytype) * optionalspace * close
-local doctypeset = beginset * optionalspace * P(elementdoctype + entitydoctype + space)^0 * optionalspace * endset
+-- we accept comments in doctypes
+
+local doctypeset = beginset * optionalspace * P(elementdoctype + entitydoctype + basiccomment + space)^0 * optionalspace * endset
local definitiondoctype= doctypename * somespace * doctypeset
local publicdoctype = doctypename * somespace * P("PUBLIC") * somespace * value * somespace * value * somespace * doctypeset
local systemdoctype = doctypename * somespace * P("SYSTEM") * somespace * value * somespace * doctypeset
local simpledoctype = (1-close)^1 -- * balanced^0
local somedoctype = C((somespace * (publicdoctype + systemdoctype + definitiondoctype + simpledoctype) * optionalspace)^0)
local instruction = (spacing * begininstruction * someinstruction * endinstruction) / function(...) add_special("@pi@",...) end
local comment = (spacing * begincomment * somecomment * endcomment ) / function(...) add_special("@cm@",...) end
@@ -6845,17 +7475,30 @@ local grammar_unparsed_text = P { "preamble",
children = unparsedtext + V("parent") + emptyelement + comment + cdata + instruction,
}
--- maybe we will add settinsg to result as well
+-- maybe we will add settings to result as well
-local function xmlconvert(data, settings)
- settings = settings or { } -- no_root strip_cm_and_dt given_entities parent_root error_handler
- strip = settings.strip_cm_and_dt
- utfize = settings.utfize_entities
- resolve = settings.resolve_entities
+local function _xmlconvert_(data, settings)
+ settings = settings or { } -- no_root strip_cm_and_dt given_entities parent_root error_handler
+ --
+ strip = settings.strip_cm_and_dt
+ utfize = settings.utfize_entities
+ resolve = settings.resolve_entities
resolve_predefined = settings.resolve_predefined_entities -- in case we have escaped entities
- unify_predefined = settings.unify_predefined_entities -- &#038; -> &amp;
- cleanup = settings.text_cleanup
- stack, top, at, xmlns, errorstr, entities = { }, { }, { }, { }, nil, settings.entities or { }
+ unify_predefined = settings.unify_predefined_entities -- &#038; -> &amp;
+ cleanup = settings.text_cleanup
+ entities = settings.entities or { }
+ --
+ if utfize == nil then
+ settings.utfize_entities = true
+ utfize = true
+ end
+ if resolve_predefined == nil then
+ settings.resolve_predefined_entities = true
+ resolve_predefined = true
+ end
+ --
+ --
+ stack, top, at, xmlns, errorstr = { }, { }, { }, { }, nil
acache, hcache, dcache = { }, { }, { } -- not stored
reported_attribute_errors = { }
if settings.parent_root then
@@ -6893,7 +7536,7 @@ local function xmlconvert(data, settings)
else
errorhandler = errorhandler or xml.errorhandler
if errorhandler then
- xml.errorhandler("load",errorstr)
+ xml.errorhandler(format("load error: %s",errorstr))
end
end
else
@@ -6907,7 +7550,7 @@ local function xmlconvert(data, settings)
local v = rdt[k]
if type(v) == "table" and not v.special then -- always table -)
result.ri = k -- rootindex
-v.__p__ = result -- new, experiment, else we cannot go back to settings, we need to test this !
+ v.__p__ = result -- new, experiment, else we cannot go back to settings, we need to test this !
break
end
end
@@ -6915,16 +7558,42 @@ v.__p__ = result -- new, experiment, else we cannot go back to settings, we nee
if errorstr and errorstr ~= "" then
result.error = true
end
+ result.statistics = {
+ entities = {
+ decimals = dcache,
+ hexadecimals = hcache,
+ names = acache,
+ }
+ }
+ strip, utfize, resolve, resolve_predefined = nil, nil, nil, nil
+ unify_predefined, cleanup, entities = nil, nil, nil
+ stack, top, at, xmlns, errorstr = nil, nil, nil, nil, nil
+ acache, hcache, dcache = nil, nil, nil
+ reported_attribute_errors, mt, errorhandler = nil, nil, nil
return result
end
+-- Because we can have a crash (stack issues) with faulty xml, we wrap this one
+-- in a protector:
+
+function xmlconvert(data,settings)
+ local ok, result = pcall(function() return _xmlconvert_(data,settings) end)
+ if ok then
+ return result
+ else
+ return _xmlconvert_("")
+ end
+end
+
xml.convert = xmlconvert
-function xml.inheritedconvert(data,xmldata)
+function xml.inheritedconvert(data,xmldata) -- xmldata is parent
local settings = xmldata.settings
- settings.parent_root = xmldata -- to be tested
+ if settings then
+ settings.parent_root = xmldata -- to be tested
+ end
-- settings.no_root = true
- local xc = xmlconvert(data,settings)
+ local xc = xmlconvert(data,settings) -- hm, we might need to locate settings
-- xc.settings = nil
-- xc.entities = nil
-- xc.special = nil
@@ -6953,7 +7622,7 @@ function xml.is_valid(root)
return root and not root.error
end
-xml.errorhandler = report
+xml.errorhandler = report_xml
--[[ldx--
<p>We cannot load an <l n='lpeg'/> from a filehandle so we need to load
@@ -7032,18 +7701,15 @@ alternative.</p>
function xml.checkbom(root) -- can be made faster
if root.ri then
- local dt, found = root.dt, false
+ local dt = root.dt
for k=1,#dt do
local v = dt[k]
if type(v) == "table" and v.special and v.tg == "@pi@" and find(v.dt[1],"xml.*version=") then
- found = true
- break
+ return
end
end
- if not found then
- insert(dt, 1, { special=true, ns="", tg="@pi@", dt = { "xml version='1.0' standalone='yes'"} } )
- insert(dt, 2, "\n" )
- end
+ insert(dt, 1, { special=true, ns="", tg="@pi@", dt = { "xml version='1.0' standalone='yes'"} } )
+ insert(dt, 2, "\n" )
end
end
@@ -7054,14 +7720,14 @@ and then handle the lot.</p>
-- new experimental reorganized serialize
-local function verbose_element(e,handlers)
+local function verbose_element(e,handlers) -- options
local handle = handlers.handle
local serialize = handlers.serialize
local ens, etg, eat, edt, ern = e.ns, e.tg, e.at, e.dt, e.rn
local ats = eat and next(eat) and { }
if ats then
for k,v in next, eat do
- ats[#ats+1] = format('%s=%q',k,v)
+ ats[#ats+1] = format('%s=%q',k,escaped(v))
end
end
if ern and trace_entities and ern ~= ens then
@@ -7077,7 +7743,7 @@ local function verbose_element(e,handlers)
for i=1,#edt do
local e = edt[i]
if type(e) == "string" then
- handle(e)
+ handle(escaped(e))
else
serialize(e,handlers)
end
@@ -7098,11 +7764,11 @@ local function verbose_element(e,handlers)
handle("<",etg,">")
end
for i=1,#edt do
- local ei = edt[i]
- if type(ei) == "string" then
- handle(ei)
+ local e = edt[i]
+ if type(e) == "string" then
+ handle(escaped(e)) -- option: hexify escaped entities
else
- serialize(ei,handlers)
+ serialize(e,handlers)
end
end
handle("</",etg,">")
@@ -7137,7 +7803,7 @@ local function verbose_root(e,handlers)
end
local function verbose_text(e,handlers)
- handlers.handle(e)
+ handlers.handle(escaped(e))
end
local function verbose_document(e,handlers)
@@ -7265,20 +7931,33 @@ local result
local xmlfilehandler = newhandlers {
name = "file",
- initialize = function(name) result = io.open(name,"wb") return result end,
- finalize = function() result:close() return true end,
- handle = function(...) result:write(...) end,
+ initialize = function(name)
+ result = io.open(name,"wb")
+ return result
+ end,
+ finalize = function()
+ result:close()
+ return true
+ end,
+ handle = function(...)
+ result:write(...)
+ end,
}
-- no checking on writeability here but not faster either
--
-- local xmlfilehandler = newhandlers {
--- initialize = function(name) io.output(name,"wb") return true end,
--- finalize = function() io.close() return true end,
+-- initialize = function(name)
+-- io.output(name,"wb")
+-- return true
+-- end,
+-- finalize = function()
+-- io.close()
+-- return true
+-- end,
-- handle = io.write,
-- }
-
function xml.save(root,name)
serialize(root,xmlfilehandler,name)
end
@@ -7287,28 +7966,34 @@ local result
local xmlstringhandler = newhandlers {
name = "string",
- initialize = function() result = { } return result end,
- finalize = function() return concat(result) end,
- handle = function(...) result[#result+1] = concat { ... } end
+ initialize = function()
+ result = { }
+ return result
+ end,
+ finalize = function()
+ return concat(result)
+ end,
+ handle = function(...)
+ result[#result+1] = concat { ... }
+ end,
}
local function xmltostring(root) -- 25% overhead due to collecting
- if root then
- if type(root) == 'string' then
- return root
- else -- if next(root) then -- next is faster than type (and >0 test)
- return serialize(root,xmlstringhandler) or ""
- end
+ if not root then
+ return ""
+ elseif type(root) == 'string' then
+ return root
+ else -- if next(root) then -- next is faster than type (and >0 test)
+ return serialize(root,xmlstringhandler) or ""
end
- return ""
end
-local function xmltext(root) -- inline
+local function __tostring(root) -- inline
return (root and xmltostring(root)) or ""
end
initialize_mt = function(root) -- redefinition
- mt = { __tostring = xmltext, __index = root }
+ mt = { __tostring = __tostring, __index = root }
end
xml.defaulthandlers = handlers
@@ -7428,7 +8113,7 @@ xml.tocdata(e,"error")
--ldx]]--
function xml.tocdata(e,wrapper)
- local whatever = xmltostring(e.dt)
+ local whatever = type(e) == "table" and xmltostring(e.dt) or e or ""
if wrapper then
whatever = format("<%s>%s</%s>",wrapper,whatever,wrapper)
end
@@ -7620,8 +8305,8 @@ apply_axis['child'] = function(list)
c = c + 1
collected[c] = dk
dk.ni = k -- refresh
- en = en + 1
- dk.ei = en
+ en = en + 1
+ dk.ei = en
end
end
ll.en = en
@@ -7649,6 +8334,7 @@ local function collect(list,collected,c)
end
return c
end
+
apply_axis['descendant'] = function(list)
local collected, c = { }, 0
for l=1,#list do
@@ -7940,6 +8626,12 @@ local lp_or = P("|") / " or "
local lp_and = P("&") / " and "
local lp_builtin = P (
+ P("text") / "(ll.dt[1] or '')" + -- fragile
+ P("content") / "ll.dt" +
+ -- P("name") / "(ll.ns~='' and ll.ns..':'..ll.tg)" +
+ P("name") / "((ll.ns~='' and ll.ns..':'..ll.tg) or ll.tg)" +
+ P("tag") / "ll.tg" +
+ P("position") / "l" + -- is element in finalizer
P("firstindex") / "1" +
P("lastindex") / "(#ll.__p__.dt or 1)" +
P("firstelement") / "1" +
@@ -7947,15 +8639,11 @@ local lp_builtin = P (
P("first") / "1" +
P("last") / "#list" +
P("rootposition") / "order" +
- P("position") / "l" + -- is element in finalizer
P("order") / "order" +
P("element") / "(ll.ei or 1)" +
P("index") / "(ll.ni or 1)" +
P("match") / "(ll.mi or 1)" +
- P("text") / "(ll.dt[1] or '')" +
- -- P("name") / "(ll.ns~='' and ll.ns..':'..ll.tg)" +
- P("name") / "((ll.ns~='' and ll.ns..':'..ll.tg) or ll.tg)" +
- P("tag") / "ll.tg" +
+ -- P("namespace") / "ll.ns" +
P("ns") / "ll.ns"
) * ((spaces * P("(") * spaces * P(")"))/"")
@@ -7979,11 +8667,11 @@ local lp_function = C(R("az","AZ","__")^1) * P("(") / function(t) -- todo: bett
end
end
-local lparent = lpeg.P("(")
-local rparent = lpeg.P(")")
+local lparent = P("(")
+local rparent = P(")")
local noparent = 1 - (lparent+rparent)
-local nested = lpeg.P{lparent * (noparent + lpeg.V(1))^0 * rparent}
-local value = lpeg.P(lparent * lpeg.C((noparent + nested)^0) * rparent) -- lpeg.P{"("*C(((1-S("()"))+V(1))^0)*")"}
+local nested = P{lparent * (noparent + V(1))^0 * rparent}
+local value = P(lparent * C((noparent + nested)^0) * rparent) -- P{"("*C(((1-S("()"))+V(1))^0)*")"}
local lp_child = Cc("expr.child(ll,'") * R("az","AZ","--","__")^1 * Cc("')")
local lp_number = S("+-") * R("09")^1
@@ -8135,7 +8823,10 @@ local special_1 = P("*") * Cc(register_auto_descendant) * Cc(register_all_nodes
local special_2 = P("/") * Cc(register_auto_self)
local special_3 = P("") * Cc(register_auto_self)
-local pathparser = Ct { "patterns", -- can be made a bit faster by moving pattern outside
+local no_nextcolon = P(-1) + #(1-P(":")) -- newer lpeg needs the P(-1)
+local no_nextlparent = P(-1) + #(1-P("(")) -- newer lpeg needs the P(-1)
+
+local pathparser = Ct { "patterns", -- can be made a bit faster by moving some patterns outside
patterns = spaces * V("protocol") * spaces * (
( V("special") * spaces * P(-1) ) +
@@ -8164,10 +8855,8 @@ local pathparser = Ct { "patterns", -- can be made a bit faster by moving patter
shortcuts = V("shortcuts_a") * (spaces * "/" * spaces * V("shortcuts_a"))^0,
s_descendant_or_self = (P("***/") + P("/")) * Cc(register_descendant_or_self), --- *** is a bonus
- -- s_descendant_or_self = P("/") * Cc(register_descendant_or_self),
s_descendant = P("**") * Cc(register_descendant),
- s_child = P("*") * #(1-P(":")) * Cc(register_child ),
--- s_child = P("*") * #(P("/")+P(-1)) * Cc(register_child ),
+ s_child = P("*") * no_nextcolon * Cc(register_child ),
s_parent = P("..") * Cc(register_parent ),
s_self = P("." ) * Cc(register_self ),
s_root = P("^^") * Cc(register_root ),
@@ -8194,13 +8883,13 @@ local pathparser = Ct { "patterns", -- can be made a bit faster by moving patter
expressions = expression / register_expression,
letters = R("az")^1,
- name = (1-lpeg.S("/[]()|:*!"))^1,
+ name = (1-S("/[]()|:*!"))^1, -- make inline
negate = P("!") * Cc(false),
nodefunction = V("negate") + P("not") * Cc(false) + Cc(true),
nodetest = V("negate") + Cc(true),
nodename = (V("negate") + Cc(true)) * spaces * ((V("wildnodename") * P(":") * V("wildnodename")) + (Cc(false) * V("wildnodename"))),
- wildnodename = (C(V("name")) + P("*") * Cc(false)) * #(1-P("(")),
+ wildnodename = (C(V("name")) + P("*") * Cc(false)) * no_nextlparent,
nodeset = spaces * Ct(V("nodename") * (spaces * P("|") * spaces * V("nodename"))^0) * spaces,
finalizer = (Cb("protocol") * P("/")^-1 * C(V("name")) * arguments * P(-1)) / register_finalizer,
@@ -8364,7 +9053,7 @@ local function profiled_apply(list,parsed,nofparsed,order)
elseif kind == "expression" then
collected = apply_expression(collected,pi.evaluator,order)
elseif kind == "finalizer" then
- collected = pi.finalizer(collected)
+ collected = pi.finalizer(collected) -- no check on # here
p.matched = p.matched + 1
p.finalized = p.finalized + 1
return collected
@@ -8497,9 +9186,9 @@ end
expressions.child = function(e,pattern)
return applylpath(e,pattern) -- todo: cache
end
-expressions.count = function(e,pattern)
+expressions.count = function(e,pattern) -- what if pattern == empty or nil
local collected = applylpath(e,pattern) -- todo: cache
- return (collected and #collected) or 0
+ return pattern and (collected and #collected) or 0
end
-- external
@@ -8508,7 +9197,7 @@ expressions.oneof = function(s,...) -- slow
local t = {...} for i=1,#t do if s == t[i] then return true end end return false
end
expressions.error = function(str)
- xml.errorhandler("unknown function in lpath expression",tostring(str or "?"))
+ xml.errorhandler(format("unknown function in lpath expression: %s",tostring(str or "?")))
return false
end
expressions.undefined = function(s)
@@ -8534,6 +9223,23 @@ expressions.lower = lower
expressions.number = tonumber
expressions.boolean = toboolean
+function expressions.contains(str,pattern)
+ local t = type(str)
+ if t == "string" then
+ if find(str,pattern) then
+ return true
+ end
+ elseif t == "table" then
+ for i=1,#str do
+ local d = str[i]
+ if type(d) == "string" and find(d,pattern) then
+ return true
+ end
+ end
+ end
+ return false
+end
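+
+-- for instance (an untested sketch of usage in an lpath expression):
+--
+-- xml.filter(root,"a[contains(text(),'cheese')]") -- the a elements whose text mentions cheese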
+
-- user interface
local function traverse(root,pattern,handle)
@@ -8856,10 +9562,12 @@ local xml = xml
local xmlconvert, xmlcopy, xmlname = xml.convert, xml.copy, xml.name
local xmlinheritedconvert = xml.inheritedconvert
local xmlapplylpath = xml.applylpath
+local xmlfilter = xml.filter
local type, setmetatable, getmetatable = type, setmetatable, getmetatable
-local insert, remove, fastcopy = table.insert, table.remove, table.fastcopy
-local gmatch, gsub = string.gmatch, string.gsub
+local insert, remove, fastcopy, concat = table.insert, table.remove, table.fastcopy, table.concat
+local gmatch, gsub, format = string.gmatch, string.gsub, string.format
+local utfbyte = utf.byte
local function report(what,pattern,c,e)
report_xml("%s element '%s' (root: '%s', position: %s, index: %s, pattern: %s)",what,xmlname(e),xmlname(e.__p__),c,e.ni,pattern)
@@ -9004,7 +9712,7 @@ local function xmltoelement(whatever,root)
end
local element
if type(whatever) == "string" then
- element = xmlinheritedconvert(whatever,root)
+ element = xmlinheritedconvert(whatever,root) -- beware, not really a root
else
element = whatever -- we assume a table
end
@@ -9031,18 +9739,30 @@ local function copiedelement(element,newparent)
end
function xml.delete(root,pattern)
- local collected = xmlapplylpath(root,pattern)
- if collected then
- for c=1,#collected do
- local e = collected[c]
- local p = e.__p__
- if p then
- if trace_manipulations then
- report('deleting',pattern,c,e)
+ if not pattern or pattern == "" then
+ local p = root.__p__
+ if p then
+ if trace_manipulations then
+ report('deleting',"--",1,root) -- no pattern position here
+ end
+ local d = p.dt
+ remove(d,root.ni)
+ redo_ni(d) -- can be made faster and inlined
+ end
+ else
+ local collected = xmlapplylpath(root,pattern)
+ if collected then
+ for c=1,#collected do
+ local e = collected[c]
+ local p = e.__p__
+ if p then
+ if trace_manipulations then
+ report('deleting',pattern,c,e)
+ end
+ local d = p.dt
+ remove(d,e.ni)
+ redo_ni(d) -- can be made faster and inlined
end
- local d = p.dt
- remove(d,e.ni)
- redo_ni(d) -- can be made faster and inlined
end
end
end
@@ -9105,32 +9825,39 @@ end
local function inject_element(root,pattern,whatever,prepend)
local element = root and xmltoelement(whatever,root)
local collected = element and xmlapplylpath(root,pattern)
- if collected then
- for c=1,#collected do
- local e = collected[c]
- local r = e.__p__
- local d, k, rri = r.dt, e.ni, r.ri
- local edt = (rri and d[rri].dt) or (d and d[k] and d[k].dt)
- if edt then
- local be, af
- local cp = copiedelement(element,e)
- if prepend then
- be, af = cp, edt
- else
- be, af = edt, cp
- end
- local bn = #be
- for i=1,#af do
- bn = bn + 1
- be[bn] = af[i]
- end
- if rri then
- r.dt[rri].dt = be
- else
- d[k].dt = be
- end
- redo_ni(d)
+ local function inject_e(e)
+ local r = e.__p__
+ local d, k, rri = r.dt, e.ni, r.ri
+ local edt = (rri and d[rri].dt) or (d and d[k] and d[k].dt)
+ if edt then
+ local be, af
+ local cp = copiedelement(element,e)
+ if prepend then
+ be, af = cp, edt
+ else
+ be, af = edt, cp
end
+ local bn = #be
+ for i=1,#af do
+ bn = bn + 1
+ be[bn] = af[i]
+ end
+ if rri then
+ r.dt[rri].dt = be
+ else
+ d[k].dt = be
+ end
+ redo_ni(d)
+ end
+ end
+ if not collected then
+ -- nothing
+ elseif collected.tg then
+ -- first or so
+ inject_e(collected)
+ else
+ for c=1,#collected do
+ inject_e(collected[c])
end
end
end
@@ -9138,16 +9865,23 @@ end
local function insert_element(root,pattern,whatever,before) -- todo: element als functie
local element = root and xmltoelement(whatever,root)
local collected = element and xmlapplylpath(root,pattern)
- if collected then
+ local function insert_e(e)
+ local r = e.__p__
+ local d, k = r.dt, e.ni
+ if not before then
+ k = k + 1
+ end
+ insert(d,k,copiedelement(element,r))
+ redo_ni(d)
+ end
+ if not collected then
+ -- nothing
+ elseif collected.tg then
+ -- first or so
+ insert_e(collected)
+ else
for c=1,#collected do
- local e = collected[c]
- local r = e.__p__
- local d, k = r.dt, e.ni
- if not before then
- k = k + 1
- end
- insert(d,k,copiedelement(element,r))
- redo_ni(d)
+ insert_e(collected[c])
end
end
end
@@ -9346,6 +10080,75 @@ function xml.remapname(root, pattern, newtg, newns, newrn)
end
--[[ldx--
+<p>Helper (for q2p).</p>
+--ldx]]--
+
+function xml.cdatatotext(e)
+ local dt = e.dt
+ if #dt == 1 then
+ local first = dt[1]
+ if first.tg == "@cd@" then
+ e.dt = first.dt
+ end
+ else
+ -- maybe option
+ end
+end
+
+xml.builtinentities = table.tohash { "amp", "quot", "apos", "lt", "gt" } -- used often so share
+
+local entities = characters and characters.entities or nil
+local builtinentities = xml.builtinentities
+
+function xml.addentitiesdoctype(root,option) -- we could also have a 'resolve' i.e. inline hex
+ if not entities then
+ require("char-ent")
+ entities = characters.entities
+ end
+ if entities and root and root.tg == "@rt@" and root.statistics then
+ local list = { }
+ local hexify = option == "hexadecimal"
+ for k, v in table.sortedhash(root.statistics.entities.names) do
+ if not builtinentities[k] then
+ local e = entities[k]
+ if not e then
+ e = format("[%s]",k)
+ elseif hexify then
+ e = format("&#%05X;",utfbyte(k))
+ end
+ list[#list+1] = format(" <!ENTITY %s %q >",k,e)
+ end
+ end
+ local dt = root.dt
+ local n = dt[1].tg == "@pi@" and 2 or 1
+ if #list > 0 then
+ insert(dt, n, { "\n" })
+ insert(dt, n, {
+ tg = "@dt@", -- beware, doctype is unparsed
+ dt = { format("Something [\n%s\n] ",concat(list)) },
+ ns = "",
+ special = true,
+ })
+ insert(dt, n, { "\n\n" })
+ else
+ -- insert(dt, n, { table.serialize(root.statistics) })
+ end
+ end
+end
+
+-- local str = [==[
+-- <?xml version='1.0' standalone='yes' ?>
+-- <root>
+-- <a>test &nbsp; test &#123; test</a>
+-- <b><![CDATA[oeps]]></b>
+-- </root>
+-- ]==]
+--
+-- local x = xml.convert(str)
+-- xml.addentitiesdoctype(x,"hexadecimal")
+-- print(x)
+
+--[[ldx--
<p>Here are a few synonyms.</p>
--ldx]]--
@@ -9378,6 +10181,53 @@ xml.remap_tag = xml.remaptag obsolete.remap_tag
xml.remap_name = xml.remapname obsolete.remap_name = xml.remapname
xml.remap_namespace = xml.remapnamespace obsolete.remap_namespace = xml.remapnamespace
+-- new (probably ok)
+
+function xml.cdata(e)
+ if e then
+ local dt = e.dt
+ if dt and #dt == 1 then
+ local first = dt[1]
+ return first.tg == "@cd@" and first.dt[1] or ""
+ end
+ end
+ return ""
+end
+
+function xml.finalizers.xml.cdata(collected)
+ if collected then
+ local e = collected[1]
+ if e then
+ local dt = e.dt
+ if dt and #dt == 1 then
+ local first = dt[1]
+ return first.tg == "@cd@" and first.dt[1] or ""
+ end
+ end
+ end
+ return ""
+end
+
+function xml.insertcomment(e,str,n) -- also insertcdata
+ table.insert(e.dt,n or 1,{
+ tg = "@cm@",
+ ns = "",
+ special = true,
+ at = { },
+ dt = { str },
+ })
+end
+
+function xml.setcdata(e,str) -- also setcomment
+ e.dt = { {
+ tg = "@cd@",
+ ns = "",
+ special = true,
+ at = { },
+ dt = { str },
+ } }
+end
+
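-- Editor's sketch (illustrative, not part of this patch): the new cdata helpers
-- in hypothetical use, for some element e obtained from a query:
--
-- xml.setcdata(e,"raw <payload>")    -- e.dt becomes a single @cd@ node
-- print(xml.cdata(e))                -- -> raw <payload>
-- xml.insertcomment(e,"generated")   -- prepends an @cm@ node at position 1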
end -- of closure
@@ -9391,15 +10241,17 @@ if not modules then modules = { } end modules ['lxml-xml'] = {
license = "see context related readme files"
}
-local concat = string.concat
+local concat = table.concat
+local find = string.find
local xml = xml
-local finalizers = xml.finalizers.xml
-local xmlfilter = xml.filter -- we could inline this one for speed
-local xmltostring = xml.tostring
-local xmlserialize = xml.serialize
-local xmlcollected = xml.collected
+local finalizers = xml.finalizers.xml
+local xmlfilter = xml.filter -- we could inline this one for speed
+local xmltostring = xml.tostring
+local xmlserialize = xml.serialize
+local xmlcollected = xml.collected
+local xmlnewhandlers = xml.newhandlers
local function first(collected) -- wrong ?
return collected and collected[1]
@@ -9413,6 +10265,21 @@ local function all(collected)
return collected
end
+-- local function reverse(collected)
+-- if collected then
+-- local nc = #collected
+-- if nc > 0 then
+-- local reversed, r = { }, 0
+-- for c=nc,1,-1 do
+-- r = r + 1
+-- reversed[r] = collected[c]
+-- end
+-- return reversed
+-- else
+-- return collected
+-- end
+-- end
+-- end
local reverse = table.reversed
@@ -9429,34 +10296,37 @@ local function att(id,name)
end
local function count(collected)
- return (collected and #collected) or 0
+ return collected and #collected or 0
end
local function position(collected,n)
- if collected then
- n = tonumber(n) or 0
- if n < 0 then
- return collected[#collected + n + 1]
- elseif n > 0 then
- return collected[n]
- else
- return collected[1].mi or 0
- end
+ if not collected then
+ return 0
+ end
+ local nc = #collected
+ if nc == 0 then
+ return 0
+ end
+ n = tonumber(n) or 0
+ if n < 0 then
+ return collected[nc + n + 1]
+ elseif n > 0 then
+ return collected[n]
+ else
+ return collected[1].mi or 0
end
end
local function match(collected)
- return (collected and collected[1].mi) or 0 -- match
+ return collected and #collected > 0 and collected[1].mi or 0 -- match
end
local function index(collected)
- if collected then
- return collected[1].ni
- end
+ return collected and #collected > 0 and collected[1].ni or 0 -- 0 is new
end
local function attributes(collected,arguments)
- if collected then
+ if collected and #collected > 0 then
local at = collected[1].at
if arguments then
return at[arguments]
@@ -9467,7 +10337,7 @@ local function attributes(collected,arguments)
end
local function chainattribute(collected,arguments) -- todo: optional levels
- if collected then
+ if collected and #collected > 0 then
local e = collected[1]
while e do
local at = e.at
@@ -9485,108 +10355,169 @@ local function chainattribute(collected,arguments) -- todo: optional levels
return ""
end
-local function raw(collected) -- hybrid
- if collected then
+local function raw(collected) -- hybrid (not much different from text so it might go)
+ if collected and #collected > 0 then
local e = collected[1] or collected
- return (e and xmlserialize(e)) or "" -- only first as we cannot concat function
+ return e and xmltostring(e) or "" -- only first as we cannot concat function
else
return ""
end
end
+--
+
+local xmltexthandler = xmlnewhandlers {
+ name = "string",
+ initialize = function()
+ result = { }
+ return result
+ end,
+ finalize = function()
+ return concat(result)
+ end,
+ handle = function(...)
+ result[#result+1] = concat { ... }
+ end,
+ escape = false,
+}
+
+local function xmltotext(root)
+ local dt = root.dt
+ if not dt then
+ return ""
+ end
+ local nt = #dt -- string or table
+ if nt == 0 then
+ return ""
+ elseif nt == 1 and type(dt[1]) == "string" then
+ return dt[1] -- no escaping of " ' < > &
+ else
+ return xmlserialize(root,xmltexthandler) or ""
+ end
+end
+
+--
+
local function text(collected) -- hybrid
- if collected then
- local e = collected[1] or collected
- return (e and xmltostring(e.dt)) or ""
+ if collected then -- no # test here !
+ local e = collected[1] or collected -- why fallback to element, how about cdata
+ return e and xmltotext(e) or ""
else
return ""
end
end
local function texts(collected)
- if collected then
- local t, n = { }, 0
- for c=1,#collected do
- local e = collected[c]
- if e and e.dt then
- n = n + 1
- t[n] = e.dt
- end
+ if not collected then
+ return { } -- why no nil
+ end
+ local nc = #collected
+ if nc == 0 then
+ return { } -- why no nil
+ end
+ local t, n = { }, 0
+ for c=1,nc do
+ local e = collected[c]
+ if e and e.dt then
+ n = n + 1
+ t[n] = e.dt
end
- return t
end
+ return t
end
local function tag(collected,n)
- if collected then
- local c
- if n == 0 or not n then
- c = collected[1]
- elseif n > 1 then
- c = collected[n]
- else
- c = collected[#collected-n+1]
- end
- return c and c.tg
+ if not collected then
+ return
+ end
+ local nc = #collected
+ if nc == 0 then
+ return
end
+ local c
+ if n == 0 or not n then
+ c = collected[1]
+ elseif n > 1 then
+ c = collected[n]
+ else
+ c = collected[nc-n+1]
+ end
+ return c and c.tg
end
local function name(collected,n)
- if collected then
- local c
- if n == 0 or not n then
- c = collected[1]
- elseif n > 1 then
- c = collected[n]
- else
- c = collected[#collected-n+1]
- end
- if c then
- if c.ns == "" then
- return c.tg
- else
- return c.ns .. ":" .. c.tg
- end
- end
+ if not collected then
+ return
+ end
+ local nc = #collected
+ if nc == 0 then
+ return
+ end
+ local c
+ if n == 0 or not n then
+ c = collected[1]
+ elseif n > 1 then
+ c = collected[n]
+ else
+ c = collected[nc-n+1]
+ end
+ if not c then
+ -- sorry
+ elseif c.ns == "" then
+ return c.tg
+ else
+ return c.ns .. ":" .. c.tg
end
end
local function tags(collected,nonamespace)
- if collected then
- local t, n = { }, 0
- for c=1,#collected do
- local e = collected[c]
- local ns, tg = e.ns, e.tg
- n = n + 1
- if nonamespace or ns == "" then
- t[n] = tg
- else
- t[n] = ns .. ":" .. tg
- end
+ if not collected then
+ return
+ end
+ local nc = #collected
+ if nc == 0 then
+ return
+ end
+ local t, n = { }, 0
+ for c=1,nc do
+ local e = collected[c]
+ local ns, tg = e.ns, e.tg
+ n = n + 1
+ if nonamespace or ns == "" then
+ t[n] = tg
+ else
+ t[n] = ns .. ":" .. tg
end
- return t
end
+ return t
end
-local function empty(collected)
- if collected then
- for c=1,#collected do
- local e = collected[c]
- if e then
- local edt = e.dt
- if edt then
- local n = #edt
- if n == 1 then
- local edk = edt[1]
- local typ = type(edk)
- if typ == "table" then
- return false
- elseif edk ~= "" then -- maybe an extra tester for spacing only
- return false
- end
- elseif n > 1 then
+local function empty(collected,spacesonly)
+ if not collected then
+ return true
+ end
+ local nc = #collected
+ if nc == 0 then
+ return true
+ end
+ for c=1,nc do
+ local e = collected[c]
+ if e then
+ local edt = e.dt
+ if edt then
+ local n = #edt
+ if n == 1 then
+ local edk = edt[1]
+ local typ = type(edk)
+ if typ == "table" then
+ return false
+ elseif edk ~= "" then
+ return false
+ elseif spacesonly and not find(edk,"%S") then
return false
end
+ elseif n > 1 then
+ return false
end
end
end
@@ -9641,14 +10572,14 @@ function xml.raw(id,pattern)
end
end
-function xml.text(id,pattern)
+function xml.text(id,pattern) -- brrr either content or element (when cdata)
if pattern then
-- return text(xmlfilter(id,pattern))
local collected = xmlfilter(id,pattern)
- return (collected and xmltostring(collected[1].dt)) or ""
+ return collected and #collected > 0 and xmltotext(collected[1]) or ""
elseif id then
-- return text(id)
- return xmltostring(id.dt) or ""
+ return xmltotext(id) or ""
else
return ""
end
@@ -9656,6 +10587,8 @@ end
xml.content = text
+--
+
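-- Editor's sketch (illustrative, not part of this patch): xml.text now routes
-- mixed content through xmltotext and the string serializer, so text inside
-- child elements is included as well. Hypothetical usage (the lpath pattern is
-- only indicative):
--
-- local x = xml.convert("<a>foo <b>bar</b> baz</a>")
-- print(xml.text(x,"a"))   -- -> foo bar baz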
function xml.position(id,pattern,n) -- element
return position(xmlfilter(id,pattern),n)
end
@@ -9664,8 +10597,8 @@ function xml.match(id,pattern) -- number
return match(xmlfilter(id,pattern))
end
-function xml.empty(id,pattern)
- return empty(xmlfilter(id,pattern))
+function xml.empty(id,pattern,spacesonly)
+ return empty(xmlfilter(id,pattern),spacesonly)
end
xml.all = xml.filter
@@ -9719,7 +10652,7 @@ if not modules then modules = { } end modules ['data-ini'] = {
license = "see context related readme files",
}
-local gsub, find, gmatch = string.gsub, string.find, string.gmatch
+local gsub, find, gmatch, char = string.gsub, string.find, string.gmatch, string.char
local concat = table.concat
local next, type = next, type
@@ -9781,7 +10714,7 @@ do
local homedir = osgetenv(ostype == "windows" and 'USERPROFILE' or 'HOME') or ''
if not homedir or homedir == "" then
- homedir = string.char(127) -- we need a value, later we wil trigger on it
+ homedir = char(127) -- we need a value, later we will trigger on it
end
homedir = file.collapsepath(homedir)
@@ -9954,7 +10887,7 @@ if not modules then modules = { } end modules ['data-exp'] = {
license = "see context related readme files",
}
-local format, find, gmatch, lower = string.format, string.find, string.gmatch, string.lower
+local format, find, gmatch, lower, char, sub = string.format, string.find, string.gmatch, string.lower, string.char, string.sub
local concat, sort = table.concat, table.sort
local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
local Ct, Cs, Cc, P, C, S = lpeg.Ct, lpeg.Cs, lpeg.Cc, lpeg.P, lpeg.C, lpeg.S
@@ -9974,19 +10907,6 @@ local resolvers = resolvers
-- all, when working on the main resolver code, I don't want to scroll
-- past this every time. See data-obs.lua for the gsub variant.
--- {a,b,c,d}
--- a,b,c/{p,q,r},d
--- a,b,c/{p,q,r}/d/{x,y,z}//
--- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
--- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
--- a{b,c}{d,e}f
--- {a,b,c,d}
--- {a,b,c/{p,q,r},d}
--- {a,b,c/{p,q,r}/d/{x,y,z}//}
--- {a,b,c/{p,q/{x,y,z}},d/{p,q,r}}
--- {a,b,c/{p,q/{x,y,z},w}v,d/{p,q,r}}
--- {$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,.local,}/web2c}
-
local function f_first(a,b)
local t, n = { }, 0
for s in gmatch(b,"[^,]+") do
@@ -10043,12 +10963,9 @@ local function splitpathexpr(str, newlist, validate) -- I couldn't resist lpeggi
str = lpegmatch(stripper_1,str)
if validate then
for s in gmatch(str,"[^,]+") do
- local restoreslashes = false
- if find(s, "//$") then restoreslashes = true end
s = validate(s)
if s then
n = n + 1 ; t[n] = s
- if restoreslashes and not find(s, "//$") then t[n] = t[n] .. '//' end
end
end
else
@@ -10081,41 +10998,56 @@ function resolvers.expandedpathfromlist(pathlist)
return newlist
end
+-- {a,b,c,d}
+-- a,b,c/{p,q,r},d
+-- a,b,c/{p,q,r}/d/{x,y,z}//
+-- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
+-- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
+-- a{b,c}{d,e}f
+-- {a,b,c,d}
+-- {a,b,c/{p,q,r},d}
+-- {a,b,c/{p,q,r}/d/{x,y,z}//}
+-- {a,b,c/{p,q/{x,y,z}},d/{p,q,r}}
+-- {a,b,c/{p,q/{x,y,z},w}v,d/{p,q,r}}
+-- {$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,.local,}/web2c}
+
local cleanup = lpeg.replacer {
{ "!" , "" },
{ "\\" , "/" },
}
-local homedir
-
-function resolvers.cleanpath(str)
- if not homedir then
- homedir = lpegmatch(cleanup,environment.homedir or "")
- if homedir == string.char(127) or homedir == "" or not lfs.isdir(homedir) then
- if trace_expansions then
- report_expansions("no home dir set, ignoring dependent paths")
- end
- function resolvers.cleanpath(str)
- if find(str,"~") then
- return "" -- special case
- else
- return str and lpegmatch(cleanup,str)
- end
- end
- else
- cleanup = lpeg.replacer {
- { "!" , "" },
- { "\\" , "/" },
- { "~" , homedir },
- }
- function resolvers.cleanpath(str)
- return str and lpegmatch(cleanup,str)
+function resolvers.cleanpath(str) -- tricky, maybe only simple paths
+ local doslashes = (P("\\")/"/" + 1)^0
+ local donegation = (P("!") /"" )^0
+ local homedir = lpegmatch(Cs(donegation * doslashes),environment.homedir or "")
+ if homedir == "~" or homedir == "" or not lfs.isdir(homedir) then
+ if trace_expansions then
+ report_expansions("no home dir set, ignoring dependent paths")
+ end
+ function resolvers.cleanpath(str)
+ if not str or find(str,"~") then
+ return "" -- special case
+ else
+ return lpegmatch(cleanup,str)
end
end
+ else
+ local dohome = ((P("~")+P("$HOME"))/homedir)^0
+ local cleanup = Cs(donegation * dohome * doslashes)
+ function resolvers.cleanpath(str)
+ return str and lpegmatch(cleanup,str) or ""
+ end
end
return resolvers.cleanpath(str)
end
+-- print(resolvers.cleanpath(""))
+-- print(resolvers.cleanpath("!"))
+-- print(resolvers.cleanpath("~"))
+-- print(resolvers.cleanpath("~/test"))
+-- print(resolvers.cleanpath("!~/test"))
+-- print(resolvers.cleanpath("~/test~test"))
+
-- This one strips quotes and funny tokens.
local expandhome = P("~") / "$HOME" -- environment.homedir
@@ -10140,8 +11072,8 @@ end
local cache = { }
----- splitter = Ct(lpeg.splitat(S(ostype == "windows" and ";" or ":;"))) -- maybe add ,
-local splitter = Ct(lpeg.splitat(";")) -- as we move towards urls, prefixes and use tables we no longer do :
+----- splitter = lpeg.tsplitat(S(ostype == "windows" and ";" or ":;")) -- maybe add ,
+local splitter = lpeg.tsplitat(";") -- as we move towards urls, prefixes and use tables we no longer do :
local backslashswapper = lpeg.replacer("\\","/")
@@ -10201,13 +11133,22 @@ end
-local weird = P(".")^1 + lpeg.anywhere(S("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t"))
+-- a lot of this caching can be stripped away when we have ssd's everywhere
+--
+-- we could cache all the (sub)paths here if needed
local attributes, directory = lfs.attributes, lfs.dir
+local weird = P(".")^1 + lpeg.anywhere(S("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t"))
+local timer = { }
+local scanned = { }
+local nofscans = 0
+local scancache = { }
+
local function scan(files,spec,path,n,m,r)
- local full = (path == "" and spec) or (spec .. path .. '/')
- local dirs, nofdirs = { }, 0
+ local full = (path == "" and spec) or (spec .. path .. '/')
+ local dirs = { }
+ local nofdirs = 0
for name in directory(full) do
if not lpegmatch(weird,name) then
local mode = attributes(full..name,'mode')
@@ -10245,25 +11186,122 @@ local function scan(files,spec,path,n,m,r)
files, n, m, r = scan(files,spec,dirs[i],n,m,r)
end
end
+ scancache[sub(full,1,-2)] = files
return files, n, m, r
end
-function resolvers.scanfiles(path,branch)
+local fullcache = { }
+
+function resolvers.scanfiles(path,branch,usecache)
+ statistics.starttiming(timer)
+ local realpath = resolvers.resolve(path) -- no shortcut
+ if usecache then
+ local files = fullcache[realpath]
+ if files then
+ if trace_locating then
+ report_expansions("using cached scan of path '%s', branch '%s'",path,branch or path)
+ end
+ return files
+ end
+ end
if trace_locating then
- report_expansions("scanning path '%s', branch '%s'",path, branch or path)
+ report_expansions("scanning path '%s', branch '%s'",path,branch or path)
end
- local realpath = resolvers.resolve(path) -- no shortcut
local files, n, m, r = scan({ },realpath .. '/',"",0,0,0)
- files.__path__ = path -- can be selfautoparent:texmf-whatever
- files.__files__ = n
- files.__directories__ = m
- files.__remappings__ = r
+ files.__path__ = path -- can be selfautoparent:texmf-whatever
+ files.__files__ = n
+ files.__directories__ = m
+ files.__remappings__ = r
if trace_locating then
report_expansions("%s files found on %s directories with %s uppercase remappings",n,m,r)
end
+ if usecache then
+ scanned[#scanned+1] = realpath
+ fullcache[realpath] = files
+ end
+ nofscans = nofscans + 1
+ statistics.stoptiming(timer)
+ return files
+end
+
+local function simplescan(files,spec,path) -- first match only, no map and such
+ local full = (path == "" and spec) or (spec .. path .. '/')
+ local dirs = { }
+ local nofdirs = 0
+ for name in directory(full) do
+ if not lpegmatch(weird,name) then
+ local mode = attributes(full..name,'mode')
+ if mode == 'file' then
+ if not files[name] then
+ -- only first match
+ files[name] = path
+ end
+ elseif mode == 'directory' then
+ nofdirs = nofdirs + 1
+ if path ~= "" then
+ dirs[nofdirs] = path..'/'..name
+ else
+ dirs[nofdirs] = name
+ end
+ end
+ end
+ end
+ if nofdirs > 0 then
+ sort(dirs)
+ for i=1,nofdirs do
+ files = simplescan(files,spec,dirs[i])
+ end
+ end
+ return files
+end
+
+local simplecache = { }
+local nofsharedscans = 0
+
+function resolvers.simplescanfiles(path,branch,usecache)
+ statistics.starttiming(timer)
+ local realpath = resolvers.resolve(path) -- no shortcut
+ if usecache then
+ local files = simplecache[realpath]
+ if not files then
+ files = scancache[realpath]
+ if files then
+ nofsharedscans = nofsharedscans + 1
+ end
+ end
+ if files then
+ if trace_locating then
+ report_expansions("using cached scan of path '%s', branch '%s'",path,branch or path)
+ end
+ return files
+ end
+ end
+ if trace_locating then
+ report_expansions("scanning path '%s', branch '%s'",path,branch or path)
+ end
+ local files = simplescan({ },realpath .. '/',"")
+ if trace_locating then
+ report_expansions("%s files found",table.count(files))
+ end
+ if usecache then
+ scanned[#scanned+1] = realpath
+ simplecache[realpath] = files
+ end
+ nofscans = nofscans + 1
+ statistics.stoptiming(timer)
return files
end
+function resolvers.scandata()
+ table.sort(scanned)
+ return {
+ n = nofscans,
+ shared = nofsharedscans,
+ time = statistics.elapsedtime(timer),
+ paths = scanned,
+ }
+end
+
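-- Editor's sketch (illustrative, not part of this patch): the new third argument
-- enables the scan cache, and resolvers.scandata() reports on what was scanned.
-- The path below is hypothetical:
--
-- local files = resolvers.scanfiles("/opt/texmf",nil,true)   -- full scan, result cached
-- files       = resolvers.scanfiles("/opt/texmf",nil,true)   -- second call is served from fullcache
-- print(table.serialize(resolvers.scandata()))               -- n, shared, time, paths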
end -- of closure
@@ -10361,12 +11399,12 @@ local relations = allocate { -- todo: handlers also here
mp = {
names = { "mp" },
variable = 'MPINPUTS',
- suffixes = { 'mp' },
+ suffixes = { 'mp', 'mpvi', 'mpiv', 'mpii' },
},
tex = {
names = { "tex" },
variable = 'TEXINPUTS',
- suffixes = { 'tex', "mkiv", "mkiv", "mkii" },
+ suffixes = { 'tex', "mkvi", "mkiv", "mkii" },
},
icc = {
names = { "icc", "icc profile", "icc profiles" },
@@ -10503,7 +11541,7 @@ end
resolvers.updaterelations() -- push this in the metatable -> newindex
local function simplified(t,k)
- return rawget(t,lower(gsub(k," ","")))
+ return k and rawget(t,lower(gsub(k," ",""))) or nil
end
setmetatableindex(formats, simplified)
@@ -10589,6 +11627,7 @@ luatools with a recache feature.</p>
--ldx]]--
local format, lower, gsub, concat = string.format, string.lower, string.gsub, table.concat
+local serialize, serializetofile = table.serialize, table.tofile
local mkdirs, isdir = dir.mkdirs, lfs.isdir
local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
@@ -10651,11 +11690,7 @@ local function identify()
end
elseif not writable and caches.force then
local cacheparent = file.dirname(cachepath)
- -- TH: added 'or true' for deeply buried non-existent caches.
- -- file.is_writable() is not really important here, since this
- -- branch is only checked for as-yet non-existent paths, and
- -- it guards against mkdirs() failing, anyway.
- if file.is_writable(cacheparent) or true then
+ if file.is_writable(cacheparent) and true then -- we go on anyway (needed for mojca's kind of paths)
if not caches.ask or io.ask(format("\nShould I create the cache path %s?",cachepath), "no", { "yes", "no" }) == "yes" then
mkdirs(cachepath)
if isdir(cachepath) and file.is_writable(cachepath) then
@@ -10746,7 +11781,7 @@ function caches.usedpaths()
end
function caches.configfiles()
- return table.concat(resolvers.instance.specification,";")
+ return concat(resolvers.instance.specification,";")
end
function caches.hashed(tree)
@@ -10870,9 +11905,9 @@ function caches.savedata(filepath,filename,data,raw)
end
data.cache_uuid = os.uuid()
if caches.direct then
- file.savedata(tmaname,table.serialize(data,true,saveoptions))
+ file.savedata(tmaname,serialize(data,true,saveoptions))
else
- table.tofile(tmaname,data,true,saveoptions)
+ serializetofile(tmaname,data,true,saveoptions)
end
utilities.lua.compile(tmaname,tmcname)
end
@@ -10939,7 +11974,7 @@ function caches.savecontent(cachename,dataname,content)
content = content,
uuid = os.uuid(),
}
- local ok = io.savedata(luaname,table.serialize(data,true))
+ local ok = io.savedata(luaname,serialize(data,true))
if ok then
if trace_locating then
report_resolvers("category '%s', cachename '%s' saved in '%s'",dataname,cachename,luaname)
@@ -11112,7 +12147,7 @@ if not modules then modules = { } end modules ['data-res'] = {
-- instance but for practical purposes we now avoid this and use a
-- instance variable. We always have one instance active (sort of global).
--- todo: cache:/// home:///
+-- todo: cache:/// home:/// selfautoparent:/// (sometime end 2012)
local format, gsub, find, lower, upper, match, gmatch = string.format, string.gsub, string.find, string.lower, string.upper, string.match, string.gmatch
local concat, insert, sortedkeys = table.concat, table.insert, table.sortedkeys
@@ -11155,7 +12190,58 @@ resolvers.criticalvars = allocate { "SELFAUTOLOC", "SELFAUTODIR", "SELFAUTOPARE
resolvers.luacnfname = 'texmfcnf.lua'
resolvers.luacnfstate = "unknown"
-resolvers.luacnfspec = '{selfautodir:,selfautoparent:};{selfautodir:,selfautoparent:}{/share,}/texmf{-local,};{selfautodir:,selfautoparent:}{/share,}/texmf{-local,}/web2c'
+-- The web2c tex binaries as well as kpse have built-in paths for the configuration
+-- files and there can be a depressing truckload of them. This is actually the weak
+-- spot of a distribution. So we don't want:
+--
+-- resolvers.luacnfspec = '{$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,}/web2c}'
+--
+-- but instead use:
+--
+-- resolvers.luacnfspec = 'selfautoparent:{/texmf{-local,}{,/web2c}}'
+--
+-- which does not make texlive happy as there is a texmf-local tree one level up
+-- (sigh), so we need this. (We can assume web2c as mkiv does not run on older
+-- texlives anyway.)
+--
+-- texlive:
+--
+-- selfautodir:
+-- selfautoparent:
+-- selfautodir:share/texmf-local/web2c
+-- selfautodir:share/texmf/web2c
+-- selfautodir:texmf-local/web2c
+-- selfautodir:texmf/web2c
+-- selfautoparent:share/texmf-local/web2c
+-- selfautoparent:share/texmf/web2c
+-- selfautoparent:texmf-local/web2c
+-- selfautoparent:texmf/web2c
+--
+-- minimals:
+--
+-- home:texmf/web2c
+-- selfautoparent:texmf-local/web2c
+-- selfautoparent:texmf-context/web2c
+-- selfautoparent:texmf/web2c
+
+if this_is_texlive then
+ -- resolvers.luacnfspec = '{selfautodir:,selfautoparent:}{,{/share,}/texmf{-local,}/web2c}'
+ -- resolvers.luacnfspec = '{selfautodir:{/share,}/texmf-local/web2c,selfautoparent:{/share,}/texmf{-local,}/web2c}'
+ -- resolvers.luacnfspec = 'selfautodir:/texmf-local/web2c;selfautoparent:/texmf{-local,}/web2c'
+ resolvers.luacnfspec = 'selfautodir:;selfautoparent:;{selfautodir:,selfautoparent:}{/share,}/texmf{-local,}/web2c'
+else
+ resolvers.luacnfspec = 'home:texmf/web2c;selfautoparent:texmf{-local,-context,}/web2c'
+end
+
+-- which (as we want users to use the web2c path) can be simplified to this:
+--
+-- if environment and environment.ownpath and string.find(environment.ownpath,"[\\/]texlive[\\/]") then
+-- resolvers.luacnfspec = 'selfautodir:/texmf-local/web2c,selfautoparent:/texmf-local/web2c,selfautoparent:/texmf/web2c'
+-- else
+-- resolvers.luacnfspec = 'selfautoparent:/texmf-local/web2c,selfautoparent:/texmf/web2c'
+-- end
+
+
local unset_variable = "unset"
@@ -11177,7 +12263,7 @@ local instance = resolvers.instance or nil -- the current one (fast access)
function resolvers.setenv(key,value,raw)
if instance then
-- this one will be consulted first when we stay inside
- -- the current environment
+ -- the current environment (prefixes are not resolved here)
instance.environment[key] = value
-- we feed back into the environment, and as this is used
-- by other applications (via os.execute) we need to make
@@ -11257,6 +12343,7 @@ function resolvers.newinstance() -- todo: all vars will become lowercase and alp
remember = true,
diskcache = true,
renewcache = false,
+ renewtree = false,
loaderror = false,
savelists = true,
pattern = nil, -- lists
@@ -11348,7 +12435,7 @@ local function makepathexpression(str)
end
end
-local function reportcriticalvariables()
+local function reportcriticalvariables(cnfspec)
if trace_locating then
for i=1,#resolvers.criticalvars do
local k = resolvers.criticalvars[i]
@@ -11356,6 +12443,14 @@ local function reportcriticalvariables()
report_resolving("variable '%s' set to '%s'",k,v)
end
report_resolving()
+ if cnfspec then
+ if type(cnfspec) == "table" then
+ report_resolving("using configuration specification '%s'",concat(cnfspec,","))
+ else
+ report_resolving("using configuration specification '%s'",cnfspec)
+ end
+ end
+ report_resolving()
end
reportcriticalvariables = function() end
end
@@ -11370,7 +12465,7 @@ local function identify_configuration_files()
else
resolvers.luacnfstate = "environment"
end
- reportcriticalvariables()
+ reportcriticalvariables(cnfspec)
local cnfpaths = expandedpathfromlist(resolvers.splitpath(cnfspec))
local luacnfname = resolvers.luacnfname
for i=1,#cnfpaths do
@@ -11406,6 +12501,19 @@ local function load_configuration_files()
if blob then
local setups = instance.setups
local data = blob()
+ local parent = data and data.parent
+ if parent then
+ local filename = filejoin(pathname,parent)
+ local realname = resolvers.resolve(filename) -- no shortcut
+ local blob = loadfile(realname)
+ if blob then
+ local parentdata = blob()
+ if parentdata then
+ report_resolving("loading configuration file '%s'",filename)
+ data = table.merged(parentdata,data)
+ end
+ end
+ end
data = data and data.content
if data then
if trace_locating then
@@ -11495,11 +12603,6 @@ local function locate_file_databases()
local runtime = stripped == path
path = resolvers.cleanpath(path)
local spec = resolvers.splitmethod(stripped)
- -- TH Original did this the other way around (elseif and if tests
- -- reversed) but then the else branch was never reached, as 'runtime'
- -- would effectively be ignored. In turn, that meant that the wrong
- -- locator method was used, such that tree: and file: were treated
- -- identically (cached and no runtime search).
if runtime and (spec.noscheme or spec.scheme == "file") then
stripped = "tree:///" .. stripped
elseif spec.scheme == "cache" or spec.scheme == "file" then
@@ -11507,7 +12610,7 @@ local function locate_file_databases()
end
if trace_locating then
if runtime then
- report_resolving("locating list of '%s' (runtime)",path)
+ report_resolving("locating list of '%s' (runtime) (%s)",path,stripped)
else
report_resolving("locating list of '%s' (cached)",path)
end
@@ -11551,6 +12654,39 @@ local function save_file_databases() -- will become cachers
end
end
+function resolvers.renew(hashname)
+ if hashname and hashname ~= "" then
+ local expanded = resolvers.expansion(hashname) or ""
+ if expanded ~= "" then
+ if trace_locating then
+ report_resolving("identifying tree '%s' from '%s'",expanded,hashname)
+ end
+ hashname = expanded
+ else
+ if trace_locating then
+ report_resolving("identifying tree '%s'",hashname)
+ end
+ end
+ local realpath = resolvers.resolve(hashname)
+ if lfs.isdir(realpath) then
+ if trace_locating then
+ report_resolving("using path '%s'",realpath)
+ end
+ methodhandler('generators',hashname)
+ -- could be shared
+ local content = instance.files[hashname]
+ caches.collapsecontent(content)
+ if trace_locating then
+ report_resolving("saving tree '%s'",hashname)
+ end
+ caches.savecontent(hashname,"files",content)
+ -- till here
+ else
+ report_resolving("invalid path '%s'",realpath)
+ end
+ end
+end
+
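-- Editor's sketch (illustrative, not part of this patch): resolvers.renew takes
-- a variable name (or a path) identifying a tree, rescans it and re-saves its
-- "files" cache. The argument below is only an example:
--
-- resolvers.renew("TEXMFLOCAL")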
local function load_databases()
locate_file_databases()
if instance.diskcache and not instance.renewcache then
@@ -11838,6 +12974,7 @@ local function collect_files(names)
if dname == "" or find(dname,"^%.") then
dname = false
else
+dname = gsub(dname,"*","%.*")
dname = "/" .. dname .. "$"
end
local hashes = instance.hashes
@@ -11897,9 +13034,22 @@ local function collect_files(names)
return noffiles > 0 and filelist or nil
end
-function resolvers.registerintrees(name)
- if not find(name,"^%.") then
- instance.foundintrees[name] = (instance.foundintrees[name] or 0) + 1 -- maybe only one
+local fit = { }
+
+function resolvers.registerintrees(filename,format,filetype,usedmethod,foundname)
+ local foundintrees = instance.foundintrees
+ if usedmethod == "direct" and filename == foundname and fit[foundname] then
+ -- just an extra lookup after a test on presence
+ else
+ local t = {
+ filename = filename,
+ format = format ~= "" and format or nil,
+ filetype = filetype ~= "" and filetype or nil,
+ usedmethod = usedmethod,
+ foundname = foundname,
+ }
+ fit[foundname] = t
+ foundintrees[#foundintrees+1] = t
end
end
@@ -11919,297 +13069,423 @@ end
local preparetreepattern = Cs((P(".")/"%%." + P("-")/"%%-" + P(1))^0 * Cc("$"))
--- this one is split in smaller functions but it needs testing
+-- -- -- begin of main file search routing -- -- -- needs checking as previous has been patched
-local function collect_instance_files(filename,askedformat,allresults) -- todo : plugin (scanners, checkers etc)
- local result = { }
- local stamp = nil
- askedformat = askedformat or ""
- filename = collapsepath(filename)
- -- speed up / beware: format problem
- if instance.remember and not allresults then
- stamp = filename .. "--" .. askedformat
- if instance.found[stamp] then
+local collect_instance_files
+
+local function find_analyze(filename,askedformat,allresults)
+ local filetype, wantedfiles, ext = '', { }, fileextname(filename)
+ -- too tricky as filename can be bla.1.2.3:
+ --
+ -- if not suffixmap[ext] then
+ -- wantedfiles[#wantedfiles+1] = filename
+ -- end
+ wantedfiles[#wantedfiles+1] = filename
+ if askedformat == "" then
+ if ext == "" or not suffixmap[ext] then
+ local defaultsuffixes = resolvers.defaultsuffixes
+ for i=1,#defaultsuffixes do
+ local forcedname = filename .. '.' .. defaultsuffixes[i]
+ wantedfiles[#wantedfiles+1] = forcedname
+ filetype = resolvers.formatofsuffix(forcedname)
+ if trace_locating then
+ report_resolving("forcing filetype '%s'",filetype)
+ end
+ end
+ else
+ filetype = resolvers.formatofsuffix(filename)
if trace_locating then
- report_resolving("remembered file '%s'",filename)
+ report_resolving("using suffix based filetype '%s'",filetype)
end
- resolvers.registerintrees(filename) -- for tracing used files
- return instance.found[stamp]
end
- end
- if not dangerous[askedformat] then
- if isreadable(filename) then
- if trace_detail then
- report_resolving("file '%s' found directly",filename)
- end
- if stamp then
- instance.found[stamp] = { filename }
+ else
+ if ext == "" or not suffixmap[ext] then
+ local format_suffixes = suffixes[askedformat]
+ if format_suffixes then
+ for i=1,#format_suffixes do
+ wantedfiles[#wantedfiles+1] = filename .. "." .. format_suffixes[i]
+ end
end
- return { filename }
+ end
+ filetype = askedformat
+ if trace_locating then
+ report_resolving("using given filetype '%s'",filetype)
end
end
+ return filetype, wantedfiles
+end
+
+local function find_direct(filename,allresults)
+ if not dangerous[askedformat] and isreadable(filename) then
+ if trace_detail then
+ report_resolving("file '%s' found directly",filename)
+ end
+ return "direct", { filename }
+ end
+end
+
+local function find_wildcard(filename,allresults)
if find(filename,'%*') then
if trace_locating then
report_resolving("checking wildcard '%s'", filename)
end
- result = resolvers.findwildcardfiles(filename) -- we can use th elocal
- elseif file.is_qualified_path(filename) then
- if isreadable(filename) then
- if trace_locating then
- report_resolving("qualified name '%s'", filename)
- end
- result = { filename }
- else
- local forcedname, ok, suffix = "", false, fileextname(filename)
- if suffix == "" then -- why
- local format_suffixes = askedformat == "" and resolvers.defaultsuffixes or suffixes[askedformat]
- if format_suffixes then
- for i=1,#format_suffixes do
- local s = format_suffixes[i]
- forcedname = filename .. "." .. s
- if isreadable(forcedname) then
- if trace_locating then
- report_resolving("no suffix, forcing format filetype '%s'", s)
- end
- result, ok = { forcedname }, true
- break
- end
- end
- end
- end
- if not ok and suffix ~= "" then
- -- try to find in tree (no suffix manipulation), here we search for the
- -- matching last part of the name
- local basename = filebasename(filename)
- local pattern = lpegmatch(preparetreepattern,filename)
- -- messy .. to be sorted out
- local savedformat = askedformat
- local format = savedformat or ""
- if format == "" then
- askedformat = resolvers.formatofsuffix(suffix)
- end
- if not format then
- askedformat = "othertextfiles" -- kind of everything, maybe texinput is better
- end
- --
- if basename ~= filename then
- local resolved = collect_instance_files(basename,askedformat,allresults)
- if #result == 0 then -- shouldn't this be resolved ?
- local lowered = lower(basename)
- if filename ~= lowered then
- resolved = collect_instance_files(lowered,askedformat,allresults)
- end
- end
- resolvers.format = savedformat
- --
- for r=1,#resolved do
- local rr = resolved[r]
- if find(rr,pattern) then
- result[#result+1], ok = rr, true
- end
- end
- end
- -- a real wildcard:
- --
- -- if not ok then
- -- local filelist = collect_files({basename})
- -- for f=1,#filelist do
- -- local ff = filelist[f][3] or ""
- -- if find(ff,pattern) then
- -- result[#result+1], ok = ff, true
- -- end
- -- end
- -- end
- end
- if not ok and trace_locating then
- report_resolving("qualified name '%s'", filename)
- end
+ local method, result = resolvers.findwildcardfiles(filename)
+ if result then
+ return "wildcard", result
end
- else
- -- search spec
- local filetype, done, wantedfiles, ext = '', false, { }, fileextname(filename)
- -- -- tricky as filename can be bla.1.2.3
- -- if not suffixmap[ext] then --- probably needs to be done elsewhere too
- -- wantedfiles[#wantedfiles+1] = filename
- -- end
- wantedfiles[#wantedfiles+1] = filename
- if askedformat == "" then
- if ext == "" or not suffixmap[ext] then
- local defaultsuffixes = resolvers.defaultsuffixes
- for i=1,#defaultsuffixes do
- local forcedname = filename .. '.' .. defaultsuffixes[i]
- wantedfiles[#wantedfiles+1] = forcedname
- filetype = resolvers.formatofsuffix(forcedname)
+ end
+end
+
+local function find_qualified(filename,allresults) -- this one will be split too
+ if not file.is_qualified_path(filename) then
+ return
+ end
+ if trace_locating then
+ report_resolving("checking qualified name '%s'", filename)
+ end
+ if isreadable(filename) then
+ if trace_detail then
+ report_resolving("qualified file '%s' found", filename)
+ end
+ return "qualified", { filename }
+ end
+ if trace_detail then
+ report_resolving("locating qualified file '%s'", filename)
+ end
+ local forcedname, suffix = "", fileextname(filename)
+ if suffix == "" then -- why
+ local format_suffixes = askedformat == "" and resolvers.defaultsuffixes or suffixes[askedformat]
+ if format_suffixes then
+ for i=1,#format_suffixes do
+ local s = format_suffixes[i]
+ forcedname = filename .. "." .. s
+ if isreadable(forcedname) then
if trace_locating then
- report_resolving("forcing filetype '%s'",filetype)
+ report_resolving("no suffix, forcing format filetype '%s'", s)
end
+ return "qualified", { forcedname }
end
- else
- filetype = resolvers.formatofsuffix(filename)
- if trace_locating then
- report_resolving("using suffix based filetype '%s'",filetype)
+ end
+ end
+ end
+ if suffix and suffix ~= "" then
+ -- try to find in tree (no suffix manipulation), here we search for the
+ -- matching last part of the name
+ local basename = filebasename(filename)
+ local pattern = lpegmatch(preparetreepattern,filename)
+ -- messy .. to be sorted out
+ local savedformat = askedformat
+ local format = savedformat or ""
+ if format == "" then
+ askedformat = resolvers.formatofsuffix(suffix)
+ end
+ if not format then
+ askedformat = "othertextfiles" -- kind of everything, maybe all
+ end
+ --
+ if basename ~= filename then
+ local resolved = collect_instance_files(basename,askedformat,allresults)
+ if #resolved == 0 then
+ local lowered = lower(basename)
+ if filename ~= lowered then
+ resolved = collect_instance_files(lowered,askedformat,allresults)
end
end
- else
- if ext == "" or not suffixmap[ext] then
- local format_suffixes = suffixes[askedformat]
- if format_suffixes then
- for i=1,#format_suffixes do
- wantedfiles[#wantedfiles+1] = filename .. "." .. format_suffixes[i]
+ resolvers.format = savedformat
+ --
+ if #resolved > 0 then
+ local result = { }
+ for r=1,#resolved do
+ local rr = resolved[r]
+ if find(rr,pattern) then
+ result[#result+1] = rr
end
end
+ if #result > 0 then
+ return "qualified", result
+ end
end
- filetype = askedformat
- if trace_locating then
- report_resolving("using given filetype '%s'",filetype)
- end
end
- local typespec = resolvers.variableofformat(filetype)
- local pathlist = resolvers.expandedpathlist(typespec)
- if not pathlist or #pathlist == 0 then
- -- no pathlist, access check only / todo == wildcard
- if trace_detail then
- report_resolving("checking filename '%s', filetype '%s', wanted files '%s'",filename, filetype or '?',concat(wantedfiles," | "))
+ -- a real wildcard:
+ --
+ -- local filelist = collect_files({basename})
+ -- result = { }
+ -- for f=1,#filelist do
+ -- local ff = filelist[f][3] or ""
+ -- if find(ff,pattern) then
+ -- result[#result+1], ok = ff, true
+ -- end
+ -- end
+ -- if #result > 0 then
+ -- return "qualified", result
+ -- end
+ end
+end
+
+local function check_subpath(fname)
+ if isreadable(fname) then
+ if trace_detail then
+ report_resolving("found '%s' by deep scanning",fname)
+ end
+ return fname
+ end
+end
+
+local function find_intree(filename,filetype,wantedfiles,allresults)
+ local typespec = resolvers.variableofformat(filetype)
+ local pathlist = resolvers.expandedpathlist(typespec)
+ local method = "intree"
+ if pathlist and #pathlist > 0 then
+ -- list search
+ local filelist = collect_files(wantedfiles)
+ local dirlist = { }
+ if filelist then
+ for i=1,#filelist do
+ dirlist[i] = filedirname(filelist[i][3]) .. "/" -- was [2] .. gamble
end
- for k=1,#wantedfiles do
- local fname = wantedfiles[k]
- if fname and isreadable(fname) then
- filename, done = fname, true
- result[#result+1] = filejoin('.',fname)
- break
+ end
+ if trace_detail then
+ report_resolving("checking filename '%s'",filename)
+ end
+ local result = { }
+ for k=1,#pathlist do
+ local path = pathlist[k]
+ local pathname = lpegmatch(inhibitstripper,path)
+ local doscan = path == pathname -- no ^!!
+ if not find (pathname,'//$') then
+ doscan = false -- we check directly on the path
+ end
+ local done = false
+ -- using file list
+ if filelist then -- database
+ -- compare list entries with permitted pattern -- /xx /xx//
+ local expression = makepathexpression(pathname)
+ if trace_detail then
+ report_resolving("using pattern '%s' for path '%s'",expression,pathname)
end
- end
- -- this is actually 'other text files' or 'any' or 'whatever'
- local filelist = collect_files(wantedfiles)
- local fl = filelist and filelist[1]
- if fl then
- filename = fl[3] -- not local?
- result[#result+1] = resolvers.resolve(filename)
- done = true
- end
- else
- -- list search
- local filelist = collect_files(wantedfiles)
- local dirlist = { }
- if filelist then
- for i=1,#filelist do
- dirlist[i] = filedirname(filelist[i][3]) .. "/" -- was [2] .. gamble
- end
- end
- if trace_detail then
- report_resolving("checking filename '%s'",filename)
- end
- for k=1,#pathlist do
- local path = pathlist[k]
- local pathname = lpegmatch(inhibitstripper,path)
- local doscan = path == pathname -- no ^!!
- if not find (pathname, '//$') then doscan = false end
- done = false
- -- using file list
- if filelist then
- -- compare list entries with permitted pattern -- /xx /xx//
- local expression = makepathexpression(pathname)
- if trace_detail then
- report_resolving("using pattern '%s' for path '%s'",expression,pathname)
- end
- for k=1,#filelist do
- local fl = filelist[k]
- local f = fl[2]
- local d = dirlist[k]
- if find(d,expression) then
- -- todo, test for readable
- result[#result+1] = resolvers.resolve(fl[3]) -- no shortcut
- done = true
- if allresults then
- if trace_detail then
- report_resolving("match to '%s' in hash for file '%s' and path '%s', continue scanning",expression,f,d)
- end
- else
- if trace_detail then
- report_resolving("match to '%s' in hash for file '%s' and path '%s', quit scanning",expression,f,d)
- end
- break
+ for k=1,#filelist do
+ local fl = filelist[k]
+ local f = fl[2]
+ local d = dirlist[k]
+ if find(d,expression) then
+ -- todo, test for readable
+ result[#result+1] = resolvers.resolve(fl[3]) -- no shortcut
+ done = true
+ if allresults then
+ if trace_detail then
+ report_resolving("match to '%s' in hash for file '%s' and path '%s', continue scanning",expression,f,d)
end
- elseif trace_detail then
- report_resolving("no match to '%s' in hash for file '%s' and path '%s'",expression,f,d)
+ else
+ if trace_detail then
+ report_resolving("match to '%s' in hash for file '%s' and path '%s', quit scanning",expression,f,d)
+ end
+ break
end
+ elseif trace_detail then
+ report_resolving("no match to '%s' in hash for file '%s' and path '%s'",expression,f,d)
end
end
- if not done then
- -- check if on disk / unchecked / does not work at all / also zips
- -- TH perhaps it did not work because of missing resolvers.resolve()
- -- and resolvers.scanfiles() calls ...
- pathname = resolvers.resolve(pathname)
- local scheme = url.hasscheme(pathname)
- if not scheme or scheme == "file" then
- local pname = gsub(pathname,"%.%*$",'')
- if not find(pname,"%*") then
- local ppname = gsub(pname,"/+$","")
- if can_be_dir(ppname) then
- local files = {}
- if doscan then files = resolvers.scanfiles(ppname) end
+ end
+ if done then
+ method = "database"
+ else
+ method = "filesystem" -- bonus, even when !! is specified
+ pathname = gsub(pathname,"/+$","")
+ pathname = resolvers.resolve(pathname)
+ local scheme = url.hasscheme(pathname)
+ if not scheme or scheme == "file" then
+ local pname = gsub(pathname,"%.%*$",'')
+ if not find(pname,"%*") then
+ if can_be_dir(pname) then
+ -- quick root scan first
+ for k=1,#wantedfiles do
+ local w = wantedfiles[k]
+ local fname = check_subpath(filejoin(pname,w))
+ if fname then
+ result[#result+1] = fname
+ done = true
+ if not allresults then
+ break
+ end
+ end
+ end
+ if not done and doscan then
+ -- collect files in path (and cache the result)
+ local files = resolvers.simplescanfiles(pname,false,true)
for k=1,#wantedfiles do
local w = wantedfiles[k]
- local subpath = ''
- if files[w] then
- subpath = type(files[w]) == "table" and files[w][1] or files[w]
- end
- local fname = filejoin(ppname,subpath,w)
- if isreadable(fname) then
- if trace_detail then
- report_resolving("found '%s' by scanning",fname)
+ local subpath = files[w]
+ if not subpath or subpath == "" then
+ -- rootscan already done
+ elseif type(subpath) == "string" then
+ local fname = check_subpath(filejoin(pname,subpath,w))
+ if fname then
+ result[#result+1] = fname
+ done = true
+ if not allresults then
+ break
+ end
+ end
+ else
+ for i=1,#subpath do
+ local sp = subpath[i]
+ if sp == "" then
+ -- roottest already done
+ else
+ local fname = check_subpath(filejoin(pname,sp,w))
+ if fname then
+ result[#result+1] = fname
+ done = true
+ if not allresults then
+ break
+ end
+ end
+ end
+ end
+ if done and not allresults then
+ break
end
- result[#result+1] = fname
- done = true
- if not allresults then break end
end
end
- else
- -- no access needed for non existing path, speedup (esp in large tree with lots of fake)
end
end
+ else
+ -- no access needed for non existing path, speedup (esp in large tree with lots of fake)
end
end
- if not done and doscan then
- -- todo: slow path scanning ... although we now have tree:// supported in $TEXMF
- end
- if done and not allresults then break end
+ end
+ -- todo recursive scanning
+ if done and not allresults then
+ break
end
end
+ if #result > 0 then
+ return method, result
+ end
end
- for k=1,#result do
- local rk = collapsepath(result[k])
- result[k] = rk
- resolvers.registerintrees(rk) -- for tracing used files
+end
+
+local function find_onpath(filename,filetype,wantedfiles,allresults)
+ if trace_detail then
+ report_resolving("checking filename '%s', filetype '%s', wanted files '%s'",filename, filetype or '?',concat(wantedfiles," | "))
+ end
+ local result = { }
+ for k=1,#wantedfiles do
+ local fname = wantedfiles[k]
+ if fname and isreadable(fname) then
+ filename = fname
+ result[#result+1] = filejoin('.',fname)
+ if not allresults then
+ break
+ end
+ end
end
- if stamp then
- instance.found[stamp] = result
+ if #result > 0 then
+ return "onpath", result
end
- return result
end
--- -- -- begin of main file search routing -- -- --
-
-
-
-
-
-
-
+local function find_otherwise(filename,filetype,wantedfiles,allresults) -- other text files | any | whatever
+ local filelist = collect_files(wantedfiles)
+ local fl = filelist and filelist[1]
+ if fl then
+ return "otherwise", { resolvers.resolve(fl[3]) } -- filename
+ end
+end
+-- we could have a loop over the 6 functions but then we'd have to
+-- always analyze
+collect_instance_files = function(filename,askedformat,allresults) -- uses nested
+ askedformat = askedformat or ""
+ filename = collapsepath(filename)
+ if allresults then
+ -- no need for caching, only used for tracing
+ local filetype, wantedfiles = find_analyze(filename,askedformat)
+ local results = {
+ { find_direct (filename,true) },
+ { find_wildcard (filename,true) },
+ { find_qualified(filename,true) },
+ { find_intree (filename,filetype,wantedfiles,true) },
+ { find_onpath (filename,filetype,wantedfiles,true) },
+ { find_otherwise(filename,filetype,wantedfiles,true) },
+ }
+ local result, status, done = { }, { }, { }
+ for k, r in next, results do
+ local method, list = r[1], r[2]
+ if method and list then
+ for i=1,#list do
+ local c = collapsepath(list[i])
+ if not done[c] then
+ result[#result+1] = c
+ done[c] = true
+ end
+ status[#status+1] = format("%-10s: %s",method,c)
+ end
+ end
+ end
+ if trace_detail then
+ report_resolving("lookup status: %s",table.serialize(status,filename))
+ end
+ return result, status
+ else
+ local method, result, stamp, filetype, wantedfiles
+ if instance.remember then
+ stamp = format("%s--%s", filename, askedformat)
+ result = stamp and instance.found[stamp]
+ if result then
+ if trace_locating then
+ report_resolving("remembered file '%s'",filename)
+ end
+ return result
+ end
+ end
+ method, result = find_direct(filename)
+ if not result then
+ method, result = find_wildcard(filename)
+ if not result then
+ method, result = find_qualified(filename)
+ if not result then
+ filetype, wantedfiles = find_analyze(filename,askedformat)
+ method, result = find_intree(filename,filetype,wantedfiles)
+ if not result then
+ method, result = find_onpath(filename,filetype,wantedfiles)
+ if not result then
+ method, result = find_otherwise(filename,filetype,wantedfiles)
+ end
+ end
+ end
+ end
+ end
+ if result and #result > 0 then
+ local foundname = collapsepath(result[1])
+ resolvers.registerintrees(filename,askedformat,filetype,method,foundname)
+ result = { foundname }
+ else
+ result = { } -- maybe false
+ end
+ if stamp then
+ if trace_locating then
+ report_resolving("remembering file '%s'",filename)
+ end
+ instance.found[stamp] = result
+ end
+ return result
+ end
+end
-- -- -- end of main file search routing -- -- --
+
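-- Editor's sketch (illustrative, not part of this patch): the lookup is now a
-- pipeline of six stages (direct, wildcard, qualified, intree, onpath,
-- otherwise). Without allresults the first stage that yields something wins and
-- that single name is remembered; with allresults every stage contributes and a
-- status list records which stage found what. The calls below are hypothetical
-- uses of the closure-local function:
--
-- local one         = collect_instance_files("context.mkiv","")       -- first hit, cached
-- local all, status = collect_instance_files("context.mkiv","",true)  -- all stages, plus status
-- -- status entries look like "intree    : /some/path/context.mkiv"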
local function findfiles(filename,filetype,allresults)
- local result = collect_instance_files(filename,filetype or "",allresults)
- if #result == 0 then
+ local result, status = collect_instance_files(filename,filetype or "",allresults)
+ if not result or #result == 0 then
local lowered = lower(filename)
if filename ~= lowered then
- return collect_instance_files(lowered,filetype or "",allresults)
+ result, status = collect_instance_files(lowered,filetype or "",allresults)
end
end
- return result
+ return result or { }, status
end
function resolvers.findfiles(filename,filetype)
@@ -12370,6 +13646,10 @@ function resolvers.load(option)
return files and next(files) and true
end
+function resolvers.loadtime()
+ return statistics.elapsedtime(instance)
+end
+
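-- Editor's sketch (illustrative, not part of this patch):
--
-- print("resolver load time: " .. resolvers.loadtime())   -- after resolvers.load() has run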
local function report(str)
if trace_locating then
report_resolving(str) -- has already verbose
@@ -12383,6 +13663,9 @@ function resolvers.dowithfilesandreport(command, files, ...) -- will move
if trace_locating then
report('') -- ?
end
+ if type(files) == "string" then
+ files = { files }
+ end
for f=1,#files do
local file = files[f]
local result = command(file,...)
@@ -12540,6 +13823,8 @@ local cleanpath, findgivenfile, expansion = resolvers.cleanpath, resolvers.findg
local getenv = resolvers.getenv -- we can probably also use resolvers.expansion
local P, Cs, lpegmatch = lpeg.P, lpeg.Cs, lpeg.match
+-- getenv = function(...) return resolvers.getenv(...) end -- needs checking (definitions changes later on)
+
prefixes.environment = function(str)
return cleanpath(expansion(str))
end
@@ -12578,7 +13863,7 @@ end
prefixes.filename = function(str)
local fullname = findgivenfile(str) or ""
- return cleanpath(file.basename((fullname ~= "" and fullname) or str))
+ return cleanpath(file.basename((fullname ~= "" and fullname) or str)) -- no cleanpath needed here
end
prefixes.pathname = function(str)
@@ -12621,8 +13906,9 @@ function resolvers.allprefixes(separator)
end
local function _resolve_(method,target)
- if prefixes[method] then
- return prefixes[method](target)
+ local action = prefixes[method]
+ if action then
+ return action(target)
else
return method .. ":" .. target
end
@@ -12637,7 +13923,7 @@ end
local function resolve(str) -- use schemes, this one is then for the commandline only
local res = resolved[str]
if not res then
- res = gsub(str,"([a-z][a-z]+):([^ \"\';]*)",_resolve_)
+ res = gsub(str,"([a-z][a-z]+):([^ \"\';]*)",_resolve_) -- home:xx;selfautoparent:xx; etc
resolved[str] = res
abstract[res] = str
end
@@ -12797,7 +14083,7 @@ end
function generators.file(specification)
local path = specification.filename
- local content = resolvers.scanfiles(path)
+ local content = resolvers.scanfiles(path,false,true) -- scan once
resolvers.registerfilehash(path,content,true)
end
@@ -13152,6 +14438,18 @@ local archives = zip.archives
zip.registeredfiles = zip.registeredfiles or { }
local registeredfiles = zip.registeredfiles
+local limited = false
+
+directives.register("system.inputmode", function(v)
+ if not limited then
+ local i_limiter = io.i_limiter(v)
+ if i_limiter then
+ zip.open = i_limiter.protect(zip.open)
+ limited = true
+ end
+ end
+end)
+
local function validzip(str) -- todo: use url splitter
if not find(str,"^zip://") then
return "zip:///" .. str
@@ -13419,11 +14717,11 @@ function resolvers.finders.tree(specification)
end
function resolvers.locators.tree(specification)
- -- TH: resolvers.resolve() call was missing here
- local name = resolvers.resolve(specification.filename)
- if name ~= '' and lfs.isdir(name) then
+ local name = specification.filename
+ local realname = resolvers.resolve(name) -- no shortcut
+ if realname and realname ~= '' and lfs.isdir(realname) then
if trace_locating then
- report_trees("locator '%s' found",name)
+ report_trees("locator '%s' found",realname)
end
resolvers.appendhash('tree',name,false) -- don't cache
elseif trace_locating then
@@ -13437,6 +14735,8 @@ function resolvers.hashers.tree(specification)
report_trees("analysing '%s'",name)
end
resolvers.methodhandler("hashers",name)
+
+ resolvers.generators.file(specification)
end
resolvers.concatinators.tree = resolvers.concatinators.file
@@ -13596,8 +14896,15 @@ local function loaded(libpaths,name,simple)
end
package.loaders[2] = function(name) -- was [#package.loaders+1]
- if trace_locating then -- mode detail
- report_libraries("! locating '%s'",name)
+ if file.suffix(name) == "" then
+ name = file.addsuffix(name,"lua") -- maybe a list
+ if trace_locating then -- mode detail
+ report_libraries("! locating '%s' with forced suffix",name)
+ end
+ else
+ if trace_locating then -- mode detail
+ report_libraries("! locating '%s'",name)
+ end
end
for i=1,#libformats do
local format = libformats[i]
@@ -13804,7 +15111,13 @@ function resolvers.load_tree(tree,resolve)
-- Beware, we need to obey the relocatable autoparent so we
-- set TEXMFCNF to its raw value. This is somewhat tricky when
-- we run a mkii job from within. Therefore, in mtxrun, there
- -- is a resolve applied when we're in mkii/kpse mode.
+ -- is a resolve applied when we're in mkii/kpse mode or when
+ -- --resolve is passed to mtxrun. Maybe we should also set the
+ -- local SELFAUTOPARENT etc., although these are always set anew.
+
+ if resolve then
+ resolvers.luacnfspec = resolvers.resolve(resolvers.luacnfspec)
+ end
setenv('SELFAUTOPARENT', newroot)
setenv('SELFAUTODIR', newtree)
@@ -13812,11 +15125,12 @@ function resolvers.load_tree(tree,resolve)
setenv('TEXROOT', newroot)
setenv('TEXOS', texos)
setenv('TEXMFOS', texmfos)
- setenv('TEXMFCNF', resolvers.luacnfspec, not resolve)
- setenv("PATH", newpath .. io.pathseparator .. getenv("PATH"))
+ setenv('TEXMFCNF', resolvers.luacnfspec,true) -- already resolved
+ setenv('PATH', newpath .. io.pathseparator .. getenv('PATH'))
report_tds("changing from root '%s' to '%s'",oldroot,newroot)
- report_tds("prepending '%s' to binary path",newpath)
+ report_tds("prepending '%s' to PATH",newpath)
+ report_tds("setting TEXMFCNF to '%s'",resolvers.luacnfspec)
report_tds()
end
end
@@ -13837,6 +15151,7 @@ if not modules then modules = { } end modules ['data-lst'] = {
-- used in mtxrun, can be loaded later .. todo
local find, concat, upper, format = string.find, table.concat, string.upper, string.format
+local fastcopy, sortedpairs = table.fastcopy, table.sortedpairs
resolvers.listers = resolvers.listers or { }
@@ -13867,10 +15182,10 @@ function resolvers.listers.variables(pattern)
end
end
end
- local env = table.fastcopy(environment)
- local var = table.fastcopy(variables)
- local exp = table.fastcopy(expansions)
- for key, value in table.sortedpairs(configured) do
+ local env = fastcopy(environment)
+ local var = fastcopy(variables)
+ local exp = fastcopy(expansions)
+ for key, value in sortedpairs(configured) do
if key ~= "" and (pattern == "" or find(upper(key),pattern)) then
report_lists(key)
report_lists(" env: %s",tabstr(rawget(environment,key)) or "unset")
@@ -13879,9 +15194,9 @@ function resolvers.listers.variables(pattern)
report_lists(" res: %s",resolvers.resolve(expansions[key]) or "unset")
end
end
- instance.environment = table.fastcopy(env)
- instance.variables = table.fastcopy(var)
- instance.expansions = table.fastcopy(exp)
+ instance.environment = fastcopy(env)
+ instance.variables = fastcopy(var)
+ instance.expansions = fastcopy(exp)
end
function resolvers.listers.configurations(report)
@@ -14168,8 +15483,8 @@ own = { } -- not local, might change
own.libs = { -- order can be made better
'l-string.lua',
- 'l-lpeg.lua',
'l-table.lua',
+ 'l-lpeg.lua',
'l-io.lua',
'l-number.lua',
'l-set.lua',
@@ -14240,7 +15555,7 @@ end
-- End of hack.
-local format, gsub, gmatch, match = string.format, string.gsub, string.gmatch, string.match
+local format, gsub, gmatch, match, find = string.format, string.gsub, string.gmatch, string.match, string.find
local concat = table.concat
own.name = (environment and environment.ownname) or arg[0] or 'mtxrun.lua'
@@ -14362,7 +15677,7 @@ local helpinfo = [[
--resolve resolve prefixed arguments
--ctxlua run internally (using preloaded libs)
--internal run script using built in libraries (same as --ctxlua)
---locate locate given filename
+--locate locate given filename in database (default) or system (--first --all --detail)
--autotree use texmf tree cf. env 'texmfstart_tree' or 'texmfstarttree'
--tree=pathtotree use given texmf tree (default file: 'setuptex.tmf')
@@ -14410,7 +15725,7 @@ local helpinfo = [[
local application = logs.application {
name = "mtxrun",
- banner = "ConTeXt TDS Runner Tool 1.30",
+ banner = "ConTeXt TDS Runner Tool 1.31",
helpinfo = helpinfo,
}
@@ -14455,12 +15770,12 @@ runners.registered = {
xmltools = { 'xmltools.rb', true },
-- luatools = { 'luatools.lua', true },
mtxtools = { 'mtxtools.rb', true },
- pdftrimwhite = { 'pdftrimwhite.pl', false }
+ pdftrimwhite = { 'pdftrimwhite.pl', false },
}
runners.launchers = {
windows = { },
- unix = { }
+ unix = { },
}
-- like runners.libpath("framework"): looks on script's subpath
@@ -14487,7 +15802,7 @@ function runners.prepare()
end
local touchname = environment.argument("iftouched")
if type(touchname) == "string" and touchname ~= "" then
- local oldname, newname = string.split(touchname, ",")
+ local oldname, newname = string.splitup(touchname, ",")
if oldname and newname and oldname ~= "" and newname ~= "" then
if not file.needs_updating(oldname,newname) then
if e_verbose then
@@ -14521,11 +15836,11 @@ function runners.execute_script(fullname,internal,nosplit)
if path ~= "" then
result = fullname
elseif name then
- name = name:gsub("^int[%a]*:",function()
+ name = gsub(name,"^int[%a]*:",function()
internal = true
return ""
end )
- name = name:gsub("^script:","")
+ name = gsub(name,"^script:","")
if suffix == "" and runners.registered[name] and runners.registered[name][1] then
name = runners.registered[name][1]
suffix = file.extname(name)
@@ -14555,13 +15870,15 @@ function runners.execute_script(fullname,internal,nosplit)
environment.ownscript = result
dofile(result)
else
- local quotedresult = result
- if string.match(result,' ') and not string.match(result,"^\".*\"$") then quotedresult = '"' .. result .. '"' end
local binary = runners.applications[file.extname(result)]
+ result = string.quoted(string.unquoted(result))
+ -- if string.match(result,' ') and not string.match(result,"^\".*\"$") then
+ -- result = '"' .. result .. '"'
+ -- end
if binary and binary ~= "" then
- quotedresult = binary .. " " .. quotedresult
+ result = binary .. " " .. result
end
- local command = quotedresult .. " " .. environment.reconstructcommandline(environment.arguments_after,noquote)
+ local command = result .. " " .. environment.reconstructcommandline(environment.arguments_after,noquote)
if e_verbose then
report()
report("executing: %s",command)
@@ -14606,7 +15923,7 @@ function runners.execute_program(fullname)
local before, after = environment.splitarguments(fullname)
for k=1,#after do after[k] = resolvers.resolve(after[k]) end
environment.initializearguments(after)
- fullname = fullname:gsub("^bin:","")
+ fullname = gsub(fullname,"^bin:","")
local command = fullname .. " " .. (environment.reconstructcommandline(after or "",noquote) or "")
report()
report("executing: %s",command)
@@ -14667,10 +15984,24 @@ function runners.resolve_string(filename)
end
end
-function runners.locate_file(filename)
- -- differs from texmfstart where locate appends .com .exe .bat ... todo
+-- differs from texmfstart where locate appends .com .exe .bat ... todo
+
+function runners.locate_file(filename) -- was given file but only searches in tree
if filename and filename ~= "" then
- runners.report_location(resolvers.findgivenfile(filename))
+ if environment.argument("first") then
+ runners.report_location(resolvers.findfile(filename))
+ -- resolvers.dowithfilesandreport(resolvers.findfile,filename)
+ elseif environment.argument("all") then
+ local result, status = resolvers.findfiles(filename)
+ if status and environment.argument("detail") then
+ runners.report_location(status)
+ else
+ runners.report_location(result)
+ end
+ else
+ runners.report_location(resolvers.findgivenfile(filename))
+ -- resolvers.dowithfilesandreport(resolvers.findgivenfile,filename)
+ end
end
end
@@ -14679,12 +16010,12 @@ function runners.locate_platform()
end
function runners.report_location(result)
- if e_verbose then
- reportline()
- if result and result ~= "" then
- report(result)
- else
- report("not found")
+ if type(result) == "table" then
+ for i=1,#result do
+ if i > 1 then
+ io.write("\n")
+ end
+ io.write(result[i])
end
else
io.write(result)
@@ -14742,8 +16073,8 @@ function resolvers.launch(str)
end
function runners.launch_file(filename)
- instance.allresults = true
trackers.enable("resolvers.locating")
+ local allresults = environment.arguments["all"]
local pattern = environment.arguments["pattern"]
if not pattern or pattern == "" then
pattern = filename
@@ -14751,15 +16082,15 @@ function runners.launch_file(filename)
if not pattern or pattern == "" then
report("provide name or --pattern=")
else
- local t = resolvers.findfiles(pattern)
+ local t = resolvers.findfiles(pattern,nil,allresults)
if not t or #t == 0 then
- t = resolvers.findfiles("*/" .. pattern)
+ t = resolvers.findfiles("*/" .. pattern,nil,allresults)
end
if not t or #t == 0 then
- t = resolvers.findfiles("*/" .. pattern .. "*")
+ t = resolvers.findfiles("*/" .. pattern .. "*",nil,allresults)
end
if t and #t > 0 then
- if environment.arguments["all"] then
+ if allresults then
for _, v in pairs(t) do
report("launching %s", v)
resolvers.launch(v)
@@ -14774,6 +16105,11 @@ function runners.launch_file(filename)
end
end
+local mtxprefixes = {
+ { "^mtx%-", "mtx-" },
+ { "^mtx%-t%-", "mtx-t-" },
+}
+
function runners.find_mtx_script(filename)
local function found(name)
local path = file.dirname(name)
@@ -14799,24 +16135,27 @@ function runners.find_mtx_script(filename)
return fullname
end
-- mtx- prefix checking
- local mtxprefix = (filename:find("^mtx%-") and "") or "mtx-"
- -- context namespace, mtx-<filename>
- fullname = mtxprefix .. filename
- fullname = found(fullname) or resolvers.findfile(fullname)
- if fullname and fullname ~= "" then
- return fullname
- end
- -- context namespace, mtx-<filename>s
- fullname = mtxprefix .. basename .. "s" .. "." .. suffix
- fullname = found(fullname) or resolvers.findfile(fullname)
- if fullname and fullname ~= "" then
- return fullname
- end
- -- context namespace, mtx-<filename minus trailing s>
- fullname = mtxprefix .. basename:gsub("s$","") .. "." .. suffix
- fullname = found(fullname) or resolvers.findfile(fullname)
- if fullname and fullname ~= "" then
- return fullname
+ for i=1,#mtxprefixes do
+ local mtxprefix = mtxprefixes[i]
+ mtxprefix = find(filename,mtxprefix[1]) and "" or mtxprefix[2]
+ -- context namespace, mtx-<filename>
+ fullname = mtxprefix .. filename
+ fullname = found(fullname) or resolvers.findfile(fullname)
+ if fullname and fullname ~= "" then
+ return fullname
+ end
+ -- context namespace, mtx-<filename>s
+ fullname = mtxprefix .. basename .. "s" .. "." .. suffix
+ fullname = found(fullname) or resolvers.findfile(fullname)
+ if fullname and fullname ~= "" then
+ return fullname
+ end
+ -- context namespace, mtx-<filename minus trailing s>
+ fullname = mtxprefix .. gsub(basename,"s$","") .. "." .. suffix
+ fullname = found(fullname) or resolvers.findfile(fullname)
+ if fullname and fullname ~= "" then
+ return fullname
+ end
end
-- context namespace, just <filename>
fullname = resolvers.findfile(filename)
@@ -14963,9 +16302,11 @@ instance.lsrmode = environment.argument("lsr") or false
local is_mkii_stub = runners.registered[file.removesuffix(file.basename(filename))]
-if environment.argument("usekpse") or environment.argument("forcekpse") or is_mkii_stub then
+local e_argument = environment.argument
+
+if e_argument("usekpse") or e_argument("forcekpse") or is_mkii_stub then
- resolvers.load_tree(environment.argument('tree'),true) -- force resolve of TEXMFCNF
+ resolvers.load_tree(e_argument('tree'),true) -- force resolve of TEXMFCNF
os.setenv("engine","")
os.setenv("progname","")
@@ -14978,7 +16319,7 @@ if environment.argument("usekpse") or environment.argument("forcekpse") or is_mk
other = "other text files",
}
- local progname = environment.argument("progname") or 'context'
+ local progname = e_argument("progname") or 'context'
local function kpse_initialized()
texconfig.kpse_init = true
@@ -14993,7 +16334,7 @@ if environment.argument("usekpse") or environment.argument("forcekpse") or is_mk
local findfile = resolvers.findfile
local showpath = resolvers.showpath
- if environment.argument("forcekpse") then
+ if e_argument("forcekpse") then
function resolvers.findfile(name,kind)
return (kpse_initialized():find_file(resolvers.cleanpath(name),(kind ~= "" and (remapper[kind] or kind)) or "tex") or "") or ""
@@ -15002,7 +16343,7 @@ if environment.argument("usekpse") or environment.argument("forcekpse") or is_mk
return (kpse_initialized():show_path(name)) or ""
end
- elseif environment.argument("usekpse") or is_mkii_stub then
+ elseif e_argument("usekpse") or is_mkii_stub then
resolvers.load()
@@ -15041,12 +16382,12 @@ else
end
end
- resolvers.load_tree(environment.argument('tree'))
+ resolvers.load_tree(e_argument('tree'),e_argument("resolve"))
end
-if environment.argument("selfmerge") then
+if e_argument("selfmerge") then
-- embed used libraries
@@ -15056,27 +16397,27 @@ if environment.argument("selfmerge") then
utilities.merger.selfmerge(own.name,own.libs,{ found })
end
-elseif environment.argument("selfclean") then
+elseif e_argument("selfclean") then
-- remove embedded libraries
runners.loadbase()
utilities.merger.selfclean(own.name)
-elseif environment.argument("selfupdate") then
+elseif e_argument("selfupdate") then
runners.loadbase()
trackers.enable("resolvers.locating")
resolvers.updatescript(own.name,"mtxrun")
-elseif environment.argument("ctxlua") or environment.argument("internal") then
+elseif e_argument("ctxlua") or e_argument("internal") then
-- run a script by loading it (using libs)
runners.loadbase()
ok = runners.execute_script(filename,true)
-elseif environment.argument("script") or environment.argument("scripts") then
+elseif e_argument("script") or e_argument("scripts") then
-- run a script by loading it (using libs), pass args
@@ -15087,109 +16428,110 @@ elseif environment.argument("script") or environment.argument("scripts") then
ok = runners.execute_ctx_script(filename)
end
-elseif environment.argument("execute") then
+elseif e_argument("execute") then
-- execute script
runners.loadbase()
ok = runners.execute_script(filename)
-elseif environment.argument("direct") then
+elseif e_argument("direct") then
-- equals bin:
runners.loadbase()
ok = runners.execute_program(filename)
-elseif environment.argument("edit") then
+elseif e_argument("edit") then
-- edit file
runners.loadbase()
runners.edit_script(filename)
-elseif environment.argument("launch") then
+elseif e_argument("launch") then
runners.loadbase()
runners.launch_file(filename)
-elseif environment.argument("makestubs") then
+elseif e_argument("makestubs") then
-- make stubs (deprecated)
runners.handle_stubs(true)
-elseif environment.argument("removestubs") then
+elseif e_argument("removestubs") then
-- remove stub (deprecated)
runners.loadbase()
runners.handle_stubs(false)
-elseif environment.argument("resolve") then
+elseif e_argument("resolve") then
-- resolve string
runners.loadbase()
runners.resolve_string(filename)
-elseif environment.argument("locate") then
+elseif e_argument("locate") then
- -- locate file
+ -- locate file (only database)
runners.loadbase()
runners.locate_file(filename)
-elseif environment.argument("platform") or environment.argument("show-platform") then
+elseif e_argument("platform") or e_argument("show-platform") then
-- locate platform
runners.loadbase()
runners.locate_platform()
-elseif environment.argument("prefixes") then
+elseif e_argument("prefixes") then
runners.loadbase()
runners.prefixes()
-elseif environment.argument("timedrun") then
+elseif e_argument("timedrun") then
-- locate platform
runners.loadbase()
runners.timedrun(filename)
-elseif environment.argument("variables") or environment.argument("show-variables") or environment.argument("expansions") or environment.argument("show-expansions") then
+elseif e_argument("variables") or e_argument("show-variables") or e_argument("expansions") or e_argument("show-expansions") then
-- luatools: runners.execute_ctx_script("mtx-base","--expansions",filename)
resolvers.load("nofiles")
- resolvers.listers.variables(environment.argument("pattern"))
+ resolvers.listers.variables(e_argument("pattern"))
-elseif environment.argument("configurations") or environment.argument("show-configurations") then
+elseif e_argument("configurations") or e_argument("show-configurations") then
-- luatools: runners.execute_ctx_script("mtx-base","--configurations",filename)
resolvers.load("nofiles")
resolvers.listers.configurations()
-elseif environment.argument("find-file") then
+elseif e_argument("find-file") then
-- luatools: runners.execute_ctx_script("mtx-base","--find-file",filename)
resolvers.load()
- local e_pattern = environment.argument("pattern")
- local e_format = environment.argument("format")
+ local e_all = e_argument("all")
+ local e_pattern = e_argument("pattern")
+ local e_format = e_argument("format")
+ local finder = e_all and resolvers.findfiles or resolvers.findfile
if not e_pattern then
runners.register_arguments(filename)
environment.initializearguments(environment.arguments_after)
- resolvers.dowithfilesandreport(resolvers.findfiles,environment.files,e_format)
+ resolvers.dowithfilesandreport(finder,environment.files,e_format)
elseif type(e_pattern) == "string" then
- instance.allresults = true -- brrrr
- resolvers.dowithfilesandreport(resolvers.findfiles,{ e_pattern }, e_format)
+ resolvers.dowithfilesandreport(finder,{ e_pattern },e_format)
end
-elseif environment.argument("find-path") then
+elseif e_argument("find-path") then
-- luatools: runners.execute_ctx_script("mtx-base","--find-path",filename)
@@ -15201,7 +16543,7 @@ elseif environment.argument("find-path") then
print(path)
end
-elseif environment.argument("expand-braces") then
+elseif e_argument("expand-braces") then
-- luatools: runners.execute_ctx_script("mtx-base","--expand-braces",filename)
@@ -15210,7 +16552,7 @@ elseif environment.argument("expand-braces") then
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.expandbraces, environment.files)
-elseif environment.argument("expand-path") then
+elseif e_argument("expand-path") then
-- luatools: runners.execute_ctx_script("mtx-base","--expand-path",filename)
@@ -15219,7 +16561,7 @@ elseif environment.argument("expand-path") then
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.expandpath, environment.files)
-elseif environment.argument("expand-var") or environment.argument("expand-variable") then
+elseif e_argument("expand-var") or e_argument("expand-variable") then
-- luatools: runners.execute_ctx_script("mtx-base","--expand-var",filename)
@@ -15228,7 +16570,7 @@ elseif environment.argument("expand-var") or environment.argument("expand-variab
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.expansion, environment.files)
-elseif environment.argument("show-path") or environment.argument("path-value") then
+elseif e_argument("show-path") or e_argument("path-value") then
-- luatools: runners.execute_ctx_script("mtx-base","--show-path",filename)
@@ -15237,7 +16579,7 @@ elseif environment.argument("show-path") or environment.argument("path-value") t
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.showpath, environment.files)
-elseif environment.argument("var-value") or environment.argument("show-value") then
+elseif e_argument("var-value") or e_argument("show-value") then
-- luatools: runners.execute_ctx_script("mtx-base","--show-value",filename)
@@ -15246,28 +16588,36 @@ elseif environment.argument("var-value") or environment.argument("show-value") t
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.variable,environment.files)
-elseif environment.argument("format-path") then
+elseif e_argument("format-path") then
-- luatools: runners.execute_ctx_script("mtx-base","--format-path",filename)
resolvers.load()
report(caches.getwritablepath("format"))
-elseif environment.argument("pattern") then
+elseif e_argument("pattern") then
-- luatools
- runners.execute_ctx_script("mtx-base","--pattern='" .. environment.argument("pattern") .. "'",filename)
+ runners.execute_ctx_script("mtx-base","--pattern='" .. e_argument("pattern") .. "'",filename)
-elseif environment.argument("generate") then
+elseif e_argument("generate") then
-- luatools
- instance.renewcache = true
- trackers.enable("resolvers.locating")
- resolvers.load()
+ if filename and filename ~= "" then
+ resolvers.load("nofiles")
+ trackers.enable("resolvers.locating")
+ resolvers.renew(filename)
+ else
+ instance.renewcache = true
+ trackers.enable("resolvers.locating")
+ resolvers.load()
+ end
-elseif environment.argument("make") or environment.argument("ini") or environment.argument("compile") then
+ e_verbose = true
+
+elseif e_argument("make") or e_argument("ini") or e_argument("compile") then
-- luatools: runners.execute_ctx_script("mtx-base","--make",filename)
@@ -15275,29 +16625,33 @@ elseif environment.argument("make") or environment.argument("ini") or environmen
trackers.enable("resolvers.locating")
environment.make_format(filename)
-elseif environment.argument("run") then
+elseif e_argument("run") then
-- luatools
runners.execute_ctx_script("mtx-base","--run",filename)
-elseif environment.argument("fmt") then
+elseif e_argument("fmt") then
-- luatools
runners.execute_ctx_script("mtx-base","--fmt",filename)
-elseif environment.argument("help") and filename=='base' then
+elseif e_argument("help") and filename=='base' then
-- luatools
runners.execute_ctx_script("mtx-base","--help")
-elseif environment.argument("help") or filename=='help' or filename == "" then
+elseif e_argument("version") then
+
+ application.version()
+
+elseif e_argument("help") or filename=='help' or filename == "" then
application.help()
-elseif filename:find("^bin:") then
+elseif find(filename,"^bin:") then
runners.loadbase()
ok = runners.execute_program(filename)
diff --git a/Master/bin/i386-kfreebsd/mtxrun b/Master/bin/i386-kfreebsd/mtxrun
index 7adcd3023a8..335c4fcb9e0 100755
--- a/Master/bin/i386-kfreebsd/mtxrun
+++ b/Master/bin/i386-kfreebsd/mtxrun
@@ -160,509 +160,6 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['l-lpeg'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local lpeg = require("lpeg")
-
-local type = type
-
--- Beware, we predefine a bunch of patterns here and one reason for doing so
--- is that we get consistent behaviour in some of the visualizers.
-
-lpeg.patterns = lpeg.patterns or { } -- so that we can share
-local patterns = lpeg.patterns
-
-local P, R, S, V, match = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.match
-local Ct, C, Cs, Cc = lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc
-local lpegtype = lpeg.type
-
-local utfcharacters = string.utfcharacters
-local utfgmatch = unicode and unicode.utf8.gmatch
-
-local anything = P(1)
-local endofstring = P(-1)
-local alwaysmatched = P(true)
-
-patterns.anything = anything
-patterns.endofstring = endofstring
-patterns.beginofstring = alwaysmatched
-patterns.alwaysmatched = alwaysmatched
-
-local digit, sign = R('09'), S('+-')
-local cr, lf, crlf = P("\r"), P("\n"), P("\r\n")
-local newline = crlf + cr + lf
-local escaped = P("\\") * anything
-local squote = P("'")
-local dquote = P('"')
-local space = P(" ")
-
-local utfbom_32_be = P('\000\000\254\255')
-local utfbom_32_le = P('\255\254\000\000')
-local utfbom_16_be = P('\255\254')
-local utfbom_16_le = P('\254\255')
-local utfbom_8 = P('\239\187\191')
-local utfbom = utfbom_32_be + utfbom_32_le
- + utfbom_16_be + utfbom_16_le
- + utfbom_8
-local utftype = utfbom_32_be / "utf-32-be" + utfbom_32_le / "utf-32-le"
- + utfbom_16_be / "utf-16-be" + utfbom_16_le / "utf-16-le"
- + utfbom_8 / "utf-8" + alwaysmatched / "unknown"
-
-local utf8next = R("\128\191")
-
-patterns.utf8one = R("\000\127")
-patterns.utf8two = R("\194\223") * utf8next
-patterns.utf8three = R("\224\239") * utf8next * utf8next
-patterns.utf8four = R("\240\244") * utf8next * utf8next * utf8next
-patterns.utfbom = utfbom
-patterns.utftype = utftype
-
-local utf8char = patterns.utf8one + patterns.utf8two + patterns.utf8three + patterns.utf8four
-local validutf8char = utf8char^0 * endofstring * Cc(true) + Cc(false)
-
-patterns.utf8 = utf8char
-patterns.utf8char = utf8char
-patterns.validutf8 = validutf8char
-patterns.validutf8char = validutf8char
-
-patterns.digit = digit
-patterns.sign = sign
-patterns.cardinal = sign^0 * digit^1
-patterns.integer = sign^0 * digit^1
-patterns.float = sign^0 * digit^0 * P('.') * digit^1
-patterns.cfloat = sign^0 * digit^0 * P(',') * digit^1
-patterns.number = patterns.float + patterns.integer
-patterns.cnumber = patterns.cfloat + patterns.integer
-patterns.oct = P("0") * R("07")^1
-patterns.octal = patterns.oct
-patterns.HEX = P("0x") * R("09","AF")^1
-patterns.hex = P("0x") * R("09","af")^1
-patterns.hexadecimal = P("0x") * R("09","AF","af")^1
-patterns.lowercase = R("az")
-patterns.uppercase = R("AZ")
-patterns.letter = patterns.lowercase + patterns.uppercase
-patterns.space = space
-patterns.tab = P("\t")
-patterns.spaceortab = patterns.space + patterns.tab
-patterns.eol = S("\n\r")
-patterns.spacer = S(" \t\f\v") -- + string.char(0xc2, 0xa0) if we want utf (cf mail roberto)
-patterns.newline = newline
-patterns.emptyline = newline^1
-patterns.nonspacer = 1 - patterns.spacer
-patterns.whitespace = patterns.eol + patterns.spacer
-patterns.nonwhitespace = 1 - patterns.whitespace
-patterns.equal = P("=")
-patterns.comma = P(",")
-patterns.commaspacer = P(",") * patterns.spacer^0
-patterns.period = P(".")
-patterns.colon = P(":")
-patterns.semicolon = P(";")
-patterns.underscore = P("_")
-patterns.escaped = escaped
-patterns.squote = squote
-patterns.dquote = dquote
-patterns.nosquote = (escaped + (1-squote))^0
-patterns.nodquote = (escaped + (1-dquote))^0
-patterns.unsingle = (squote/"") * patterns.nosquote * (squote/"")
-patterns.undouble = (dquote/"") * patterns.nodquote * (dquote/"")
-patterns.unquoted = patterns.undouble + patterns.unsingle -- more often undouble
-patterns.unspacer = ((patterns.spacer^1)/"")^0
-
-patterns.somecontent = (anything - newline - space)^1 -- (utf8char - newline - space)^1
-patterns.beginline = #(1-newline)
-
-local unquoted = Cs(patterns.unquoted * endofstring) -- not C
-
-function string.unquoted(str)
- return match(unquoted,str) or str
-end
-
-
-function lpeg.anywhere(pattern) --slightly adapted from website
- return P { P(pattern) + 1 * V(1) } -- why so complex?
-end
-
-function lpeg.splitter(pattern, action)
- return (((1-P(pattern))^1)/action+1)^0
-end
-
-local splitters_s, splitters_m = { }, { }
-
-local function splitat(separator,single)
- local splitter = (single and splitters_s[separator]) or splitters_m[separator]
- if not splitter then
- separator = P(separator)
- local other = C((1 - separator)^0)
- if single then
- local any = anything
- splitter = other * (separator * C(any^0) + "") -- ?
- splitters_s[separator] = splitter
- else
- splitter = other * (separator * other)^0
- splitters_m[separator] = splitter
- end
- end
- return splitter
-end
-
-lpeg.splitat = splitat
-
-
-local cache = { }
-
-function lpeg.split(separator,str)
- local c = cache[separator]
- if not c then
- c = Ct(splitat(separator))
- cache[separator] = c
- end
- return match(c,str)
-end
-
-function string.split(str,separator)
- local c = cache[separator]
- if not c then
- c = Ct(splitat(separator))
- cache[separator] = c
- end
- return match(c,str)
-end
-
-local spacing = patterns.spacer^0 * newline -- sort of strip
-local empty = spacing * Cc("")
-local nonempty = Cs((1-spacing)^1) * spacing^-1
-local content = (empty + nonempty)^1
-
-patterns.textline = content
-
-
-local linesplitter = Ct(splitat(newline))
-
-patterns.linesplitter = linesplitter
-
-function string.splitlines(str)
- return match(linesplitter,str)
-end
-
-local utflinesplitter = utfbom^-1 * Ct(splitat(newline))
-
-patterns.utflinesplitter = utflinesplitter
-
-function string.utfsplitlines(str)
- return match(utflinesplitter,str)
-end
-
-
-local cache = { }
-
-function lpeg.checkedsplit(separator,str)
- local c = cache[separator]
- if not c then
- separator = P(separator)
- local other = C((1 - separator)^1)
- c = Ct(separator^0 * other * (separator^1 * other)^0)
- cache[separator] = c
- end
- return match(c,str)
-end
-
-function string.checkedsplit(str,separator)
- local c = cache[separator]
- if not c then
- separator = P(separator)
- local other = C((1 - separator)^1)
- c = Ct(separator^0 * other * (separator^1 * other)^0)
- cache[separator] = c
- end
- return match(c,str)
-end
-
-
-local f1 = string.byte
-
-local function f2(s) local c1, c2 = f1(s,1,2) return c1 * 64 + c2 - 12416 end
-local function f3(s) local c1, c2, c3 = f1(s,1,3) return (c1 * 64 + c2) * 64 + c3 - 925824 end
-local function f4(s) local c1, c2, c3, c4 = f1(s,1,4) return ((c1 * 64 + c2) * 64 + c3) * 64 + c4 - 63447168 end
-
-local utf8byte = patterns.utf8one/f1 + patterns.utf8two/f2 + patterns.utf8three/f3 + patterns.utf8four/f4
-
-patterns.utf8byte = utf8byte
-
-
-
-local cache = { }
-
-function lpeg.stripper(str)
- if type(str) == "string" then
- local s = cache[str]
- if not s then
- s = Cs(((S(str)^1)/"" + 1)^0)
- cache[str] = s
- end
- return s
- else
- return Cs(((str^1)/"" + 1)^0)
- end
-end
-
-local cache = { }
-
-function lpeg.keeper(str)
- if type(str) == "string" then
- local s = cache[str]
- if not s then
- s = Cs((((1-S(str))^1)/"" + 1)^0)
- cache[str] = s
- end
- return s
- else
- return Cs((((1-str)^1)/"" + 1)^0)
- end
-end
-
-function lpeg.frontstripper(str) -- or pattern (yet undocumented)
- return (P(str) + P(true)) * Cs(P(1)^0)
-end
-
-function lpeg.endstripper(str) -- or pattern (yet undocumented)
- return Cs((1 - P(str) * P(-1))^0)
-end
-
--- Just for fun I looked at the used bytecode and
--- p = (p and p + pp) or pp gets one more (testset).
-
-function lpeg.replacer(one,two)
- if type(one) == "table" then
- local no = #one
- if no > 0 then
- local p
- for i=1,no do
- local o = one[i]
- local pp = P(o[1]) / o[2]
- if p then
- p = p + pp
- else
- p = pp
- end
- end
- return Cs((p + 1)^0)
- end
- else
- two = two or ""
- return Cs((P(one)/two + 1)^0)
- end
-end
-
-local splitters_f, splitters_s = { }, { }
-
-function lpeg.firstofsplit(separator) -- always return value
- local splitter = splitters_f[separator]
- if not splitter then
- separator = P(separator)
- splitter = C((1 - separator)^0)
- splitters_f[separator] = splitter
- end
- return splitter
-end
-
-function lpeg.secondofsplit(separator) -- nil if not split
- local splitter = splitters_s[separator]
- if not splitter then
- separator = P(separator)
- splitter = (1 - separator)^0 * separator * C(anything^0)
- splitters_s[separator] = splitter
- end
- return splitter
-end
-
-function lpeg.balancer(left,right)
- left, right = P(left), P(right)
- return P { left * ((1 - left - right) + V(1))^0 * right }
-end
-
-
-
-local nany = utf8char/""
-
-function lpeg.counter(pattern)
- pattern = Cs((P(pattern)/" " + nany)^0)
- return function(str)
- return #match(pattern,str)
- end
-end
-
-if utfgmatch then
-
- function lpeg.count(str,what) -- replaces string.count
- if type(what) == "string" then
- local n = 0
- for _ in utfgmatch(str,what) do
- n = n + 1
- end
- return n
- else -- 4 times slower but still faster than / function
- return #match(Cs((P(what)/" " + nany)^0),str)
- end
- end
-
-else
-
- local cache = { }
-
- function lpeg.count(str,what) -- replaces string.count
- if type(what) == "string" then
- local p = cache[what]
- if not p then
- p = Cs((P(what)/" " + nany)^0)
- cache[p] = p
- end
- return #match(p,str)
- else -- 4 times slower but still faster than / function
- return #match(Cs((P(what)/" " + nany)^0),str)
- end
- end
-
-end
-
-local patterns_escapes = { -- also defines in l-string
- ["%"] = "%%",
- ["."] = "%.",
- ["+"] = "%+", ["-"] = "%-", ["*"] = "%*",
- ["["] = "%[", ["]"] = "%]",
- ["("] = "%)", [")"] = "%)",
- -- ["{"] = "%{", ["}"] = "%}"
- -- ["^"] = "%^", ["$"] = "%$",
-}
-
-local simple_escapes = { -- also defines in l-string
- ["-"] = "%-",
- ["."] = "%.",
- ["?"] = ".",
- ["*"] = ".*",
-}
-
-local p = Cs((S("-.+*%()[]") / patterns_escapes + anything)^0)
-local s = Cs((S("-.+*%()[]") / simple_escapes + anything)^0)
-
-function string.escapedpattern(str,simple)
- return match(simple and s or p,str)
-end
-
--- utf extensies
-
-lpeg.UP = lpeg.P
-
-if utfcharacters then
-
- function lpeg.US(str)
- local p
- for uc in utfcharacters(str) do
- if p then
- p = p + P(uc)
- else
- p = P(uc)
- end
- end
- return p
- end
-
-
-elseif utfgmatch then
-
- function lpeg.US(str)
- local p
- for uc in utfgmatch(str,".") do
- if p then
- p = p + P(uc)
- else
- p = P(uc)
- end
- end
- return p
- end
-
-else
-
- function lpeg.US(str)
- local p
- local f = function(uc)
- if p then
- p = p + P(uc)
- else
- p = P(uc)
- end
- end
- match((utf8char/f)^0,str)
- return p
- end
-
-end
-
-local range = Cs(utf8byte) * (Cs(utf8byte) + Cc(false))
-
-local utfchar = unicode and unicode.utf8 and unicode.utf8.char
-
-function lpeg.UR(str,more)
- local first, last
- if type(str) == "number" then
- first = str
- last = more or first
- else
- first, last = match(range,str)
- if not last then
- return P(str)
- end
- end
- if first == last then
- return P(str)
- elseif utfchar and last - first < 8 then -- a somewhat arbitrary criterium
- local p
- for i=first,last do
- if p then
- p = p + P(utfchar(i))
- else
- p = P(utfchar(i))
- end
- end
- return p -- nil when invalid range
- else
- local f = function(b)
- return b >= first and b <= last
- end
- return utf8byte / f -- nil when invalid range
- end
-end
-
-
-
-function lpeg.oneof(list,...) -- lpeg.oneof("elseif","else","if","then")
- if type(list) ~= "table" then
- list = { list, ... }
- end
- -- sort(list) -- longest match first
- local p = P(list[1])
- for l=2,#list do
- p = p + P(list[l])
- end
- return p
-end
-
-function lpeg.is_lpeg(p)
- return p and lpegtype(p) == "pattern"
-end
-
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
if not modules then modules = { } end modules ['l-table'] = {
version = 1.001,
comment = "companion to luat-lib.mkiv",
@@ -675,11 +172,12 @@ local type, next, tostring, tonumber, ipairs, table, string = type, next, tostri
local concat, sort, insert, remove = table.concat, table.sort, table.insert, table.remove
local format, find, gsub, lower, dump, match = string.format, string.find, string.gsub, string.lower, string.dump, string.match
local getmetatable, setmetatable = getmetatable, setmetatable
+local getinfo = debug.getinfo
-- Starting with version 5.2 Lua no longer provide ipairs, which makes
-- sense. As we already used the for loop and # in most places the
-- impact on ConTeXt was not that large; the remaining ipairs already
--- have been replaced. In a similar fashio we also hardly used pairs.
+-- have been replaced. In a similar fashion we also hardly used pairs.
--
-- Just in case, we provide the fallbacks as discussed in Programming
-- in Lua (http://www.lua.org/pil/7.3.html):
@@ -1022,6 +520,8 @@ end
-- problem: there is no good number_to_string converter with the best resolution
+local function dummy() end
+
local function do_serialize(root,name,depth,level,indexed)
if level > 0 then
depth = depth .. " "
@@ -1214,19 +714,20 @@ local function do_serialize(root,name,depth,level,indexed)
end
elseif t == "function" then
if functions then
+ local f = getinfo(v).what == "C" and dump(dummy) or dump(v)
+ -- local f = getinfo(v).what == "C" and dump(function(...) return v(...) end) or dump(v)
if tk == "number" then -- or find(k,"^%d+$") then
if hexify then
- handle(format("%s [0x%04X]=loadstring(%q),",depth,k,dump(v)))
+ handle(format("%s [0x%04X]=loadstring(%q),",depth,k,f))
else
- handle(format("%s [%s]=loadstring(%q),",depth,k,dump(v)))
+ handle(format("%s [%s]=loadstring(%q),",depth,k,f))
end
elseif tk == "boolean" then
- handle(format("%s [%s]=loadstring(%q),",depth,tostring(k),dump(v)))
+ handle(format("%s [%s]=loadstring(%q),",depth,tostring(k),f))
elseif noquotes and not reserved[k] and find(k,"^%a[%w%_]*$") then
- handle(format("%s %s=loadstring(%q),",depth,k,dump(v)))
+ handle(format("%s %s=loadstring(%q),",depth,k,f))
else
- -- handle(format("%s [%q]=loadstring(%q),",depth,k,dump(v)))
- handle(format("%s [%q]=loadstring(%q),",depth,k,debug.getinfo(v).what == "C" and "C code" or dump(v)))
+ handle(format("%s [%q]=loadstring(%q),",depth,k,f))
end
end
else
@@ -1571,6 +1072,730 @@ function table.has_one_entry(t)
return t and not next(t,next(t))
end
+-- new
+
+function table.loweredkeys(t) -- maybe utf
+ local l = { }
+ for k, v in next, t do
+ l[lower(k)] = v
+ end
+ return l
+end
+
+-- new, might move (maybe duplicate)
+
+function table.unique(old)
+ local hash = { }
+ local new = { }
+ local n = 0
+ for i=1,#old do
+ local oi = old[i]
+ if not hash[oi] then
+ n = n + 1
+ new[n] = oi
+ hash[oi] = true
+ end
+ end
+ return new
+end
+
+-- function table.sorted(t,...)
+-- table.sort(t,...)
+-- return t -- still sorts in-place
+-- end
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+if not modules then modules = { } end modules ['l-lpeg'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+
+-- a new lpeg fails on a #(1-P(":")) test and really needs a + P(-1)
+
+local lpeg = require("lpeg")
+
+-- tracing (only used when we encounter a problem in integration of lpeg in luatex)
+
+local report = texio and texio.write_nl or print
+
+
+
+
+local type = type
+local byte, char, gmatch = string.byte, string.char, string.gmatch
+
+-- Beware, we predefine a bunch of patterns here and one reason for doing so
+-- is that we get consistent behaviour in some of the visualizers.
+
+lpeg.patterns = lpeg.patterns or { } -- so that we can share
+local patterns = lpeg.patterns
+
+local P, R, S, V, match = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.match
+local Ct, C, Cs, Cc = lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc
+local lpegtype = lpeg.type
+
+local utfcharacters = string.utfcharacters
+local utfgmatch = unicode and unicode.utf8.gmatch
+
+local anything = P(1)
+local endofstring = P(-1)
+local alwaysmatched = P(true)
+
+patterns.anything = anything
+patterns.endofstring = endofstring
+patterns.beginofstring = alwaysmatched
+patterns.alwaysmatched = alwaysmatched
+
+local digit, sign = R('09'), S('+-')
+local cr, lf, crlf = P("\r"), P("\n"), P("\r\n")
+local newline = crlf + S("\r\n") -- cr + lf
+local escaped = P("\\") * anything
+local squote = P("'")
+local dquote = P('"')
+local space = P(" ")
+
+local utfbom_32_be = P('\000\000\254\255')
+local utfbom_32_le = P('\255\254\000\000')
+local utfbom_16_be = P('\255\254')
+local utfbom_16_le = P('\254\255')
+local utfbom_8 = P('\239\187\191')
+local utfbom = utfbom_32_be + utfbom_32_le
+ + utfbom_16_be + utfbom_16_le
+ + utfbom_8
+local utftype = utfbom_32_be / "utf-32-be" + utfbom_32_le / "utf-32-le"
+ + utfbom_16_be / "utf-16-be" + utfbom_16_le / "utf-16-le"
+ + utfbom_8 / "utf-8" + alwaysmatched / "unknown"
+
+local utf8next = R("\128\191")
+
+patterns.utf8one = R("\000\127")
+patterns.utf8two = R("\194\223") * utf8next
+patterns.utf8three = R("\224\239") * utf8next * utf8next
+patterns.utf8four = R("\240\244") * utf8next * utf8next * utf8next
+patterns.utfbom = utfbom
+patterns.utftype = utftype
+
+local utf8char = patterns.utf8one + patterns.utf8two + patterns.utf8three + patterns.utf8four
+local validutf8char = utf8char^0 * endofstring * Cc(true) + Cc(false)
+
+patterns.utf8 = utf8char
+patterns.utf8char = utf8char
+patterns.validutf8 = validutf8char
+patterns.validutf8char = validutf8char
+
+patterns.digit = digit
+patterns.sign = sign
+patterns.cardinal = sign^0 * digit^1
+patterns.integer = sign^0 * digit^1
+patterns.float = sign^0 * digit^0 * P('.') * digit^1
+patterns.cfloat = sign^0 * digit^0 * P(',') * digit^1
+patterns.number = patterns.float + patterns.integer
+patterns.cnumber = patterns.cfloat + patterns.integer
+patterns.oct = P("0") * R("07")^1
+patterns.octal = patterns.oct
+patterns.HEX = P("0x") * R("09","AF")^1
+patterns.hex = P("0x") * R("09","af")^1
+patterns.hexadecimal = P("0x") * R("09","AF","af")^1
+patterns.lowercase = R("az")
+patterns.uppercase = R("AZ")
+patterns.letter = patterns.lowercase + patterns.uppercase
+patterns.space = space
+patterns.tab = P("\t")
+patterns.spaceortab = patterns.space + patterns.tab
+patterns.eol = S("\n\r")
+patterns.spacer = S(" \t\f\v") -- + char(0xc2, 0xa0) if we want utf (cf mail roberto)
+patterns.newline = newline
+patterns.emptyline = newline^1
+patterns.nonspacer = 1 - patterns.spacer
+patterns.whitespace = patterns.eol + patterns.spacer
+patterns.nonwhitespace = 1 - patterns.whitespace
+patterns.equal = P("=")
+patterns.comma = P(",")
+patterns.commaspacer = P(",") * patterns.spacer^0
+patterns.period = P(".")
+patterns.colon = P(":")
+patterns.semicolon = P(";")
+patterns.underscore = P("_")
+patterns.escaped = escaped
+patterns.squote = squote
+patterns.dquote = dquote
+patterns.nosquote = (escaped + (1-squote))^0
+patterns.nodquote = (escaped + (1-dquote))^0
+patterns.unsingle = (squote/"") * patterns.nosquote * (squote/"")
+patterns.undouble = (dquote/"") * patterns.nodquote * (dquote/"")
+patterns.unquoted = patterns.undouble + patterns.unsingle -- more often undouble
+patterns.unspacer = ((patterns.spacer^1)/"")^0
+
+patterns.somecontent = (anything - newline - space)^1 -- (utf8char - newline - space)^1
+patterns.beginline = #(1-newline)
+
+-- print(string.unquoted("test"))
+-- print(string.unquoted([["t\"est"]]))
+-- print(string.unquoted([["t\"est"x]]))
+-- print(string.unquoted("\'test\'"))
+-- print(string.unquoted('"test"'))
+-- print(string.unquoted('"test"'))
+
+function lpeg.anywhere(pattern) --slightly adapted from website
+ return P { P(pattern) + 1 * V(1) } -- why so complex?
+end
+
+function lpeg.splitter(pattern, action)
+ return (((1-P(pattern))^1)/action+1)^0
+end
+
+function lpeg.tsplitter(pattern, action)
+ return Ct((((1-P(pattern))^1)/action+1)^0)
+end
+
+-- problem: separator can be an lpeg and that does not hash too well, but
+-- it's quite okay as the key is then not garbage collected
+
+local splitters_s, splitters_m, splitters_t = { }, { }, { }
+
+local function splitat(separator,single)
+ local splitter = (single and splitters_s[separator]) or splitters_m[separator]
+ if not splitter then
+ separator = P(separator)
+ local other = C((1 - separator)^0)
+ if single then
+ local any = anything
+ splitter = other * (separator * C(any^0) + "") -- ?
+ splitters_s[separator] = splitter
+ else
+ splitter = other * (separator * other)^0
+ splitters_m[separator] = splitter
+ end
+ end
+ return splitter
+end
+
+local function tsplitat(separator)
+ local splitter = splitters_t[separator]
+ if not splitter then
+ splitter = Ct(splitat(separator))
+ splitters_t[separator] = splitter
+ end
+ return splitter
+end
+
+lpeg.splitat = splitat
+lpeg.tsplitat = tsplitat
+
+function string.splitup(str,separator)
+ if not separator then
+ separator = ","
+ end
+ return match(splitters_m[separator] or splitat(separator),str)
+end
+
+
+local cache = { }
+
+function lpeg.split(separator,str)
+ local c = cache[separator]
+ if not c then
+ c = tsplitat(separator)
+ cache[separator] = c
+ end
+ return match(c,str)
+end
+
+function string.split(str,separator)
+ local c = cache[separator]
+ if not c then
+ c = tsplitat(separator)
+ cache[separator] = c
+ end
+ return match(c,str)
+end
+
+local spacing = patterns.spacer^0 * newline -- sort of strip
+local empty = spacing * Cc("")
+local nonempty = Cs((1-spacing)^1) * spacing^-1
+local content = (empty + nonempty)^1
+
+patterns.textline = content
+
+
+local linesplitter = tsplitat(newline)
+
+patterns.linesplitter = linesplitter
+
+function string.splitlines(str)
+ return match(linesplitter,str)
+end
+
+local utflinesplitter = utfbom^-1 * tsplitat(newline)
+
+patterns.utflinesplitter = utflinesplitter
+
+function string.utfsplitlines(str)
+ return match(utflinesplitter,str or "")
+end
+
+
+local cache = { }
+
+function lpeg.checkedsplit(separator,str)
+ local c = cache[separator]
+ if not c then
+ separator = P(separator)
+ local other = C((1 - separator)^1)
+ c = Ct(separator^0 * other * (separator^1 * other)^0)
+ cache[separator] = c
+ end
+ return match(c,str)
+end
+
+function string.checkedsplit(str,separator)
+ local c = cache[separator]
+ if not c then
+ separator = P(separator)
+ local other = C((1 - separator)^1)
+ c = Ct(separator^0 * other * (separator^1 * other)^0)
+ cache[separator] = c
+ end
+ return match(c,str)
+end
+
+
+local function f2(s) local c1, c2 = byte(s,1,2) return c1 * 64 + c2 - 12416 end
+local function f3(s) local c1, c2, c3 = byte(s,1,3) return (c1 * 64 + c2) * 64 + c3 - 925824 end
+local function f4(s) local c1, c2, c3, c4 = byte(s,1,4) return ((c1 * 64 + c2) * 64 + c3) * 64 + c4 - 63447168 end
+
+local utf8byte = patterns.utf8one/byte + patterns.utf8two/f2 + patterns.utf8three/f3 + patterns.utf8four/f4
+
+patterns.utf8byte = utf8byte
+
+
+
+local cache = { }
+
+function lpeg.stripper(str)
+ if type(str) == "string" then
+ local s = cache[str]
+ if not s then
+ s = Cs(((S(str)^1)/"" + 1)^0)
+ cache[str] = s
+ end
+ return s
+ else
+ return Cs(((str^1)/"" + 1)^0)
+ end
+end
+
+local cache = { }
+
+function lpeg.keeper(str)
+ if type(str) == "string" then
+ local s = cache[str]
+ if not s then
+ s = Cs((((1-S(str))^1)/"" + 1)^0)
+ cache[str] = s
+ end
+ return s
+ else
+ return Cs((((1-str)^1)/"" + 1)^0)
+ end
+end
+
+function lpeg.frontstripper(str) -- or pattern (yet undocumented)
+ return (P(str) + P(true)) * Cs(P(1)^0)
+end
+
+function lpeg.endstripper(str) -- or pattern (yet undocumented)
+ return Cs((1 - P(str) * P(-1))^0)
+end
+
+-- Just for fun I looked at the used bytecode and
+-- p = (p and p + pp) or pp gets one more (testset).
+
+function lpeg.replacer(one,two)
+ if type(one) == "table" then
+ local no = #one
+ if no > 0 then
+ local p
+ for i=1,no do
+ local o = one[i]
+ local pp = P(o[1]) / o[2]
+ if p then
+ p = p + pp
+ else
+ p = pp
+ end
+ end
+ return Cs((p + 1)^0)
+ end
+ else
+ two = two or ""
+ return Cs((P(one)/two + 1)^0)
+ end
+end
+
+local splitters_f, splitters_s = { }, { }
+
+function lpeg.firstofsplit(separator) -- always return value
+ local splitter = splitters_f[separator]
+ if not splitter then
+ separator = P(separator)
+ splitter = C((1 - separator)^0)
+ splitters_f[separator] = splitter
+ end
+ return splitter
+end
+
+function lpeg.secondofsplit(separator) -- nil if not split
+ local splitter = splitters_s[separator]
+ if not splitter then
+ separator = P(separator)
+ splitter = (1 - separator)^0 * separator * C(anything^0)
+ splitters_s[separator] = splitter
+ end
+ return splitter
+end
+
+function lpeg.balancer(left,right)
+ left, right = P(left), P(right)
+ return P { left * ((1 - left - right) + V(1))^0 * right }
+end
+
+
+
+local nany = utf8char/""
+
+function lpeg.counter(pattern)
+ pattern = Cs((P(pattern)/" " + nany)^0)
+ return function(str)
+ return #match(pattern,str)
+ end
+end
+
+if utfgmatch then
+
+ function lpeg.count(str,what) -- replaces string.count
+ if type(what) == "string" then
+ local n = 0
+ for _ in utfgmatch(str,what) do
+ n = n + 1
+ end
+ return n
+ else -- 4 times slower but still faster than / function
+ return #match(Cs((P(what)/" " + nany)^0),str)
+ end
+ end
+
+else
+
+ local cache = { }
+
+ function lpeg.count(str,what) -- replaces string.count
+ if type(what) == "string" then
+ local p = cache[what]
+ if not p then
+ p = Cs((P(what)/" " + nany)^0)
+ cache[p] = p
+ end
+ return #match(p,str)
+ else -- 4 times slower but still faster than / function
+ return #match(Cs((P(what)/" " + nany)^0),str)
+ end
+ end
+
+end
+
+local patterns_escapes = { -- also defined in l-string
+ ["%"] = "%%",
+ ["."] = "%.",
+ ["+"] = "%+", ["-"] = "%-", ["*"] = "%*",
+ ["["] = "%[", ["]"] = "%]",
+ ["("] = "%)", [")"] = "%)",
+ -- ["{"] = "%{", ["}"] = "%}"
+ -- ["^"] = "%^", ["$"] = "%$",
+}
+
+local simple_escapes = { -- also defined in l-string
+ ["-"] = "%-",
+ ["."] = "%.",
+ ["?"] = ".",
+ ["*"] = ".*",
+}
+
+local p = Cs((S("-.+*%()[]") / patterns_escapes + anything)^0)
+local s = Cs((S("-.+*%()[]") / simple_escapes + anything)^0)
+
+function string.escapedpattern(str,simple)
+ return match(simple and s or p,str)
+end
+
+-- utf extensions
+
+lpeg.UP = lpeg.P
+
+if utfcharacters then
+
+ function lpeg.US(str)
+ local p
+ for uc in utfcharacters(str) do
+ if p then
+ p = p + P(uc)
+ else
+ p = P(uc)
+ end
+ end
+ return p
+ end
+
+
+elseif utfgmatch then
+
+ function lpeg.US(str)
+ local p
+ for uc in utfgmatch(str,".") do
+ if p then
+ p = p + P(uc)
+ else
+ p = P(uc)
+ end
+ end
+ return p
+ end
+
+else
+
+ function lpeg.US(str)
+ local p
+ local f = function(uc)
+ if p then
+ p = p + P(uc)
+ else
+ p = P(uc)
+ end
+ end
+ match((utf8char/f)^0,str)
+ return p
+ end
+
+end
+
+local range = Cs(utf8byte) * (Cs(utf8byte) + Cc(false))
+
+local utfchar = unicode and unicode.utf8 and unicode.utf8.char
+
+function lpeg.UR(str,more)
+ local first, last
+ if type(str) == "number" then
+ first = str
+ last = more or first
+ else
+ first, last = match(range,str)
+ if not last then
+ return P(str)
+ end
+ end
+ if first == last then
+ return P(str)
+ elseif utfchar and last - first < 8 then -- a somewhat arbitrary criterion
+ local p
+ for i=first,last do
+ if p then
+ p = p + P(utfchar(i))
+ else
+ p = P(utfchar(i))
+ end
+ end
+ return p -- nil when invalid range
+ else
+ local f = function(b)
+ return b >= first and b <= last
+ end
+ return utf8byte / f -- nil when invalid range
+ end
+end
+
+
+
+function lpeg.oneof(list,...) -- lpeg.oneof("elseif","else","if","then")
+ if type(list) ~= "table" then
+ list = { list, ... }
+ end
+ -- sort(list) -- longest match first
+ local p = P(list[1])
+ for l=2,#list do
+ p = p + P(list[l])
+ end
+ return p
+end
+
+function lpeg.is_lpeg(p)
+ return p and lpegtype(p) == "pattern"
+end
+
+-- For the moment here, but it might move to utilities. Beware, we need to
+-- have the longest keyword first, so 'aaa' comes before 'aa' which is why we
+-- loop back from the end, i.e. prepend.
+
+local sort, fastcopy, sortedkeys = table.sort, table.fastcopy, table.sortedkeys -- dependency!
+
+function lpeg.append(list,pp,delayed,checked)
+ local p = pp
+ if #list > 0 then
+ local keys = fastcopy(list)
+ sort(keys)
+ for i=#keys,1,-1 do
+ local k = keys[i]
+ if p then
+ p = P(k) + p
+ else
+ p = P(k)
+ end
+ end
+ elseif delayed then -- hm, it looks like the lpeg parser resolves anyway
+ local keys = sortedkeys(list)
+ if p then
+ for i=1,#keys,1 do
+ local k = keys[i]
+ local v = list[k]
+ p = P(k)/list + p
+ end
+ else
+ for i=1,#keys do
+ local k = keys[i]
+ local v = list[k]
+ if p then
+ p = P(k) + p
+ else
+ p = P(k)
+ end
+ end
+ if p then
+ p = p / list
+ end
+ end
+ elseif checked then
+ -- problem: substitution gives a capture
+ local keys = sortedkeys(list)
+ for i=1,#keys do
+ local k = keys[i]
+ local v = list[k]
+ if p then
+ if k == v then
+ p = P(k) + p
+ else
+ p = P(k)/v + p
+ end
+ else
+ if k == v then
+ p = P(k)
+ else
+ p = P(k)/v
+ end
+ end
+ end
+ else
+ local keys = sortedkeys(list)
+ for i=1,#keys do
+ local k = keys[i]
+ local v = list[k]
+ if p then
+ p = P(k)/v + p
+ else
+ p = P(k)/v
+ end
+ end
+ end
+ return p
+end
+
+-- inspect(lpeg.append({ a = "1", aa = "1", aaa = "1" } ,nil,true))
+-- inspect(lpeg.append({ ["degree celsius"] = "1", celsius = "1", degree = "1" } ,nil,true))
+
+-- function lpeg.exact_match(words,case_insensitive)
+-- local pattern = concat(words)
+-- if case_insensitive then
+-- local pattern = S(upper(characters)) + S(lower(characters))
+-- local list = { }
+-- for i=1,#words do
+-- list[lower(words[i])] = true
+-- end
+-- return Cmt(pattern^1, function(_,i,s)
+-- return list[lower(s)] and i
+-- end)
+-- else
+-- local pattern = S(concat(words))
+-- local list = { }
+-- for i=1,#words do
+-- list[words[i]] = true
+-- end
+-- return Cmt(pattern^1, function(_,i,s)
+-- return list[s] and i
+-- end)
+-- end
+-- end
+
+-- experiment:
+
+local function make(t)
+ local p
+-- for k, v in next, t do
+ for k, v in table.sortedhash(t) do
+ if not p then
+ if next(v) then
+ p = P(k) * make(v)
+ else
+ p = P(k)
+ end
+ else
+ if next(v) then
+ p = p + P(k) * make(v)
+ else
+ p = p + P(k)
+ end
+ end
+ end
+ return p
+end
+
+function lpeg.utfchartabletopattern(list)
+ local tree = { }
+ for i=1,#list do
+ local t = tree
+ for c in gmatch(list[i],".") do
+ if not t[c] then
+ t[c] = { }
+ end
+ t = t[c]
+ end
+ end
+ return make(tree)
+end
+
+-- inspect ( lpeg.utfchartabletopattern {
+-- utfchar(0x00A0), -- nbsp
+-- utfchar(0x2000), -- enquad
+-- utfchar(0x2001), -- emquad
+-- utfchar(0x2002), -- enspace
+-- utfchar(0x2003), -- emspace
+-- utfchar(0x2004), -- threeperemspace
+-- utfchar(0x2005), -- fourperemspace
+-- utfchar(0x2006), -- sixperemspace
+-- utfchar(0x2007), -- figurespace
+-- utfchar(0x2008), -- punctuationspace
+-- utfchar(0x2009), -- breakablethinspace
+-- utfchar(0x200A), -- hairspace
+-- utfchar(0x200B), -- zerowidthspace
+-- utfchar(0x202F), -- narrownobreakspace
+-- utfchar(0x205F), -- math thinspace
+-- } )
+
end -- of closure
@@ -1831,6 +2056,11 @@ function io.readstring(f,n,m)
return str
end
+--
+
+if not io.i_limiter then function io.i_limiter() end end -- dummy so we can test safely
+if not io.o_limiter then function io.o_limiter() end end -- dummy so we can test safely
+
end -- of closure
@@ -2061,7 +2291,7 @@ if not modules then modules = { } end modules ['l-os'] = {
-- maybe build io.flush in os.execute
local os = os
-local find, format, gsub, upper = string.find, string.format, string.gsub, string.upper
+local find, format, gsub, upper, gmatch = string.find, string.format, string.gsub, string.upper, string.gmatch
local concat = table.concat
local random, ceil = math.random, math.ceil
local rawget, rawset, type, getmetatable, setmetatable, tonumber = rawget, rawset, type, getmetatable, setmetatable, tonumber
@@ -2167,9 +2397,9 @@ os.type = os.type or (io.pathseparator == ";" and "windows") or "unix"
os.name = os.name or (os.type == "windows" and "mswin" ) or "linux"
if os.type == "windows" then
- os.libsuffix, os.binsuffix = 'dll', 'exe'
+ os.libsuffix, os.binsuffix, os.binsuffixes = 'dll', 'exe', { 'exe', 'cmd', 'bat' }
else
- os.libsuffix, os.binsuffix = 'so', ''
+ os.libsuffix, os.binsuffix, os.binsuffixes = 'so', '', { '' }
end
function os.launch(str)
@@ -2400,6 +2630,39 @@ function os.timezone(delta)
end
end
+local memory = { }
+
+local function which(filename)
+ local fullname = memory[filename]
+ if fullname == nil then
+ local suffix = file.suffix(filename)
+ local suffixes = suffix == "" and os.binsuffixes or { suffix }
+ for directory in gmatch(os.getenv("PATH"),"[^" .. io.pathseparator .."]+") do
+ local df = file.join(directory,filename)
+ for i=1,#suffixes do
+ local dfs = file.addsuffix(df,suffixes[i])
+ if io.exists(dfs) then
+ fullname = dfs
+ break
+ end
+ end
+ end
+ if not fullname then
+ fullname = false
+ end
+ memory[filename] = fullname
+ end
+ return fullname
+end
+
+os.which = which
+os.where = which
+
+-- print(os.which("inkscape.exe"))
+-- print(os.which("inkscape"))
+-- print(os.which("gs.exe"))
+-- print(os.which("ps2pdf"))
+
end -- of closure
@@ -2499,10 +2762,10 @@ end
local trick_1 = char(1)
local trick_2 = "^" .. trick_1 .. "/+"
-function file.join(...)
+function file.join(...) -- rather dirty
local lst = { ... }
local a, b = lst[1], lst[2]
- if a == "" then
+ if not a or a == "" then -- not a added
lst[1] = trick_1
elseif b and find(a,"^/+$") and find(b,"^/") then
lst[1] = ""
@@ -2523,6 +2786,15 @@ function file.join(...)
end
+-- We should be able to use:
+--
+-- function file.is_writable(name)
+-- local a = attributes(name) or attributes(dirname(name,"."))
+-- return a and sub(a.permissions,2,2) == "w"
+-- end
+--
+-- But after some testing Taco and I came up with:
+
function file.is_writable(name)
if lfs.isdir(name) then
name = name .. "/m_t_x_t_e_s_t.tmp"
@@ -2532,12 +2804,17 @@ function file.is_writable(name)
os.remove(name)
return true
end
- else
- local existing = lfs.isfile(name)
- f = io.open(name,"ab")
+ elseif lfs.isfile(name) then
+ local f = io.open(name,"ab")
if f then
f:close()
- if not existing then os.remove(name) end
+ return true
+ end
+ else
+ local f = io.open(name,"ab")
+ if f then
+ f:close()
+ os.remove(name)
return true
end
end
@@ -2594,7 +2871,7 @@ function file.collapsepath(str,anchor)
if element == '.' then
-- do nothing
elseif element == '..' then
- local n = i -1
+ local n = i - 1
while n > 0 do
local element = oldelements[n]
if element ~= '..' and element ~= '.' then
@@ -2690,10 +2967,42 @@ local path = C(((1-slash)^0 * slash)^0)
local suffix = period * C(P(1-period)^0 * P(-1))
local base = C((1-suffix)^0)
-local pattern = (drive + Cc("")) * (path + Cc("")) * (base + Cc("")) * (suffix + Cc(""))
+drive = drive + Cc("")
+path = path + Cc("")
+base = base + Cc("")
+suffix = suffix + Cc("")
-function file.splitname(str) -- returns drive, path, base, suffix
- return lpegmatch(pattern,str)
+local pattern_a = drive * path * base * suffix
+local pattern_b = path * base * suffix
+local pattern_c = C(drive * path) * C(base * suffix)
+
+function file.splitname(str,splitdrive)
+ if splitdrive then
+ return lpegmatch(pattern_a,str) -- returns drive, path, base, suffix
+ else
+ return lpegmatch(pattern_b,str) -- returns path, base, suffix
+ end
+end
+
+function file.nametotable(str,splitdrive) -- returns table
+ local path, drive, subpath, name, base, suffix = lpegmatch(pattern_c,str)
+ if splitdrive then
+ return {
+ path = path,
+ drive = drive,
+ subpath = subpath,
+ name = name,
+ base = base,
+ suffix = suffix,
+ }
+ else
+ return {
+ path = path,
+ name = name,
+ base = base,
+ suffix = suffix,
+ }
+ end
end
-- function test(t) for k, v in next, t do print(v, "=>", file.splitname(v)) end end
@@ -2828,12 +3137,19 @@ local nothing = Cc("")
local escaped = (plus / " ") + (percent * C(hexdigit * hexdigit) / tochar)
-- we assume schemes with more than 1 character (in order to avoid problems with windows disks)
+-- we also assume that when we have a scheme, we also have an authority
+
+local schemestr = Cs((escaped+(1-colon-slash-qmark-hash))^2)
+local authoritystr = Cs((escaped+(1- slash-qmark-hash))^0)
+local pathstr = Cs((escaped+(1- qmark-hash))^0)
+local querystr = Cs((escaped+(1- hash))^0)
+local fragmentstr = Cs((escaped+(1- endofstring))^0)
-local scheme = Cs((escaped+(1-colon-slash-qmark-hash))^2) * colon * slash * slash + nothing
-local authority = Cs((escaped+(1- slash-qmark-hash))^0) + nothing
-local path = slash * Cs((escaped+(1- qmark-hash))^0) + nothing
-local query = qmark * Cs((escaped+(1- hash))^0) + nothing
-local fragment = hash * Cs((escaped+(1- endofstring))^0) + nothing
+local scheme = schemestr * colon + nothing
+local authority = slash * slash * authoritystr + nothing
+local path = slash * pathstr + nothing
+local query = qmark * querystr + nothing
+local fragment = hash * fragmentstr + nothing
local validurl = scheme * authority * path * query * fragment
local parser = Ct(validurl)
@@ -2854,11 +3170,14 @@ local function split(str)
return (type(str) == "string" and lpegmatch(parser,str)) or str
end
+local isscheme = schemestr * colon * slash * slash -- this test also assumes authority
+
local function hasscheme(str)
- local scheme = lpegmatch(scheme,str) -- at least one character
- return scheme and scheme ~= ""
+ local scheme = lpegmatch(isscheme,str) -- at least one character
+ return scheme ~= "" and scheme or false
end
+
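-- Editor's illustration, not part of this changeset: assuming split and
-- hasscheme are exported as url.split and url.hasscheme further down in this
-- module (as elsewhere in l-url), the stricter scheme test behaves like:
--
-- print(url.hasscheme("http://www.pragma-ade.com/index.html")) -- "http"
-- print(url.hasscheme("index.html"))                           -- false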
-- todo: cache them
local rootletter = R("az","AZ")
@@ -3007,8 +3326,6 @@ local attributes = lfs.attributes
local walkdir = lfs.dir
local isdir = lfs.isdir
local isfile = lfs.isfile
-local mkdir = lfs.mkdir
-local chdir = lfs.chdir
local currentdir = lfs.currentdir
-- handy
@@ -3056,13 +3373,13 @@ local function collectpattern(path,patt,recurse,result)
local ok, scanner
result = result or { }
if path == "/" then
- ok, scanner = xpcall(function() return walkdir(path..".") end, function() end) -- kepler safe
+ ok, scanner, first = xpcall(function() return walkdir(path..".") end, function() end) -- kepler safe
else
- ok, scanner = xpcall(function() return walkdir(path) end, function() end) -- kepler safe
+ ok, scanner, first = xpcall(function() return walkdir(path) end, function() end) -- kepler safe
end
if ok and type(scanner) == "function" then
if not find(path,"/$") then path = path .. '/' end
- for name in scanner do
+ for name in scanner, first do
local full = path .. name
local attr = attributes(full)
local mode = attr.mode
@@ -3245,7 +3562,7 @@ if onwindows then
pth = pth .. "/" .. s
end
if make_indeed and not isdir(pth) then
- mkdir(pth)
+ lfs.mkdir(pth)
end
end
return pth, (isdir(pth) == true)
@@ -3277,7 +3594,7 @@ else
pth = pth .. "/" .. s
end
if make_indeed and not first and not isdir(pth) then
- mkdir(pth)
+ lfs.mkdir(pth)
end
end
else
@@ -3285,7 +3602,7 @@ else
for s in gmatch(str,"[^/]+") do
pth = pth .. "/" .. s
if make_indeed and not isdir(pth) then
- mkdir(pth)
+ lfs.mkdir(pth)
end
end
end
@@ -3313,10 +3630,10 @@ if onwindows then
first, last = match(str,"^([a-zA-Z]:)(.*)$")
if first and not find(last,"^/") then
local d = currentdir()
- if chdir(first) then
+ if lfs.chdir(first) then
first = dir.current()
end
- chdir(d)
+ lfs.chdir(d)
end
end
if not first then
@@ -3381,10 +3698,6 @@ local type, tonumber = type, tonumber
boolean = boolean or { }
local boolean = boolean
--- function boolean.tonumber(b)
--- return b and 1 or 0 -- test and test and return or return
--- end
-
function boolean.tonumber(b)
if b then return 1 else return 0 end -- test and return or return
end
@@ -3545,7 +3858,7 @@ local function utf16_to_utf8_be(t)
if right then
local now = 256*left + right
if more > 0 then
- now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000
+ now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000 -- the 0x10000 smells wrong
more = 0
r = r + 1
result[r] = utfchar(now)
@@ -3573,7 +3886,7 @@ local function utf16_to_utf8_le(t)
if right then
local now = 256*right + left
if more > 0 then
- now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000
+ now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000 -- the 0x10000 smells wrong
more = 0
r = r + 1
result[r] = utfchar(now)
@@ -3590,14 +3903,14 @@ local function utf16_to_utf8_le(t)
return t
end
-local function utf32_to_utf8_be(str)
+local function utf32_to_utf8_be(t)
if type(t) == "string" then
- t = utfsplitlines(str)
+ t = utfsplitlines(t)
end
local result = { } -- we reuse result
for i=1,#t do
local r, more = 0, -1
- for a,b in bytepairs(str) do
+ for a,b in bytepairs(t[i]) do
if a and b then
if more < 0 then
more = 256*256*256*a + 256*256*b
@@ -3612,17 +3925,17 @@ local function utf32_to_utf8_be(str)
end
t[i] = concat(result,"",1,r)
end
- return result
+ return t
end
-local function utf32_to_utf8_le(str)
+local function utf32_to_utf8_le(t)
if type(t) == "string" then
- t = utfsplitlines(str)
+ t = utfsplitlines(t)
end
local result = { } -- we reuse result
for i=1,#t do
local r, more = 0, -1
- for a,b in bytepairs(str) do
+ for a,b in bytepairs(t[i]) do
if a and b then
if more < 0 then
more = 256*b + a
@@ -3637,7 +3950,7 @@ local function utf32_to_utf8_le(str)
end
t[i] = concat(result,"",1,r)
end
- return result
+ return t
end
unicode.utf32_to_utf8_be = utf32_to_utf8_be
@@ -3706,15 +4019,67 @@ end
local lpegmatch = lpeg.match
-local utftype = lpeg.patterns.utftype
+local patterns = lpeg.patterns
+local utftype = patterns.utftype
function unicode.filetype(data)
return data and lpegmatch(utftype,data) or "unknown"
end
+local toentities = lpeg.Cs (
+ (
+ patterns.utf8one
+ + (
+ patterns.utf8two
+ + patterns.utf8three
+ + patterns.utf8four
+ ) / function(s) local b = utfbyte(s) if b < 127 then return s else return format("&#%X;",b) end end
+ )^0
+)
+
+patterns.toentities = toentities
+
+function utf.toentities(str)
+ return lpegmatch(toentities,str)
+end
+
+local P, C, R, Cs = lpeg.P, lpeg.C, lpeg.R, lpeg.Cs
+
+local one = P(1)
+local two = C(1) * C(1)
+local four = C(R(utfchar(0xD8),utfchar(0xFF))) * C(1) * C(1) * C(1)
+
+local pattern = P("\254\255") * Cs( (
+ four / function(a,b,c,d)
+ local ab = 0xFF * byte(a) + byte(b)
+ local cd = 0xFF * byte(c) + byte(d)
+ return utfchar((ab-0xD800)*0x400 + (cd-0xDC00) + 0x10000)
+ end
+ + two / function(a,b)
+ return utfchar(byte(a)*256 + byte(b))
+ end
+ + one
+ )^1 )
+ + P("\255\254") * Cs( (
+ four / function(b,a,d,c)
+ local ab = 0xFF * byte(a) + byte(b)
+ local cd = 0xFF * byte(c) + byte(d)
+ return utfchar((ab-0xD800)*0x400 + (cd-0xDC00) + 0x10000)
+ end
+ + two / function(b,a)
+ return utfchar(byte(a)*256 + byte(b))
+ end
+ + one
+ )^1 )
+
+function string.toutf(s)
+ return lpegmatch(pattern,s) or s -- todo: utf32
+end
+
+
end -- of closure
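-- Editor's illustration, not part of this changeset: string.toutf (added in
-- the closure above) only converts data that starts with one of the two byte
-- order marks the pattern anchors on; anything else is returned unchanged.
--
-- print(string.toutf("\254\255\000A\000B")) -- "AB" (utf-16 be input with bom)
-- print(string.toutf("already utf-8"))      -- returned as-is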
do -- create closure to overcome 200 locals limit
@@ -3750,8 +4115,8 @@ if not math.sind then
end
if not math.odd then
- function math.odd (n) return n % 2 == 0 end
- function math.even(n) return n % 2 ~= 0 end
+ function math.odd (n) return n % 2 ~= 0 end
+ function math.even(n) return n % 2 == 0 end
end
@@ -3771,9 +4136,10 @@ utilities = utilities or {}
utilities.tables = utilities.tables or { }
local tables = utilities.tables
-local format, gmatch = string.format, string.gmatch
+local format, gmatch, rep = string.format, string.gmatch, string.rep
local concat, insert, remove = table.concat, table.insert, table.remove
local setmetatable, getmetatable, tonumber, tostring = setmetatable, getmetatable, tonumber, tostring
+local type, next, rawset, tonumber = type, next, rawset, tonumber
function tables.definetable(target) -- defines undefined tables
local composed, t, n = nil, { }, 0
@@ -3789,14 +4155,31 @@ function tables.definetable(target) -- defines undefined tables
return concat(t,"\n")
end
-function tables.accesstable(target)
- local t = _G
+function tables.accesstable(target,root)
+ local t = root or _G
for name in gmatch(target,"([^%.]+)") do
t = t[name]
+ if not t then
+ return
+ end
end
return t
end
+function tables.migratetable(target,v,root)
+ local t = root or _G
+ local names = string.split(target,".")
+ for i=1,#names-1 do
+ local name = names[i]
+ t[name] = t[name] or { }
+ t = t[name]
+ if not t then
+ return
+ end
+ end
+ t[names[#names]] = v
+end
+
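-- Editor's illustration, not part of this changeset: accesstable now bails
-- out at the first missing level and migratetable creates the intermediate
-- tables it needs; the root argument defaults to _G and the keys used below
-- are made up.
--
-- local root = { }
-- tables.migratetable("one.two.three","hello",root)
-- print(tables.accesstable("one.two.three",root)) -- "hello"
-- print(tables.accesstable("one.missing",  root)) -- nil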
function tables.removevalue(t,value) -- todo: n
if value then
for i=1,#t do
@@ -3840,13 +4223,19 @@ end
-- experimental
-local function toxml(t,d,result)
+local function toxml(t,d,result,step)
for k, v in table.sortedpairs(t) do
if type(v) == "table" then
- result[#result+1] = format("%s<%s>",d,k)
- toxml(v,d.." ",result)
- result[#result+1] = format("%s</%s>",d,k)
- elseif tonumber(k) then
+ if type(k) == "number" then
+ result[#result+1] = format("%s<entry n='%s'>",d,k)
+ toxml(v,d..step,result,step)
+ result[#result+1] = format("%s</entry>",d,k)
+ else
+ result[#result+1] = format("%s<%s>",d,k)
+ toxml(v,d..step,result,step)
+ result[#result+1] = format("%s</%s>",d,k)
+ end
+ elseif type(k) == "number" then
result[#result+1] = format("%s<entry n='%s'>%s</entry>",d,k,v,k)
else
result[#result+1] = format("%s<%s>%s</%s>",d,k,tostring(v),k)
@@ -3854,17 +4243,56 @@ local function toxml(t,d,result)
end
end
-function table.toxml(t,name,nobanner)
+function table.toxml(t,name,nobanner,indent,spaces)
local noroot = name == false
local result = (nobanner or noroot) and { } or { "<?xml version='1.0' standalone='yes' ?>" }
+ local indent = rep(" ",indent or 0)
+ local spaces = rep(" ",spaces or 1)
if noroot then
- toxml( t, "", result)
+ toxml( t, indent, result, spaces)
else
- toxml( { [name or "root"] = t }, "", result)
+ toxml( { [name or "root"] = t }, indent, result, spaces)
end
return concat(result,"\n")
end
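-- Editor's illustration, not part of this changeset: with the extension above,
-- numerically indexed values become <entry n='...'> elements; the output shown
-- is an approximation (key order comes from table.sortedpairs, layout from the
-- optional indent and spaces arguments).
--
-- print(table.toxml({ author = "Hans", titles = { "one", "two" } },"data"))
--
-- <?xml version='1.0' standalone='yes' ?>
-- <data>
--  <author>Hans</author>
--  <titles>
--   <entry n='1'>one</entry>
--   <entry n='2'>two</entry>
--  </titles>
-- </data>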
+-- also experimental
+
+-- encapsulate(table,utilities.tables)
+-- encapsulate(table,utilities.tables,true)
+-- encapsulate(table,true)
+
+function tables.encapsulate(core,capsule,protect)
+ if type(capsule) ~= "table" then
+ protect = true
+ capsule = { }
+ end
+ for key, value in next, core do
+ if capsule[key] then
+ print(format("\ninvalid inheritance '%s' in '%s': %s",key,tostring(core)))
+ os.exit()
+ else
+ capsule[key] = value
+ end
+ end
+ if protect then
+ for key, value in next, core do
+ core[key] = nil
+ end
+ setmetatable(core, {
+ __index = capsule,
+ __newindex = function(t,key,value)
+ if capsule[key] then
+ print(format("\ninvalid overload '%s' in '%s'",key,tostring(core)))
+ os.exit()
+ else
+ rawset(t,key,value)
+ end
+ end
+ } )
+ end
+end
+
end -- of closure
@@ -3886,8 +4314,8 @@ local storage = utilities.storage
function storage.mark(t)
if not t then
- texio.write_nl("fatal error: storage '%s' cannot be marked",t)
- os.exit()
+ texio.write_nl("fatal error: storage cannot be marked")
+ return -- os.exit()
end
local m = getmetatable(t)
if not m then
@@ -3916,8 +4344,8 @@ end
function storage.checked(t)
if not t then
- texio.write_nl("fatal error: storage '%s' has not been allocated",t)
- os.exit()
+ texio.write_nl("fatal error: storage has not been allocated")
+ return -- os.exit()
end
return t
end
@@ -3946,13 +4374,13 @@ end
-- table namespace ?
-local function f_empty () return "" end -- t,k
-local function f_self (t,k) t[k] = k return k end
-local function f_ignore() end -- t,k,v
+local function f_empty () return "" end -- t,k
+local function f_self (t,k) t[k] = k return k end
+local function f_ignore() end -- t,k,v
-local t_empty = { __index = empty }
-local t_self = { __index = self }
-local t_ignore = { __newindex = ignore }
+local t_empty = { __index = f_empty }
+local t_self = { __index = f_self }
+local t_ignore = { __newindex = f_ignore }
function table.setmetatableindex(t,f)
local m = getmetatable(t)
@@ -3973,6 +4401,7 @@ function table.setmetatableindex(t,f)
setmetatable(t,{ __index = f })
end
end
+ return t
end
function table.setmetatablenewindex(t,f)
@@ -3990,6 +4419,7 @@ function table.setmetatablenewindex(t,f)
setmetatable(t,{ __newindex = f })
end
end
+ return t
end
function table.setmetatablecall(t,f)
@@ -3999,6 +4429,7 @@ function table.setmetatablecall(t,f)
else
setmetatable(t,{ __call = f })
end
+ return t
end
function table.setmetatablekey(t,key,value)
@@ -4008,6 +4439,7 @@ function table.setmetatablekey(t,key,value)
setmetatable(t,m)
end
m[key] = value
+ return t
end
function table.getmetatablekey(t,key,value)
@@ -4176,14 +4608,29 @@ utilities = utilities or {}
utilities.lua = utilities.lua or { }
utilities.report = logs and logs.reporter("system") or print
-function utilities.lua.compile(luafile,lucfile,cleanup,strip) -- defaults: cleanup=false strip=true
+local function stupidcompile(luafile,lucfile)
+ local data = io.loaddata(luafile)
+ if data and data ~= "" then
+ data = string.dump(assert(loadstring(data)))
+ if data and data ~= "" then
+ io.savedata(lucfile,data)
+ end
+ end
+end
+
+function utilities.lua.compile(luafile,lucfile,cleanup,strip,fallback) -- defaults: cleanup=false strip=true
utilities.report("lua: compiling %s into %s",luafile,lucfile)
os.remove(lucfile)
local command = "-o " .. string.quoted(lucfile) .. " " .. string.quoted(luafile)
if strip ~= false then
command = "-s " .. command
end
- local done = os.spawn("texluac " .. command) == 0 or os.spawn("luac " .. command) == 0
+ local done = os.spawn("texluac " .. command) == 0 -- or os.spawn("luac " .. command) == 0
+ if not done and fallback then
+ utilities.report("lua: dumping %s into %s (unstripped)",luafile,lucfile)
+ stupidcompile(luafile,lucfile) -- maybe use the stripper we have elsewhere
+ cleanup = false -- better see how worse it is
+ end
if done and cleanup == true and lfs.isfile(lucfile) and lfs.isfile(luafile) then
utilities.report("lua: removing %s",luafile)
os.remove(luafile)
@@ -4210,9 +4657,9 @@ if not modules then modules = { } end modules ['util-prs'] = {
license = "see context related readme files"
}
-local P, R, V, C, Ct, Carg = lpeg.P, lpeg.R, lpeg.V, lpeg.C, lpeg.Ct, lpeg.Carg
+local P, R, V, C, Ct, Cs, Carg = lpeg.P, lpeg.R, lpeg.V, lpeg.C, lpeg.Ct, lpeg.Cs, lpeg.Carg
local lpegmatch = lpeg.match
-local concat, format, gmatch = table.concat, string.format, string.gmatch
+local concat, format, gmatch, find = table.concat, string.format, string.gmatch, string.find
local tostring, type, next = tostring, type, next
utilities = utilities or {}
@@ -4240,8 +4687,12 @@ local rbrace = P("}")
local nobrace = 1 - (lbrace+rbrace)
local nested = P { lbrace * (nobrace + V(1))^0 * rbrace }
local spaces = space^0
+local argument = Cs((lbrace/"") * ((nobrace + nested)^0) * (rbrace/""))
+local content = (1-P(-1))^0
-lpeg.patterns.nested = nested
+lpeg.patterns.nested = nested -- no capture
+lpeg.patterns.argument = argument -- argument after e.g. =
+lpeg.patterns.content = content -- rest after e.g =
local value = P(lbrace * C((nobrace + nested)^0) * rbrace) + C((nested + (1-comma))^0)
@@ -4322,9 +4773,15 @@ parsers.patterns.settings_to_array = pattern
-- we could use a weak table as cache
-function parsers.settings_to_array(str)
+function parsers.settings_to_array(str,strict)
if not str or str == "" then
return { }
+ elseif strict then
+ if find(str,"{") then
+ return lpegmatch(pattern,str)
+ else
+ return { str }
+ end
else
return lpegmatch(pattern,str)
end
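-- Editor's illustration, not part of this changeset: the new strict flag keeps
-- a braceless value intact instead of splitting it on commas, while the default
-- behaviour stays as before.
--
-- utilities.parsers.settings_to_array("a,b,{c,d}")  -- { "a", "b", "c,d" }
-- utilities.parsers.settings_to_array("a,b,c",true) -- { "a,b,c" }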
@@ -4562,61 +5019,55 @@ local names = { }
-- one
local function hook()
- local f = getinfo(2,"f").func
- local n = getinfo(2,"Sn")
--- if n.what == "C" and n.name then print (n.namewhat .. ': ' .. n.name) end
+ local f = getinfo(2) -- "nS"
if f then
- local cf = counters[f]
- if cf == nil then
- counters[f] = 1
- names[f] = n
- else
- counters[f] = cf + 1
- end
- end
-end
-
-local function getname(func)
- local n = names[func]
- if n then
- if n.what == "C" then
- return n.name or '<anonymous>'
+ local n = "unknown"
+ if f.what == "C" then
+ n = f.name or '<anonymous>'
+ if not names[n] then
+ names[n] = format("%42s",n)
+ end
else
-- source short_src linedefined what name namewhat nups func
- local name = n.name or n.namewhat or n.what
- if not name or name == "" then name = "?" end
- return format("%s : %s : %s", n.short_src or "unknown source", n.linedefined or "--", name)
+ n = f.name or f.namewhat or f.what
+ if not n or n == "" then
+ n = "?"
+ end
+ if not names[n] then
+ names[n] = format("%42s : % 5i : %s",n,f.linedefined or 0,f.short_src or "unknown source")
+ end
end
- else
- return "unknown"
+ counters[n] = (counters[n] or 0) + 1
end
end
-function debugger.showstats(printer,threshold)
+function debugger.showstats(printer,threshold) -- hm, something has changed, rubbish now
printer = printer or texio.write or print
threshold = threshold or 0
local total, grandtotal, functions = 0, 0, 0
- printer("\n") -- ugly but ok
- -- table.sort(counters)
- for func, count in next, counters do
- if count > threshold then
- local name = getname(func)
- if not find(name,"for generator") then
- printer(format("%8i %s", count, name))
- total = total + count
- end
+ local dataset = { }
+ for name, count in next, counters do
+ dataset[#dataset+1] = { name, count }
+ end
+ table.sort(dataset,function(a,b) return a[2] == b[2] and b[1] > a[1] or a[2] > b[2] end)
+ for i=1,#dataset do
+ local d = dataset[i]
+ local name = d[1]
+ local count = d[2]
+ if count > threshold and not find(name,"for generator") then -- move up
+ printer(format("%8i %s\n", count, names[name]))
+ total = total + count
end
grandtotal = grandtotal + count
functions = functions + 1
end
- printer(format("functions: %s, total: %s, grand total: %s, threshold: %s\n", functions, total, grandtotal, threshold))
+ printer("\n")
+ printer(format("functions : % 10i\n", functions))
+ printer(format("total : % 10i\n", total))
+ printer(format("grand total: % 10i\n", grandtotal))
+ printer(format("threshold : % 10i\n", threshold))
end
--- two
-
-
--- rest
-
function debugger.savestats(filename,threshold)
local f = io.open(filename,'w')
if f then
@@ -4638,6 +5089,7 @@ end
local is_node = node and node.is_node
+local is_lpeg = lpeg and lpeg.type
function inspect(i) -- global function
local ti = type(i)
@@ -4645,11 +5097,30 @@ function inspect(i) -- global function
table.print(i,"table")
elseif is_node and is_node(i) then
table.print(nodes.astable(i),tostring(i))
+ elseif is_lpeg and is_lpeg(i) then
+ lpeg.print(i)
else
print(tostring(i))
end
end
+-- from the lua book:
+
+function traceback()
+ local level = 1
+ while true do
+ local info = debug.getinfo(level, "Sl")
+ if not info then
+ break
+ elseif info.what == "C" then
+ print(format("%3i : C function",level))
+ else
+ print(format("%3i : [%s]:%d",level,info.short_src,info.currentline))
+ end
+ level = level + 1
+ end
+end
+
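-- Editor's note, not part of this changeset: calling the traceback() helper
-- above prints one line per active stack level, either "C function" or
-- "[source]:line", which can be handy when no error handler is installed.
--
-- traceback()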
end -- of closure
@@ -4668,7 +5139,7 @@ if not modules then modules = { } end modules ['trac-inf'] = {
-- get warnings about assignments. This is more efficient than using rawset
-- and rawget.
-local format = string.format
+local format, lower = string.format, string.lower
local clock = os.gettimeofday or os.clock -- should go in environment
local write_nl = texio.write_nl
@@ -4770,10 +5241,10 @@ function statistics.show(reporter)
-- this code will move
local register = statistics.register
register("luatex banner", function()
- return string.lower(status.banner)
+ return lower(status.banner)
end)
register("control sequences", function()
- return format("%s of %s", status.cs_count, status.hash_size+status.hash_extra)
+ return format("%s of %s + %s", status.cs_count, status.hash_size,status.hash_extra)
end)
register("callbacks", function()
local total, indirect = status.callbacks or 0, status.indirect_callbacks or 0
@@ -4847,7 +5318,7 @@ end
function commands.elapsedtime(name)
stoptiming(name or "whatever")
- tex.sprint(elapsedtime(name or "whatever"))
+ context(elapsedtime(name or "whatever"))
end
@@ -4931,6 +5402,10 @@ local function set(t,what,newvalue)
if type(what) ~= "table" then
return
end
+ if not done then -- catch ... why not set?
+ done = { }
+ t.done = done
+ end
for w, value in next, what do
if value == "" then
value = newvalue
@@ -5167,6 +5642,8 @@ end
if texconfig then
+ -- this happens too late in ini mode but that is no problem
+
local function set(k,v)
v = tonumber(v)
if v then
@@ -5237,7 +5714,7 @@ local report, subreport, status, settarget, setformats, settranslations
local direct, subdirect, writer, pushtarget, poptarget
-if tex and tex.jobname or tex.formatname then
+if tex and (tex.jobname or tex.formatname) then
local valueiskey = { __index = function(t,k) t[k] = k return k end } -- will be helper
@@ -5252,6 +5729,10 @@ if tex and tex.jobname or tex.formatname then
write_nl(target,...)
end
+ newline = function()
+ write_nl(target,"\n")
+ end
+
report = function(a,b,c,...)
if c then
write_nl(target,format("%-15s > %s\n",translations[a],format(formats[b],c,...)))
@@ -5357,6 +5838,10 @@ else
writer = write_nl
+ newline = function()
+ write_nl("\n")
+ end
+
report = function(a,b,c,...)
if c then
write_nl(format("%-15s | %s",a,format(b,c,...)))
@@ -5416,6 +5901,7 @@ logs.settranslations = settranslations
logs.direct = direct
logs.subdirect = subdirect
logs.writer = writer
+logs.newline = newline
-- installer
@@ -5585,21 +6071,58 @@ local real, user, sub
function logs.start_page_number()
real, user, sub = texcount.realpageno, texcount.userpageno, texcount.subpageno
+-- real, user, sub = 0, 0, 0
end
-function logs.stop_page_number()
- if real > 0 then
- if user > 0 then
- if sub > 0 then
- report_pages("flushing realpage %s, userpage %s, subpage %s",real,user,sub)
+local timing = false
+local starttime = nil
+local lasttime = nil
+
+trackers.register("pages.timing", function(v) -- only for myself (diagnostics)
+ starttime = os.clock()
+ timing = true
+end)
+
+function logs.stop_page_number() -- the first page can include the initialization, so we omit it from the average
+ if timing then
+ local elapsed, average
+ local stoptime = os.clock()
+ if not lasttime or real < 2 then
+ elapsed = stoptime
+ average = stoptime
+ starttime = stoptime
+ else
+ elapsed = stoptime - lasttime
+ average = (stoptime - starttime) / (real - 1)
+ end
+ lasttime = stoptime
+ if real > 0 then
+ if user > 0 then
+ if sub > 0 then
+ report_pages("flushing realpage %s, userpage %s, subpage %s, time %0.04f / %0.04f",real,user,sub,elapsed,average)
+ else
+ report_pages("flushing realpage %s, userpage %s, time %0.04f / %0.04f",real,user,elapsed,average)
+ end
else
- report_pages("flushing realpage %s, userpage %s",real,user)
+ report_pages("flushing realpage %s, time %0.04f / %0.04f",real,elapsed,average)
end
else
- report_pages("flushing realpage %s",real)
+ report_pages("flushing page, time %0.04f / %0.04f",elapsed,average)
end
else
- report_pages("flushing page")
+ if real > 0 then
+ if user > 0 then
+ if sub > 0 then
+ report_pages("flushing realpage %s, userpage %s, subpage %s",real,user,sub)
+ else
+ report_pages("flushing realpage %s, userpage %s",real,user)
+ end
+ else
+ report_pages("flushing realpage %s",real)
+ end
+ else
+ report_pages("flushing page")
+ end
end
logs.flush()
end
@@ -5681,6 +6204,13 @@ local function reportbanner(t)
end
end
+local function reportversion(t)
+ local banner = t.banner
+ if banner then
+ t.report(banner)
+ end
+end
+
local function reporthelp(t,...)
local helpinfo = t.helpinfo
if type(helpinfo) == "string" then
@@ -5707,6 +6237,7 @@ function logs.application(t)
t.report = logs.reporter(t.name)
t.help = function(...) reportbanner(t) ; reporthelp(t,...) ; reportinfo(t) end
t.identify = function() reportbanner(t) end
+ t.version = function() reportversion(t) end
return t
end
@@ -5772,6 +6303,11 @@ else
end
end
+-- do we still need io.flush then?
+
+io.stdout:setvbuf('no')
+io.stderr:setvbuf('no')
+
end -- of closure
@@ -6277,6 +6813,10 @@ if not modules then modules = { } end modules ['lxml-tab'] = {
-- stripping spaces from e.g. cont-en.xml saves .2 sec runtime so it's not worth the
-- trouble
+-- todo: when serializing optionally remap named entities to hex (if known in char-ent.lua)
+-- maybe when letter -> utf, else name .. then we need an option to the serializer .. a bit
+-- of work so we delay this till we cleanup
+
local trace_entities = false trackers.register("xml.entities", function(v) trace_entities = v end)
local report_xml = logs and logs.reporter("xml","core") or function(...) print(format(...)) end
@@ -6301,10 +6841,11 @@ xml = xml or { }
local xml = xml
+local utf = unicode.utf8
local concat, remove, insert = table.concat, table.remove, table.insert
local type, next, setmetatable, getmetatable, tonumber = type, next, setmetatable, getmetatable, tonumber
local format, lower, find, match, gsub = string.format, string.lower, string.find, string.match, string.gsub
-local utfchar = unicode.utf8.char
+local utfchar, utffind, utfgsub = utf.char, utf.find, utf.gsub
local lpegmatch = lpeg.match
local P, S, R, C, V, C, Cs = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.V, lpeg.C, lpeg.Cs
@@ -6417,9 +6958,22 @@ element.</p>
local nsremap, resolvens = xml.xmlns, xml.resolvens
-local stack, top, dt, at, xmlns, errorstr, entities = { }, { }, { }, { }, { }, nil, { }
-local strip, cleanup, utfize, resolve, resolve_predefined, unify_predefined = false, false, false, false, false, false
-local dcache, hcache, acache = { }, { }, { }
+local stack = { }
+local top = { }
+local dt = { }
+local at = { }
+local xmlns = { }
+local errorstr = nil
+local entities = { }
+local strip = false
+local cleanup = false
+local utfize = false
+local resolve_predefined = false
+local unify_predefined = false
+
+local dcache = { }
+local hcache = { }
+local acache = { }
local mt = { }
@@ -6585,7 +7139,7 @@ local predefined_unified = {
[42] = "&quot;",
[47] = "&apos;",
[74] = "&lt;",
- [76] = "&gr;",
+ [76] = "&gt;",
}
local predefined_simplified = {
@@ -6596,6 +7150,58 @@ local predefined_simplified = {
[76] = ">", gt = ">",
}
+local nofprivates = 0xF0000 -- shared but seldom used
+
+local privates_u = { -- unescaped
+ [ [[&]] ] = "&amp;",
+ [ [["]] ] = "&quot;",
+ [ [[']] ] = "&apos;",
+ [ [[<]] ] = "&lt;",
+ [ [[>]] ] = "&gt;",
+}
+
+local privates_p = {
+}
+
+local privates_n = {
+ -- keeps track of defined ones
+}
+
+local function escaped(s)
+ if s == "" then
+ return ""
+ else -- if utffind(s,privates_u) then
+ return (utfgsub(s,".",privates_u))
+ -- else
+ -- return s
+ end
+end
+
+local function unescaped(s)
+ local p = privates_n[s]
+ if not p then
+ nofprivates = nofprivates + 1
+ p = utfchar(nofprivates)
+ privates_n[s] = p
+ s = "&" .. s .. ";" -- todo: use char-ent to map to hex
+ privates_u[p] = s
+ privates_p[p] = s
+ end
+ return p
+end
+
+local function unprivatized(s,resolve)
+ if s == "" then
+ return ""
+ else
+ return (utfgsub(s,".",privates_p))
+ end
+end
+
+xml.privatetoken = unescaped
+xml.unprivatized = unprivatized
+xml.privatecodes = privates_n
+
local function handle_hex_entity(str)
local h = hcache[str]
if not h then
@@ -6637,7 +7243,7 @@ local function handle_dec_entity(str)
if not n then
report_xml("utfize, ignoring dec entity &#%s;",str)
elseif trace_entities then
- report_xml("utfize, converting dec entity &#%s; into %s",str,h)
+ report_xml("utfize, converting dec entity &#%s; into %s",str,d)
end
else
if trace_entities then
@@ -6658,34 +7264,44 @@ local function handle_any_entity(str)
if not a then
a = resolve_predefined and predefined_simplified[str]
if a then
- -- one of the predefined
- elseif type(resolve) == "function" then
- a = resolve(str) or entities[str]
- else
- a = entities[str]
- end
- if a then
if trace_entities then
- report_xml("resolved entity &%s; -> %s (internal)",str,a)
+ report_xml("resolved entity &%s; -> %s (predefined)",str,a)
end
- a = lpegmatch(parsedentity,a) or a
else
- local unknown_any_entity = placeholders.unknown_any_entity
- if unknown_any_entity then
- a = unknown_any_entity(str) or ""
+ if type(resolve) == "function" then
+ a = resolve(str) or entities[str]
+ else
+ a = entities[str]
end
if a then
+ if type(a) == "function" then
+ if trace_entities then
+ report_xml("expanding entity &%s; (function)",str)
+ end
+ a = a(str) or ""
+ end
+ a = lpegmatch(parsedentity,a) or a -- for nested
if trace_entities then
- report_xml("resolved entity &%s; -> %s (external)",str,a)
+ report_xml("resolved entity &%s; -> %s (internal)",str,a)
end
else
- if trace_entities then
- report_xml("keeping entity &%s;",str)
+ local unknown_any_entity = placeholders.unknown_any_entity
+ if unknown_any_entity then
+ a = unknown_any_entity(str) or ""
end
- if str == "" then
- a = "&error;"
+ if a then
+ if trace_entities then
+ report_xml("resolved entity &%s; -> %s (external)",str,a)
+ end
else
- a = "&" .. str .. ";"
+ if trace_entities then
+ report_xml("keeping entity &%s;",str)
+ end
+ if str == "" then
+ a = "&error;"
+ else
+ a = "&" .. str .. ";"
+ end
end
end
end
@@ -6700,18 +7316,25 @@ local function handle_any_entity(str)
else
local a = acache[str]
if not a then
- if trace_entities then
- report_xml("found entity &%s;",str)
- end
a = resolve_predefined and predefined_simplified[str]
if a then
-- one of the predefined
acache[str] = a
+ if trace_entities then
+ report_xml("entity &%s; becomes %s",str,tostring(a))
+ end
elseif str == "" then
+ if trace_entities then
+ report_xml("invalid entity &%s;",str)
+ end
a = "&error;"
acache[str] = a
else
- a = "&" .. str .. ";"
+ if trace_entities then
+ report_xml("entity &%s; is made private",str)
+ end
+ -- a = "&" .. str .. ";"
+ a = unescaped(str)
acache[str] = a
end
end
@@ -6761,7 +7384,7 @@ local value = (squote * Cs((entity + (1 - squote))^0) * squote) + (dq
local endofattributes = slash * close + close -- recovery of flaky html
local whatever = space * name * optionalspace * equal
-local wrongvalue = C(P(1-whatever-close)^1 + P(1-close)^1) / attribute_value_error
+----- wrongvalue = C(P(1-whatever-close)^1 + P(1-close)^1) / attribute_value_error
----- wrongvalue = C(P(1-whatever-endofattributes)^1 + P(1-endofattributes)^1) / attribute_value_error
----- wrongvalue = C(P(1-space-endofattributes)^1) / attribute_value_error
local wrongvalue = Cs(P(entity + (1-space-endofattributes))^1) / attribute_value_error
@@ -6796,6 +7419,8 @@ local function normalentity(k,v ) entities[k] = v end
local function systementity(k,v,n) entities[k] = v end
local function publicentity(k,v,n) entities[k] = v end
+-- todo: separate dtd parser
+
local begindoctype = open * P("!DOCTYPE")
local enddoctype = close
local beginset = P("[")
@@ -6803,17 +7428,22 @@ local endset = P("]")
local doctypename = C((1-somespace-close)^0)
local elementdoctype = optionalspace * P("<!ELEMENT") * (1-close)^0 * close
+local basiccomment = begincomment * ((1 - endcomment)^0) * endcomment
+
local normalentitytype = (doctypename * somespace * value)/normalentity
local publicentitytype = (doctypename * somespace * P("PUBLIC") * somespace * value)/publicentity
local systementitytype = (doctypename * somespace * P("SYSTEM") * somespace * value * somespace * P("NDATA") * somespace * doctypename)/systementity
local entitydoctype = optionalspace * P("<!ENTITY") * somespace * (systementitytype + publicentitytype + normalentitytype) * optionalspace * close
-local doctypeset = beginset * optionalspace * P(elementdoctype + entitydoctype + space)^0 * optionalspace * endset
+-- we accept comments in doctypes
+
+local doctypeset = beginset * optionalspace * P(elementdoctype + entitydoctype + basiccomment + space)^0 * optionalspace * endset
local definitiondoctype= doctypename * somespace * doctypeset
local publicdoctype = doctypename * somespace * P("PUBLIC") * somespace * value * somespace * value * somespace * doctypeset
local systemdoctype = doctypename * somespace * P("SYSTEM") * somespace * value * somespace * doctypeset
local simpledoctype = (1-close)^1 -- * balanced^0
local somedoctype = C((somespace * (publicdoctype + systemdoctype + definitiondoctype + simpledoctype) * optionalspace)^0)
+local somedoctype = C((somespace * (publicdoctype + systemdoctype + definitiondoctype + simpledoctype) * optionalspace)^0)
local instruction = (spacing * begininstruction * someinstruction * endinstruction) / function(...) add_special("@pi@",...) end
local comment = (spacing * begincomment * somecomment * endcomment ) / function(...) add_special("@cm@",...) end
@@ -6845,17 +7475,30 @@ local grammar_unparsed_text = P { "preamble",
children = unparsedtext + V("parent") + emptyelement + comment + cdata + instruction,
}
--- maybe we will add settinsg to result as well
+-- maybe we will add settings to result as well
-local function xmlconvert(data, settings)
- settings = settings or { } -- no_root strip_cm_and_dt given_entities parent_root error_handler
- strip = settings.strip_cm_and_dt
- utfize = settings.utfize_entities
- resolve = settings.resolve_entities
+local function _xmlconvert_(data, settings)
+ settings = settings or { } -- no_root strip_cm_and_dt given_entities parent_root error_handler
+ --
+ strip = settings.strip_cm_and_dt
+ utfize = settings.utfize_entities
+ resolve = settings.resolve_entities
resolve_predefined = settings.resolve_predefined_entities -- in case we have escaped entities
- unify_predefined = settings.unify_predefined_entities -- &#038; -> &amp;
- cleanup = settings.text_cleanup
- stack, top, at, xmlns, errorstr, entities = { }, { }, { }, { }, nil, settings.entities or { }
+ unify_predefined = settings.unify_predefined_entities -- &#038; -> &amp;
+ cleanup = settings.text_cleanup
+ entities = settings.entities or { }
+ --
+ if utfize == nil then
+ settings.utfize_entities = true
+ utfize = true
+ end
+ if resolve_predefined == nil then
+ settings.resolve_predefined_entities = true
+ resolve_predefined = true
+ end
+ --
+ --
+ stack, top, at, xmlns, errorstr = { }, { }, { }, { }, nil
acache, hcache, dcache = { }, { }, { } -- not stored
reported_attribute_errors = { }
if settings.parent_root then
@@ -6893,7 +7536,7 @@ local function xmlconvert(data, settings)
else
errorhandler = errorhandler or xml.errorhandler
if errorhandler then
- xml.errorhandler("load",errorstr)
+ xml.errorhandler(format("load error: %s",errorstr))
end
end
else
@@ -6907,7 +7550,7 @@ local function xmlconvert(data, settings)
local v = rdt[k]
if type(v) == "table" and not v.special then -- always table -)
result.ri = k -- rootindex
-v.__p__ = result -- new, experiment, else we cannot go back to settings, we need to test this !
+ v.__p__ = result -- new, experiment, else we cannot go back to settings, we need to test this !
break
end
end
@@ -6915,16 +7558,42 @@ v.__p__ = result -- new, experiment, else we cannot go back to settings, we nee
if errorstr and errorstr ~= "" then
result.error = true
end
+ result.statistics = {
+ entities = {
+ decimals = dcache,
+ hexadecimals = hcache,
+ names = acache,
+ }
+ }
+ strip, utfize, resolve, resolve_predefined = nil, nil, nil, nil
+ unify_predefined, cleanup, entities = nil, nil, nil
+ stack, top, at, xmlns, errorstr = nil, nil, nil, nil, nil
+ acache, hcache, dcache = nil, nil, nil
+ reported_attribute_errors, mt, errorhandler = nil, nil, nil
return result
end
+-- Because we can have a crash (stack issues) with faulty xml, we wrap this one
+-- in a protector:
+
+function xmlconvert(data,settings)
+ local ok, result = pcall(function() return _xmlconvert_(data,settings) end)
+ if ok then
+ return result
+ else
+ return _xmlconvert_("")
+ end
+end
+
xml.convert = xmlconvert
-function xml.inheritedconvert(data,xmldata)
+function xml.inheritedconvert(data,xmldata) -- xmldata is parent
local settings = xmldata.settings
- settings.parent_root = xmldata -- to be tested
+ if settings then
+ settings.parent_root = xmldata -- to be tested
+ end
-- settings.no_root = true
- local xc = xmlconvert(data,settings)
+ local xc = xmlconvert(data,settings) -- hm, we might need to locate settings
-- xc.settings = nil
-- xc.entities = nil
-- xc.special = nil
@@ -6953,7 +7622,7 @@ function xml.is_valid(root)
return root and not root.error
end
-xml.errorhandler = report
+xml.errorhandler = report_xml
--[[ldx--
<p>We cannot load an <l n='lpeg'/> from a filehandle so we need to load
@@ -7032,18 +7701,15 @@ alternative.</p>
function xml.checkbom(root) -- can be made faster
if root.ri then
- local dt, found = root.dt, false
+ local dt = root.dt
for k=1,#dt do
local v = dt[k]
if type(v) == "table" and v.special and v.tg == "@pi@" and find(v.dt[1],"xml.*version=") then
- found = true
- break
+ return
end
end
- if not found then
- insert(dt, 1, { special=true, ns="", tg="@pi@", dt = { "xml version='1.0' standalone='yes'"} } )
- insert(dt, 2, "\n" )
- end
+ insert(dt, 1, { special=true, ns="", tg="@pi@", dt = { "xml version='1.0' standalone='yes'"} } )
+ insert(dt, 2, "\n" )
end
end
@@ -7054,14 +7720,14 @@ and then handle the lot.</p>
-- new experimental reorganized serialize
-local function verbose_element(e,handlers)
+local function verbose_element(e,handlers) -- options
local handle = handlers.handle
local serialize = handlers.serialize
local ens, etg, eat, edt, ern = e.ns, e.tg, e.at, e.dt, e.rn
local ats = eat and next(eat) and { }
if ats then
for k,v in next, eat do
- ats[#ats+1] = format('%s=%q',k,v)
+ ats[#ats+1] = format('%s=%q',k,escaped(v))
end
end
if ern and trace_entities and ern ~= ens then
@@ -7077,7 +7743,7 @@ local function verbose_element(e,handlers)
for i=1,#edt do
local e = edt[i]
if type(e) == "string" then
- handle(e)
+ handle(escaped(e))
else
serialize(e,handlers)
end
@@ -7098,11 +7764,11 @@ local function verbose_element(e,handlers)
handle("<",etg,">")
end
for i=1,#edt do
- local ei = edt[i]
- if type(ei) == "string" then
- handle(ei)
+ local e = edt[i]
+ if type(e) == "string" then
+ handle(escaped(e)) -- option: hexify escaped entities
else
- serialize(ei,handlers)
+ serialize(e,handlers)
end
end
handle("</",etg,">")
@@ -7137,7 +7803,7 @@ local function verbose_root(e,handlers)
end
local function verbose_text(e,handlers)
- handlers.handle(e)
+ handlers.handle(escaped(e))
end
local function verbose_document(e,handlers)
@@ -7265,20 +7931,33 @@ local result
local xmlfilehandler = newhandlers {
name = "file",
- initialize = function(name) result = io.open(name,"wb") return result end,
- finalize = function() result:close() return true end,
- handle = function(...) result:write(...) end,
+ initialize = function(name)
+ result = io.open(name,"wb")
+ return result
+ end,
+ finalize = function()
+ result:close()
+ return true
+ end,
+ handle = function(...)
+ result:write(...)
+ end,
}
-- no checking on writeability here but not faster either
--
-- local xmlfilehandler = newhandlers {
--- initialize = function(name) io.output(name,"wb") return true end,
--- finalize = function() io.close() return true end,
+-- initialize = function(name)
+-- io.output(name,"wb")
+-- return true
+-- end,
+-- finalize = function()
+-- io.close()
+-- return true
+-- end,
-- handle = io.write,
-- }
-
function xml.save(root,name)
serialize(root,xmlfilehandler,name)
end
@@ -7287,28 +7966,34 @@ local result
local xmlstringhandler = newhandlers {
name = "string",
- initialize = function() result = { } return result end,
- finalize = function() return concat(result) end,
- handle = function(...) result[#result+1] = concat { ... } end
+ initialize = function()
+ result = { }
+ return result
+ end,
+ finalize = function()
+ return concat(result)
+ end,
+ handle = function(...)
+ result[#result+1] = concat { ... }
+ end,
}
local function xmltostring(root) -- 25% overhead due to collecting
- if root then
- if type(root) == 'string' then
- return root
- else -- if next(root) then -- next is faster than type (and >0 test)
- return serialize(root,xmlstringhandler) or ""
- end
+ if not root then
+ return ""
+ elseif type(root) == 'string' then
+ return root
+ else -- if next(root) then -- next is faster than type (and >0 test)
+ return serialize(root,xmlstringhandler) or ""
end
- return ""
end
-local function xmltext(root) -- inline
+local function __tostring(root) -- inline
return (root and xmltostring(root)) or ""
end
initialize_mt = function(root) -- redefinition
- mt = { __tostring = xmltext, __index = root }
+ mt = { __tostring = __tostring, __index = root }
end
xml.defaulthandlers = handlers
@@ -7428,7 +8113,7 @@ xml.tocdata(e,"error")
--ldx]]--
function xml.tocdata(e,wrapper)
- local whatever = xmltostring(e.dt)
+ local whatever = type(e) == "table" and xmltostring(e.dt) or e or ""
if wrapper then
whatever = format("<%s>%s</%s>",wrapper,whatever,wrapper)
end
@@ -7620,8 +8305,8 @@ apply_axis['child'] = function(list)
c = c + 1
collected[c] = dk
dk.ni = k -- refresh
- en = en + 1
- dk.ei = en
+ en = en + 1
+ dk.ei = en
end
end
ll.en = en
@@ -7649,6 +8334,7 @@ local function collect(list,collected,c)
end
return c
end
+
apply_axis['descendant'] = function(list)
local collected, c = { }, 0
for l=1,#list do
@@ -7940,6 +8626,12 @@ local lp_or = P("|") / " or "
local lp_and = P("&") / " and "
local lp_builtin = P (
+ P("text") / "(ll.dt[1] or '')" + -- fragile
+ P("content") / "ll.dt" +
+ -- P("name") / "(ll.ns~='' and ll.ns..':'..ll.tg)" +
+ P("name") / "((ll.ns~='' and ll.ns..':'..ll.tg) or ll.tg)" +
+ P("tag") / "ll.tg" +
+ P("position") / "l" + -- is element in finalizer
P("firstindex") / "1" +
P("lastindex") / "(#ll.__p__.dt or 1)" +
P("firstelement") / "1" +
@@ -7947,15 +8639,11 @@ local lp_builtin = P (
P("first") / "1" +
P("last") / "#list" +
P("rootposition") / "order" +
- P("position") / "l" + -- is element in finalizer
P("order") / "order" +
P("element") / "(ll.ei or 1)" +
P("index") / "(ll.ni or 1)" +
P("match") / "(ll.mi or 1)" +
- P("text") / "(ll.dt[1] or '')" +
- -- P("name") / "(ll.ns~='' and ll.ns..':'..ll.tg)" +
- P("name") / "((ll.ns~='' and ll.ns..':'..ll.tg) or ll.tg)" +
- P("tag") / "ll.tg" +
+ -- P("namespace") / "ll.ns" +
P("ns") / "ll.ns"
) * ((spaces * P("(") * spaces * P(")"))/"")
@@ -7979,11 +8667,11 @@ local lp_function = C(R("az","AZ","__")^1) * P("(") / function(t) -- todo: bett
end
end
-local lparent = lpeg.P("(")
-local rparent = lpeg.P(")")
+local lparent = P("(")
+local rparent = P(")")
local noparent = 1 - (lparent+rparent)
-local nested = lpeg.P{lparent * (noparent + lpeg.V(1))^0 * rparent}
-local value = lpeg.P(lparent * lpeg.C((noparent + nested)^0) * rparent) -- lpeg.P{"("*C(((1-S("()"))+V(1))^0)*")"}
+local nested = P{lparent * (noparent + V(1))^0 * rparent}
+local value = P(lparent * C((noparent + nested)^0) * rparent) -- P{"("*C(((1-S("()"))+V(1))^0)*")"}
local lp_child = Cc("expr.child(ll,'") * R("az","AZ","--","__")^1 * Cc("')")
local lp_number = S("+-") * R("09")^1
@@ -8135,7 +8823,10 @@ local special_1 = P("*") * Cc(register_auto_descendant) * Cc(register_all_nodes
local special_2 = P("/") * Cc(register_auto_self)
local special_3 = P("") * Cc(register_auto_self)
-local pathparser = Ct { "patterns", -- can be made a bit faster by moving pattern outside
+local no_nextcolon = P(-1) + #(1-P(":")) -- newer lpeg needs the P(-1)
+local no_nextlparent = P(-1) + #(1-P("(")) -- newer lpeg needs the P(-1)
+
+local pathparser = Ct { "patterns", -- can be made a bit faster by moving some patterns outside
patterns = spaces * V("protocol") * spaces * (
( V("special") * spaces * P(-1) ) +
@@ -8164,10 +8855,8 @@ local pathparser = Ct { "patterns", -- can be made a bit faster by moving patter
shortcuts = V("shortcuts_a") * (spaces * "/" * spaces * V("shortcuts_a"))^0,
s_descendant_or_self = (P("***/") + P("/")) * Cc(register_descendant_or_self), --- *** is a bonus
- -- s_descendant_or_self = P("/") * Cc(register_descendant_or_self),
s_descendant = P("**") * Cc(register_descendant),
- s_child = P("*") * #(1-P(":")) * Cc(register_child ),
--- s_child = P("*") * #(P("/")+P(-1)) * Cc(register_child ),
+ s_child = P("*") * no_nextcolon * Cc(register_child ),
s_parent = P("..") * Cc(register_parent ),
s_self = P("." ) * Cc(register_self ),
s_root = P("^^") * Cc(register_root ),
@@ -8194,13 +8883,13 @@ local pathparser = Ct { "patterns", -- can be made a bit faster by moving patter
expressions = expression / register_expression,
letters = R("az")^1,
- name = (1-lpeg.S("/[]()|:*!"))^1,
+ name = (1-S("/[]()|:*!"))^1, -- make inline
negate = P("!") * Cc(false),
nodefunction = V("negate") + P("not") * Cc(false) + Cc(true),
nodetest = V("negate") + Cc(true),
nodename = (V("negate") + Cc(true)) * spaces * ((V("wildnodename") * P(":") * V("wildnodename")) + (Cc(false) * V("wildnodename"))),
- wildnodename = (C(V("name")) + P("*") * Cc(false)) * #(1-P("(")),
+ wildnodename = (C(V("name")) + P("*") * Cc(false)) * no_nextlparent,
nodeset = spaces * Ct(V("nodename") * (spaces * P("|") * spaces * V("nodename"))^0) * spaces,
finalizer = (Cb("protocol") * P("/")^-1 * C(V("name")) * arguments * P(-1)) / register_finalizer,
@@ -8364,7 +9053,7 @@ local function profiled_apply(list,parsed,nofparsed,order)
elseif kind == "expression" then
collected = apply_expression(collected,pi.evaluator,order)
elseif kind == "finalizer" then
- collected = pi.finalizer(collected)
+ collected = pi.finalizer(collected) -- no check on # here
p.matched = p.matched + 1
p.finalized = p.finalized + 1
return collected
@@ -8497,9 +9186,9 @@ end
expressions.child = function(e,pattern)
return applylpath(e,pattern) -- todo: cache
end
-expressions.count = function(e,pattern)
+expressions.count = function(e,pattern) -- what if pattern == empty or nil
local collected = applylpath(e,pattern) -- todo: cache
- return (collected and #collected) or 0
+ return pattern and (collected and #collected) or 0
end
-- external
@@ -8508,7 +9197,7 @@ expressions.oneof = function(s,...) -- slow
local t = {...} for i=1,#t do if s == t[i] then return true end end return false
end
expressions.error = function(str)
- xml.errorhandler("unknown function in lpath expression",tostring(str or "?"))
+ xml.errorhandler(format("unknown function in lpath expression: %s",tostring(str or "?")))
return false
end
expressions.undefined = function(s)
@@ -8534,6 +9223,23 @@ expressions.lower = lower
expressions.number = tonumber
expressions.boolean = toboolean
+function expressions.contains(str,pattern)
+ local t = type(str)
+ if t == "string" then
+ if find(str,pattern) then
+ return true
+ end
+ elseif t == "table" then
+ for i=1,#str do
+ local d = str[i]
+ if type(d) == "string" and find(d,pattern) then
+ return true
+ end
+ end
+ end
+ return false
+end
+
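-- Editor's illustration, not part of this changeset: contains() becomes
-- available inside lpath expressions; root below stands for some previously
-- converted xml tree and the element name is made up.
--
-- local hits = xml.filter(root,"//item[contains(text(),'luatex')]")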
-- user interface
local function traverse(root,pattern,handle)
@@ -8856,10 +9562,12 @@ local xml = xml
local xmlconvert, xmlcopy, xmlname = xml.convert, xml.copy, xml.name
local xmlinheritedconvert = xml.inheritedconvert
local xmlapplylpath = xml.applylpath
+local xmlfilter = xml.filter
local type, setmetatable, getmetatable = type, setmetatable, getmetatable
-local insert, remove, fastcopy = table.insert, table.remove, table.fastcopy
-local gmatch, gsub = string.gmatch, string.gsub
+local insert, remove, fastcopy, concat = table.insert, table.remove, table.fastcopy, table.concat
+local gmatch, gsub, format = string.gmatch, string.gsub, string.format
+local utfbyte = utf.byte
local function report(what,pattern,c,e)
report_xml("%s element '%s' (root: '%s', position: %s, index: %s, pattern: %s)",what,xmlname(e),xmlname(e.__p__),c,e.ni,pattern)
@@ -9004,7 +9712,7 @@ local function xmltoelement(whatever,root)
end
local element
if type(whatever) == "string" then
- element = xmlinheritedconvert(whatever,root)
+ element = xmlinheritedconvert(whatever,root) -- beware, not really a root
else
element = whatever -- we assume a table
end
@@ -9031,18 +9739,30 @@ local function copiedelement(element,newparent)
end
function xml.delete(root,pattern)
- local collected = xmlapplylpath(root,pattern)
- if collected then
- for c=1,#collected do
- local e = collected[c]
- local p = e.__p__
- if p then
- if trace_manipulations then
- report('deleting',pattern,c,e)
+ if not pattern or pattern == "" then
+ local p = root.__p__
+ if p then
+ if trace_manipulations then
+ report('deleting',"--",c,root)
+ end
+ local d = p.dt
+ remove(d,root.ni)
+ redo_ni(d) -- can be made faster and inlined
+ end
+ else
+ local collected = xmlapplylpath(root,pattern)
+ if collected then
+ for c=1,#collected do
+ local e = collected[c]
+ local p = e.__p__
+ if p then
+ if trace_manipulations then
+ report('deleting',pattern,c,e)
+ end
+ local d = p.dt
+ remove(d,e.ni)
+ redo_ni(d) -- can be made faster and inlined
end
- local d = p.dt
- remove(d,e.ni)
- redo_ni(d) -- can be made faster and inlined
end
end
end
@@ -9105,32 +9825,39 @@ end
local function inject_element(root,pattern,whatever,prepend)
local element = root and xmltoelement(whatever,root)
local collected = element and xmlapplylpath(root,pattern)
- if collected then
- for c=1,#collected do
- local e = collected[c]
- local r = e.__p__
- local d, k, rri = r.dt, e.ni, r.ri
- local edt = (rri and d[rri].dt) or (d and d[k] and d[k].dt)
- if edt then
- local be, af
- local cp = copiedelement(element,e)
- if prepend then
- be, af = cp, edt
- else
- be, af = edt, cp
- end
- local bn = #be
- for i=1,#af do
- bn = bn + 1
- be[bn] = af[i]
- end
- if rri then
- r.dt[rri].dt = be
- else
- d[k].dt = be
- end
- redo_ni(d)
+ local function inject_e(e)
+ local r = e.__p__
+ local d, k, rri = r.dt, e.ni, r.ri
+ local edt = (rri and d[rri].dt) or (d and d[k] and d[k].dt)
+ if edt then
+ local be, af
+ local cp = copiedelement(element,e)
+ if prepend then
+ be, af = cp, edt
+ else
+ be, af = edt, cp
end
+ local bn = #be
+ for i=1,#af do
+ bn = bn + 1
+ be[bn] = af[i]
+ end
+ if rri then
+ r.dt[rri].dt = be
+ else
+ d[k].dt = be
+ end
+ redo_ni(d)
+ end
+ end
+ if not collected then
+ -- nothing
+ elseif collected.tg then
+ -- first or so
+ inject_e(collected)
+ else
+ for c=1,#collected do
+ inject_e(collected[c])
end
end
end
@@ -9138,16 +9865,23 @@ end
local function insert_element(root,pattern,whatever,before) -- todo: element als functie
local element = root and xmltoelement(whatever,root)
local collected = element and xmlapplylpath(root,pattern)
- if collected then
+ local function insert_e(e)
+ local r = e.__p__
+ local d, k = r.dt, e.ni
+ if not before then
+ k = k + 1
+ end
+ insert(d,k,copiedelement(element,r))
+ redo_ni(d)
+ end
+ if not collected then
+ -- nothing
+ elseif collected.tg then
+ -- first or so
+ insert_e(collected)
+ else
for c=1,#collected do
- local e = collected[c]
- local r = e.__p__
- local d, k = r.dt, e.ni
- if not before then
- k = k + 1
- end
- insert(d,k,copiedelement(element,r))
- redo_ni(d)
+ insert_e(collected[c])
end
end
end
@@ -9346,6 +10080,75 @@ function xml.remapname(root, pattern, newtg, newns, newrn)
end
--[[ldx--
+<p>Helper (for q2p).</p>
+--ldx]]--
+
+function xml.cdatatotext(e)
+ local dt = e.dt
+ if #dt == 1 then
+ local first = dt[1]
+ if first.tg == "@cd@" then
+ e.dt = first.dt
+ end
+ else
+ -- maybe option
+ end
+end
+
+xml.builtinentities = table.tohash { "amp", "quot", "apos", "lt", "gt" } -- used often so share
+
+local entities = characters and characters.entities or nil
+local builtinentities = xml.builtinentities
+
+function xml.addentitiesdoctype(root,option) -- we could also have a 'resolve' i.e. inline hex
+ if not entities then
+ require("char-ent")
+ entities = characters.entities
+ end
+ if entities and root and root.tg == "@rt@" and root.statistics then
+ local list = { }
+ local hexify = option == "hexadecimal"
+ for k, v in table.sortedhash(root.statistics.entities.names) do
+ if not builtinentities[k] then
+ local e = entities[k]
+ if not e then
+ e = format("[%s]",k)
+ elseif hexify then
+ e = format("&#%05X;",utfbyte(k))
+ end
+ list[#list+1] = format(" <!ENTITY %s %q >",k,e)
+ end
+ end
+ local dt = root.dt
+ local n = dt[1].tg == "@pi@" and 2 or 1
+ if #list > 0 then
+ insert(dt, n, { "\n" })
+ insert(dt, n, {
+ tg = "@dt@", -- beware, doctype is unparsed
+ dt = { format("Something [\n%s\n] ",concat(list)) },
+ ns = "",
+ special = true,
+ })
+ insert(dt, n, { "\n\n" })
+ else
+ -- insert(dt, n, { table.serialize(root.statistics) })
+ end
+ end
+end
+
+-- local str = [==[
+-- <?xml version='1.0' standalone='yes' ?>
+-- <root>
+-- <a>test &nbsp; test &#123; test</a>
+-- <b><![CDATA[oeps]]></b>
+-- </root>
+-- ]==]
+--
+-- local x = xml.convert(str)
+-- xml.addentitiesdoctype(x,"hexadecimal")
+-- print(x)
+
+--[[ldx--
<p>Here are a few synonyms.</p>
--ldx]]--
@@ -9378,6 +10181,53 @@ xml.remap_tag = xml.remaptag obsolete.remap_tag
xml.remap_name = xml.remapname obsolete.remap_name = xml.remapname
xml.remap_namespace = xml.remapnamespace obsolete.remap_namespace = xml.remapnamespace
+-- new (probably ok)
+
+function xml.cdata(e)
+ if e then
+ local dt = e.dt
+ if dt and #dt == 1 then
+ local first = dt[1]
+ return first.tg == "@cd@" and first.dt[1] or ""
+ end
+ end
+ return ""
+end
+
+function xml.finalizers.xml.cdata(collected)
+ if collected then
+ local e = collected[1]
+ if e then
+ local dt = e.dt
+ if dt and #dt == 1 then
+ local first = dt[1]
+ return first.tg == "@cd@" and first.dt[1] or ""
+ end
+ end
+ end
+ return ""
+end
+
+function xml.insertcomment(e,str,n) -- also insertcdata
+ table.insert(e.dt,n or 1,{
+ tg = "@cm@",
+ ns = "",
+ special = true,
+ at = { },
+ dt = { str },
+ })
+end
+
+function xml.setcdata(e,str) -- also setcomment
+ e.dt = { {
+ tg = "@cd@",
+ ns = "",
+ special = true,
+ at = { },
+ dt = { str },
+ } }
+end
+
end -- of closure
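-- Editor's illustration, not part of this changeset: the new cdata helpers
-- wrap and unwrap a single @cd@ child; e below stands for some element of a
-- converted tree.
--
-- xml.setcdata(e,"x < y & z")     -- replaces the content of e by a cdata node
-- print(xml.cdata(e))             -- "x < y & z"
-- xml.insertcomment(e,"remark",1) -- inserts a comment node at position 1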
@@ -9391,15 +10241,17 @@ if not modules then modules = { } end modules ['lxml-xml'] = {
license = "see context related readme files"
}
-local concat = string.concat
+local concat = table.concat
+local find = string.find
local xml = xml
-local finalizers = xml.finalizers.xml
-local xmlfilter = xml.filter -- we could inline this one for speed
-local xmltostring = xml.tostring
-local xmlserialize = xml.serialize
-local xmlcollected = xml.collected
+local finalizers = xml.finalizers.xml
+local xmlfilter = xml.filter -- we could inline this one for speed
+local xmltostring = xml.tostring
+local xmlserialize = xml.serialize
+local xmlcollected = xml.collected
+local xmlnewhandlers = xml.newhandlers
local function first(collected) -- wrong ?
return collected and collected[1]
@@ -9413,6 +10265,21 @@ local function all(collected)
return collected
end
+-- local function reverse(collected)
+-- if collected then
+-- local nc = #collected
+-- if nc > 0 then
+-- local reversed, r = { }, 0
+-- for c=nc,1,-1 do
+-- r = r + 1
+-- reversed[r] = collected[c]
+-- end
+-- return reversed
+-- else
+-- return collected
+-- end
+-- end
+-- end
local reverse = table.reversed
@@ -9429,34 +10296,37 @@ local function att(id,name)
end
local function count(collected)
- return (collected and #collected) or 0
+ return collected and #collected or 0
end
local function position(collected,n)
- if collected then
- n = tonumber(n) or 0
- if n < 0 then
- return collected[#collected + n + 1]
- elseif n > 0 then
- return collected[n]
- else
- return collected[1].mi or 0
- end
+ if not collected then
+ return 0
+ end
+ local nc = #collected
+ if nc == 0 then
+ return 0
+ end
+ n = tonumber(n) or 0
+ if n < 0 then
+ return collected[nc + n + 1]
+ elseif n > 0 then
+ return collected[n]
+ else
+ return collected[1].mi or 0
end
end
local function match(collected)
- return (collected and collected[1].mi) or 0 -- match
+ return collected and #collected > 0 and collected[1].mi or 0 -- match
end
local function index(collected)
- if collected then
- return collected[1].ni
- end
+ return collected and #collected > 0 and collected[1].ni or 0 -- 0 is new
end
local function attributes(collected,arguments)
- if collected then
+ if collected and #collected > 0 then
local at = collected[1].at
if arguments then
return at[arguments]
@@ -9467,7 +10337,7 @@ local function attributes(collected,arguments)
end
local function chainattribute(collected,arguments) -- todo: optional levels
- if collected then
+ if collected and #collected > 0 then
local e = collected[1]
while e do
local at = e.at
@@ -9485,108 +10355,169 @@ local function chainattribute(collected,arguments) -- todo: optional levels
return ""
end
-local function raw(collected) -- hybrid
- if collected then
+local function raw(collected) -- hybrid (not much different from text so it might go)
+ if collected and #collected > 0 then
local e = collected[1] or collected
- return (e and xmlserialize(e)) or "" -- only first as we cannot concat function
+ return e and xmltostring(e) or "" -- only first as we cannot concat function
else
return ""
end
end
+--
+
+local xmltexthandler = xmlnewhandlers {
+ name = "string",
+ initialize = function()
+ result = { }
+ return result
+ end,
+ finalize = function()
+ return concat(result)
+ end,
+ handle = function(...)
+ result[#result+1] = concat { ... }
+ end,
+ escape = false,
+}
+
+local function xmltotext(root)
+ local dt = root.dt
+ if not dt then
+ return ""
+ end
+ local nt = #dt -- string or table
+ if nt == 0 then
+ return ""
+ elseif nt == 1 and type(dt[1]) == "string" then
+ return dt[1] -- no escaping of " ' < > &
+ else
+ return xmlserialize(root,xmltexthandler) or ""
+ end
+end
+
+--
+
local function text(collected) -- hybrid
- if collected then
- local e = collected[1] or collected
- return (e and xmltostring(e.dt)) or ""
+ if collected then -- no # test here !
+ local e = collected[1] or collected -- why fallback to element, how about cdata
+ return e and xmltotext(e) or ""
else
return ""
end
end
local function texts(collected)
- if collected then
- local t, n = { }, 0
- for c=1,#collected do
- local e = collected[c]
- if e and e.dt then
- n = n + 1
- t[n] = e.dt
- end
+ if not collected then
+ return { } -- why no nil
+ end
+ local nc = #collected
+ if nc == 0 then
+ return { } -- why no nil
+ end
+ local t, n = { }, 0
+ for c=1,nc do
+ local e = collected[c]
+ if e and e.dt then
+ n = n + 1
+ t[n] = e.dt
end
- return t
end
+ return t
end
local function tag(collected,n)
- if collected then
- local c
- if n == 0 or not n then
- c = collected[1]
- elseif n > 1 then
- c = collected[n]
- else
- c = collected[#collected-n+1]
- end
- return c and c.tg
+ if not collected then
+ return
+ end
+ local nc = #collected
+ if nc == 0 then
+ return
end
+ local c
+ if n == 0 or not n then
+ c = collected[1]
+ elseif n > 1 then
+ c = collected[n]
+ else
+ c = collected[nc-n+1]
+ end
+ return c and c.tg
end
local function name(collected,n)
- if collected then
- local c
- if n == 0 or not n then
- c = collected[1]
- elseif n > 1 then
- c = collected[n]
- else
- c = collected[#collected-n+1]
- end
- if c then
- if c.ns == "" then
- return c.tg
- else
- return c.ns .. ":" .. c.tg
- end
- end
+ if not collected then
+ return
+ end
+ local nc = #collected
+ if nc == 0 then
+ return
+ end
+ local c
+ if n == 0 or not n then
+ c = collected[1]
+ elseif n > 1 then
+ c = collected[n]
+ else
+ c = collected[nc-n+1]
+ end
+ if not c then
+ -- sorry
+ elseif c.ns == "" then
+ return c.tg
+ else
+ return c.ns .. ":" .. c.tg
end
end
local function tags(collected,nonamespace)
- if collected then
- local t, n = { }, 0
- for c=1,#collected do
- local e = collected[c]
- local ns, tg = e.ns, e.tg
- n = n + 1
- if nonamespace or ns == "" then
- t[n] = tg
- else
- t[n] = ns .. ":" .. tg
- end
+ if not collected then
+ return
+ end
+ local nc = #collected
+ if nc == 0 then
+ return
+ end
+ local t, n = { }, 0
+ for c=1,nc do
+ local e = collected[c]
+ local ns, tg = e.ns, e.tg
+ n = n + 1
+ if nonamespace or ns == "" then
+ t[n] = tg
+ else
+ t[n] = ns .. ":" .. tg
end
- return t
end
+ return t
end
-local function empty(collected)
- if collected then
- for c=1,#collected do
- local e = collected[c]
- if e then
- local edt = e.dt
- if edt then
- local n = #edt
- if n == 1 then
- local edk = edt[1]
- local typ = type(edk)
- if typ == "table" then
- return false
- elseif edk ~= "" then -- maybe an extra tester for spacing only
- return false
- end
- elseif n > 1 then
+local function empty(collected,spacesonly)
+ if not collected then
+ return true
+ end
+ local nc = #collected
+ if nc == 0 then
+ return true
+ end
+ for c=1,nc do
+ local e = collected[c]
+ if e then
+ local edt = e.dt
+ if edt then
+ local n = #edt
+ if n == 1 then
+ local edk = edt[1]
+ local typ = type(edk)
+ if typ == "table" then
+ return false
+ elseif edk ~= "" then
+ return false
+ elseif spacesonly and not find(edk,"%S") then
return false
end
+ elseif n > 1 then
+ return false
end
end
end
@@ -9641,14 +10572,14 @@ function xml.raw(id,pattern)
end
end
-function xml.text(id,pattern)
+function xml.text(id,pattern) -- brrr either content or element (when cdata)
if pattern then
-- return text(xmlfilter(id,pattern))
local collected = xmlfilter(id,pattern)
- return (collected and xmltostring(collected[1].dt)) or ""
+ return collected and #collected > 0 and xmltotext(collected[1]) or ""
elseif id then
-- return text(id)
- return xmltostring(id.dt) or ""
+ return xmltotext(id) or ""
else
return ""
end
@@ -9656,6 +10587,8 @@ end
xml.content = text
+--
+
function xml.position(id,pattern,n) -- element
return position(xmlfilter(id,pattern),n)
end
@@ -9664,8 +10597,8 @@ function xml.match(id,pattern) -- number
return match(xmlfilter(id,pattern))
end
-function xml.empty(id,pattern)
- return empty(xmlfilter(id,pattern))
+function xml.empty(id,pattern,spacesonly)
+ return empty(xmlfilter(id,pattern),spacesonly)
end
xml.all = xml.filter
@@ -9719,7 +10652,7 @@ if not modules then modules = { } end modules ['data-ini'] = {
license = "see context related readme files",
}
-local gsub, find, gmatch = string.gsub, string.find, string.gmatch
+local gsub, find, gmatch, char = string.gsub, string.find, string.gmatch, string.char
local concat = table.concat
local next, type = next, type
@@ -9781,7 +10714,7 @@ do
local homedir = osgetenv(ostype == "windows" and 'USERPROFILE' or 'HOME') or ''
if not homedir or homedir == "" then
- homedir = string.char(127) -- we need a value, later we wil trigger on it
+        homedir = char(127) -- we need a value, later we will trigger on it
end
homedir = file.collapsepath(homedir)
@@ -9954,7 +10887,7 @@ if not modules then modules = { } end modules ['data-exp'] = {
license = "see context related readme files",
}
-local format, find, gmatch, lower = string.format, string.find, string.gmatch, string.lower
+local format, find, gmatch, lower, char, sub = string.format, string.find, string.gmatch, string.lower, string.char, string.sub
local concat, sort = table.concat, table.sort
local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
local Ct, Cs, Cc, P, C, S = lpeg.Ct, lpeg.Cs, lpeg.Cc, lpeg.P, lpeg.C, lpeg.S
@@ -9974,19 +10907,6 @@ local resolvers = resolvers
-- all, when working on the main resolver code, I don't want to scroll
-- past this every time. See data-obs.lua for the gsub variant.
--- {a,b,c,d}
--- a,b,c/{p,q,r},d
--- a,b,c/{p,q,r}/d/{x,y,z}//
--- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
--- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
--- a{b,c}{d,e}f
--- {a,b,c,d}
--- {a,b,c/{p,q,r},d}
--- {a,b,c/{p,q,r}/d/{x,y,z}//}
--- {a,b,c/{p,q/{x,y,z}},d/{p,q,r}}
--- {a,b,c/{p,q/{x,y,z},w}v,d/{p,q,r}}
--- {$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,.local,}/web2c}
-
local function f_first(a,b)
local t, n = { }, 0
for s in gmatch(b,"[^,]+") do
@@ -10043,12 +10963,9 @@ local function splitpathexpr(str, newlist, validate) -- I couldn't resist lpeggi
str = lpegmatch(stripper_1,str)
if validate then
for s in gmatch(str,"[^,]+") do
- local restoreslashes = false
- if find(s, "//$") then restoreslashes = true end
s = validate(s)
if s then
n = n + 1 ; t[n] = s
- if restoreslashes and not find(s, "//$") then t[n] = t[n] .. '//' end
end
end
else
@@ -10081,41 +10998,56 @@ function resolvers.expandedpathfromlist(pathlist)
return newlist
end
+-- {a,b,c,d}
+-- a,b,c/{p,q,r},d
+-- a,b,c/{p,q,r}/d/{x,y,z}//
+-- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
+-- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
+-- a{b,c}{d,e}f
+-- {a,b,c,d}
+-- {a,b,c/{p,q,r},d}
+-- {a,b,c/{p,q,r}/d/{x,y,z}//}
+-- {a,b,c/{p,q/{x,y,z}},d/{p,q,r}}
+-- {a,b,c/{p,q/{x,y,z},w}v,d/{p,q,r}}
+-- {$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,.local,}/web2c}
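+--
+-- for instance, a{b,c}{d,e}f is expected to expand to: abdf, abef, acdf, acef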
+
local cleanup = lpeg.replacer {
{ "!" , "" },
{ "\\" , "/" },
}
-local homedir
-
-function resolvers.cleanpath(str)
- if not homedir then
- homedir = lpegmatch(cleanup,environment.homedir or "")
- if homedir == string.char(127) or homedir == "" or not lfs.isdir(homedir) then
- if trace_expansions then
- report_expansions("no home dir set, ignoring dependent paths")
- end
- function resolvers.cleanpath(str)
- if find(str,"~") then
- return "" -- special case
- else
- return str and lpegmatch(cleanup,str)
- end
- end
- else
- cleanup = lpeg.replacer {
- { "!" , "" },
- { "\\" , "/" },
- { "~" , homedir },
- }
- function resolvers.cleanpath(str)
- return str and lpegmatch(cleanup,str)
+function resolvers.cleanpath(str) -- tricky, maybe only simple paths
+ local doslashes = (P("\\")/"/" + 1)^0
+ local donegation = (P("!") /"" )^0
+ local homedir = lpegmatch(Cs(donegation * doslashes),environment.homedir or "")
+ if homedir == "~" or homedir == "" or not lfs.isdir(homedir) then
+ if trace_expansions then
+ report_expansions("no home dir set, ignoring dependent paths")
+ end
+ function resolvers.cleanpath(str)
+ if not str or find(str,"~") then
+ return "" -- special case
+ else
+ return lpegmatch(cleanup,str)
end
end
+ else
+ local dohome = ((P("~")+P("$HOME"))/homedir)^0
+ local cleanup = Cs(donegation * dohome * doslashes)
+ function resolvers.cleanpath(str)
+ return str and lpegmatch(cleanup,str) or ""
+ end
end
return resolvers.cleanpath(str)
end
+-- print(resolvers.cleanpath(""))
+-- print(resolvers.cleanpath("!"))
+-- print(resolvers.cleanpath("~"))
+-- print(resolvers.cleanpath("~/test"))
+-- print(resolvers.cleanpath("!~/test"))
+-- print(resolvers.cleanpath("~/test~test"))
+
-- This one strips quotes and funny tokens.
local expandhome = P("~") / "$HOME" -- environment.homedir
@@ -10140,8 +11072,8 @@ end
local cache = { }
----- splitter = Ct(lpeg.splitat(S(ostype == "windows" and ";" or ":;"))) -- maybe add ,
-local splitter = Ct(lpeg.splitat(";")) -- as we move towards urls, prefixes and use tables we no longer do :
+----- splitter = lpeg.tsplitat(S(ostype == "windows" and ";" or ":;")) -- maybe add ,
+local splitter = lpeg.tsplitat(";") -- as we move towards urls, prefixes and use tables we no longer do :
local backslashswapper = lpeg.replacer("\\","/")
@@ -10201,13 +11133,22 @@ end
-local weird = P(".")^1 + lpeg.anywhere(S("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t"))
+-- a lot of this caching can be stripped away when we have ssd's everywhere
+--
+-- we could cache all the (sub)paths here if needed
local attributes, directory = lfs.attributes, lfs.dir
+local weird = P(".")^1 + lpeg.anywhere(S("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t"))
+local timer = { }
+local scanned = { }
+local nofscans = 0
+local scancache = { }
+
local function scan(files,spec,path,n,m,r)
- local full = (path == "" and spec) or (spec .. path .. '/')
- local dirs, nofdirs = { }, 0
+ local full = (path == "" and spec) or (spec .. path .. '/')
+ local dirs = { }
+ local nofdirs = 0
for name in directory(full) do
if not lpegmatch(weird,name) then
local mode = attributes(full..name,'mode')
@@ -10245,25 +11186,122 @@ local function scan(files,spec,path,n,m,r)
files, n, m, r = scan(files,spec,dirs[i],n,m,r)
end
end
+ scancache[sub(full,1,-2)] = files
return files, n, m, r
end
-function resolvers.scanfiles(path,branch)
+local fullcache = { }
+
+function resolvers.scanfiles(path,branch,usecache)
+ statistics.starttiming(timer)
+ local realpath = resolvers.resolve(path) -- no shortcut
+ if usecache then
+ local files = fullcache[realpath]
+ if files then
+ if trace_locating then
+            report_expansions("using cached scan of path '%s', branch '%s'",path,branch or path)
+ end
+ return files
+ end
+ end
if trace_locating then
- report_expansions("scanning path '%s', branch '%s'",path, branch or path)
+ report_expansions("scanning path '%s', branch '%s'",path,branch or path)
end
- local realpath = resolvers.resolve(path) -- no shortcut
local files, n, m, r = scan({ },realpath .. '/',"",0,0,0)
- files.__path__ = path -- can be selfautoparent:texmf-whatever
- files.__files__ = n
- files.__directories__ = m
- files.__remappings__ = r
+ files.__path__ = path -- can be selfautoparent:texmf-whatever
+ files.__files__ = n
+ files.__directories__ = m
+ files.__remappings__ = r
if trace_locating then
report_expansions("%s files found on %s directories with %s uppercase remappings",n,m,r)
end
+ if usecache then
+ scanned[#scanned+1] = realpath
+ fullcache[realpath] = files
+ end
+ nofscans = nofscans + 1
+ statistics.stoptiming(timer)
+ return files
+end
+
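+-- e.g. (sketch): resolvers.scanfiles("texmf-local",nil,true) builds the file table
+-- once and, thanks to the cache above, returns the same table on later calls
+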
+local function simplescan(files,spec,path) -- first match only, no map and such
+ local full = (path == "" and spec) or (spec .. path .. '/')
+ local dirs = { }
+ local nofdirs = 0
+ for name in directory(full) do
+ if not lpegmatch(weird,name) then
+ local mode = attributes(full..name,'mode')
+ if mode == 'file' then
+ if not files[name] then
+ -- only first match
+ files[name] = path
+ end
+ elseif mode == 'directory' then
+ nofdirs = nofdirs + 1
+ if path ~= "" then
+ dirs[nofdirs] = path..'/'..name
+ else
+ dirs[nofdirs] = name
+ end
+ end
+ end
+ end
+ if nofdirs > 0 then
+ sort(dirs)
+ for i=1,nofdirs do
+ files = simplescan(files,spec,dirs[i])
+ end
+ end
+ return files
+end
+
+local simplecache = { }
+local nofsharedscans = 0
+
+function resolvers.simplescanfiles(path,branch,usecache)
+ statistics.starttiming(timer)
+ local realpath = resolvers.resolve(path) -- no shortcut
+ if usecache then
+ local files = simplecache[realpath]
+ if not files then
+ files = scancache[realpath]
+ if files then
+ nofsharedscans = nofsharedscans + 1
+ end
+ end
+ if files then
+ if trace_locating then
+            report_expansions("using cached scan of path '%s', branch '%s'",path,branch or path)
+ end
+ return files
+ end
+ end
+ if trace_locating then
+ report_expansions("scanning path '%s', branch '%s'",path,branch or path)
+ end
+ local files = simplescan({ },realpath .. '/',"")
+ if trace_locating then
+ report_expansions("%s files found",table.count(files))
+ end
+ if usecache then
+ scanned[#scanned+1] = realpath
+ simplecache[realpath] = files
+ end
+ nofscans = nofscans + 1
+ statistics.stoptiming(timer)
return files
end
+function resolvers.scandata()
+ table.sort(scanned)
+ return {
+ n = nofscans,
+ shared = nofsharedscans,
+ time = statistics.elapsedtime(timer),
+ paths = scanned,
+ }
+end
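+
+-- the returned table has roughly this shape (values are of course run dependent):
+--
+--   { n = 2, shared = 1, time = 0.05, paths = { "/some/texmf-local", "/some/texmf" } }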
+
end -- of closure
@@ -10361,12 +11399,12 @@ local relations = allocate { -- todo: handlers also here
mp = {
names = { "mp" },
variable = 'MPINPUTS',
- suffixes = { 'mp' },
+ suffixes = { 'mp', 'mpvi', 'mpiv', 'mpii' },
},
tex = {
names = { "tex" },
variable = 'TEXINPUTS',
- suffixes = { 'tex', "mkiv", "mkiv", "mkii" },
+ suffixes = { 'tex', "mkvi", "mkiv", "mkii" },
},
icc = {
names = { "icc", "icc profile", "icc profiles" },
@@ -10503,7 +11541,7 @@ end
resolvers.updaterelations() -- push this in the metatable -> newindex
local function simplified(t,k)
- return rawget(t,lower(gsub(k," ","")))
+ return k and rawget(t,lower(gsub(k," ",""))) or nil
end
setmetatableindex(formats, simplified)
@@ -10589,6 +11627,7 @@ luatools with a recache feature.</p>
--ldx]]--
local format, lower, gsub, concat = string.format, string.lower, string.gsub, table.concat
+local serialize, serializetofile = table.serialize, table.tofile
local mkdirs, isdir = dir.mkdirs, lfs.isdir
local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
@@ -10651,11 +11690,7 @@ local function identify()
end
elseif not writable and caches.force then
local cacheparent = file.dirname(cachepath)
- -- TH: added 'or true' for deeply buried non-existent caches.
- -- file.is_writable() is not really important here, since this
- -- branch is only checked for as-yet non-existent paths, and
- -- it guards against mkdirs() failing, anyway.
- if file.is_writable(cacheparent) or true then
+ if file.is_writable(cacheparent) and true then -- we go on anyway (needed for mojca's kind of paths)
if not caches.ask or io.ask(format("\nShould I create the cache path %s?",cachepath), "no", { "yes", "no" }) == "yes" then
mkdirs(cachepath)
if isdir(cachepath) and file.is_writable(cachepath) then
@@ -10746,7 +11781,7 @@ function caches.usedpaths()
end
function caches.configfiles()
- return table.concat(resolvers.instance.specification,";")
+ return concat(resolvers.instance.specification,";")
end
function caches.hashed(tree)
@@ -10870,9 +11905,9 @@ function caches.savedata(filepath,filename,data,raw)
end
data.cache_uuid = os.uuid()
if caches.direct then
- file.savedata(tmaname,table.serialize(data,true,saveoptions))
+ file.savedata(tmaname,serialize(data,true,saveoptions))
else
- table.tofile(tmaname,data,true,saveoptions)
+ serializetofile(tmaname,data,true,saveoptions)
end
utilities.lua.compile(tmaname,tmcname)
end
@@ -10939,7 +11974,7 @@ function caches.savecontent(cachename,dataname,content)
content = content,
uuid = os.uuid(),
}
- local ok = io.savedata(luaname,table.serialize(data,true))
+ local ok = io.savedata(luaname,serialize(data,true))
if ok then
if trace_locating then
report_resolvers("category '%s', cachename '%s' saved in '%s'",dataname,cachename,luaname)
@@ -11112,7 +12147,7 @@ if not modules then modules = { } end modules ['data-res'] = {
-- instance but for practical purposes we now avoid this and use a
-- instance variable. We always have one instance active (sort of global).
--- todo: cache:/// home:///
+-- todo: cache:/// home:/// selfautoparent:/// (sometime end 2012)
local format, gsub, find, lower, upper, match, gmatch = string.format, string.gsub, string.find, string.lower, string.upper, string.match, string.gmatch
local concat, insert, sortedkeys = table.concat, table.insert, table.sortedkeys
@@ -11155,7 +12190,58 @@ resolvers.criticalvars = allocate { "SELFAUTOLOC", "SELFAUTODIR", "SELFAUTOPARE
resolvers.luacnfname = 'texmfcnf.lua'
resolvers.luacnfstate = "unknown"
-resolvers.luacnfspec = '{selfautodir:,selfautoparent:};{selfautodir:,selfautoparent:}{/share,}/texmf{-local,};{selfautodir:,selfautoparent:}{/share,}/texmf{-local,}/web2c'
+-- The web2c tex binaries as well as kpse have built in paths for the configuration
+-- files and there can be a depressing truckload of them. This is actually the weak
+-- spot of a distribution. So we don't want:
+--
+-- resolvers.luacnfspec = '{$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,}/web2c}'
+--
+-- but instead use:
+--
+-- resolvers.luacnfspec = 'selfautoparent:{/texmf{-local,}{,/web2c}}'
+--
+-- which does not make texlive happy as there is a texmf-local tree one level up
+-- (sigh), so we need this. (We can assume web2c as mkiv does not run on older
+-- texlives anyway.)
+--
+-- texlive:
+--
+-- selfautodir:
+-- selfautoparent:
+-- selfautodir:share/texmf-local/web2c
+-- selfautodir:share/texmf/web2c
+-- selfautodir:texmf-local/web2c
+-- selfautodir:texmf/web2c
+-- selfautoparent:share/texmf-local/web2c
+-- selfautoparent:share/texmf/web2c
+-- selfautoparent:texmf-local/web2c
+-- selfautoparent:texmf/web2c
+--
+-- minimals:
+--
+-- home:texmf/web2c
+-- selfautoparent:texmf-local/web2c
+-- selfautoparent:texmf-context/web2c
+-- selfautoparent:texmf/web2c
+
+if this_is_texlive then
+ -- resolvers.luacnfspec = '{selfautodir:,selfautoparent:}{,{/share,}/texmf{-local,}/web2c}'
+ -- resolvers.luacnfspec = '{selfautodir:{/share,}/texmf-local/web2c,selfautoparent:{/share,}/texmf{-local,}/web2c}'
+ -- resolvers.luacnfspec = 'selfautodir:/texmf-local/web2c;selfautoparent:/texmf{-local,}/web2c'
+ resolvers.luacnfspec = 'selfautodir:;selfautoparent:;{selfautodir:,selfautoparent:}{/share,}/texmf{-local,}/web2c'
+else
+ resolvers.luacnfspec = 'home:texmf/web2c;selfautoparent:texmf{-local,-context,}/web2c'
+end
+
+-- which (as we want users to use the web2c path) can be simplified to this:
+--
+-- if environment and environment.ownpath and string.find(environment.ownpath,"[\\/]texlive[\\/]") then
+-- resolvers.luacnfspec = 'selfautodir:/texmf-local/web2c,selfautoparent:/texmf-local/web2c,selfautoparent:/texmf/web2c'
+-- else
+-- resolvers.luacnfspec = 'selfautoparent:/texmf-local/web2c,selfautoparent:/texmf/web2c'
+-- end
+
+
local unset_variable = "unset"
@@ -11177,7 +12263,7 @@ local instance = resolvers.instance or nil -- the current one (fast access)
function resolvers.setenv(key,value,raw)
if instance then
-- this one will be consulted first when we stay inside
- -- the current environment
+ -- the current environment (prefixes are not resolved here)
instance.environment[key] = value
-- we feed back into the environment, and as this is used
-- by other applications (via os.execute) we need to make
@@ -11257,6 +12343,7 @@ function resolvers.newinstance() -- todo: all vars will become lowercase and alp
remember = true,
diskcache = true,
renewcache = false,
+ renewtree = false,
loaderror = false,
savelists = true,
pattern = nil, -- lists
@@ -11348,7 +12435,7 @@ local function makepathexpression(str)
end
end
-local function reportcriticalvariables()
+local function reportcriticalvariables(cnfspec)
if trace_locating then
for i=1,#resolvers.criticalvars do
local k = resolvers.criticalvars[i]
@@ -11356,6 +12443,14 @@ local function reportcriticalvariables()
report_resolving("variable '%s' set to '%s'",k,v)
end
report_resolving()
+ if cnfspec then
+ if type(cnfspec) == "table" then
+ report_resolving("using configuration specification '%s'",concat(cnfspec,","))
+ else
+ report_resolving("using configuration specification '%s'",cnfspec)
+ end
+ end
+ report_resolving()
end
reportcriticalvariables = function() end
end
@@ -11370,7 +12465,7 @@ local function identify_configuration_files()
else
resolvers.luacnfstate = "environment"
end
- reportcriticalvariables()
+ reportcriticalvariables(cnfspec)
local cnfpaths = expandedpathfromlist(resolvers.splitpath(cnfspec))
local luacnfname = resolvers.luacnfname
for i=1,#cnfpaths do
@@ -11406,6 +12501,19 @@ local function load_configuration_files()
if blob then
local setups = instance.setups
local data = blob()
+ local parent = data and data.parent
+ if parent then
+ local filename = filejoin(pathname,parent)
+ local realname = resolvers.resolve(filename) -- no shortcut
+ local blob = loadfile(realname)
+ if blob then
+ local parentdata = blob()
+ if parentdata then
+ report_resolving("loading configuration file '%s'",filename)
+ data = table.merged(parentdata,data)
+ end
+ end
+ end
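+            -- a (hypothetical) child configuration can thus inherit from another file:
+            --
+            --   return { parent = "texmfcnf-common.lua", content = { TEXMFCACHE = "selfautoparent:texmf-cache" } }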
data = data and data.content
if data then
if trace_locating then
@@ -11495,11 +12603,6 @@ local function locate_file_databases()
local runtime = stripped == path
path = resolvers.cleanpath(path)
local spec = resolvers.splitmethod(stripped)
- -- TH Original did this the other way around (elseif and if tests
- -- reversed) but then the else branch was never reached, as 'runtime'
- -- would effectively be ignored. In turn, that meant that the wrong
- -- locator method was used, such that tree: and file: were treated
- -- identically (cached and no runtime search).
if runtime and (spec.noscheme or spec.scheme == "file") then
stripped = "tree:///" .. stripped
elseif spec.scheme == "cache" or spec.scheme == "file" then
@@ -11507,7 +12610,7 @@ local function locate_file_databases()
end
if trace_locating then
if runtime then
- report_resolving("locating list of '%s' (runtime)",path)
+ report_resolving("locating list of '%s' (runtime) (%s)",path,stripped)
else
report_resolving("locating list of '%s' (cached)",path)
end
@@ -11551,6 +12654,39 @@ local function save_file_databases() -- will become cachers
end
end
+function resolvers.renew(hashname)
+ if hashname and hashname ~= "" then
+ local expanded = resolvers.expansion(hashname) or ""
+ if expanded ~= "" then
+ if trace_locating then
+ report_resolving("identifying tree '%s' from '%s'",expanded,hashname)
+ end
+ hashname = expanded
+ else
+ if trace_locating then
+ report_resolving("identifying tree '%s'",hashname)
+ end
+ end
+ local realpath = resolvers.resolve(hashname)
+ if lfs.isdir(realpath) then
+ if trace_locating then
+ report_resolving("using path '%s'",realpath)
+ end
+ methodhandler('generators',hashname)
+ -- could be shared
+ local content = instance.files[hashname]
+ caches.collapsecontent(content)
+ if trace_locating then
+ report_resolving("saving tree '%s'",hashname)
+ end
+ caches.savecontent(hashname,"files",content)
+ -- till here
+ else
+ report_resolving("invalid path '%s'",realpath)
+ end
+ end
+end
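+
+-- e.g. (sketch): resolvers.renew("texmf-local") regenerates the file database for
+-- that tree and saves it again; a variable name is expanded first when possible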
+
local function load_databases()
locate_file_databases()
if instance.diskcache and not instance.renewcache then
@@ -11838,6 +12974,7 @@ local function collect_files(names)
if dname == "" or find(dname,"^%.") then
dname = false
else
+dname = gsub(dname,"*","%.*")
dname = "/" .. dname .. "$"
end
local hashes = instance.hashes
@@ -11897,9 +13034,22 @@ local function collect_files(names)
return noffiles > 0 and filelist or nil
end
-function resolvers.registerintrees(name)
- if not find(name,"^%.") then
- instance.foundintrees[name] = (instance.foundintrees[name] or 0) + 1 -- maybe only one
+local fit = { }
+
+function resolvers.registerintrees(filename,format,filetype,usedmethod,foundname)
+ local foundintrees = instance.foundintrees
+ if usedmethod == "direct" and filename == foundname and fit[foundname] then
+ -- just an extra lookup after a test on presence
+ else
+ local t = {
+ filename = filename,
+ format = format ~= "" and format or nil,
+ filetype = filetype ~= "" and filetype or nil,
+ usedmethod = usedmethod,
+ foundname = foundname,
+ }
+ fit[foundname] = t
+ foundintrees[#foundintrees+1] = t
end
end
@@ -11919,297 +13069,423 @@ end
local preparetreepattern = Cs((P(".")/"%%." + P("-")/"%%-" + P(1))^0 * Cc("$"))
--- this one is split in smaller functions but it needs testing
+-- -- -- begin of main file search routing -- -- -- needs checking as the previous version has been patched
-local function collect_instance_files(filename,askedformat,allresults) -- todo : plugin (scanners, checkers etc)
- local result = { }
- local stamp = nil
- askedformat = askedformat or ""
- filename = collapsepath(filename)
- -- speed up / beware: format problem
- if instance.remember and not allresults then
- stamp = filename .. "--" .. askedformat
- if instance.found[stamp] then
+local collect_instance_files
+
+local function find_analyze(filename,askedformat,allresults)
+ local filetype, wantedfiles, ext = '', { }, fileextname(filename)
+ -- too tricky as filename can be bla.1.2.3:
+ --
+ -- if not suffixmap[ext] then
+ -- wantedfiles[#wantedfiles+1] = filename
+ -- end
+ wantedfiles[#wantedfiles+1] = filename
+ if askedformat == "" then
+ if ext == "" or not suffixmap[ext] then
+ local defaultsuffixes = resolvers.defaultsuffixes
+ for i=1,#defaultsuffixes do
+ local forcedname = filename .. '.' .. defaultsuffixes[i]
+ wantedfiles[#wantedfiles+1] = forcedname
+ filetype = resolvers.formatofsuffix(forcedname)
+ if trace_locating then
+ report_resolving("forcing filetype '%s'",filetype)
+ end
+ end
+ else
+ filetype = resolvers.formatofsuffix(filename)
if trace_locating then
- report_resolving("remembered file '%s'",filename)
+ report_resolving("using suffix based filetype '%s'",filetype)
end
- resolvers.registerintrees(filename) -- for tracing used files
- return instance.found[stamp]
end
- end
- if not dangerous[askedformat] then
- if isreadable(filename) then
- if trace_detail then
- report_resolving("file '%s' found directly",filename)
- end
- if stamp then
- instance.found[stamp] = { filename }
+ else
+ if ext == "" or not suffixmap[ext] then
+ local format_suffixes = suffixes[askedformat]
+ if format_suffixes then
+ for i=1,#format_suffixes do
+ wantedfiles[#wantedfiles+1] = filename .. "." .. format_suffixes[i]
+ end
end
- return { filename }
+ end
+ filetype = askedformat
+ if trace_locating then
+ report_resolving("using given filetype '%s'",filetype)
end
end
+ return filetype, wantedfiles
+end
+
+local function find_direct(filename,allresults)
+ if not dangerous[askedformat] and isreadable(filename) then
+ if trace_detail then
+ report_resolving("file '%s' found directly",filename)
+ end
+ return "direct", { filename }
+ end
+end
+
+local function find_wildcard(filename,allresults)
if find(filename,'%*') then
if trace_locating then
report_resolving("checking wildcard '%s'", filename)
end
- result = resolvers.findwildcardfiles(filename) -- we can use th elocal
- elseif file.is_qualified_path(filename) then
- if isreadable(filename) then
- if trace_locating then
- report_resolving("qualified name '%s'", filename)
- end
- result = { filename }
- else
- local forcedname, ok, suffix = "", false, fileextname(filename)
- if suffix == "" then -- why
- local format_suffixes = askedformat == "" and resolvers.defaultsuffixes or suffixes[askedformat]
- if format_suffixes then
- for i=1,#format_suffixes do
- local s = format_suffixes[i]
- forcedname = filename .. "." .. s
- if isreadable(forcedname) then
- if trace_locating then
- report_resolving("no suffix, forcing format filetype '%s'", s)
- end
- result, ok = { forcedname }, true
- break
- end
- end
- end
- end
- if not ok and suffix ~= "" then
- -- try to find in tree (no suffix manipulation), here we search for the
- -- matching last part of the name
- local basename = filebasename(filename)
- local pattern = lpegmatch(preparetreepattern,filename)
- -- messy .. to be sorted out
- local savedformat = askedformat
- local format = savedformat or ""
- if format == "" then
- askedformat = resolvers.formatofsuffix(suffix)
- end
- if not format then
- askedformat = "othertextfiles" -- kind of everything, maybe texinput is better
- end
- --
- if basename ~= filename then
- local resolved = collect_instance_files(basename,askedformat,allresults)
- if #result == 0 then -- shouldn't this be resolved ?
- local lowered = lower(basename)
- if filename ~= lowered then
- resolved = collect_instance_files(lowered,askedformat,allresults)
- end
- end
- resolvers.format = savedformat
- --
- for r=1,#resolved do
- local rr = resolved[r]
- if find(rr,pattern) then
- result[#result+1], ok = rr, true
- end
- end
- end
- -- a real wildcard:
- --
- -- if not ok then
- -- local filelist = collect_files({basename})
- -- for f=1,#filelist do
- -- local ff = filelist[f][3] or ""
- -- if find(ff,pattern) then
- -- result[#result+1], ok = ff, true
- -- end
- -- end
- -- end
- end
- if not ok and trace_locating then
- report_resolving("qualified name '%s'", filename)
- end
+ local method, result = resolvers.findwildcardfiles(filename)
+ if result then
+ return "wildcard", result
end
- else
- -- search spec
- local filetype, done, wantedfiles, ext = '', false, { }, fileextname(filename)
- -- -- tricky as filename can be bla.1.2.3
- -- if not suffixmap[ext] then --- probably needs to be done elsewhere too
- -- wantedfiles[#wantedfiles+1] = filename
- -- end
- wantedfiles[#wantedfiles+1] = filename
- if askedformat == "" then
- if ext == "" or not suffixmap[ext] then
- local defaultsuffixes = resolvers.defaultsuffixes
- for i=1,#defaultsuffixes do
- local forcedname = filename .. '.' .. defaultsuffixes[i]
- wantedfiles[#wantedfiles+1] = forcedname
- filetype = resolvers.formatofsuffix(forcedname)
+ end
+end
+
+local function find_qualified(filename,allresults) -- this one will be split too
+ if not file.is_qualified_path(filename) then
+ return
+ end
+ if trace_locating then
+ report_resolving("checking qualified name '%s'", filename)
+ end
+ if isreadable(filename) then
+ if trace_detail then
+ report_resolving("qualified file '%s' found", filename)
+ end
+ return "qualified", { filename }
+ end
+ if trace_detail then
+ report_resolving("locating qualified file '%s'", filename)
+ end
+ local forcedname, suffix = "", fileextname(filename)
+ if suffix == "" then -- why
+ local format_suffixes = askedformat == "" and resolvers.defaultsuffixes or suffixes[askedformat]
+ if format_suffixes then
+ for i=1,#format_suffixes do
+ local s = format_suffixes[i]
+ forcedname = filename .. "." .. s
+ if isreadable(forcedname) then
if trace_locating then
- report_resolving("forcing filetype '%s'",filetype)
+ report_resolving("no suffix, forcing format filetype '%s'", s)
end
+ return "qualified", { forcedname }
end
- else
- filetype = resolvers.formatofsuffix(filename)
- if trace_locating then
- report_resolving("using suffix based filetype '%s'",filetype)
+ end
+ end
+ end
+ if suffix and suffix ~= "" then
+ -- try to find in tree (no suffix manipulation), here we search for the
+ -- matching last part of the name
+ local basename = filebasename(filename)
+ local pattern = lpegmatch(preparetreepattern,filename)
+ -- messy .. to be sorted out
+ local savedformat = askedformat
+ local format = savedformat or ""
+ if format == "" then
+ askedformat = resolvers.formatofsuffix(suffix)
+ end
+ if not format then
+ askedformat = "othertextfiles" -- kind of everything, maybe all
+ end
+ --
+ if basename ~= filename then
+ local resolved = collect_instance_files(basename,askedformat,allresults)
+ if #resolved == 0 then
+ local lowered = lower(basename)
+ if filename ~= lowered then
+ resolved = collect_instance_files(lowered,askedformat,allresults)
end
end
- else
- if ext == "" or not suffixmap[ext] then
- local format_suffixes = suffixes[askedformat]
- if format_suffixes then
- for i=1,#format_suffixes do
- wantedfiles[#wantedfiles+1] = filename .. "." .. format_suffixes[i]
+ resolvers.format = savedformat
+ --
+ if #resolved > 0 then
+ local result = { }
+ for r=1,#resolved do
+ local rr = resolved[r]
+ if find(rr,pattern) then
+ result[#result+1] = rr
end
end
+ if #result > 0 then
+ return "qualified", result
+ end
end
- filetype = askedformat
- if trace_locating then
- report_resolving("using given filetype '%s'",filetype)
- end
end
- local typespec = resolvers.variableofformat(filetype)
- local pathlist = resolvers.expandedpathlist(typespec)
- if not pathlist or #pathlist == 0 then
- -- no pathlist, access check only / todo == wildcard
- if trace_detail then
- report_resolving("checking filename '%s', filetype '%s', wanted files '%s'",filename, filetype or '?',concat(wantedfiles," | "))
+ -- a real wildcard:
+ --
+ -- local filelist = collect_files({basename})
+ -- result = { }
+ -- for f=1,#filelist do
+ -- local ff = filelist[f][3] or ""
+ -- if find(ff,pattern) then
+ -- result[#result+1], ok = ff, true
+ -- end
+ -- end
+ -- if #result > 0 then
+ -- return "qualified", result
+ -- end
+ end
+end
+
+local function check_subpath(fname)
+ if isreadable(fname) then
+ if trace_detail then
+ report_resolving("found '%s' by deep scanning",fname)
+ end
+ return fname
+ end
+end
+
+local function find_intree(filename,filetype,wantedfiles,allresults)
+ local typespec = resolvers.variableofformat(filetype)
+ local pathlist = resolvers.expandedpathlist(typespec)
+ local method = "intree"
+ if pathlist and #pathlist > 0 then
+ -- list search
+ local filelist = collect_files(wantedfiles)
+ local dirlist = { }
+ if filelist then
+ for i=1,#filelist do
+ dirlist[i] = filedirname(filelist[i][3]) .. "/" -- was [2] .. gamble
end
- for k=1,#wantedfiles do
- local fname = wantedfiles[k]
- if fname and isreadable(fname) then
- filename, done = fname, true
- result[#result+1] = filejoin('.',fname)
- break
+ end
+ if trace_detail then
+ report_resolving("checking filename '%s'",filename)
+ end
+ local result = { }
+ for k=1,#pathlist do
+ local path = pathlist[k]
+ local pathname = lpegmatch(inhibitstripper,path)
+ local doscan = path == pathname -- no ^!!
+ if not find (pathname,'//$') then
+ doscan = false -- we check directly on the path
+ end
+ local done = false
+ -- using file list
+ if filelist then -- database
+ -- compare list entries with permitted pattern -- /xx /xx//
+ local expression = makepathexpression(pathname)
+ if trace_detail then
+ report_resolving("using pattern '%s' for path '%s'",expression,pathname)
end
- end
- -- this is actually 'other text files' or 'any' or 'whatever'
- local filelist = collect_files(wantedfiles)
- local fl = filelist and filelist[1]
- if fl then
- filename = fl[3] -- not local?
- result[#result+1] = resolvers.resolve(filename)
- done = true
- end
- else
- -- list search
- local filelist = collect_files(wantedfiles)
- local dirlist = { }
- if filelist then
- for i=1,#filelist do
- dirlist[i] = filedirname(filelist[i][3]) .. "/" -- was [2] .. gamble
- end
- end
- if trace_detail then
- report_resolving("checking filename '%s'",filename)
- end
- for k=1,#pathlist do
- local path = pathlist[k]
- local pathname = lpegmatch(inhibitstripper,path)
- local doscan = path == pathname -- no ^!!
- if not find (pathname, '//$') then doscan = false end
- done = false
- -- using file list
- if filelist then
- -- compare list entries with permitted pattern -- /xx /xx//
- local expression = makepathexpression(pathname)
- if trace_detail then
- report_resolving("using pattern '%s' for path '%s'",expression,pathname)
- end
- for k=1,#filelist do
- local fl = filelist[k]
- local f = fl[2]
- local d = dirlist[k]
- if find(d,expression) then
- -- todo, test for readable
- result[#result+1] = resolvers.resolve(fl[3]) -- no shortcut
- done = true
- if allresults then
- if trace_detail then
- report_resolving("match to '%s' in hash for file '%s' and path '%s', continue scanning",expression,f,d)
- end
- else
- if trace_detail then
- report_resolving("match to '%s' in hash for file '%s' and path '%s', quit scanning",expression,f,d)
- end
- break
+ for k=1,#filelist do
+ local fl = filelist[k]
+ local f = fl[2]
+ local d = dirlist[k]
+ if find(d,expression) then
+ -- todo, test for readable
+ result[#result+1] = resolvers.resolve(fl[3]) -- no shortcut
+ done = true
+ if allresults then
+ if trace_detail then
+ report_resolving("match to '%s' in hash for file '%s' and path '%s', continue scanning",expression,f,d)
end
- elseif trace_detail then
- report_resolving("no match to '%s' in hash for file '%s' and path '%s'",expression,f,d)
+ else
+ if trace_detail then
+ report_resolving("match to '%s' in hash for file '%s' and path '%s', quit scanning",expression,f,d)
+ end
+ break
end
+ elseif trace_detail then
+ report_resolving("no match to '%s' in hash for file '%s' and path '%s'",expression,f,d)
end
end
- if not done then
- -- check if on disk / unchecked / does not work at all / also zips
- -- TH perhaps it did not work because of missing resolvers.resolve()
- -- and resolvers.scanfiles() calls ...
- pathname = resolvers.resolve(pathname)
- local scheme = url.hasscheme(pathname)
- if not scheme or scheme == "file" then
- local pname = gsub(pathname,"%.%*$",'')
- if not find(pname,"%*") then
- local ppname = gsub(pname,"/+$","")
- if can_be_dir(ppname) then
- local files = {}
- if doscan then files = resolvers.scanfiles(ppname) end
+ end
+ if done then
+ method = "database"
+ else
+ method = "filesystem" -- bonus, even when !! is specified
+ pathname = gsub(pathname,"/+$","")
+ pathname = resolvers.resolve(pathname)
+ local scheme = url.hasscheme(pathname)
+ if not scheme or scheme == "file" then
+ local pname = gsub(pathname,"%.%*$",'')
+ if not find(pname,"%*") then
+ if can_be_dir(pname) then
+ -- quick root scan first
+ for k=1,#wantedfiles do
+ local w = wantedfiles[k]
+ local fname = check_subpath(filejoin(pname,w))
+ if fname then
+ result[#result+1] = fname
+ done = true
+ if not allresults then
+ break
+ end
+ end
+ end
+ if not done and doscan then
+ -- collect files in path (and cache the result)
+ local files = resolvers.simplescanfiles(pname,false,true)
for k=1,#wantedfiles do
local w = wantedfiles[k]
- local subpath = ''
- if files[w] then
- subpath = type(files[w]) == "table" and files[w][1] or files[w]
- end
- local fname = filejoin(ppname,subpath,w)
- if isreadable(fname) then
- if trace_detail then
- report_resolving("found '%s' by scanning",fname)
+ local subpath = files[w]
+ if not subpath or subpath == "" then
+ -- rootscan already done
+ elseif type(subpath) == "string" then
+ local fname = check_subpath(filejoin(pname,subpath,w))
+ if fname then
+ result[#result+1] = fname
+ done = true
+ if not allresults then
+ break
+ end
+ end
+ else
+ for i=1,#subpath do
+ local sp = subpath[i]
+ if sp == "" then
+ -- roottest already done
+ else
+ local fname = check_subpath(filejoin(pname,sp,w))
+ if fname then
+ result[#result+1] = fname
+ done = true
+ if not allresults then
+ break
+ end
+ end
+ end
+ end
+ if done and not allresults then
+ break
end
- result[#result+1] = fname
- done = true
- if not allresults then break end
end
end
- else
- -- no access needed for non existing path, speedup (esp in large tree with lots of fake)
end
end
+ else
+ -- no access needed for non existing path, speedup (esp in large tree with lots of fake)
end
end
- if not done and doscan then
- -- todo: slow path scanning ... although we now have tree:// supported in $TEXMF
- end
- if done and not allresults then break end
+ end
+ -- todo recursive scanning
+ if done and not allresults then
+ break
end
end
+ if #result > 0 then
+ return method, result
+ end
end
- for k=1,#result do
- local rk = collapsepath(result[k])
- result[k] = rk
- resolvers.registerintrees(rk) -- for tracing used files
+end
+
+local function find_onpath(filename,filetype,wantedfiles,allresults)
+ if trace_detail then
+ report_resolving("checking filename '%s', filetype '%s', wanted files '%s'",filename, filetype or '?',concat(wantedfiles," | "))
+ end
+ local result = { }
+ for k=1,#wantedfiles do
+ local fname = wantedfiles[k]
+ if fname and isreadable(fname) then
+ filename = fname
+ result[#result+1] = filejoin('.',fname)
+ if not allresults then
+ break
+ end
+ end
end
- if stamp then
- instance.found[stamp] = result
+ if #result > 0 then
+ return "onpath", result
end
- return result
end
--- -- -- begin of main file search routing -- -- --
-
-
-
-
-
-
-
+local function find_otherwise(filename,filetype,wantedfiles,allresults) -- other text files | any | whatever
+ local filelist = collect_files(wantedfiles)
+ local fl = filelist and filelist[1]
+ if fl then
+ return "otherwise", { resolvers.resolve(fl[3]) } -- filename
+ end
+end
+-- we could have a loop over the 6 functions but then we'd have to
+-- always analyze
+collect_instance_files = function(filename,askedformat,allresults) -- uses nested
+ askedformat = askedformat or ""
+ filename = collapsepath(filename)
+ if allresults then
+ -- no need for caching, only used for tracing
+ local filetype, wantedfiles = find_analyze(filename,askedformat)
+ local results = {
+ { find_direct (filename,true) },
+ { find_wildcard (filename,true) },
+ { find_qualified(filename,true) },
+ { find_intree (filename,filetype,wantedfiles,true) },
+ { find_onpath (filename,filetype,wantedfiles,true) },
+ { find_otherwise(filename,filetype,wantedfiles,true) },
+ }
+ local result, status, done = { }, { }, { }
+ for k, r in next, results do
+ local method, list = r[1], r[2]
+ if method and list then
+ for i=1,#list do
+ local c = collapsepath(list[i])
+ if not done[c] then
+ result[#result+1] = c
+ done[c] = true
+ end
+ status[#status+1] = format("%-10s: %s",method,c)
+ end
+ end
+ end
+ if trace_detail then
+ report_resolving("lookup status: %s",table.serialize(status,filename))
+ end
+ return result, status
+ else
+ local method, result, stamp, filetype, wantedfiles
+ if instance.remember then
+ stamp = format("%s--%s", filename, askedformat)
+ result = stamp and instance.found[stamp]
+ if result then
+ if trace_locating then
+ report_resolving("remembered file '%s'",filename)
+ end
+ return result
+ end
+ end
+ method, result = find_direct(filename)
+ if not result then
+ method, result = find_wildcard(filename)
+ if not result then
+ method, result = find_qualified(filename)
+ if not result then
+ filetype, wantedfiles = find_analyze(filename,askedformat)
+ method, result = find_intree(filename,filetype,wantedfiles)
+ if not result then
+ method, result = find_onpath(filename,filetype,wantedfiles)
+ if not result then
+ method, result = find_otherwise(filename,filetype,wantedfiles)
+ end
+ end
+ end
+ end
+ end
+ if result and #result > 0 then
+ local foundname = collapsepath(result[1])
+ resolvers.registerintrees(filename,askedformat,filetype,method,foundname)
+ result = { foundname }
+ else
+ result = { } -- maybe false
+ end
+ if stamp then
+ if trace_locating then
+ report_resolving("remembering file '%s'",filename)
+ end
+ instance.found[stamp] = result
+ end
+ return result
+ end
+end
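+
+-- so the single-result lookup order is: direct, wildcard, qualified, intree, onpath,
+-- otherwise; the first hit is remembered under the "filename--format" stamp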
-- -- -- end of main file search routing -- -- --
+
local function findfiles(filename,filetype,allresults)
- local result = collect_instance_files(filename,filetype or "",allresults)
- if #result == 0 then
+ local result, status = collect_instance_files(filename,filetype or "",allresults)
+ if not result or #result == 0 then
local lowered = lower(filename)
if filename ~= lowered then
- return collect_instance_files(lowered,filetype or "",allresults)
+ result, status = collect_instance_files(lowered,filetype or "",allresults)
end
end
- return result
+ return result or { }, status
end
function resolvers.findfiles(filename,filetype)
@@ -12370,6 +13646,10 @@ function resolvers.load(option)
return files and next(files) and true
end
+function resolvers.loadtime()
+ return statistics.elapsedtime(instance)
+end
+
local function report(str)
if trace_locating then
report_resolving(str) -- has already verbose
@@ -12383,6 +13663,9 @@ function resolvers.dowithfilesandreport(command, files, ...) -- will move
if trace_locating then
report('') -- ?
end
+ if type(files) == "string" then
+ files = { files }
+ end
for f=1,#files do
local file = files[f]
local result = command(file,...)
@@ -12540,6 +13823,8 @@ local cleanpath, findgivenfile, expansion = resolvers.cleanpath, resolvers.findg
local getenv = resolvers.getenv -- we can probably also use resolvers.expansion
local P, Cs, lpegmatch = lpeg.P, lpeg.Cs, lpeg.match
+-- getenv = function(...) return resolvers.getenv(...) end -- needs checking (definitions changes later on)
+
prefixes.environment = function(str)
return cleanpath(expansion(str))
end
@@ -12578,7 +13863,7 @@ end
prefixes.filename = function(str)
local fullname = findgivenfile(str) or ""
- return cleanpath(file.basename((fullname ~= "" and fullname) or str))
+ return cleanpath(file.basename((fullname ~= "" and fullname) or str)) -- no cleanpath needed here
end
prefixes.pathname = function(str)
@@ -12621,8 +13906,9 @@ function resolvers.allprefixes(separator)
end
local function _resolve_(method,target)
- if prefixes[method] then
- return prefixes[method](target)
+ local action = prefixes[method]
+ if action then
+ return action(target)
else
return method .. ":" .. target
end
@@ -12637,7 +13923,7 @@ end
local function resolve(str) -- use schemes, this one is then for the commandline only
local res = resolved[str]
if not res then
- res = gsub(str,"([a-z][a-z]+):([^ \"\';]*)",_resolve_)
+ res = gsub(str,"([a-z][a-z]+):([^ \"\';]*)",_resolve_) -- home:xx;selfautoparent:xx; etc
resolved[str] = res
abstract[res] = str
end
@@ -12797,7 +14083,7 @@ end
function generators.file(specification)
local path = specification.filename
- local content = resolvers.scanfiles(path)
+ local content = resolvers.scanfiles(path,false,true) -- scan once
resolvers.registerfilehash(path,content,true)
end
@@ -13152,6 +14438,18 @@ local archives = zip.archives
zip.registeredfiles = zip.registeredfiles or { }
local registeredfiles = zip.registeredfiles
+local limited = false
+
+directives.register("system.inputmode", function(v)
+ if not limited then
+ local i_limiter = io.i_limiter(v)
+ if i_limiter then
+ zip.open = i_limiter.protect(zip.open)
+ limited = true
+ end
+ end
+end)
+
local function validzip(str) -- todo: use url splitter
if not find(str,"^zip://") then
return "zip:///" .. str
@@ -13419,11 +14717,11 @@ function resolvers.finders.tree(specification)
end
function resolvers.locators.tree(specification)
- -- TH: resolvers.resolve() call was missing here
- local name = resolvers.resolve(specification.filename)
- if name ~= '' and lfs.isdir(name) then
+ local name = specification.filename
+ local realname = resolvers.resolve(name) -- no shortcut
+ if realname and realname ~= '' and lfs.isdir(realname) then
if trace_locating then
- report_trees("locator '%s' found",name)
+ report_trees("locator '%s' found",realname)
end
resolvers.appendhash('tree',name,false) -- don't cache
elseif trace_locating then
@@ -13437,6 +14735,8 @@ function resolvers.hashers.tree(specification)
report_trees("analysing '%s'",name)
end
resolvers.methodhandler("hashers",name)
+
+ resolvers.generators.file(specification)
end
resolvers.concatinators.tree = resolvers.concatinators.file
@@ -13596,8 +14896,15 @@ local function loaded(libpaths,name,simple)
end
package.loaders[2] = function(name) -- was [#package.loaders+1]
- if trace_locating then -- mode detail
- report_libraries("! locating '%s'",name)
+ if file.suffix(name) == "" then
+ name = file.addsuffix(name,"lua") -- maybe a list
+ if trace_locating then -- mode detail
+ report_libraries("! locating '%s' with forced suffix",name)
+ end
+ else
+ if trace_locating then -- mode detail
+ report_libraries("! locating '%s'",name)
+ end
end
for i=1,#libformats do
local format = libformats[i]
@@ -13804,7 +15111,13 @@ function resolvers.load_tree(tree,resolve)
-- Beware, we need to obey the relocatable autoparent so we
-- set TEXMFCNF to its raw value. This is somewhat tricky when
-- we run a mkii job from within. Therefore, in mtxrun, there
- -- is a resolve applied when we're in mkii/kpse mode.
+ -- is a resolve applied when we're in mkii/kpse mode or when
+ -- --resolve is passed to mtxrun. Maybe we should also set the
+    -- local AUTOPARENT etc. although these are always set anew.
+
+ if resolve then
+ resolvers.luacnfspec = resolvers.resolve(resolvers.luacnfspec)
+ end
setenv('SELFAUTOPARENT', newroot)
setenv('SELFAUTODIR', newtree)
@@ -13812,11 +15125,12 @@ function resolvers.load_tree(tree,resolve)
setenv('TEXROOT', newroot)
setenv('TEXOS', texos)
setenv('TEXMFOS', texmfos)
- setenv('TEXMFCNF', resolvers.luacnfspec, not resolve)
- setenv("PATH", newpath .. io.pathseparator .. getenv("PATH"))
+ setenv('TEXMFCNF', resolvers.luacnfspec,true) -- already resolved
+ setenv('PATH', newpath .. io.pathseparator .. getenv('PATH'))
report_tds("changing from root '%s' to '%s'",oldroot,newroot)
- report_tds("prepending '%s' to binary path",newpath)
+ report_tds("prepending '%s' to PATH",newpath)
+ report_tds("setting TEXMFCNF to '%s'",resolvers.luacnfspec)
report_tds()
end
end
@@ -13837,6 +15151,7 @@ if not modules then modules = { } end modules ['data-lst'] = {
-- used in mtxrun, can be loaded later .. todo
local find, concat, upper, format = string.find, table.concat, string.upper, string.format
+local fastcopy, sortedpairs = table.fastcopy, table.sortedpairs
resolvers.listers = resolvers.listers or { }
@@ -13867,10 +15182,10 @@ function resolvers.listers.variables(pattern)
end
end
end
- local env = table.fastcopy(environment)
- local var = table.fastcopy(variables)
- local exp = table.fastcopy(expansions)
- for key, value in table.sortedpairs(configured) do
+ local env = fastcopy(environment)
+ local var = fastcopy(variables)
+ local exp = fastcopy(expansions)
+ for key, value in sortedpairs(configured) do
if key ~= "" and (pattern == "" or find(upper(key),pattern)) then
report_lists(key)
report_lists(" env: %s",tabstr(rawget(environment,key)) or "unset")
@@ -13879,9 +15194,9 @@ function resolvers.listers.variables(pattern)
report_lists(" res: %s",resolvers.resolve(expansions[key]) or "unset")
end
end
- instance.environment = table.fastcopy(env)
- instance.variables = table.fastcopy(var)
- instance.expansions = table.fastcopy(exp)
+ instance.environment = fastcopy(env)
+ instance.variables = fastcopy(var)
+ instance.expansions = fastcopy(exp)
end
function resolvers.listers.configurations(report)
@@ -14168,8 +15483,8 @@ own = { } -- not local, might change
own.libs = { -- order can be made better
'l-string.lua',
- 'l-lpeg.lua',
'l-table.lua',
+ 'l-lpeg.lua',
'l-io.lua',
'l-number.lua',
'l-set.lua',
@@ -14240,7 +15555,7 @@ end
-- End of hack.
-local format, gsub, gmatch, match = string.format, string.gsub, string.gmatch, string.match
+local format, gsub, gmatch, match, find = string.format, string.gsub, string.gmatch, string.match, string.find
local concat = table.concat
own.name = (environment and environment.ownname) or arg[0] or 'mtxrun.lua'
@@ -14362,7 +15677,7 @@ local helpinfo = [[
--resolve resolve prefixed arguments
--ctxlua run internally (using preloaded libs)
--internal run script using built in libraries (same as --ctxlua)
---locate locate given filename
+--locate locate given filename in database (default) or system (--first --all --detail)
--autotree use texmf tree cf. env 'texmfstart_tree' or 'texmfstarttree'
--tree=pathtotree use given texmf tree (default file: 'setuptex.tmf')
@@ -14410,7 +15725,7 @@ local helpinfo = [[
local application = logs.application {
name = "mtxrun",
- banner = "ConTeXt TDS Runner Tool 1.30",
+ banner = "ConTeXt TDS Runner Tool 1.31",
helpinfo = helpinfo,
}
@@ -14455,12 +15770,12 @@ runners.registered = {
xmltools = { 'xmltools.rb', true },
-- luatools = { 'luatools.lua', true },
mtxtools = { 'mtxtools.rb', true },
- pdftrimwhite = { 'pdftrimwhite.pl', false }
+ pdftrimwhite = { 'pdftrimwhite.pl', false },
}
runners.launchers = {
windows = { },
- unix = { }
+ unix = { },
}
-- like runners.libpath("framework"): looks on script's subpath
@@ -14487,7 +15802,7 @@ function runners.prepare()
end
local touchname = environment.argument("iftouched")
if type(touchname) == "string" and touchname ~= "" then
- local oldname, newname = string.split(touchname, ",")
+ local oldname, newname = string.splitup(touchname, ",")
if oldname and newname and oldname ~= "" and newname ~= "" then
if not file.needs_updating(oldname,newname) then
if e_verbose then
@@ -14521,11 +15836,11 @@ function runners.execute_script(fullname,internal,nosplit)
if path ~= "" then
result = fullname
elseif name then
- name = name:gsub("^int[%a]*:",function()
+ name = gsub(name,"^int[%a]*:",function()
internal = true
return ""
end )
- name = name:gsub("^script:","")
+ name = gsub(name,"^script:","")
if suffix == "" and runners.registered[name] and runners.registered[name][1] then
name = runners.registered[name][1]
suffix = file.extname(name)
@@ -14555,13 +15870,15 @@ function runners.execute_script(fullname,internal,nosplit)
environment.ownscript = result
dofile(result)
else
- local quotedresult = result
- if string.match(result,' ') and not string.match(result,"^\".*\"$") then quotedresult = '"' .. result .. '"' end
local binary = runners.applications[file.extname(result)]
+ result = string.quoted(string.unquoted(result))
+ -- if string.match(result,' ') and not string.match(result,"^\".*\"$") then
+ -- result = '"' .. result .. '"'
+ -- end
if binary and binary ~= "" then
- quotedresult = binary .. " " .. quotedresult
+ result = binary .. " " .. result
end
- local command = quotedresult .. " " .. environment.reconstructcommandline(environment.arguments_after,noquote)
+ local command = result .. " " .. environment.reconstructcommandline(environment.arguments_after,noquote)
if e_verbose then
report()
report("executing: %s",command)
@@ -14606,7 +15923,7 @@ function runners.execute_program(fullname)
local before, after = environment.splitarguments(fullname)
for k=1,#after do after[k] = resolvers.resolve(after[k]) end
environment.initializearguments(after)
- fullname = fullname:gsub("^bin:","")
+ fullname = gsub(fullname,"^bin:","")
local command = fullname .. " " .. (environment.reconstructcommandline(after or "",noquote) or "")
report()
report("executing: %s",command)
@@ -14667,10 +15984,24 @@ function runners.resolve_string(filename)
end
end
-function runners.locate_file(filename)
- -- differs from texmfstart where locate appends .com .exe .bat ... todo
+-- differs from texmfstart where locate appends .com .exe .bat ... todo
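+--
+-- e.g. (sketch):
+--
+--   mtxrun --locate context.mkiv                 -- database lookup (default)
+--   mtxrun --locate --all --detail context.mkiv  -- all matches plus lookup method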
+
+function runners.locate_file(filename) -- was given file but only searches in tree
if filename and filename ~= "" then
- runners.report_location(resolvers.findgivenfile(filename))
+ if environment.argument("first") then
+ runners.report_location(resolvers.findfile(filename))
+ -- resolvers.dowithfilesandreport(resolvers.findfile,filename)
+ elseif environment.argument("all") then
+ local result, status = resolvers.findfiles(filename)
+ if status and environment.argument("detail") then
+ runners.report_location(status)
+ else
+ runners.report_location(result)
+ end
+ else
+ runners.report_location(resolvers.findgivenfile(filename))
+ -- resolvers.dowithfilesandreport(resolvers.findgivenfile,filename)
+ end
end
end
@@ -14679,12 +16010,12 @@ function runners.locate_platform()
end
function runners.report_location(result)
- if e_verbose then
- reportline()
- if result and result ~= "" then
- report(result)
- else
- report("not found")
+ if type(result) == "table" then
+ for i=1,#result do
+ if i > 1 then
+ io.write("\n")
+ end
+ io.write(result[i])
end
else
io.write(result)
@@ -14742,8 +16073,8 @@ function resolvers.launch(str)
end
function runners.launch_file(filename)
- instance.allresults = true
trackers.enable("resolvers.locating")
+ local allresults = environment.arguments["all"]
local pattern = environment.arguments["pattern"]
if not pattern or pattern == "" then
pattern = filename
@@ -14751,15 +16082,15 @@ function runners.launch_file(filename)
if not pattern or pattern == "" then
report("provide name or --pattern=")
else
- local t = resolvers.findfiles(pattern)
+ local t = resolvers.findfiles(pattern,nil,allresults)
if not t or #t == 0 then
- t = resolvers.findfiles("*/" .. pattern)
+ t = resolvers.findfiles("*/" .. pattern,nil,allresults)
end
if not t or #t == 0 then
- t = resolvers.findfiles("*/" .. pattern .. "*")
+ t = resolvers.findfiles("*/" .. pattern .. "*",nil,allresults)
end
if t and #t > 0 then
- if environment.arguments["all"] then
+ if allresults then
for _, v in pairs(t) do
report("launching %s", v)
resolvers.launch(v)
@@ -14774,6 +16105,11 @@ function runners.launch_file(filename)
end
end
+local mtxprefixes = {
+ { "^mtx%-", "mtx-" },
+ { "^mtx%-t%-", "mtx-t-" },
+}
+
function runners.find_mtx_script(filename)
local function found(name)
local path = file.dirname(name)
@@ -14799,24 +16135,27 @@ function runners.find_mtx_script(filename)
return fullname
end
-- mtx- prefix checking
- local mtxprefix = (filename:find("^mtx%-") and "") or "mtx-"
- -- context namespace, mtx-<filename>
- fullname = mtxprefix .. filename
- fullname = found(fullname) or resolvers.findfile(fullname)
- if fullname and fullname ~= "" then
- return fullname
- end
- -- context namespace, mtx-<filename>s
- fullname = mtxprefix .. basename .. "s" .. "." .. suffix
- fullname = found(fullname) or resolvers.findfile(fullname)
- if fullname and fullname ~= "" then
- return fullname
- end
- -- context namespace, mtx-<filename minus trailing s>
- fullname = mtxprefix .. basename:gsub("s$","") .. "." .. suffix
- fullname = found(fullname) or resolvers.findfile(fullname)
- if fullname and fullname ~= "" then
- return fullname
+ for i=1,#mtxprefixes do
+ local mtxprefix = mtxprefixes[i]
+ mtxprefix = find(filename,mtxprefix[1]) and "" or mtxprefix[2]
+ -- context namespace, mtx-<filename>
+ fullname = mtxprefix .. filename
+ fullname = found(fullname) or resolvers.findfile(fullname)
+ if fullname and fullname ~= "" then
+ return fullname
+ end
+ -- context namespace, mtx-<filename>s
+ fullname = mtxprefix .. basename .. "s" .. "." .. suffix
+ fullname = found(fullname) or resolvers.findfile(fullname)
+ if fullname and fullname ~= "" then
+ return fullname
+ end
+ -- context namespace, mtx-<filename minus trailing s>
+ fullname = mtxprefix .. gsub(basename,"s$","") .. "." .. suffix
+ fullname = found(fullname) or resolvers.findfile(fullname)
+ if fullname and fullname ~= "" then
+ return fullname
+ end
end
-- context namespace, just <filename>
fullname = resolvers.findfile(filename)
@@ -14963,9 +16302,11 @@ instance.lsrmode = environment.argument("lsr") or false
local is_mkii_stub = runners.registered[file.removesuffix(file.basename(filename))]
-if environment.argument("usekpse") or environment.argument("forcekpse") or is_mkii_stub then
+local e_argument = environment.argument
+
+if e_argument("usekpse") or e_argument("forcekpse") or is_mkii_stub then
- resolvers.load_tree(environment.argument('tree'),true) -- force resolve of TEXMFCNF
+ resolvers.load_tree(e_argument('tree'),true) -- force resolve of TEXMFCNF
os.setenv("engine","")
os.setenv("progname","")
@@ -14978,7 +16319,7 @@ if environment.argument("usekpse") or environment.argument("forcekpse") or is_mk
other = "other text files",
}
- local progname = environment.argument("progname") or 'context'
+ local progname = e_argument("progname") or 'context'
local function kpse_initialized()
texconfig.kpse_init = true
@@ -14993,7 +16334,7 @@ if environment.argument("usekpse") or environment.argument("forcekpse") or is_mk
local findfile = resolvers.findfile
local showpath = resolvers.showpath
- if environment.argument("forcekpse") then
+ if e_argument("forcekpse") then
function resolvers.findfile(name,kind)
return (kpse_initialized():find_file(resolvers.cleanpath(name),(kind ~= "" and (remapper[kind] or kind)) or "tex") or "") or ""
@@ -15002,7 +16343,7 @@ if environment.argument("usekpse") or environment.argument("forcekpse") or is_mk
return (kpse_initialized():show_path(name)) or ""
end
- elseif environment.argument("usekpse") or is_mkii_stub then
+ elseif e_argument("usekpse") or is_mkii_stub then
resolvers.load()
@@ -15041,12 +16382,12 @@ else
end
end
- resolvers.load_tree(environment.argument('tree'))
+ resolvers.load_tree(e_argument('tree'),e_argument("resolve"))
end
-if environment.argument("selfmerge") then
+if e_argument("selfmerge") then
-- embed used libraries
@@ -15056,27 +16397,27 @@ if environment.argument("selfmerge") then
utilities.merger.selfmerge(own.name,own.libs,{ found })
end
-elseif environment.argument("selfclean") then
+elseif e_argument("selfclean") then
-- remove embedded libraries
runners.loadbase()
utilities.merger.selfclean(own.name)
-elseif environment.argument("selfupdate") then
+elseif e_argument("selfupdate") then
runners.loadbase()
trackers.enable("resolvers.locating")
resolvers.updatescript(own.name,"mtxrun")
-elseif environment.argument("ctxlua") or environment.argument("internal") then
+elseif e_argument("ctxlua") or e_argument("internal") then
-- run a script by loading it (using libs)
runners.loadbase()
ok = runners.execute_script(filename,true)
-elseif environment.argument("script") or environment.argument("scripts") then
+elseif e_argument("script") or e_argument("scripts") then
-- run a script by loading it (using libs), pass args
@@ -15087,109 +16428,110 @@ elseif environment.argument("script") or environment.argument("scripts") then
ok = runners.execute_ctx_script(filename)
end
-elseif environment.argument("execute") then
+elseif e_argument("execute") then
-- execute script
runners.loadbase()
ok = runners.execute_script(filename)
-elseif environment.argument("direct") then
+elseif e_argument("direct") then
-- equals bin:
runners.loadbase()
ok = runners.execute_program(filename)
-elseif environment.argument("edit") then
+elseif e_argument("edit") then
-- edit file
runners.loadbase()
runners.edit_script(filename)
-elseif environment.argument("launch") then
+elseif e_argument("launch") then
runners.loadbase()
runners.launch_file(filename)
-elseif environment.argument("makestubs") then
+elseif e_argument("makestubs") then
    -- make stubs (deprecated)
runners.handle_stubs(true)
-elseif environment.argument("removestubs") then
+elseif e_argument("removestubs") then
    -- remove stubs (deprecated)
runners.loadbase()
runners.handle_stubs(false)
-elseif environment.argument("resolve") then
+elseif e_argument("resolve") then
-- resolve string
runners.loadbase()
runners.resolve_string(filename)
-elseif environment.argument("locate") then
+elseif e_argument("locate") then
- -- locate file
+ -- locate file (only database)
runners.loadbase()
runners.locate_file(filename)
-elseif environment.argument("platform") or environment.argument("show-platform") then
+elseif e_argument("platform") or e_argument("show-platform") then
-- locate platform
runners.loadbase()
runners.locate_platform()
-elseif environment.argument("prefixes") then
+elseif e_argument("prefixes") then
runners.loadbase()
runners.prefixes()
-elseif environment.argument("timedrun") then
+elseif e_argument("timedrun") then
-- locate platform
runners.loadbase()
runners.timedrun(filename)
-elseif environment.argument("variables") or environment.argument("show-variables") or environment.argument("expansions") or environment.argument("show-expansions") then
+elseif e_argument("variables") or e_argument("show-variables") or e_argument("expansions") or e_argument("show-expansions") then
-- luatools: runners.execute_ctx_script("mtx-base","--expansions",filename)
resolvers.load("nofiles")
- resolvers.listers.variables(environment.argument("pattern"))
+ resolvers.listers.variables(e_argument("pattern"))
-elseif environment.argument("configurations") or environment.argument("show-configurations") then
+elseif e_argument("configurations") or e_argument("show-configurations") then
-- luatools: runners.execute_ctx_script("mtx-base","--configurations",filename)
resolvers.load("nofiles")
resolvers.listers.configurations()
-elseif environment.argument("find-file") then
+elseif e_argument("find-file") then
-- luatools: runners.execute_ctx_script("mtx-base","--find-file",filename)
resolvers.load()
- local e_pattern = environment.argument("pattern")
- local e_format = environment.argument("format")
+ local e_all = e_argument("all")
+ local e_pattern = e_argument("pattern")
+ local e_format = e_argument("format")
+ local finder = e_all and resolvers.findfiles or resolvers.findfile
if not e_pattern then
runners.register_arguments(filename)
environment.initializearguments(environment.arguments_after)
- resolvers.dowithfilesandreport(resolvers.findfiles,environment.files,e_format)
+ resolvers.dowithfilesandreport(finder,environment.files,e_format)
elseif type(e_pattern) == "string" then
- instance.allresults = true -- brrrr
- resolvers.dowithfilesandreport(resolvers.findfiles,{ e_pattern }, e_format)
+ resolvers.dowithfilesandreport(finder,{ e_pattern },e_format)
end
-elseif environment.argument("find-path") then
+elseif e_argument("find-path") then
-- luatools: runners.execute_ctx_script("mtx-base","--find-path",filename)
@@ -15201,7 +16543,7 @@ elseif environment.argument("find-path") then
print(path)
end
-elseif environment.argument("expand-braces") then
+elseif e_argument("expand-braces") then
-- luatools: runners.execute_ctx_script("mtx-base","--expand-braces",filename)
@@ -15210,7 +16552,7 @@ elseif environment.argument("expand-braces") then
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.expandbraces, environment.files)
-elseif environment.argument("expand-path") then
+elseif e_argument("expand-path") then
-- luatools: runners.execute_ctx_script("mtx-base","--expand-path",filename)
@@ -15219,7 +16561,7 @@ elseif environment.argument("expand-path") then
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.expandpath, environment.files)
-elseif environment.argument("expand-var") or environment.argument("expand-variable") then
+elseif e_argument("expand-var") or e_argument("expand-variable") then
-- luatools: runners.execute_ctx_script("mtx-base","--expand-var",filename)
@@ -15228,7 +16570,7 @@ elseif environment.argument("expand-var") or environment.argument("expand-variab
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.expansion, environment.files)
-elseif environment.argument("show-path") or environment.argument("path-value") then
+elseif e_argument("show-path") or e_argument("path-value") then
-- luatools: runners.execute_ctx_script("mtx-base","--show-path",filename)
@@ -15237,7 +16579,7 @@ elseif environment.argument("show-path") or environment.argument("path-value") t
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.showpath, environment.files)
-elseif environment.argument("var-value") or environment.argument("show-value") then
+elseif e_argument("var-value") or e_argument("show-value") then
-- luatools: runners.execute_ctx_script("mtx-base","--show-value",filename)
@@ -15246,28 +16588,36 @@ elseif environment.argument("var-value") or environment.argument("show-value") t
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.variable,environment.files)
-elseif environment.argument("format-path") then
+elseif e_argument("format-path") then
-- luatools: runners.execute_ctx_script("mtx-base","--format-path",filename)
resolvers.load()
report(caches.getwritablepath("format"))
-elseif environment.argument("pattern") then
+elseif e_argument("pattern") then
-- luatools
- runners.execute_ctx_script("mtx-base","--pattern='" .. environment.argument("pattern") .. "'",filename)
+ runners.execute_ctx_script("mtx-base","--pattern='" .. e_argument("pattern") .. "'",filename)
-elseif environment.argument("generate") then
+elseif e_argument("generate") then
-- luatools
- instance.renewcache = true
- trackers.enable("resolvers.locating")
- resolvers.load()
+ if filename and filename ~= "" then
+ resolvers.load("nofiles")
+ trackers.enable("resolvers.locating")
+ resolvers.renew(filename)
+ else
+ instance.renewcache = true
+ trackers.enable("resolvers.locating")
+ resolvers.load()
+ end
-elseif environment.argument("make") or environment.argument("ini") or environment.argument("compile") then
+ e_verbose = true
+
+elseif e_argument("make") or e_argument("ini") or e_argument("compile") then
-- luatools: runners.execute_ctx_script("mtx-base","--make",filename)
@@ -15275,29 +16625,33 @@ elseif environment.argument("make") or environment.argument("ini") or environmen
trackers.enable("resolvers.locating")
environment.make_format(filename)
-elseif environment.argument("run") then
+elseif e_argument("run") then
-- luatools
runners.execute_ctx_script("mtx-base","--run",filename)
-elseif environment.argument("fmt") then
+elseif e_argument("fmt") then
-- luatools
runners.execute_ctx_script("mtx-base","--fmt",filename)
-elseif environment.argument("help") and filename=='base' then
+elseif e_argument("help") and filename=='base' then
-- luatools
runners.execute_ctx_script("mtx-base","--help")
-elseif environment.argument("help") or filename=='help' or filename == "" then
+elseif e_argument("version") then
+
+ application.version()
+
+elseif e_argument("help") or filename=='help' or filename == "" then
application.help()
-elseif filename:find("^bin:") then
+elseif find(filename,"^bin:") then
runners.loadbase()
ok = runners.execute_program(filename)
diff --git a/Master/bin/i386-linux/mtxrun b/Master/bin/i386-linux/mtxrun
index 7adcd3023a8..335c4fcb9e0 100755
--- a/Master/bin/i386-linux/mtxrun
+++ b/Master/bin/i386-linux/mtxrun
@@ -160,509 +160,6 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['l-lpeg'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local lpeg = require("lpeg")
-
-local type = type
-
--- Beware, we predefine a bunch of patterns here and one reason for doing so
--- is that we get consistent behaviour in some of the visualizers.
-
-lpeg.patterns = lpeg.patterns or { } -- so that we can share
-local patterns = lpeg.patterns
-
-local P, R, S, V, match = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.match
-local Ct, C, Cs, Cc = lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc
-local lpegtype = lpeg.type
-
-local utfcharacters = string.utfcharacters
-local utfgmatch = unicode and unicode.utf8.gmatch
-
-local anything = P(1)
-local endofstring = P(-1)
-local alwaysmatched = P(true)
-
-patterns.anything = anything
-patterns.endofstring = endofstring
-patterns.beginofstring = alwaysmatched
-patterns.alwaysmatched = alwaysmatched
-
-local digit, sign = R('09'), S('+-')
-local cr, lf, crlf = P("\r"), P("\n"), P("\r\n")
-local newline = crlf + cr + lf
-local escaped = P("\\") * anything
-local squote = P("'")
-local dquote = P('"')
-local space = P(" ")
-
-local utfbom_32_be = P('\000\000\254\255')
-local utfbom_32_le = P('\255\254\000\000')
-local utfbom_16_be = P('\255\254')
-local utfbom_16_le = P('\254\255')
-local utfbom_8 = P('\239\187\191')
-local utfbom = utfbom_32_be + utfbom_32_le
- + utfbom_16_be + utfbom_16_le
- + utfbom_8
-local utftype = utfbom_32_be / "utf-32-be" + utfbom_32_le / "utf-32-le"
- + utfbom_16_be / "utf-16-be" + utfbom_16_le / "utf-16-le"
- + utfbom_8 / "utf-8" + alwaysmatched / "unknown"
-
-local utf8next = R("\128\191")
-
-patterns.utf8one = R("\000\127")
-patterns.utf8two = R("\194\223") * utf8next
-patterns.utf8three = R("\224\239") * utf8next * utf8next
-patterns.utf8four = R("\240\244") * utf8next * utf8next * utf8next
-patterns.utfbom = utfbom
-patterns.utftype = utftype
-
-local utf8char = patterns.utf8one + patterns.utf8two + patterns.utf8three + patterns.utf8four
-local validutf8char = utf8char^0 * endofstring * Cc(true) + Cc(false)
-
-patterns.utf8 = utf8char
-patterns.utf8char = utf8char
-patterns.validutf8 = validutf8char
-patterns.validutf8char = validutf8char
-
-patterns.digit = digit
-patterns.sign = sign
-patterns.cardinal = sign^0 * digit^1
-patterns.integer = sign^0 * digit^1
-patterns.float = sign^0 * digit^0 * P('.') * digit^1
-patterns.cfloat = sign^0 * digit^0 * P(',') * digit^1
-patterns.number = patterns.float + patterns.integer
-patterns.cnumber = patterns.cfloat + patterns.integer
-patterns.oct = P("0") * R("07")^1
-patterns.octal = patterns.oct
-patterns.HEX = P("0x") * R("09","AF")^1
-patterns.hex = P("0x") * R("09","af")^1
-patterns.hexadecimal = P("0x") * R("09","AF","af")^1
-patterns.lowercase = R("az")
-patterns.uppercase = R("AZ")
-patterns.letter = patterns.lowercase + patterns.uppercase
-patterns.space = space
-patterns.tab = P("\t")
-patterns.spaceortab = patterns.space + patterns.tab
-patterns.eol = S("\n\r")
-patterns.spacer = S(" \t\f\v") -- + string.char(0xc2, 0xa0) if we want utf (cf mail roberto)
-patterns.newline = newline
-patterns.emptyline = newline^1
-patterns.nonspacer = 1 - patterns.spacer
-patterns.whitespace = patterns.eol + patterns.spacer
-patterns.nonwhitespace = 1 - patterns.whitespace
-patterns.equal = P("=")
-patterns.comma = P(",")
-patterns.commaspacer = P(",") * patterns.spacer^0
-patterns.period = P(".")
-patterns.colon = P(":")
-patterns.semicolon = P(";")
-patterns.underscore = P("_")
-patterns.escaped = escaped
-patterns.squote = squote
-patterns.dquote = dquote
-patterns.nosquote = (escaped + (1-squote))^0
-patterns.nodquote = (escaped + (1-dquote))^0
-patterns.unsingle = (squote/"") * patterns.nosquote * (squote/"")
-patterns.undouble = (dquote/"") * patterns.nodquote * (dquote/"")
-patterns.unquoted = patterns.undouble + patterns.unsingle -- more often undouble
-patterns.unspacer = ((patterns.spacer^1)/"")^0
-
-patterns.somecontent = (anything - newline - space)^1 -- (utf8char - newline - space)^1
-patterns.beginline = #(1-newline)
-
-local unquoted = Cs(patterns.unquoted * endofstring) -- not C
-
-function string.unquoted(str)
- return match(unquoted,str) or str
-end
-
-
-function lpeg.anywhere(pattern) --slightly adapted from website
- return P { P(pattern) + 1 * V(1) } -- why so complex?
-end
-
-function lpeg.splitter(pattern, action)
- return (((1-P(pattern))^1)/action+1)^0
-end
-
-local splitters_s, splitters_m = { }, { }
-
-local function splitat(separator,single)
- local splitter = (single and splitters_s[separator]) or splitters_m[separator]
- if not splitter then
- separator = P(separator)
- local other = C((1 - separator)^0)
- if single then
- local any = anything
- splitter = other * (separator * C(any^0) + "") -- ?
- splitters_s[separator] = splitter
- else
- splitter = other * (separator * other)^0
- splitters_m[separator] = splitter
- end
- end
- return splitter
-end
-
-lpeg.splitat = splitat
-
-
-local cache = { }
-
-function lpeg.split(separator,str)
- local c = cache[separator]
- if not c then
- c = Ct(splitat(separator))
- cache[separator] = c
- end
- return match(c,str)
-end
-
-function string.split(str,separator)
- local c = cache[separator]
- if not c then
- c = Ct(splitat(separator))
- cache[separator] = c
- end
- return match(c,str)
-end
-
-local spacing = patterns.spacer^0 * newline -- sort of strip
-local empty = spacing * Cc("")
-local nonempty = Cs((1-spacing)^1) * spacing^-1
-local content = (empty + nonempty)^1
-
-patterns.textline = content
-
-
-local linesplitter = Ct(splitat(newline))
-
-patterns.linesplitter = linesplitter
-
-function string.splitlines(str)
- return match(linesplitter,str)
-end
-
-local utflinesplitter = utfbom^-1 * Ct(splitat(newline))
-
-patterns.utflinesplitter = utflinesplitter
-
-function string.utfsplitlines(str)
- return match(utflinesplitter,str)
-end
-
-
-local cache = { }
-
-function lpeg.checkedsplit(separator,str)
- local c = cache[separator]
- if not c then
- separator = P(separator)
- local other = C((1 - separator)^1)
- c = Ct(separator^0 * other * (separator^1 * other)^0)
- cache[separator] = c
- end
- return match(c,str)
-end
-
-function string.checkedsplit(str,separator)
- local c = cache[separator]
- if not c then
- separator = P(separator)
- local other = C((1 - separator)^1)
- c = Ct(separator^0 * other * (separator^1 * other)^0)
- cache[separator] = c
- end
- return match(c,str)
-end
-
-
-local f1 = string.byte
-
-local function f2(s) local c1, c2 = f1(s,1,2) return c1 * 64 + c2 - 12416 end
-local function f3(s) local c1, c2, c3 = f1(s,1,3) return (c1 * 64 + c2) * 64 + c3 - 925824 end
-local function f4(s) local c1, c2, c3, c4 = f1(s,1,4) return ((c1 * 64 + c2) * 64 + c3) * 64 + c4 - 63447168 end
-
-local utf8byte = patterns.utf8one/f1 + patterns.utf8two/f2 + patterns.utf8three/f3 + patterns.utf8four/f4
-
-patterns.utf8byte = utf8byte
-
-
-
-local cache = { }
-
-function lpeg.stripper(str)
- if type(str) == "string" then
- local s = cache[str]
- if not s then
- s = Cs(((S(str)^1)/"" + 1)^0)
- cache[str] = s
- end
- return s
- else
- return Cs(((str^1)/"" + 1)^0)
- end
-end
-
-local cache = { }
-
-function lpeg.keeper(str)
- if type(str) == "string" then
- local s = cache[str]
- if not s then
- s = Cs((((1-S(str))^1)/"" + 1)^0)
- cache[str] = s
- end
- return s
- else
- return Cs((((1-str)^1)/"" + 1)^0)
- end
-end
-
-function lpeg.frontstripper(str) -- or pattern (yet undocumented)
- return (P(str) + P(true)) * Cs(P(1)^0)
-end
-
-function lpeg.endstripper(str) -- or pattern (yet undocumented)
- return Cs((1 - P(str) * P(-1))^0)
-end
-
--- Just for fun I looked at the used bytecode and
--- p = (p and p + pp) or pp gets one more (testset).
-
-function lpeg.replacer(one,two)
- if type(one) == "table" then
- local no = #one
- if no > 0 then
- local p
- for i=1,no do
- local o = one[i]
- local pp = P(o[1]) / o[2]
- if p then
- p = p + pp
- else
- p = pp
- end
- end
- return Cs((p + 1)^0)
- end
- else
- two = two or ""
- return Cs((P(one)/two + 1)^0)
- end
-end
-
-local splitters_f, splitters_s = { }, { }
-
-function lpeg.firstofsplit(separator) -- always return value
- local splitter = splitters_f[separator]
- if not splitter then
- separator = P(separator)
- splitter = C((1 - separator)^0)
- splitters_f[separator] = splitter
- end
- return splitter
-end
-
-function lpeg.secondofsplit(separator) -- nil if not split
- local splitter = splitters_s[separator]
- if not splitter then
- separator = P(separator)
- splitter = (1 - separator)^0 * separator * C(anything^0)
- splitters_s[separator] = splitter
- end
- return splitter
-end
-
-function lpeg.balancer(left,right)
- left, right = P(left), P(right)
- return P { left * ((1 - left - right) + V(1))^0 * right }
-end
-
-
-
-local nany = utf8char/""
-
-function lpeg.counter(pattern)
- pattern = Cs((P(pattern)/" " + nany)^0)
- return function(str)
- return #match(pattern,str)
- end
-end
-
-if utfgmatch then
-
- function lpeg.count(str,what) -- replaces string.count
- if type(what) == "string" then
- local n = 0
- for _ in utfgmatch(str,what) do
- n = n + 1
- end
- return n
- else -- 4 times slower but still faster than / function
- return #match(Cs((P(what)/" " + nany)^0),str)
- end
- end
-
-else
-
- local cache = { }
-
- function lpeg.count(str,what) -- replaces string.count
- if type(what) == "string" then
- local p = cache[what]
- if not p then
- p = Cs((P(what)/" " + nany)^0)
- cache[p] = p
- end
- return #match(p,str)
- else -- 4 times slower but still faster than / function
- return #match(Cs((P(what)/" " + nany)^0),str)
- end
- end
-
-end
-
-local patterns_escapes = { -- also defines in l-string
- ["%"] = "%%",
- ["."] = "%.",
- ["+"] = "%+", ["-"] = "%-", ["*"] = "%*",
- ["["] = "%[", ["]"] = "%]",
- ["("] = "%)", [")"] = "%)",
- -- ["{"] = "%{", ["}"] = "%}"
- -- ["^"] = "%^", ["$"] = "%$",
-}
-
-local simple_escapes = { -- also defines in l-string
- ["-"] = "%-",
- ["."] = "%.",
- ["?"] = ".",
- ["*"] = ".*",
-}
-
-local p = Cs((S("-.+*%()[]") / patterns_escapes + anything)^0)
-local s = Cs((S("-.+*%()[]") / simple_escapes + anything)^0)
-
-function string.escapedpattern(str,simple)
- return match(simple and s or p,str)
-end
-
--- utf extensies
-
-lpeg.UP = lpeg.P
-
-if utfcharacters then
-
- function lpeg.US(str)
- local p
- for uc in utfcharacters(str) do
- if p then
- p = p + P(uc)
- else
- p = P(uc)
- end
- end
- return p
- end
-
-
-elseif utfgmatch then
-
- function lpeg.US(str)
- local p
- for uc in utfgmatch(str,".") do
- if p then
- p = p + P(uc)
- else
- p = P(uc)
- end
- end
- return p
- end
-
-else
-
- function lpeg.US(str)
- local p
- local f = function(uc)
- if p then
- p = p + P(uc)
- else
- p = P(uc)
- end
- end
- match((utf8char/f)^0,str)
- return p
- end
-
-end
-
-local range = Cs(utf8byte) * (Cs(utf8byte) + Cc(false))
-
-local utfchar = unicode and unicode.utf8 and unicode.utf8.char
-
-function lpeg.UR(str,more)
- local first, last
- if type(str) == "number" then
- first = str
- last = more or first
- else
- first, last = match(range,str)
- if not last then
- return P(str)
- end
- end
- if first == last then
- return P(str)
- elseif utfchar and last - first < 8 then -- a somewhat arbitrary criterium
- local p
- for i=first,last do
- if p then
- p = p + P(utfchar(i))
- else
- p = P(utfchar(i))
- end
- end
- return p -- nil when invalid range
- else
- local f = function(b)
- return b >= first and b <= last
- end
- return utf8byte / f -- nil when invalid range
- end
-end
-
-
-
-function lpeg.oneof(list,...) -- lpeg.oneof("elseif","else","if","then")
- if type(list) ~= "table" then
- list = { list, ... }
- end
- -- sort(list) -- longest match first
- local p = P(list[1])
- for l=2,#list do
- p = p + P(list[l])
- end
- return p
-end
-
-function lpeg.is_lpeg(p)
- return p and lpegtype(p) == "pattern"
-end
-
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
if not modules then modules = { } end modules ['l-table'] = {
version = 1.001,
comment = "companion to luat-lib.mkiv",
@@ -675,11 +172,12 @@ local type, next, tostring, tonumber, ipairs, table, string = type, next, tostri
local concat, sort, insert, remove = table.concat, table.sort, table.insert, table.remove
local format, find, gsub, lower, dump, match = string.format, string.find, string.gsub, string.lower, string.dump, string.match
local getmetatable, setmetatable = getmetatable, setmetatable
+local getinfo = debug.getinfo
-- Starting with version 5.2 Lua no longer provides ipairs, which makes
-- sense. As we already used the for loop and # in most places the
-- impact on ConTeXt was not that large; the remaining ipairs already
--- have been replaced. In a similar fashio we also hardly used pairs.
+-- have been replaced. In a similar fashion we also hardly used pairs.
--
-- Just in case, we provide the fallbacks as discussed in Programming
-- in Lua (http://www.lua.org/pil/7.3.html):
@@ -1022,6 +520,8 @@ end
-- problem: there is no good number_to_string converter with the best resolution
+local function dummy() end
+
local function do_serialize(root,name,depth,level,indexed)
if level > 0 then
depth = depth .. " "
@@ -1214,19 +714,20 @@ local function do_serialize(root,name,depth,level,indexed)
end
elseif t == "function" then
if functions then
+ local f = getinfo(v).what == "C" and dump(dummy) or dump(v)
+ -- local f = getinfo(v).what == "C" and dump(function(...) return v(...) end) or dump(v)
if tk == "number" then -- or find(k,"^%d+$") then
if hexify then
- handle(format("%s [0x%04X]=loadstring(%q),",depth,k,dump(v)))
+ handle(format("%s [0x%04X]=loadstring(%q),",depth,k,f))
else
- handle(format("%s [%s]=loadstring(%q),",depth,k,dump(v)))
+ handle(format("%s [%s]=loadstring(%q),",depth,k,f))
end
elseif tk == "boolean" then
- handle(format("%s [%s]=loadstring(%q),",depth,tostring(k),dump(v)))
+ handle(format("%s [%s]=loadstring(%q),",depth,tostring(k),f))
elseif noquotes and not reserved[k] and find(k,"^%a[%w%_]*$") then
- handle(format("%s %s=loadstring(%q),",depth,k,dump(v)))
+ handle(format("%s %s=loadstring(%q),",depth,k,f))
else
- -- handle(format("%s [%q]=loadstring(%q),",depth,k,dump(v)))
- handle(format("%s [%q]=loadstring(%q),",depth,k,debug.getinfo(v).what == "C" and "C code" or dump(v)))
+ handle(format("%s [%q]=loadstring(%q),",depth,k,f))
end
end
else
@@ -1571,6 +1072,730 @@ function table.has_one_entry(t)
return t and not next(t,next(t))
end
+-- new
+
+function table.loweredkeys(t) -- maybe utf
+ local l = { }
+ for k, v in next, t do
+ l[lower(k)] = v
+ end
+ return l
+end
+
+-- new, might move (maybe duplicate)
+
+function table.unique(old)
+ local hash = { }
+ local new = { }
+ local n = 0
+ for i=1,#old do
+ local oi = old[i]
+ if not hash[oi] then
+ n = n + 1
+ new[n] = oi
+ hash[oi] = true
+ end
+ end
+ return new
+end
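+
+-- A small illustration (not part of the original patch) of the two helpers above:
+--
+-- table.loweredkeys { Foo = 1, BAR = 2 }    -- { foo = 1, bar = 2 }
+-- table.unique { "a", "b", "a", "c", "b" }  -- { "a", "b", "c" }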
+
+-- function table.sorted(t,...)
+-- table.sort(t,...)
+-- return t -- still sorts in-place
+-- end
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+if not modules then modules = { } end modules ['l-lpeg'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+
+-- a new lpeg fails on a #(1-P(":")) test and really needs a + P(-1)
+
+local lpeg = require("lpeg")
+
+-- tracing (only used when we encounter a problem in integration of lpeg in luatex)
+
+local report = texio and texio.write_nl or print
+
+
+
+
+local type = type
+local byte, char, gmatch = string.byte, string.char, string.gmatch
+
+-- Beware, we predefine a bunch of patterns here and one reason for doing so
+-- is that we get consistent behaviour in some of the visualizers.
+
+lpeg.patterns = lpeg.patterns or { } -- so that we can share
+local patterns = lpeg.patterns
+
+local P, R, S, V, match = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.match
+local Ct, C, Cs, Cc = lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc
+local lpegtype = lpeg.type
+
+local utfcharacters = string.utfcharacters
+local utfgmatch = unicode and unicode.utf8.gmatch
+
+local anything = P(1)
+local endofstring = P(-1)
+local alwaysmatched = P(true)
+
+patterns.anything = anything
+patterns.endofstring = endofstring
+patterns.beginofstring = alwaysmatched
+patterns.alwaysmatched = alwaysmatched
+
+local digit, sign = R('09'), S('+-')
+local cr, lf, crlf = P("\r"), P("\n"), P("\r\n")
+local newline = crlf + S("\r\n") -- cr + lf
+local escaped = P("\\") * anything
+local squote = P("'")
+local dquote = P('"')
+local space = P(" ")
+
+local utfbom_32_be = P('\000\000\254\255')
+local utfbom_32_le = P('\255\254\000\000')
+local utfbom_16_be = P('\255\254')
+local utfbom_16_le = P('\254\255')
+local utfbom_8 = P('\239\187\191')
+local utfbom = utfbom_32_be + utfbom_32_le
+ + utfbom_16_be + utfbom_16_le
+ + utfbom_8
+local utftype = utfbom_32_be / "utf-32-be" + utfbom_32_le / "utf-32-le"
+ + utfbom_16_be / "utf-16-be" + utfbom_16_le / "utf-16-le"
+ + utfbom_8 / "utf-8" + alwaysmatched / "unknown"
+
+local utf8next = R("\128\191")
+
+patterns.utf8one = R("\000\127")
+patterns.utf8two = R("\194\223") * utf8next
+patterns.utf8three = R("\224\239") * utf8next * utf8next
+patterns.utf8four = R("\240\244") * utf8next * utf8next * utf8next
+patterns.utfbom = utfbom
+patterns.utftype = utftype
+
+local utf8char = patterns.utf8one + patterns.utf8two + patterns.utf8three + patterns.utf8four
+local validutf8char = utf8char^0 * endofstring * Cc(true) + Cc(false)
+
+patterns.utf8 = utf8char
+patterns.utf8char = utf8char
+patterns.validutf8 = validutf8char
+patterns.validutf8char = validutf8char
+
+patterns.digit = digit
+patterns.sign = sign
+patterns.cardinal = sign^0 * digit^1
+patterns.integer = sign^0 * digit^1
+patterns.float = sign^0 * digit^0 * P('.') * digit^1
+patterns.cfloat = sign^0 * digit^0 * P(',') * digit^1
+patterns.number = patterns.float + patterns.integer
+patterns.cnumber = patterns.cfloat + patterns.integer
+patterns.oct = P("0") * R("07")^1
+patterns.octal = patterns.oct
+patterns.HEX = P("0x") * R("09","AF")^1
+patterns.hex = P("0x") * R("09","af")^1
+patterns.hexadecimal = P("0x") * R("09","AF","af")^1
+patterns.lowercase = R("az")
+patterns.uppercase = R("AZ")
+patterns.letter = patterns.lowercase + patterns.uppercase
+patterns.space = space
+patterns.tab = P("\t")
+patterns.spaceortab = patterns.space + patterns.tab
+patterns.eol = S("\n\r")
+patterns.spacer = S(" \t\f\v") -- + char(0xc2, 0xa0) if we want utf (cf mail roberto)
+patterns.newline = newline
+patterns.emptyline = newline^1
+patterns.nonspacer = 1 - patterns.spacer
+patterns.whitespace = patterns.eol + patterns.spacer
+patterns.nonwhitespace = 1 - patterns.whitespace
+patterns.equal = P("=")
+patterns.comma = P(",")
+patterns.commaspacer = P(",") * patterns.spacer^0
+patterns.period = P(".")
+patterns.colon = P(":")
+patterns.semicolon = P(";")
+patterns.underscore = P("_")
+patterns.escaped = escaped
+patterns.squote = squote
+patterns.dquote = dquote
+patterns.nosquote = (escaped + (1-squote))^0
+patterns.nodquote = (escaped + (1-dquote))^0
+patterns.unsingle = (squote/"") * patterns.nosquote * (squote/"")
+patterns.undouble = (dquote/"") * patterns.nodquote * (dquote/"")
+patterns.unquoted = patterns.undouble + patterns.unsingle -- more often undouble
+patterns.unspacer = ((patterns.spacer^1)/"")^0
+
+patterns.somecontent = (anything - newline - space)^1 -- (utf8char - newline - space)^1
+patterns.beginline = #(1-newline)
+
+-- print(string.unquoted("test"))
+-- print(string.unquoted([["t\"est"]]))
+-- print(string.unquoted([["t\"est"x]]))
+-- print(string.unquoted("\'test\'"))
+-- print(string.unquoted('"test"'))
+
+function lpeg.anywhere(pattern) --slightly adapted from website
+ return P { P(pattern) + 1 * V(1) } -- why so complex?
+end
+
+function lpeg.splitter(pattern, action)
+ return (((1-P(pattern))^1)/action+1)^0
+end
+
+function lpeg.tsplitter(pattern, action)
+ return Ct((((1-P(pattern))^1)/action+1)^0)
+end
+
+-- problem: the separator can be an lpeg pattern and that does not hash too well, but
+-- it's quite okay as the key is then not garbage collected
+
+local splitters_s, splitters_m, splitters_t = { }, { }, { }
+
+local function splitat(separator,single)
+ local splitter = (single and splitters_s[separator]) or splitters_m[separator]
+ if not splitter then
+ separator = P(separator)
+ local other = C((1 - separator)^0)
+ if single then
+ local any = anything
+ splitter = other * (separator * C(any^0) + "") -- ?
+ splitters_s[separator] = splitter
+ else
+ splitter = other * (separator * other)^0
+ splitters_m[separator] = splitter
+ end
+ end
+ return splitter
+end
+
+local function tsplitat(separator)
+ local splitter = splitters_t[separator]
+ if not splitter then
+ splitter = Ct(splitat(separator))
+ splitters_t[separator] = splitter
+ end
+ return splitter
+end
+
+lpeg.splitat = splitat
+lpeg.tsplitat = tsplitat
+
+function string.splitup(str,separator)
+ if not separator then
+ separator = ","
+ end
+ return match(splitters_m[separator] or splitat(separator),str)
+end
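+
+-- An illustration (not part of the original patch): splitat returns the pieces
+-- as separate captures, tsplitat collects them in a table:
+--
+-- print(match(splitat(","),"a,b,c"))    -- a   b   c
+-- print(#match(tsplitat(","),"a,b,c"))  -- 3
+-- print(string.splitup("a;b;c",";"))    -- a   b   c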
+
+
+local cache = { }
+
+function lpeg.split(separator,str)
+ local c = cache[separator]
+ if not c then
+ c = tsplitat(separator)
+ cache[separator] = c
+ end
+ return match(c,str)
+end
+
+function string.split(str,separator)
+ local c = cache[separator]
+ if not c then
+ c = tsplitat(separator)
+ cache[separator] = c
+ end
+ return match(c,str)
+end
+
+local spacing = patterns.spacer^0 * newline -- sort of strip
+local empty = spacing * Cc("")
+local nonempty = Cs((1-spacing)^1) * spacing^-1
+local content = (empty + nonempty)^1
+
+patterns.textline = content
+
+
+local linesplitter = tsplitat(newline)
+
+patterns.linesplitter = linesplitter
+
+function string.splitlines(str)
+ return match(linesplitter,str)
+end
+
+local utflinesplitter = utfbom^-1 * tsplitat(newline)
+
+patterns.utflinesplitter = utflinesplitter
+
+function string.utfsplitlines(str)
+ return match(utflinesplitter,str or "")
+end
+
+
+local cache = { }
+
+function lpeg.checkedsplit(separator,str)
+ local c = cache[separator]
+ if not c then
+ separator = P(separator)
+ local other = C((1 - separator)^1)
+ c = Ct(separator^0 * other * (separator^1 * other)^0)
+ cache[separator] = c
+ end
+ return match(c,str)
+end
+
+function string.checkedsplit(str,separator)
+ local c = cache[separator]
+ if not c then
+ separator = P(separator)
+ local other = C((1 - separator)^1)
+ c = Ct(separator^0 * other * (separator^1 * other)^0)
+ cache[separator] = c
+ end
+ return match(c,str)
+end
+
+
+local function f2(s) local c1, c2 = byte(s,1,2) return c1 * 64 + c2 - 12416 end
+local function f3(s) local c1, c2, c3 = byte(s,1,3) return (c1 * 64 + c2) * 64 + c3 - 925824 end
+local function f4(s) local c1, c2, c3, c4 = byte(s,1,4) return ((c1 * 64 + c2) * 64 + c3) * 64 + c4 - 63447168 end
+
+local utf8byte = patterns.utf8one/byte + patterns.utf8two/f2 + patterns.utf8three/f3 + patterns.utf8four/f4
+
+patterns.utf8byte = utf8byte
+
+
+
+local cache = { }
+
+function lpeg.stripper(str)
+ if type(str) == "string" then
+ local s = cache[str]
+ if not s then
+ s = Cs(((S(str)^1)/"" + 1)^0)
+ cache[str] = s
+ end
+ return s
+ else
+ return Cs(((str^1)/"" + 1)^0)
+ end
+end
+
+local cache = { }
+
+function lpeg.keeper(str)
+ if type(str) == "string" then
+ local s = cache[str]
+ if not s then
+ s = Cs((((1-S(str))^1)/"" + 1)^0)
+ cache[str] = s
+ end
+ return s
+ else
+ return Cs((((1-str)^1)/"" + 1)^0)
+ end
+end
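+
+-- For instance (illustration, not part of the original patch):
+--
+-- print(match(lpeg.stripper("ab"),"banana"))  -- nn    (strips every a and b)
+-- print(match(lpeg.keeper("ab"),"banana"))    -- baaa  (keeps only a and b)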
+
+function lpeg.frontstripper(str) -- or pattern (yet undocumented)
+ return (P(str) + P(true)) * Cs(P(1)^0)
+end
+
+function lpeg.endstripper(str) -- or pattern (yet undocumented)
+ return Cs((1 - P(str) * P(-1))^0)
+end
+
+-- Just for fun I looked at the used bytecode and
+-- p = (p and p + pp) or pp gets one more (testset).
+
+function lpeg.replacer(one,two)
+ if type(one) == "table" then
+ local no = #one
+ if no > 0 then
+ local p
+ for i=1,no do
+ local o = one[i]
+ local pp = P(o[1]) / o[2]
+ if p then
+ p = p + pp
+ else
+ p = pp
+ end
+ end
+ return Cs((p + 1)^0)
+ end
+ else
+ two = two or ""
+ return Cs((P(one)/two + 1)^0)
+ end
+end
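+
+-- For instance (illustration, not part of the original patch):
+--
+-- print(match(lpeg.replacer("an","AN"),"banana"))                      -- bANANa
+-- print(match(lpeg.replacer { { "a", "1" }, { "b", "2" } },"banana"))  -- 21n1n1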
+
+local splitters_f, splitters_s = { }, { }
+
+function lpeg.firstofsplit(separator) -- always return value
+ local splitter = splitters_f[separator]
+ if not splitter then
+ separator = P(separator)
+ splitter = C((1 - separator)^0)
+ splitters_f[separator] = splitter
+ end
+ return splitter
+end
+
+function lpeg.secondofsplit(separator) -- nil if not split
+ local splitter = splitters_s[separator]
+ if not splitter then
+ separator = P(separator)
+ splitter = (1 - separator)^0 * separator * C(anything^0)
+ splitters_s[separator] = splitter
+ end
+ return splitter
+end
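+
+-- For instance (illustration, not part of the original patch):
+--
+-- print(match(lpeg.firstofsplit(","),"a,b,c"))   -- a
+-- print(match(lpeg.secondofsplit(","),"a,b,c"))  -- b,c
+-- print(match(lpeg.secondofsplit(","),"abc"))    -- nil (no separator present)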
+
+function lpeg.balancer(left,right)
+ left, right = P(left), P(right)
+ return P { left * ((1 - left - right) + V(1))^0 * right }
+end
+
+
+
+local nany = utf8char/""
+
+function lpeg.counter(pattern)
+ pattern = Cs((P(pattern)/" " + nany)^0)
+ return function(str)
+ return #match(pattern,str)
+ end
+end
+
+if utfgmatch then
+
+ function lpeg.count(str,what) -- replaces string.count
+ if type(what) == "string" then
+ local n = 0
+ for _ in utfgmatch(str,what) do
+ n = n + 1
+ end
+ return n
+ else -- 4 times slower but still faster than / function
+ return #match(Cs((P(what)/" " + nany)^0),str)
+ end
+ end
+
+else
+
+ local cache = { }
+
+ function lpeg.count(str,what) -- replaces string.count
+ if type(what) == "string" then
+ local p = cache[what]
+ if not p then
+ p = Cs((P(what)/" " + nany)^0)
+ cache[what] = p -- index the cache by the string key, not by the pattern
+ end
+ return #match(p,str)
+ else -- 4 times slower but still faster than / function
+ return #match(Cs((P(what)/" " + nany)^0),str)
+ end
+ end
+
+end
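+
+-- For instance (illustration, not part of the original patch):
+--
+-- print(lpeg.count("banana","an"))     -- 2
+-- print(lpeg.count("banana",P("na")))  -- 2 (a pattern argument takes the slower branch)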
+
+local patterns_escapes = { -- also defined in l-string
+ ["%"] = "%%",
+ ["."] = "%.",
+ ["+"] = "%+", ["-"] = "%-", ["*"] = "%*",
+ ["["] = "%[", ["]"] = "%]",
+ ["("] = "%(", [")"] = "%)",
+ -- ["{"] = "%{", ["}"] = "%}"
+ -- ["^"] = "%^", ["$"] = "%$",
+}
+
+local simple_escapes = { -- also defined in l-string
+ ["-"] = "%-",
+ ["."] = "%.",
+ ["?"] = ".",
+ ["*"] = ".*",
+}
+
+local p = Cs((S("-.+*%()[]") / patterns_escapes + anything)^0)
+local s = Cs((S("-.+*%()[]") / simple_escapes + anything)^0)
+
+function string.escapedpattern(str,simple)
+ return match(simple and s or p,str)
+end
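+
+-- For instance (illustration, not part of the original patch):
+--
+-- print(string.escapedpattern("1.2-3"))        -- 1%.2%-3
+-- print(string.escapedpattern("*.mkiv",true))  -- .*%.mkiv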
+
+-- utf extensions
+
+lpeg.UP = lpeg.P
+
+if utfcharacters then
+
+ function lpeg.US(str)
+ local p
+ for uc in utfcharacters(str) do
+ if p then
+ p = p + P(uc)
+ else
+ p = P(uc)
+ end
+ end
+ return p
+ end
+
+
+elseif utfgmatch then
+
+ function lpeg.US(str)
+ local p
+ for uc in utfgmatch(str,".") do
+ if p then
+ p = p + P(uc)
+ else
+ p = P(uc)
+ end
+ end
+ return p
+ end
+
+else
+
+ function lpeg.US(str)
+ local p
+ local f = function(uc)
+ if p then
+ p = p + P(uc)
+ else
+ p = P(uc)
+ end
+ end
+ match((utf8char/f)^0,str)
+ return p
+ end
+
+end
+
+local range = Cs(utf8byte) * (Cs(utf8byte) + Cc(false))
+
+local utfchar = unicode and unicode.utf8 and unicode.utf8.char
+
+function lpeg.UR(str,more)
+ local first, last
+ if type(str) == "number" then
+ first = str
+ last = more or first
+ else
+ first, last = match(range,str)
+ if not last then
+ return P(str)
+ end
+ end
+ if first == last then
+ return P(str)
+ elseif utfchar and last - first < 8 then -- a somewhat arbitrary criterion
+ local p
+ for i=first,last do
+ if p then
+ p = p + P(utfchar(i))
+ else
+ p = P(utfchar(i))
+ end
+ end
+ return p -- nil when invalid range
+ else
+ local f = function(b)
+ return b >= first and b <= last
+ end
+ return utf8byte / f -- nil when invalid range
+ end
+end
+
+
+
+function lpeg.oneof(list,...) -- lpeg.oneof("elseif","else","if","then")
+ if type(list) ~= "table" then
+ list = { list, ... }
+ end
+ -- sort(list) -- longest match first
+ local p = P(list[1])
+ for l=2,#list do
+ p = p + P(list[l])
+ end
+ return p
+end
+
+function lpeg.is_lpeg(p)
+ return p and lpegtype(p) == "pattern"
+end
+
+-- For the moment here, but it might move to utilities. Beware, we need to
+-- have the longest keyword first, so 'aaa' comes before 'aa', which is why we
+-- loop back from the end, i.e. prepend.
+
+local sort, fastcopy, sortedkeys = table.sort, table.fastcopy, table.sortedkeys -- dependency!
+
+function lpeg.append(list,pp,delayed,checked)
+ local p = pp
+ if #list > 0 then
+ local keys = fastcopy(list)
+ sort(keys)
+ for i=#keys,1,-1 do
+ local k = keys[i]
+ if p then
+ p = P(k) + p
+ else
+ p = P(k)
+ end
+ end
+ elseif delayed then -- hm, it looks like the lpeg parser resolves anyway
+ local keys = sortedkeys(list)
+ if p then
+ for i=1,#keys,1 do
+ local k = keys[i]
+ local v = list[k]
+ p = P(k)/list + p
+ end
+ else
+ for i=1,#keys do
+ local k = keys[i]
+ local v = list[k]
+ if p then
+ p = P(k) + p
+ else
+ p = P(k)
+ end
+ end
+ if p then
+ p = p / list
+ end
+ end
+ elseif checked then
+ -- problem: substitution gives a capture
+ local keys = sortedkeys(list)
+ for i=1,#keys do
+ local k = keys[i]
+ local v = list[k]
+ if p then
+ if k == v then
+ p = P(k) + p
+ else
+ p = P(k)/v + p
+ end
+ else
+ if k == v then
+ p = P(k)
+ else
+ p = P(k)/v
+ end
+ end
+ end
+ else
+ local keys = sortedkeys(list)
+ for i=1,#keys do
+ local k = keys[i]
+ local v = list[k]
+ if p then
+ p = P(k)/v + p
+ else
+ p = P(k)/v
+ end
+ end
+ end
+ return p
+end
+
+-- inspect(lpeg.append({ a = "1", aa = "1", aaa = "1" } ,nil,true))
+-- inspect(lpeg.append({ ["degree celsius"] = "1", celsius = "1", degree = "1" } ,nil,true))
+
+-- function lpeg.exact_match(words,case_insensitive)
+-- local pattern = concat(words)
+-- if case_insensitive then
+-- local pattern = S(upper(characters)) + S(lower(characters))
+-- local list = { }
+-- for i=1,#words do
+-- list[lower(words[i])] = true
+-- end
+-- return Cmt(pattern^1, function(_,i,s)
+-- return list[lower(s)] and i
+-- end)
+-- else
+-- local pattern = S(concat(words))
+-- local list = { }
+-- for i=1,#words do
+-- list[words[i]] = true
+-- end
+-- return Cmt(pattern^1, function(_,i,s)
+-- return list[s] and i
+-- end)
+-- end
+-- end
+
+-- experiment:
+
+local function make(t)
+ local p
+-- for k, v in next, t do
+ for k, v in table.sortedhash(t) do
+ if not p then
+ if next(v) then
+ p = P(k) * make(v)
+ else
+ p = P(k)
+ end
+ else
+ if next(v) then
+ p = p + P(k) * make(v)
+ else
+ p = p + P(k)
+ end
+ end
+ end
+ return p
+end
+
+function lpeg.utfchartabletopattern(list)
+ local tree = { }
+ for i=1,#list do
+ local t = tree
+ for c in gmatch(list[i],".") do
+ if not t[c] then
+ t[c] = { }
+ end
+ t = t[c]
+ end
+ end
+ return make(tree)
+end
+
+-- inspect ( lpeg.utfchartabletopattern {
+-- utfchar(0x00A0), -- nbsp
+-- utfchar(0x2000), -- enquad
+-- utfchar(0x2001), -- emquad
+-- utfchar(0x2002), -- enspace
+-- utfchar(0x2003), -- emspace
+-- utfchar(0x2004), -- threeperemspace
+-- utfchar(0x2005), -- fourperemspace
+-- utfchar(0x2006), -- sixperemspace
+-- utfchar(0x2007), -- figurespace
+-- utfchar(0x2008), -- punctuationspace
+-- utfchar(0x2009), -- breakablethinspace
+-- utfchar(0x200A), -- hairspace
+-- utfchar(0x200B), -- zerowidthspace
+-- utfchar(0x202F), -- narrownobreakspace
+-- utfchar(0x205F), -- math thinspace
+-- } )
+
end -- of closure
@@ -1831,6 +2056,11 @@ function io.readstring(f,n,m)
return str
end
+--
+
+if not io.i_limiter then function io.i_limiter() end end -- dummy so we can test safely
+if not io.o_limiter then function io.o_limiter() end end -- dummy so we can test safely
+
end -- of closure
@@ -2061,7 +2291,7 @@ if not modules then modules = { } end modules ['l-os'] = {
-- maybe build io.flush in os.execute
local os = os
-local find, format, gsub, upper = string.find, string.format, string.gsub, string.upper
+local find, format, gsub, upper, gmatch = string.find, string.format, string.gsub, string.upper, string.gmatch
local concat = table.concat
local random, ceil = math.random, math.ceil
local rawget, rawset, type, getmetatable, setmetatable, tonumber = rawget, rawset, type, getmetatable, setmetatable, tonumber
@@ -2167,9 +2397,9 @@ os.type = os.type or (io.pathseparator == ";" and "windows") or "unix"
os.name = os.name or (os.type == "windows" and "mswin" ) or "linux"
if os.type == "windows" then
- os.libsuffix, os.binsuffix = 'dll', 'exe'
+ os.libsuffix, os.binsuffix, os.binsuffixes = 'dll', 'exe', { 'exe', 'cmd', 'bat' }
else
- os.libsuffix, os.binsuffix = 'so', ''
+ os.libsuffix, os.binsuffix, os.binsuffixes = 'so', '', { '' }
end
function os.launch(str)
@@ -2400,6 +2630,39 @@ function os.timezone(delta)
end
end
+local memory = { }
+
+local function which(filename)
+ local fullname = memory[filename]
+ if fullname == nil then
+ local suffix = file.suffix(filename)
+ local suffixes = suffix == "" and os.binsuffixes or { suffix }
+ for directory in gmatch(os.getenv("PATH"),"[^" .. io.pathseparator .."]+") do
+ local df = file.join(directory,filename)
+ for i=1,#suffixes do
+ local dfs = file.addsuffix(df,suffixes[i])
+ if io.exists(dfs) then
+ fullname = dfs
+ break
+ end
+ end
+ end
+ if not fullname then
+ fullname = false
+ end
+ memory[filename] = fullname
+ end
+ return fullname
+end
+
+os.which = which
+os.where = which
+
+-- print(os.which("inkscape.exe"))
+-- print(os.which("inkscape"))
+-- print(os.which("gs.exe"))
+-- print(os.which("ps2pdf"))
+
end -- of closure
@@ -2499,10 +2762,10 @@ end
local trick_1 = char(1)
local trick_2 = "^" .. trick_1 .. "/+"
-function file.join(...)
+function file.join(...) -- rather dirty
local lst = { ... }
local a, b = lst[1], lst[2]
- if a == "" then
+ if not a or a == "" then -- not a added
lst[1] = trick_1
elseif b and find(a,"^/+$") and find(b,"^/") then
lst[1] = ""
@@ -2523,6 +2786,15 @@ function file.join(...)
end
+-- We should be able to use:
+--
+-- function file.is_writable(name)
+-- local a = attributes(name) or attributes(dirname(name,"."))
+-- return a and sub(a.permissions,2,2) == "w"
+-- end
+--
+-- But after some testing Taco and I came up with:
+
function file.is_writable(name)
if lfs.isdir(name) then
name = name .. "/m_t_x_t_e_s_t.tmp"
@@ -2532,12 +2804,17 @@ function file.is_writable(name)
os.remove(name)
return true
end
- else
- local existing = lfs.isfile(name)
- f = io.open(name,"ab")
+ elseif lfs.isfile(name) then
+ local f = io.open(name,"ab")
if f then
f:close()
- if not existing then os.remove(name) end
+ return true
+ end
+ else
+ local f = io.open(name,"ab")
+ if f then
+ f:close()
+ os.remove(name)
return true
end
end
@@ -2594,7 +2871,7 @@ function file.collapsepath(str,anchor)
if element == '.' then
-- do nothing
elseif element == '..' then
- local n = i -1
+ local n = i - 1
while n > 0 do
local element = oldelements[n]
if element ~= '..' and element ~= '.' then
@@ -2690,10 +2967,42 @@ local path = C(((1-slash)^0 * slash)^0)
local suffix = period * C(P(1-period)^0 * P(-1))
local base = C((1-suffix)^0)
-local pattern = (drive + Cc("")) * (path + Cc("")) * (base + Cc("")) * (suffix + Cc(""))
+drive = drive + Cc("")
+path = path + Cc("")
+base = base + Cc("")
+suffix = suffix + Cc("")
-function file.splitname(str) -- returns drive, path, base, suffix
- return lpegmatch(pattern,str)
+local pattern_a = drive * path * base * suffix
+local pattern_b = path * base * suffix
+local pattern_c = C(drive * path) * C(base * suffix)
+
+function file.splitname(str,splitdrive)
+ if splitdrive then
+ return lpegmatch(pattern_a,str) -- returns drive, path, base, suffix
+ else
+ return lpegmatch(pattern_b,str) -- returns path, base, suffix
+ end
+end
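+
+-- For instance (illustration, not part of the original patch):
+--
+-- print(file.splitname("a/b/c.lua"))  -- a/b/   c   lua
+--
+-- with splitdrive set, a (windows style) drive prefix is returned as an extra
+-- leading value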
+
+function file.nametotable(str,splitdrive) -- returns table
+ local path, drive, subpath, name, base, suffix = lpegmatch(pattern_c,str)
+ if splitdrive then
+ return {
+ path = path,
+ drive = drive,
+ subpath = subpath,
+ name = name,
+ base = base,
+ suffix = suffix,
+ }
+ else
+ return {
+ path = path,
+ name = name,
+ base = base,
+ suffix = suffix,
+ }
+ end
end
-- function test(t) for k, v in next, t do print(v, "=>", file.splitname(v)) end end
@@ -2828,12 +3137,19 @@ local nothing = Cc("")
local escaped = (plus / " ") + (percent * C(hexdigit * hexdigit) / tochar)
-- we assume schemes with more than 1 character (in order to avoid problems with windows disks)
+-- we also assume that when we have a scheme, we also have an authority
+
+local schemestr = Cs((escaped+(1-colon-slash-qmark-hash))^2)
+local authoritystr = Cs((escaped+(1- slash-qmark-hash))^0)
+local pathstr = Cs((escaped+(1- qmark-hash))^0)
+local querystr = Cs((escaped+(1- hash))^0)
+local fragmentstr = Cs((escaped+(1- endofstring))^0)
-local scheme = Cs((escaped+(1-colon-slash-qmark-hash))^2) * colon * slash * slash + nothing
-local authority = Cs((escaped+(1- slash-qmark-hash))^0) + nothing
-local path = slash * Cs((escaped+(1- qmark-hash))^0) + nothing
-local query = qmark * Cs((escaped+(1- hash))^0) + nothing
-local fragment = hash * Cs((escaped+(1- endofstring))^0) + nothing
+local scheme = schemestr * colon + nothing
+local authority = slash * slash * authoritystr + nothing
+local path = slash * pathstr + nothing
+local query = qmark * querystr + nothing
+local fragment = hash * fragmentstr + nothing
local validurl = scheme * authority * path * query * fragment
local parser = Ct(validurl)
@@ -2854,11 +3170,14 @@ local function split(str)
return (type(str) == "string" and lpegmatch(parser,str)) or str
end
+local isscheme = schemestr * colon * slash * slash -- this test also assumes authority
+
local function hasscheme(str)
- local scheme = lpegmatch(scheme,str) -- at least one character
- return scheme and scheme ~= ""
+ local scheme = lpegmatch(isscheme,str) -- at least one character
+ return scheme ~= "" and scheme or false
end
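+
+-- A small illustration (not part of the original patch), assuming the local
+-- hasscheme above is exported as url.hasscheme elsewhere in this file:
+--
+-- url.hasscheme("http://www.pragma-ade.com")  -- "http"
+-- url.hasscheme("oeps/whatever")              -- false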
+
-- todo: cache them
local rootletter = R("az","AZ")
@@ -3007,8 +3326,6 @@ local attributes = lfs.attributes
local walkdir = lfs.dir
local isdir = lfs.isdir
local isfile = lfs.isfile
-local mkdir = lfs.mkdir
-local chdir = lfs.chdir
local currentdir = lfs.currentdir
-- handy
@@ -3056,13 +3373,13 @@ local function collectpattern(path,patt,recurse,result)
    local ok, scanner, first
result = result or { }
if path == "/" then
- ok, scanner = xpcall(function() return walkdir(path..".") end, function() end) -- kepler safe
+ ok, scanner, first = xpcall(function() return walkdir(path..".") end, function() end) -- kepler safe
else
- ok, scanner = xpcall(function() return walkdir(path) end, function() end) -- kepler safe
+ ok, scanner, first = xpcall(function() return walkdir(path) end, function() end) -- kepler safe
end
if ok and type(scanner) == "function" then
if not find(path,"/$") then path = path .. '/' end
- for name in scanner do
+ for name in scanner, first do
local full = path .. name
local attr = attributes(full)
local mode = attr.mode
@@ -3245,7 +3562,7 @@ if onwindows then
pth = pth .. "/" .. s
end
if make_indeed and not isdir(pth) then
- mkdir(pth)
+ lfs.mkdir(pth)
end
end
return pth, (isdir(pth) == true)
@@ -3277,7 +3594,7 @@ else
pth = pth .. "/" .. s
end
if make_indeed and not first and not isdir(pth) then
- mkdir(pth)
+ lfs.mkdir(pth)
end
end
else
@@ -3285,7 +3602,7 @@ else
for s in gmatch(str,"[^/]+") do
pth = pth .. "/" .. s
if make_indeed and not isdir(pth) then
- mkdir(pth)
+ lfs.mkdir(pth)
end
end
end
@@ -3313,10 +3630,10 @@ if onwindows then
first, last = match(str,"^([a-zA-Z]:)(.*)$")
if first and not find(last,"^/") then
local d = currentdir()
- if chdir(first) then
+ if lfs.chdir(first) then
first = dir.current()
end
- chdir(d)
+ lfs.chdir(d)
end
end
if not first then
@@ -3381,10 +3698,6 @@ local type, tonumber = type, tonumber
boolean = boolean or { }
local boolean = boolean
--- function boolean.tonumber(b)
--- return b and 1 or 0 -- test and test and return or return
--- end
-
function boolean.tonumber(b)
if b then return 1 else return 0 end -- test and return or return
end
@@ -3545,7 +3858,7 @@ local function utf16_to_utf8_be(t)
if right then
local now = 256*left + right
if more > 0 then
- now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000
+ now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000 -- the 0x10000 smells wrong
more = 0
r = r + 1
result[r] = utfchar(now)
@@ -3573,7 +3886,7 @@ local function utf16_to_utf8_le(t)
if right then
local now = 256*right + left
if more > 0 then
- now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000
+ now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000 -- the 0x10000 smells wrong
more = 0
r = r + 1
result[r] = utfchar(now)
@@ -3590,14 +3903,14 @@ local function utf16_to_utf8_le(t)
return t
end
-local function utf32_to_utf8_be(str)
+local function utf32_to_utf8_be(t)
if type(t) == "string" then
- t = utfsplitlines(str)
+ t = utfsplitlines(t)
end
local result = { } -- we reuse result
for i=1,#t do
local r, more = 0, -1
- for a,b in bytepairs(str) do
+ for a,b in bytepairs(t[i]) do
if a and b then
if more < 0 then
more = 256*256*256*a + 256*256*b
@@ -3612,17 +3925,17 @@ local function utf32_to_utf8_be(str)
end
t[i] = concat(result,"",1,r)
end
- return result
+ return t
end
-local function utf32_to_utf8_le(str)
+local function utf32_to_utf8_le(t)
if type(t) == "string" then
- t = utfsplitlines(str)
+ t = utfsplitlines(t)
end
local result = { } -- we reuse result
for i=1,#t do
local r, more = 0, -1
- for a,b in bytepairs(str) do
+ for a,b in bytepairs(t[i]) do
if a and b then
if more < 0 then
more = 256*b + a
@@ -3637,7 +3950,7 @@ local function utf32_to_utf8_le(str)
end
t[i] = concat(result,"",1,r)
end
- return result
+ return t
end
unicode.utf32_to_utf8_be = utf32_to_utf8_be
@@ -3706,15 +4019,67 @@ end
local lpegmatch = lpeg.match
-local utftype = lpeg.patterns.utftype
+local patterns = lpeg.patterns
+local utftype = patterns.utftype
function unicode.filetype(data)
return data and lpegmatch(utftype,data) or "unknown"
end
+local toentities = lpeg.Cs (
+ (
+ patterns.utf8one
+ + (
+ patterns.utf8two
+ + patterns.utf8three
+ + patterns.utf8four
+ ) / function(s) local b = utfbyte(s) if b < 127 then return s else return format("&#%X;",b) end end
+ )^0
+)
+
+patterns.toentities = toentities
+
+function utf.toentities(str)
+ return lpegmatch(toentities,str)
+end
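+
+-- A small illustration (not part of the original patch): plain ascii passes
+-- through, other characters become numeric entities:
+--
+-- utf.toentities("abcd")  -- "abcd"
+-- utf.toentities("åäö")   -- "&#E5;&#E4;&#F6;"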
+
+local P, C, R, Cs = lpeg.P, lpeg.C, lpeg.R, lpeg.Cs
+
+local one = P(1)
+local two = C(1) * C(1)
+local four = C(R(utfchar(0xD8),utfchar(0xFF))) * C(1) * C(1) * C(1)
+
+local pattern = P("\254\255") * Cs( (
+ four / function(a,b,c,d)
+ local ab = 0xFF * byte(a) + byte(b)
+ local cd = 0xFF * byte(c) + byte(d)
+ return utfchar((ab-0xD800)*0x400 + (cd-0xDC00) + 0x10000)
+ end
+ + two / function(a,b)
+ return utfchar(byte(a)*256 + byte(b))
+ end
+ + one
+ )^1 )
+ + P("\255\254") * Cs( (
+ four / function(b,a,d,c)
+ local ab = 0xFF * byte(a) + byte(b)
+ local cd = 0xFF * byte(c) + byte(d)
+ return utfchar((ab-0xD800)*0x400 + (cd-0xDC00) + 0x10000)
+ end
+ + two / function(b,a)
+ return utfchar(byte(a)*256 + byte(b))
+ end
+ + one
+ )^1 )
+
+function string.toutf(s)
+ return lpegmatch(pattern,s) or s -- todo: utf32
+end
+
+
end -- of closure
do -- create closure to overcome 200 locals limit
@@ -3750,8 +4115,8 @@ if not math.sind then
end
if not math.odd then
- function math.odd (n) return n % 2 == 0 end
- function math.even(n) return n % 2 ~= 0 end
+ function math.odd (n) return n % 2 ~= 0 end
+ function math.even(n) return n % 2 == 0 end
end
@@ -3771,9 +4136,10 @@ utilities = utilities or {}
utilities.tables = utilities.tables or { }
local tables = utilities.tables
-local format, gmatch = string.format, string.gmatch
+local format, gmatch, rep = string.format, string.gmatch, string.rep
local concat, insert, remove = table.concat, table.insert, table.remove
local setmetatable, getmetatable, tonumber, tostring = setmetatable, getmetatable, tonumber, tostring
+local type, next, rawset, tonumber = type, next, rawset, tonumber
function tables.definetable(target) -- defines undefined tables
local composed, t, n = nil, { }, 0
@@ -3789,14 +4155,31 @@ function tables.definetable(target) -- defines undefined tables
return concat(t,"\n")
end
-function tables.accesstable(target)
- local t = _G
+function tables.accesstable(target,root)
+ local t = root or _G
for name in gmatch(target,"([^%.]+)") do
t = t[name]
+ if not t then
+ return
+ end
end
return t
end
+function tables.migratetable(target,v,root)
+ local t = root or _G
+ local names = string.split(target,".")
+ for i=1,#names-1 do
+ local name = names[i]
+ t[name] = t[name] or { }
+ t = t[name]
+ if not t then
+ return
+ end
+ end
+ t[names[#names]] = v
+end
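+
+-- Illustrative example, not part of the original source: migratetable creates
+-- the intermediate tables that accesstable can walk again afterwards.
+--
+-- local root = { }
+-- tables.migratetable("a.b.c",123,root)
+-- print(tables.accesstable("a.b.c",root)) -- 123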
+
function tables.removevalue(t,value) -- todo: n
if value then
for i=1,#t do
@@ -3840,13 +4223,19 @@ end
-- experimental
-local function toxml(t,d,result)
+local function toxml(t,d,result,step)
for k, v in table.sortedpairs(t) do
if type(v) == "table" then
- result[#result+1] = format("%s<%s>",d,k)
- toxml(v,d.." ",result)
- result[#result+1] = format("%s</%s>",d,k)
- elseif tonumber(k) then
+ if type(k) == "number" then
+ result[#result+1] = format("%s<entry n='%s'>",d,k)
+ toxml(v,d..step,result,step)
+ result[#result+1] = format("%s</entry>",d,k)
+ else
+ result[#result+1] = format("%s<%s>",d,k)
+ toxml(v,d..step,result,step)
+ result[#result+1] = format("%s</%s>",d,k)
+ end
+ elseif type(k) == "number" then
result[#result+1] = format("%s<entry n='%s'>%s</entry>",d,k,v,k)
else
result[#result+1] = format("%s<%s>%s</%s>",d,k,tostring(v),k)
@@ -3854,17 +4243,56 @@ local function toxml(t,d,result)
end
end
-function table.toxml(t,name,nobanner)
+function table.toxml(t,name,nobanner,indent,spaces)
local noroot = name == false
local result = (nobanner or noroot) and { } or { "<?xml version='1.0' standalone='yes' ?>" }
+ local indent = rep(" ",indent or 0)
+ local spaces = rep(" ",spaces or 1)
if noroot then
- toxml( t, "", result)
+ toxml( t, indent, result, spaces)
else
- toxml( { [name or "root"] = t }, "", result)
+ toxml( { [name or "root"] = t }, indent, result, spaces)
end
return concat(result,"\n")
end
+-- also experimental
+
+-- encapsulate(table,utilities.tables)
+-- encapsulate(table,utilities.tables,true)
+-- encapsulate(table,true)
+
+function tables.encapsulate(core,capsule,protect)
+ if type(capsule) ~= "table" then
+ protect = true
+ capsule = { }
+ end
+ for key, value in next, core do
+ if capsule[key] then
+ print(format("\ninvalid inheritance '%s' in '%s': %s",key,tostring(core)))
+ os.exit()
+ else
+ capsule[key] = value
+ end
+ end
+ if protect then
+ for key, value in next, core do
+ core[key] = nil
+ end
+ setmetatable(core, {
+ __index = capsule,
+ __newindex = function(t,key,value)
+ if capsule[key] then
+ print(format("\ninvalid overload '%s' in '%s'",key,tostring(core)))
+ os.exit()
+ else
+ rawset(t,key,value)
+ end
+ end
+ } )
+ end
+end
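+
+-- Illustrative example, not part of the original source: with protect set the
+-- core table is emptied and existing keys can no longer be overloaded.
+--
+-- local core = { foo = 1 }
+-- tables.encapsulate(core,nil,true) -- capsule created on the fly, protect forced
+-- print(core.foo) -- 1, served via the __index capsule
+-- core.foo = 2 -- would report "invalid overload 'foo'" and exit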
+
end -- of closure
@@ -3886,8 +4314,8 @@ local storage = utilities.storage
function storage.mark(t)
if not t then
- texio.write_nl("fatal error: storage '%s' cannot be marked",t)
- os.exit()
+ texio.write_nl("fatal error: storage cannot be marked")
+ return -- os.exit()
end
local m = getmetatable(t)
if not m then
@@ -3916,8 +4344,8 @@ end
function storage.checked(t)
if not t then
- texio.write_nl("fatal error: storage '%s' has not been allocated",t)
- os.exit()
+ texio.write_nl("fatal error: storage has not been allocated")
+ return -- os.exit()
end
return t
end
@@ -3946,13 +4374,13 @@ end
-- table namespace ?
-local function f_empty () return "" end -- t,k
-local function f_self (t,k) t[k] = k return k end
-local function f_ignore() end -- t,k,v
+local function f_empty () return "" end -- t,k
+local function f_self (t,k) t[k] = k return k end
+local function f_ignore() end -- t,k,v
-local t_empty = { __index = empty }
-local t_self = { __index = self }
-local t_ignore = { __newindex = ignore }
+local t_empty = { __index = f_empty }
+local t_self = { __index = f_self }
+local t_ignore = { __newindex = f_ignore }
function table.setmetatableindex(t,f)
local m = getmetatable(t)
@@ -3973,6 +4401,7 @@ function table.setmetatableindex(t,f)
setmetatable(t,{ __index = f })
end
end
+ return t
end
function table.setmetatablenewindex(t,f)
@@ -3990,6 +4419,7 @@ function table.setmetatablenewindex(t,f)
setmetatable(t,{ __newindex = f })
end
end
+ return t
end
function table.setmetatablecall(t,f)
@@ -3999,6 +4429,7 @@ function table.setmetatablecall(t,f)
else
setmetatable(t,{ __call = f })
end
+ return t
end
function table.setmetatablekey(t,key,value)
@@ -4008,6 +4439,7 @@ function table.setmetatablekey(t,key,value)
setmetatable(t,m)
end
m[key] = value
+ return t
end
function table.getmetatablekey(t,key,value)
@@ -4176,14 +4608,29 @@ utilities = utilities or {}
utilities.lua = utilities.lua or { }
utilities.report = logs and logs.reporter("system") or print
-function utilities.lua.compile(luafile,lucfile,cleanup,strip) -- defaults: cleanup=false strip=true
+local function stupidcompile(luafile,lucfile)
+ local data = io.loaddata(luafile)
+ if data and data ~= "" then
+ local chunk = loadstring(data) -- string.dump needs a compiled chunk, not the source
+ data = chunk and string.dump(chunk)
+ if data and data ~= "" then
+ io.savedata(lucfile,data)
+ end
+ end
+end
+
+function utilities.lua.compile(luafile,lucfile,cleanup,strip,fallback) -- defaults: cleanup=false strip=true
utilities.report("lua: compiling %s into %s",luafile,lucfile)
os.remove(lucfile)
local command = "-o " .. string.quoted(lucfile) .. " " .. string.quoted(luafile)
if strip ~= false then
command = "-s " .. command
end
- local done = os.spawn("texluac " .. command) == 0 or os.spawn("luac " .. command) == 0
+ local done = os.spawn("texluac " .. command) == 0 -- or os.spawn("luac " .. command) == 0
+ if not done and fallback then
+ utilities.report("lua: dumping %s into %s (unstripped)",luafile,lucfile)
+ stupidcompile(luafile,lucfile) -- maybe use the stripper we have elsewhere
+ cleanup = false -- better to first see how much worse it is
+ end
if done and cleanup == true and lfs.isfile(lucfile) and lfs.isfile(luafile) then
utilities.report("lua: removing %s",luafile)
os.remove(luafile)
@@ -4210,9 +4657,9 @@ if not modules then modules = { } end modules ['util-prs'] = {
license = "see context related readme files"
}
-local P, R, V, C, Ct, Carg = lpeg.P, lpeg.R, lpeg.V, lpeg.C, lpeg.Ct, lpeg.Carg
+local P, R, V, C, Ct, Cs, Carg = lpeg.P, lpeg.R, lpeg.V, lpeg.C, lpeg.Ct, lpeg.Cs, lpeg.Carg
local lpegmatch = lpeg.match
-local concat, format, gmatch = table.concat, string.format, string.gmatch
+local concat, format, gmatch, find = table.concat, string.format, string.gmatch, string.find
local tostring, type, next = tostring, type, next
utilities = utilities or {}
@@ -4240,8 +4687,12 @@ local rbrace = P("}")
local nobrace = 1 - (lbrace+rbrace)
local nested = P { lbrace * (nobrace + V(1))^0 * rbrace }
local spaces = space^0
+local argument = Cs((lbrace/"") * ((nobrace + nested)^0) * (rbrace/""))
+local content = (1-P(-1))^0
-lpeg.patterns.nested = nested
+lpeg.patterns.nested = nested -- no capture
+lpeg.patterns.argument = argument -- argument after e.g. =
+lpeg.patterns.content = content -- rest after e.g =
local value = P(lbrace * C((nobrace + nested)^0) * rbrace) + C((nested + (1-comma))^0)
@@ -4322,9 +4773,15 @@ parsers.patterns.settings_to_array = pattern
-- we could use a weak table as cache
-function parsers.settings_to_array(str)
+function parsers.settings_to_array(str,strict)
if not str or str == "" then
return { }
+ elseif strict then
+ if find(str,"{") then
+ return lpegmatch(pattern,str)
+ else
+ return { str }
+ end
else
return lpegmatch(pattern,str)
end
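+
+-- Illustrative example, not part of the original source: in strict mode a value
+-- without braces is kept as a single entry instead of being split at commas.
+--
+-- parsers.settings_to_array("a,b,c") -- { "a", "b", "c" }
+-- parsers.settings_to_array("a,b,c",true) -- { "a,b,c" }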
@@ -4562,61 +5019,55 @@ local names = { }
-- one
local function hook()
- local f = getinfo(2,"f").func
- local n = getinfo(2,"Sn")
--- if n.what == "C" and n.name then print (n.namewhat .. ': ' .. n.name) end
+ local f = getinfo(2) -- "nS"
if f then
- local cf = counters[f]
- if cf == nil then
- counters[f] = 1
- names[f] = n
- else
- counters[f] = cf + 1
- end
- end
-end
-
-local function getname(func)
- local n = names[func]
- if n then
- if n.what == "C" then
- return n.name or '<anonymous>'
+ local n = "unknown"
+ if f.what == "C" then
+ n = f.name or '<anonymous>'
+ if not names[n] then
+ names[n] = format("%42s",n)
+ end
else
-- source short_src linedefined what name namewhat nups func
- local name = n.name or n.namewhat or n.what
- if not name or name == "" then name = "?" end
- return format("%s : %s : %s", n.short_src or "unknown source", n.linedefined or "--", name)
+ n = f.name or f.namewhat or f.what
+ if not n or n == "" then
+ n = "?"
+ end
+ if not names[n] then
+ names[n] = format("%42s : % 5i : %s",n,f.linedefined or 0,f.short_src or "unknown source")
+ end
end
- else
- return "unknown"
+ counters[n] = (counters[n] or 0) + 1
end
end
-function debugger.showstats(printer,threshold)
+function debugger.showstats(printer,threshold) -- hm, something has changed, rubbish now
printer = printer or texio.write or print
threshold = threshold or 0
local total, grandtotal, functions = 0, 0, 0
- printer("\n") -- ugly but ok
- -- table.sort(counters)
- for func, count in next, counters do
- if count > threshold then
- local name = getname(func)
- if not find(name,"for generator") then
- printer(format("%8i %s", count, name))
- total = total + count
- end
+ local dataset = { }
+ for name, count in next, counters do
+ dataset[#dataset+1] = { name, count }
+ end
+ table.sort(dataset,function(a,b) return a[2] == b[2] and b[1] > a[1] or a[2] > b[2] end)
+ for i=1,#dataset do
+ local d = dataset[i]
+ local name = d[1]
+ local count = d[2]
+ if count > threshold and not find(name,"for generator") then -- move up
+ printer(format("%8i %s\n", count, names[name]))
+ total = total + count
end
grandtotal = grandtotal + count
functions = functions + 1
end
- printer(format("functions: %s, total: %s, grand total: %s, threshold: %s\n", functions, total, grandtotal, threshold))
+ printer("\n")
+ printer(format("functions : % 10i\n", functions))
+ printer(format("total : % 10i\n", total))
+ printer(format("grand total: % 10i\n", grandtotal))
+ printer(format("threshold : % 10i\n", threshold))
end
--- two
-
-
--- rest
-
function debugger.savestats(filename,threshold)
local f = io.open(filename,'w')
if f then
@@ -4638,6 +5089,7 @@ end
local is_node = node and node.is_node
+local is_lpeg = lpeg and lpeg.type
function inspect(i) -- global function
local ti = type(i)
@@ -4645,11 +5097,30 @@ function inspect(i) -- global function
table.print(i,"table")
elseif is_node and is_node(i) then
table.print(nodes.astable(i),tostring(i))
+ elseif is_lpeg and is_lpeg(i) then
+ lpeg.print(i)
else
print(tostring(i))
end
end
+-- from the lua book:
+
+function traceback()
+ local level = 1
+ while true do
+ local info = debug.getinfo(level, "Sl")
+ if not info then
+ break
+ elseif info.what == "C" then
+ print(format("%3i : C function",level))
+ else
+ print(format("%3i : [%s]:%d",level,info.short_src,info.currentline))
+ end
+ level = level + 1
+ end
+end
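+
+-- Illustrative, not part of the original source:
+--
+-- traceback() -- prints one line per stack level, e.g. "  2 : [./test.lua]:12"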
+
end -- of closure
@@ -4668,7 +5139,7 @@ if not modules then modules = { } end modules ['trac-inf'] = {
-- get warnings about assignments. This is more efficient than using rawset
-- and rawget.
-local format = string.format
+local format, lower = string.format, string.lower
local clock = os.gettimeofday or os.clock -- should go in environment
local write_nl = texio.write_nl
@@ -4770,10 +5241,10 @@ function statistics.show(reporter)
-- this code will move
local register = statistics.register
register("luatex banner", function()
- return string.lower(status.banner)
+ return lower(status.banner)
end)
register("control sequences", function()
- return format("%s of %s", status.cs_count, status.hash_size+status.hash_extra)
+ return format("%s of %s + %s", status.cs_count, status.hash_size,status.hash_extra)
end)
register("callbacks", function()
local total, indirect = status.callbacks or 0, status.indirect_callbacks or 0
@@ -4847,7 +5318,7 @@ end
function commands.elapsedtime(name)
stoptiming(name or "whatever")
- tex.sprint(elapsedtime(name or "whatever"))
+ context(elapsedtime(name or "whatever"))
end
@@ -4931,6 +5402,10 @@ local function set(t,what,newvalue)
if type(what) ~= "table" then
return
end
+ if not done then -- catch ... why not set?
+ done = { }
+ t.done = done
+ end
for w, value in next, what do
if value == "" then
value = newvalue
@@ -5167,6 +5642,8 @@ end
if texconfig then
+ -- this happens too late in ini mode but that is no problem
+
local function set(k,v)
v = tonumber(v)
if v then
@@ -5237,7 +5714,7 @@ local report, subreport, status, settarget, setformats, settranslations
local direct, subdirect, writer, pushtarget, poptarget
-if tex and tex.jobname or tex.formatname then
+if tex and (tex.jobname or tex.formatname) then
local valueiskey = { __index = function(t,k) t[k] = k return k end } -- will be helper
@@ -5252,6 +5729,10 @@ if tex and tex.jobname or tex.formatname then
write_nl(target,...)
end
+ newline = function()
+ write_nl(target,"\n")
+ end
+
report = function(a,b,c,...)
if c then
write_nl(target,format("%-15s > %s\n",translations[a],format(formats[b],c,...)))
@@ -5357,6 +5838,10 @@ else
writer = write_nl
+ newline = function()
+ write_nl("\n")
+ end
+
report = function(a,b,c,...)
if c then
write_nl(format("%-15s | %s",a,format(b,c,...)))
@@ -5416,6 +5901,7 @@ logs.settranslations = settranslations
logs.direct = direct
logs.subdirect = subdirect
logs.writer = writer
+logs.newline = newline
-- installer
@@ -5585,21 +6071,58 @@ local real, user, sub
function logs.start_page_number()
real, user, sub = texcount.realpageno, texcount.userpageno, texcount.subpageno
+-- real, user, sub = 0, 0, 0
end
-function logs.stop_page_number()
- if real > 0 then
- if user > 0 then
- if sub > 0 then
- report_pages("flushing realpage %s, userpage %s, subpage %s",real,user,sub)
+local timing = false
+local starttime = nil
+local lasttime = nil
+
+trackers.register("pages.timing", function(v) -- only for myself (diagnostics)
+ starttime = os.clock()
+ timing = true
+end)
+
+function logs.stop_page_number() -- the first page can include the initialization so we omit it from the average
+ if timing then
+ local elapsed, average
+ local stoptime = os.clock()
+ if not lasttime or real < 2 then
+ elapsed = stoptime
+ average = stoptime
+ starttime = stoptime
+ else
+ elapsed = stoptime - lasttime
+ average = (stoptime - starttime) / (real - 1)
+ end
+ lasttime = stoptime
+ if real > 0 then
+ if user > 0 then
+ if sub > 0 then
+ report_pages("flushing realpage %s, userpage %s, subpage %s, time %0.04f / %0.04f",real,user,sub,elapsed,average)
+ else
+ report_pages("flushing realpage %s, userpage %s, time %0.04f / %0.04f",real,user,elapsed,average)
+ end
else
- report_pages("flushing realpage %s, userpage %s",real,user)
+ report_pages("flushing realpage %s, time %0.04f / %0.04f",real,elapsed,average)
end
else
- report_pages("flushing realpage %s",real)
+ report_pages("flushing page, time %0.04f / %0.04f",elapsed,average)
end
else
- report_pages("flushing page")
+ if real > 0 then
+ if user > 0 then
+ if sub > 0 then
+ report_pages("flushing realpage %s, userpage %s, subpage %s",real,user,sub)
+ else
+ report_pages("flushing realpage %s, userpage %s",real,user)
+ end
+ else
+ report_pages("flushing realpage %s",real)
+ end
+ else
+ report_pages("flushing page")
+ end
end
logs.flush()
end
@@ -5681,6 +6204,13 @@ local function reportbanner(t)
end
end
+local function reportversion(t)
+ local banner = t.banner
+ if banner then
+ t.report(banner)
+ end
+end
+
local function reporthelp(t,...)
local helpinfo = t.helpinfo
if type(helpinfo) == "string" then
@@ -5707,6 +6237,7 @@ function logs.application(t)
t.report = logs.reporter(t.name)
t.help = function(...) reportbanner(t) ; reporthelp(t,...) ; reportinfo(t) end
t.identify = function() reportbanner(t) end
+ t.version = function() reportversion(t) end
return t
end
@@ -5772,6 +6303,11 @@ else
end
end
+-- do we still need io.flush then?
+
+io.stdout:setvbuf('no')
+io.stderr:setvbuf('no')
+
end -- of closure
@@ -6277,6 +6813,10 @@ if not modules then modules = { } end modules ['lxml-tab'] = {
-- stripping spaces from e.g. cont-en.xml saves .2 sec runtime so it's not worth the
-- trouble
+-- todo: when serializing optionally remap named entities to hex (if known in char-ent.lua)
+-- maybe when letter -> utf, else name .. then we need an option to the serializer .. a bit
+-- of work so we delay this till we cleanup
+
local trace_entities = false trackers.register("xml.entities", function(v) trace_entities = v end)
local report_xml = logs and logs.reporter("xml","core") or function(...) print(format(...)) end
@@ -6301,10 +6841,11 @@ xml = xml or { }
local xml = xml
+local utf = unicode.utf8
local concat, remove, insert = table.concat, table.remove, table.insert
local type, next, setmetatable, getmetatable, tonumber = type, next, setmetatable, getmetatable, tonumber
local format, lower, find, match, gsub = string.format, string.lower, string.find, string.match, string.gsub
-local utfchar = unicode.utf8.char
+local utfchar, utffind, utfgsub = utf.char, utf.find, utf.gsub
local lpegmatch = lpeg.match
local P, S, R, C, V, C, Cs = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.V, lpeg.C, lpeg.Cs
@@ -6417,9 +6958,22 @@ element.</p>
local nsremap, resolvens = xml.xmlns, xml.resolvens
-local stack, top, dt, at, xmlns, errorstr, entities = { }, { }, { }, { }, { }, nil, { }
-local strip, cleanup, utfize, resolve, resolve_predefined, unify_predefined = false, false, false, false, false, false
-local dcache, hcache, acache = { }, { }, { }
+local stack = { }
+local top = { }
+local dt = { }
+local at = { }
+local xmlns = { }
+local errorstr = nil
+local entities = { }
+local strip = false
+local cleanup = false
+local utfize = false
+local resolve_predefined = false
+local unify_predefined = false
+
+local dcache = { }
+local hcache = { }
+local acache = { }
local mt = { }
@@ -6585,7 +7139,7 @@ local predefined_unified = {
[42] = "&quot;",
[47] = "&apos;",
[74] = "&lt;",
- [76] = "&gr;",
+ [76] = "&gt;",
}
local predefined_simplified = {
@@ -6596,6 +7150,58 @@ local predefined_simplified = {
[76] = ">", gt = ">",
}
+local nofprivates = 0xF0000 -- shared but seldom used
+
+local privates_u = { -- unescaped
+ [ [[&]] ] = "&amp;",
+ [ [["]] ] = "&quot;",
+ [ [[']] ] = "&apos;",
+ [ [[<]] ] = "&lt;",
+ [ [[>]] ] = "&gt;",
+}
+
+local privates_p = {
+}
+
+local privates_n = {
+ -- keeps track of defined ones
+}
+
+local function escaped(s)
+ if s == "" then
+ return ""
+ else -- if utffind(s,privates_u) then
+ return (utfgsub(s,".",privates_u))
+ -- else
+ -- return s
+ end
+end
+
+local function unescaped(s)
+ local p = privates_n[s]
+ if not p then
+ nofprivates = nofprivates + 1
+ p = utfchar(nofprivates)
+ privates_n[s] = p
+ s = "&" .. s .. ";" -- todo: use char-ent to map to hex
+ privates_u[p] = s
+ privates_p[p] = s
+ end
+ return p
+end
+
+local function unprivatized(s,resolve)
+ if s == "" then
+ return ""
+ else
+ return (utfgsub(s,".",privates_p))
+ end
+end
+
+xml.privatetoken = unescaped
+xml.unprivatized = unprivatized
+xml.privatecodes = privates_n
+
local function handle_hex_entity(str)
local h = hcache[str]
if not h then
@@ -6637,7 +7243,7 @@ local function handle_dec_entity(str)
if not n then
report_xml("utfize, ignoring dec entity &#%s;",str)
elseif trace_entities then
- report_xml("utfize, converting dec entity &#%s; into %s",str,h)
+ report_xml("utfize, converting dec entity &#%s; into %s",str,d)
end
else
if trace_entities then
@@ -6658,34 +7264,44 @@ local function handle_any_entity(str)
if not a then
a = resolve_predefined and predefined_simplified[str]
if a then
- -- one of the predefined
- elseif type(resolve) == "function" then
- a = resolve(str) or entities[str]
- else
- a = entities[str]
- end
- if a then
if trace_entities then
- report_xml("resolved entity &%s; -> %s (internal)",str,a)
+ report_xml("resolved entity &%s; -> %s (predefined)",str,a)
end
- a = lpegmatch(parsedentity,a) or a
else
- local unknown_any_entity = placeholders.unknown_any_entity
- if unknown_any_entity then
- a = unknown_any_entity(str) or ""
+ if type(resolve) == "function" then
+ a = resolve(str) or entities[str]
+ else
+ a = entities[str]
end
if a then
+ if type(a) == "function" then
+ if trace_entities then
+ report_xml("expanding entity &%s; (function)",str)
+ end
+ a = a(str) or ""
+ end
+ a = lpegmatch(parsedentity,a) or a -- for nested
if trace_entities then
- report_xml("resolved entity &%s; -> %s (external)",str,a)
+ report_xml("resolved entity &%s; -> %s (internal)",str,a)
end
else
- if trace_entities then
- report_xml("keeping entity &%s;",str)
+ local unknown_any_entity = placeholders.unknown_any_entity
+ if unknown_any_entity then
+ a = unknown_any_entity(str) or ""
end
- if str == "" then
- a = "&error;"
+ if a then
+ if trace_entities then
+ report_xml("resolved entity &%s; -> %s (external)",str,a)
+ end
else
- a = "&" .. str .. ";"
+ if trace_entities then
+ report_xml("keeping entity &%s;",str)
+ end
+ if str == "" then
+ a = "&error;"
+ else
+ a = "&" .. str .. ";"
+ end
end
end
end
@@ -6700,18 +7316,25 @@ local function handle_any_entity(str)
else
local a = acache[str]
if not a then
- if trace_entities then
- report_xml("found entity &%s;",str)
- end
a = resolve_predefined and predefined_simplified[str]
if a then
-- one of the predefined
acache[str] = a
+ if trace_entities then
+ report_xml("entity &%s; becomes %s",str,tostring(a))
+ end
elseif str == "" then
+ if trace_entities then
+ report_xml("invalid entity &%s;",str)
+ end
a = "&error;"
acache[str] = a
else
- a = "&" .. str .. ";"
+ if trace_entities then
+ report_xml("entity &%s; is made private",str)
+ end
+ -- a = "&" .. str .. ";"
+ a = unescaped(str)
acache[str] = a
end
end
@@ -6761,7 +7384,7 @@ local value = (squote * Cs((entity + (1 - squote))^0) * squote) + (dq
local endofattributes = slash * close + close -- recovery of flaky html
local whatever = space * name * optionalspace * equal
-local wrongvalue = C(P(1-whatever-close)^1 + P(1-close)^1) / attribute_value_error
+----- wrongvalue = C(P(1-whatever-close)^1 + P(1-close)^1) / attribute_value_error
----- wrongvalue = C(P(1-whatever-endofattributes)^1 + P(1-endofattributes)^1) / attribute_value_error
----- wrongvalue = C(P(1-space-endofattributes)^1) / attribute_value_error
local wrongvalue = Cs(P(entity + (1-space-endofattributes))^1) / attribute_value_error
@@ -6796,6 +7419,8 @@ local function normalentity(k,v ) entities[k] = v end
local function systementity(k,v,n) entities[k] = v end
local function publicentity(k,v,n) entities[k] = v end
+-- todo: separate dtd parser
+
local begindoctype = open * P("!DOCTYPE")
local enddoctype = close
local beginset = P("[")
@@ -6803,17 +7428,22 @@ local endset = P("]")
local doctypename = C((1-somespace-close)^0)
local elementdoctype = optionalspace * P("<!ELEMENT") * (1-close)^0 * close
+local basiccomment = begincomment * ((1 - endcomment)^0) * endcomment
+
local normalentitytype = (doctypename * somespace * value)/normalentity
local publicentitytype = (doctypename * somespace * P("PUBLIC") * somespace * value)/publicentity
local systementitytype = (doctypename * somespace * P("SYSTEM") * somespace * value * somespace * P("NDATA") * somespace * doctypename)/systementity
local entitydoctype = optionalspace * P("<!ENTITY") * somespace * (systementitytype + publicentitytype + normalentitytype) * optionalspace * close
-local doctypeset = beginset * optionalspace * P(elementdoctype + entitydoctype + space)^0 * optionalspace * endset
+-- we accept comments in doctypes
+
+local doctypeset = beginset * optionalspace * P(elementdoctype + entitydoctype + basiccomment + space)^0 * optionalspace * endset
local definitiondoctype= doctypename * somespace * doctypeset
local publicdoctype = doctypename * somespace * P("PUBLIC") * somespace * value * somespace * value * somespace * doctypeset
local systemdoctype = doctypename * somespace * P("SYSTEM") * somespace * value * somespace * doctypeset
local simpledoctype = (1-close)^1 -- * balanced^0
local somedoctype = C((somespace * (publicdoctype + systemdoctype + definitiondoctype + simpledoctype) * optionalspace)^0)
local instruction = (spacing * begininstruction * someinstruction * endinstruction) / function(...) add_special("@pi@",...) end
local comment = (spacing * begincomment * somecomment * endcomment ) / function(...) add_special("@cm@",...) end
@@ -6845,17 +7475,30 @@ local grammar_unparsed_text = P { "preamble",
children = unparsedtext + V("parent") + emptyelement + comment + cdata + instruction,
}
--- maybe we will add settinsg to result as well
+-- maybe we will add settings to result as well
-local function xmlconvert(data, settings)
- settings = settings or { } -- no_root strip_cm_and_dt given_entities parent_root error_handler
- strip = settings.strip_cm_and_dt
- utfize = settings.utfize_entities
- resolve = settings.resolve_entities
+local function _xmlconvert_(data, settings)
+ settings = settings or { } -- no_root strip_cm_and_dt given_entities parent_root error_handler
+ --
+ strip = settings.strip_cm_and_dt
+ utfize = settings.utfize_entities
+ resolve = settings.resolve_entities
resolve_predefined = settings.resolve_predefined_entities -- in case we have escaped entities
- unify_predefined = settings.unify_predefined_entities -- &#038; -> &amp;
- cleanup = settings.text_cleanup
- stack, top, at, xmlns, errorstr, entities = { }, { }, { }, { }, nil, settings.entities or { }
+ unify_predefined = settings.unify_predefined_entities -- &#038; -> &amp;
+ cleanup = settings.text_cleanup
+ entities = settings.entities or { }
+ --
+ if utfize == nil then
+ settings.utfize_entities = true
+ utfize = true
+ end
+ if resolve_predefined == nil then
+ settings.resolve_predefined_entities = true
+ resolve_predefined = true
+ end
+ --
+ --
+ stack, top, at, xmlns, errorstr = { }, { }, { }, { }, nil
acache, hcache, dcache = { }, { }, { } -- not stored
reported_attribute_errors = { }
if settings.parent_root then
@@ -6893,7 +7536,7 @@ local function xmlconvert(data, settings)
else
errorhandler = errorhandler or xml.errorhandler
if errorhandler then
- xml.errorhandler("load",errorstr)
+ xml.errorhandler(format("load error: %s",errorstr))
end
end
else
@@ -6907,7 +7550,7 @@ local function xmlconvert(data, settings)
local v = rdt[k]
if type(v) == "table" and not v.special then -- always table -)
result.ri = k -- rootindex
-v.__p__ = result -- new, experiment, else we cannot go back to settings, we need to test this !
+ v.__p__ = result -- new, experiment, else we cannot go back to settings, we need to test this !
break
end
end
@@ -6915,16 +7558,42 @@ v.__p__ = result -- new, experiment, else we cannot go back to settings, we nee
if errorstr and errorstr ~= "" then
result.error = true
end
+ result.statistics = {
+ entities = {
+ decimals = dcache,
+ hexadecimals = hcache,
+ names = acache,
+ }
+ }
+ strip, utfize, resolve, resolve_predefined = nil, nil, nil, nil
+ unify_predefined, cleanup, entities = nil, nil, nil
+ stack, top, at, xmlns, errorstr = nil, nil, nil, nil, nil
+ acache, hcache, dcache = nil, nil, nil
+ reported_attribute_errors, mt, errorhandler = nil, nil, nil
return result
end
+-- Because we can have a crash (stack issues) with faulty xml, we wrap this one
+-- in a protector:
+
+function xmlconvert(data,settings)
+ local ok, result = pcall(function() return _xmlconvert_(data,settings) end)
+ if ok then
+ return result
+ else
+ return _xmlconvert_("")
+ end
+end
+
xml.convert = xmlconvert
-function xml.inheritedconvert(data,xmldata)
+function xml.inheritedconvert(data,xmldata) -- xmldata is parent
local settings = xmldata.settings
- settings.parent_root = xmldata -- to be tested
+ if settings then
+ settings.parent_root = xmldata -- to be tested
+ end
-- settings.no_root = true
- local xc = xmlconvert(data,settings)
+ local xc = xmlconvert(data,settings) -- hm, we might need to locate settings
-- xc.settings = nil
-- xc.entities = nil
-- xc.special = nil
@@ -6953,7 +7622,7 @@ function xml.is_valid(root)
return root and not root.error
end
-xml.errorhandler = report
+xml.errorhandler = report_xml
--[[ldx--
<p>We cannot load an <l n='lpeg'/> from a filehandle so we need to load
@@ -7032,18 +7701,15 @@ alternative.</p>
function xml.checkbom(root) -- can be made faster
if root.ri then
- local dt, found = root.dt, false
+ local dt = root.dt
for k=1,#dt do
local v = dt[k]
if type(v) == "table" and v.special and v.tg == "@pi@" and find(v.dt[1],"xml.*version=") then
- found = true
- break
+ return
end
end
- if not found then
- insert(dt, 1, { special=true, ns="", tg="@pi@", dt = { "xml version='1.0' standalone='yes'"} } )
- insert(dt, 2, "\n" )
- end
+ insert(dt, 1, { special=true, ns="", tg="@pi@", dt = { "xml version='1.0' standalone='yes'"} } )
+ insert(dt, 2, "\n" )
end
end
@@ -7054,14 +7720,14 @@ and then handle the lot.</p>
-- new experimental reorganized serialize
-local function verbose_element(e,handlers)
+local function verbose_element(e,handlers) -- options
local handle = handlers.handle
local serialize = handlers.serialize
local ens, etg, eat, edt, ern = e.ns, e.tg, e.at, e.dt, e.rn
local ats = eat and next(eat) and { }
if ats then
for k,v in next, eat do
- ats[#ats+1] = format('%s=%q',k,v)
+ ats[#ats+1] = format('%s=%q',k,escaped(v))
end
end
if ern and trace_entities and ern ~= ens then
@@ -7077,7 +7743,7 @@ local function verbose_element(e,handlers)
for i=1,#edt do
local e = edt[i]
if type(e) == "string" then
- handle(e)
+ handle(escaped(e))
else
serialize(e,handlers)
end
@@ -7098,11 +7764,11 @@ local function verbose_element(e,handlers)
handle("<",etg,">")
end
for i=1,#edt do
- local ei = edt[i]
- if type(ei) == "string" then
- handle(ei)
+ local e = edt[i]
+ if type(e) == "string" then
+ handle(escaped(e)) -- option: hexify escaped entities
else
- serialize(ei,handlers)
+ serialize(e,handlers)
end
end
handle("</",etg,">")
@@ -7137,7 +7803,7 @@ local function verbose_root(e,handlers)
end
local function verbose_text(e,handlers)
- handlers.handle(e)
+ handlers.handle(escaped(e))
end
local function verbose_document(e,handlers)
@@ -7265,20 +7931,33 @@ local result
local xmlfilehandler = newhandlers {
name = "file",
- initialize = function(name) result = io.open(name,"wb") return result end,
- finalize = function() result:close() return true end,
- handle = function(...) result:write(...) end,
+ initialize = function(name)
+ result = io.open(name,"wb")
+ return result
+ end,
+ finalize = function()
+ result:close()
+ return true
+ end,
+ handle = function(...)
+ result:write(...)
+ end,
}
-- no checking on writeability here but not faster either
--
-- local xmlfilehandler = newhandlers {
--- initialize = function(name) io.output(name,"wb") return true end,
--- finalize = function() io.close() return true end,
+-- initialize = function(name)
+-- io.output(name,"wb")
+-- return true
+-- end,
+-- finalize = function()
+-- io.close()
+-- return true
+-- end,
-- handle = io.write,
-- }
-
function xml.save(root,name)
serialize(root,xmlfilehandler,name)
end
@@ -7287,28 +7966,34 @@ local result
local xmlstringhandler = newhandlers {
name = "string",
- initialize = function() result = { } return result end,
- finalize = function() return concat(result) end,
- handle = function(...) result[#result+1] = concat { ... } end
+ initialize = function()
+ result = { }
+ return result
+ end,
+ finalize = function()
+ return concat(result)
+ end,
+ handle = function(...)
+ result[#result+1] = concat { ... }
+ end,
}
local function xmltostring(root) -- 25% overhead due to collecting
- if root then
- if type(root) == 'string' then
- return root
- else -- if next(root) then -- next is faster than type (and >0 test)
- return serialize(root,xmlstringhandler) or ""
- end
+ if not root then
+ return ""
+ elseif type(root) == 'string' then
+ return root
+ else -- if next(root) then -- next is faster than type (and >0 test)
+ return serialize(root,xmlstringhandler) or ""
end
- return ""
end
-local function xmltext(root) -- inline
+local function __tostring(root) -- inline
return (root and xmltostring(root)) or ""
end
initialize_mt = function(root) -- redefinition
- mt = { __tostring = xmltext, __index = root }
+ mt = { __tostring = __tostring, __index = root }
end
xml.defaulthandlers = handlers
@@ -7428,7 +8113,7 @@ xml.tocdata(e,"error")
--ldx]]--
function xml.tocdata(e,wrapper)
- local whatever = xmltostring(e.dt)
+ local whatever = type(e) == "table" and xmltostring(e.dt) or e or ""
if wrapper then
whatever = format("<%s>%s</%s>",wrapper,whatever,wrapper)
end
@@ -7620,8 +8305,8 @@ apply_axis['child'] = function(list)
c = c + 1
collected[c] = dk
dk.ni = k -- refresh
- en = en + 1
- dk.ei = en
+ en = en + 1
+ dk.ei = en
end
end
ll.en = en
@@ -7649,6 +8334,7 @@ local function collect(list,collected,c)
end
return c
end
+
apply_axis['descendant'] = function(list)
local collected, c = { }, 0
for l=1,#list do
@@ -7940,6 +8626,12 @@ local lp_or = P("|") / " or "
local lp_and = P("&") / " and "
local lp_builtin = P (
+ P("text") / "(ll.dt[1] or '')" + -- fragile
+ P("content") / "ll.dt" +
+ -- P("name") / "(ll.ns~='' and ll.ns..':'..ll.tg)" +
+ P("name") / "((ll.ns~='' and ll.ns..':'..ll.tg) or ll.tg)" +
+ P("tag") / "ll.tg" +
+ P("position") / "l" + -- is element in finalizer
P("firstindex") / "1" +
P("lastindex") / "(#ll.__p__.dt or 1)" +
P("firstelement") / "1" +
@@ -7947,15 +8639,11 @@ local lp_builtin = P (
P("first") / "1" +
P("last") / "#list" +
P("rootposition") / "order" +
- P("position") / "l" + -- is element in finalizer
P("order") / "order" +
P("element") / "(ll.ei or 1)" +
P("index") / "(ll.ni or 1)" +
P("match") / "(ll.mi or 1)" +
- P("text") / "(ll.dt[1] or '')" +
- -- P("name") / "(ll.ns~='' and ll.ns..':'..ll.tg)" +
- P("name") / "((ll.ns~='' and ll.ns..':'..ll.tg) or ll.tg)" +
- P("tag") / "ll.tg" +
+ -- P("namespace") / "ll.ns" +
P("ns") / "ll.ns"
) * ((spaces * P("(") * spaces * P(")"))/"")
@@ -7979,11 +8667,11 @@ local lp_function = C(R("az","AZ","__")^1) * P("(") / function(t) -- todo: bett
end
end
-local lparent = lpeg.P("(")
-local rparent = lpeg.P(")")
+local lparent = P("(")
+local rparent = P(")")
local noparent = 1 - (lparent+rparent)
-local nested = lpeg.P{lparent * (noparent + lpeg.V(1))^0 * rparent}
-local value = lpeg.P(lparent * lpeg.C((noparent + nested)^0) * rparent) -- lpeg.P{"("*C(((1-S("()"))+V(1))^0)*")"}
+local nested = P{lparent * (noparent + V(1))^0 * rparent}
+local value = P(lparent * C((noparent + nested)^0) * rparent) -- P{"("*C(((1-S("()"))+V(1))^0)*")"}
local lp_child = Cc("expr.child(ll,'") * R("az","AZ","--","__")^1 * Cc("')")
local lp_number = S("+-") * R("09")^1
@@ -8135,7 +8823,10 @@ local special_1 = P("*") * Cc(register_auto_descendant) * Cc(register_all_nodes
local special_2 = P("/") * Cc(register_auto_self)
local special_3 = P("") * Cc(register_auto_self)
-local pathparser = Ct { "patterns", -- can be made a bit faster by moving pattern outside
+local no_nextcolon = P(-1) + #(1-P(":")) -- newer lpeg needs the P(-1)
+local no_nextlparent = P(-1) + #(1-P("(")) -- newer lpeg needs the P(-1)
+
+local pathparser = Ct { "patterns", -- can be made a bit faster by moving some patterns outside
patterns = spaces * V("protocol") * spaces * (
( V("special") * spaces * P(-1) ) +
@@ -8164,10 +8855,8 @@ local pathparser = Ct { "patterns", -- can be made a bit faster by moving patter
shortcuts = V("shortcuts_a") * (spaces * "/" * spaces * V("shortcuts_a"))^0,
s_descendant_or_self = (P("***/") + P("/")) * Cc(register_descendant_or_self), --- *** is a bonus
- -- s_descendant_or_self = P("/") * Cc(register_descendant_or_self),
s_descendant = P("**") * Cc(register_descendant),
- s_child = P("*") * #(1-P(":")) * Cc(register_child ),
--- s_child = P("*") * #(P("/")+P(-1)) * Cc(register_child ),
+ s_child = P("*") * no_nextcolon * Cc(register_child ),
s_parent = P("..") * Cc(register_parent ),
s_self = P("." ) * Cc(register_self ),
s_root = P("^^") * Cc(register_root ),
@@ -8194,13 +8883,13 @@ local pathparser = Ct { "patterns", -- can be made a bit faster by moving patter
expressions = expression / register_expression,
letters = R("az")^1,
- name = (1-lpeg.S("/[]()|:*!"))^1,
+ name = (1-S("/[]()|:*!"))^1, -- make inline
negate = P("!") * Cc(false),
nodefunction = V("negate") + P("not") * Cc(false) + Cc(true),
nodetest = V("negate") + Cc(true),
nodename = (V("negate") + Cc(true)) * spaces * ((V("wildnodename") * P(":") * V("wildnodename")) + (Cc(false) * V("wildnodename"))),
- wildnodename = (C(V("name")) + P("*") * Cc(false)) * #(1-P("(")),
+ wildnodename = (C(V("name")) + P("*") * Cc(false)) * no_nextlparent,
nodeset = spaces * Ct(V("nodename") * (spaces * P("|") * spaces * V("nodename"))^0) * spaces,
finalizer = (Cb("protocol") * P("/")^-1 * C(V("name")) * arguments * P(-1)) / register_finalizer,
@@ -8364,7 +9053,7 @@ local function profiled_apply(list,parsed,nofparsed,order)
elseif kind == "expression" then
collected = apply_expression(collected,pi.evaluator,order)
elseif kind == "finalizer" then
- collected = pi.finalizer(collected)
+ collected = pi.finalizer(collected) -- no check on # here
p.matched = p.matched + 1
p.finalized = p.finalized + 1
return collected
@@ -8497,9 +9186,9 @@ end
expressions.child = function(e,pattern)
return applylpath(e,pattern) -- todo: cache
end
-expressions.count = function(e,pattern)
+expressions.count = function(e,pattern) -- what if pattern == empty or nil
local collected = applylpath(e,pattern) -- todo: cache
- return (collected and #collected) or 0
+ return pattern and (collected and #collected) or 0
end
-- external
@@ -8508,7 +9197,7 @@ expressions.oneof = function(s,...) -- slow
local t = {...} for i=1,#t do if s == t[i] then return true end end return false
end
expressions.error = function(str)
- xml.errorhandler("unknown function in lpath expression",tostring(str or "?"))
+ xml.errorhandler(format("unknown function in lpath expression: %s",tostring(str or "?")))
return false
end
expressions.undefined = function(s)
@@ -8534,6 +9223,23 @@ expressions.lower = lower
expressions.number = tonumber
expressions.boolean = toboolean
+function expressions.contains(str,pattern)
+ local t = type(str)
+ if t == "string" then
+ if find(str,pattern) then
+ return true
+ end
+ elseif t == "table" then
+ for i=1,#str do
+ local d = str[i]
+ if type(d) == "string" and find(d,pattern) then
+ return true
+ end
+ end
+ end
+ return false
+end
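+
+-- Illustrative behaviour, not part of the original source:
+--
+-- contains("foobar","foo") -- true (plain string find)
+-- contains({ "x", "foobar" },"foo") -- true (first matching string entry)
+-- contains(123,"foo") -- false (neither string nor table)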
+
-- user interface
local function traverse(root,pattern,handle)
@@ -8856,10 +9562,12 @@ local xml = xml
local xmlconvert, xmlcopy, xmlname = xml.convert, xml.copy, xml.name
local xmlinheritedconvert = xml.inheritedconvert
local xmlapplylpath = xml.applylpath
+local xmlfilter = xml.filter
local type, setmetatable, getmetatable = type, setmetatable, getmetatable
-local insert, remove, fastcopy = table.insert, table.remove, table.fastcopy
-local gmatch, gsub = string.gmatch, string.gsub
+local insert, remove, fastcopy, concat = table.insert, table.remove, table.fastcopy, table.concat
+local gmatch, gsub, format = string.gmatch, string.gsub, string.format
+local utfbyte = utf.byte
local function report(what,pattern,c,e)
report_xml("%s element '%s' (root: '%s', position: %s, index: %s, pattern: %s)",what,xmlname(e),xmlname(e.__p__),c,e.ni,pattern)
@@ -9004,7 +9712,7 @@ local function xmltoelement(whatever,root)
end
local element
if type(whatever) == "string" then
- element = xmlinheritedconvert(whatever,root)
+ element = xmlinheritedconvert(whatever,root) -- beware, not really a root
else
element = whatever -- we assume a table
end
@@ -9031,18 +9739,30 @@ local function copiedelement(element,newparent)
end
function xml.delete(root,pattern)
- local collected = xmlapplylpath(root,pattern)
- if collected then
- for c=1,#collected do
- local e = collected[c]
- local p = e.__p__
- if p then
- if trace_manipulations then
- report('deleting',pattern,c,e)
+ if not pattern or pattern == "" then
+ local p = root.__p__
+ if p then
+ if trace_manipulations then
+ report('deleting',"--",1,root)
+ end
+ local d = p.dt
+ remove(d,root.ni)
+ redo_ni(d) -- can be made faster and inlined
+ end
+ else
+ local collected = xmlapplylpath(root,pattern)
+ if collected then
+ for c=1,#collected do
+ local e = collected[c]
+ local p = e.__p__
+ if p then
+ if trace_manipulations then
+ report('deleting',pattern,c,e)
+ end
+ local d = p.dt
+ remove(d,e.ni)
+ redo_ni(d) -- can be made faster and inlined
end
- local d = p.dt
- remove(d,e.ni)
- redo_ni(d) -- can be made faster and inlined
end
end
end
@@ -9105,32 +9825,39 @@ end
local function inject_element(root,pattern,whatever,prepend)
local element = root and xmltoelement(whatever,root)
local collected = element and xmlapplylpath(root,pattern)
- if collected then
- for c=1,#collected do
- local e = collected[c]
- local r = e.__p__
- local d, k, rri = r.dt, e.ni, r.ri
- local edt = (rri and d[rri].dt) or (d and d[k] and d[k].dt)
- if edt then
- local be, af
- local cp = copiedelement(element,e)
- if prepend then
- be, af = cp, edt
- else
- be, af = edt, cp
- end
- local bn = #be
- for i=1,#af do
- bn = bn + 1
- be[bn] = af[i]
- end
- if rri then
- r.dt[rri].dt = be
- else
- d[k].dt = be
- end
- redo_ni(d)
+ local function inject_e(e)
+ local r = e.__p__
+ local d, k, rri = r.dt, e.ni, r.ri
+ local edt = (rri and d[rri].dt) or (d and d[k] and d[k].dt)
+ if edt then
+ local be, af
+ local cp = copiedelement(element,e)
+ if prepend then
+ be, af = cp, edt
+ else
+ be, af = edt, cp
end
+ local bn = #be
+ for i=1,#af do
+ bn = bn + 1
+ be[bn] = af[i]
+ end
+ if rri then
+ r.dt[rri].dt = be
+ else
+ d[k].dt = be
+ end
+ redo_ni(d)
+ end
+ end
+ if not collected then
+ -- nothing
+ elseif collected.tg then
+ -- first or so
+ inject_e(collected)
+ else
+ for c=1,#collected do
+ inject_e(collected[c])
end
end
end
@@ -9138,16 +9865,23 @@ end
local function insert_element(root,pattern,whatever,before) -- todo: element als functie
local element = root and xmltoelement(whatever,root)
local collected = element and xmlapplylpath(root,pattern)
- if collected then
+ local function insert_e(e)
+ local r = e.__p__
+ local d, k = r.dt, e.ni
+ if not before then
+ k = k + 1
+ end
+ insert(d,k,copiedelement(element,r))
+ redo_ni(d)
+ end
+ if not collected then
+ -- nothing
+ elseif collected.tg then
+ -- first or so
+ insert_e(collected)
+ else
for c=1,#collected do
- local e = collected[c]
- local r = e.__p__
- local d, k = r.dt, e.ni
- if not before then
- k = k + 1
- end
- insert(d,k,copiedelement(element,r))
- redo_ni(d)
+ insert_e(collected[c])
end
end
end
@@ -9346,6 +10080,75 @@ function xml.remapname(root, pattern, newtg, newns, newrn)
end
--[[ldx--
+<p>Helper (for q2p).</p>
+--ldx]]--
+
+function xml.cdatatotext(e)
+ local dt = e.dt
+ if #dt == 1 then
+ local first = dt[1]
+ if first.tg == "@cd@" then
+ e.dt = first.dt
+ end
+ else
+ -- maybe option
+ end
+end
+
+xml.builtinentities = table.tohash { "amp", "quot", "apos", "lt", "gt" } -- used often so share
+
+local entities = characters and characters.entities or nil
+local builtinentities = xml.builtinentities
+
+function xml.addentitiesdoctype(root,option) -- we could also have a 'resolve' i.e. inline hex
+ if not entities then
+ require("char-ent")
+ entities = characters.entities
+ end
+ if entities and root and root.tg == "@rt@" and root.statistics then
+ local list = { }
+ local hexify = option == "hexadecimal"
+ for k, v in table.sortedhash(root.statistics.entities.names) do
+ if not builtinentities[k] then
+ local e = entities[k]
+ if not e then
+ e = format("[%s]",k)
+ elseif hexify then
+ e = format("&#%05X;",utfbyte(k))
+ end
+ list[#list+1] = format(" <!ENTITY %s %q >",k,e)
+ end
+ end
+ local dt = root.dt
+ local n = dt[1].tg == "@pi@" and 2 or 1
+ if #list > 0 then
+ insert(dt, n, { "\n" })
+ insert(dt, n, {
+ tg = "@dt@", -- beware, doctype is unparsed
+ dt = { format("Something [\n%s\n] ",concat(list)) },
+ ns = "",
+ special = true,
+ })
+ insert(dt, n, { "\n\n" })
+ else
+ -- insert(dt, n, { table.serialize(root.statistics) })
+ end
+ end
+end
+
+-- local str = [==[
+-- <?xml version='1.0' standalone='yes' ?>
+-- <root>
+-- <a>test &nbsp; test &#123; test</a>
+-- <b><![CDATA[oeps]]></b>
+-- </root>
+-- ]==]
+--
+-- local x = xml.convert(str)
+-- xml.addentitiesdoctype(x,"hexadecimal")
+-- print(x)
+
+--[[ldx--
<p>Here are a few synonyms.</p>
--ldx]]--
@@ -9378,6 +10181,53 @@ xml.remap_tag = xml.remaptag obsolete.remap_tag
xml.remap_name = xml.remapname obsolete.remap_name = xml.remapname
xml.remap_namespace = xml.remapnamespace obsolete.remap_namespace = xml.remapnamespace
+-- new (probably ok)
+
+function xml.cdata(e)
+ if e then
+ local dt = e.dt
+ if dt and #dt == 1 then
+ local first = dt[1]
+ return first.tg == "@cd@" and first.dt[1] or ""
+ end
+ end
+ return ""
+end
+
+function xml.finalizers.xml.cdata(collected)
+ if collected then
+ local e = collected[1]
+ if e then
+ local dt = e.dt
+ if dt and #dt == 1 then
+ local first = dt[1]
+ return first.tg == "@cd@" and first.dt[1] or ""
+ end
+ end
+ end
+ return ""
+end
+
+function xml.insertcomment(e,str,n) -- also insertcdata
+ table.insert(e.dt,n or 1,{
+ tg = "@cm@",
+ ns = "",
+ special = true,
+ at = { },
+ dt = { str },
+ })
+end
+
+function xml.setcdata(e,str) -- also setcomment
+ e.dt = { {
+ tg = "@cd@",
+ ns = "",
+ special = true,
+ at = { },
+ dt = { str },
+ } }
+end
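+
+-- Illustrative example, not part of the original source (assumes the usual
+-- descendant lookup for a bare tag name in the lpath pattern):
+--
+-- local x = xml.convert("<root><a>test</a></root>")
+-- for e in xml.collected(x,"a") do xml.setcdata(e,"oeps") end
+-- -- <a> now serializes as <a><![CDATA[oeps]]></a>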
+
end -- of closure
@@ -9391,15 +10241,17 @@ if not modules then modules = { } end modules ['lxml-xml'] = {
license = "see context related readme files"
}
-local concat = string.concat
+local concat = table.concat
+local find = string.find
local xml = xml
-local finalizers = xml.finalizers.xml
-local xmlfilter = xml.filter -- we could inline this one for speed
-local xmltostring = xml.tostring
-local xmlserialize = xml.serialize
-local xmlcollected = xml.collected
+local finalizers = xml.finalizers.xml
+local xmlfilter = xml.filter -- we could inline this one for speed
+local xmltostring = xml.tostring
+local xmlserialize = xml.serialize
+local xmlcollected = xml.collected
+local xmlnewhandlers = xml.newhandlers
local function first(collected) -- wrong ?
return collected and collected[1]
@@ -9413,6 +10265,21 @@ local function all(collected)
return collected
end
+-- local function reverse(collected)
+-- if collected then
+-- local nc = #collected
+-- if nc > 0 then
+-- local reversed, r = { }, 0
+-- for c=nc,1,-1 do
+-- r = r + 1
+-- reversed[r] = collected[c]
+-- end
+-- return reversed
+-- else
+-- return collected
+-- end
+-- end
+-- end
local reverse = table.reversed
@@ -9429,34 +10296,37 @@ local function att(id,name)
end
local function count(collected)
- return (collected and #collected) or 0
+ return collected and #collected or 0
end
local function position(collected,n)
- if collected then
- n = tonumber(n) or 0
- if n < 0 then
- return collected[#collected + n + 1]
- elseif n > 0 then
- return collected[n]
- else
- return collected[1].mi or 0
- end
+ if not collected then
+ return 0
+ end
+ local nc = #collected
+ if nc == 0 then
+ return 0
+ end
+ n = tonumber(n) or 0
+ if n < 0 then
+ return collected[nc + n + 1]
+ elseif n > 0 then
+ return collected[n]
+ else
+ return collected[1].mi or 0
end
end
local function match(collected)
- return (collected and collected[1].mi) or 0 -- match
+ return collected and #collected > 0 and collected[1].mi or 0 -- match
end
local function index(collected)
- if collected then
- return collected[1].ni
- end
+ return collected and #collected > 0 and collected[1].ni or 0 -- 0 is new
end
local function attributes(collected,arguments)
- if collected then
+ if collected and #collected > 0 then
local at = collected[1].at
if arguments then
return at[arguments]
@@ -9467,7 +10337,7 @@ local function attributes(collected,arguments)
end
local function chainattribute(collected,arguments) -- todo: optional levels
- if collected then
+ if collected and #collected > 0 then
local e = collected[1]
while e do
local at = e.at
@@ -9485,108 +10355,169 @@ local function chainattribute(collected,arguments) -- todo: optional levels
return ""
end
-local function raw(collected) -- hybrid
- if collected then
+local function raw(collected) -- hybrid (not much different from text so it might go)
+ if collected and #collected > 0 then
local e = collected[1] or collected
- return (e and xmlserialize(e)) or "" -- only first as we cannot concat function
+ return e and xmltostring(e) or "" -- only first as we cannot concat function
else
return ""
end
end
+--
+
+local xmltexthandler = xmlnewhandlers {
+ name = "string",
+ initialize = function()
+ result = { }
+ return result
+ end,
+ finalize = function()
+ return concat(result)
+ end,
+ handle = function(...)
+ result[#result+1] = concat { ... }
+ end,
+ escape = false,
+}
+
+local function xmltotext(root)
+ local dt = root.dt
+ if not dt then
+ return ""
+ end
+ local nt = #dt -- string or table
+ if nt == 0 then
+ return ""
+ elseif nt == 1 and type(dt[1]) == "string" then
+ return dt[1] -- no escaping of " ' < > &
+ else
+ return xmlserialize(root,xmltexthandler) or ""
+ end
+end
+
+--
+
local function text(collected) -- hybrid
- if collected then
- local e = collected[1] or collected
- return (e and xmltostring(e.dt)) or ""
+ if collected then -- no # test here !
+ local e = collected[1] or collected -- why fallback to element, how about cdata
+ return e and xmltotext(e) or ""
else
return ""
end
end
local function texts(collected)
- if collected then
- local t, n = { }, 0
- for c=1,#collected do
- local e = collected[c]
- if e and e.dt then
- n = n + 1
- t[n] = e.dt
- end
+ if not collected then
+ return { } -- why no nil
+ end
+ local nc = #collected
+ if nc == 0 then
+ return { } -- why no nil
+ end
+ local t, n = { }, 0
+ for c=1,nc do
+ local e = collected[c]
+ if e and e.dt then
+ n = n + 1
+ t[n] = e.dt
end
- return t
end
+ return t
end
local function tag(collected,n)
- if collected then
- local c
- if n == 0 or not n then
- c = collected[1]
- elseif n > 1 then
- c = collected[n]
- else
- c = collected[#collected-n+1]
- end
- return c and c.tg
+ if not collected then
+ return
+ end
+ local nc = #collected
+ if nc == 0 then
+ return
end
+ local c
+ if n == 0 or not n then
+ c = collected[1]
+ elseif n > 1 then
+ c = collected[n]
+ else
+ c = collected[nc-n+1]
+ end
+ return c and c.tg
end
local function name(collected,n)
- if collected then
- local c
- if n == 0 or not n then
- c = collected[1]
- elseif n > 1 then
- c = collected[n]
- else
- c = collected[#collected-n+1]
- end
- if c then
- if c.ns == "" then
- return c.tg
- else
- return c.ns .. ":" .. c.tg
- end
- end
+ if not collected then
+ return
+ end
+ local nc = #collected
+ if nc == 0 then
+ return
+ end
+ local c
+ if n == 0 or not n then
+ c = collected[1]
+ elseif n > 1 then
+ c = collected[n]
+ else
+ c = collected[nc-n+1]
+ end
+ if not c then
+ -- sorry
+ elseif c.ns == "" then
+ return c.tg
+ else
+ return c.ns .. ":" .. c.tg
end
end
local function tags(collected,nonamespace)
- if collected then
- local t, n = { }, 0
- for c=1,#collected do
- local e = collected[c]
- local ns, tg = e.ns, e.tg
- n = n + 1
- if nonamespace or ns == "" then
- t[n] = tg
- else
- t[n] = ns .. ":" .. tg
- end
+ if not collected then
+ return
+ end
+ local nc = #collected
+ if nc == 0 then
+ return
+ end
+ local t, n = { }, 0
+ for c=1,nc do
+ local e = collected[c]
+ local ns, tg = e.ns, e.tg
+ n = n + 1
+ if nonamespace or ns == "" then
+ t[n] = tg
+ else
+ t[n] = ns .. ":" .. tg
end
- return t
end
+ return t
end
-local function empty(collected)
- if collected then
- for c=1,#collected do
- local e = collected[c]
- if e then
- local edt = e.dt
- if edt then
- local n = #edt
- if n == 1 then
- local edk = edt[1]
- local typ = type(edk)
- if typ == "table" then
- return false
- elseif edk ~= "" then -- maybe an extra tester for spacing only
- return false
- end
- elseif n > 1 then
+local function empty(collected,spacesonly)
+ if not collected then
+ return true
+ end
+ local nc = #collected
+ if nc == 0 then
+ return true
+ end
+ for c=1,nc do
+ local e = collected[c]
+ if e then
+ local edt = e.dt
+ if edt then
+ local n = #edt
+ if n == 1 then
+ local edk = edt[1]
+ local typ = type(edk)
+ if typ == "table" then
+ return false
+ elseif edk ~= "" then
+ return false
+ elseif spacesonly and not find(edk,"%S") then
return false
end
+ elseif n > 1 then
+ return false
end
end
end
@@ -9641,14 +10572,14 @@ function xml.raw(id,pattern)
end
end
-function xml.text(id,pattern)
+function xml.text(id,pattern) -- brrr either content or element (when cdata)
if pattern then
-- return text(xmlfilter(id,pattern))
local collected = xmlfilter(id,pattern)
- return (collected and xmltostring(collected[1].dt)) or ""
+ return collected and #collected > 0 and xmltotext(collected[1]) or ""
elseif id then
-- return text(id)
- return xmltostring(id.dt) or ""
+ return xmltotext(id) or ""
else
return ""
end
@@ -9656,6 +10587,8 @@ end
xml.content = text
+--
+
function xml.position(id,pattern,n) -- element
return position(xmlfilter(id,pattern),n)
end
@@ -9664,8 +10597,8 @@ function xml.match(id,pattern) -- number
return match(xmlfilter(id,pattern))
end
-function xml.empty(id,pattern)
- return empty(xmlfilter(id,pattern))
+function xml.empty(id,pattern,spacesonly)
+ return empty(xmlfilter(id,pattern),spacesonly)
end
xml.all = xml.filter
@@ -9719,7 +10652,7 @@ if not modules then modules = { } end modules ['data-ini'] = {
license = "see context related readme files",
}
-local gsub, find, gmatch = string.gsub, string.find, string.gmatch
+local gsub, find, gmatch, char = string.gsub, string.find, string.gmatch, string.char
local concat = table.concat
local next, type = next, type
@@ -9781,7 +10714,7 @@ do
local homedir = osgetenv(ostype == "windows" and 'USERPROFILE' or 'HOME') or ''
if not homedir or homedir == "" then
- homedir = string.char(127) -- we need a value, later we wil trigger on it
+ homedir = char(127) -- we need a value, later we wil trigger on it
end
homedir = file.collapsepath(homedir)
@@ -9954,7 +10887,7 @@ if not modules then modules = { } end modules ['data-exp'] = {
license = "see context related readme files",
}
-local format, find, gmatch, lower = string.format, string.find, string.gmatch, string.lower
+local format, find, gmatch, lower, char, sub = string.format, string.find, string.gmatch, string.lower, string.char, string.sub
local concat, sort = table.concat, table.sort
local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
local Ct, Cs, Cc, P, C, S = lpeg.Ct, lpeg.Cs, lpeg.Cc, lpeg.P, lpeg.C, lpeg.S
@@ -9974,19 +10907,6 @@ local resolvers = resolvers
-- all, when working on the main resolver code, I don't want to scroll
-- past this every time. See data-obs.lua for the gsub variant.
--- {a,b,c,d}
--- a,b,c/{p,q,r},d
--- a,b,c/{p,q,r}/d/{x,y,z}//
--- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
--- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
--- a{b,c}{d,e}f
--- {a,b,c,d}
--- {a,b,c/{p,q,r},d}
--- {a,b,c/{p,q,r}/d/{x,y,z}//}
--- {a,b,c/{p,q/{x,y,z}},d/{p,q,r}}
--- {a,b,c/{p,q/{x,y,z},w}v,d/{p,q,r}}
--- {$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,.local,}/web2c}
-
local function f_first(a,b)
local t, n = { }, 0
for s in gmatch(b,"[^,]+") do
@@ -10043,12 +10963,9 @@ local function splitpathexpr(str, newlist, validate) -- I couldn't resist lpeggi
str = lpegmatch(stripper_1,str)
if validate then
for s in gmatch(str,"[^,]+") do
- local restoreslashes = false
- if find(s, "//$") then restoreslashes = true end
s = validate(s)
if s then
n = n + 1 ; t[n] = s
- if restoreslashes and not find(s, "//$") then t[n] = t[n] .. '//' end
end
end
else
@@ -10081,41 +10998,56 @@ function resolvers.expandedpathfromlist(pathlist)
return newlist
end
+-- {a,b,c,d}
+-- a,b,c/{p,q,r},d
+-- a,b,c/{p,q,r}/d/{x,y,z}//
+-- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
+-- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
+-- a{b,c}{d,e}f
+-- {a,b,c,d}
+-- {a,b,c/{p,q,r},d}
+-- {a,b,c/{p,q,r}/d/{x,y,z}//}
+-- {a,b,c/{p,q/{x,y,z}},d/{p,q,r}}
+-- {a,b,c/{p,q/{x,y,z},w}v,d/{p,q,r}}
+-- {$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,.local,}/web2c}
+
local cleanup = lpeg.replacer {
{ "!" , "" },
{ "\\" , "/" },
}
-local homedir
-
-function resolvers.cleanpath(str)
- if not homedir then
- homedir = lpegmatch(cleanup,environment.homedir or "")
- if homedir == string.char(127) or homedir == "" or not lfs.isdir(homedir) then
- if trace_expansions then
- report_expansions("no home dir set, ignoring dependent paths")
- end
- function resolvers.cleanpath(str)
- if find(str,"~") then
- return "" -- special case
- else
- return str and lpegmatch(cleanup,str)
- end
- end
- else
- cleanup = lpeg.replacer {
- { "!" , "" },
- { "\\" , "/" },
- { "~" , homedir },
- }
- function resolvers.cleanpath(str)
- return str and lpegmatch(cleanup,str)
+function resolvers.cleanpath(str) -- tricky, maybe only simple paths
+ local doslashes = (P("\\")/"/" + 1)^0
+ local donegation = (P("!") /"" )^0
+ local homedir = lpegmatch(Cs(donegation * doslashes),environment.homedir or "")
+ if homedir == "~" or homedir == "" or not lfs.isdir(homedir) then
+ if trace_expansions then
+ report_expansions("no home dir set, ignoring dependent paths")
+ end
+ function resolvers.cleanpath(str)
+ if not str or find(str,"~") then
+ return "" -- special case
+ else
+ return lpegmatch(cleanup,str)
end
end
+ else
+ local dohome = ((P("~")+P("$HOME"))/homedir)^0
+ local cleanup = Cs(donegation * dohome * doslashes)
+ function resolvers.cleanpath(str)
+ return str and lpegmatch(cleanup,str) or ""
+ end
end
return resolvers.cleanpath(str)
end
+-- print(resolvers.cleanpath(""))
+-- print(resolvers.cleanpath("!"))
+-- print(resolvers.cleanpath("~"))
+-- print(resolvers.cleanpath("~/test"))
+-- print(resolvers.cleanpath("!~/test"))
+-- print(resolvers.cleanpath("~/test~test"))
+
-- This one strips quotes and funny tokens.
local expandhome = P("~") / "$HOME" -- environment.homedir
@@ -10140,8 +11072,8 @@ end
local cache = { }
----- splitter = Ct(lpeg.splitat(S(ostype == "windows" and ";" or ":;"))) -- maybe add ,
-local splitter = Ct(lpeg.splitat(";")) -- as we move towards urls, prefixes and use tables we no longer do :
+----- splitter = lpeg.tsplitat(S(ostype == "windows" and ";" or ":;")) -- maybe add ,
+local splitter = lpeg.tsplitat(";") -- as we move towards urls, prefixes and use tables we no longer do :
local backslashswapper = lpeg.replacer("\\","/")
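
-- so a specification is split on ";" only; a small sketch (hypothetical input):
--
-- lpegmatch(splitter,"home:texmf;selfautoparent:texmf-context")
-- -- -> { "home:texmf", "selfautoparent:texmf-context" }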
@@ -10201,13 +11133,22 @@ end
-local weird = P(".")^1 + lpeg.anywhere(S("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t"))
+-- a lot of this caching can be stripped away when we have ssds everywhere
+--
+-- we could cache all the (sub)paths here if needed
local attributes, directory = lfs.attributes, lfs.dir
+local weird = P(".")^1 + lpeg.anywhere(S("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t"))
+local timer = { }
+local scanned = { }
+local nofscans = 0
+local scancache = { }
+
local function scan(files,spec,path,n,m,r)
- local full = (path == "" and spec) or (spec .. path .. '/')
- local dirs, nofdirs = { }, 0
+ local full = (path == "" and spec) or (spec .. path .. '/')
+ local dirs = { }
+ local nofdirs = 0
for name in directory(full) do
if not lpegmatch(weird,name) then
local mode = attributes(full..name,'mode')
@@ -10245,25 +11186,122 @@ local function scan(files,spec,path,n,m,r)
files, n, m, r = scan(files,spec,dirs[i],n,m,r)
end
end
+ scancache[sub(full,1,-2)] = files
return files, n, m, r
end
-function resolvers.scanfiles(path,branch)
+local fullcache = { }
+
+function resolvers.scanfiles(path,branch,usecache)
+ statistics.starttiming(timer)
+ local realpath = resolvers.resolve(path) -- no shortcut
+ if usecache then
+ local files = fullcache[realpath]
+ if files then
+ if trace_locating then
+ report_expansions("using cached scan of path '%s', branch '%s'",path,branch or path)
+ end
+ return files
+ end
+ end
if trace_locating then
- report_expansions("scanning path '%s', branch '%s'",path, branch or path)
+ report_expansions("scanning path '%s', branch '%s'",path,branch or path)
end
- local realpath = resolvers.resolve(path) -- no shortcut
local files, n, m, r = scan({ },realpath .. '/',"",0,0,0)
- files.__path__ = path -- can be selfautoparent:texmf-whatever
- files.__files__ = n
- files.__directories__ = m
- files.__remappings__ = r
+ files.__path__ = path -- can be selfautoparent:texmf-whatever
+ files.__files__ = n
+ files.__directories__ = m
+ files.__remappings__ = r
if trace_locating then
report_expansions("%s files found on %s directories with %s uppercase remappings",n,m,r)
end
+ if usecache then
+ scanned[#scanned+1] = realpath
+ fullcache[realpath] = files
+ end
+ nofscans = nofscans + 1
+ statistics.stoptiming(timer)
+ return files
+end
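+
+-- a minimal usage sketch (hypothetical tree; a second call with usecache set
+-- returns the cached table):
+--
+-- local files = resolvers.scanfiles("selfautoparent:texmf-context",nil,true)
+-- print(files.__files__, files.__directories__, files.__remappings__)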
+
+local function simplescan(files,spec,path) -- first match only, no map and such
+ local full = (path == "" and spec) or (spec .. path .. '/')
+ local dirs = { }
+ local nofdirs = 0
+ for name in directory(full) do
+ if not lpegmatch(weird,name) then
+ local mode = attributes(full..name,'mode')
+ if mode == 'file' then
+ if not files[name] then
+ -- only first match
+ files[name] = path
+ end
+ elseif mode == 'directory' then
+ nofdirs = nofdirs + 1
+ if path ~= "" then
+ dirs[nofdirs] = path..'/'..name
+ else
+ dirs[nofdirs] = name
+ end
+ end
+ end
+ end
+ if nofdirs > 0 then
+ sort(dirs)
+ for i=1,nofdirs do
+ files = simplescan(files,spec,dirs[i])
+ end
+ end
+ return files
+end
+
+local simplecache = { }
+local nofsharedscans = 0
+
+function resolvers.simplescanfiles(path,branch,usecache)
+ statistics.starttiming(timer)
+ local realpath = resolvers.resolve(path) -- no shortcut
+ if usecache then
+ local files = simplecache[realpath]
+ if not files then
+ files = scancache[realpath]
+ if files then
+ nofsharedscans = nofsharedscans + 1
+ end
+ end
+ if files then
+ if trace_locating then
+ report_expansions("using cached scan of path '%s', branch '%s'",path,branch or path)
+ end
+ return files
+ end
+ end
+ if trace_locating then
+ report_expansions("scanning path '%s', branch '%s'",path,branch or path)
+ end
+ local files = simplescan({ },realpath .. '/',"")
+ if trace_locating then
+ report_expansions("%s files found",table.count(files))
+ end
+ if usecache then
+ scanned[#scanned+1] = realpath
+ simplecache[realpath] = files
+ end
+ nofscans = nofscans + 1
+ statistics.stoptiming(timer)
return files
end
+function resolvers.scandata()
+ table.sort(scanned)
+ return {
+ n = nofscans,
+ shared = nofsharedscans,
+ time = statistics.elapsedtime(timer),
+ paths = scanned,
+ }
+end
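+
+-- the table returned above is just a summary of the scanning done so far, roughly
+-- { n = 2, shared = 1, time = 0.456, paths = { ... } } (illustrative values)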
+
end -- of closure
@@ -10361,12 +11399,12 @@ local relations = allocate { -- todo: handlers also here
mp = {
names = { "mp" },
variable = 'MPINPUTS',
- suffixes = { 'mp' },
+ suffixes = { 'mp', 'mpvi', 'mpiv', 'mpii' },
},
tex = {
names = { "tex" },
variable = 'TEXINPUTS',
- suffixes = { 'tex', "mkiv", "mkiv", "mkii" },
+ suffixes = { 'tex', "mkvi", "mkiv", "mkii" },
},
icc = {
names = { "icc", "icc profile", "icc profiles" },
@@ -10503,7 +11541,7 @@ end
resolvers.updaterelations() -- push this in the metatable -> newindex
local function simplified(t,k)
- return rawget(t,lower(gsub(k," ","")))
+ return k and rawget(t,lower(gsub(k," ",""))) or nil
end
setmetatableindex(formats, simplified)
@@ -10589,6 +11627,7 @@ luatools with a recache feature.</p>
--ldx]]--
local format, lower, gsub, concat = string.format, string.lower, string.gsub, table.concat
+local serialize, serializetofile = table.serialize, table.tofile
local mkdirs, isdir = dir.mkdirs, lfs.isdir
local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
@@ -10651,11 +11690,7 @@ local function identify()
end
elseif not writable and caches.force then
local cacheparent = file.dirname(cachepath)
- -- TH: added 'or true' for deeply buried non-existent caches.
- -- file.is_writable() is not really important here, since this
- -- branch is only checked for as-yet non-existent paths, and
- -- it guards against mkdirs() failing, anyway.
- if file.is_writable(cacheparent) or true then
+ if file.is_writable(cacheparent) and true then -- we go on anyway (needed for mojca's kind of paths)
if not caches.ask or io.ask(format("\nShould I create the cache path %s?",cachepath), "no", { "yes", "no" }) == "yes" then
mkdirs(cachepath)
if isdir(cachepath) and file.is_writable(cachepath) then
@@ -10746,7 +11781,7 @@ function caches.usedpaths()
end
function caches.configfiles()
- return table.concat(resolvers.instance.specification,";")
+ return concat(resolvers.instance.specification,";")
end
function caches.hashed(tree)
@@ -10870,9 +11905,9 @@ function caches.savedata(filepath,filename,data,raw)
end
data.cache_uuid = os.uuid()
if caches.direct then
- file.savedata(tmaname,table.serialize(data,true,saveoptions))
+ file.savedata(tmaname,serialize(data,true,saveoptions))
else
- table.tofile(tmaname,data,true,saveoptions)
+ serializetofile(tmaname,data,true,saveoptions)
end
utilities.lua.compile(tmaname,tmcname)
end
@@ -10939,7 +11974,7 @@ function caches.savecontent(cachename,dataname,content)
content = content,
uuid = os.uuid(),
}
- local ok = io.savedata(luaname,table.serialize(data,true))
+ local ok = io.savedata(luaname,serialize(data,true))
if ok then
if trace_locating then
report_resolvers("category '%s', cachename '%s' saved in '%s'",dataname,cachename,luaname)
@@ -11112,7 +12147,7 @@ if not modules then modules = { } end modules ['data-res'] = {
-- instance but for practical purposes we now avoid this and use an
-- instance variable. We always have one instance active (sort of global).
--- todo: cache:/// home:///
+-- todo: cache:/// home:/// selfautoparent:/// (sometime end 2012)
local format, gsub, find, lower, upper, match, gmatch = string.format, string.gsub, string.find, string.lower, string.upper, string.match, string.gmatch
local concat, insert, sortedkeys = table.concat, table.insert, table.sortedkeys
@@ -11155,7 +12190,58 @@ resolvers.criticalvars = allocate { "SELFAUTOLOC", "SELFAUTODIR", "SELFAUTOPARE
resolvers.luacnfname = 'texmfcnf.lua'
resolvers.luacnfstate = "unknown"
-resolvers.luacnfspec = '{selfautodir:,selfautoparent:};{selfautodir:,selfautoparent:}{/share,}/texmf{-local,};{selfautodir:,selfautoparent:}{/share,}/texmf{-local,}/web2c'
+-- The web2c tex binaries as well as kpse have built in paths for the configuration
+-- files and there can be a depressing truckload of them. This is actually the weak
+-- spot of a distribution. So we don't want:
+--
+-- resolvers.luacnfspec = '{$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,}/web2c}'
+--
+-- but instead use:
+--
+-- resolvers.luacnfspec = 'selfautoparent:{/texmf{-local,}{,/web2c}}'
+--
+-- which does not make texlive happy as there is a texmf-local tree one level up
+-- (sigh), so we need this. (We can assume web2c as mkiv does not run on older
+-- texlives anyway.)
+--
+-- texlive:
+--
+-- selfautodir:
+-- selfautoparent:
+-- selfautodir:share/texmf-local/web2c
+-- selfautodir:share/texmf/web2c
+-- selfautodir:texmf-local/web2c
+-- selfautodir:texmf/web2c
+-- selfautoparent:share/texmf-local/web2c
+-- selfautoparent:share/texmf/web2c
+-- selfautoparent:texmf-local/web2c
+-- selfautoparent:texmf/web2c
+--
+-- minimals:
+--
+-- home:texmf/web2c
+-- selfautoparent:texmf-local/web2c
+-- selfautoparent:texmf-context/web2c
+-- selfautoparent:texmf/web2c
+
+if this_is_texlive then
+ -- resolvers.luacnfspec = '{selfautodir:,selfautoparent:}{,{/share,}/texmf{-local,}/web2c}'
+ -- resolvers.luacnfspec = '{selfautodir:{/share,}/texmf-local/web2c,selfautoparent:{/share,}/texmf{-local,}/web2c}'
+ -- resolvers.luacnfspec = 'selfautodir:/texmf-local/web2c;selfautoparent:/texmf{-local,}/web2c'
+ resolvers.luacnfspec = 'selfautodir:;selfautoparent:;{selfautodir:,selfautoparent:}{/share,}/texmf{-local,}/web2c'
+else
+ resolvers.luacnfspec = 'home:texmf/web2c;selfautoparent:texmf{-local,-context,}/web2c'
+end
+
+-- which (as we want users to use the web2c path) can be simplified to this:
+--
+-- if environment and environment.ownpath and string.find(environment.ownpath,"[\\/]texlive[\\/]") then
+-- resolvers.luacnfspec = 'selfautodir:/texmf-local/web2c,selfautoparent:/texmf-local/web2c,selfautoparent:/texmf/web2c'
+-- else
+-- resolvers.luacnfspec = 'selfautoparent:/texmf-local/web2c,selfautoparent:/texmf/web2c'
+-- end
+
+
local unset_variable = "unset"
@@ -11177,7 +12263,7 @@ local instance = resolvers.instance or nil -- the current one (fast access)
function resolvers.setenv(key,value,raw)
if instance then
-- this one will be consulted first when we stay inside
- -- the current environment
+ -- the current environment (prefixes are not resolved here)
instance.environment[key] = value
-- we feed back into the environment, and as this is used
-- by other applications (via os.execute) we need to make
@@ -11257,6 +12343,7 @@ function resolvers.newinstance() -- todo: all vars will become lowercase and alp
remember = true,
diskcache = true,
renewcache = false,
+ renewtree = false,
loaderror = false,
savelists = true,
pattern = nil, -- lists
@@ -11348,7 +12435,7 @@ local function makepathexpression(str)
end
end
-local function reportcriticalvariables()
+local function reportcriticalvariables(cnfspec)
if trace_locating then
for i=1,#resolvers.criticalvars do
local k = resolvers.criticalvars[i]
@@ -11356,6 +12443,14 @@ local function reportcriticalvariables()
report_resolving("variable '%s' set to '%s'",k,v)
end
report_resolving()
+ if cnfspec then
+ if type(cnfspec) == "table" then
+ report_resolving("using configuration specification '%s'",concat(cnfspec,","))
+ else
+ report_resolving("using configuration specification '%s'",cnfspec)
+ end
+ end
+ report_resolving()
end
reportcriticalvariables = function() end
end
@@ -11370,7 +12465,7 @@ local function identify_configuration_files()
else
resolvers.luacnfstate = "environment"
end
- reportcriticalvariables()
+ reportcriticalvariables(cnfspec)
local cnfpaths = expandedpathfromlist(resolvers.splitpath(cnfspec))
local luacnfname = resolvers.luacnfname
for i=1,#cnfpaths do
@@ -11406,6 +12501,19 @@ local function load_configuration_files()
if blob then
local setups = instance.setups
local data = blob()
+ local parent = data and data.parent
+ if parent then
+ local filename = filejoin(pathname,parent)
+ local realname = resolvers.resolve(filename) -- no shortcut
+ local blob = loadfile(realname)
+ if blob then
+ local parentdata = blob()
+ if parentdata then
+ report_resolving("loading configuration file '%s'",filename)
+ data = table.merged(parentdata,data)
+ end
+ end
+ end
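+ -- a configuration file can thus delegate to a parent one; a rough sketch of
+ -- such a file (hypothetical location, its content is merged over the parent's):
+ --
+ -- return {
+ -- parent = "../../texmf-context/web2c/texmfcnf.lua",
+ -- content = { }, -- local overrides
+ -- }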
data = data and data.content
if data then
if trace_locating then
@@ -11495,11 +12603,6 @@ local function locate_file_databases()
local runtime = stripped == path
path = resolvers.cleanpath(path)
local spec = resolvers.splitmethod(stripped)
- -- TH Original did this the other way around (elseif and if tests
- -- reversed) but then the else branch was never reached, as 'runtime'
- -- would effectively be ignored. In turn, that meant that the wrong
- -- locator method was used, such that tree: and file: were treated
- -- identically (cached and no runtime search).
if runtime and (spec.noscheme or spec.scheme == "file") then
stripped = "tree:///" .. stripped
elseif spec.scheme == "cache" or spec.scheme == "file" then
@@ -11507,7 +12610,7 @@ local function locate_file_databases()
end
if trace_locating then
if runtime then
- report_resolving("locating list of '%s' (runtime)",path)
+ report_resolving("locating list of '%s' (runtime) (%s)",path,stripped)
else
report_resolving("locating list of '%s' (cached)",path)
end
@@ -11551,6 +12654,39 @@ local function save_file_databases() -- will become cachers
end
end
+function resolvers.renew(hashname)
+ if hashname and hashname ~= "" then
+ local expanded = resolvers.expansion(hashname) or ""
+ if expanded ~= "" then
+ if trace_locating then
+ report_resolving("identifying tree '%s' from '%s'",expanded,hashname)
+ end
+ hashname = expanded
+ else
+ if trace_locating then
+ report_resolving("identifying tree '%s'",hashname)
+ end
+ end
+ local realpath = resolvers.resolve(hashname)
+ if lfs.isdir(realpath) then
+ if trace_locating then
+ report_resolving("using path '%s'",realpath)
+ end
+ methodhandler('generators',hashname)
+ -- could be shared
+ local content = instance.files[hashname]
+ caches.collapsecontent(content)
+ if trace_locating then
+ report_resolving("saving tree '%s'",hashname)
+ end
+ caches.savecontent(hashname,"files",content)
+ -- till here
+ else
+ report_resolving("invalid path '%s'",realpath)
+ end
+ end
+end
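+
+-- this is what "mtxrun --generate <tree>" ends up calling (see near the end of
+-- this script); a rough usage sketch:
+--
+-- resolvers.load("nofiles")
+-- resolvers.renew("texmf-local") -- rescan that tree and save its files database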
+
local function load_databases()
locate_file_databases()
if instance.diskcache and not instance.renewcache then
@@ -11838,6 +12974,7 @@ local function collect_files(names)
if dname == "" or find(dname,"^%.") then
dname = false
else
+dname = gsub(dname,"*","%.*")
dname = "/" .. dname .. "$"
end
local hashes = instance.hashes
@@ -11897,9 +13034,22 @@ local function collect_files(names)
return noffiles > 0 and filelist or nil
end
-function resolvers.registerintrees(name)
- if not find(name,"^%.") then
- instance.foundintrees[name] = (instance.foundintrees[name] or 0) + 1 -- maybe only one
+local fit = { }
+
+function resolvers.registerintrees(filename,format,filetype,usedmethod,foundname)
+ local foundintrees = instance.foundintrees
+ if usedmethod == "direct" and filename == foundname and fit[foundname] then
+ -- just an extra lookup after a test on presence
+ else
+ local t = {
+ filename = filename,
+ format = format ~= "" and format or nil,
+ filetype = filetype ~= "" and filetype or nil,
+ usedmethod = usedmethod,
+ foundname = foundname,
+ }
+ fit[foundname] = t
+ foundintrees[#foundintrees+1] = t
end
end
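
-- each entry recorded in instance.foundintrees then looks roughly like this
-- (illustrative values):
--
-- {
--     filename   = "context.mkiv",
--     format     = "tex",
--     filetype   = "tex",
--     usedmethod = "database",
--     foundname  = "/some/tree/tex/context/base/context.mkiv",
-- }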
@@ -11919,297 +13069,423 @@ end
local preparetreepattern = Cs((P(".")/"%%." + P("-")/"%%-" + P(1))^0 * Cc("$"))
--- this one is split in smaller functions but it needs testing
+-- -- -- begin of main file search routing -- -- -- needs checking as previous has been patched
-local function collect_instance_files(filename,askedformat,allresults) -- todo : plugin (scanners, checkers etc)
- local result = { }
- local stamp = nil
- askedformat = askedformat or ""
- filename = collapsepath(filename)
- -- speed up / beware: format problem
- if instance.remember and not allresults then
- stamp = filename .. "--" .. askedformat
- if instance.found[stamp] then
+local collect_instance_files
+
+local function find_analyze(filename,askedformat,allresults)
+ local filetype, wantedfiles, ext = '', { }, fileextname(filename)
+ -- too tricky as filename can be bla.1.2.3:
+ --
+ -- if not suffixmap[ext] then
+ -- wantedfiles[#wantedfiles+1] = filename
+ -- end
+ wantedfiles[#wantedfiles+1] = filename
+ if askedformat == "" then
+ if ext == "" or not suffixmap[ext] then
+ local defaultsuffixes = resolvers.defaultsuffixes
+ for i=1,#defaultsuffixes do
+ local forcedname = filename .. '.' .. defaultsuffixes[i]
+ wantedfiles[#wantedfiles+1] = forcedname
+ filetype = resolvers.formatofsuffix(forcedname)
+ if trace_locating then
+ report_resolving("forcing filetype '%s'",filetype)
+ end
+ end
+ else
+ filetype = resolvers.formatofsuffix(filename)
if trace_locating then
- report_resolving("remembered file '%s'",filename)
+ report_resolving("using suffix based filetype '%s'",filetype)
end
- resolvers.registerintrees(filename) -- for tracing used files
- return instance.found[stamp]
end
- end
- if not dangerous[askedformat] then
- if isreadable(filename) then
- if trace_detail then
- report_resolving("file '%s' found directly",filename)
- end
- if stamp then
- instance.found[stamp] = { filename }
+ else
+ if ext == "" or not suffixmap[ext] then
+ local format_suffixes = suffixes[askedformat]
+ if format_suffixes then
+ for i=1,#format_suffixes do
+ wantedfiles[#wantedfiles+1] = filename .. "." .. format_suffixes[i]
+ end
end
- return { filename }
+ end
+ filetype = askedformat
+ if trace_locating then
+ report_resolving("using given filetype '%s'",filetype)
end
end
+ return filetype, wantedfiles
+end
+
+local function find_direct(filename,allresults)
+ if not dangerous[askedformat] and isreadable(filename) then
+ if trace_detail then
+ report_resolving("file '%s' found directly",filename)
+ end
+ return "direct", { filename }
+ end
+end
+
+local function find_wildcard(filename,allresults)
if find(filename,'%*') then
if trace_locating then
report_resolving("checking wildcard '%s'", filename)
end
- result = resolvers.findwildcardfiles(filename) -- we can use the local
- elseif file.is_qualified_path(filename) then
- if isreadable(filename) then
- if trace_locating then
- report_resolving("qualified name '%s'", filename)
- end
- result = { filename }
- else
- local forcedname, ok, suffix = "", false, fileextname(filename)
- if suffix == "" then -- why
- local format_suffixes = askedformat == "" and resolvers.defaultsuffixes or suffixes[askedformat]
- if format_suffixes then
- for i=1,#format_suffixes do
- local s = format_suffixes[i]
- forcedname = filename .. "." .. s
- if isreadable(forcedname) then
- if trace_locating then
- report_resolving("no suffix, forcing format filetype '%s'", s)
- end
- result, ok = { forcedname }, true
- break
- end
- end
- end
- end
- if not ok and suffix ~= "" then
- -- try to find in tree (no suffix manipulation), here we search for the
- -- matching last part of the name
- local basename = filebasename(filename)
- local pattern = lpegmatch(preparetreepattern,filename)
- -- messy .. to be sorted out
- local savedformat = askedformat
- local format = savedformat or ""
- if format == "" then
- askedformat = resolvers.formatofsuffix(suffix)
- end
- if not format then
- askedformat = "othertextfiles" -- kind of everything, maybe texinput is better
- end
- --
- if basename ~= filename then
- local resolved = collect_instance_files(basename,askedformat,allresults)
- if #result == 0 then -- shouldn't this be resolved ?
- local lowered = lower(basename)
- if filename ~= lowered then
- resolved = collect_instance_files(lowered,askedformat,allresults)
- end
- end
- resolvers.format = savedformat
- --
- for r=1,#resolved do
- local rr = resolved[r]
- if find(rr,pattern) then
- result[#result+1], ok = rr, true
- end
- end
- end
- -- a real wildcard:
- --
- -- if not ok then
- -- local filelist = collect_files({basename})
- -- for f=1,#filelist do
- -- local ff = filelist[f][3] or ""
- -- if find(ff,pattern) then
- -- result[#result+1], ok = ff, true
- -- end
- -- end
- -- end
- end
- if not ok and trace_locating then
- report_resolving("qualified name '%s'", filename)
- end
+ local method, result = resolvers.findwildcardfiles(filename)
+ if result then
+ return "wildcard", result
end
- else
- -- search spec
- local filetype, done, wantedfiles, ext = '', false, { }, fileextname(filename)
- -- -- tricky as filename can be bla.1.2.3
- -- if not suffixmap[ext] then --- probably needs to be done elsewhere too
- -- wantedfiles[#wantedfiles+1] = filename
- -- end
- wantedfiles[#wantedfiles+1] = filename
- if askedformat == "" then
- if ext == "" or not suffixmap[ext] then
- local defaultsuffixes = resolvers.defaultsuffixes
- for i=1,#defaultsuffixes do
- local forcedname = filename .. '.' .. defaultsuffixes[i]
- wantedfiles[#wantedfiles+1] = forcedname
- filetype = resolvers.formatofsuffix(forcedname)
+ end
+end
+
+local function find_qualified(filename,allresults) -- this one will be split too
+ if not file.is_qualified_path(filename) then
+ return
+ end
+ if trace_locating then
+ report_resolving("checking qualified name '%s'", filename)
+ end
+ if isreadable(filename) then
+ if trace_detail then
+ report_resolving("qualified file '%s' found", filename)
+ end
+ return "qualified", { filename }
+ end
+ if trace_detail then
+ report_resolving("locating qualified file '%s'", filename)
+ end
+ local forcedname, suffix = "", fileextname(filename)
+ if suffix == "" then -- why
+ local format_suffixes = askedformat == "" and resolvers.defaultsuffixes or suffixes[askedformat]
+ if format_suffixes then
+ for i=1,#format_suffixes do
+ local s = format_suffixes[i]
+ forcedname = filename .. "." .. s
+ if isreadable(forcedname) then
if trace_locating then
- report_resolving("forcing filetype '%s'",filetype)
+ report_resolving("no suffix, forcing format filetype '%s'", s)
end
+ return "qualified", { forcedname }
end
- else
- filetype = resolvers.formatofsuffix(filename)
- if trace_locating then
- report_resolving("using suffix based filetype '%s'",filetype)
+ end
+ end
+ end
+ if suffix and suffix ~= "" then
+ -- try to find in tree (no suffix manipulation), here we search for the
+ -- matching last part of the name
+ local basename = filebasename(filename)
+ local pattern = lpegmatch(preparetreepattern,filename)
+ -- messy .. to be sorted out
+ local savedformat = askedformat
+ local format = savedformat or ""
+ if format == "" then
+ askedformat = resolvers.formatofsuffix(suffix)
+ end
+ if not format then
+ askedformat = "othertextfiles" -- kind of everything, maybe all
+ end
+ --
+ if basename ~= filename then
+ local resolved = collect_instance_files(basename,askedformat,allresults)
+ if #resolved == 0 then
+ local lowered = lower(basename)
+ if filename ~= lowered then
+ resolved = collect_instance_files(lowered,askedformat,allresults)
end
end
- else
- if ext == "" or not suffixmap[ext] then
- local format_suffixes = suffixes[askedformat]
- if format_suffixes then
- for i=1,#format_suffixes do
- wantedfiles[#wantedfiles+1] = filename .. "." .. format_suffixes[i]
+ resolvers.format = savedformat
+ --
+ if #resolved > 0 then
+ local result = { }
+ for r=1,#resolved do
+ local rr = resolved[r]
+ if find(rr,pattern) then
+ result[#result+1] = rr
end
end
+ if #result > 0 then
+ return "qualified", result
+ end
end
- filetype = askedformat
- if trace_locating then
- report_resolving("using given filetype '%s'",filetype)
- end
end
- local typespec = resolvers.variableofformat(filetype)
- local pathlist = resolvers.expandedpathlist(typespec)
- if not pathlist or #pathlist == 0 then
- -- no pathlist, access check only / todo == wildcard
- if trace_detail then
- report_resolving("checking filename '%s', filetype '%s', wanted files '%s'",filename, filetype or '?',concat(wantedfiles," | "))
+ -- a real wildcard:
+ --
+ -- local filelist = collect_files({basename})
+ -- result = { }
+ -- for f=1,#filelist do
+ -- local ff = filelist[f][3] or ""
+ -- if find(ff,pattern) then
+ -- result[#result+1], ok = ff, true
+ -- end
+ -- end
+ -- if #result > 0 then
+ -- return "qualified", result
+ -- end
+ end
+end
+
+local function check_subpath(fname)
+ if isreadable(fname) then
+ if trace_detail then
+ report_resolving("found '%s' by deep scanning",fname)
+ end
+ return fname
+ end
+end
+
+local function find_intree(filename,filetype,wantedfiles,allresults)
+ local typespec = resolvers.variableofformat(filetype)
+ local pathlist = resolvers.expandedpathlist(typespec)
+ local method = "intree"
+ if pathlist and #pathlist > 0 then
+ -- list search
+ local filelist = collect_files(wantedfiles)
+ local dirlist = { }
+ if filelist then
+ for i=1,#filelist do
+ dirlist[i] = filedirname(filelist[i][3]) .. "/" -- was [2] .. gamble
end
- for k=1,#wantedfiles do
- local fname = wantedfiles[k]
- if fname and isreadable(fname) then
- filename, done = fname, true
- result[#result+1] = filejoin('.',fname)
- break
+ end
+ if trace_detail then
+ report_resolving("checking filename '%s'",filename)
+ end
+ local result = { }
+ for k=1,#pathlist do
+ local path = pathlist[k]
+ local pathname = lpegmatch(inhibitstripper,path)
+ local doscan = path == pathname -- no ^!!
+ if not find (pathname,'//$') then
+ doscan = false -- we check directly on the path
+ end
+ local done = false
+ -- using file list
+ if filelist then -- database
+ -- compare list entries with permitted pattern -- /xx /xx//
+ local expression = makepathexpression(pathname)
+ if trace_detail then
+ report_resolving("using pattern '%s' for path '%s'",expression,pathname)
end
- end
- -- this is actually 'other text files' or 'any' or 'whatever'
- local filelist = collect_files(wantedfiles)
- local fl = filelist and filelist[1]
- if fl then
- filename = fl[3] -- not local?
- result[#result+1] = resolvers.resolve(filename)
- done = true
- end
- else
- -- list search
- local filelist = collect_files(wantedfiles)
- local dirlist = { }
- if filelist then
- for i=1,#filelist do
- dirlist[i] = filedirname(filelist[i][3]) .. "/" -- was [2] .. gamble
- end
- end
- if trace_detail then
- report_resolving("checking filename '%s'",filename)
- end
- for k=1,#pathlist do
- local path = pathlist[k]
- local pathname = lpegmatch(inhibitstripper,path)
- local doscan = path == pathname -- no ^!!
- if not find (pathname, '//$') then doscan = false end
- done = false
- -- using file list
- if filelist then
- -- compare list entries with permitted pattern -- /xx /xx//
- local expression = makepathexpression(pathname)
- if trace_detail then
- report_resolving("using pattern '%s' for path '%s'",expression,pathname)
- end
- for k=1,#filelist do
- local fl = filelist[k]
- local f = fl[2]
- local d = dirlist[k]
- if find(d,expression) then
- -- todo, test for readable
- result[#result+1] = resolvers.resolve(fl[3]) -- no shortcut
- done = true
- if allresults then
- if trace_detail then
- report_resolving("match to '%s' in hash for file '%s' and path '%s', continue scanning",expression,f,d)
- end
- else
- if trace_detail then
- report_resolving("match to '%s' in hash for file '%s' and path '%s', quit scanning",expression,f,d)
- end
- break
+ for k=1,#filelist do
+ local fl = filelist[k]
+ local f = fl[2]
+ local d = dirlist[k]
+ if find(d,expression) then
+ -- todo, test for readable
+ result[#result+1] = resolvers.resolve(fl[3]) -- no shortcut
+ done = true
+ if allresults then
+ if trace_detail then
+ report_resolving("match to '%s' in hash for file '%s' and path '%s', continue scanning",expression,f,d)
end
- elseif trace_detail then
- report_resolving("no match to '%s' in hash for file '%s' and path '%s'",expression,f,d)
+ else
+ if trace_detail then
+ report_resolving("match to '%s' in hash for file '%s' and path '%s', quit scanning",expression,f,d)
+ end
+ break
end
+ elseif trace_detail then
+ report_resolving("no match to '%s' in hash for file '%s' and path '%s'",expression,f,d)
end
end
- if not done then
- -- check if on disk / unchecked / does not work at all / also zips
- -- TH perhaps it did not work because of missing resolvers.resolve()
- -- and resolvers.scanfiles() calls ...
- pathname = resolvers.resolve(pathname)
- local scheme = url.hasscheme(pathname)
- if not scheme or scheme == "file" then
- local pname = gsub(pathname,"%.%*$",'')
- if not find(pname,"%*") then
- local ppname = gsub(pname,"/+$","")
- if can_be_dir(ppname) then
- local files = {}
- if doscan then files = resolvers.scanfiles(ppname) end
+ end
+ if done then
+ method = "database"
+ else
+ method = "filesystem" -- bonus, even when !! is specified
+ pathname = gsub(pathname,"/+$","")
+ pathname = resolvers.resolve(pathname)
+ local scheme = url.hasscheme(pathname)
+ if not scheme or scheme == "file" then
+ local pname = gsub(pathname,"%.%*$",'')
+ if not find(pname,"%*") then
+ if can_be_dir(pname) then
+ -- quick root scan first
+ for k=1,#wantedfiles do
+ local w = wantedfiles[k]
+ local fname = check_subpath(filejoin(pname,w))
+ if fname then
+ result[#result+1] = fname
+ done = true
+ if not allresults then
+ break
+ end
+ end
+ end
+ if not done and doscan then
+ -- collect files in path (and cache the result)
+ local files = resolvers.simplescanfiles(pname,false,true)
for k=1,#wantedfiles do
local w = wantedfiles[k]
- local subpath = ''
- if files[w] then
- subpath = type(files[w]) == "table" and files[w][1] or files[w]
- end
- local fname = filejoin(ppname,subpath,w)
- if isreadable(fname) then
- if trace_detail then
- report_resolving("found '%s' by scanning",fname)
+ local subpath = files[w]
+ if not subpath or subpath == "" then
+ -- rootscan already done
+ elseif type(subpath) == "string" then
+ local fname = check_subpath(filejoin(pname,subpath,w))
+ if fname then
+ result[#result+1] = fname
+ done = true
+ if not allresults then
+ break
+ end
+ end
+ else
+ for i=1,#subpath do
+ local sp = subpath[i]
+ if sp == "" then
+ -- roottest already done
+ else
+ local fname = check_subpath(filejoin(pname,sp,w))
+ if fname then
+ result[#result+1] = fname
+ done = true
+ if not allresults then
+ break
+ end
+ end
+ end
+ end
+ if done and not allresults then
+ break
end
- result[#result+1] = fname
- done = true
- if not allresults then break end
end
end
- else
- -- no access needed for non-existing path, speedup (esp in a large tree with lots of fake paths)
end
end
+ else
+ -- no access needed for non-existing path, speedup (esp in a large tree with lots of fake paths)
end
end
- if not done and doscan then
- -- todo: slow path scanning ... although we now have tree:// supported in $TEXMF
- end
- if done and not allresults then break end
+ end
+ -- todo recursive scanning
+ if done and not allresults then
+ break
end
end
+ if #result > 0 then
+ return method, result
+ end
end
- for k=1,#result do
- local rk = collapsepath(result[k])
- result[k] = rk
- resolvers.registerintrees(rk) -- for tracing used files
+end
+
+local function find_onpath(filename,filetype,wantedfiles,allresults)
+ if trace_detail then
+ report_resolving("checking filename '%s', filetype '%s', wanted files '%s'",filename, filetype or '?',concat(wantedfiles," | "))
+ end
+ local result = { }
+ for k=1,#wantedfiles do
+ local fname = wantedfiles[k]
+ if fname and isreadable(fname) then
+ filename = fname
+ result[#result+1] = filejoin('.',fname)
+ if not allresults then
+ break
+ end
+ end
end
- if stamp then
- instance.found[stamp] = result
+ if #result > 0 then
+ return "onpath", result
end
- return result
end
--- -- -- begin of main file search routing -- -- --
-
-
-
-
-
-
-
+local function find_otherwise(filename,filetype,wantedfiles,allresults) -- other text files | any | whatever
+ local filelist = collect_files(wantedfiles)
+ local fl = filelist and filelist[1]
+ if fl then
+ return "otherwise", { resolvers.resolve(fl[3]) } -- filename
+ end
+end
+-- we could have a loop over the 6 functions but then we'd have to
+-- always analyze
+collect_instance_files = function(filename,askedformat,allresults) -- uses nested
+ askedformat = askedformat or ""
+ filename = collapsepath(filename)
+ if allresults then
+ -- no need for caching, only used for tracing
+ local filetype, wantedfiles = find_analyze(filename,askedformat)
+ local results = {
+ { find_direct (filename,true) },
+ { find_wildcard (filename,true) },
+ { find_qualified(filename,true) },
+ { find_intree (filename,filetype,wantedfiles,true) },
+ { find_onpath (filename,filetype,wantedfiles,true) },
+ { find_otherwise(filename,filetype,wantedfiles,true) },
+ }
+ local result, status, done = { }, { }, { }
+ for k, r in next, results do
+ local method, list = r[1], r[2]
+ if method and list then
+ for i=1,#list do
+ local c = collapsepath(list[i])
+ if not done[c] then
+ result[#result+1] = c
+ done[c] = true
+ end
+ status[#status+1] = format("%-10s: %s",method,c)
+ end
+ end
+ end
+ if trace_detail then
+ report_resolving("lookup status: %s",table.serialize(status,filename))
+ end
+ return result, status
+ else
+ local method, result, stamp, filetype, wantedfiles
+ if instance.remember then
+ stamp = format("%s--%s", filename, askedformat)
+ result = stamp and instance.found[stamp]
+ if result then
+ if trace_locating then
+ report_resolving("remembered file '%s'",filename)
+ end
+ return result
+ end
+ end
+ method, result = find_direct(filename)
+ if not result then
+ method, result = find_wildcard(filename)
+ if not result then
+ method, result = find_qualified(filename)
+ if not result then
+ filetype, wantedfiles = find_analyze(filename,askedformat)
+ method, result = find_intree(filename,filetype,wantedfiles)
+ if not result then
+ method, result = find_onpath(filename,filetype,wantedfiles)
+ if not result then
+ method, result = find_otherwise(filename,filetype,wantedfiles)
+ end
+ end
+ end
+ end
+ end
+ if result and #result > 0 then
+ local foundname = collapsepath(result[1])
+ resolvers.registerintrees(filename,askedformat,filetype,method,foundname)
+ result = { foundname }
+ else
+ result = { } -- maybe false
+ end
+ if stamp then
+ if trace_locating then
+ report_resolving("remembering file '%s'",filename)
+ end
+ instance.found[stamp] = result
+ end
+ return result
+ end
+end
-- -- -- end of main file search routing -- -- --
+
local function findfiles(filename,filetype,allresults)
- local result = collect_instance_files(filename,filetype or "",allresults)
- if #result == 0 then
+ local result, status = collect_instance_files(filename,filetype or "",allresults)
+ if not result or #result == 0 then
local lowered = lower(filename)
if filename ~= lowered then
- return collect_instance_files(lowered,filetype or "",allresults)
+ result, status = collect_instance_files(lowered,filetype or "",allresults)
end
end
- return result
+ return result or { }, status
end
function resolvers.findfiles(filename,filetype)
@@ -12370,6 +13646,10 @@ function resolvers.load(option)
return files and next(files) and true
end
+function resolvers.loadtime()
+ return statistics.elapsedtime(instance)
+end
+
local function report(str)
if trace_locating then
report_resolving(str) -- has already verbose
@@ -12383,6 +13663,9 @@ function resolvers.dowithfilesandreport(command, files, ...) -- will move
if trace_locating then
report('') -- ?
end
+ if type(files) == "string" then
+ files = { files }
+ end
for f=1,#files do
local file = files[f]
local result = command(file,...)
@@ -12540,6 +13823,8 @@ local cleanpath, findgivenfile, expansion = resolvers.cleanpath, resolvers.findg
local getenv = resolvers.getenv -- we can probably also use resolvers.expansion
local P, Cs, lpegmatch = lpeg.P, lpeg.Cs, lpeg.match
+-- getenv = function(...) return resolvers.getenv(...) end -- needs checking (definition changes later on)
+
prefixes.environment = function(str)
return cleanpath(expansion(str))
end
@@ -12578,7 +13863,7 @@ end
prefixes.filename = function(str)
local fullname = findgivenfile(str) or ""
- return cleanpath(file.basename((fullname ~= "" and fullname) or str))
+ return cleanpath(file.basename((fullname ~= "" and fullname) or str)) -- no cleanpath needed here
end
prefixes.pathname = function(str)
@@ -12621,8 +13906,9 @@ function resolvers.allprefixes(separator)
end
local function _resolve_(method,target)
- if prefixes[method] then
- return prefixes[method](target)
+ local action = prefixes[method]
+ if action then
+ return action(target)
else
return method .. ":" .. target
end
@@ -12637,7 +13923,7 @@ end
local function resolve(str) -- use schemes, this one is then for the commandline only
local res = resolved[str]
if not res then
- res = gsub(str,"([a-z][a-z]+):([^ \"\';]*)",_resolve_)
+ res = gsub(str,"([a-z][a-z]+):([^ \"\';]*)",_resolve_) -- home:xx;selfautoparent:xx; etc
resolved[str] = res
abstract[res] = str
end
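
-- a small usage sketch for the commandline resolver above (hypothetical values):
--
-- resolve("environment:TEXMFCACHE;filename:context.mkiv")
-- -- each "prefix:value" chunk is passed to the matching handler in prefixes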
@@ -12797,7 +14083,7 @@ end
function generators.file(specification)
local path = specification.filename
- local content = resolvers.scanfiles(path)
+ local content = resolvers.scanfiles(path,false,true) -- scan once
resolvers.registerfilehash(path,content,true)
end
@@ -13152,6 +14438,18 @@ local archives = zip.archives
zip.registeredfiles = zip.registeredfiles or { }
local registeredfiles = zip.registeredfiles
+local limited = false
+
+directives.register("system.inputmode", function(v)
+ if not limited then
+ local i_limiter = io.i_limiter(v)
+ if i_limiter then
+ zip.open = i_limiter.protect(zip.open)
+ limited = true
+ end
+ end
+end)
+
local function validzip(str) -- todo: use url splitter
if not find(str,"^zip://") then
return "zip:///" .. str
@@ -13419,11 +14717,11 @@ function resolvers.finders.tree(specification)
end
function resolvers.locators.tree(specification)
- -- TH: resolvers.resolve() call was missing here
- local name = resolvers.resolve(specification.filename)
- if name ~= '' and lfs.isdir(name) then
+ local name = specification.filename
+ local realname = resolvers.resolve(name) -- no shortcut
+ if realname and realname ~= '' and lfs.isdir(realname) then
if trace_locating then
- report_trees("locator '%s' found",name)
+ report_trees("locator '%s' found",realname)
end
resolvers.appendhash('tree',name,false) -- don't cache
elseif trace_locating then
@@ -13437,6 +14735,8 @@ function resolvers.hashers.tree(specification)
report_trees("analysing '%s'",name)
end
resolvers.methodhandler("hashers",name)
+
+ resolvers.generators.file(specification)
end
resolvers.concatinators.tree = resolvers.concatinators.file
@@ -13596,8 +14896,15 @@ local function loaded(libpaths,name,simple)
end
package.loaders[2] = function(name) -- was [#package.loaders+1]
- if trace_locating then -- mode detail
- report_libraries("! locating '%s'",name)
+ if file.suffix(name) == "" then
+ name = file.addsuffix(name,"lua") -- maybe a list
+ if trace_locating then -- mode detail
+ report_libraries("! locating '%s' with forced suffix",name)
+ end
+ else
+ if trace_locating then -- mode detail
+ report_libraries("! locating '%s'",name)
+ end
end
for i=1,#libformats do
local format = libformats[i]
@@ -13804,7 +15111,13 @@ function resolvers.load_tree(tree,resolve)
-- Beware, we need to obey the relocatable autoparent so we
-- set TEXMFCNF to its raw value. This is somewhat tricky when
-- we run a mkii job from within. Therefore, in mtxrun, there
- -- is a resolve applied when we're in mkii/kpse mode.
+ -- is a resolve applied when we're in mkii/kpse mode or when
+ -- --resolve is passed to mtxrun. Maybe we should also set the
+ -- local AUTOPARENT etc. although these are always set anew.
+
+ if resolve then
+ resolvers.luacnfspec = resolvers.resolve(resolvers.luacnfspec)
+ end
setenv('SELFAUTOPARENT', newroot)
setenv('SELFAUTODIR', newtree)
@@ -13812,11 +15125,12 @@ function resolvers.load_tree(tree,resolve)
setenv('TEXROOT', newroot)
setenv('TEXOS', texos)
setenv('TEXMFOS', texmfos)
- setenv('TEXMFCNF', resolvers.luacnfspec, not resolve)
- setenv("PATH", newpath .. io.pathseparator .. getenv("PATH"))
+ setenv('TEXMFCNF', resolvers.luacnfspec,true) -- already resolved
+ setenv('PATH', newpath .. io.pathseparator .. getenv('PATH'))
report_tds("changing from root '%s' to '%s'",oldroot,newroot)
- report_tds("prepending '%s' to binary path",newpath)
+ report_tds("prepending '%s' to PATH",newpath)
+ report_tds("setting TEXMFCNF to '%s'",resolvers.luacnfspec)
report_tds()
end
end
@@ -13837,6 +15151,7 @@ if not modules then modules = { } end modules ['data-lst'] = {
-- used in mtxrun, can be loaded later .. todo
local find, concat, upper, format = string.find, table.concat, string.upper, string.format
+local fastcopy, sortedpairs = table.fastcopy, table.sortedpairs
resolvers.listers = resolvers.listers or { }
@@ -13867,10 +15182,10 @@ function resolvers.listers.variables(pattern)
end
end
end
- local env = table.fastcopy(environment)
- local var = table.fastcopy(variables)
- local exp = table.fastcopy(expansions)
- for key, value in table.sortedpairs(configured) do
+ local env = fastcopy(environment)
+ local var = fastcopy(variables)
+ local exp = fastcopy(expansions)
+ for key, value in sortedpairs(configured) do
if key ~= "" and (pattern == "" or find(upper(key),pattern)) then
report_lists(key)
report_lists(" env: %s",tabstr(rawget(environment,key)) or "unset")
@@ -13879,9 +15194,9 @@ function resolvers.listers.variables(pattern)
report_lists(" res: %s",resolvers.resolve(expansions[key]) or "unset")
end
end
- instance.environment = table.fastcopy(env)
- instance.variables = table.fastcopy(var)
- instance.expansions = table.fastcopy(exp)
+ instance.environment = fastcopy(env)
+ instance.variables = fastcopy(var)
+ instance.expansions = fastcopy(exp)
end
function resolvers.listers.configurations(report)
@@ -14168,8 +15483,8 @@ own = { } -- not local, might change
own.libs = { -- order can be made better
'l-string.lua',
- 'l-lpeg.lua',
'l-table.lua',
+ 'l-lpeg.lua',
'l-io.lua',
'l-number.lua',
'l-set.lua',
@@ -14240,7 +15555,7 @@ end
-- End of hack.
-local format, gsub, gmatch, match = string.format, string.gsub, string.gmatch, string.match
+local format, gsub, gmatch, match, find = string.format, string.gsub, string.gmatch, string.match, string.find
local concat = table.concat
own.name = (environment and environment.ownname) or arg[0] or 'mtxrun.lua'
@@ -14362,7 +15677,7 @@ local helpinfo = [[
--resolve resolve prefixed arguments
--ctxlua run internally (using preloaded libs)
--internal run script using built in libraries (same as --ctxlua)
---locate locate given filename
+--locate locate given filename in database (default) or system (--first --all --detail)
--autotree use texmf tree cf. env 'texmfstart_tree' or 'texmfstarttree'
--tree=pathtotree use given texmf tree (default file: 'setuptex.tmf')
@@ -14410,7 +15725,7 @@ local helpinfo = [[
local application = logs.application {
name = "mtxrun",
- banner = "ConTeXt TDS Runner Tool 1.30",
+ banner = "ConTeXt TDS Runner Tool 1.31",
helpinfo = helpinfo,
}
@@ -14455,12 +15770,12 @@ runners.registered = {
xmltools = { 'xmltools.rb', true },
-- luatools = { 'luatools.lua', true },
mtxtools = { 'mtxtools.rb', true },
- pdftrimwhite = { 'pdftrimwhite.pl', false }
+ pdftrimwhite = { 'pdftrimwhite.pl', false },
}
runners.launchers = {
windows = { },
- unix = { }
+ unix = { },
}
-- like runners.libpath("framework"): looks on script's subpath
@@ -14487,7 +15802,7 @@ function runners.prepare()
end
local touchname = environment.argument("iftouched")
if type(touchname) == "string" and touchname ~= "" then
- local oldname, newname = string.split(touchname, ",")
+ local oldname, newname = string.splitup(touchname, ",")
if oldname and newname and oldname ~= "" and newname ~= "" then
if not file.needs_updating(oldname,newname) then
if e_verbose then
@@ -14521,11 +15836,11 @@ function runners.execute_script(fullname,internal,nosplit)
if path ~= "" then
result = fullname
elseif name then
- name = name:gsub("^int[%a]*:",function()
+ name = gsub(name,"^int[%a]*:",function()
internal = true
return ""
end )
- name = name:gsub("^script:","")
+ name = gsub(name,"^script:","")
if suffix == "" and runners.registered[name] and runners.registered[name][1] then
name = runners.registered[name][1]
suffix = file.extname(name)
@@ -14555,13 +15870,15 @@ function runners.execute_script(fullname,internal,nosplit)
environment.ownscript = result
dofile(result)
else
- local quotedresult = result
- if string.match(result,' ') and not string.match(result,"^\".*\"$") then quotedresult = '"' .. result .. '"' end
local binary = runners.applications[file.extname(result)]
+ result = string.quoted(string.unquoted(result))
+ -- if string.match(result,' ') and not string.match(result,"^\".*\"$") then
+ -- result = '"' .. result .. '"'
+ -- end
if binary and binary ~= "" then
- quotedresult = binary .. " " .. quotedresult
+ result = binary .. " " .. result
end
- local command = quotedresult .. " " .. environment.reconstructcommandline(environment.arguments_after,noquote)
+ local command = result .. " " .. environment.reconstructcommandline(environment.arguments_after,noquote)
if e_verbose then
report()
report("executing: %s",command)
@@ -14606,7 +15923,7 @@ function runners.execute_program(fullname)
local before, after = environment.splitarguments(fullname)
for k=1,#after do after[k] = resolvers.resolve(after[k]) end
environment.initializearguments(after)
- fullname = fullname:gsub("^bin:","")
+ fullname = gsub(fullname,"^bin:","")
local command = fullname .. " " .. (environment.reconstructcommandline(after or "",noquote) or "")
report()
report("executing: %s",command)
@@ -14667,10 +15984,24 @@ function runners.resolve_string(filename)
end
end
-function runners.locate_file(filename)
- -- differs from texmfstart where locate appends .com .exe .bat ... todo
+-- differs from texmfstart where locate appends .com .exe .bat ... todo
+
+function runners.locate_file(filename) -- was given file but only searches in tree
if filename and filename ~= "" then
- runners.report_location(resolvers.findgivenfile(filename))
+ if environment.argument("first") then
+ runners.report_location(resolvers.findfile(filename))
+ -- resolvers.dowithfilesandreport(resolvers.findfile,filename)
+ elseif environment.argument("all") then
+ local result, status = resolvers.findfiles(filename)
+ if status and environment.argument("detail") then
+ runners.report_location(status)
+ else
+ runners.report_location(result)
+ end
+ else
+ runners.report_location(resolvers.findgivenfile(filename))
+ -- resolvers.dowithfilesandreport(resolvers.findgivenfile,filename)
+ end
end
end
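
-- typical invocations (a sketch; the flags are the ones mentioned in the help info):
--
--   mtxrun --locate context.mkiv                  (database lookup via findgivenfile)
--   mtxrun --locate --first context.mkiv          (regular findfile lookup)
--   mtxrun --locate --all --detail context.mkiv   (full search, one line per hit plus method)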
@@ -14679,12 +16010,12 @@ function runners.locate_platform()
end
function runners.report_location(result)
- if e_verbose then
- reportline()
- if result and result ~= "" then
- report(result)
- else
- report("not found")
+ if type(result) == "table" then
+ for i=1,#result do
+ if i > 1 then
+ io.write("\n")
+ end
+ io.write(result[i])
end
else
io.write(result)
@@ -14742,8 +16073,8 @@ function resolvers.launch(str)
end
function runners.launch_file(filename)
- instance.allresults = true
trackers.enable("resolvers.locating")
+ local allresults = environment.arguments["all"]
local pattern = environment.arguments["pattern"]
if not pattern or pattern == "" then
pattern = filename
@@ -14751,15 +16082,15 @@ function runners.launch_file(filename)
if not pattern or pattern == "" then
report("provide name or --pattern=")
else
- local t = resolvers.findfiles(pattern)
+ local t = resolvers.findfiles(pattern,nil,allresults)
if not t or #t == 0 then
- t = resolvers.findfiles("*/" .. pattern)
+ t = resolvers.findfiles("*/" .. pattern,nil,allresults)
end
if not t or #t == 0 then
- t = resolvers.findfiles("*/" .. pattern .. "*")
+ t = resolvers.findfiles("*/" .. pattern .. "*",nil,allresults)
end
if t and #t > 0 then
- if environment.arguments["all"] then
+ if allresults then
for _, v in pairs(t) do
report("launching %s", v)
resolvers.launch(v)
@@ -14774,6 +16105,11 @@ function runners.launch_file(filename)
end
end
+local mtxprefixes = {
+ { "^mtx%-", "mtx-" },
+ { "^mtx%-t%-", "mtx-t-" },
+}
+
function runners.find_mtx_script(filename)
local function found(name)
local path = file.dirname(name)
@@ -14799,24 +16135,27 @@ function runners.find_mtx_script(filename)
return fullname
end
-- mtx- prefix checking
- local mtxprefix = (filename:find("^mtx%-") and "") or "mtx-"
- -- context namespace, mtx-<filename>
- fullname = mtxprefix .. filename
- fullname = found(fullname) or resolvers.findfile(fullname)
- if fullname and fullname ~= "" then
- return fullname
- end
- -- context namespace, mtx-<filename>s
- fullname = mtxprefix .. basename .. "s" .. "." .. suffix
- fullname = found(fullname) or resolvers.findfile(fullname)
- if fullname and fullname ~= "" then
- return fullname
- end
- -- context namespace, mtx-<filename minus trailing s>
- fullname = mtxprefix .. basename:gsub("s$","") .. "." .. suffix
- fullname = found(fullname) or resolvers.findfile(fullname)
- if fullname and fullname ~= "" then
- return fullname
+ for i=1,#mtxprefixes do
+ local mtxprefix = mtxprefixes[i]
+ mtxprefix = find(filename,mtxprefix[1]) and "" or mtxprefix[2]
+ -- context namespace, mtx-<filename>
+ fullname = mtxprefix .. filename
+ fullname = found(fullname) or resolvers.findfile(fullname)
+ if fullname and fullname ~= "" then
+ return fullname
+ end
+ -- context namespace, mtx-<filename>s
+ fullname = mtxprefix .. basename .. "s" .. "." .. suffix
+ fullname = found(fullname) or resolvers.findfile(fullname)
+ if fullname and fullname ~= "" then
+ return fullname
+ end
+ -- context namespace, mtx-<filename minus trailing s>
+ fullname = mtxprefix .. gsub(basename,"s$","") .. "." .. suffix
+ fullname = found(fullname) or resolvers.findfile(fullname)
+ if fullname and fullname ~= "" then
+ return fullname
+ end
end
-- context namespace, just <filename>
fullname = resolvers.findfile(filename)
@@ -14963,9 +16302,11 @@ instance.lsrmode = environment.argument("lsr") or false
local is_mkii_stub = runners.registered[file.removesuffix(file.basename(filename))]
-if environment.argument("usekpse") or environment.argument("forcekpse") or is_mkii_stub then
+local e_argument = environment.argument
+
+if e_argument("usekpse") or e_argument("forcekpse") or is_mkii_stub then
- resolvers.load_tree(environment.argument('tree'),true) -- force resolve of TEXMFCNF
+ resolvers.load_tree(e_argument('tree'),true) -- force resolve of TEXMFCNF
os.setenv("engine","")
os.setenv("progname","")
@@ -14978,7 +16319,7 @@ if environment.argument("usekpse") or environment.argument("forcekpse") or is_mk
other = "other text files",
}
- local progname = environment.argument("progname") or 'context'
+ local progname = e_argument("progname") or 'context'
local function kpse_initialized()
texconfig.kpse_init = true
@@ -14993,7 +16334,7 @@ if environment.argument("usekpse") or environment.argument("forcekpse") or is_mk
local findfile = resolvers.findfile
local showpath = resolvers.showpath
- if environment.argument("forcekpse") then
+ if e_argument("forcekpse") then
function resolvers.findfile(name,kind)
return (kpse_initialized():find_file(resolvers.cleanpath(name),(kind ~= "" and (remapper[kind] or kind)) or "tex") or "") or ""
@@ -15002,7 +16343,7 @@ if environment.argument("usekpse") or environment.argument("forcekpse") or is_mk
return (kpse_initialized():show_path(name)) or ""
end
- elseif environment.argument("usekpse") or is_mkii_stub then
+ elseif e_argument("usekpse") or is_mkii_stub then
resolvers.load()
@@ -15041,12 +16382,12 @@ else
end
end
- resolvers.load_tree(environment.argument('tree'))
+ resolvers.load_tree(e_argument('tree'),e_argument("resolve"))
end
-if environment.argument("selfmerge") then
+if e_argument("selfmerge") then
-- embed used libraries
@@ -15056,27 +16397,27 @@ if environment.argument("selfmerge") then
utilities.merger.selfmerge(own.name,own.libs,{ found })
end
-elseif environment.argument("selfclean") then
+elseif e_argument("selfclean") then
-- remove embedded libraries
runners.loadbase()
utilities.merger.selfclean(own.name)
-elseif environment.argument("selfupdate") then
+elseif e_argument("selfupdate") then
runners.loadbase()
trackers.enable("resolvers.locating")
resolvers.updatescript(own.name,"mtxrun")
-elseif environment.argument("ctxlua") or environment.argument("internal") then
+elseif e_argument("ctxlua") or e_argument("internal") then
-- run a script by loading it (using libs)
runners.loadbase()
ok = runners.execute_script(filename,true)
-elseif environment.argument("script") or environment.argument("scripts") then
+elseif e_argument("script") or e_argument("scripts") then
-- run a script by loading it (using libs), pass args
@@ -15087,109 +16428,110 @@ elseif environment.argument("script") or environment.argument("scripts") then
ok = runners.execute_ctx_script(filename)
end
-elseif environment.argument("execute") then
+elseif e_argument("execute") then
-- execute script
runners.loadbase()
ok = runners.execute_script(filename)
-elseif environment.argument("direct") then
+elseif e_argument("direct") then
-- equals bin:
runners.loadbase()
ok = runners.execute_program(filename)
-elseif environment.argument("edit") then
+elseif e_argument("edit") then
-- edit file
runners.loadbase()
runners.edit_script(filename)
-elseif environment.argument("launch") then
+elseif e_argument("launch") then
runners.loadbase()
runners.launch_file(filename)
-elseif environment.argument("makestubs") then
+elseif e_argument("makestubs") then
-- make stubs (deprecated)
runners.handle_stubs(true)
-elseif environment.argument("removestubs") then
+elseif e_argument("removestubs") then
-- remove stubs (deprecated)
runners.loadbase()
runners.handle_stubs(false)
-elseif environment.argument("resolve") then
+elseif e_argument("resolve") then
-- resolve string
runners.loadbase()
runners.resolve_string(filename)
-elseif environment.argument("locate") then
+elseif e_argument("locate") then
- -- locate file
+ -- locate file (only database)
runners.loadbase()
runners.locate_file(filename)
-elseif environment.argument("platform") or environment.argument("show-platform") then
+elseif e_argument("platform") or e_argument("show-platform") then
-- locate platform
runners.loadbase()
runners.locate_platform()
-elseif environment.argument("prefixes") then
+elseif e_argument("prefixes") then
runners.loadbase()
runners.prefixes()
-elseif environment.argument("timedrun") then
+elseif e_argument("timedrun") then
-- timed run
runners.loadbase()
runners.timedrun(filename)
-elseif environment.argument("variables") or environment.argument("show-variables") or environment.argument("expansions") or environment.argument("show-expansions") then
+elseif e_argument("variables") or e_argument("show-variables") or e_argument("expansions") or e_argument("show-expansions") then
-- luatools: runners.execute_ctx_script("mtx-base","--expansions",filename)
resolvers.load("nofiles")
- resolvers.listers.variables(environment.argument("pattern"))
+ resolvers.listers.variables(e_argument("pattern"))
-elseif environment.argument("configurations") or environment.argument("show-configurations") then
+elseif e_argument("configurations") or e_argument("show-configurations") then
-- luatools: runners.execute_ctx_script("mtx-base","--configurations",filename)
resolvers.load("nofiles")
resolvers.listers.configurations()
-elseif environment.argument("find-file") then
+elseif e_argument("find-file") then
-- luatools: runners.execute_ctx_script("mtx-base","--find-file",filename)
resolvers.load()
- local e_pattern = environment.argument("pattern")
- local e_format = environment.argument("format")
+ local e_all = e_argument("all")
+ local e_pattern = e_argument("pattern")
+ local e_format = e_argument("format")
+ local finder = e_all and resolvers.findfiles or resolvers.findfile
if not e_pattern then
runners.register_arguments(filename)
environment.initializearguments(environment.arguments_after)
- resolvers.dowithfilesandreport(resolvers.findfiles,environment.files,e_format)
+ resolvers.dowithfilesandreport(finder,environment.files,e_format)
elseif type(e_pattern) == "string" then
- instance.allresults = true -- brrrr
- resolvers.dowithfilesandreport(resolvers.findfiles,{ e_pattern }, e_format)
+ resolvers.dowithfilesandreport(finder,{ e_pattern },e_format)
end
-elseif environment.argument("find-path") then
+elseif e_argument("find-path") then
-- luatools: runners.execute_ctx_script("mtx-base","--find-path",filename)
@@ -15201,7 +16543,7 @@ elseif environment.argument("find-path") then
print(path)
end
-elseif environment.argument("expand-braces") then
+elseif e_argument("expand-braces") then
-- luatools: runners.execute_ctx_script("mtx-base","--expand-braces",filename)
@@ -15210,7 +16552,7 @@ elseif environment.argument("expand-braces") then
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.expandbraces, environment.files)
-elseif environment.argument("expand-path") then
+elseif e_argument("expand-path") then
-- luatools: runners.execute_ctx_script("mtx-base","--expand-path",filename)
@@ -15219,7 +16561,7 @@ elseif environment.argument("expand-path") then
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.expandpath, environment.files)
-elseif environment.argument("expand-var") or environment.argument("expand-variable") then
+elseif e_argument("expand-var") or e_argument("expand-variable") then
-- luatools: runners.execute_ctx_script("mtx-base","--expand-var",filename)
@@ -15228,7 +16570,7 @@ elseif environment.argument("expand-var") or environment.argument("expand-variab
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.expansion, environment.files)
-elseif environment.argument("show-path") or environment.argument("path-value") then
+elseif e_argument("show-path") or e_argument("path-value") then
-- luatools: runners.execute_ctx_script("mtx-base","--show-path",filename)
@@ -15237,7 +16579,7 @@ elseif environment.argument("show-path") or environment.argument("path-value") t
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.showpath, environment.files)
-elseif environment.argument("var-value") or environment.argument("show-value") then
+elseif e_argument("var-value") or e_argument("show-value") then
-- luatools: runners.execute_ctx_script("mtx-base","--show-value",filename)
@@ -15246,28 +16588,36 @@ elseif environment.argument("var-value") or environment.argument("show-value") t
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.variable,environment.files)
-elseif environment.argument("format-path") then
+elseif e_argument("format-path") then
-- luatools: runners.execute_ctx_script("mtx-base","--format-path",filename)
resolvers.load()
report(caches.getwritablepath("format"))
-elseif environment.argument("pattern") then
+elseif e_argument("pattern") then
-- luatools
- runners.execute_ctx_script("mtx-base","--pattern='" .. environment.argument("pattern") .. "'",filename)
+ runners.execute_ctx_script("mtx-base","--pattern='" .. e_argument("pattern") .. "'",filename)
-elseif environment.argument("generate") then
+elseif e_argument("generate") then
-- luatools
- instance.renewcache = true
- trackers.enable("resolvers.locating")
- resolvers.load()
+ if filename and filename ~= "" then
+ resolvers.load("nofiles")
+ trackers.enable("resolvers.locating")
+ resolvers.renew(filename)
+ else
+ instance.renewcache = true
+ trackers.enable("resolvers.locating")
+ resolvers.load()
+ end
-elseif environment.argument("make") or environment.argument("ini") or environment.argument("compile") then
+ e_verbose = true
+
+elseif e_argument("make") or e_argument("ini") or e_argument("compile") then
-- luatools: runners.execute_ctx_script("mtx-base","--make",filename)
@@ -15275,29 +16625,33 @@ elseif environment.argument("make") or environment.argument("ini") or environmen
trackers.enable("resolvers.locating")
environment.make_format(filename)
-elseif environment.argument("run") then
+elseif e_argument("run") then
-- luatools
runners.execute_ctx_script("mtx-base","--run",filename)
-elseif environment.argument("fmt") then
+elseif e_argument("fmt") then
-- luatools
runners.execute_ctx_script("mtx-base","--fmt",filename)
-elseif environment.argument("help") and filename=='base' then
+elseif e_argument("help") and filename=='base' then
-- luatools
runners.execute_ctx_script("mtx-base","--help")
-elseif environment.argument("help") or filename=='help' or filename == "" then
+elseif e_argument("version") then
+
+ application.version()
+
+elseif e_argument("help") or filename=='help' or filename == "" then
application.help()
-elseif filename:find("^bin:") then
+elseif find(filename,"^bin:") then
runners.loadbase()
ok = runners.execute_program(filename)
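For illustration, the local-alias idiom this hunk applies everywhere, as a minimal hypothetical sketch (environment and application are the script's own tables):

local e_argument = environment.argument   -- cache the table lookup once for the long dispatch chain

if e_argument("version") then
    application.version()
elseif e_argument("help") then
    application.help()
end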
diff --git a/Master/bin/i386-netbsd/mtxrun b/Master/bin/i386-netbsd/mtxrun
index 7adcd3023a8..335c4fcb9e0 100755
--- a/Master/bin/i386-netbsd/mtxrun
+++ b/Master/bin/i386-netbsd/mtxrun
@@ -160,509 +160,6 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['l-lpeg'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local lpeg = require("lpeg")
-
-local type = type
-
--- Beware, we predefine a bunch of patterns here and one reason for doing so
--- is that we get consistent behaviour in some of the visualizers.
-
-lpeg.patterns = lpeg.patterns or { } -- so that we can share
-local patterns = lpeg.patterns
-
-local P, R, S, V, match = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.match
-local Ct, C, Cs, Cc = lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc
-local lpegtype = lpeg.type
-
-local utfcharacters = string.utfcharacters
-local utfgmatch = unicode and unicode.utf8.gmatch
-
-local anything = P(1)
-local endofstring = P(-1)
-local alwaysmatched = P(true)
-
-patterns.anything = anything
-patterns.endofstring = endofstring
-patterns.beginofstring = alwaysmatched
-patterns.alwaysmatched = alwaysmatched
-
-local digit, sign = R('09'), S('+-')
-local cr, lf, crlf = P("\r"), P("\n"), P("\r\n")
-local newline = crlf + cr + lf
-local escaped = P("\\") * anything
-local squote = P("'")
-local dquote = P('"')
-local space = P(" ")
-
-local utfbom_32_be = P('\000\000\254\255')
-local utfbom_32_le = P('\255\254\000\000')
-local utfbom_16_be = P('\255\254')
-local utfbom_16_le = P('\254\255')
-local utfbom_8 = P('\239\187\191')
-local utfbom = utfbom_32_be + utfbom_32_le
- + utfbom_16_be + utfbom_16_le
- + utfbom_8
-local utftype = utfbom_32_be / "utf-32-be" + utfbom_32_le / "utf-32-le"
- + utfbom_16_be / "utf-16-be" + utfbom_16_le / "utf-16-le"
- + utfbom_8 / "utf-8" + alwaysmatched / "unknown"
-
-local utf8next = R("\128\191")
-
-patterns.utf8one = R("\000\127")
-patterns.utf8two = R("\194\223") * utf8next
-patterns.utf8three = R("\224\239") * utf8next * utf8next
-patterns.utf8four = R("\240\244") * utf8next * utf8next * utf8next
-patterns.utfbom = utfbom
-patterns.utftype = utftype
-
-local utf8char = patterns.utf8one + patterns.utf8two + patterns.utf8three + patterns.utf8four
-local validutf8char = utf8char^0 * endofstring * Cc(true) + Cc(false)
-
-patterns.utf8 = utf8char
-patterns.utf8char = utf8char
-patterns.validutf8 = validutf8char
-patterns.validutf8char = validutf8char
-
-patterns.digit = digit
-patterns.sign = sign
-patterns.cardinal = sign^0 * digit^1
-patterns.integer = sign^0 * digit^1
-patterns.float = sign^0 * digit^0 * P('.') * digit^1
-patterns.cfloat = sign^0 * digit^0 * P(',') * digit^1
-patterns.number = patterns.float + patterns.integer
-patterns.cnumber = patterns.cfloat + patterns.integer
-patterns.oct = P("0") * R("07")^1
-patterns.octal = patterns.oct
-patterns.HEX = P("0x") * R("09","AF")^1
-patterns.hex = P("0x") * R("09","af")^1
-patterns.hexadecimal = P("0x") * R("09","AF","af")^1
-patterns.lowercase = R("az")
-patterns.uppercase = R("AZ")
-patterns.letter = patterns.lowercase + patterns.uppercase
-patterns.space = space
-patterns.tab = P("\t")
-patterns.spaceortab = patterns.space + patterns.tab
-patterns.eol = S("\n\r")
-patterns.spacer = S(" \t\f\v") -- + string.char(0xc2, 0xa0) if we want utf (cf mail roberto)
-patterns.newline = newline
-patterns.emptyline = newline^1
-patterns.nonspacer = 1 - patterns.spacer
-patterns.whitespace = patterns.eol + patterns.spacer
-patterns.nonwhitespace = 1 - patterns.whitespace
-patterns.equal = P("=")
-patterns.comma = P(",")
-patterns.commaspacer = P(",") * patterns.spacer^0
-patterns.period = P(".")
-patterns.colon = P(":")
-patterns.semicolon = P(";")
-patterns.underscore = P("_")
-patterns.escaped = escaped
-patterns.squote = squote
-patterns.dquote = dquote
-patterns.nosquote = (escaped + (1-squote))^0
-patterns.nodquote = (escaped + (1-dquote))^0
-patterns.unsingle = (squote/"") * patterns.nosquote * (squote/"")
-patterns.undouble = (dquote/"") * patterns.nodquote * (dquote/"")
-patterns.unquoted = patterns.undouble + patterns.unsingle -- more often undouble
-patterns.unspacer = ((patterns.spacer^1)/"")^0
-
-patterns.somecontent = (anything - newline - space)^1 -- (utf8char - newline - space)^1
-patterns.beginline = #(1-newline)
-
-local unquoted = Cs(patterns.unquoted * endofstring) -- not C
-
-function string.unquoted(str)
- return match(unquoted,str) or str
-end
-
-
-function lpeg.anywhere(pattern) --slightly adapted from website
- return P { P(pattern) + 1 * V(1) } -- why so complex?
-end
-
-function lpeg.splitter(pattern, action)
- return (((1-P(pattern))^1)/action+1)^0
-end
-
-local splitters_s, splitters_m = { }, { }
-
-local function splitat(separator,single)
- local splitter = (single and splitters_s[separator]) or splitters_m[separator]
- if not splitter then
- separator = P(separator)
- local other = C((1 - separator)^0)
- if single then
- local any = anything
- splitter = other * (separator * C(any^0) + "") -- ?
- splitters_s[separator] = splitter
- else
- splitter = other * (separator * other)^0
- splitters_m[separator] = splitter
- end
- end
- return splitter
-end
-
-lpeg.splitat = splitat
-
-
-local cache = { }
-
-function lpeg.split(separator,str)
- local c = cache[separator]
- if not c then
- c = Ct(splitat(separator))
- cache[separator] = c
- end
- return match(c,str)
-end
-
-function string.split(str,separator)
- local c = cache[separator]
- if not c then
- c = Ct(splitat(separator))
- cache[separator] = c
- end
- return match(c,str)
-end
-
-local spacing = patterns.spacer^0 * newline -- sort of strip
-local empty = spacing * Cc("")
-local nonempty = Cs((1-spacing)^1) * spacing^-1
-local content = (empty + nonempty)^1
-
-patterns.textline = content
-
-
-local linesplitter = Ct(splitat(newline))
-
-patterns.linesplitter = linesplitter
-
-function string.splitlines(str)
- return match(linesplitter,str)
-end
-
-local utflinesplitter = utfbom^-1 * Ct(splitat(newline))
-
-patterns.utflinesplitter = utflinesplitter
-
-function string.utfsplitlines(str)
- return match(utflinesplitter,str)
-end
-
-
-local cache = { }
-
-function lpeg.checkedsplit(separator,str)
- local c = cache[separator]
- if not c then
- separator = P(separator)
- local other = C((1 - separator)^1)
- c = Ct(separator^0 * other * (separator^1 * other)^0)
- cache[separator] = c
- end
- return match(c,str)
-end
-
-function string.checkedsplit(str,separator)
- local c = cache[separator]
- if not c then
- separator = P(separator)
- local other = C((1 - separator)^1)
- c = Ct(separator^0 * other * (separator^1 * other)^0)
- cache[separator] = c
- end
- return match(c,str)
-end
-
-
-local f1 = string.byte
-
-local function f2(s) local c1, c2 = f1(s,1,2) return c1 * 64 + c2 - 12416 end
-local function f3(s) local c1, c2, c3 = f1(s,1,3) return (c1 * 64 + c2) * 64 + c3 - 925824 end
-local function f4(s) local c1, c2, c3, c4 = f1(s,1,4) return ((c1 * 64 + c2) * 64 + c3) * 64 + c4 - 63447168 end
-
-local utf8byte = patterns.utf8one/f1 + patterns.utf8two/f2 + patterns.utf8three/f3 + patterns.utf8four/f4
-
-patterns.utf8byte = utf8byte
-
-
-
-local cache = { }
-
-function lpeg.stripper(str)
- if type(str) == "string" then
- local s = cache[str]
- if not s then
- s = Cs(((S(str)^1)/"" + 1)^0)
- cache[str] = s
- end
- return s
- else
- return Cs(((str^1)/"" + 1)^0)
- end
-end
-
-local cache = { }
-
-function lpeg.keeper(str)
- if type(str) == "string" then
- local s = cache[str]
- if not s then
- s = Cs((((1-S(str))^1)/"" + 1)^0)
- cache[str] = s
- end
- return s
- else
- return Cs((((1-str)^1)/"" + 1)^0)
- end
-end
-
-function lpeg.frontstripper(str) -- or pattern (yet undocumented)
- return (P(str) + P(true)) * Cs(P(1)^0)
-end
-
-function lpeg.endstripper(str) -- or pattern (yet undocumented)
- return Cs((1 - P(str) * P(-1))^0)
-end
-
--- Just for fun I looked at the used bytecode and
--- p = (p and p + pp) or pp gets one more (testset).
-
-function lpeg.replacer(one,two)
- if type(one) == "table" then
- local no = #one
- if no > 0 then
- local p
- for i=1,no do
- local o = one[i]
- local pp = P(o[1]) / o[2]
- if p then
- p = p + pp
- else
- p = pp
- end
- end
- return Cs((p + 1)^0)
- end
- else
- two = two or ""
- return Cs((P(one)/two + 1)^0)
- end
-end
-
-local splitters_f, splitters_s = { }, { }
-
-function lpeg.firstofsplit(separator) -- always return value
- local splitter = splitters_f[separator]
- if not splitter then
- separator = P(separator)
- splitter = C((1 - separator)^0)
- splitters_f[separator] = splitter
- end
- return splitter
-end
-
-function lpeg.secondofsplit(separator) -- nil if not split
- local splitter = splitters_s[separator]
- if not splitter then
- separator = P(separator)
- splitter = (1 - separator)^0 * separator * C(anything^0)
- splitters_s[separator] = splitter
- end
- return splitter
-end
-
-function lpeg.balancer(left,right)
- left, right = P(left), P(right)
- return P { left * ((1 - left - right) + V(1))^0 * right }
-end
-
-
-
-local nany = utf8char/""
-
-function lpeg.counter(pattern)
- pattern = Cs((P(pattern)/" " + nany)^0)
- return function(str)
- return #match(pattern,str)
- end
-end
-
-if utfgmatch then
-
- function lpeg.count(str,what) -- replaces string.count
- if type(what) == "string" then
- local n = 0
- for _ in utfgmatch(str,what) do
- n = n + 1
- end
- return n
- else -- 4 times slower but still faster than / function
- return #match(Cs((P(what)/" " + nany)^0),str)
- end
- end
-
-else
-
- local cache = { }
-
- function lpeg.count(str,what) -- replaces string.count
- if type(what) == "string" then
- local p = cache[what]
- if not p then
- p = Cs((P(what)/" " + nany)^0)
- cache[p] = p
- end
- return #match(p,str)
- else -- 4 times slower but still faster than / function
- return #match(Cs((P(what)/" " + nany)^0),str)
- end
- end
-
-end
-
-local patterns_escapes = { -- also defines in l-string
- ["%"] = "%%",
- ["."] = "%.",
- ["+"] = "%+", ["-"] = "%-", ["*"] = "%*",
- ["["] = "%[", ["]"] = "%]",
- ["("] = "%)", [")"] = "%)",
- -- ["{"] = "%{", ["}"] = "%}"
- -- ["^"] = "%^", ["$"] = "%$",
-}
-
-local simple_escapes = { -- also defines in l-string
- ["-"] = "%-",
- ["."] = "%.",
- ["?"] = ".",
- ["*"] = ".*",
-}
-
-local p = Cs((S("-.+*%()[]") / patterns_escapes + anything)^0)
-local s = Cs((S("-.+*%()[]") / simple_escapes + anything)^0)
-
-function string.escapedpattern(str,simple)
- return match(simple and s or p,str)
-end
-
--- utf extensies
-
-lpeg.UP = lpeg.P
-
-if utfcharacters then
-
- function lpeg.US(str)
- local p
- for uc in utfcharacters(str) do
- if p then
- p = p + P(uc)
- else
- p = P(uc)
- end
- end
- return p
- end
-
-
-elseif utfgmatch then
-
- function lpeg.US(str)
- local p
- for uc in utfgmatch(str,".") do
- if p then
- p = p + P(uc)
- else
- p = P(uc)
- end
- end
- return p
- end
-
-else
-
- function lpeg.US(str)
- local p
- local f = function(uc)
- if p then
- p = p + P(uc)
- else
- p = P(uc)
- end
- end
- match((utf8char/f)^0,str)
- return p
- end
-
-end
-
-local range = Cs(utf8byte) * (Cs(utf8byte) + Cc(false))
-
-local utfchar = unicode and unicode.utf8 and unicode.utf8.char
-
-function lpeg.UR(str,more)
- local first, last
- if type(str) == "number" then
- first = str
- last = more or first
- else
- first, last = match(range,str)
- if not last then
- return P(str)
- end
- end
- if first == last then
- return P(str)
- elseif utfchar and last - first < 8 then -- a somewhat arbitrary criterium
- local p
- for i=first,last do
- if p then
- p = p + P(utfchar(i))
- else
- p = P(utfchar(i))
- end
- end
- return p -- nil when invalid range
- else
- local f = function(b)
- return b >= first and b <= last
- end
- return utf8byte / f -- nil when invalid range
- end
-end
-
-
-
-function lpeg.oneof(list,...) -- lpeg.oneof("elseif","else","if","then")
- if type(list) ~= "table" then
- list = { list, ... }
- end
- -- sort(list) -- longest match first
- local p = P(list[1])
- for l=2,#list do
- p = p + P(list[l])
- end
- return p
-end
-
-function lpeg.is_lpeg(p)
- return p and lpegtype(p) == "pattern"
-end
-
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
if not modules then modules = { } end modules ['l-table'] = {
version = 1.001,
comment = "companion to luat-lib.mkiv",
@@ -675,11 +172,12 @@ local type, next, tostring, tonumber, ipairs, table, string = type, next, tostri
local concat, sort, insert, remove = table.concat, table.sort, table.insert, table.remove
local format, find, gsub, lower, dump, match = string.format, string.find, string.gsub, string.lower, string.dump, string.match
local getmetatable, setmetatable = getmetatable, setmetatable
+local getinfo = debug.getinfo
-- Starting with version 5.2 Lua no longer provides ipairs, which makes
-- sense. As we already used the for loop and # in most places the
-- impact on ConTeXt was not that large; the remaining ipairs already
--- have been replaced. In a similar fashio we also hardly used pairs.
+-- have been replaced. In a similar fashion we also hardly used pairs.
--
-- Just in case, we provide the fallbacks as discussed in Programming
-- in Lua (http://www.lua.org/pil/7.3.html):
@@ -1022,6 +520,8 @@ end
-- problem: there is no good number_to_string converter with the best resolution
+local function dummy() end
+
local function do_serialize(root,name,depth,level,indexed)
if level > 0 then
depth = depth .. " "
@@ -1214,19 +714,20 @@ local function do_serialize(root,name,depth,level,indexed)
end
elseif t == "function" then
if functions then
+ local f = getinfo(v).what == "C" and dump(dummy) or dump(v)
+ -- local f = getinfo(v).what == "C" and dump(function(...) return v(...) end) or dump(v)
if tk == "number" then -- or find(k,"^%d+$") then
if hexify then
- handle(format("%s [0x%04X]=loadstring(%q),",depth,k,dump(v)))
+ handle(format("%s [0x%04X]=loadstring(%q),",depth,k,f))
else
- handle(format("%s [%s]=loadstring(%q),",depth,k,dump(v)))
+ handle(format("%s [%s]=loadstring(%q),",depth,k,f))
end
elseif tk == "boolean" then
- handle(format("%s [%s]=loadstring(%q),",depth,tostring(k),dump(v)))
+ handle(format("%s [%s]=loadstring(%q),",depth,tostring(k),f))
elseif noquotes and not reserved[k] and find(k,"^%a[%w%_]*$") then
- handle(format("%s %s=loadstring(%q),",depth,k,dump(v)))
+ handle(format("%s %s=loadstring(%q),",depth,k,f))
else
- -- handle(format("%s [%q]=loadstring(%q),",depth,k,dump(v)))
- handle(format("%s [%q]=loadstring(%q),",depth,k,debug.getinfo(v).what == "C" and "C code" or dump(v)))
+ handle(format("%s [%q]=loadstring(%q),",depth,k,f))
end
end
else
@@ -1571,6 +1072,730 @@ function table.has_one_entry(t)
return t and not next(t,next(t))
end
+-- new
+
+function table.loweredkeys(t) -- maybe utf
+ local l = { }
+ for k, v in next, t do
+ l[lower(k)] = v
+ end
+ return l
+end
+
+-- new, might move (maybe duplicate)
+
+function table.unique(old)
+ local hash = { }
+ local new = { }
+ local n = 0
+ for i=1,#old do
+ local oi = old[i]
+ if not hash[oi] then
+ n = n + 1
+ new[n] = oi
+ hash[oi] = true
+ end
+ end
+ return new
+end
+
+-- function table.sorted(t,...)
+-- table.sort(t,...)
+-- return t -- still sorts in-place
+-- end
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+if not modules then modules = { } end modules ['l-lpeg'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+
+-- a new lpeg fails on a #(1-P(":")) test and really needs a + P(-1)
+
+local lpeg = require("lpeg")
+
+-- tracing (only used when we encounter a problem in integration of lpeg in luatex)
+
+local report = texio and texio.write_nl or print
+
+
+
+
+local type = type
+local byte, char, gmatch = string.byte, string.char, string.gmatch
+
+-- Beware, we predefine a bunch of patterns here and one reason for doing so
+-- is that we get consistent behaviour in some of the visualizers.
+
+lpeg.patterns = lpeg.patterns or { } -- so that we can share
+local patterns = lpeg.patterns
+
+local P, R, S, V, match = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.match
+local Ct, C, Cs, Cc = lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc
+local lpegtype = lpeg.type
+
+local utfcharacters = string.utfcharacters
+local utfgmatch = unicode and unicode.utf8.gmatch
+
+local anything = P(1)
+local endofstring = P(-1)
+local alwaysmatched = P(true)
+
+patterns.anything = anything
+patterns.endofstring = endofstring
+patterns.beginofstring = alwaysmatched
+patterns.alwaysmatched = alwaysmatched
+
+local digit, sign = R('09'), S('+-')
+local cr, lf, crlf = P("\r"), P("\n"), P("\r\n")
+local newline = crlf + S("\r\n") -- cr + lf
+local escaped = P("\\") * anything
+local squote = P("'")
+local dquote = P('"')
+local space = P(" ")
+
+local utfbom_32_be = P('\000\000\254\255')
+local utfbom_32_le = P('\255\254\000\000')
+local utfbom_16_be = P('\255\254')
+local utfbom_16_le = P('\254\255')
+local utfbom_8 = P('\239\187\191')
+local utfbom = utfbom_32_be + utfbom_32_le
+ + utfbom_16_be + utfbom_16_le
+ + utfbom_8
+local utftype = utfbom_32_be / "utf-32-be" + utfbom_32_le / "utf-32-le"
+ + utfbom_16_be / "utf-16-be" + utfbom_16_le / "utf-16-le"
+ + utfbom_8 / "utf-8" + alwaysmatched / "unknown"
+
+local utf8next = R("\128\191")
+
+patterns.utf8one = R("\000\127")
+patterns.utf8two = R("\194\223") * utf8next
+patterns.utf8three = R("\224\239") * utf8next * utf8next
+patterns.utf8four = R("\240\244") * utf8next * utf8next * utf8next
+patterns.utfbom = utfbom
+patterns.utftype = utftype
+
+local utf8char = patterns.utf8one + patterns.utf8two + patterns.utf8three + patterns.utf8four
+local validutf8char = utf8char^0 * endofstring * Cc(true) + Cc(false)
+
+patterns.utf8 = utf8char
+patterns.utf8char = utf8char
+patterns.validutf8 = validutf8char
+patterns.validutf8char = validutf8char
+
+patterns.digit = digit
+patterns.sign = sign
+patterns.cardinal = sign^0 * digit^1
+patterns.integer = sign^0 * digit^1
+patterns.float = sign^0 * digit^0 * P('.') * digit^1
+patterns.cfloat = sign^0 * digit^0 * P(',') * digit^1
+patterns.number = patterns.float + patterns.integer
+patterns.cnumber = patterns.cfloat + patterns.integer
+patterns.oct = P("0") * R("07")^1
+patterns.octal = patterns.oct
+patterns.HEX = P("0x") * R("09","AF")^1
+patterns.hex = P("0x") * R("09","af")^1
+patterns.hexadecimal = P("0x") * R("09","AF","af")^1
+patterns.lowercase = R("az")
+patterns.uppercase = R("AZ")
+patterns.letter = patterns.lowercase + patterns.uppercase
+patterns.space = space
+patterns.tab = P("\t")
+patterns.spaceortab = patterns.space + patterns.tab
+patterns.eol = S("\n\r")
+patterns.spacer = S(" \t\f\v") -- + char(0xc2, 0xa0) if we want utf (cf mail roberto)
+patterns.newline = newline
+patterns.emptyline = newline^1
+patterns.nonspacer = 1 - patterns.spacer
+patterns.whitespace = patterns.eol + patterns.spacer
+patterns.nonwhitespace = 1 - patterns.whitespace
+patterns.equal = P("=")
+patterns.comma = P(",")
+patterns.commaspacer = P(",") * patterns.spacer^0
+patterns.period = P(".")
+patterns.colon = P(":")
+patterns.semicolon = P(";")
+patterns.underscore = P("_")
+patterns.escaped = escaped
+patterns.squote = squote
+patterns.dquote = dquote
+patterns.nosquote = (escaped + (1-squote))^0
+patterns.nodquote = (escaped + (1-dquote))^0
+patterns.unsingle = (squote/"") * patterns.nosquote * (squote/"")
+patterns.undouble = (dquote/"") * patterns.nodquote * (dquote/"")
+patterns.unquoted = patterns.undouble + patterns.unsingle -- more often undouble
+patterns.unspacer = ((patterns.spacer^1)/"")^0
+
+patterns.somecontent = (anything - newline - space)^1 -- (utf8char - newline - space)^1
+patterns.beginline = #(1-newline)
+
+-- print(string.unquoted("test"))
+-- print(string.unquoted([["t\"est"]]))
+-- print(string.unquoted([["t\"est"x]]))
+-- print(string.unquoted("\'test\'"))
+-- print(string.unquoted('"test"'))
+-- print(string.unquoted('"test"'))
+
+function lpeg.anywhere(pattern) --slightly adapted from website
+ return P { P(pattern) + 1 * V(1) } -- why so complex?
+end
+
+function lpeg.splitter(pattern, action)
+ return (((1-P(pattern))^1)/action+1)^0
+end
+
+function lpeg.tsplitter(pattern, action)
+ return Ct((((1-P(pattern))^1)/action+1)^0)
+end
+
+-- problem: the separator can be an lpeg pattern and that does not hash too well, but
+-- it's quite okay as the key is then not garbage collected
+
+local splitters_s, splitters_m, splitters_t = { }, { }, { }
+
+local function splitat(separator,single)
+ local splitter = (single and splitters_s[separator]) or splitters_m[separator]
+ if not splitter then
+ separator = P(separator)
+ local other = C((1 - separator)^0)
+ if single then
+ local any = anything
+ splitter = other * (separator * C(any^0) + "") -- ?
+ splitters_s[separator] = splitter
+ else
+ splitter = other * (separator * other)^0
+ splitters_m[separator] = splitter
+ end
+ end
+ return splitter
+end
+
+local function tsplitat(separator)
+ local splitter = splitters_t[separator]
+ if not splitter then
+ splitter = Ct(splitat(separator))
+ splitters_t[separator] = splitter
+ end
+ return splitter
+end
+
+lpeg.splitat = splitat
+lpeg.tsplitat = tsplitat
+
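A small usage sketch of the two splitters defined above (assuming this l-lpeg closure is loaded):

local splitter  = lpeg.splitat(",")    -- pieces as multiple return values
local tsplitter = lpeg.tsplitat(",")   -- pieces collected in a table
print(lpeg.match(splitter ,"a,b,c"))   -- a   b   c
print(#lpeg.match(tsplitter,"a,b,c"))  -- 3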
+function string.splitup(str,separator)
+ if not separator then
+ separator = ","
+ end
+ return match(splitters_m[separator] or splitat(separator),str)
+end
+
+
+local cache = { }
+
+function lpeg.split(separator,str)
+ local c = cache[separator]
+ if not c then
+ c = tsplitat(separator)
+ cache[separator] = c
+ end
+ return match(c,str)
+end
+
+function string.split(str,separator)
+ local c = cache[separator]
+ if not c then
+ c = tsplitat(separator)
+ cache[separator] = c
+ end
+ return match(c,str)
+end
+
+local spacing = patterns.spacer^0 * newline -- sort of strip
+local empty = spacing * Cc("")
+local nonempty = Cs((1-spacing)^1) * spacing^-1
+local content = (empty + nonempty)^1
+
+patterns.textline = content
+
+
+local linesplitter = tsplitat(newline)
+
+patterns.linesplitter = linesplitter
+
+function string.splitlines(str)
+ return match(linesplitter,str)
+end
+
+local utflinesplitter = utfbom^-1 * tsplitat(newline)
+
+patterns.utflinesplitter = utflinesplitter
+
+function string.utfsplitlines(str)
+ return match(utflinesplitter,str or "")
+end
+
+
+local cache = { }
+
+function lpeg.checkedsplit(separator,str)
+ local c = cache[separator]
+ if not c then
+ separator = P(separator)
+ local other = C((1 - separator)^1)
+ c = Ct(separator^0 * other * (separator^1 * other)^0)
+ cache[separator] = c
+ end
+ return match(c,str)
+end
+
+function string.checkedsplit(str,separator)
+ local c = cache[separator]
+ if not c then
+ separator = P(separator)
+ local other = C((1 - separator)^1)
+ c = Ct(separator^0 * other * (separator^1 * other)^0)
+ cache[separator] = c
+ end
+ return match(c,str)
+end
+
+
+local function f2(s) local c1, c2 = byte(s,1,2) return c1 * 64 + c2 - 12416 end
+local function f3(s) local c1, c2, c3 = byte(s,1,3) return (c1 * 64 + c2) * 64 + c3 - 925824 end
+local function f4(s) local c1, c2, c3, c4 = byte(s,1,4) return ((c1 * 64 + c2) * 64 + c3) * 64 + c4 - 63447168 end
+
+local utf8byte = patterns.utf8one/byte + patterns.utf8two/f2 + patterns.utf8three/f3 + patterns.utf8four/f4
+
+patterns.utf8byte = utf8byte
+
+
+
+local cache = { }
+
+function lpeg.stripper(str)
+ if type(str) == "string" then
+ local s = cache[str]
+ if not s then
+ s = Cs(((S(str)^1)/"" + 1)^0)
+ cache[str] = s
+ end
+ return s
+ else
+ return Cs(((str^1)/"" + 1)^0)
+ end
+end
+
+local cache = { }
+
+function lpeg.keeper(str)
+ if type(str) == "string" then
+ local s = cache[str]
+ if not s then
+ s = Cs((((1-S(str))^1)/"" + 1)^0)
+ cache[str] = s
+ end
+ return s
+ else
+ return Cs((((1-str)^1)/"" + 1)^0)
+ end
+end
+
+function lpeg.frontstripper(str) -- or pattern (yet undocumented)
+ return (P(str) + P(true)) * Cs(P(1)^0)
+end
+
+function lpeg.endstripper(str) -- or pattern (yet undocumented)
+ return Cs((1 - P(str) * P(-1))^0)
+end
+
+-- Just for fun I looked at the used bytecode and
+-- p = (p and p + pp) or pp gets one more (testset).
+
+function lpeg.replacer(one,two)
+ if type(one) == "table" then
+ local no = #one
+ if no > 0 then
+ local p
+ for i=1,no do
+ local o = one[i]
+ local pp = P(o[1]) / o[2]
+ if p then
+ p = p + pp
+ else
+ p = pp
+ end
+ end
+ return Cs((p + 1)^0)
+ end
+ else
+ two = two or ""
+ return Cs((P(one)/two + 1)^0)
+ end
+end
+
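A brief sketch of lpeg.replacer with both calling conventions (the strings are only examples):

local tidy = lpeg.replacer { { "foo", "bar" }, { "baz", "gnu" } }
print(lpeg.match(tidy,"foo or baz"))               -- bar or gnu
print(lpeg.match(lpeg.replacer("-"," "),"a-b-c"))  -- a b c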
+local splitters_f, splitters_s = { }, { }
+
+function lpeg.firstofsplit(separator) -- always return value
+ local splitter = splitters_f[separator]
+ if not splitter then
+ separator = P(separator)
+ splitter = C((1 - separator)^0)
+ splitters_f[separator] = splitter
+ end
+ return splitter
+end
+
+function lpeg.secondofsplit(separator) -- nil if not split
+ local splitter = splitters_s[separator]
+ if not splitter then
+ separator = P(separator)
+ splitter = (1 - separator)^0 * separator * C(anything^0)
+ splitters_s[separator] = splitter
+ end
+ return splitter
+end
+
+function lpeg.balancer(left,right)
+ left, right = P(left), P(right)
+ return P { left * ((1 - left - right) + V(1))^0 * right }
+end
+
+
+
+local nany = utf8char/""
+
+function lpeg.counter(pattern)
+ pattern = Cs((P(pattern)/" " + nany)^0)
+ return function(str)
+ return #match(pattern,str)
+ end
+end
+
+if utfgmatch then
+
+ function lpeg.count(str,what) -- replaces string.count
+ if type(what) == "string" then
+ local n = 0
+ for _ in utfgmatch(str,what) do
+ n = n + 1
+ end
+ return n
+ else -- 4 times slower but still faster than / function
+ return #match(Cs((P(what)/" " + nany)^0),str)
+ end
+ end
+
+else
+
+ local cache = { }
+
+ function lpeg.count(str,what) -- replaces string.count
+ if type(what) == "string" then
+ local p = cache[what]
+ if not p then
+ p = Cs((P(what)/" " + nany)^0)
+ cache[p] = p
+ end
+ return #match(p,str)
+ else -- 4 times slower but still faster than / function
+ return #match(Cs((P(what)/" " + nany)^0),str)
+ end
+ end
+
+end
+
+local patterns_escapes = { -- also defined in l-string
+ ["%"] = "%%",
+ ["."] = "%.",
+ ["+"] = "%+", ["-"] = "%-", ["*"] = "%*",
+ ["["] = "%[", ["]"] = "%]",
+ ["("] = "%)", [")"] = "%)",
+ -- ["{"] = "%{", ["}"] = "%}"
+ -- ["^"] = "%^", ["$"] = "%$",
+}
+
+local simple_escapes = { -- also defined in l-string
+ ["-"] = "%-",
+ ["."] = "%.",
+ ["?"] = ".",
+ ["*"] = ".*",
+}
+
+local p = Cs((S("-.+*%()[]") / patterns_escapes + anything)^0)
+local s = Cs((S("-.+*%()[]") / simple_escapes + anything)^0)
+
+function string.escapedpattern(str,simple)
+ return match(simple and s or p,str)
+end
+
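A short sketch of string.escapedpattern, showing what the two escape tables above produce:

print(string.escapedpattern("1.2-3"))       -- 1%.2%-3  (safe to feed to string.find)
print(string.escapedpattern("*.lua",true))  -- .*%.lua  (simple wildcard form)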
+-- utf extensions
+
+lpeg.UP = lpeg.P
+
+if utfcharacters then
+
+ function lpeg.US(str)
+ local p
+ for uc in utfcharacters(str) do
+ if p then
+ p = p + P(uc)
+ else
+ p = P(uc)
+ end
+ end
+ return p
+ end
+
+
+elseif utfgmatch then
+
+ function lpeg.US(str)
+ local p
+ for uc in utfgmatch(str,".") do
+ if p then
+ p = p + P(uc)
+ else
+ p = P(uc)
+ end
+ end
+ return p
+ end
+
+else
+
+ function lpeg.US(str)
+ local p
+ local f = function(uc)
+ if p then
+ p = p + P(uc)
+ else
+ p = P(uc)
+ end
+ end
+ match((utf8char/f)^0,str)
+ return p
+ end
+
+end
+
+local range = Cs(utf8byte) * (Cs(utf8byte) + Cc(false))
+
+local utfchar = unicode and unicode.utf8 and unicode.utf8.char
+
+function lpeg.UR(str,more)
+ local first, last
+ if type(str) == "number" then
+ first = str
+ last = more or first
+ else
+ first, last = match(range,str)
+ if not last then
+ return P(str)
+ end
+ end
+ if first == last then
+ return P(str)
+ elseif utfchar and last - first < 8 then -- a somewhat arbitrary criterion
+ local p
+ for i=first,last do
+ if p then
+ p = p + P(utfchar(i))
+ else
+ p = P(utfchar(i))
+ end
+ end
+ return p -- nil when invalid range
+ else
+ local f = function(b)
+ return b >= first and b <= last
+ end
+ return utf8byte / f -- nil when invalid range
+ end
+end
+
+
+
+function lpeg.oneof(list,...) -- lpeg.oneof("elseif","else","if","then")
+ if type(list) ~= "table" then
+ list = { list, ... }
+ end
+ -- sort(list) -- longest match first
+ local p = P(list[1])
+ for l=2,#list do
+ p = p + P(list[l])
+ end
+ return p
+end
+
+function lpeg.is_lpeg(p)
+ return p and lpegtype(p) == "pattern"
+end
+
+-- For the moment here, but it might move to utilities. Beware, we need to
+-- have the longest keyword first, so 'aaa' comes before 'aa' which is why we
+-- loop back from the end, i.e. prepend.
+
+local sort, fastcopy, sortedkeys = table.sort, table.fastcopy, table.sortedkeys -- dependency!
+
+function lpeg.append(list,pp,delayed,checked)
+ local p = pp
+ if #list > 0 then
+ local keys = fastcopy(list)
+ sort(keys)
+ for i=#keys,1,-1 do
+ local k = keys[i]
+ if p then
+ p = P(k) + p
+ else
+ p = P(k)
+ end
+ end
+ elseif delayed then -- hm, it looks like the lpeg parser resolves anyway
+ local keys = sortedkeys(list)
+ if p then
+ for i=1,#keys,1 do
+ local k = keys[i]
+ local v = list[k]
+ p = P(k)/list + p
+ end
+ else
+ for i=1,#keys do
+ local k = keys[i]
+ local v = list[k]
+ if p then
+ p = P(k) + p
+ else
+ p = P(k)
+ end
+ end
+ if p then
+ p = p / list
+ end
+ end
+ elseif checked then
+ -- problem: substitution gives a capture
+ local keys = sortedkeys(list)
+ for i=1,#keys do
+ local k = keys[i]
+ local v = list[k]
+ if p then
+ if k == v then
+ p = P(k) + p
+ else
+ p = P(k)/v + p
+ end
+ else
+ if k == v then
+ p = P(k)
+ else
+ p = P(k)/v
+ end
+ end
+ end
+ else
+ local keys = sortedkeys(list)
+ for i=1,#keys do
+ local k = keys[i]
+ local v = list[k]
+ if p then
+ p = P(k)/v + p
+ else
+ p = P(k)/v
+ end
+ end
+ end
+ return p
+end
+
+-- inspect(lpeg.append({ a = "1", aa = "1", aaa = "1" } ,nil,true))
+-- inspect(lpeg.append({ ["degree celsius"] = "1", celsius = "1", degree = "1" } ,nil,true))
+
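Two more sketches in the same spirit (table.fastcopy and table.sortedkeys come from the l-table closure):

local p = lpeg.append { "one", "two", "three" }
print(lpeg.match(p,"twofold"))                              -- 4 (matched "two")

local q = lpeg.append({ ae = "x", oe = "y" },nil,nil,true)  -- checked: substitute on match
print(lpeg.match(lpeg.Cs((q + 1)^0),"aeon"))                -- xon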
+-- function lpeg.exact_match(words,case_insensitive)
+-- local pattern = concat(words)
+-- if case_insensitive then
+-- local pattern = S(upper(characters)) + S(lower(characters))
+-- local list = { }
+-- for i=1,#words do
+-- list[lower(words[i])] = true
+-- end
+-- return Cmt(pattern^1, function(_,i,s)
+-- return list[lower(s)] and i
+-- end)
+-- else
+-- local pattern = S(concat(words))
+-- local list = { }
+-- for i=1,#words do
+-- list[words[i]] = true
+-- end
+-- return Cmt(pattern^1, function(_,i,s)
+-- return list[s] and i
+-- end)
+-- end
+-- end
+
+-- experiment:
+
+local function make(t)
+ local p
+-- for k, v in next, t do
+ for k, v in table.sortedhash(t) do
+ if not p then
+ if next(v) then
+ p = P(k) * make(v)
+ else
+ p = P(k)
+ end
+ else
+ if next(v) then
+ p = p + P(k) * make(v)
+ else
+ p = p + P(k)
+ end
+ end
+ end
+ return p
+end
+
+function lpeg.utfchartabletopattern(list)
+ local tree = { }
+ for i=1,#list do
+ local t = tree
+ for c in gmatch(list[i],".") do
+ if not t[c] then
+ t[c] = { }
+ end
+ t = t[c]
+ end
+ end
+ return make(tree)
+end
+
+-- inspect ( lpeg.utfchartabletopattern {
+-- utfchar(0x00A0), -- nbsp
+-- utfchar(0x2000), -- enquad
+-- utfchar(0x2001), -- emquad
+-- utfchar(0x2002), -- enspace
+-- utfchar(0x2003), -- emspace
+-- utfchar(0x2004), -- threeperemspace
+-- utfchar(0x2005), -- fourperemspace
+-- utfchar(0x2006), -- sixperemspace
+-- utfchar(0x2007), -- figurespace
+-- utfchar(0x2008), -- punctuationspace
+-- utfchar(0x2009), -- breakablethinspace
+-- utfchar(0x200A), -- hairspace
+-- utfchar(0x200B), -- zerowidthspace
+-- utfchar(0x202F), -- narrownobreakspace
+-- utfchar(0x205F), -- math thinspace
+-- } )
+
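A minimal sketch of the tree based matcher (table.sortedhash also comes from l-table):

local p = lpeg.utfchartabletopattern { "aa", "ab", "b" }
print(lpeg.match(p,"abc"))   -- 3: the shared prefix "a" is factored out, then "b" matches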
end -- of closure
@@ -1831,6 +2056,11 @@ function io.readstring(f,n,m)
return str
end
+--
+
+if not io.i_limiter then function io.i_limiter() end end -- dummy so we can test safely
+if not io.o_limiter then function io.o_limiter() end end -- dummy so we can test safely
+
end -- of closure
@@ -2061,7 +2291,7 @@ if not modules then modules = { } end modules ['l-os'] = {
-- maybe build io.flush in os.execute
local os = os
-local find, format, gsub, upper = string.find, string.format, string.gsub, string.upper
+local find, format, gsub, upper, gmatch = string.find, string.format, string.gsub, string.upper, string.gmatch
local concat = table.concat
local random, ceil = math.random, math.ceil
local rawget, rawset, type, getmetatable, setmetatable, tonumber = rawget, rawset, type, getmetatable, setmetatable, tonumber
@@ -2167,9 +2397,9 @@ os.type = os.type or (io.pathseparator == ";" and "windows") or "unix"
os.name = os.name or (os.type == "windows" and "mswin" ) or "linux"
if os.type == "windows" then
- os.libsuffix, os.binsuffix = 'dll', 'exe'
+ os.libsuffix, os.binsuffix, os.binsuffixes = 'dll', 'exe', { 'exe', 'cmd', 'bat' }
else
- os.libsuffix, os.binsuffix = 'so', ''
+ os.libsuffix, os.binsuffix, os.binsuffixes = 'so', '', { '' }
end
function os.launch(str)
@@ -2400,6 +2630,39 @@ function os.timezone(delta)
end
end
+local memory = { }
+
+local function which(filename)
+ local fullname = memory[filename]
+ if fullname == nil then
+ local suffix = file.suffix(filename)
+ local suffixes = suffix == "" and os.binsuffixes or { suffix }
+ for directory in gmatch(os.getenv("PATH"),"[^" .. io.pathseparator .."]+") do
+ local df = file.join(directory,filename)
+ for i=1,#suffixes do
+ local dfs = file.addsuffix(df,suffixes[i])
+ if io.exists(dfs) then
+ fullname = dfs
+ break
+ end
+ end
+ end
+ if not fullname then
+ fullname = false
+ end
+ memory[filename] = fullname
+ end
+ return fullname
+end
+
+os.which = which
+os.where = which
+
+-- print(os.which("inkscape.exe"))
+-- print(os.which("inkscape"))
+-- print(os.which("gs.exe"))
+-- print(os.which("ps2pdf"))
+
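A sketch of the memoized lookup (the binary name is only an example):

local gs = os.which("gs")     -- full path on success, false when not found; the result is cached
if gs then
    print("ghostscript found at " .. gs)
end
print(os.where("gs") == gs)   -- true: os.where is an alias and hits the same cache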
end -- of closure
@@ -2499,10 +2762,10 @@ end
local trick_1 = char(1)
local trick_2 = "^" .. trick_1 .. "/+"
-function file.join(...)
+function file.join(...) -- rather dirty
local lst = { ... }
local a, b = lst[1], lst[2]
- if a == "" then
+ if not a or a == "" then -- not a added
lst[1] = trick_1
elseif b and find(a,"^/+$") and find(b,"^/") then
lst[1] = ""
@@ -2523,6 +2786,15 @@ function file.join(...)
end
+-- We should be able to use:
+--
+-- function file.is_writable(name)
+-- local a = attributes(name) or attributes(dirname(name,"."))
+-- return a and sub(a.permissions,2,2) == "w"
+-- end
+--
+-- But after some testing Taco and I came up with:
+
function file.is_writable(name)
if lfs.isdir(name) then
name = name .. "/m_t_x_t_e_s_t.tmp"
@@ -2532,12 +2804,17 @@ function file.is_writable(name)
os.remove(name)
return true
end
- else
- local existing = lfs.isfile(name)
- f = io.open(name,"ab")
+ elseif lfs.isfile(name) then
+ local f = io.open(name,"ab")
if f then
f:close()
- if not existing then os.remove(name) end
+ return true
+ end
+ else
+ local f = io.open(name,"ab")
+ if f then
+ f:close()
+ os.remove(name)
return true
end
end
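A sketch of the three branches above (file names are illustrative only):

print(file.is_writable("."))             -- directory: tries to create and remove m_t_x_t_e_s_t.tmp
print(file.is_writable("existing.log"))  -- existing file: opened for append and closed again
print(file.is_writable("new.log"))       -- new file: created, closed and removed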
@@ -2594,7 +2871,7 @@ function file.collapsepath(str,anchor)
if element == '.' then
-- do nothing
elseif element == '..' then
- local n = i -1
+ local n = i - 1
while n > 0 do
local element = oldelements[n]
if element ~= '..' and element ~= '.' then
@@ -2690,10 +2967,42 @@ local path = C(((1-slash)^0 * slash)^0)
local suffix = period * C(P(1-period)^0 * P(-1))
local base = C((1-suffix)^0)
-local pattern = (drive + Cc("")) * (path + Cc("")) * (base + Cc("")) * (suffix + Cc(""))
+drive = drive + Cc("")
+path = path + Cc("")
+base = base + Cc("")
+suffix = suffix + Cc("")
-function file.splitname(str) -- returns drive, path, base, suffix
- return lpegmatch(pattern,str)
+local pattern_a = drive * path * base * suffix
+local pattern_b = path * base * suffix
+local pattern_c = C(drive * path) * C(base * suffix)
+
+function file.splitname(str,splitdrive)
+ if splitdrive then
+ return lpegmatch(pattern_a,str) -- returns drive, path, base, suffix
+ else
+ return lpegmatch(pattern_b,str) -- returns path, base, suffix
+ end
+end
+
+function file.nametotable(str,splitdrive) -- returns table
+ local path, drive, subpath, name, base, suffix = lpegmatch(pattern_c,str)
+ if splitdrive then
+ return {
+ path = path,
+ drive = drive,
+ subpath = subpath,
+ name = name,
+ base = base,
+ suffix = suffix,
+ }
+ else
+ return {
+ path = path,
+ name = name,
+ base = base,
+ suffix = suffix,
+ }
+ end
end
-- function test(t) for k, v in next, t do print(v, "=>", file.splitname(v)) end end
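A sketch of the split variants (assuming the drive pattern defined just above this hunk):

print(file.splitname("c:/data/test.tex"))        -- c:/data/   test   tex
print(file.splitname("c:/data/test.tex",true))   -- c:   /data/   test   tex
inspect(file.nametotable("c:/data/test.tex"))    -- path, name, base and suffix fields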
@@ -2828,12 +3137,19 @@ local nothing = Cc("")
local escaped = (plus / " ") + (percent * C(hexdigit * hexdigit) / tochar)
-- we assume schemes with more than 1 character (in order to avoid problems with windows disks)
+-- we also assume that when we have a scheme, we also have an authority
+
+local schemestr = Cs((escaped+(1-colon-slash-qmark-hash))^2)
+local authoritystr = Cs((escaped+(1- slash-qmark-hash))^0)
+local pathstr = Cs((escaped+(1- qmark-hash))^0)
+local querystr = Cs((escaped+(1- hash))^0)
+local fragmentstr = Cs((escaped+(1- endofstring))^0)
-local scheme = Cs((escaped+(1-colon-slash-qmark-hash))^2) * colon * slash * slash + nothing
-local authority = Cs((escaped+(1- slash-qmark-hash))^0) + nothing
-local path = slash * Cs((escaped+(1- qmark-hash))^0) + nothing
-local query = qmark * Cs((escaped+(1- hash))^0) + nothing
-local fragment = hash * Cs((escaped+(1- endofstring))^0) + nothing
+local scheme = schemestr * colon + nothing
+local authority = slash * slash * authoritystr + nothing
+local path = slash * pathstr + nothing
+local query = qmark * querystr + nothing
+local fragment = hash * fragmentstr + nothing
local validurl = scheme * authority * path * query * fragment
local parser = Ct(validurl)
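A sketch of what the parser above yields for an illustrative url (captures in order: scheme, authority, path, query, fragment):

local t = lpegmatch(parser,"http://www.pragma-ade.com/index.html?test=yes#top")
-- t[1] : http
-- t[2] : www.pragma-ade.com
-- t[3] : index.html
-- t[4] : test=yes
-- t[5] : top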
@@ -2854,11 +3170,14 @@ local function split(str)
return (type(str) == "string" and lpegmatch(parser,str)) or str
end
+local isscheme = schemestr * colon * slash * slash -- this test also assumes authority
+
local function hasscheme(str)
- local scheme = lpegmatch(scheme,str) -- at least one character
- return scheme and scheme ~= ""
+ local scheme = lpegmatch(isscheme,str) -- at least one character
+ return scheme ~= "" and scheme or false
end
+
-- todo: cache them
local rootletter = R("az","AZ")
@@ -3007,8 +3326,6 @@ local attributes = lfs.attributes
local walkdir = lfs.dir
local isdir = lfs.isdir
local isfile = lfs.isfile
-local mkdir = lfs.mkdir
-local chdir = lfs.chdir
local currentdir = lfs.currentdir
-- handy
@@ -3056,13 +3373,13 @@ local function collectpattern(path,patt,recurse,result)
local ok, scanner
result = result or { }
if path == "/" then
- ok, scanner = xpcall(function() return walkdir(path..".") end, function() end) -- kepler safe
+ ok, scanner, first = xpcall(function() return walkdir(path..".") end, function() end) -- kepler safe
else
- ok, scanner = xpcall(function() return walkdir(path) end, function() end) -- kepler safe
+ ok, scanner, first = xpcall(function() return walkdir(path) end, function() end) -- kepler safe
end
if ok and type(scanner) == "function" then
if not find(path,"/$") then path = path .. '/' end
- for name in scanner do
+ for name in scanner, first do
local full = path .. name
local attr = attributes(full)
local mode = attr.mode
@@ -3245,7 +3562,7 @@ if onwindows then
pth = pth .. "/" .. s
end
if make_indeed and not isdir(pth) then
- mkdir(pth)
+ lfs.mkdir(pth)
end
end
return pth, (isdir(pth) == true)
@@ -3277,7 +3594,7 @@ else
pth = pth .. "/" .. s
end
if make_indeed and not first and not isdir(pth) then
- mkdir(pth)
+ lfs.mkdir(pth)
end
end
else
@@ -3285,7 +3602,7 @@ else
for s in gmatch(str,"[^/]+") do
pth = pth .. "/" .. s
if make_indeed and not isdir(pth) then
- mkdir(pth)
+ lfs.mkdir(pth)
end
end
end
@@ -3313,10 +3630,10 @@ if onwindows then
first, last = match(str,"^([a-zA-Z]:)(.*)$")
if first and not find(last,"^/") then
local d = currentdir()
- if chdir(first) then
+ if lfs.chdir(first) then
first = dir.current()
end
- chdir(d)
+ lfs.chdir(d)
end
end
if not first then
@@ -3381,10 +3698,6 @@ local type, tonumber = type, tonumber
boolean = boolean or { }
local boolean = boolean
--- function boolean.tonumber(b)
--- return b and 1 or 0 -- test and test and return or return
--- end
-
function boolean.tonumber(b)
if b then return 1 else return 0 end -- test and return or return
end
@@ -3545,7 +3858,7 @@ local function utf16_to_utf8_be(t)
if right then
local now = 256*left + right
if more > 0 then
- now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000
+ now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000 -- the 0x10000 smells wrong
more = 0
r = r + 1
result[r] = utfchar(now)
@@ -3573,7 +3886,7 @@ local function utf16_to_utf8_le(t)
if right then
local now = 256*right + left
if more > 0 then
- now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000
+ now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000 -- the 0x10000 smells wrong
more = 0
r = r + 1
result[r] = utfchar(now)
@@ -3590,14 +3903,14 @@ local function utf16_to_utf8_le(t)
return t
end
-local function utf32_to_utf8_be(str)
+local function utf32_to_utf8_be(t)
if type(t) == "string" then
- t = utfsplitlines(str)
+ t = utfsplitlines(t)
end
local result = { } -- we reuse result
for i=1,#t do
local r, more = 0, -1
- for a,b in bytepairs(str) do
+ for a,b in bytepairs(t[i]) do
if a and b then
if more < 0 then
more = 256*256*256*a + 256*256*b
@@ -3612,17 +3925,17 @@ local function utf32_to_utf8_be(str)
end
t[i] = concat(result,"",1,r)
end
- return result
+ return t
end
-local function utf32_to_utf8_le(str)
+local function utf32_to_utf8_le(t)
if type(t) == "string" then
- t = utfsplitlines(str)
+ t = utfsplitlines(t)
end
local result = { } -- we reuse result
for i=1,#t do
local r, more = 0, -1
- for a,b in bytepairs(str) do
+ for a,b in bytepairs(t[i]) do
if a and b then
if more < 0 then
more = 256*b + a
@@ -3637,7 +3950,7 @@ local function utf32_to_utf8_le(str)
end
t[i] = concat(result,"",1,r)
end
- return result
+ return t
end
unicode.utf32_to_utf8_be = utf32_to_utf8_be
@@ -3706,15 +4019,67 @@ end
local lpegmatch = lpeg.match
-local utftype = lpeg.patterns.utftype
+local patterns = lpeg.patterns
+local utftype = patterns.utftype
function unicode.filetype(data)
return data and lpegmatch(utftype,data) or "unknown"
end
+local toentities = lpeg.Cs (
+ (
+ patterns.utf8one
+ + (
+ patterns.utf8two
+ + patterns.utf8three
+ + patterns.utf8four
+ ) / function(s) local b = utfbyte(s) if b < 127 then return s else return format("&#%X;",b) end end
+ )^0
+)
+
+patterns.toentities = toentities
+
+function utf.toentities(str)
+ return lpegmatch(toentities,str)
+end
+
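A short sketch: ascii passes through, everything else becomes a numeric entity:

print(utf.toentities("monday"))        -- monday (all ascii, unchanged)
print(utf.toentities("caf\195\169"))   -- caf&#E9; (the two byte utf-8 sequence for e-acute)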
+local P, C, R, Cs = lpeg.P, lpeg.C, lpeg.R, lpeg.Cs
+
+local one = P(1)
+local two = C(1) * C(1)
+local four = C(R(utfchar(0xD8),utfchar(0xFF))) * C(1) * C(1) * C(1)
+
+local pattern = P("\254\255") * Cs( (
+ four / function(a,b,c,d)
+ local ab = 0xFF * byte(a) + byte(b)
+ local cd = 0xFF * byte(c) + byte(d)
+ return utfchar((ab-0xD800)*0x400 + (cd-0xDC00) + 0x10000)
+ end
+ + two / function(a,b)
+ return utfchar(byte(a)*256 + byte(b))
+ end
+ + one
+ )^1 )
+ + P("\255\254") * Cs( (
+ four / function(b,a,d,c)
+ local ab = 0xFF * byte(a) + byte(b)
+ local cd = 0xFF * byte(c) + byte(d)
+ return utfchar((ab-0xD800)*0x400 + (cd-0xDC00) + 0x10000)
+ end
+ + two / function(b,a)
+ return utfchar(byte(a)*256 + byte(b))
+ end
+ + one
+ )^1 )
+
+function string.toutf(s)
+ return lpegmatch(pattern,s) or s -- todo: utf32
+end
+
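A sketch of the bom driven conversion (the escapes spell out the utf-16 bytes):

print(string.toutf("\254\255\000H\000i"))   -- Hi (utf-16 be input)
print(string.toutf("\255\254H\000i\000"))   -- Hi (utf-16 le input)
print(string.toutf("plain"))                -- plain (no bom, returned as-is)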
+
end -- of closure
do -- create closure to overcome 200 locals limit
@@ -3750,8 +4115,8 @@ if not math.sind then
end
if not math.odd then
- function math.odd (n) return n % 2 == 0 end
- function math.even(n) return n % 2 ~= 0 end
+ function math.odd (n) return n % 2 ~= 0 end
+ function math.even(n) return n % 2 == 0 end
end
@@ -3771,9 +4136,10 @@ utilities = utilities or {}
utilities.tables = utilities.tables or { }
local tables = utilities.tables
-local format, gmatch = string.format, string.gmatch
+local format, gmatch, rep = string.format, string.gmatch, string.rep
local concat, insert, remove = table.concat, table.insert, table.remove
local setmetatable, getmetatable, tonumber, tostring = setmetatable, getmetatable, tonumber, tostring
+local type, next, rawset, tonumber = type, next, rawset, tonumber
function tables.definetable(target) -- defines undefined tables
local composed, t, n = nil, { }, 0
@@ -3789,14 +4155,31 @@ function tables.definetable(target) -- defines undefined tables
return concat(t,"\n")
end
-function tables.accesstable(target)
- local t = _G
+function tables.accesstable(target,root)
+ local t = root or _G
for name in gmatch(target,"([^%.]+)") do
t = t[name]
+ if not t then
+ return
+ end
end
return t
end
+function tables.migratetable(target,v,root)
+ local t = root or _G
+ local names = string.split(target,".")
+ for i=1,#names-1 do
+ local name = names[i]
+ t[name] = t[name] or { }
+ t = t[name]
+ if not t then
+ return
+ end
+ end
+ t[names[#names]] = v
+end
+
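A sketch combining the two helpers (the dotted key is just an example):

local root = { }
tables.migratetable("a.b.c","value",root)
print(tables.accesstable("a.b.c",root))   -- value
-- a lookup through a missing level, e.g. "a.x.y", simply returns nothing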
function tables.removevalue(t,value) -- todo: n
if value then
for i=1,#t do
@@ -3840,13 +4223,19 @@ end
-- experimental
-local function toxml(t,d,result)
+local function toxml(t,d,result,step)
for k, v in table.sortedpairs(t) do
if type(v) == "table" then
- result[#result+1] = format("%s<%s>",d,k)
- toxml(v,d.." ",result)
- result[#result+1] = format("%s</%s>",d,k)
- elseif tonumber(k) then
+ if type(k) == "number" then
+ result[#result+1] = format("%s<entry n='%s'>",d,k)
+ toxml(v,d..step,result,step)
+ result[#result+1] = format("%s</entry>",d,k)
+ else
+ result[#result+1] = format("%s<%s>",d,k)
+ toxml(v,d..step,result,step)
+ result[#result+1] = format("%s</%s>",d,k)
+ end
+ elseif type(k) == "number" then
result[#result+1] = format("%s<entry n='%s'>%s</entry>",d,k,v,k)
else
result[#result+1] = format("%s<%s>%s</%s>",d,k,tostring(v),k)
@@ -3854,17 +4243,56 @@ local function toxml(t,d,result)
end
end
-function table.toxml(t,name,nobanner)
+function table.toxml(t,name,nobanner,indent,spaces)
local noroot = name == false
local result = (nobanner or noroot) and { } or { "<?xml version='1.0' standalone='yes' ?>" }
+ local indent = rep(" ",indent or 0)
+ local spaces = rep(" ",spaces or 1)
if noroot then
- toxml( t, "", result)
+ toxml( t, indent, result, spaces)
else
- toxml( { [name or "root"] = t }, "", result)
+ toxml( { [name or "root"] = t }, indent, result, spaces)
end
return concat(result,"\n")
end
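A sketch of the extended serializer; with the defaults the output looks roughly like this:

print(table.toxml({ one = { 1, 2 }, two = "duo" },"demo"))
-- <?xml version='1.0' standalone='yes' ?>
-- <demo>
--  <one>
--   <entry n='1'>1</entry>
--   <entry n='2'>2</entry>
--  </one>
--  <two>duo</two>
-- </demo>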
+-- also experimental
+
+-- encapsulate(table,utilities.tables)
+-- encapsulate(table,utilities.tables,true)
+-- encapsulate(table,true)
+
+function tables.encapsulate(core,capsule,protect)
+ if type(capsule) ~= "table" then
+ protect = true
+ capsule = { }
+ end
+ for key, value in next, core do
+ if capsule[key] then
+ print(format("\ninvalid inheritance '%s' in '%s': %s",key,tostring(core)))
+ os.exit()
+ else
+ capsule[key] = value
+ end
+ end
+ if protect then
+ for key, value in next, core do
+ core[key] = nil
+ end
+ setmetatable(core, {
+ __index = capsule,
+ __newindex = function(t,key,value)
+ if capsule[key] then
+ print(format("\ninvalid overload '%s' in '%s'",key,tostring(core)))
+ os.exit()
+ else
+ rawset(t,key,value)
+ end
+ end
+ } )
+ end
+end
+
end -- of closure
@@ -3886,8 +4314,8 @@ local storage = utilities.storage
function storage.mark(t)
if not t then
- texio.write_nl("fatal error: storage '%s' cannot be marked",t)
- os.exit()
+ texio.write_nl("fatal error: storage cannot be marked")
+ return -- os.exit()
end
local m = getmetatable(t)
if not m then
@@ -3916,8 +4344,8 @@ end
function storage.checked(t)
if not t then
- texio.write_nl("fatal error: storage '%s' has not been allocated",t)
- os.exit()
+ texio.write_nl("fatal error: storage has not been allocated")
+ return -- os.exit()
end
return t
end
@@ -3946,13 +4374,13 @@ end
-- table namespace ?
-local function f_empty () return "" end -- t,k
-local function f_self (t,k) t[k] = k return k end
-local function f_ignore() end -- t,k,v
+local function f_empty () return "" end -- t,k
+local function f_self (t,k) t[k] = k return k end
+local function f_ignore() end -- t,k,v
-local t_empty = { __index = empty }
-local t_self = { __index = self }
-local t_ignore = { __newindex = ignore }
+local t_empty = { __index = f_empty }
+local t_self = { __index = f_self }
+local t_ignore = { __newindex = f_ignore }
function table.setmetatableindex(t,f)
local m = getmetatable(t)
@@ -3973,6 +4401,7 @@ function table.setmetatableindex(t,f)
setmetatable(t,{ __index = f })
end
end
+ return t
end
function table.setmetatablenewindex(t,f)
@@ -3990,6 +4419,7 @@ function table.setmetatablenewindex(t,f)
setmetatable(t,{ __newindex = f })
end
end
+ return t
end
function table.setmetatablecall(t,f)
@@ -3999,6 +4429,7 @@ function table.setmetatablecall(t,f)
else
setmetatable(t,{ __call = f })
end
+ return t
end
function table.setmetatablekey(t,key,value)
@@ -4008,6 +4439,7 @@ function table.setmetatablekey(t,key,value)
setmetatable(t,m)
end
m[key] = value
+ return t
end
function table.getmetatablekey(t,key,value)
@@ -4176,14 +4608,29 @@ utilities = utilities or {}
utilities.lua = utilities.lua or { }
utilities.report = logs and logs.reporter("system") or print
-function utilities.lua.compile(luafile,lucfile,cleanup,strip) -- defaults: cleanup=false strip=true
+local function stupidcompile(luafile,lucfile)
+ local data = io.loaddata(luafile)
+ if data and data ~= "" then
+ data = string.dump(assert(loadstring(data))) -- dump wants a function, so load the source first
+ if data and data ~= "" then
+ io.savedata(lucfile,data)
+ end
+ end
+end
+
+function utilities.lua.compile(luafile,lucfile,cleanup,strip,fallback) -- defaults: cleanup=false strip=true
utilities.report("lua: compiling %s into %s",luafile,lucfile)
os.remove(lucfile)
local command = "-o " .. string.quoted(lucfile) .. " " .. string.quoted(luafile)
if strip ~= false then
command = "-s " .. command
end
- local done = os.spawn("texluac " .. command) == 0 or os.spawn("luac " .. command) == 0
+ local done = os.spawn("texluac " .. command) == 0 -- or os.spawn("luac " .. command) == 0
+ if not done and fallback then
+ utilities.report("lua: dumping %s into %s (unstripped)",luafile,lucfile)
+ stupidcompile(luafile,lucfile) -- maybe use the stripper we have elsewhere
+ cleanup = false -- better see how worse it is
+ end
if done and cleanup == true and lfs.isfile(lucfile) and lfs.isfile(luafile) then
utilities.report("lua: removing %s",luafile)
os.remove(luafile)
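A sketch of the new fallback argument (file names are illustrative):

-- try texluac first; when that fails, dump the chunk unstripped instead of giving up
utilities.lua.compile("mtx-whatever.lua","mtx-whatever.luc",false,true,true)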
@@ -4210,9 +4657,9 @@ if not modules then modules = { } end modules ['util-prs'] = {
license = "see context related readme files"
}
-local P, R, V, C, Ct, Carg = lpeg.P, lpeg.R, lpeg.V, lpeg.C, lpeg.Ct, lpeg.Carg
+local P, R, V, C, Ct, Cs, Carg = lpeg.P, lpeg.R, lpeg.V, lpeg.C, lpeg.Ct, lpeg.Cs, lpeg.Carg
local lpegmatch = lpeg.match
-local concat, format, gmatch = table.concat, string.format, string.gmatch
+local concat, format, gmatch, find = table.concat, string.format, string.gmatch, string.find
local tostring, type, next = tostring, type, next
utilities = utilities or {}
@@ -4240,8 +4687,12 @@ local rbrace = P("}")
local nobrace = 1 - (lbrace+rbrace)
local nested = P { lbrace * (nobrace + V(1))^0 * rbrace }
local spaces = space^0
+local argument = Cs((lbrace/"") * ((nobrace + nested)^0) * (rbrace/""))
+local content = (1-P(-1))^0
-lpeg.patterns.nested = nested
+lpeg.patterns.nested = nested -- no capture
+lpeg.patterns.argument = argument -- argument after e.g. =
+lpeg.patterns.content = content -- rest after e.g =
local value = P(lbrace * C((nobrace + nested)^0) * rbrace) + C((nested + (1-comma))^0)
@@ -4322,9 +4773,15 @@ parsers.patterns.settings_to_array = pattern
-- we could use a weak table as cache
-function parsers.settings_to_array(str)
+function parsers.settings_to_array(str,strict)
if not str or str == "" then
return { }
+ elseif strict then
+ if find(str,"{") then
+ return lpegmatch(pattern,str)
+ else
+ return { str }
+ end
else
return lpegmatch(pattern,str)
end
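A sketch of the new strict flag: the full parser only runs when braces are present:

local t = parsers.settings_to_array("one,two",true)
-- no brace in the string, so strict mode short circuits: t = { "one,two" }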
@@ -4562,61 +5019,55 @@ local names = { }
-- one
local function hook()
- local f = getinfo(2,"f").func
- local n = getinfo(2,"Sn")
--- if n.what == "C" and n.name then print (n.namewhat .. ': ' .. n.name) end
+ local f = getinfo(2) -- "nS"
if f then
- local cf = counters[f]
- if cf == nil then
- counters[f] = 1
- names[f] = n
- else
- counters[f] = cf + 1
- end
- end
-end
-
-local function getname(func)
- local n = names[func]
- if n then
- if n.what == "C" then
- return n.name or '<anonymous>'
+ local n = "unknown"
+ if f.what == "C" then
+ n = f.name or '<anonymous>'
+ if not names[n] then
+ names[n] = format("%42s",n)
+ end
else
-- source short_src linedefined what name namewhat nups func
- local name = n.name or n.namewhat or n.what
- if not name or name == "" then name = "?" end
- return format("%s : %s : %s", n.short_src or "unknown source", n.linedefined or "--", name)
+ n = f.name or f.namewhat or f.what
+ if not n or n == "" then
+ n = "?"
+ end
+ if not names[n] then
+ names[n] = format("%42s : % 5i : %s",n,f.linedefined or 0,f.short_src or "unknown source")
+ end
end
- else
- return "unknown"
+ counters[n] = (counters[n] or 0) + 1
end
end
-function debugger.showstats(printer,threshold)
+function debugger.showstats(printer,threshold) -- hm, something has changed, rubbish now
printer = printer or texio.write or print
threshold = threshold or 0
local total, grandtotal, functions = 0, 0, 0
- printer("\n") -- ugly but ok
- -- table.sort(counters)
- for func, count in next, counters do
- if count > threshold then
- local name = getname(func)
- if not find(name,"for generator") then
- printer(format("%8i %s", count, name))
- total = total + count
- end
+ local dataset = { }
+ for name, count in next, counters do
+ dataset[#dataset+1] = { name, count }
+ end
+ table.sort(dataset,function(a,b) return a[2] == b[2] and b[1] > a[1] or a[2] > b[2] end)
+ for i=1,#dataset do
+ local d = dataset[i]
+ local name = d[1]
+ local count = d[2]
+ if count > threshold and not find(name,"for generator") then -- move up
+ printer(format("%8i %s\n", count, names[name]))
+ total = total + count
end
grandtotal = grandtotal + count
functions = functions + 1
end
- printer(format("functions: %s, total: %s, grand total: %s, threshold: %s\n", functions, total, grandtotal, threshold))
+ printer("\n")
+ printer(format("functions : % 10i\n", functions))
+ printer(format("total : % 10i\n", total))
+ printer(format("grand total: % 10i\n", grandtotal))
+ printer(format("threshold : % 10i\n", threshold))
end
--- two
-
-
--- rest
-
function debugger.savestats(filename,threshold)
local f = io.open(filename,'w')
if f then
@@ -4638,6 +5089,7 @@ end
local is_node = node and node.is_node
+local is_lpeg = lpeg and lpeg.type
function inspect(i) -- global function
local ti = type(i)
@@ -4645,11 +5097,30 @@ function inspect(i) -- global function
table.print(i,"table")
elseif is_node and is_node(i) then
table.print(nodes.astable(i),tostring(i))
+ elseif is_lpeg and is_lpeg(i) then
+ lpeg.print(i)
else
print(tostring(i))
end
end
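-- so something like inspect(lpeg.P("a")^1) now prints the pattern tree, provided
-- the lpeg in use exposes lpeg.print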
+-- from the lua book:
+
+function traceback()
+ local level = 1
+ while true do
+ local info = debug.getinfo(level, "Sl")
+ if not info then
+ break
+ elseif info.what == "C" then
+ print(format("%3i : C function",level))
+ else
+ print(format("%3i : [%s]:%d",level,info.short_src,info.currentline))
+ end
+ level = level + 1
+ end
+end
+
end -- of closure
@@ -4668,7 +5139,7 @@ if not modules then modules = { } end modules ['trac-inf'] = {
-- get warnings about assignments. This is more efficient than using rawset
-- and rawget.
-local format = string.format
+local format, lower = string.format, string.lower
local clock = os.gettimeofday or os.clock -- should go in environment
local write_nl = texio.write_nl
@@ -4770,10 +5241,10 @@ function statistics.show(reporter)
-- this code will move
local register = statistics.register
register("luatex banner", function()
- return string.lower(status.banner)
+ return lower(status.banner)
end)
register("control sequences", function()
- return format("%s of %s", status.cs_count, status.hash_size+status.hash_extra)
+ return format("%s of %s + %s", status.cs_count, status.hash_size,status.hash_extra)
end)
register("callbacks", function()
local total, indirect = status.callbacks or 0, status.indirect_callbacks or 0
@@ -4847,7 +5318,7 @@ end
function commands.elapsedtime(name)
stoptiming(name or "whatever")
- tex.sprint(elapsedtime(name or "whatever"))
+ context(elapsedtime(name or "whatever"))
end
@@ -4931,6 +5402,10 @@ local function set(t,what,newvalue)
if type(what) ~= "table" then
return
end
+ if not done then -- catch ... why not set?
+ done = { }
+ t.done = done
+ end
for w, value in next, what do
if value == "" then
value = newvalue
@@ -5167,6 +5642,8 @@ end
if texconfig then
+ -- this happens too late in ini mode but that is no problem
+
local function set(k,v)
v = tonumber(v)
if v then
@@ -5237,7 +5714,7 @@ local report, subreport, status, settarget, setformats, settranslations
local direct, subdirect, writer, pushtarget, poptarget
-if tex and tex.jobname or tex.formatname then
+if tex and (tex.jobname or tex.formatname) then
local valueiskey = { __index = function(t,k) t[k] = k return k end } -- will be helper
@@ -5252,6 +5729,10 @@ if tex and tex.jobname or tex.formatname then
write_nl(target,...)
end
+ newline = function()
+ write_nl(target,"\n")
+ end
+
report = function(a,b,c,...)
if c then
write_nl(target,format("%-15s > %s\n",translations[a],format(formats[b],c,...)))
@@ -5357,6 +5838,10 @@ else
writer = write_nl
+ newline = function()
+ write_nl("\n")
+ end
+
report = function(a,b,c,...)
if c then
write_nl(format("%-15s | %s",a,format(b,c,...)))
@@ -5416,6 +5901,7 @@ logs.settranslations = settranslations
logs.direct = direct
logs.subdirect = subdirect
logs.writer = writer
+logs.newline = newline
-- installer
@@ -5585,21 +6071,58 @@ local real, user, sub
function logs.start_page_number()
real, user, sub = texcount.realpageno, texcount.userpageno, texcount.subpageno
+-- real, user, sub = 0, 0, 0
end
-function logs.stop_page_number()
- if real > 0 then
- if user > 0 then
- if sub > 0 then
- report_pages("flushing realpage %s, userpage %s, subpage %s",real,user,sub)
+local timing = false
+local starttime = nil
+local lasttime = nil
+
+trackers.register("pages.timing", function(v) -- only for myself (diagnostics)
+ starttime = os.clock()
+ timing = true
+end)
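-- the tracker is set like any other one, e.g.
--
-- trackers.enable("pages.timing")
--
-- after which each flushed page also reports elapsed and average times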
+
+function logs.stop_page_number() -- the first page can include the initialization so we omit it from the average
+ if timing then
+ local elapsed, average
+ local stoptime = os.clock()
+ if not lasttime or real < 2 then
+ elapsed = stoptime
+ average = stoptime
+ starttime = stoptime
+ else
+ elapsed = stoptime - lasttime
+ average = (stoptime - starttime) / (real - 1)
+ end
+ lasttime = stoptime
+ if real > 0 then
+ if user > 0 then
+ if sub > 0 then
+ report_pages("flushing realpage %s, userpage %s, subpage %s, time %0.04f / %0.04f",real,user,sub,elapsed,average)
+ else
+ report_pages("flushing realpage %s, userpage %s, time %0.04f / %0.04f",real,user,elapsed,average)
+ end
else
- report_pages("flushing realpage %s, userpage %s",real,user)
+ report_pages("flushing realpage %s, time %0.04f / %0.04f",real,elapsed,average)
end
else
- report_pages("flushing realpage %s",real)
+ report_pages("flushing page, time %0.04f / %0.04f",elapsed,average)
end
else
- report_pages("flushing page")
+ if real > 0 then
+ if user > 0 then
+ if sub > 0 then
+ report_pages("flushing realpage %s, userpage %s, subpage %s",real,user,sub)
+ else
+ report_pages("flushing realpage %s, userpage %s",real,user)
+ end
+ else
+ report_pages("flushing realpage %s",real)
+ end
+ else
+ report_pages("flushing page")
+ end
end
logs.flush()
end
@@ -5681,6 +6204,13 @@ local function reportbanner(t)
end
end
+local function reportversion(t)
+ local banner = t.banner
+ if banner then
+ t.report(banner)
+ end
+end
+
local function reporthelp(t,...)
local helpinfo = t.helpinfo
if type(helpinfo) == "string" then
@@ -5707,6 +6237,7 @@ function logs.application(t)
t.report = logs.reporter(t.name)
t.help = function(...) reportbanner(t) ; reporthelp(t,...) ; reportinfo(t) end
t.identify = function() reportbanner(t) end
+ t.version = function() reportversion(t) end
return t
end
@@ -5772,6 +6303,11 @@ else
end
end
+-- do we still need io.flush then?
+
+io.stdout:setvbuf('no')
+io.stderr:setvbuf('no')
+
end -- of closure
@@ -6277,6 +6813,10 @@ if not modules then modules = { } end modules ['lxml-tab'] = {
-- stripping spaces from e.g. cont-en.xml saves .2 sec runtime so it's not worth the
-- trouble
+-- todo: when serializing optionally remap named entities to hex (if known in char-ent.lua)
+-- maybe when letter -> utf, else name .. then we need an option to the serializer .. a bit
+-- of work so we delay this till we clean up
+
local trace_entities = false trackers.register("xml.entities", function(v) trace_entities = v end)
local report_xml = logs and logs.reporter("xml","core") or function(...) print(format(...)) end
@@ -6301,10 +6841,11 @@ xml = xml or { }
local xml = xml
+local utf = unicode.utf8
local concat, remove, insert = table.concat, table.remove, table.insert
local type, next, setmetatable, getmetatable, tonumber = type, next, setmetatable, getmetatable, tonumber
local format, lower, find, match, gsub = string.format, string.lower, string.find, string.match, string.gsub
-local utfchar = unicode.utf8.char
+local utfchar, utffind, utfgsub = utf.char, utf.find, utf.gsub
local lpegmatch = lpeg.match
local P, S, R, C, V, C, Cs = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.V, lpeg.C, lpeg.Cs
@@ -6417,9 +6958,22 @@ element.</p>
local nsremap, resolvens = xml.xmlns, xml.resolvens
-local stack, top, dt, at, xmlns, errorstr, entities = { }, { }, { }, { }, { }, nil, { }
-local strip, cleanup, utfize, resolve, resolve_predefined, unify_predefined = false, false, false, false, false, false
-local dcache, hcache, acache = { }, { }, { }
+local stack = { }
+local top = { }
+local dt = { }
+local at = { }
+local xmlns = { }
+local errorstr = nil
+local entities = { }
+local strip = false
+local cleanup = false
+local utfize = false
+local resolve_predefined = false
+local unify_predefined = false
+
+local dcache = { }
+local hcache = { }
+local acache = { }
local mt = { }
@@ -6585,7 +7139,7 @@ local predefined_unified = {
[42] = "&quot;",
[47] = "&apos;",
[74] = "&lt;",
- [76] = "&gr;",
+ [76] = "&gt;",
}
local predefined_simplified = {
@@ -6596,6 +7150,58 @@ local predefined_simplified = {
[76] = ">", gt = ">",
}
+local nofprivates = 0xF0000 -- shared but seldom used
+
+local privates_u = { -- unescaped
+ [ [[&]] ] = "&amp;",
+ [ [["]] ] = "&quot;",
+ [ [[']] ] = "&apos;",
+ [ [[<]] ] = "&lt;",
+ [ [[>]] ] = "&gt;",
+}
+
+local privates_p = {
+}
+
+local privates_n = {
+ -- keeps track of defined ones
+}
+
+local function escaped(s)
+ if s == "" then
+ return ""
+ else -- if utffind(s,privates_u) then
+ return (utfgsub(s,".",privates_u))
+ -- else
+ -- return s
+ end
+end
+
+local function unescaped(s)
+ local p = privates_n[s]
+ if not p then
+ nofprivates = nofprivates + 1
+ p = utfchar(nofprivates)
+ privates_n[s] = p
+ s = "&" .. s .. ";" -- todo: use char-ent to map to hex
+ privates_u[p] = s
+ privates_p[p] = s
+ end
+ return p
+end
+
+local function unprivatized(s,resolve)
+ if s == "" then
+ return ""
+ else
+ return (utfgsub(s,".",privates_p))
+ end
+end
+
+xml.privatetoken = unescaped
+xml.unprivatized = unprivatized
+xml.privatecodes = privates_n
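-- roughly speaking (hypothetical entity name):
--
-- local p = xml.privatetoken("foo")  -- a private character (0xF0001 and up)
-- local s = xml.unprivatized(p)      -- "&foo;" again when flushing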
+
local function handle_hex_entity(str)
local h = hcache[str]
if not h then
@@ -6637,7 +7243,7 @@ local function handle_dec_entity(str)
if not n then
report_xml("utfize, ignoring dec entity &#%s;",str)
elseif trace_entities then
- report_xml("utfize, converting dec entity &#%s; into %s",str,h)
+ report_xml("utfize, converting dec entity &#%s; into %s",str,d)
end
else
if trace_entities then
@@ -6658,34 +7264,44 @@ local function handle_any_entity(str)
if not a then
a = resolve_predefined and predefined_simplified[str]
if a then
- -- one of the predefined
- elseif type(resolve) == "function" then
- a = resolve(str) or entities[str]
- else
- a = entities[str]
- end
- if a then
if trace_entities then
- report_xml("resolved entity &%s; -> %s (internal)",str,a)
+ report_xml("resolved entity &%s; -> %s (predefined)",str,a)
end
- a = lpegmatch(parsedentity,a) or a
else
- local unknown_any_entity = placeholders.unknown_any_entity
- if unknown_any_entity then
- a = unknown_any_entity(str) or ""
+ if type(resolve) == "function" then
+ a = resolve(str) or entities[str]
+ else
+ a = entities[str]
end
if a then
+ if type(a) == "function" then
+ if trace_entities then
+ report_xml("expanding entity &%s; (function)",str)
+ end
+ a = a(str) or ""
+ end
+ a = lpegmatch(parsedentity,a) or a -- for nested
if trace_entities then
- report_xml("resolved entity &%s; -> %s (external)",str,a)
+ report_xml("resolved entity &%s; -> %s (internal)",str,a)
end
else
- if trace_entities then
- report_xml("keeping entity &%s;",str)
+ local unknown_any_entity = placeholders.unknown_any_entity
+ if unknown_any_entity then
+ a = unknown_any_entity(str) or ""
end
- if str == "" then
- a = "&error;"
+ if a then
+ if trace_entities then
+ report_xml("resolved entity &%s; -> %s (external)",str,a)
+ end
else
- a = "&" .. str .. ";"
+ if trace_entities then
+ report_xml("keeping entity &%s;",str)
+ end
+ if str == "" then
+ a = "&error;"
+ else
+ a = "&" .. str .. ";"
+ end
end
end
end
@@ -6700,18 +7316,25 @@ local function handle_any_entity(str)
else
local a = acache[str]
if not a then
- if trace_entities then
- report_xml("found entity &%s;",str)
- end
a = resolve_predefined and predefined_simplified[str]
if a then
-- one of the predefined
acache[str] = a
+ if trace_entities then
+ report_xml("entity &%s; becomes %s",str,tostring(a))
+ end
elseif str == "" then
+ if trace_entities then
+ report_xml("invalid entity &%s;",str)
+ end
a = "&error;"
acache[str] = a
else
- a = "&" .. str .. ";"
+ if trace_entities then
+ report_xml("entity &%s; is made private",str)
+ end
+ -- a = "&" .. str .. ";"
+ a = unescaped(str)
acache[str] = a
end
end
@@ -6761,7 +7384,7 @@ local value = (squote * Cs((entity + (1 - squote))^0) * squote) + (dq
local endofattributes = slash * close + close -- recovery of flaky html
local whatever = space * name * optionalspace * equal
-local wrongvalue = C(P(1-whatever-close)^1 + P(1-close)^1) / attribute_value_error
+----- wrongvalue = C(P(1-whatever-close)^1 + P(1-close)^1) / attribute_value_error
----- wrongvalue = C(P(1-whatever-endofattributes)^1 + P(1-endofattributes)^1) / attribute_value_error
----- wrongvalue = C(P(1-space-endofattributes)^1) / attribute_value_error
local wrongvalue = Cs(P(entity + (1-space-endofattributes))^1) / attribute_value_error
@@ -6796,6 +7419,8 @@ local function normalentity(k,v ) entities[k] = v end
local function systementity(k,v,n) entities[k] = v end
local function publicentity(k,v,n) entities[k] = v end
+-- todo: separate dtd parser
+
local begindoctype = open * P("!DOCTYPE")
local enddoctype = close
local beginset = P("[")
@@ -6803,17 +7428,22 @@ local endset = P("]")
local doctypename = C((1-somespace-close)^0)
local elementdoctype = optionalspace * P("<!ELEMENT") * (1-close)^0 * close
+local basiccomment = begincomment * ((1 - endcomment)^0) * endcomment
+
local normalentitytype = (doctypename * somespace * value)/normalentity
local publicentitytype = (doctypename * somespace * P("PUBLIC") * somespace * value)/publicentity
local systementitytype = (doctypename * somespace * P("SYSTEM") * somespace * value * somespace * P("NDATA") * somespace * doctypename)/systementity
local entitydoctype = optionalspace * P("<!ENTITY") * somespace * (systementitytype + publicentitytype + normalentitytype) * optionalspace * close
-local doctypeset = beginset * optionalspace * P(elementdoctype + entitydoctype + space)^0 * optionalspace * endset
+-- we accept comments in doctypes
+
+local doctypeset = beginset * optionalspace * P(elementdoctype + entitydoctype + basiccomment + space)^0 * optionalspace * endset
local definitiondoctype= doctypename * somespace * doctypeset
local publicdoctype = doctypename * somespace * P("PUBLIC") * somespace * value * somespace * value * somespace * doctypeset
local systemdoctype = doctypename * somespace * P("SYSTEM") * somespace * value * somespace * doctypeset
local simpledoctype = (1-close)^1 -- * balanced^0
local somedoctype = C((somespace * (publicdoctype + systemdoctype + definitiondoctype + simpledoctype) * optionalspace)^0)
local instruction = (spacing * begininstruction * someinstruction * endinstruction) / function(...) add_special("@pi@",...) end
local comment = (spacing * begincomment * somecomment * endcomment ) / function(...) add_special("@cm@",...) end
@@ -6845,17 +7475,30 @@ local grammar_unparsed_text = P { "preamble",
children = unparsedtext + V("parent") + emptyelement + comment + cdata + instruction,
}
--- maybe we will add settinsg to result as well
+-- maybe we will add settings to result as well
-local function xmlconvert(data, settings)
- settings = settings or { } -- no_root strip_cm_and_dt given_entities parent_root error_handler
- strip = settings.strip_cm_and_dt
- utfize = settings.utfize_entities
- resolve = settings.resolve_entities
+local function _xmlconvert_(data, settings)
+ settings = settings or { } -- no_root strip_cm_and_dt given_entities parent_root error_handler
+ --
+ strip = settings.strip_cm_and_dt
+ utfize = settings.utfize_entities
+ resolve = settings.resolve_entities
resolve_predefined = settings.resolve_predefined_entities -- in case we have escaped entities
- unify_predefined = settings.unify_predefined_entities -- &#038; -> &amp;
- cleanup = settings.text_cleanup
- stack, top, at, xmlns, errorstr, entities = { }, { }, { }, { }, nil, settings.entities or { }
+ unify_predefined = settings.unify_predefined_entities -- &#038; -> &amp;
+ cleanup = settings.text_cleanup
+ entities = settings.entities or { }
+ --
+ if utfize == nil then
+ settings.utfize_entities = true
+ utfize = true
+ end
+ if resolve_predefined == nil then
+ settings.resolve_predefined_entities = true
+ resolve_predefined = true
+ end
+ --
+ --
+ stack, top, at, xmlns, errorstr = { }, { }, { }, { }, nil
acache, hcache, dcache = { }, { }, { } -- not stored
reported_attribute_errors = { }
if settings.parent_root then
@@ -6893,7 +7536,7 @@ local function xmlconvert(data, settings)
else
errorhandler = errorhandler or xml.errorhandler
if errorhandler then
- xml.errorhandler("load",errorstr)
+ xml.errorhandler(format("load error: %s",errorstr))
end
end
else
@@ -6907,7 +7550,7 @@ local function xmlconvert(data, settings)
local v = rdt[k]
if type(v) == "table" and not v.special then -- always table -)
result.ri = k -- rootindex
-v.__p__ = result -- new, experiment, else we cannot go back to settings, we need to test this !
+ v.__p__ = result -- new, experiment, else we cannot go back to settings, we need to test this !
break
end
end
@@ -6915,16 +7558,42 @@ v.__p__ = result -- new, experiment, else we cannot go back to settings, we nee
if errorstr and errorstr ~= "" then
result.error = true
end
+ result.statistics = {
+ entities = {
+ decimals = dcache,
+ hexadecimals = hcache,
+ names = acache,
+ }
+ }
+ strip, utfize, resolve, resolve_predefined = nil, nil, nil, nil
+ unify_predefined, cleanup, entities = nil, nil, nil
+ stack, top, at, xmlns, errorstr = nil, nil, nil, nil, nil
+ acache, hcache, dcache = nil, nil, nil
+ reported_attribute_errors, mt, errorhandler = nil, nil, nil
return result
end
+-- Because we can have a crash (stack issues) with faulty xml, we wrap this one
+-- in a protector:
+
+function xmlconvert(data,settings)
+ local ok, result = pcall(function() return _xmlconvert_(data,settings) end)
+ if ok then
+ return result
+ else
+ return _xmlconvert_("")
+ end
+end
+
xml.convert = xmlconvert
-function xml.inheritedconvert(data,xmldata)
+function xml.inheritedconvert(data,xmldata) -- xmldata is parent
local settings = xmldata.settings
- settings.parent_root = xmldata -- to be tested
+ if settings then
+ settings.parent_root = xmldata -- to be tested
+ end
-- settings.no_root = true
- local xc = xmlconvert(data,settings)
+ local xc = xmlconvert(data,settings) -- hm, we might need to locate settings
-- xc.settings = nil
-- xc.entities = nil
-- xc.special = nil
@@ -6953,7 +7622,7 @@ function xml.is_valid(root)
return root and not root.error
end
-xml.errorhandler = report
+xml.errorhandler = report_xml
--[[ldx--
<p>We cannot load an <l n='lpeg'/> from a filehandle so we need to load
@@ -7032,18 +7701,15 @@ alternative.</p>
function xml.checkbom(root) -- can be made faster
if root.ri then
- local dt, found = root.dt, false
+ local dt = root.dt
for k=1,#dt do
local v = dt[k]
if type(v) == "table" and v.special and v.tg == "@pi@" and find(v.dt[1],"xml.*version=") then
- found = true
- break
+ return
end
end
- if not found then
- insert(dt, 1, { special=true, ns="", tg="@pi@", dt = { "xml version='1.0' standalone='yes'"} } )
- insert(dt, 2, "\n" )
- end
+ insert(dt, 1, { special=true, ns="", tg="@pi@", dt = { "xml version='1.0' standalone='yes'"} } )
+ insert(dt, 2, "\n" )
end
end
@@ -7054,14 +7720,14 @@ and then handle the lot.</p>
-- new experimental reorganized serialize
-local function verbose_element(e,handlers)
+local function verbose_element(e,handlers) -- options
local handle = handlers.handle
local serialize = handlers.serialize
local ens, etg, eat, edt, ern = e.ns, e.tg, e.at, e.dt, e.rn
local ats = eat and next(eat) and { }
if ats then
for k,v in next, eat do
- ats[#ats+1] = format('%s=%q',k,v)
+ ats[#ats+1] = format('%s=%q',k,escaped(v))
end
end
if ern and trace_entities and ern ~= ens then
@@ -7077,7 +7743,7 @@ local function verbose_element(e,handlers)
for i=1,#edt do
local e = edt[i]
if type(e) == "string" then
- handle(e)
+ handle(escaped(e))
else
serialize(e,handlers)
end
@@ -7098,11 +7764,11 @@ local function verbose_element(e,handlers)
handle("<",etg,">")
end
for i=1,#edt do
- local ei = edt[i]
- if type(ei) == "string" then
- handle(ei)
+ local e = edt[i]
+ if type(e) == "string" then
+ handle(escaped(e)) -- option: hexify escaped entities
else
- serialize(ei,handlers)
+ serialize(e,handlers)
end
end
handle("</",etg,">")
@@ -7137,7 +7803,7 @@ local function verbose_root(e,handlers)
end
local function verbose_text(e,handlers)
- handlers.handle(e)
+ handlers.handle(escaped(e))
end
local function verbose_document(e,handlers)
@@ -7265,20 +7931,33 @@ local result
local xmlfilehandler = newhandlers {
name = "file",
- initialize = function(name) result = io.open(name,"wb") return result end,
- finalize = function() result:close() return true end,
- handle = function(...) result:write(...) end,
+ initialize = function(name)
+ result = io.open(name,"wb")
+ return result
+ end,
+ finalize = function()
+ result:close()
+ return true
+ end,
+ handle = function(...)
+ result:write(...)
+ end,
}
-- no checking on writeability here but not faster either
--
-- local xmlfilehandler = newhandlers {
--- initialize = function(name) io.output(name,"wb") return true end,
--- finalize = function() io.close() return true end,
+-- initialize = function(name)
+-- io.output(name,"wb")
+-- return true
+-- end,
+-- finalize = function()
+-- io.close()
+-- return true
+-- end,
-- handle = io.write,
-- }
-
function xml.save(root,name)
serialize(root,xmlfilehandler,name)
end
@@ -7287,28 +7966,34 @@ local result
local xmlstringhandler = newhandlers {
name = "string",
- initialize = function() result = { } return result end,
- finalize = function() return concat(result) end,
- handle = function(...) result[#result+1] = concat { ... } end
+ initialize = function()
+ result = { }
+ return result
+ end,
+ finalize = function()
+ return concat(result)
+ end,
+ handle = function(...)
+ result[#result+1] = concat { ... }
+ end,
}
local function xmltostring(root) -- 25% overhead due to collecting
- if root then
- if type(root) == 'string' then
- return root
- else -- if next(root) then -- next is faster than type (and >0 test)
- return serialize(root,xmlstringhandler) or ""
- end
+ if not root then
+ return ""
+ elseif type(root) == 'string' then
+ return root
+ else -- if next(root) then -- next is faster than type (and >0 test)
+ return serialize(root,xmlstringhandler) or ""
end
- return ""
end
-local function xmltext(root) -- inline
+local function __tostring(root) -- inline
return (root and xmltostring(root)) or ""
end
initialize_mt = function(root) -- redefinition
- mt = { __tostring = xmltext, __index = root }
+ mt = { __tostring = __tostring, __index = root }
end
xml.defaulthandlers = handlers
@@ -7428,7 +8113,7 @@ xml.tocdata(e,"error")
--ldx]]--
function xml.tocdata(e,wrapper)
- local whatever = xmltostring(e.dt)
+ local whatever = type(e) == "table" and xmltostring(e.dt) or e or ""
if wrapper then
whatever = format("<%s>%s</%s>",wrapper,whatever,wrapper)
end
@@ -7620,8 +8305,8 @@ apply_axis['child'] = function(list)
c = c + 1
collected[c] = dk
dk.ni = k -- refresh
- en = en + 1
- dk.ei = en
+ en = en + 1
+ dk.ei = en
end
end
ll.en = en
@@ -7649,6 +8334,7 @@ local function collect(list,collected,c)
end
return c
end
+
apply_axis['descendant'] = function(list)
local collected, c = { }, 0
for l=1,#list do
@@ -7940,6 +8626,12 @@ local lp_or = P("|") / " or "
local lp_and = P("&") / " and "
local lp_builtin = P (
+ P("text") / "(ll.dt[1] or '')" + -- fragile
+ P("content") / "ll.dt" +
+ -- P("name") / "(ll.ns~='' and ll.ns..':'..ll.tg)" +
+ P("name") / "((ll.ns~='' and ll.ns..':'..ll.tg) or ll.tg)" +
+ P("tag") / "ll.tg" +
+ P("position") / "l" + -- is element in finalizer
P("firstindex") / "1" +
P("lastindex") / "(#ll.__p__.dt or 1)" +
P("firstelement") / "1" +
@@ -7947,15 +8639,11 @@ local lp_builtin = P (
P("first") / "1" +
P("last") / "#list" +
P("rootposition") / "order" +
- P("position") / "l" + -- is element in finalizer
P("order") / "order" +
P("element") / "(ll.ei or 1)" +
P("index") / "(ll.ni or 1)" +
P("match") / "(ll.mi or 1)" +
- P("text") / "(ll.dt[1] or '')" +
- -- P("name") / "(ll.ns~='' and ll.ns..':'..ll.tg)" +
- P("name") / "((ll.ns~='' and ll.ns..':'..ll.tg) or ll.tg)" +
- P("tag") / "ll.tg" +
+ -- P("namespace") / "ll.ns" +
P("ns") / "ll.ns"
) * ((spaces * P("(") * spaces * P(")"))/"")
@@ -7979,11 +8667,11 @@ local lp_function = C(R("az","AZ","__")^1) * P("(") / function(t) -- todo: bett
end
end
-local lparent = lpeg.P("(")
-local rparent = lpeg.P(")")
+local lparent = P("(")
+local rparent = P(")")
local noparent = 1 - (lparent+rparent)
-local nested = lpeg.P{lparent * (noparent + lpeg.V(1))^0 * rparent}
-local value = lpeg.P(lparent * lpeg.C((noparent + nested)^0) * rparent) -- lpeg.P{"("*C(((1-S("()"))+V(1))^0)*")"}
+local nested = P{lparent * (noparent + V(1))^0 * rparent}
+local value = P(lparent * C((noparent + nested)^0) * rparent) -- P{"("*C(((1-S("()"))+V(1))^0)*")"}
local lp_child = Cc("expr.child(ll,'") * R("az","AZ","--","__")^1 * Cc("')")
local lp_number = S("+-") * R("09")^1
@@ -8135,7 +8823,10 @@ local special_1 = P("*") * Cc(register_auto_descendant) * Cc(register_all_nodes
local special_2 = P("/") * Cc(register_auto_self)
local special_3 = P("") * Cc(register_auto_self)
-local pathparser = Ct { "patterns", -- can be made a bit faster by moving pattern outside
+local no_nextcolon = P(-1) + #(1-P(":")) -- newer lpeg needs the P(-1)
+local no_nextlparent = P(-1) + #(1-P("(")) -- newer lpeg needs the P(-1)
+
+local pathparser = Ct { "patterns", -- can be made a bit faster by moving some patterns outside
patterns = spaces * V("protocol") * spaces * (
( V("special") * spaces * P(-1) ) +
@@ -8164,10 +8855,8 @@ local pathparser = Ct { "patterns", -- can be made a bit faster by moving patter
shortcuts = V("shortcuts_a") * (spaces * "/" * spaces * V("shortcuts_a"))^0,
s_descendant_or_self = (P("***/") + P("/")) * Cc(register_descendant_or_self), --- *** is a bonus
- -- s_descendant_or_self = P("/") * Cc(register_descendant_or_self),
s_descendant = P("**") * Cc(register_descendant),
- s_child = P("*") * #(1-P(":")) * Cc(register_child ),
--- s_child = P("*") * #(P("/")+P(-1)) * Cc(register_child ),
+ s_child = P("*") * no_nextcolon * Cc(register_child ),
s_parent = P("..") * Cc(register_parent ),
s_self = P("." ) * Cc(register_self ),
s_root = P("^^") * Cc(register_root ),
@@ -8194,13 +8883,13 @@ local pathparser = Ct { "patterns", -- can be made a bit faster by moving patter
expressions = expression / register_expression,
letters = R("az")^1,
- name = (1-lpeg.S("/[]()|:*!"))^1,
+ name = (1-S("/[]()|:*!"))^1, -- make inline
negate = P("!") * Cc(false),
nodefunction = V("negate") + P("not") * Cc(false) + Cc(true),
nodetest = V("negate") + Cc(true),
nodename = (V("negate") + Cc(true)) * spaces * ((V("wildnodename") * P(":") * V("wildnodename")) + (Cc(false) * V("wildnodename"))),
- wildnodename = (C(V("name")) + P("*") * Cc(false)) * #(1-P("(")),
+ wildnodename = (C(V("name")) + P("*") * Cc(false)) * no_nextlparent,
nodeset = spaces * Ct(V("nodename") * (spaces * P("|") * spaces * V("nodename"))^0) * spaces,
finalizer = (Cb("protocol") * P("/")^-1 * C(V("name")) * arguments * P(-1)) / register_finalizer,
@@ -8364,7 +9053,7 @@ local function profiled_apply(list,parsed,nofparsed,order)
elseif kind == "expression" then
collected = apply_expression(collected,pi.evaluator,order)
elseif kind == "finalizer" then
- collected = pi.finalizer(collected)
+ collected = pi.finalizer(collected) -- no check on # here
p.matched = p.matched + 1
p.finalized = p.finalized + 1
return collected
@@ -8497,9 +9186,9 @@ end
expressions.child = function(e,pattern)
return applylpath(e,pattern) -- todo: cache
end
-expressions.count = function(e,pattern)
+expressions.count = function(e,pattern) -- what if pattern == empty or nil
local collected = applylpath(e,pattern) -- todo: cache
- return (collected and #collected) or 0
+ return pattern and (collected and #collected) or 0
end
-- external
@@ -8508,7 +9197,7 @@ expressions.oneof = function(s,...) -- slow
local t = {...} for i=1,#t do if s == t[i] then return true end end return false
end
expressions.error = function(str)
- xml.errorhandler("unknown function in lpath expression",tostring(str or "?"))
+ xml.errorhandler(format("unknown function in lpath expression: %s",tostring(str or "?")))
return false
end
expressions.undefined = function(s)
@@ -8534,6 +9223,23 @@ expressions.lower = lower
expressions.number = tonumber
expressions.boolean = toboolean
+function expressions.contains(str,pattern)
+ local t = type(str)
+ if t == "string" then
+ if find(str,pattern) then
+ return true
+ end
+ elseif t == "table" then
+ for i=1,#str do
+ local d = str[i]
+ if type(d) == "string" and find(d,pattern) then
+ return true
+ end
+ end
+ end
+ return false
+end
+
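-- which presumably allows path expressions along the lines of (hypothetical):
--
-- xml.filter(root,"entry[contains(text(),'keyword')]")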
-- user interface
local function traverse(root,pattern,handle)
@@ -8856,10 +9562,12 @@ local xml = xml
local xmlconvert, xmlcopy, xmlname = xml.convert, xml.copy, xml.name
local xmlinheritedconvert = xml.inheritedconvert
local xmlapplylpath = xml.applylpath
+local xmlfilter = xml.filter
local type, setmetatable, getmetatable = type, setmetatable, getmetatable
-local insert, remove, fastcopy = table.insert, table.remove, table.fastcopy
-local gmatch, gsub = string.gmatch, string.gsub
+local insert, remove, fastcopy, concat = table.insert, table.remove, table.fastcopy, table.concat
+local gmatch, gsub, format = string.gmatch, string.gsub, string.format
+local utfbyte = utf.byte
local function report(what,pattern,c,e)
report_xml("%s element '%s' (root: '%s', position: %s, index: %s, pattern: %s)",what,xmlname(e),xmlname(e.__p__),c,e.ni,pattern)
@@ -9004,7 +9712,7 @@ local function xmltoelement(whatever,root)
end
local element
if type(whatever) == "string" then
- element = xmlinheritedconvert(whatever,root)
+ element = xmlinheritedconvert(whatever,root) -- beware, not really a root
else
element = whatever -- we assume a table
end
@@ -9031,18 +9739,30 @@ local function copiedelement(element,newparent)
end
function xml.delete(root,pattern)
- local collected = xmlapplylpath(root,pattern)
- if collected then
- for c=1,#collected do
- local e = collected[c]
- local p = e.__p__
- if p then
- if trace_manipulations then
- report('deleting',pattern,c,e)
+ if not pattern or pattern == "" then
+ local p = root.__p__
+ if p then
+ if trace_manipulations then
+ report('deleting',"--",c,root)
+ end
+ local d = p.dt
+ remove(d,root.ni)
+ redo_ni(d) -- can be made faster and inlined
+ end
+ else
+ local collected = xmlapplylpath(root,pattern)
+ if collected then
+ for c=1,#collected do
+ local e = collected[c]
+ local p = e.__p__
+ if p then
+ if trace_manipulations then
+ report('deleting',pattern,c,e)
+ end
+ local d = p.dt
+ remove(d,e.ni)
+ redo_ni(d) -- can be made faster and inlined
end
- local d = p.dt
- remove(d,e.ni)
- redo_ni(d) -- can be made faster and inlined
end
end
end
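-- so (hypothetical pattern):
--
-- xml.delete(root,"b") -- removes all matches, as before
-- xml.delete(e)        -- no pattern: removes e itself from its parent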
@@ -9105,32 +9825,39 @@ end
local function inject_element(root,pattern,whatever,prepend)
local element = root and xmltoelement(whatever,root)
local collected = element and xmlapplylpath(root,pattern)
- if collected then
- for c=1,#collected do
- local e = collected[c]
- local r = e.__p__
- local d, k, rri = r.dt, e.ni, r.ri
- local edt = (rri and d[rri].dt) or (d and d[k] and d[k].dt)
- if edt then
- local be, af
- local cp = copiedelement(element,e)
- if prepend then
- be, af = cp, edt
- else
- be, af = edt, cp
- end
- local bn = #be
- for i=1,#af do
- bn = bn + 1
- be[bn] = af[i]
- end
- if rri then
- r.dt[rri].dt = be
- else
- d[k].dt = be
- end
- redo_ni(d)
+ local function inject_e(e)
+ local r = e.__p__
+ local d, k, rri = r.dt, e.ni, r.ri
+ local edt = (rri and d[rri].dt) or (d and d[k] and d[k].dt)
+ if edt then
+ local be, af
+ local cp = copiedelement(element,e)
+ if prepend then
+ be, af = cp, edt
+ else
+ be, af = edt, cp
end
+ local bn = #be
+ for i=1,#af do
+ bn = bn + 1
+ be[bn] = af[i]
+ end
+ if rri then
+ r.dt[rri].dt = be
+ else
+ d[k].dt = be
+ end
+ redo_ni(d)
+ end
+ end
+ if not collected then
+ -- nothing
+ elseif collected.tg then
+ -- first or so
+ inject_e(collected)
+ else
+ for c=1,#collected do
+ inject_e(collected[c])
end
end
end
@@ -9138,16 +9865,23 @@ end
local function insert_element(root,pattern,whatever,before) -- todo: element als functie
local element = root and xmltoelement(whatever,root)
local collected = element and xmlapplylpath(root,pattern)
- if collected then
+ local function insert_e(e)
+ local r = e.__p__
+ local d, k = r.dt, e.ni
+ if not before then
+ k = k + 1
+ end
+ insert(d,k,copiedelement(element,r))
+ redo_ni(d)
+ end
+ if not collected then
+ -- nothing
+ elseif collected.tg then
+ -- first or so
+ insert_e(collected)
+ else
for c=1,#collected do
- local e = collected[c]
- local r = e.__p__
- local d, k = r.dt, e.ni
- if not before then
- k = k + 1
- end
- insert(d,k,copiedelement(element,r))
- redo_ni(d)
+ insert_e(collected[c])
end
end
end
@@ -9346,6 +10080,75 @@ function xml.remapname(root, pattern, newtg, newns, newrn)
end
--[[ldx--
+<p>Helper (for q2p).</p>
+--ldx]]--
+
+function xml.cdatatotext(e)
+ local dt = e.dt
+ if #dt == 1 then
+ local first = dt[1]
+ if first.tg == "@cd@" then
+ e.dt = first.dt
+ end
+ else
+ -- maybe option
+ end
+end
+
+xml.builtinentities = table.tohash { "amp", "quot", "apos", "lt", "gt" } -- used often so share
+
+local entities = characters and characters.entities or nil
+local builtinentities = xml.builtinentities
+
+function xml.addentitiesdoctype(root,option) -- we could also have a 'resolve' i.e. inline hex
+ if not entities then
+ require("char-ent")
+ entities = characters.entities
+ end
+ if entities and root and root.tg == "@rt@" and root.statistics then
+ local list = { }
+ local hexify = option == "hexadecimal"
+ for k, v in table.sortedhash(root.statistics.entities.names) do
+ if not builtinentities[k] then
+ local e = entities[k]
+ if not e then
+ e = format("[%s]",k)
+ elseif hexify then
+ e = format("&#%05X;",utfbyte(k))
+ end
+ list[#list+1] = format(" <!ENTITY %s %q >",k,e)
+ end
+ end
+ local dt = root.dt
+ local n = dt[1].tg == "@pi@" and 2 or 1
+ if #list > 0 then
+ insert(dt, n, { "\n" })
+ insert(dt, n, {
+ tg = "@dt@", -- beware, doctype is unparsed
+ dt = { format("Something [\n%s\n] ",concat(list)) },
+ ns = "",
+ special = true,
+ })
+ insert(dt, n, { "\n\n" })
+ else
+ -- insert(dt, n, { table.serialize(root.statistics) })
+ end
+ end
+end
+
+-- local str = [==[
+-- <?xml version='1.0' standalone='yes' ?>
+-- <root>
+-- <a>test &nbsp; test &#123; test</a>
+-- <b><![CDATA[oeps]]></b>
+-- </root>
+-- ]==]
+--
+-- local x = xml.convert(str)
+-- xml.addentitiesdoctype(x,"hexadecimal")
+-- print(x)
+
+--[[ldx--
<p>Here are a few synonyms.</p>
--ldx]]--
@@ -9378,6 +10181,53 @@ xml.remap_tag = xml.remaptag obsolete.remap_tag
xml.remap_name = xml.remapname obsolete.remap_name = xml.remapname
xml.remap_namespace = xml.remapnamespace obsolete.remap_namespace = xml.remapnamespace
+-- new (probably ok)
+
+function xml.cdata(e)
+ if e then
+ local dt = e.dt
+ if dt and #dt == 1 then
+ local first = dt[1]
+ return first.tg == "@cd@" and first.dt[1] or ""
+ end
+ end
+ return ""
+end
+
+function xml.finalizers.xml.cdata(collected)
+ if collected then
+ local e = collected[1]
+ if e then
+ local dt = e.dt
+ if dt and #dt == 1 then
+ local first = dt[1]
+ return first.tg == "@cd@" and first.dt[1] or ""
+ end
+ end
+ end
+ return ""
+end
+
+function xml.insertcomment(e,str,n) -- also insertcdata
+ table.insert(e.dt,n or 1,{
+ tg = "@cm@",
+ ns = "",
+ special = true,
+ at = { },
+ dt = { str },
+ })
+end
+
+function xml.setcdata(e,str) -- also setcomment
+ e.dt = { {
+ tg = "@cd@",
+ ns = "",
+ special = true,
+ at = { },
+ dt = { str },
+ } }
+end
+
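-- for instance (hypothetical element e):
--
-- xml.insertcomment(e,"added later")  -- prepends a @cm@ node
-- xml.setcdata(e,"<raw & unparsed>")  -- replaces the content by one @cd@ node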
end -- of closure
@@ -9391,15 +10241,17 @@ if not modules then modules = { } end modules ['lxml-xml'] = {
license = "see context related readme files"
}
-local concat = string.concat
+local concat = table.concat
+local find = string.find
local xml = xml
-local finalizers = xml.finalizers.xml
-local xmlfilter = xml.filter -- we could inline this one for speed
-local xmltostring = xml.tostring
-local xmlserialize = xml.serialize
-local xmlcollected = xml.collected
+local finalizers = xml.finalizers.xml
+local xmlfilter = xml.filter -- we could inline this one for speed
+local xmltostring = xml.tostring
+local xmlserialize = xml.serialize
+local xmlcollected = xml.collected
+local xmlnewhandlers = xml.newhandlers
local function first(collected) -- wrong ?
return collected and collected[1]
@@ -9413,6 +10265,21 @@ local function all(collected)
return collected
end
+-- local function reverse(collected)
+-- if collected then
+-- local nc = #collected
+-- if nc > 0 then
+-- local reversed, r = { }, 0
+-- for c=nc,1,-1 do
+-- r = r + 1
+-- reversed[r] = collected[c]
+-- end
+-- return reversed
+-- else
+-- return collected
+-- end
+-- end
+-- end
local reverse = table.reversed
@@ -9429,34 +10296,37 @@ local function att(id,name)
end
local function count(collected)
- return (collected and #collected) or 0
+ return collected and #collected or 0
end
local function position(collected,n)
- if collected then
- n = tonumber(n) or 0
- if n < 0 then
- return collected[#collected + n + 1]
- elseif n > 0 then
- return collected[n]
- else
- return collected[1].mi or 0
- end
+ if not collected then
+ return 0
+ end
+ local nc = #collected
+ if nc == 0 then
+ return 0
+ end
+ n = tonumber(n) or 0
+ if n < 0 then
+ return collected[nc + n + 1]
+ elseif n > 0 then
+ return collected[n]
+ else
+ return collected[1].mi or 0
end
end
local function match(collected)
- return (collected and collected[1].mi) or 0 -- match
+ return collected and #collected > 0 and collected[1].mi or 0 -- match
end
local function index(collected)
- if collected then
- return collected[1].ni
- end
+ return collected and #collected > 0 and collected[1].ni or 0 -- 0 is new
end
local function attributes(collected,arguments)
- if collected then
+ if collected and #collected > 0 then
local at = collected[1].at
if arguments then
return at[arguments]
@@ -9467,7 +10337,7 @@ local function attributes(collected,arguments)
end
local function chainattribute(collected,arguments) -- todo: optional levels
- if collected then
+ if collected and #collected > 0 then
local e = collected[1]
while e do
local at = e.at
@@ -9485,108 +10355,169 @@ local function chainattribute(collected,arguments) -- todo: optional levels
return ""
end
-local function raw(collected) -- hybrid
- if collected then
+local function raw(collected) -- hybrid (not much different from text so it might go)
+ if collected and #collected > 0 then
local e = collected[1] or collected
- return (e and xmlserialize(e)) or "" -- only first as we cannot concat function
+ return e and xmltostring(e) or "" -- only first as we cannot concat function
else
return ""
end
end
+--
+
+local xmltexthandler = xmlnewhandlers {
+ name = "string",
+ initialize = function()
+ result = { }
+ return result
+ end,
+ finalize = function()
+ return concat(result)
+ end,
+ handle = function(...)
+ result[#result+1] = concat { ... }
+ end,
+ escape = false,
+}
+
+local function xmltotext(root)
+ local dt = root.dt
+ if not dt then
+ return ""
+ end
+ local nt = #dt -- string or table
+ if nt == 0 then
+ return ""
+ elseif nt == 1 and type(dt[1]) == "string" then
+ return dt[1] -- no escaping of " ' < > &
+ else
+ return xmlserialize(root,xmltexthandler) or ""
+ end
+end
+
+--
+
local function text(collected) -- hybrid
- if collected then
- local e = collected[1] or collected
- return (e and xmltostring(e.dt)) or ""
+ if collected then -- no # test here !
+ local e = collected[1] or collected -- why fallback to element, how about cdata
+ return e and xmltotext(e) or ""
else
return ""
end
end
local function texts(collected)
- if collected then
- local t, n = { }, 0
- for c=1,#collected do
- local e = collected[c]
- if e and e.dt then
- n = n + 1
- t[n] = e.dt
- end
+ if not collected then
+ return { } -- why no nil
+ end
+ local nc = #collected
+ if nc == 0 then
+ return { } -- why no nil
+ end
+ local t, n = { }, 0
+ for c=1,nc do
+ local e = collected[c]
+ if e and e.dt then
+ n = n + 1
+ t[n] = e.dt
end
- return t
end
+ return t
end
local function tag(collected,n)
- if collected then
- local c
- if n == 0 or not n then
- c = collected[1]
- elseif n > 1 then
- c = collected[n]
- else
- c = collected[#collected-n+1]
- end
- return c and c.tg
+ if not collected then
+ return
+ end
+ local nc = #collected
+ if nc == 0 then
+ return
end
+ local c
+ if n == 0 or not n then
+ c = collected[1]
+ elseif n > 1 then
+ c = collected[n]
+ else
+ c = collected[nc-n+1]
+ end
+ return c and c.tg
end
local function name(collected,n)
- if collected then
- local c
- if n == 0 or not n then
- c = collected[1]
- elseif n > 1 then
- c = collected[n]
- else
- c = collected[#collected-n+1]
- end
- if c then
- if c.ns == "" then
- return c.tg
- else
- return c.ns .. ":" .. c.tg
- end
- end
+ if not collected then
+ return
+ end
+ local nc = #collected
+ if nc == 0 then
+ return
+ end
+ local c
+ if n == 0 or not n then
+ c = collected[1]
+ elseif n > 1 then
+ c = collected[n]
+ else
+ c = collected[nc-n+1]
+ end
+ if not c then
+ -- sorry
+ elseif c.ns == "" then
+ return c.tg
+ else
+ return c.ns .. ":" .. c.tg
end
end
local function tags(collected,nonamespace)
- if collected then
- local t, n = { }, 0
- for c=1,#collected do
- local e = collected[c]
- local ns, tg = e.ns, e.tg
- n = n + 1
- if nonamespace or ns == "" then
- t[n] = tg
- else
- t[n] = ns .. ":" .. tg
- end
+ if not collected then
+ return
+ end
+ local nc = #collected
+ if nc == 0 then
+ return
+ end
+ local t, n = { }, 0
+ for c=1,nc do
+ local e = collected[c]
+ local ns, tg = e.ns, e.tg
+ n = n + 1
+ if nonamespace or ns == "" then
+ t[n] = tg
+ else
+ t[n] = ns .. ":" .. tg
end
- return t
end
+ return t
end
-local function empty(collected)
- if collected then
- for c=1,#collected do
- local e = collected[c]
- if e then
- local edt = e.dt
- if edt then
- local n = #edt
- if n == 1 then
- local edk = edt[1]
- local typ = type(edk)
- if typ == "table" then
- return false
- elseif edk ~= "" then -- maybe an extra tester for spacing only
- return false
- end
- elseif n > 1 then
+local function empty(collected,spacesonly)
+ if not collected then
+ return true
+ end
+ local nc = #collected
+ if nc == 0 then
+ return true
+ end
+ for c=1,nc do
+ local e = collected[c]
+ if e then
+ local edt = e.dt
+ if edt then
+ local n = #edt
+ if n == 1 then
+ local edk = edt[1]
+ local typ = type(edk)
+ if typ == "table" then
+ return false
+ elseif edk ~= "" then
+ return false
+ elseif spacesonly and not find(edk,"%S") then
return false
end
+ elseif n > 1 then
+ return false
end
end
end
@@ -9641,14 +10572,14 @@ function xml.raw(id,pattern)
end
end
-function xml.text(id,pattern)
+function xml.text(id,pattern) -- brrr either content or element (when cdata)
if pattern then
-- return text(xmlfilter(id,pattern))
local collected = xmlfilter(id,pattern)
- return (collected and xmltostring(collected[1].dt)) or ""
+ return collected and #collected > 0 and xmltotext(collected[1]) or ""
elseif id then
-- return text(id)
- return xmltostring(id.dt) or ""
+ return xmltotext(id) or ""
else
return ""
end
@@ -9656,6 +10587,8 @@ end
xml.content = text
+--
+
function xml.position(id,pattern,n) -- element
return position(xmlfilter(id,pattern),n)
end
@@ -9664,8 +10597,8 @@ function xml.match(id,pattern) -- number
return match(xmlfilter(id,pattern))
end
-function xml.empty(id,pattern)
- return empty(xmlfilter(id,pattern))
+function xml.empty(id,pattern,spacesonly)
+ return empty(xmlfilter(id,pattern),spacesonly)
end
xml.all = xml.filter
@@ -9719,7 +10652,7 @@ if not modules then modules = { } end modules ['data-ini'] = {
license = "see context related readme files",
}
-local gsub, find, gmatch = string.gsub, string.find, string.gmatch
+local gsub, find, gmatch, char = string.gsub, string.find, string.gmatch, string.char
local concat = table.concat
local next, type = next, type
@@ -9781,7 +10714,7 @@ do
local homedir = osgetenv(ostype == "windows" and 'USERPROFILE' or 'HOME') or ''
if not homedir or homedir == "" then
- homedir = string.char(127) -- we need a value, later we wil trigger on it
+ homedir = char(127) -- we need a value, later we will trigger on it
end
homedir = file.collapsepath(homedir)
@@ -9954,7 +10887,7 @@ if not modules then modules = { } end modules ['data-exp'] = {
license = "see context related readme files",
}
-local format, find, gmatch, lower = string.format, string.find, string.gmatch, string.lower
+local format, find, gmatch, lower, char, sub = string.format, string.find, string.gmatch, string.lower, string.char, string.sub
local concat, sort = table.concat, table.sort
local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
local Ct, Cs, Cc, P, C, S = lpeg.Ct, lpeg.Cs, lpeg.Cc, lpeg.P, lpeg.C, lpeg.S
@@ -9974,19 +10907,6 @@ local resolvers = resolvers
-- all, when working on the main resolver code, I don't want to scroll
-- past this every time. See data-obs.lua for the gsub variant.
--- {a,b,c,d}
--- a,b,c/{p,q,r},d
--- a,b,c/{p,q,r}/d/{x,y,z}//
--- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
--- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
--- a{b,c}{d,e}f
--- {a,b,c,d}
--- {a,b,c/{p,q,r},d}
--- {a,b,c/{p,q,r}/d/{x,y,z}//}
--- {a,b,c/{p,q/{x,y,z}},d/{p,q,r}}
--- {a,b,c/{p,q/{x,y,z},w}v,d/{p,q,r}}
--- {$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,.local,}/web2c}
-
local function f_first(a,b)
local t, n = { }, 0
for s in gmatch(b,"[^,]+") do
@@ -10043,12 +10963,9 @@ local function splitpathexpr(str, newlist, validate) -- I couldn't resist lpeggi
str = lpegmatch(stripper_1,str)
if validate then
for s in gmatch(str,"[^,]+") do
- local restoreslashes = false
- if find(s, "//$") then restoreslashes = true end
s = validate(s)
if s then
n = n + 1 ; t[n] = s
- if restoreslashes and not find(s, "//$") then t[n] = t[n] .. '//' end
end
end
else
@@ -10081,41 +10998,56 @@ function resolvers.expandedpathfromlist(pathlist)
return newlist
end
+-- {a,b,c,d}
+-- a,b,c/{p,q,r},d
+-- a,b,c/{p,q,r}/d/{x,y,z}//
+-- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
+-- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
+-- a{b,c}{d,e}f
+-- {a,b,c,d}
+-- {a,b,c/{p,q,r},d}
+-- {a,b,c/{p,q,r}/d/{x,y,z}//}
+-- {a,b,c/{p,q/{x,y,z}},d/{p,q,r}}
+-- {a,b,c/{p,q/{x,y,z},w}v,d/{p,q,r}}
+-- {$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,.local,}/web2c}
+
local cleanup = lpeg.replacer {
{ "!" , "" },
{ "\\" , "/" },
}
-local homedir
-
-function resolvers.cleanpath(str)
- if not homedir then
- homedir = lpegmatch(cleanup,environment.homedir or "")
- if homedir == string.char(127) or homedir == "" or not lfs.isdir(homedir) then
- if trace_expansions then
- report_expansions("no home dir set, ignoring dependent paths")
- end
- function resolvers.cleanpath(str)
- if find(str,"~") then
- return "" -- special case
- else
- return str and lpegmatch(cleanup,str)
- end
- end
- else
- cleanup = lpeg.replacer {
- { "!" , "" },
- { "\\" , "/" },
- { "~" , homedir },
- }
- function resolvers.cleanpath(str)
- return str and lpegmatch(cleanup,str)
+function resolvers.cleanpath(str) -- tricky, maybe only simple paths
+ local doslashes = (P("\\")/"/" + 1)^0
+ local donegation = (P("!") /"" )^0
+ local homedir = lpegmatch(Cs(donegation * doslashes),environment.homedir or "")
+ if homedir == "~" or homedir == "" or not lfs.isdir(homedir) then
+ if trace_expansions then
+ report_expansions("no home dir set, ignoring dependent paths")
+ end
+ function resolvers.cleanpath(str)
+ if not str or find(str,"~") then
+ return "" -- special case
+ else
+ return lpegmatch(cleanup,str)
end
end
+ else
+ local dohome = ((P("~")+P("$HOME"))/homedir)^0
+ local cleanup = Cs(donegation * dohome * doslashes)
+ function resolvers.cleanpath(str)
+ return str and lpegmatch(cleanup,str) or ""
+ end
end
return resolvers.cleanpath(str)
end
+-- print(resolvers.cleanpath(""))
+-- print(resolvers.cleanpath("!"))
+-- print(resolvers.cleanpath("~"))
+-- print(resolvers.cleanpath("~/test"))
+-- print(resolvers.cleanpath("!~/test"))
+-- print(resolvers.cleanpath("~/test~test"))
+
-- This one strips quotes and funny tokens.
local expandhome = P("~") / "$HOME" -- environment.homedir
@@ -10140,8 +11072,8 @@ end
local cache = { }
----- splitter = Ct(lpeg.splitat(S(ostype == "windows" and ";" or ":;"))) -- maybe add ,
-local splitter = Ct(lpeg.splitat(";")) -- as we move towards urls, prefixes and use tables we no longer do :
+----- splitter = lpeg.tsplitat(S(ostype == "windows" and ";" or ":;")) -- maybe add ,
+local splitter = lpeg.tsplitat(";") -- as we move towards urls, prefixes and use tables we no longer do :
local backslashswapper = lpeg.replacer("\\","/")
@@ -10201,13 +11133,22 @@ end
-local weird = P(".")^1 + lpeg.anywhere(S("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t"))
+-- a lot of this caching can be stripped away when we have ssd's everywhere
+--
+-- we could cache all the (sub)paths here if needed
local attributes, directory = lfs.attributes, lfs.dir
+local weird = P(".")^1 + lpeg.anywhere(S("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t"))
+local timer = { }
+local scanned = { }
+local nofscans = 0
+local scancache = { }
+
local function scan(files,spec,path,n,m,r)
- local full = (path == "" and spec) or (spec .. path .. '/')
- local dirs, nofdirs = { }, 0
+ local full = (path == "" and spec) or (spec .. path .. '/')
+ local dirs = { }
+ local nofdirs = 0
for name in directory(full) do
if not lpegmatch(weird,name) then
local mode = attributes(full..name,'mode')
@@ -10245,25 +11186,122 @@ local function scan(files,spec,path,n,m,r)
files, n, m, r = scan(files,spec,dirs[i],n,m,r)
end
end
+ scancache[sub(full,1,-2)] = files
return files, n, m, r
end
-function resolvers.scanfiles(path,branch)
+local fullcache = { }
+
+function resolvers.scanfiles(path,branch,usecache)
+ statistics.starttiming(timer)
+ local realpath = resolvers.resolve(path) -- no shortcut
+ if usecache then
+ local files = fullcache[realpath]
+ if files then
+ if trace_locating then
+ report_expansions("using caches scan of path '%s', branch '%s'",path,branch or path)
+ end
+ return files
+ end
+ end
if trace_locating then
- report_expansions("scanning path '%s', branch '%s'",path, branch or path)
+ report_expansions("scanning path '%s', branch '%s'",path,branch or path)
end
- local realpath = resolvers.resolve(path) -- no shortcut
local files, n, m, r = scan({ },realpath .. '/',"",0,0,0)
- files.__path__ = path -- can be selfautoparent:texmf-whatever
- files.__files__ = n
- files.__directories__ = m
- files.__remappings__ = r
+ files.__path__ = path -- can be selfautoparent:texmf-whatever
+ files.__files__ = n
+ files.__directories__ = m
+ files.__remappings__ = r
if trace_locating then
report_expansions("%s files found on %s directories with %s uppercase remappings",n,m,r)
end
+ if usecache then
+ scanned[#scanned+1] = realpath
+ fullcache[realpath] = files
+ end
+ nofscans = nofscans + 1
+ statistics.stoptiming(timer)
+ return files
+end
+
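-- a cached call looks like (hypothetical path):
--
-- resolvers.scanfiles("selfautoparent:texmf-context",nil,true)
--
-- the second time around the files table comes from fullcache instead of a rescan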
+local function simplescan(files,spec,path) -- first match only, no map and such
+ local full = (path == "" and spec) or (spec .. path .. '/')
+ local dirs = { }
+ local nofdirs = 0
+ for name in directory(full) do
+ if not lpegmatch(weird,name) then
+ local mode = attributes(full..name,'mode')
+ if mode == 'file' then
+ if not files[name] then
+ -- only first match
+ files[name] = path
+ end
+ elseif mode == 'directory' then
+ nofdirs = nofdirs + 1
+ if path ~= "" then
+ dirs[nofdirs] = path..'/'..name
+ else
+ dirs[nofdirs] = name
+ end
+ end
+ end
+ end
+ if nofdirs > 0 then
+ sort(dirs)
+ for i=1,nofdirs do
+ files = simplescan(files,spec,dirs[i])
+ end
+ end
+ return files
+end
+
+local simplecache = { }
+local nofsharedscans = 0
+
+function resolvers.simplescanfiles(path,branch,usecache)
+ statistics.starttiming(timer)
+ local realpath = resolvers.resolve(path) -- no shortcut
+ if usecache then
+ local files = simplecache[realpath]
+ if not files then
+ files = scancache[realpath]
+ if files then
+ nofsharedscans = nofsharedscans + 1
+ end
+ end
+ if files then
+ if trace_locating then
+ report_expansions("using caches scan of path '%s', branch '%s'",path,branch or path)
+ end
+ return files
+ end
+ end
+ if trace_locating then
+ report_expansions("scanning path '%s', branch '%s'",path,branch or path)
+ end
+ local files = simplescan({ },realpath .. '/',"")
+ if trace_locating then
+ report_expansions("%s files found",table.count(files))
+ end
+ if usecache then
+ scanned[#scanned+1] = realpath
+ simplecache[realpath] = files
+ end
+ nofscans = nofscans + 1
+ statistics.stoptiming(timer)
return files
end
+function resolvers.scandata()
+ table.sort(scanned)
+ return {
+ n = nofscans,
+ shared = nofsharedscans,
+ time = statistics.elapsedtime(timer),
+ paths = scanned,
+ }
+end
+
end -- of closure
@@ -10361,12 +11399,12 @@ local relations = allocate { -- todo: handlers also here
mp = {
names = { "mp" },
variable = 'MPINPUTS',
- suffixes = { 'mp' },
+ suffixes = { 'mp', 'mpvi', 'mpiv', 'mpii' },
},
tex = {
names = { "tex" },
variable = 'TEXINPUTS',
- suffixes = { 'tex', "mkiv", "mkiv", "mkii" },
+ suffixes = { 'tex', "mkvi", "mkiv", "mkii" },
},
icc = {
names = { "icc", "icc profile", "icc profiles" },
@@ -10503,7 +11541,7 @@ end
resolvers.updaterelations() -- push this in the metatable -> newindex
local function simplified(t,k)
- return rawget(t,lower(gsub(k," ","")))
+ return k and rawget(t,lower(gsub(k," ",""))) or nil
end
setmetatableindex(formats, simplified)
@@ -10589,6 +11627,7 @@ luatools with a recache feature.</p>
--ldx]]--
local format, lower, gsub, concat = string.format, string.lower, string.gsub, table.concat
+local serialize, serializetofile = table.serialize, table.tofile
local mkdirs, isdir = dir.mkdirs, lfs.isdir
local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
@@ -10651,11 +11690,7 @@ local function identify()
end
elseif not writable and caches.force then
local cacheparent = file.dirname(cachepath)
- -- TH: added 'or true' for deeply buried non-existent caches.
- -- file.is_writable() is not really important here, since this
- -- branch is only checked for as-yet non-existent paths, and
- -- it guards against mkdirs() failing, anyway.
- if file.is_writable(cacheparent) or true then
+ if file.is_writable(cacheparent) and true then -- we go on anyway (needed for mojca's kind of paths)
if not caches.ask or io.ask(format("\nShould I create the cache path %s?",cachepath), "no", { "yes", "no" }) == "yes" then
mkdirs(cachepath)
if isdir(cachepath) and file.is_writable(cachepath) then
@@ -10746,7 +11781,7 @@ function caches.usedpaths()
end
function caches.configfiles()
- return table.concat(resolvers.instance.specification,";")
+ return concat(resolvers.instance.specification,";")
end
function caches.hashed(tree)
@@ -10870,9 +11905,9 @@ function caches.savedata(filepath,filename,data,raw)
end
data.cache_uuid = os.uuid()
if caches.direct then
- file.savedata(tmaname,table.serialize(data,true,saveoptions))
+ file.savedata(tmaname,serialize(data,true,saveoptions))
else
- table.tofile(tmaname,data,true,saveoptions)
+ serializetofile(tmaname,data,true,saveoptions)
end
utilities.lua.compile(tmaname,tmcname)
end
@@ -10939,7 +11974,7 @@ function caches.savecontent(cachename,dataname,content)
content = content,
uuid = os.uuid(),
}
- local ok = io.savedata(luaname,table.serialize(data,true))
+ local ok = io.savedata(luaname,serialize(data,true))
if ok then
if trace_locating then
report_resolvers("category '%s', cachename '%s' saved in '%s'",dataname,cachename,luaname)
@@ -11112,7 +12147,7 @@ if not modules then modules = { } end modules ['data-res'] = {
 -- instance but for practical purposes we now avoid this and use an
-- instance variable. We always have one instance active (sort of global).
--- todo: cache:/// home:///
+-- todo: cache:/// home:/// selfautoparent:/// (sometime end 2012)
local format, gsub, find, lower, upper, match, gmatch = string.format, string.gsub, string.find, string.lower, string.upper, string.match, string.gmatch
local concat, insert, sortedkeys = table.concat, table.insert, table.sortedkeys
@@ -11155,7 +12190,58 @@ resolvers.criticalvars = allocate { "SELFAUTOLOC", "SELFAUTODIR", "SELFAUTOPARE
resolvers.luacnfname = 'texmfcnf.lua'
resolvers.luacnfstate = "unknown"
-resolvers.luacnfspec = '{selfautodir:,selfautoparent:};{selfautodir:,selfautoparent:}{/share,}/texmf{-local,};{selfautodir:,selfautoparent:}{/share,}/texmf{-local,}/web2c'
+-- The web2c tex binaries as well as kpse have built in paths for the configuration
+-- files and there can be a depressing truckload of them. This is actually the weak
+-- spot of a distribution. So we don't want:
+--
+-- resolvers.luacnfspec = '{$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,}/web2c}'
+--
+-- but instead use:
+--
+-- resolvers.luacnfspec = 'selfautoparent:{/texmf{-local,}{,/web2c}}'
+--
+-- which does not make texlive happy as there is a texmf-local tree one level up
+-- (sigh), so we need this. (We can assume web2c as mkiv does not run on older
+-- texlives anyway.)
+--
+-- texlive:
+--
+-- selfautodir:
+-- selfautoparent:
+-- selfautodir:share/texmf-local/web2c
+-- selfautodir:share/texmf/web2c
+-- selfautodir:texmf-local/web2c
+-- selfautodir:texmf/web2c
+-- selfautoparent:share/texmf-local/web2c
+-- selfautoparent:share/texmf/web2c
+-- selfautoparent:texmf-local/web2c
+-- selfautoparent:texmf/web2c
+--
+-- minimals:
+--
+-- home:texmf/web2c
+-- selfautoparent:texmf-local/web2c
+-- selfautoparent:texmf-context/web2c
+-- selfautoparent:texmf/web2c
+
+if this_is_texlive then
+ -- resolvers.luacnfspec = '{selfautodir:,selfautoparent:}{,{/share,}/texmf{-local,}/web2c}'
+ -- resolvers.luacnfspec = '{selfautodir:{/share,}/texmf-local/web2c,selfautoparent:{/share,}/texmf{-local,}/web2c}'
+ -- resolvers.luacnfspec = 'selfautodir:/texmf-local/web2c;selfautoparent:/texmf{-local,}/web2c'
+ resolvers.luacnfspec = 'selfautodir:;selfautoparent:;{selfautodir:,selfautoparent:}{/share,}/texmf{-local,}/web2c'
+else
+ resolvers.luacnfspec = 'home:texmf/web2c;selfautoparent:texmf{-local,-context,}/web2c'
+end
+
+-- which (as we want users to use the web2c path) can be simplified to this:
+--
+-- if environment and environment.ownpath and string.find(environment.ownpath,"[\\/]texlive[\\/]") then
+-- resolvers.luacnfspec = 'selfautodir:/texmf-local/web2c,selfautoparent:/texmf-local/web2c,selfautoparent:/texmf/web2c'
+-- else
+-- resolvers.luacnfspec = 'selfautoparent:/texmf-local/web2c,selfautoparent:/texmf/web2c'
+-- end
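+--
+-- a minimal sketch of how such a spec is consumed (splitpath and
+-- expandedpathfromlist are the same resolver helpers used below when the
+-- configuration files are identified):
+--
+-- local cnfpaths = resolvers.expandedpathfromlist(resolvers.splitpath(resolvers.luacnfspec))
+-- for i=1,#cnfpaths do
+--     print(cnfpaths[i]) -- each candidate directory that may hold texmfcnf.lua
+-- end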
+
+
local unset_variable = "unset"
@@ -11177,7 +12263,7 @@ local instance = resolvers.instance or nil -- the current one (fast access)
function resolvers.setenv(key,value,raw)
if instance then
-- this one will be consulted first when we stay inside
- -- the current environment
+ -- the current environment (prefixes are not resolved here)
instance.environment[key] = value
-- we feed back into the environment, and as this is used
-- by other applications (via os.execute) we need to make
@@ -11257,6 +12343,7 @@ function resolvers.newinstance() -- todo: all vars will become lowercase and alp
remember = true,
diskcache = true,
renewcache = false,
+ renewtree = false,
loaderror = false,
savelists = true,
pattern = nil, -- lists
@@ -11348,7 +12435,7 @@ local function makepathexpression(str)
end
end
-local function reportcriticalvariables()
+local function reportcriticalvariables(cnfspec)
if trace_locating then
for i=1,#resolvers.criticalvars do
local k = resolvers.criticalvars[i]
@@ -11356,6 +12443,14 @@ local function reportcriticalvariables()
report_resolving("variable '%s' set to '%s'",k,v)
end
report_resolving()
+ if cnfspec then
+ if type(cnfspec) == "table" then
+ report_resolving("using configuration specification '%s'",concat(cnfspec,","))
+ else
+ report_resolving("using configuration specification '%s'",cnfspec)
+ end
+ end
+ report_resolving()
end
reportcriticalvariables = function() end
end
@@ -11370,7 +12465,7 @@ local function identify_configuration_files()
else
resolvers.luacnfstate = "environment"
end
- reportcriticalvariables()
+ reportcriticalvariables(cnfspec)
local cnfpaths = expandedpathfromlist(resolvers.splitpath(cnfspec))
local luacnfname = resolvers.luacnfname
for i=1,#cnfpaths do
@@ -11406,6 +12501,19 @@ local function load_configuration_files()
if blob then
local setups = instance.setups
local data = blob()
+ local parent = data and data.parent
+ if parent then
+ local filename = filejoin(pathname,parent)
+ local realname = resolvers.resolve(filename) -- no shortcut
+ local blob = loadfile(realname)
+ if blob then
+ local parentdata = blob()
+ if parentdata then
+ report_resolving("loading configuration file '%s'",filename)
+ data = table.merged(parentdata,data)
+ end
+ end
+ end
data = data and data.content
if data then
if trace_locating then
@@ -11495,11 +12603,6 @@ local function locate_file_databases()
local runtime = stripped == path
path = resolvers.cleanpath(path)
local spec = resolvers.splitmethod(stripped)
- -- TH Original did this the other way around (elseif and if tests
- -- reversed) but then the else branch was never reached, as 'runtime'
- -- would effectively be ignored. In turn, that meant that the wrong
- -- locator method was used, such that tree: and file: were treated
- -- identically (cached and no runtime search).
if runtime and (spec.noscheme or spec.scheme == "file") then
stripped = "tree:///" .. stripped
elseif spec.scheme == "cache" or spec.scheme == "file" then
@@ -11507,7 +12610,7 @@ local function locate_file_databases()
end
if trace_locating then
if runtime then
- report_resolving("locating list of '%s' (runtime)",path)
+ report_resolving("locating list of '%s' (runtime) (%s)",path,stripped)
else
report_resolving("locating list of '%s' (cached)",path)
end
@@ -11551,6 +12654,39 @@ local function save_file_databases() -- will become cachers
end
end
+function resolvers.renew(hashname)
+ if hashname and hashname ~= "" then
+ local expanded = resolvers.expansion(hashname) or ""
+ if expanded ~= "" then
+ if trace_locating then
+ report_resolving("identifying tree '%s' from '%s'",expanded,hashname)
+ end
+ hashname = expanded
+ else
+ if trace_locating then
+ report_resolving("identifying tree '%s'",hashname)
+ end
+ end
+ local realpath = resolvers.resolve(hashname)
+ if lfs.isdir(realpath) then
+ if trace_locating then
+ report_resolving("using path '%s'",realpath)
+ end
+ methodhandler('generators',hashname)
+ -- could be shared
+ local content = instance.files[hashname]
+ caches.collapsecontent(content)
+ if trace_locating then
+ report_resolving("saving tree '%s'",hashname)
+ end
+ caches.savecontent(hashname,"files",content)
+ -- till here
+ else
+ report_resolving("invalid path '%s'",realpath)
+ end
+ end
+end
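+
+-- a minimal sketch of a renew call (the tree name below is just an example),
+-- the same route the "mtxrun --generate <name>" branch takes further down:
+--
+-- resolvers.load("nofiles")
+-- trackers.enable("resolvers.locating")
+-- resolvers.renew("selfautoparent:texmf-local")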
+
local function load_databases()
locate_file_databases()
if instance.diskcache and not instance.renewcache then
@@ -11838,6 +12974,7 @@ local function collect_files(names)
if dname == "" or find(dname,"^%.") then
dname = false
else
+        dname = gsub(dname,"*","%.*")
dname = "/" .. dname .. "$"
end
local hashes = instance.hashes
@@ -11897,9 +13034,22 @@ local function collect_files(names)
return noffiles > 0 and filelist or nil
end
-function resolvers.registerintrees(name)
- if not find(name,"^%.") then
- instance.foundintrees[name] = (instance.foundintrees[name] or 0) + 1 -- maybe only one
+local fit = { }
+
+function resolvers.registerintrees(filename,format,filetype,usedmethod,foundname)
+ local foundintrees = instance.foundintrees
+ if usedmethod == "direct" and filename == foundname and fit[foundname] then
+ -- just an extra lookup after a test on presence
+ else
+ local t = {
+ filename = filename,
+ format = format ~= "" and format or nil,
+ filetype = filetype ~= "" and filetype or nil,
+ usedmethod = usedmethod,
+ foundname = foundname,
+ }
+ fit[foundname] = t
+ foundintrees[#foundintrees+1] = t
end
end
@@ -11919,297 +13069,423 @@ end
local preparetreepattern = Cs((P(".")/"%%." + P("-")/"%%-" + P(1))^0 * Cc("$"))
--- this one is split in smaller functions but it needs testing
+-- -- -- begin of main file search routing -- -- -- needs checking as previous has been patched
-local function collect_instance_files(filename,askedformat,allresults) -- todo : plugin (scanners, checkers etc)
- local result = { }
- local stamp = nil
- askedformat = askedformat or ""
- filename = collapsepath(filename)
- -- speed up / beware: format problem
- if instance.remember and not allresults then
- stamp = filename .. "--" .. askedformat
- if instance.found[stamp] then
+local collect_instance_files
+
+local function find_analyze(filename,askedformat,allresults)
+ local filetype, wantedfiles, ext = '', { }, fileextname(filename)
+ -- too tricky as filename can be bla.1.2.3:
+ --
+ -- if not suffixmap[ext] then
+ -- wantedfiles[#wantedfiles+1] = filename
+ -- end
+ wantedfiles[#wantedfiles+1] = filename
+ if askedformat == "" then
+ if ext == "" or not suffixmap[ext] then
+ local defaultsuffixes = resolvers.defaultsuffixes
+ for i=1,#defaultsuffixes do
+ local forcedname = filename .. '.' .. defaultsuffixes[i]
+ wantedfiles[#wantedfiles+1] = forcedname
+ filetype = resolvers.formatofsuffix(forcedname)
+ if trace_locating then
+ report_resolving("forcing filetype '%s'",filetype)
+ end
+ end
+ else
+ filetype = resolvers.formatofsuffix(filename)
if trace_locating then
- report_resolving("remembered file '%s'",filename)
+ report_resolving("using suffix based filetype '%s'",filetype)
end
- resolvers.registerintrees(filename) -- for tracing used files
- return instance.found[stamp]
end
- end
- if not dangerous[askedformat] then
- if isreadable(filename) then
- if trace_detail then
- report_resolving("file '%s' found directly",filename)
- end
- if stamp then
- instance.found[stamp] = { filename }
+ else
+ if ext == "" or not suffixmap[ext] then
+ local format_suffixes = suffixes[askedformat]
+ if format_suffixes then
+ for i=1,#format_suffixes do
+ wantedfiles[#wantedfiles+1] = filename .. "." .. format_suffixes[i]
+ end
end
- return { filename }
+ end
+ filetype = askedformat
+ if trace_locating then
+ report_resolving("using given filetype '%s'",filetype)
end
end
+ return filetype, wantedfiles
+end
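+
+-- so find_analyze returns the filetype (taken from the asked format, derived
+-- from the suffix map, or forced via the default suffixes) together with the
+-- list of candidate filenames to try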
+
+local function find_direct(filename,allresults)
+ if not dangerous[askedformat] and isreadable(filename) then
+ if trace_detail then
+ report_resolving("file '%s' found directly",filename)
+ end
+ return "direct", { filename }
+ end
+end
+
+local function find_wildcard(filename,allresults)
if find(filename,'%*') then
if trace_locating then
report_resolving("checking wildcard '%s'", filename)
end
- result = resolvers.findwildcardfiles(filename) -- we can use th elocal
- elseif file.is_qualified_path(filename) then
- if isreadable(filename) then
- if trace_locating then
- report_resolving("qualified name '%s'", filename)
- end
- result = { filename }
- else
- local forcedname, ok, suffix = "", false, fileextname(filename)
- if suffix == "" then -- why
- local format_suffixes = askedformat == "" and resolvers.defaultsuffixes or suffixes[askedformat]
- if format_suffixes then
- for i=1,#format_suffixes do
- local s = format_suffixes[i]
- forcedname = filename .. "." .. s
- if isreadable(forcedname) then
- if trace_locating then
- report_resolving("no suffix, forcing format filetype '%s'", s)
- end
- result, ok = { forcedname }, true
- break
- end
- end
- end
- end
- if not ok and suffix ~= "" then
- -- try to find in tree (no suffix manipulation), here we search for the
- -- matching last part of the name
- local basename = filebasename(filename)
- local pattern = lpegmatch(preparetreepattern,filename)
- -- messy .. to be sorted out
- local savedformat = askedformat
- local format = savedformat or ""
- if format == "" then
- askedformat = resolvers.formatofsuffix(suffix)
- end
- if not format then
- askedformat = "othertextfiles" -- kind of everything, maybe texinput is better
- end
- --
- if basename ~= filename then
- local resolved = collect_instance_files(basename,askedformat,allresults)
- if #result == 0 then -- shouldn't this be resolved ?
- local lowered = lower(basename)
- if filename ~= lowered then
- resolved = collect_instance_files(lowered,askedformat,allresults)
- end
- end
- resolvers.format = savedformat
- --
- for r=1,#resolved do
- local rr = resolved[r]
- if find(rr,pattern) then
- result[#result+1], ok = rr, true
- end
- end
- end
- -- a real wildcard:
- --
- -- if not ok then
- -- local filelist = collect_files({basename})
- -- for f=1,#filelist do
- -- local ff = filelist[f][3] or ""
- -- if find(ff,pattern) then
- -- result[#result+1], ok = ff, true
- -- end
- -- end
- -- end
- end
- if not ok and trace_locating then
- report_resolving("qualified name '%s'", filename)
- end
+ local method, result = resolvers.findwildcardfiles(filename)
+ if result then
+ return "wildcard", result
end
- else
- -- search spec
- local filetype, done, wantedfiles, ext = '', false, { }, fileextname(filename)
- -- -- tricky as filename can be bla.1.2.3
- -- if not suffixmap[ext] then --- probably needs to be done elsewhere too
- -- wantedfiles[#wantedfiles+1] = filename
- -- end
- wantedfiles[#wantedfiles+1] = filename
- if askedformat == "" then
- if ext == "" or not suffixmap[ext] then
- local defaultsuffixes = resolvers.defaultsuffixes
- for i=1,#defaultsuffixes do
- local forcedname = filename .. '.' .. defaultsuffixes[i]
- wantedfiles[#wantedfiles+1] = forcedname
- filetype = resolvers.formatofsuffix(forcedname)
+ end
+end
+
+local function find_qualified(filename,allresults) -- this one will be split too
+ if not file.is_qualified_path(filename) then
+ return
+ end
+ if trace_locating then
+ report_resolving("checking qualified name '%s'", filename)
+ end
+ if isreadable(filename) then
+ if trace_detail then
+ report_resolving("qualified file '%s' found", filename)
+ end
+ return "qualified", { filename }
+ end
+ if trace_detail then
+ report_resolving("locating qualified file '%s'", filename)
+ end
+ local forcedname, suffix = "", fileextname(filename)
+ if suffix == "" then -- why
+ local format_suffixes = askedformat == "" and resolvers.defaultsuffixes or suffixes[askedformat]
+ if format_suffixes then
+ for i=1,#format_suffixes do
+ local s = format_suffixes[i]
+ forcedname = filename .. "." .. s
+ if isreadable(forcedname) then
if trace_locating then
- report_resolving("forcing filetype '%s'",filetype)
+ report_resolving("no suffix, forcing format filetype '%s'", s)
end
+ return "qualified", { forcedname }
end
- else
- filetype = resolvers.formatofsuffix(filename)
- if trace_locating then
- report_resolving("using suffix based filetype '%s'",filetype)
+ end
+ end
+ end
+ if suffix and suffix ~= "" then
+ -- try to find in tree (no suffix manipulation), here we search for the
+ -- matching last part of the name
+ local basename = filebasename(filename)
+ local pattern = lpegmatch(preparetreepattern,filename)
+ -- messy .. to be sorted out
+ local savedformat = askedformat
+ local format = savedformat or ""
+ if format == "" then
+ askedformat = resolvers.formatofsuffix(suffix)
+ end
+ if not format then
+ askedformat = "othertextfiles" -- kind of everything, maybe all
+ end
+ --
+ if basename ~= filename then
+ local resolved = collect_instance_files(basename,askedformat,allresults)
+ if #resolved == 0 then
+ local lowered = lower(basename)
+ if filename ~= lowered then
+ resolved = collect_instance_files(lowered,askedformat,allresults)
end
end
- else
- if ext == "" or not suffixmap[ext] then
- local format_suffixes = suffixes[askedformat]
- if format_suffixes then
- for i=1,#format_suffixes do
- wantedfiles[#wantedfiles+1] = filename .. "." .. format_suffixes[i]
+ resolvers.format = savedformat
+ --
+ if #resolved > 0 then
+ local result = { }
+ for r=1,#resolved do
+ local rr = resolved[r]
+ if find(rr,pattern) then
+ result[#result+1] = rr
end
end
+ if #result > 0 then
+ return "qualified", result
+ end
end
- filetype = askedformat
- if trace_locating then
- report_resolving("using given filetype '%s'",filetype)
- end
end
- local typespec = resolvers.variableofformat(filetype)
- local pathlist = resolvers.expandedpathlist(typespec)
- if not pathlist or #pathlist == 0 then
- -- no pathlist, access check only / todo == wildcard
- if trace_detail then
- report_resolving("checking filename '%s', filetype '%s', wanted files '%s'",filename, filetype or '?',concat(wantedfiles," | "))
+ -- a real wildcard:
+ --
+ -- local filelist = collect_files({basename})
+ -- result = { }
+ -- for f=1,#filelist do
+ -- local ff = filelist[f][3] or ""
+ -- if find(ff,pattern) then
+ -- result[#result+1], ok = ff, true
+ -- end
+ -- end
+ -- if #result > 0 then
+ -- return "qualified", result
+ -- end
+ end
+end
+
+local function check_subpath(fname)
+ if isreadable(fname) then
+ if trace_detail then
+ report_resolving("found '%s' by deep scanning",fname)
+ end
+ return fname
+ end
+end
+
+local function find_intree(filename,filetype,wantedfiles,allresults)
+ local typespec = resolvers.variableofformat(filetype)
+ local pathlist = resolvers.expandedpathlist(typespec)
+ local method = "intree"
+ if pathlist and #pathlist > 0 then
+ -- list search
+ local filelist = collect_files(wantedfiles)
+ local dirlist = { }
+ if filelist then
+ for i=1,#filelist do
+ dirlist[i] = filedirname(filelist[i][3]) .. "/" -- was [2] .. gamble
end
- for k=1,#wantedfiles do
- local fname = wantedfiles[k]
- if fname and isreadable(fname) then
- filename, done = fname, true
- result[#result+1] = filejoin('.',fname)
- break
+ end
+ if trace_detail then
+ report_resolving("checking filename '%s'",filename)
+ end
+ local result = { }
+ for k=1,#pathlist do
+ local path = pathlist[k]
+ local pathname = lpegmatch(inhibitstripper,path)
+ local doscan = path == pathname -- no ^!!
+ if not find (pathname,'//$') then
+ doscan = false -- we check directly on the path
+ end
+ local done = false
+ -- using file list
+ if filelist then -- database
+ -- compare list entries with permitted pattern -- /xx /xx//
+ local expression = makepathexpression(pathname)
+ if trace_detail then
+ report_resolving("using pattern '%s' for path '%s'",expression,pathname)
end
- end
- -- this is actually 'other text files' or 'any' or 'whatever'
- local filelist = collect_files(wantedfiles)
- local fl = filelist and filelist[1]
- if fl then
- filename = fl[3] -- not local?
- result[#result+1] = resolvers.resolve(filename)
- done = true
- end
- else
- -- list search
- local filelist = collect_files(wantedfiles)
- local dirlist = { }
- if filelist then
- for i=1,#filelist do
- dirlist[i] = filedirname(filelist[i][3]) .. "/" -- was [2] .. gamble
- end
- end
- if trace_detail then
- report_resolving("checking filename '%s'",filename)
- end
- for k=1,#pathlist do
- local path = pathlist[k]
- local pathname = lpegmatch(inhibitstripper,path)
- local doscan = path == pathname -- no ^!!
- if not find (pathname, '//$') then doscan = false end
- done = false
- -- using file list
- if filelist then
- -- compare list entries with permitted pattern -- /xx /xx//
- local expression = makepathexpression(pathname)
- if trace_detail then
- report_resolving("using pattern '%s' for path '%s'",expression,pathname)
- end
- for k=1,#filelist do
- local fl = filelist[k]
- local f = fl[2]
- local d = dirlist[k]
- if find(d,expression) then
- -- todo, test for readable
- result[#result+1] = resolvers.resolve(fl[3]) -- no shortcut
- done = true
- if allresults then
- if trace_detail then
- report_resolving("match to '%s' in hash for file '%s' and path '%s', continue scanning",expression,f,d)
- end
- else
- if trace_detail then
- report_resolving("match to '%s' in hash for file '%s' and path '%s', quit scanning",expression,f,d)
- end
- break
+ for k=1,#filelist do
+ local fl = filelist[k]
+ local f = fl[2]
+ local d = dirlist[k]
+ if find(d,expression) then
+ -- todo, test for readable
+ result[#result+1] = resolvers.resolve(fl[3]) -- no shortcut
+ done = true
+ if allresults then
+ if trace_detail then
+ report_resolving("match to '%s' in hash for file '%s' and path '%s', continue scanning",expression,f,d)
end
- elseif trace_detail then
- report_resolving("no match to '%s' in hash for file '%s' and path '%s'",expression,f,d)
+ else
+ if trace_detail then
+ report_resolving("match to '%s' in hash for file '%s' and path '%s', quit scanning",expression,f,d)
+ end
+ break
end
+ elseif trace_detail then
+ report_resolving("no match to '%s' in hash for file '%s' and path '%s'",expression,f,d)
end
end
- if not done then
- -- check if on disk / unchecked / does not work at all / also zips
- -- TH perhaps it did not work because of missing resolvers.resolve()
- -- and resolvers.scanfiles() calls ...
- pathname = resolvers.resolve(pathname)
- local scheme = url.hasscheme(pathname)
- if not scheme or scheme == "file" then
- local pname = gsub(pathname,"%.%*$",'')
- if not find(pname,"%*") then
- local ppname = gsub(pname,"/+$","")
- if can_be_dir(ppname) then
- local files = {}
- if doscan then files = resolvers.scanfiles(ppname) end
+ end
+ if done then
+ method = "database"
+ else
+ method = "filesystem" -- bonus, even when !! is specified
+ pathname = gsub(pathname,"/+$","")
+ pathname = resolvers.resolve(pathname)
+ local scheme = url.hasscheme(pathname)
+ if not scheme or scheme == "file" then
+ local pname = gsub(pathname,"%.%*$",'')
+ if not find(pname,"%*") then
+ if can_be_dir(pname) then
+ -- quick root scan first
+ for k=1,#wantedfiles do
+ local w = wantedfiles[k]
+ local fname = check_subpath(filejoin(pname,w))
+ if fname then
+ result[#result+1] = fname
+ done = true
+ if not allresults then
+ break
+ end
+ end
+ end
+ if not done and doscan then
+ -- collect files in path (and cache the result)
+ local files = resolvers.simplescanfiles(pname,false,true)
for k=1,#wantedfiles do
local w = wantedfiles[k]
- local subpath = ''
- if files[w] then
- subpath = type(files[w]) == "table" and files[w][1] or files[w]
- end
- local fname = filejoin(ppname,subpath,w)
- if isreadable(fname) then
- if trace_detail then
- report_resolving("found '%s' by scanning",fname)
+ local subpath = files[w]
+ if not subpath or subpath == "" then
+ -- rootscan already done
+ elseif type(subpath) == "string" then
+ local fname = check_subpath(filejoin(pname,subpath,w))
+ if fname then
+ result[#result+1] = fname
+ done = true
+ if not allresults then
+ break
+ end
+ end
+ else
+ for i=1,#subpath do
+ local sp = subpath[i]
+ if sp == "" then
+ -- roottest already done
+ else
+ local fname = check_subpath(filejoin(pname,sp,w))
+ if fname then
+ result[#result+1] = fname
+ done = true
+ if not allresults then
+ break
+ end
+ end
+ end
+ end
+ if done and not allresults then
+ break
end
- result[#result+1] = fname
- done = true
- if not allresults then break end
end
end
- else
- -- no access needed for non existing path, speedup (esp in large tree with lots of fake)
end
end
+ else
+ -- no access needed for non existing path, speedup (esp in large tree with lots of fake)
end
end
- if not done and doscan then
- -- todo: slow path scanning ... although we now have tree:// supported in $TEXMF
- end
- if done and not allresults then break end
+ end
+ -- todo recursive scanning
+ if done and not allresults then
+ break
end
end
+ if #result > 0 then
+ return method, result
+ end
end
- for k=1,#result do
- local rk = collapsepath(result[k])
- result[k] = rk
- resolvers.registerintrees(rk) -- for tracing used files
+end
+
+local function find_onpath(filename,filetype,wantedfiles,allresults)
+ if trace_detail then
+ report_resolving("checking filename '%s', filetype '%s', wanted files '%s'",filename, filetype or '?',concat(wantedfiles," | "))
+ end
+ local result = { }
+ for k=1,#wantedfiles do
+ local fname = wantedfiles[k]
+ if fname and isreadable(fname) then
+ filename = fname
+ result[#result+1] = filejoin('.',fname)
+ if not allresults then
+ break
+ end
+ end
end
- if stamp then
- instance.found[stamp] = result
+ if #result > 0 then
+ return "onpath", result
end
- return result
end
--- -- -- begin of main file search routing -- -- --
-
-
-
-
-
-
-
+local function find_otherwise(filename,filetype,wantedfiles,allresults) -- other text files | any | whatever
+ local filelist = collect_files(wantedfiles)
+ local fl = filelist and filelist[1]
+ if fl then
+ return "otherwise", { resolvers.resolve(fl[3]) } -- filename
+ end
+end
+-- we could have a loop over the six finder functions but then we would
+-- always have to run the analyzer first
+collect_instance_files = function(filename,askedformat,allresults) -- uses nested
+ askedformat = askedformat or ""
+ filename = collapsepath(filename)
+ if allresults then
+ -- no need for caching, only used for tracing
+ local filetype, wantedfiles = find_analyze(filename,askedformat)
+ local results = {
+ { find_direct (filename,true) },
+ { find_wildcard (filename,true) },
+ { find_qualified(filename,true) },
+ { find_intree (filename,filetype,wantedfiles,true) },
+ { find_onpath (filename,filetype,wantedfiles,true) },
+ { find_otherwise(filename,filetype,wantedfiles,true) },
+ }
+ local result, status, done = { }, { }, { }
+ for k, r in next, results do
+ local method, list = r[1], r[2]
+ if method and list then
+ for i=1,#list do
+ local c = collapsepath(list[i])
+ if not done[c] then
+ result[#result+1] = c
+ done[c] = true
+ end
+ status[#status+1] = format("%-10s: %s",method,c)
+ end
+ end
+ end
+ if trace_detail then
+ report_resolving("lookup status: %s",table.serialize(status,filename))
+ end
+ return result, status
+ else
+ local method, result, stamp, filetype, wantedfiles
+ if instance.remember then
+ stamp = format("%s--%s", filename, askedformat)
+ result = stamp and instance.found[stamp]
+ if result then
+ if trace_locating then
+ report_resolving("remembered file '%s'",filename)
+ end
+ return result
+ end
+ end
+ method, result = find_direct(filename)
+ if not result then
+ method, result = find_wildcard(filename)
+ if not result then
+ method, result = find_qualified(filename)
+ if not result then
+ filetype, wantedfiles = find_analyze(filename,askedformat)
+ method, result = find_intree(filename,filetype,wantedfiles)
+ if not result then
+ method, result = find_onpath(filename,filetype,wantedfiles)
+ if not result then
+ method, result = find_otherwise(filename,filetype,wantedfiles)
+ end
+ end
+ end
+ end
+ end
+ if result and #result > 0 then
+ local foundname = collapsepath(result[1])
+ resolvers.registerintrees(filename,askedformat,filetype,method,foundname)
+ result = { foundname }
+ else
+ result = { } -- maybe false
+ end
+ if stamp then
+ if trace_locating then
+ report_resolving("remembering file '%s'",filename)
+ end
+ instance.found[stamp] = result
+ end
+ return result
+ end
+end
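+
+-- so the single-result cascade above is: remembered -> direct -> wildcard ->
+-- qualified -> intree (database or filesystem) -> onpath -> otherwise; the
+-- first finder that returns a list wins and gets registered in foundintrees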
-- -- -- end of main file search routing -- -- --
+
local function findfiles(filename,filetype,allresults)
- local result = collect_instance_files(filename,filetype or "",allresults)
- if #result == 0 then
+ local result, status = collect_instance_files(filename,filetype or "",allresults)
+ if not result or #result == 0 then
local lowered = lower(filename)
if filename ~= lowered then
- return collect_instance_files(lowered,filetype or "",allresults)
+ result, status = collect_instance_files(lowered,filetype or "",allresults)
end
end
- return result
+ return result or { }, status
end
function resolvers.findfiles(filename,filetype)
@@ -12370,6 +13646,10 @@ function resolvers.load(option)
return files and next(files) and true
end
+function resolvers.loadtime()
+ return statistics.elapsedtime(instance)
+end
+
local function report(str)
if trace_locating then
report_resolving(str) -- has already verbose
@@ -12383,6 +13663,9 @@ function resolvers.dowithfilesandreport(command, files, ...) -- will move
if trace_locating then
report('') -- ?
end
+ if type(files) == "string" then
+ files = { files }
+ end
for f=1,#files do
local file = files[f]
local result = command(file,...)
@@ -12540,6 +13823,8 @@ local cleanpath, findgivenfile, expansion = resolvers.cleanpath, resolvers.findg
local getenv = resolvers.getenv -- we can probably also use resolvers.expansion
local P, Cs, lpegmatch = lpeg.P, lpeg.Cs, lpeg.match
+-- getenv = function(...) return resolvers.getenv(...) end -- needs checking (definitions changes later on)
+
prefixes.environment = function(str)
return cleanpath(expansion(str))
end
@@ -12578,7 +13863,7 @@ end
prefixes.filename = function(str)
local fullname = findgivenfile(str) or ""
- return cleanpath(file.basename((fullname ~= "" and fullname) or str))
+ return cleanpath(file.basename((fullname ~= "" and fullname) or str)) -- no cleanpath needed here
end
prefixes.pathname = function(str)
@@ -12621,8 +13906,9 @@ function resolvers.allprefixes(separator)
end
local function _resolve_(method,target)
- if prefixes[method] then
- return prefixes[method](target)
+ local action = prefixes[method]
+ if action then
+ return action(target)
else
return method .. ":" .. target
end
@@ -12637,7 +13923,7 @@ end
local function resolve(str) -- use schemes, this one is then for the commandline only
local res = resolved[str]
if not res then
- res = gsub(str,"([a-z][a-z]+):([^ \"\';]*)",_resolve_)
+ res = gsub(str,"([a-z][a-z]+):([^ \"\';]*)",_resolve_) -- home:xx;selfautoparent:xx; etc
resolved[str] = res
abstract[res] = str
end
@@ -12797,7 +14083,7 @@ end
function generators.file(specification)
local path = specification.filename
- local content = resolvers.scanfiles(path)
+ local content = resolvers.scanfiles(path,false,true) -- scan once
resolvers.registerfilehash(path,content,true)
end
@@ -13152,6 +14438,18 @@ local archives = zip.archives
zip.registeredfiles = zip.registeredfiles or { }
local registeredfiles = zip.registeredfiles
+local limited = false
+
+directives.register("system.inputmode", function(v)
+ if not limited then
+ local i_limiter = io.i_limiter(v)
+ if i_limiter then
+ zip.open = i_limiter.protect(zip.open)
+ limited = true
+ end
+ end
+end)
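+
+-- i.e. once an input mode is set via this directive, zip.open goes through the
+-- io limiter; the 'limited' flag makes sure it is only wrapped once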
+
local function validzip(str) -- todo: use url splitter
if not find(str,"^zip://") then
return "zip:///" .. str
@@ -13419,11 +14717,11 @@ function resolvers.finders.tree(specification)
end
function resolvers.locators.tree(specification)
- -- TH: resolvers.resolve() call was missing here
- local name = resolvers.resolve(specification.filename)
- if name ~= '' and lfs.isdir(name) then
+ local name = specification.filename
+ local realname = resolvers.resolve(name) -- no shortcut
+ if realname and realname ~= '' and lfs.isdir(realname) then
if trace_locating then
- report_trees("locator '%s' found",name)
+ report_trees("locator '%s' found",realname)
end
resolvers.appendhash('tree',name,false) -- don't cache
elseif trace_locating then
@@ -13437,6 +14735,8 @@ function resolvers.hashers.tree(specification)
report_trees("analysing '%s'",name)
end
resolvers.methodhandler("hashers",name)
+
+ resolvers.generators.file(specification)
end
resolvers.concatinators.tree = resolvers.concatinators.file
@@ -13596,8 +14896,15 @@ local function loaded(libpaths,name,simple)
end
package.loaders[2] = function(name) -- was [#package.loaders+1]
- if trace_locating then -- mode detail
- report_libraries("! locating '%s'",name)
+ if file.suffix(name) == "" then
+ name = file.addsuffix(name,"lua") -- maybe a list
+ if trace_locating then -- mode detail
+ report_libraries("! locating '%s' with forced suffix",name)
+ end
+ else
+ if trace_locating then -- mode detail
+ report_libraries("! locating '%s'",name)
+ end
end
for i=1,#libformats do
local format = libformats[i]
@@ -13804,7 +15111,13 @@ function resolvers.load_tree(tree,resolve)
-- Beware, we need to obey the relocatable autoparent so we
-- set TEXMFCNF to its raw value. This is somewhat tricky when
-- we run a mkii job from within. Therefore, in mtxrun, there
- -- is a resolve applied when we're in mkii/kpse mode.
+ -- is a resolve applied when we're in mkii/kpse mode or when
+ -- --resolve is passed to mtxrun. Maybe we should also set the
+    -- local AUTOPARENT etc. although these are always set anew.
+
+ if resolve then
+ resolvers.luacnfspec = resolvers.resolve(resolvers.luacnfspec)
+ end
setenv('SELFAUTOPARENT', newroot)
setenv('SELFAUTODIR', newtree)
@@ -13812,11 +15125,12 @@ function resolvers.load_tree(tree,resolve)
setenv('TEXROOT', newroot)
setenv('TEXOS', texos)
setenv('TEXMFOS', texmfos)
- setenv('TEXMFCNF', resolvers.luacnfspec, not resolve)
- setenv("PATH", newpath .. io.pathseparator .. getenv("PATH"))
+ setenv('TEXMFCNF', resolvers.luacnfspec,true) -- already resolved
+ setenv('PATH', newpath .. io.pathseparator .. getenv('PATH'))
report_tds("changing from root '%s' to '%s'",oldroot,newroot)
- report_tds("prepending '%s' to binary path",newpath)
+ report_tds("prepending '%s' to PATH",newpath)
+ report_tds("setting TEXMFCNF to '%s'",resolvers.luacnfspec)
report_tds()
end
end
@@ -13837,6 +15151,7 @@ if not modules then modules = { } end modules ['data-lst'] = {
-- used in mtxrun, can be loaded later .. todo
local find, concat, upper, format = string.find, table.concat, string.upper, string.format
+local fastcopy, sortedpairs = table.fastcopy, table.sortedpairs
resolvers.listers = resolvers.listers or { }
@@ -13867,10 +15182,10 @@ function resolvers.listers.variables(pattern)
end
end
end
- local env = table.fastcopy(environment)
- local var = table.fastcopy(variables)
- local exp = table.fastcopy(expansions)
- for key, value in table.sortedpairs(configured) do
+ local env = fastcopy(environment)
+ local var = fastcopy(variables)
+ local exp = fastcopy(expansions)
+ for key, value in sortedpairs(configured) do
if key ~= "" and (pattern == "" or find(upper(key),pattern)) then
report_lists(key)
report_lists(" env: %s",tabstr(rawget(environment,key)) or "unset")
@@ -13879,9 +15194,9 @@ function resolvers.listers.variables(pattern)
report_lists(" res: %s",resolvers.resolve(expansions[key]) or "unset")
end
end
- instance.environment = table.fastcopy(env)
- instance.variables = table.fastcopy(var)
- instance.expansions = table.fastcopy(exp)
+ instance.environment = fastcopy(env)
+ instance.variables = fastcopy(var)
+ instance.expansions = fastcopy(exp)
end
function resolvers.listers.configurations(report)
@@ -14168,8 +15483,8 @@ own = { } -- not local, might change
own.libs = { -- order can be made better
'l-string.lua',
- 'l-lpeg.lua',
'l-table.lua',
+ 'l-lpeg.lua',
'l-io.lua',
'l-number.lua',
'l-set.lua',
@@ -14240,7 +15555,7 @@ end
-- End of hack.
-local format, gsub, gmatch, match = string.format, string.gsub, string.gmatch, string.match
+local format, gsub, gmatch, match, find = string.format, string.gsub, string.gmatch, string.match, string.find
local concat = table.concat
own.name = (environment and environment.ownname) or arg[0] or 'mtxrun.lua'
@@ -14362,7 +15677,7 @@ local helpinfo = [[
--resolve resolve prefixed arguments
--ctxlua run internally (using preloaded libs)
--internal run script using built in libraries (same as --ctxlua)
---locate locate given filename
+--locate locate given filename in database (default) or system (--first --all --detail)
--autotree use texmf tree cf. env 'texmfstart_tree' or 'texmfstarttree'
--tree=pathtotree use given texmf tree (default file: 'setuptex.tmf')
@@ -14410,7 +15725,7 @@ local helpinfo = [[
local application = logs.application {
name = "mtxrun",
- banner = "ConTeXt TDS Runner Tool 1.30",
+ banner = "ConTeXt TDS Runner Tool 1.31",
helpinfo = helpinfo,
}
@@ -14455,12 +15770,12 @@ runners.registered = {
xmltools = { 'xmltools.rb', true },
-- luatools = { 'luatools.lua', true },
mtxtools = { 'mtxtools.rb', true },
- pdftrimwhite = { 'pdftrimwhite.pl', false }
+ pdftrimwhite = { 'pdftrimwhite.pl', false },
}
runners.launchers = {
windows = { },
- unix = { }
+ unix = { },
}
-- like runners.libpath("framework"): looks on script's subpath
@@ -14487,7 +15802,7 @@ function runners.prepare()
end
local touchname = environment.argument("iftouched")
if type(touchname) == "string" and touchname ~= "" then
- local oldname, newname = string.split(touchname, ",")
+ local oldname, newname = string.splitup(touchname, ",")
if oldname and newname and oldname ~= "" and newname ~= "" then
if not file.needs_updating(oldname,newname) then
if e_verbose then
@@ -14521,11 +15836,11 @@ function runners.execute_script(fullname,internal,nosplit)
if path ~= "" then
result = fullname
elseif name then
- name = name:gsub("^int[%a]*:",function()
+ name = gsub(name,"^int[%a]*:",function()
internal = true
return ""
end )
- name = name:gsub("^script:","")
+ name = gsub(name,"^script:","")
if suffix == "" and runners.registered[name] and runners.registered[name][1] then
name = runners.registered[name][1]
suffix = file.extname(name)
@@ -14555,13 +15870,15 @@ function runners.execute_script(fullname,internal,nosplit)
environment.ownscript = result
dofile(result)
else
- local quotedresult = result
- if string.match(result,' ') and not string.match(result,"^\".*\"$") then quotedresult = '"' .. result .. '"' end
local binary = runners.applications[file.extname(result)]
+ result = string.quoted(string.unquoted(result))
+ -- if string.match(result,' ') and not string.match(result,"^\".*\"$") then
+ -- result = '"' .. result .. '"'
+ -- end
if binary and binary ~= "" then
- quotedresult = binary .. " " .. quotedresult
+ result = binary .. " " .. result
end
- local command = quotedresult .. " " .. environment.reconstructcommandline(environment.arguments_after,noquote)
+ local command = result .. " " .. environment.reconstructcommandline(environment.arguments_after,noquote)
if e_verbose then
report()
report("executing: %s",command)
@@ -14606,7 +15923,7 @@ function runners.execute_program(fullname)
local before, after = environment.splitarguments(fullname)
for k=1,#after do after[k] = resolvers.resolve(after[k]) end
environment.initializearguments(after)
- fullname = fullname:gsub("^bin:","")
+ fullname = gsub(fullname,"^bin:","")
local command = fullname .. " " .. (environment.reconstructcommandline(after or "",noquote) or "")
report()
report("executing: %s",command)
@@ -14667,10 +15984,24 @@ function runners.resolve_string(filename)
end
end
-function runners.locate_file(filename)
- -- differs from texmfstart where locate appends .com .exe .bat ... todo
+-- differs from texmfstart where locate appends .com .exe .bat ... todo
+
+function runners.locate_file(filename) -- was given file but only searches in tree
if filename and filename ~= "" then
- runners.report_location(resolvers.findgivenfile(filename))
+ if environment.argument("first") then
+ runners.report_location(resolvers.findfile(filename))
+ -- resolvers.dowithfilesandreport(resolvers.findfile,filename)
+ elseif environment.argument("all") then
+ local result, status = resolvers.findfiles(filename)
+ if status and environment.argument("detail") then
+ runners.report_location(status)
+ else
+ runners.report_location(result)
+ end
+ else
+ runners.report_location(resolvers.findgivenfile(filename))
+ -- resolvers.dowithfilesandreport(resolvers.findgivenfile,filename)
+ end
end
end
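+
+-- corresponding command line usage (the filename is just a placeholder,
+-- cf. the --locate help entry above):
+--
+--   mtxrun --locate foo.tex                 -- database lookup (findgivenfile)
+--   mtxrun --locate --first foo.tex         -- first match via findfile
+--   mtxrun --locate --all --detail foo.tex  -- all matches plus lookup method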
@@ -14679,12 +16010,12 @@ function runners.locate_platform()
end
function runners.report_location(result)
- if e_verbose then
- reportline()
- if result and result ~= "" then
- report(result)
- else
- report("not found")
+ if type(result) == "table" then
+ for i=1,#result do
+ if i > 1 then
+ io.write("\n")
+ end
+ io.write(result[i])
end
else
io.write(result)
@@ -14742,8 +16073,8 @@ function resolvers.launch(str)
end
function runners.launch_file(filename)
- instance.allresults = true
trackers.enable("resolvers.locating")
+ local allresults = environment.arguments["all"]
local pattern = environment.arguments["pattern"]
if not pattern or pattern == "" then
pattern = filename
@@ -14751,15 +16082,15 @@ function runners.launch_file(filename)
if not pattern or pattern == "" then
report("provide name or --pattern=")
else
- local t = resolvers.findfiles(pattern)
+ local t = resolvers.findfiles(pattern,nil,allresults)
if not t or #t == 0 then
- t = resolvers.findfiles("*/" .. pattern)
+ t = resolvers.findfiles("*/" .. pattern,nil,allresults)
end
if not t or #t == 0 then
- t = resolvers.findfiles("*/" .. pattern .. "*")
+ t = resolvers.findfiles("*/" .. pattern .. "*",nil,allresults)
end
if t and #t > 0 then
- if environment.arguments["all"] then
+ if allresults then
for _, v in pairs(t) do
report("launching %s", v)
resolvers.launch(v)
@@ -14774,6 +16105,11 @@ function runners.launch_file(filename)
end
end
+local mtxprefixes = {
+ { "^mtx%-", "mtx-" },
+ { "^mtx%-t%-", "mtx-t-" },
+}
+
function runners.find_mtx_script(filename)
local function found(name)
local path = file.dirname(name)
@@ -14799,24 +16135,27 @@ function runners.find_mtx_script(filename)
return fullname
end
-- mtx- prefix checking
- local mtxprefix = (filename:find("^mtx%-") and "") or "mtx-"
- -- context namespace, mtx-<filename>
- fullname = mtxprefix .. filename
- fullname = found(fullname) or resolvers.findfile(fullname)
- if fullname and fullname ~= "" then
- return fullname
- end
- -- context namespace, mtx-<filename>s
- fullname = mtxprefix .. basename .. "s" .. "." .. suffix
- fullname = found(fullname) or resolvers.findfile(fullname)
- if fullname and fullname ~= "" then
- return fullname
- end
- -- context namespace, mtx-<filename minus trailing s>
- fullname = mtxprefix .. basename:gsub("s$","") .. "." .. suffix
- fullname = found(fullname) or resolvers.findfile(fullname)
- if fullname and fullname ~= "" then
- return fullname
+ for i=1,#mtxprefixes do
+ local mtxprefix = mtxprefixes[i]
+ mtxprefix = find(filename,mtxprefix[1]) and "" or mtxprefix[2]
+ -- context namespace, mtx-<filename>
+ fullname = mtxprefix .. filename
+ fullname = found(fullname) or resolvers.findfile(fullname)
+ if fullname and fullname ~= "" then
+ return fullname
+ end
+ -- context namespace, mtx-<filename>s
+ fullname = mtxprefix .. basename .. "s" .. "." .. suffix
+ fullname = found(fullname) or resolvers.findfile(fullname)
+ if fullname and fullname ~= "" then
+ return fullname
+ end
+ -- context namespace, mtx-<filename minus trailing s>
+ fullname = mtxprefix .. gsub(basename,"s$","") .. "." .. suffix
+ fullname = found(fullname) or resolvers.findfile(fullname)
+ if fullname and fullname ~= "" then
+ return fullname
+ end
end
-- context namespace, just <filename>
fullname = resolvers.findfile(filename)
@@ -14963,9 +16302,11 @@ instance.lsrmode = environment.argument("lsr") or false
local is_mkii_stub = runners.registered[file.removesuffix(file.basename(filename))]
-if environment.argument("usekpse") or environment.argument("forcekpse") or is_mkii_stub then
+local e_argument = environment.argument
+
+if e_argument("usekpse") or e_argument("forcekpse") or is_mkii_stub then
- resolvers.load_tree(environment.argument('tree'),true) -- force resolve of TEXMFCNF
+ resolvers.load_tree(e_argument('tree'),true) -- force resolve of TEXMFCNF
os.setenv("engine","")
os.setenv("progname","")
@@ -14978,7 +16319,7 @@ if environment.argument("usekpse") or environment.argument("forcekpse") or is_mk
other = "other text files",
}
- local progname = environment.argument("progname") or 'context'
+ local progname = e_argument("progname") or 'context'
local function kpse_initialized()
texconfig.kpse_init = true
@@ -14993,7 +16334,7 @@ if environment.argument("usekpse") or environment.argument("forcekpse") or is_mk
local findfile = resolvers.findfile
local showpath = resolvers.showpath
- if environment.argument("forcekpse") then
+ if e_argument("forcekpse") then
function resolvers.findfile(name,kind)
return (kpse_initialized():find_file(resolvers.cleanpath(name),(kind ~= "" and (remapper[kind] or kind)) or "tex") or "") or ""
@@ -15002,7 +16343,7 @@ if environment.argument("usekpse") or environment.argument("forcekpse") or is_mk
return (kpse_initialized():show_path(name)) or ""
end
- elseif environment.argument("usekpse") or is_mkii_stub then
+ elseif e_argument("usekpse") or is_mkii_stub then
resolvers.load()
@@ -15041,12 +16382,12 @@ else
end
end
- resolvers.load_tree(environment.argument('tree'))
+ resolvers.load_tree(e_argument('tree'),e_argument("resolve"))
end
-if environment.argument("selfmerge") then
+if e_argument("selfmerge") then
-- embed used libraries
@@ -15056,27 +16397,27 @@ if environment.argument("selfmerge") then
utilities.merger.selfmerge(own.name,own.libs,{ found })
end
-elseif environment.argument("selfclean") then
+elseif e_argument("selfclean") then
-- remove embedded libraries
runners.loadbase()
utilities.merger.selfclean(own.name)
-elseif environment.argument("selfupdate") then
+elseif e_argument("selfupdate") then
runners.loadbase()
trackers.enable("resolvers.locating")
resolvers.updatescript(own.name,"mtxrun")
-elseif environment.argument("ctxlua") or environment.argument("internal") then
+elseif e_argument("ctxlua") or e_argument("internal") then
-- run a script by loading it (using libs)
runners.loadbase()
ok = runners.execute_script(filename,true)
-elseif environment.argument("script") or environment.argument("scripts") then
+elseif e_argument("script") or e_argument("scripts") then
-- run a script by loading it (using libs), pass args
@@ -15087,109 +16428,110 @@ elseif environment.argument("script") or environment.argument("scripts") then
ok = runners.execute_ctx_script(filename)
end
-elseif environment.argument("execute") then
+elseif e_argument("execute") then
-- execute script
runners.loadbase()
ok = runners.execute_script(filename)
-elseif environment.argument("direct") then
+elseif e_argument("direct") then
-- equals bin:
runners.loadbase()
ok = runners.execute_program(filename)
-elseif environment.argument("edit") then
+elseif e_argument("edit") then
-- edit file
runners.loadbase()
runners.edit_script(filename)
-elseif environment.argument("launch") then
+elseif e_argument("launch") then
runners.loadbase()
runners.launch_file(filename)
-elseif environment.argument("makestubs") then
+elseif e_argument("makestubs") then
 -- make stubs (deprecated)
runners.handle_stubs(true)
-elseif environment.argument("removestubs") then
+elseif e_argument("removestubs") then
 -- remove stubs (deprecated)
runners.loadbase()
runners.handle_stubs(false)
-elseif environment.argument("resolve") then
+elseif e_argument("resolve") then
-- resolve string
runners.loadbase()
runners.resolve_string(filename)
-elseif environment.argument("locate") then
+elseif e_argument("locate") then
- -- locate file
+ -- locate file (only database)
runners.loadbase()
runners.locate_file(filename)
-elseif environment.argument("platform") or environment.argument("show-platform") then
+elseif e_argument("platform") or e_argument("show-platform") then
-- locate platform
runners.loadbase()
runners.locate_platform()
-elseif environment.argument("prefixes") then
+elseif e_argument("prefixes") then
runners.loadbase()
runners.prefixes()
-elseif environment.argument("timedrun") then
+elseif e_argument("timedrun") then
-- locate platform
runners.loadbase()
runners.timedrun(filename)
-elseif environment.argument("variables") or environment.argument("show-variables") or environment.argument("expansions") or environment.argument("show-expansions") then
+elseif e_argument("variables") or e_argument("show-variables") or e_argument("expansions") or e_argument("show-expansions") then
-- luatools: runners.execute_ctx_script("mtx-base","--expansions",filename)
resolvers.load("nofiles")
- resolvers.listers.variables(environment.argument("pattern"))
+ resolvers.listers.variables(e_argument("pattern"))
-elseif environment.argument("configurations") or environment.argument("show-configurations") then
+elseif e_argument("configurations") or e_argument("show-configurations") then
-- luatools: runners.execute_ctx_script("mtx-base","--configurations",filename)
resolvers.load("nofiles")
resolvers.listers.configurations()
-elseif environment.argument("find-file") then
+elseif e_argument("find-file") then
-- luatools: runners.execute_ctx_script("mtx-base","--find-file",filename)
resolvers.load()
- local e_pattern = environment.argument("pattern")
- local e_format = environment.argument("format")
+ local e_all = e_argument("all")
+ local e_pattern = e_argument("pattern")
+ local e_format = e_argument("format")
+ local finder = e_all and resolvers.findfiles or resolvers.findfile
if not e_pattern then
runners.register_arguments(filename)
environment.initializearguments(environment.arguments_after)
- resolvers.dowithfilesandreport(resolvers.findfiles,environment.files,e_format)
+ resolvers.dowithfilesandreport(finder,environment.files,e_format)
elseif type(e_pattern) == "string" then
- instance.allresults = true -- brrrr
- resolvers.dowithfilesandreport(resolvers.findfiles,{ e_pattern }, e_format)
+ resolvers.dowithfilesandreport(finder,{ e_pattern },e_format)
end
-elseif environment.argument("find-path") then
+elseif e_argument("find-path") then
-- luatools: runners.execute_ctx_script("mtx-base","--find-path",filename)
@@ -15201,7 +16543,7 @@ elseif environment.argument("find-path") then
print(path)
end
-elseif environment.argument("expand-braces") then
+elseif e_argument("expand-braces") then
-- luatools: runners.execute_ctx_script("mtx-base","--expand-braces",filename)
@@ -15210,7 +16552,7 @@ elseif environment.argument("expand-braces") then
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.expandbraces, environment.files)
-elseif environment.argument("expand-path") then
+elseif e_argument("expand-path") then
-- luatools: runners.execute_ctx_script("mtx-base","--expand-path",filename)
@@ -15219,7 +16561,7 @@ elseif environment.argument("expand-path") then
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.expandpath, environment.files)
-elseif environment.argument("expand-var") or environment.argument("expand-variable") then
+elseif e_argument("expand-var") or e_argument("expand-variable") then
-- luatools: runners.execute_ctx_script("mtx-base","--expand-var",filename)
@@ -15228,7 +16570,7 @@ elseif environment.argument("expand-var") or environment.argument("expand-variab
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.expansion, environment.files)
-elseif environment.argument("show-path") or environment.argument("path-value") then
+elseif e_argument("show-path") or e_argument("path-value") then
-- luatools: runners.execute_ctx_script("mtx-base","--show-path",filename)
@@ -15237,7 +16579,7 @@ elseif environment.argument("show-path") or environment.argument("path-value") t
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.showpath, environment.files)
-elseif environment.argument("var-value") or environment.argument("show-value") then
+elseif e_argument("var-value") or e_argument("show-value") then
-- luatools: runners.execute_ctx_script("mtx-base","--show-value",filename)
@@ -15246,28 +16588,36 @@ elseif environment.argument("var-value") or environment.argument("show-value") t
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.variable,environment.files)
-elseif environment.argument("format-path") then
+elseif e_argument("format-path") then
-- luatools: runners.execute_ctx_script("mtx-base","--format-path",filename)
resolvers.load()
report(caches.getwritablepath("format"))
-elseif environment.argument("pattern") then
+elseif e_argument("pattern") then
-- luatools
- runners.execute_ctx_script("mtx-base","--pattern='" .. environment.argument("pattern") .. "'",filename)
+ runners.execute_ctx_script("mtx-base","--pattern='" .. e_argument("pattern") .. "'",filename)
-elseif environment.argument("generate") then
+elseif e_argument("generate") then
-- luatools
- instance.renewcache = true
- trackers.enable("resolvers.locating")
- resolvers.load()
+ if filename and filename ~= "" then
+ resolvers.load("nofiles")
+ trackers.enable("resolvers.locating")
+ resolvers.renew(filename)
+ else
+ instance.renewcache = true
+ trackers.enable("resolvers.locating")
+ resolvers.load()
+ end
-elseif environment.argument("make") or environment.argument("ini") or environment.argument("compile") then
+ e_verbose = true
+
+elseif e_argument("make") or e_argument("ini") or e_argument("compile") then
-- luatools: runners.execute_ctx_script("mtx-base","--make",filename)
@@ -15275,29 +16625,33 @@ elseif environment.argument("make") or environment.argument("ini") or environmen
trackers.enable("resolvers.locating")
environment.make_format(filename)
-elseif environment.argument("run") then
+elseif e_argument("run") then
-- luatools
runners.execute_ctx_script("mtx-base","--run",filename)
-elseif environment.argument("fmt") then
+elseif e_argument("fmt") then
-- luatools
runners.execute_ctx_script("mtx-base","--fmt",filename)
-elseif environment.argument("help") and filename=='base' then
+elseif e_argument("help") and filename=='base' then
-- luatools
runners.execute_ctx_script("mtx-base","--help")
-elseif environment.argument("help") or filename=='help' or filename == "" then
+elseif e_argument("version") then
+
+ application.version()
+
+elseif e_argument("help") or filename=='help' or filename == "" then
application.help()
-elseif filename:find("^bin:") then
+elseif find(filename,"^bin:") then
runners.loadbase()
ok = runners.execute_program(filename)
diff --git a/Master/bin/i386-solaris/mtxrun b/Master/bin/i386-solaris/mtxrun
index 7adcd3023a8..335c4fcb9e0 100755
--- a/Master/bin/i386-solaris/mtxrun
+++ b/Master/bin/i386-solaris/mtxrun
@@ -160,509 +160,6 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['l-lpeg'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local lpeg = require("lpeg")
-
-local type = type
-
--- Beware, we predefine a bunch of patterns here and one reason for doing so
--- is that we get consistent behaviour in some of the visualizers.
-
-lpeg.patterns = lpeg.patterns or { } -- so that we can share
-local patterns = lpeg.patterns
-
-local P, R, S, V, match = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.match
-local Ct, C, Cs, Cc = lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc
-local lpegtype = lpeg.type
-
-local utfcharacters = string.utfcharacters
-local utfgmatch = unicode and unicode.utf8.gmatch
-
-local anything = P(1)
-local endofstring = P(-1)
-local alwaysmatched = P(true)
-
-patterns.anything = anything
-patterns.endofstring = endofstring
-patterns.beginofstring = alwaysmatched
-patterns.alwaysmatched = alwaysmatched
-
-local digit, sign = R('09'), S('+-')
-local cr, lf, crlf = P("\r"), P("\n"), P("\r\n")
-local newline = crlf + cr + lf
-local escaped = P("\\") * anything
-local squote = P("'")
-local dquote = P('"')
-local space = P(" ")
-
-local utfbom_32_be = P('\000\000\254\255')
-local utfbom_32_le = P('\255\254\000\000')
-local utfbom_16_be = P('\255\254')
-local utfbom_16_le = P('\254\255')
-local utfbom_8 = P('\239\187\191')
-local utfbom = utfbom_32_be + utfbom_32_le
- + utfbom_16_be + utfbom_16_le
- + utfbom_8
-local utftype = utfbom_32_be / "utf-32-be" + utfbom_32_le / "utf-32-le"
- + utfbom_16_be / "utf-16-be" + utfbom_16_le / "utf-16-le"
- + utfbom_8 / "utf-8" + alwaysmatched / "unknown"
-
-local utf8next = R("\128\191")
-
-patterns.utf8one = R("\000\127")
-patterns.utf8two = R("\194\223") * utf8next
-patterns.utf8three = R("\224\239") * utf8next * utf8next
-patterns.utf8four = R("\240\244") * utf8next * utf8next * utf8next
-patterns.utfbom = utfbom
-patterns.utftype = utftype
-
-local utf8char = patterns.utf8one + patterns.utf8two + patterns.utf8three + patterns.utf8four
-local validutf8char = utf8char^0 * endofstring * Cc(true) + Cc(false)
-
-patterns.utf8 = utf8char
-patterns.utf8char = utf8char
-patterns.validutf8 = validutf8char
-patterns.validutf8char = validutf8char
-
-patterns.digit = digit
-patterns.sign = sign
-patterns.cardinal = sign^0 * digit^1
-patterns.integer = sign^0 * digit^1
-patterns.float = sign^0 * digit^0 * P('.') * digit^1
-patterns.cfloat = sign^0 * digit^0 * P(',') * digit^1
-patterns.number = patterns.float + patterns.integer
-patterns.cnumber = patterns.cfloat + patterns.integer
-patterns.oct = P("0") * R("07")^1
-patterns.octal = patterns.oct
-patterns.HEX = P("0x") * R("09","AF")^1
-patterns.hex = P("0x") * R("09","af")^1
-patterns.hexadecimal = P("0x") * R("09","AF","af")^1
-patterns.lowercase = R("az")
-patterns.uppercase = R("AZ")
-patterns.letter = patterns.lowercase + patterns.uppercase
-patterns.space = space
-patterns.tab = P("\t")
-patterns.spaceortab = patterns.space + patterns.tab
-patterns.eol = S("\n\r")
-patterns.spacer = S(" \t\f\v") -- + string.char(0xc2, 0xa0) if we want utf (cf mail roberto)
-patterns.newline = newline
-patterns.emptyline = newline^1
-patterns.nonspacer = 1 - patterns.spacer
-patterns.whitespace = patterns.eol + patterns.spacer
-patterns.nonwhitespace = 1 - patterns.whitespace
-patterns.equal = P("=")
-patterns.comma = P(",")
-patterns.commaspacer = P(",") * patterns.spacer^0
-patterns.period = P(".")
-patterns.colon = P(":")
-patterns.semicolon = P(";")
-patterns.underscore = P("_")
-patterns.escaped = escaped
-patterns.squote = squote
-patterns.dquote = dquote
-patterns.nosquote = (escaped + (1-squote))^0
-patterns.nodquote = (escaped + (1-dquote))^0
-patterns.unsingle = (squote/"") * patterns.nosquote * (squote/"")
-patterns.undouble = (dquote/"") * patterns.nodquote * (dquote/"")
-patterns.unquoted = patterns.undouble + patterns.unsingle -- more often undouble
-patterns.unspacer = ((patterns.spacer^1)/"")^0
-
-patterns.somecontent = (anything - newline - space)^1 -- (utf8char - newline - space)^1
-patterns.beginline = #(1-newline)
-
-local unquoted = Cs(patterns.unquoted * endofstring) -- not C
-
-function string.unquoted(str)
- return match(unquoted,str) or str
-end
-
-
-function lpeg.anywhere(pattern) --slightly adapted from website
- return P { P(pattern) + 1 * V(1) } -- why so complex?
-end
-
-function lpeg.splitter(pattern, action)
- return (((1-P(pattern))^1)/action+1)^0
-end
-
-local splitters_s, splitters_m = { }, { }
-
-local function splitat(separator,single)
- local splitter = (single and splitters_s[separator]) or splitters_m[separator]
- if not splitter then
- separator = P(separator)
- local other = C((1 - separator)^0)
- if single then
- local any = anything
- splitter = other * (separator * C(any^0) + "") -- ?
- splitters_s[separator] = splitter
- else
- splitter = other * (separator * other)^0
- splitters_m[separator] = splitter
- end
- end
- return splitter
-end
-
-lpeg.splitat = splitat
-
-
-local cache = { }
-
-function lpeg.split(separator,str)
- local c = cache[separator]
- if not c then
- c = Ct(splitat(separator))
- cache[separator] = c
- end
- return match(c,str)
-end
-
-function string.split(str,separator)
- local c = cache[separator]
- if not c then
- c = Ct(splitat(separator))
- cache[separator] = c
- end
- return match(c,str)
-end
-
-local spacing = patterns.spacer^0 * newline -- sort of strip
-local empty = spacing * Cc("")
-local nonempty = Cs((1-spacing)^1) * spacing^-1
-local content = (empty + nonempty)^1
-
-patterns.textline = content
-
-
-local linesplitter = Ct(splitat(newline))
-
-patterns.linesplitter = linesplitter
-
-function string.splitlines(str)
- return match(linesplitter,str)
-end
-
-local utflinesplitter = utfbom^-1 * Ct(splitat(newline))
-
-patterns.utflinesplitter = utflinesplitter
-
-function string.utfsplitlines(str)
- return match(utflinesplitter,str)
-end
-
-
-local cache = { }
-
-function lpeg.checkedsplit(separator,str)
- local c = cache[separator]
- if not c then
- separator = P(separator)
- local other = C((1 - separator)^1)
- c = Ct(separator^0 * other * (separator^1 * other)^0)
- cache[separator] = c
- end
- return match(c,str)
-end
-
-function string.checkedsplit(str,separator)
- local c = cache[separator]
- if not c then
- separator = P(separator)
- local other = C((1 - separator)^1)
- c = Ct(separator^0 * other * (separator^1 * other)^0)
- cache[separator] = c
- end
- return match(c,str)
-end
-
-
-local f1 = string.byte
-
-local function f2(s) local c1, c2 = f1(s,1,2) return c1 * 64 + c2 - 12416 end
-local function f3(s) local c1, c2, c3 = f1(s,1,3) return (c1 * 64 + c2) * 64 + c3 - 925824 end
-local function f4(s) local c1, c2, c3, c4 = f1(s,1,4) return ((c1 * 64 + c2) * 64 + c3) * 64 + c4 - 63447168 end
-
-local utf8byte = patterns.utf8one/f1 + patterns.utf8two/f2 + patterns.utf8three/f3 + patterns.utf8four/f4
-
-patterns.utf8byte = utf8byte
-
-
-
-local cache = { }
-
-function lpeg.stripper(str)
- if type(str) == "string" then
- local s = cache[str]
- if not s then
- s = Cs(((S(str)^1)/"" + 1)^0)
- cache[str] = s
- end
- return s
- else
- return Cs(((str^1)/"" + 1)^0)
- end
-end
-
-local cache = { }
-
-function lpeg.keeper(str)
- if type(str) == "string" then
- local s = cache[str]
- if not s then
- s = Cs((((1-S(str))^1)/"" + 1)^0)
- cache[str] = s
- end
- return s
- else
- return Cs((((1-str)^1)/"" + 1)^0)
- end
-end
-
-function lpeg.frontstripper(str) -- or pattern (yet undocumented)
- return (P(str) + P(true)) * Cs(P(1)^0)
-end
-
-function lpeg.endstripper(str) -- or pattern (yet undocumented)
- return Cs((1 - P(str) * P(-1))^0)
-end
-
--- Just for fun I looked at the used bytecode and
--- p = (p and p + pp) or pp gets one more (testset).
-
-function lpeg.replacer(one,two)
- if type(one) == "table" then
- local no = #one
- if no > 0 then
- local p
- for i=1,no do
- local o = one[i]
- local pp = P(o[1]) / o[2]
- if p then
- p = p + pp
- else
- p = pp
- end
- end
- return Cs((p + 1)^0)
- end
- else
- two = two or ""
- return Cs((P(one)/two + 1)^0)
- end
-end
-
-local splitters_f, splitters_s = { }, { }
-
-function lpeg.firstofsplit(separator) -- always return value
- local splitter = splitters_f[separator]
- if not splitter then
- separator = P(separator)
- splitter = C((1 - separator)^0)
- splitters_f[separator] = splitter
- end
- return splitter
-end
-
-function lpeg.secondofsplit(separator) -- nil if not split
- local splitter = splitters_s[separator]
- if not splitter then
- separator = P(separator)
- splitter = (1 - separator)^0 * separator * C(anything^0)
- splitters_s[separator] = splitter
- end
- return splitter
-end
-
-function lpeg.balancer(left,right)
- left, right = P(left), P(right)
- return P { left * ((1 - left - right) + V(1))^0 * right }
-end
-
-
-
-local nany = utf8char/""
-
-function lpeg.counter(pattern)
- pattern = Cs((P(pattern)/" " + nany)^0)
- return function(str)
- return #match(pattern,str)
- end
-end
-
-if utfgmatch then
-
- function lpeg.count(str,what) -- replaces string.count
- if type(what) == "string" then
- local n = 0
- for _ in utfgmatch(str,what) do
- n = n + 1
- end
- return n
- else -- 4 times slower but still faster than / function
- return #match(Cs((P(what)/" " + nany)^0),str)
- end
- end
-
-else
-
- local cache = { }
-
- function lpeg.count(str,what) -- replaces string.count
- if type(what) == "string" then
- local p = cache[what]
- if not p then
- p = Cs((P(what)/" " + nany)^0)
- cache[p] = p
- end
- return #match(p,str)
- else -- 4 times slower but still faster than / function
- return #match(Cs((P(what)/" " + nany)^0),str)
- end
- end
-
-end
-
-local patterns_escapes = { -- also defines in l-string
- ["%"] = "%%",
- ["."] = "%.",
- ["+"] = "%+", ["-"] = "%-", ["*"] = "%*",
- ["["] = "%[", ["]"] = "%]",
- ["("] = "%)", [")"] = "%)",
- -- ["{"] = "%{", ["}"] = "%}"
- -- ["^"] = "%^", ["$"] = "%$",
-}
-
-local simple_escapes = { -- also defines in l-string
- ["-"] = "%-",
- ["."] = "%.",
- ["?"] = ".",
- ["*"] = ".*",
-}
-
-local p = Cs((S("-.+*%()[]") / patterns_escapes + anything)^0)
-local s = Cs((S("-.+*%()[]") / simple_escapes + anything)^0)
-
-function string.escapedpattern(str,simple)
- return match(simple and s or p,str)
-end
-
--- utf extensies
-
-lpeg.UP = lpeg.P
-
-if utfcharacters then
-
- function lpeg.US(str)
- local p
- for uc in utfcharacters(str) do
- if p then
- p = p + P(uc)
- else
- p = P(uc)
- end
- end
- return p
- end
-
-
-elseif utfgmatch then
-
- function lpeg.US(str)
- local p
- for uc in utfgmatch(str,".") do
- if p then
- p = p + P(uc)
- else
- p = P(uc)
- end
- end
- return p
- end
-
-else
-
- function lpeg.US(str)
- local p
- local f = function(uc)
- if p then
- p = p + P(uc)
- else
- p = P(uc)
- end
- end
- match((utf8char/f)^0,str)
- return p
- end
-
-end
-
-local range = Cs(utf8byte) * (Cs(utf8byte) + Cc(false))
-
-local utfchar = unicode and unicode.utf8 and unicode.utf8.char
-
-function lpeg.UR(str,more)
- local first, last
- if type(str) == "number" then
- first = str
- last = more or first
- else
- first, last = match(range,str)
- if not last then
- return P(str)
- end
- end
- if first == last then
- return P(str)
- elseif utfchar and last - first < 8 then -- a somewhat arbitrary criterium
- local p
- for i=first,last do
- if p then
- p = p + P(utfchar(i))
- else
- p = P(utfchar(i))
- end
- end
- return p -- nil when invalid range
- else
- local f = function(b)
- return b >= first and b <= last
- end
- return utf8byte / f -- nil when invalid range
- end
-end
-
-
-
-function lpeg.oneof(list,...) -- lpeg.oneof("elseif","else","if","then")
- if type(list) ~= "table" then
- list = { list, ... }
- end
- -- sort(list) -- longest match first
- local p = P(list[1])
- for l=2,#list do
- p = p + P(list[l])
- end
- return p
-end
-
-function lpeg.is_lpeg(p)
- return p and lpegtype(p) == "pattern"
-end
-
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
if not modules then modules = { } end modules ['l-table'] = {
version = 1.001,
comment = "companion to luat-lib.mkiv",
@@ -675,11 +172,12 @@ local type, next, tostring, tonumber, ipairs, table, string = type, next, tostri
local concat, sort, insert, remove = table.concat, table.sort, table.insert, table.remove
local format, find, gsub, lower, dump, match = string.format, string.find, string.gsub, string.lower, string.dump, string.match
local getmetatable, setmetatable = getmetatable, setmetatable
+local getinfo = debug.getinfo
-- Starting with version 5.2 Lua no longer provide ipairs, which makes
-- sense. As we already used the for loop and # in most places the
-- impact on ConTeXt was not that large; the remaining ipairs already
--- have been replaced. In a similar fashio we also hardly used pairs.
+-- have been replaced. In a similar fashion we also hardly used pairs.
--
-- Just in case, we provide the fallbacks as discussed in Programming
-- in Lua (http://www.lua.org/pil/7.3.html):
@@ -1022,6 +520,8 @@ end
-- problem: there is no good number_to_string converter with the best resolution
+local function dummy() end
+
local function do_serialize(root,name,depth,level,indexed)
if level > 0 then
depth = depth .. " "
@@ -1214,19 +714,20 @@ local function do_serialize(root,name,depth,level,indexed)
end
elseif t == "function" then
if functions then
+ local f = getinfo(v).what == "C" and dump(dummy) or dump(v)
+ -- local f = getinfo(v).what == "C" and dump(function(...) return v(...) end) or dump(v)
if tk == "number" then -- or find(k,"^%d+$") then
if hexify then
- handle(format("%s [0x%04X]=loadstring(%q),",depth,k,dump(v)))
+ handle(format("%s [0x%04X]=loadstring(%q),",depth,k,f))
else
- handle(format("%s [%s]=loadstring(%q),",depth,k,dump(v)))
+ handle(format("%s [%s]=loadstring(%q),",depth,k,f))
end
elseif tk == "boolean" then
- handle(format("%s [%s]=loadstring(%q),",depth,tostring(k),dump(v)))
+ handle(format("%s [%s]=loadstring(%q),",depth,tostring(k),f))
elseif noquotes and not reserved[k] and find(k,"^%a[%w%_]*$") then
- handle(format("%s %s=loadstring(%q),",depth,k,dump(v)))
+ handle(format("%s %s=loadstring(%q),",depth,k,f))
else
- -- handle(format("%s [%q]=loadstring(%q),",depth,k,dump(v)))
- handle(format("%s [%q]=loadstring(%q),",depth,k,debug.getinfo(v).what == "C" and "C code" or dump(v)))
+ handle(format("%s [%q]=loadstring(%q),",depth,k,f))
end
end
else
@@ -1571,6 +1072,730 @@ function table.has_one_entry(t)
return t and not next(t,next(t))
end
+-- new
+
+function table.loweredkeys(t) -- maybe utf
+ local l = { }
+ for k, v in next, t do
+ l[lower(k)] = v
+ end
+ return l
+end
+
+-- new, might move (maybe duplicate)
+
+function table.unique(old)
+ local hash = { }
+ local new = { }
+ local n = 0
+ for i=1,#old do
+ local oi = old[i]
+ if not hash[oi] then
+ n = n + 1
+ new[n] = oi
+ hash[oi] = true
+ end
+ end
+ return new
+end
+
+-- function table.sorted(t,...)
+-- table.sort(t,...)
+-- return t -- still sorts in-place
+-- end
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+if not modules then modules = { } end modules ['l-lpeg'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+
+-- a new lpeg fails on a #(1-P(":")) test and really needs a + P(-1)
+
+local lpeg = require("lpeg")
+
+-- tracing (only used when we encounter a problem in integration of lpeg in luatex)
+
+local report = texio and texio.write_nl or print
+
+
+
+
+local type = type
+local byte, char, gmatch = string.byte, string.char, string.gmatch
+
+-- Beware, we predefine a bunch of patterns here and one reason for doing so
+-- is that we get consistent behaviour in some of the visualizers.
+
+lpeg.patterns = lpeg.patterns or { } -- so that we can share
+local patterns = lpeg.patterns
+
+local P, R, S, V, match = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.match
+local Ct, C, Cs, Cc = lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc
+local lpegtype = lpeg.type
+
+local utfcharacters = string.utfcharacters
+local utfgmatch = unicode and unicode.utf8.gmatch
+
+local anything = P(1)
+local endofstring = P(-1)
+local alwaysmatched = P(true)
+
+patterns.anything = anything
+patterns.endofstring = endofstring
+patterns.beginofstring = alwaysmatched
+patterns.alwaysmatched = alwaysmatched
+
+local digit, sign = R('09'), S('+-')
+local cr, lf, crlf = P("\r"), P("\n"), P("\r\n")
+local newline = crlf + S("\r\n") -- cr + lf
+local escaped = P("\\") * anything
+local squote = P("'")
+local dquote = P('"')
+local space = P(" ")
+
+local utfbom_32_be = P('\000\000\254\255')
+local utfbom_32_le = P('\255\254\000\000')
+local utfbom_16_be = P('\255\254')
+local utfbom_16_le = P('\254\255')
+local utfbom_8 = P('\239\187\191')
+local utfbom = utfbom_32_be + utfbom_32_le
+ + utfbom_16_be + utfbom_16_le
+ + utfbom_8
+local utftype = utfbom_32_be / "utf-32-be" + utfbom_32_le / "utf-32-le"
+ + utfbom_16_be / "utf-16-be" + utfbom_16_le / "utf-16-le"
+ + utfbom_8 / "utf-8" + alwaysmatched / "unknown"
+
+local utf8next = R("\128\191")
+
+patterns.utf8one = R("\000\127")
+patterns.utf8two = R("\194\223") * utf8next
+patterns.utf8three = R("\224\239") * utf8next * utf8next
+patterns.utf8four = R("\240\244") * utf8next * utf8next * utf8next
+patterns.utfbom = utfbom
+patterns.utftype = utftype
+
+local utf8char = patterns.utf8one + patterns.utf8two + patterns.utf8three + patterns.utf8four
+local validutf8char = utf8char^0 * endofstring * Cc(true) + Cc(false)
+
+patterns.utf8 = utf8char
+patterns.utf8char = utf8char
+patterns.validutf8 = validutf8char
+patterns.validutf8char = validutf8char
+
+patterns.digit = digit
+patterns.sign = sign
+patterns.cardinal = sign^0 * digit^1
+patterns.integer = sign^0 * digit^1
+patterns.float = sign^0 * digit^0 * P('.') * digit^1
+patterns.cfloat = sign^0 * digit^0 * P(',') * digit^1
+patterns.number = patterns.float + patterns.integer
+patterns.cnumber = patterns.cfloat + patterns.integer
+patterns.oct = P("0") * R("07")^1
+patterns.octal = patterns.oct
+patterns.HEX = P("0x") * R("09","AF")^1
+patterns.hex = P("0x") * R("09","af")^1
+patterns.hexadecimal = P("0x") * R("09","AF","af")^1
+patterns.lowercase = R("az")
+patterns.uppercase = R("AZ")
+patterns.letter = patterns.lowercase + patterns.uppercase
+patterns.space = space
+patterns.tab = P("\t")
+patterns.spaceortab = patterns.space + patterns.tab
+patterns.eol = S("\n\r")
+patterns.spacer = S(" \t\f\v") -- + char(0xc2, 0xa0) if we want utf (cf mail roberto)
+patterns.newline = newline
+patterns.emptyline = newline^1
+patterns.nonspacer = 1 - patterns.spacer
+patterns.whitespace = patterns.eol + patterns.spacer
+patterns.nonwhitespace = 1 - patterns.whitespace
+patterns.equal = P("=")
+patterns.comma = P(",")
+patterns.commaspacer = P(",") * patterns.spacer^0
+patterns.period = P(".")
+patterns.colon = P(":")
+patterns.semicolon = P(";")
+patterns.underscore = P("_")
+patterns.escaped = escaped
+patterns.squote = squote
+patterns.dquote = dquote
+patterns.nosquote = (escaped + (1-squote))^0
+patterns.nodquote = (escaped + (1-dquote))^0
+patterns.unsingle = (squote/"") * patterns.nosquote * (squote/"")
+patterns.undouble = (dquote/"") * patterns.nodquote * (dquote/"")
+patterns.unquoted = patterns.undouble + patterns.unsingle -- more often undouble
+patterns.unspacer = ((patterns.spacer^1)/"")^0
+
+patterns.somecontent = (anything - newline - space)^1 -- (utf8char - newline - space)^1
+patterns.beginline = #(1-newline)
+
+-- print(string.unquoted("test"))
+-- print(string.unquoted([["t\"est"]]))
+-- print(string.unquoted([["t\"est"x]]))
+-- print(string.unquoted("\'test\'"))
+-- print(string.unquoted('"test"'))
+-- print(string.unquoted('"test"'))
+
+function lpeg.anywhere(pattern) --slightly adapted from website
+ return P { P(pattern) + 1 * V(1) } -- why so complex?
+end
+
+function lpeg.splitter(pattern, action)
+ return (((1-P(pattern))^1)/action+1)^0
+end
+
+function lpeg.tsplitter(pattern, action)
+ return Ct((((1-P(pattern))^1)/action+1)^0)
+end
+
+-- problem: separator can be an lpeg pattern and that does not hash too well, but
+-- it's quite okay as the key is then not garbage collected
+
+local splitters_s, splitters_m, splitters_t = { }, { }, { }
+
+local function splitat(separator,single)
+ local splitter = (single and splitters_s[separator]) or splitters_m[separator]
+ if not splitter then
+ separator = P(separator)
+ local other = C((1 - separator)^0)
+ if single then
+ local any = anything
+ splitter = other * (separator * C(any^0) + "") -- ?
+ splitters_s[separator] = splitter
+ else
+ splitter = other * (separator * other)^0
+ splitters_m[separator] = splitter
+ end
+ end
+ return splitter
+end
+
+local function tsplitat(separator)
+ local splitter = splitters_t[separator]
+ if not splitter then
+ splitter = Ct(splitat(separator))
+ splitters_t[separator] = splitter
+ end
+ return splitter
+end
+
+lpeg.splitat = splitat
+lpeg.tsplitat = tsplitat
+
+function string.splitup(str,separator)
+ if not separator then
+ separator = ","
+ end
+ return match(splitters_m[separator] or splitat(separator),str)
+end
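+
+-- A rough usage sketch of the splitters above (the results in the comments are
+-- what the patterns are expected to yield, not verified output):
+--
+-- lpeg.match(lpeg.splitat(","),"a,b,c")  -- "a", "b", "c"   (multiple return values)
+-- lpeg.match(lpeg.tsplitat(","),"a,b,c") -- { "a", "b", "c" }
+-- string.splitup("x;y;z",";")            -- "x", "y", "z"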
+
+
+local cache = { }
+
+function lpeg.split(separator,str)
+ local c = cache[separator]
+ if not c then
+ c = tsplitat(separator)
+ cache[separator] = c
+ end
+ return match(c,str)
+end
+
+function string.split(str,separator)
+ local c = cache[separator]
+ if not c then
+ c = tsplitat(separator)
+ cache[separator] = c
+ end
+ return match(c,str)
+end
+
+local spacing = patterns.spacer^0 * newline -- sort of strip
+local empty = spacing * Cc("")
+local nonempty = Cs((1-spacing)^1) * spacing^-1
+local content = (empty + nonempty)^1
+
+patterns.textline = content
+
+
+local linesplitter = tsplitat(newline)
+
+patterns.linesplitter = linesplitter
+
+function string.splitlines(str)
+ return match(linesplitter,str)
+end
+
+local utflinesplitter = utfbom^-1 * tsplitat(newline)
+
+patterns.utflinesplitter = utflinesplitter
+
+function string.utfsplitlines(str)
+ return match(utflinesplitter,str or "")
+end
+
+
+local cache = { }
+
+function lpeg.checkedsplit(separator,str)
+ local c = cache[separator]
+ if not c then
+ separator = P(separator)
+ local other = C((1 - separator)^1)
+ c = Ct(separator^0 * other * (separator^1 * other)^0)
+ cache[separator] = c
+ end
+ return match(c,str)
+end
+
+function string.checkedsplit(str,separator)
+ local c = cache[separator]
+ if not c then
+ separator = P(separator)
+ local other = C((1 - separator)^1)
+ c = Ct(separator^0 * other * (separator^1 * other)^0)
+ cache[separator] = c
+ end
+ return match(c,str)
+end
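+
+-- The difference between the plain and the checked splitters in a nutshell
+-- (expected results, shown for illustration):
+--
+-- lpeg.split(",","a,,b")        -- { "a", "", "b" } : empty strings are kept
+-- lpeg.checkedsplit(",","a,,b") -- { "a", "b" }     : empty strings are dropped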
+
+
+local function f2(s) local c1, c2 = byte(s,1,2) return c1 * 64 + c2 - 12416 end
+local function f3(s) local c1, c2, c3 = byte(s,1,3) return (c1 * 64 + c2) * 64 + c3 - 925824 end
+local function f4(s) local c1, c2, c3, c4 = byte(s,1,4) return ((c1 * 64 + c2) * 64 + c3) * 64 + c4 - 63447168 end
+
+local utf8byte = patterns.utf8one/byte + patterns.utf8two/f2 + patterns.utf8three/f3 + patterns.utf8four/f4
+
+patterns.utf8byte = utf8byte
+
+
+
+local cache = { }
+
+function lpeg.stripper(str)
+ if type(str) == "string" then
+ local s = cache[str]
+ if not s then
+ s = Cs(((S(str)^1)/"" + 1)^0)
+ cache[str] = s
+ end
+ return s
+ else
+ return Cs(((str^1)/"" + 1)^0)
+ end
+end
+
+local cache = { }
+
+function lpeg.keeper(str)
+ if type(str) == "string" then
+ local s = cache[str]
+ if not s then
+ s = Cs((((1-S(str))^1)/"" + 1)^0)
+ cache[str] = s
+ end
+ return s
+ else
+ return Cs((((1-str)^1)/"" + 1)^0)
+ end
+end
+
+function lpeg.frontstripper(str) -- or pattern (yet undocumented)
+ return (P(str) + P(true)) * Cs(P(1)^0)
+end
+
+function lpeg.endstripper(str) -- or pattern (yet undocumented)
+ return Cs((1 - P(str) * P(-1))^0)
+end
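+
+-- A quick sketch of the strippers and the keeper (illustrative values only):
+--
+-- lpeg.match(lpeg.stripper("-"),"a-b-c")            -- "abc"       : listed characters removed
+-- lpeg.match(lpeg.keeper("0123456789"),"tel: 123")  -- "123"       : only listed characters kept
+-- lpeg.match(lpeg.endstripper("/"),"some/path/")    -- "some/path" : trailing pattern removed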
+
+-- Just for fun I looked at the used bytecode and
+-- p = (p and p + pp) or pp gets one more (testset).
+
+function lpeg.replacer(one,two)
+ if type(one) == "table" then
+ local no = #one
+ if no > 0 then
+ local p
+ for i=1,no do
+ local o = one[i]
+ local pp = P(o[1]) / o[2]
+ if p then
+ p = p + pp
+ else
+ p = pp
+ end
+ end
+ return Cs((p + 1)^0)
+ end
+ else
+ two = two or ""
+ return Cs((P(one)/two + 1)^0)
+ end
+end
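+
+-- Replacer sketch (assumed behaviour, values are illustrative):
+--
+-- lpeg.match(lpeg.replacer("l","L"),"hello")                      -- "heLLo"
+-- lpeg.match(lpeg.replacer { { "a", "A" }, { "b", "B" } },"abc")  -- "ABc"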
+
+local splitters_f, splitters_s = { }, { }
+
+function lpeg.firstofsplit(separator) -- always return value
+ local splitter = splitters_f[separator]
+ if not splitter then
+ separator = P(separator)
+ splitter = C((1 - separator)^0)
+ splitters_f[separator] = splitter
+ end
+ return splitter
+end
+
+function lpeg.secondofsplit(separator) -- nil if not split
+ local splitter = splitters_s[separator]
+ if not splitter then
+ separator = P(separator)
+ splitter = (1 - separator)^0 * separator * C(anything^0)
+ splitters_s[separator] = splitter
+ end
+ return splitter
+end
+
+function lpeg.balancer(left,right)
+ left, right = P(left), P(right)
+ return P { left * ((1 - left - right) + V(1))^0 * right }
+end
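+
+-- Sketch of the two split helpers and the balancer (expected results only):
+--
+-- lpeg.match(lpeg.firstofsplit(":"),"foo:bar")  -- "foo" (also "foo" when there is no colon)
+-- lpeg.match(lpeg.secondofsplit(":"),"foo:bar") -- "bar" (nil when there is no colon)
+-- lpeg.match(lpeg.balancer("(",")"),"(a(b)c)x") -- 8 : position right after the balanced part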
+
+
+
+local nany = utf8char/""
+
+function lpeg.counter(pattern)
+ pattern = Cs((P(pattern)/" " + nany)^0)
+ return function(str)
+ return #match(pattern,str)
+ end
+end
+
+if utfgmatch then
+
+ function lpeg.count(str,what) -- replaces string.count
+ if type(what) == "string" then
+ local n = 0
+ for _ in utfgmatch(str,what) do
+ n = n + 1
+ end
+ return n
+ else -- 4 times slower but still faster than / function
+ return #match(Cs((P(what)/" " + nany)^0),str)
+ end
+ end
+
+else
+
+ local cache = { }
+
+ function lpeg.count(str,what) -- replaces string.count
+ if type(what) == "string" then
+ local p = cache[what]
+ if not p then
+ p = Cs((P(what)/" " + nany)^0)
+ cache[p] = p
+ end
+ return #match(p,str)
+ else -- 4 times slower but still faster than / function
+ return #match(Cs((P(what)/" " + nany)^0),str)
+ end
+ end
+
+end
+
+local patterns_escapes = { -- also defined in l-string
+ ["%"] = "%%",
+ ["."] = "%.",
+ ["+"] = "%+", ["-"] = "%-", ["*"] = "%*",
+ ["["] = "%[", ["]"] = "%]",
+ ["("] = "%)", [")"] = "%)",
+ -- ["{"] = "%{", ["}"] = "%}"
+ -- ["^"] = "%^", ["$"] = "%$",
+}
+
+local simple_escapes = { -- also defined in l-string
+ ["-"] = "%-",
+ ["."] = "%.",
+ ["?"] = ".",
+ ["*"] = ".*",
+}
+
+local p = Cs((S("-.+*%()[]") / patterns_escapes + anything)^0)
+local s = Cs((S("-.+*%()[]") / simple_escapes + anything)^0)
+
+function string.escapedpattern(str,simple)
+ return match(simple and s or p,str)
+end
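+
+-- Escaping sketch (assumed output):
+--
+-- string.escapedpattern("10.0%")      -- "10%.0%%" : magic characters escaped for find/gsub
+-- string.escapedpattern("*.lua",true) -- ".*%.lua" : the simple wildcard variant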
+
+-- utf extensions
+
+lpeg.UP = lpeg.P
+
+if utfcharacters then
+
+ function lpeg.US(str)
+ local p
+ for uc in utfcharacters(str) do
+ if p then
+ p = p + P(uc)
+ else
+ p = P(uc)
+ end
+ end
+ return p
+ end
+
+
+elseif utfgmatch then
+
+ function lpeg.US(str)
+ local p
+ for uc in utfgmatch(str,".") do
+ if p then
+ p = p + P(uc)
+ else
+ p = P(uc)
+ end
+ end
+ return p
+ end
+
+else
+
+ function lpeg.US(str)
+ local p
+ local f = function(uc)
+ if p then
+ p = p + P(uc)
+ else
+ p = P(uc)
+ end
+ end
+ match((utf8char/f)^0,str)
+ return p
+ end
+
+end
+
+local range = Cs(utf8byte) * (Cs(utf8byte) + Cc(false))
+
+local utfchar = unicode and unicode.utf8 and unicode.utf8.char
+
+function lpeg.UR(str,more)
+ local first, last
+ if type(str) == "number" then
+ first = str
+ last = more or first
+ else
+ first, last = match(range,str)
+ if not last then
+ return P(str)
+ end
+ end
+ if first == last then
+ return P(str)
+    elseif utfchar and last - first < 8 then -- a somewhat arbitrary criterion
+ local p
+ for i=first,last do
+ if p then
+ p = p + P(utfchar(i))
+ else
+ p = P(utfchar(i))
+ end
+ end
+ return p -- nil when invalid range
+ else
+ local f = function(b)
+ return b >= first and b <= last
+ end
+ return utf8byte / f -- nil when invalid range
+ end
+end
+
+
+
+function lpeg.oneof(list,...) -- lpeg.oneof("elseif","else","if","then")
+ if type(list) ~= "table" then
+ list = { list, ... }
+ end
+ -- sort(list) -- longest match first
+ local p = P(list[1])
+ for l=2,#list do
+ p = p + P(list[l])
+ end
+ return p
+end
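+
+-- oneof sketch: the longest alternative should be listed first, as the list is
+-- not sorted (illustrative):
+--
+-- local p = lpeg.oneof("elseif","else","if","then")
+-- lpeg.match(p,"elseif x") -- 7 : "elseif" matched, so the position after it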
+
+function lpeg.is_lpeg(p)
+ return p and lpegtype(p) == "pattern"
+end
+
+-- For the moment here, but it might move to utilities. Beware, we need to
+-- have the longest keyword first, so 'aaa' comes before 'aa', which is why we
+-- loop back from the end, i.e. we prepend.
+
+local sort, fastcopy, sortedkeys = table.sort, table.fastcopy, table.sortedkeys -- dependency!
+
+function lpeg.append(list,pp,delayed,checked)
+ local p = pp
+ if #list > 0 then
+ local keys = fastcopy(list)
+ sort(keys)
+ for i=#keys,1,-1 do
+ local k = keys[i]
+ if p then
+ p = P(k) + p
+ else
+ p = P(k)
+ end
+ end
+ elseif delayed then -- hm, it looks like the lpeg parser resolves anyway
+ local keys = sortedkeys(list)
+ if p then
+ for i=1,#keys,1 do
+ local k = keys[i]
+ local v = list[k]
+ p = P(k)/list + p
+ end
+ else
+ for i=1,#keys do
+ local k = keys[i]
+ local v = list[k]
+ if p then
+ p = P(k) + p
+ else
+ p = P(k)
+ end
+ end
+ if p then
+ p = p / list
+ end
+ end
+ elseif checked then
+ -- problem: substitution gives a capture
+ local keys = sortedkeys(list)
+ for i=1,#keys do
+ local k = keys[i]
+ local v = list[k]
+ if p then
+ if k == v then
+ p = P(k) + p
+ else
+ p = P(k)/v + p
+ end
+ else
+ if k == v then
+ p = P(k)
+ else
+ p = P(k)/v
+ end
+ end
+ end
+ else
+ local keys = sortedkeys(list)
+ for i=1,#keys do
+ local k = keys[i]
+ local v = list[k]
+ if p then
+ p = P(k)/v + p
+ else
+ p = P(k)/v
+ end
+ end
+ end
+ return p
+end
+
+-- inspect(lpeg.append({ a = "1", aa = "1", aaa = "1" } ,nil,true))
+-- inspect(lpeg.append({ ["degree celsius"] = "1", celsius = "1", degree = "1" } ,nil,true))
+
+-- function lpeg.exact_match(words,case_insensitive)
+-- local pattern = concat(words)
+-- if case_insensitive then
+-- local pattern = S(upper(characters)) + S(lower(characters))
+-- local list = { }
+-- for i=1,#words do
+-- list[lower(words[i])] = true
+-- end
+-- return Cmt(pattern^1, function(_,i,s)
+-- return list[lower(s)] and i
+-- end)
+-- else
+-- local pattern = S(concat(words))
+-- local list = { }
+-- for i=1,#words do
+-- list[words[i]] = true
+-- end
+-- return Cmt(pattern^1, function(_,i,s)
+-- return list[s] and i
+-- end)
+-- end
+-- end
+
+-- experiment:
+
+local function make(t)
+ local p
+-- for k, v in next, t do
+ for k, v in table.sortedhash(t) do
+ if not p then
+ if next(v) then
+ p = P(k) * make(v)
+ else
+ p = P(k)
+ end
+ else
+ if next(v) then
+ p = p + P(k) * make(v)
+ else
+ p = p + P(k)
+ end
+ end
+ end
+ return p
+end
+
+function lpeg.utfchartabletopattern(list)
+ local tree = { }
+ for i=1,#list do
+ local t = tree
+ for c in gmatch(list[i],".") do
+ if not t[c] then
+ t[c] = { }
+ end
+ t = t[c]
+ end
+ end
+ return make(tree)
+end
+
+-- inspect ( lpeg.utfchartabletopattern {
+-- utfchar(0x00A0), -- nbsp
+-- utfchar(0x2000), -- enquad
+-- utfchar(0x2001), -- emquad
+-- utfchar(0x2002), -- enspace
+-- utfchar(0x2003), -- emspace
+-- utfchar(0x2004), -- threeperemspace
+-- utfchar(0x2005), -- fourperemspace
+-- utfchar(0x2006), -- sixperemspace
+-- utfchar(0x2007), -- figurespace
+-- utfchar(0x2008), -- punctuationspace
+-- utfchar(0x2009), -- breakablethinspace
+-- utfchar(0x200A), -- hairspace
+-- utfchar(0x200B), -- zerowidthspace
+-- utfchar(0x202F), -- narrownobreakspace
+-- utfchar(0x205F), -- math thinspace
+-- } )
+
end -- of closure
@@ -1831,6 +2056,11 @@ function io.readstring(f,n,m)
return str
end
+--
+
+if not io.i_limiter then function io.i_limiter() end end -- dummy so we can test safely
+if not io.o_limiter then function io.o_limiter() end end -- dummy so we can test safely
+
end -- of closure
@@ -2061,7 +2291,7 @@ if not modules then modules = { } end modules ['l-os'] = {
-- maybe build io.flush in os.execute
local os = os
-local find, format, gsub, upper = string.find, string.format, string.gsub, string.upper
+local find, format, gsub, upper, gmatch = string.find, string.format, string.gsub, string.upper, string.gmatch
local concat = table.concat
local random, ceil = math.random, math.ceil
local rawget, rawset, type, getmetatable, setmetatable, tonumber = rawget, rawset, type, getmetatable, setmetatable, tonumber
@@ -2167,9 +2397,9 @@ os.type = os.type or (io.pathseparator == ";" and "windows") or "unix"
os.name = os.name or (os.type == "windows" and "mswin" ) or "linux"
if os.type == "windows" then
- os.libsuffix, os.binsuffix = 'dll', 'exe'
+ os.libsuffix, os.binsuffix, os.binsuffixes = 'dll', 'exe', { 'exe', 'cmd', 'bat' }
else
- os.libsuffix, os.binsuffix = 'so', ''
+ os.libsuffix, os.binsuffix, os.binsuffixes = 'so', '', { '' }
end
function os.launch(str)
@@ -2400,6 +2630,39 @@ function os.timezone(delta)
end
end
+local memory = { }
+
+local function which(filename)
+ local fullname = memory[filename]
+ if fullname == nil then
+ local suffix = file.suffix(filename)
+ local suffixes = suffix == "" and os.binsuffixes or { suffix }
+ for directory in gmatch(os.getenv("PATH"),"[^" .. io.pathseparator .."]+") do
+ local df = file.join(directory,filename)
+ for i=1,#suffixes do
+ local dfs = file.addsuffix(df,suffixes[i])
+ if io.exists(dfs) then
+ fullname = dfs
+ break
+ end
+ end
+ end
+ if not fullname then
+ fullname = false
+ end
+ memory[filename] = fullname
+ end
+ return fullname
+end
+
+os.which = which
+os.where = which
+
+-- print(os.which("inkscape.exe"))
+-- print(os.which("inkscape"))
+-- print(os.which("gs.exe"))
+-- print(os.which("ps2pdf"))
+
end -- of closure
@@ -2499,10 +2762,10 @@ end
local trick_1 = char(1)
local trick_2 = "^" .. trick_1 .. "/+"
-function file.join(...)
+function file.join(...) -- rather dirty
local lst = { ... }
local a, b = lst[1], lst[2]
- if a == "" then
+ if not a or a == "" then -- not a added
lst[1] = trick_1
elseif b and find(a,"^/+$") and find(b,"^/") then
lst[1] = ""
@@ -2523,6 +2786,15 @@ function file.join(...)
end
+-- We should be able to use:
+--
+-- function file.is_writable(name)
+-- local a = attributes(name) or attributes(dirname(name,"."))
+-- return a and sub(a.permissions,2,2) == "w"
+-- end
+--
+-- But after some testing Taco and I came up with:
+
function file.is_writable(name)
if lfs.isdir(name) then
name = name .. "/m_t_x_t_e_s_t.tmp"
@@ -2532,12 +2804,17 @@ function file.is_writable(name)
os.remove(name)
return true
end
- else
- local existing = lfs.isfile(name)
- f = io.open(name,"ab")
+ elseif lfs.isfile(name) then
+ local f = io.open(name,"ab")
if f then
f:close()
- if not existing then os.remove(name) end
+ return true
+ end
+ else
+ local f = io.open(name,"ab")
+ if f then
+ f:close()
+ os.remove(name)
return true
end
end
@@ -2594,7 +2871,7 @@ function file.collapsepath(str,anchor)
if element == '.' then
-- do nothing
elseif element == '..' then
- local n = i -1
+ local n = i - 1
while n > 0 do
local element = oldelements[n]
if element ~= '..' and element ~= '.' then
@@ -2690,10 +2967,42 @@ local path = C(((1-slash)^0 * slash)^0)
local suffix = period * C(P(1-period)^0 * P(-1))
local base = C((1-suffix)^0)
-local pattern = (drive + Cc("")) * (path + Cc("")) * (base + Cc("")) * (suffix + Cc(""))
+drive = drive + Cc("")
+path = path + Cc("")
+base = base + Cc("")
+suffix = suffix + Cc("")
-function file.splitname(str) -- returns drive, path, base, suffix
- return lpegmatch(pattern,str)
+local pattern_a = drive * path * base * suffix
+local pattern_b = path * base * suffix
+local pattern_c = C(drive * path) * C(base * suffix)
+
+function file.splitname(str,splitdrive)
+ if splitdrive then
+ return lpegmatch(pattern_a,str) -- returns drive, path, base, suffix
+ else
+ return lpegmatch(pattern_b,str) -- returns path, base, suffix
+ end
+end
+
+function file.nametotable(str,splitdrive) -- returns table
+ local path, drive, subpath, name, base, suffix = lpegmatch(pattern_c,str)
+ if splitdrive then
+ return {
+ path = path,
+ drive = drive,
+ subpath = subpath,
+ name = name,
+ base = base,
+ suffix = suffix,
+ }
+ else
+ return {
+ path = path,
+ name = name,
+ base = base,
+ suffix = suffix,
+ }
+ end
end
-- function test(t) for k, v in next, t do print(v, "=>", file.splitname(v)) end end
@@ -2828,12 +3137,19 @@ local nothing = Cc("")
local escaped = (plus / " ") + (percent * C(hexdigit * hexdigit) / tochar)
-- we assume schemes with more than 1 character (in order to avoid problems with windows disks)
+-- we also assume that when we have a scheme, we also have an authority
+
+local schemestr = Cs((escaped+(1-colon-slash-qmark-hash))^2)
+local authoritystr = Cs((escaped+(1- slash-qmark-hash))^0)
+local pathstr = Cs((escaped+(1- qmark-hash))^0)
+local querystr = Cs((escaped+(1- hash))^0)
+local fragmentstr = Cs((escaped+(1- endofstring))^0)
-local scheme = Cs((escaped+(1-colon-slash-qmark-hash))^2) * colon * slash * slash + nothing
-local authority = Cs((escaped+(1- slash-qmark-hash))^0) + nothing
-local path = slash * Cs((escaped+(1- qmark-hash))^0) + nothing
-local query = qmark * Cs((escaped+(1- hash))^0) + nothing
-local fragment = hash * Cs((escaped+(1- endofstring))^0) + nothing
+local scheme = schemestr * colon + nothing
+local authority = slash * slash * authoritystr + nothing
+local path = slash * pathstr + nothing
+local query = qmark * querystr + nothing
+local fragment = hash * fragmentstr + nothing
local validurl = scheme * authority * path * query * fragment
local parser = Ct(validurl)
@@ -2854,11 +3170,14 @@ local function split(str)
return (type(str) == "string" and lpegmatch(parser,str)) or str
end
+local isscheme = schemestr * colon * slash * slash -- this test also assumes authority
+
local function hasscheme(str)
- local scheme = lpegmatch(scheme,str) -- at least one character
- return scheme and scheme ~= ""
+ local scheme = lpegmatch(isscheme,str) -- at least one character
+ return scheme ~= "" and scheme or false
end
+
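+-- hasscheme sketch (the function is local here and presumably hooked into the
+-- url namespace elsewhere; results are illustrative):
+--
+-- hasscheme("http://www.pragma-ade.com")   -- "http"
+-- hasscheme("pragma-ade.com/showcase.pdf") -- false
+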
-- todo: cache them
local rootletter = R("az","AZ")
@@ -3007,8 +3326,6 @@ local attributes = lfs.attributes
local walkdir = lfs.dir
local isdir = lfs.isdir
local isfile = lfs.isfile
-local mkdir = lfs.mkdir
-local chdir = lfs.chdir
local currentdir = lfs.currentdir
-- handy
@@ -3056,13 +3373,13 @@ local function collectpattern(path,patt,recurse,result)
local ok, scanner
result = result or { }
if path == "/" then
- ok, scanner = xpcall(function() return walkdir(path..".") end, function() end) -- kepler safe
+ ok, scanner, first = xpcall(function() return walkdir(path..".") end, function() end) -- kepler safe
else
- ok, scanner = xpcall(function() return walkdir(path) end, function() end) -- kepler safe
+ ok, scanner, first = xpcall(function() return walkdir(path) end, function() end) -- kepler safe
end
if ok and type(scanner) == "function" then
if not find(path,"/$") then path = path .. '/' end
- for name in scanner do
+ for name in scanner, first do
local full = path .. name
local attr = attributes(full)
local mode = attr.mode
@@ -3245,7 +3562,7 @@ if onwindows then
pth = pth .. "/" .. s
end
if make_indeed and not isdir(pth) then
- mkdir(pth)
+ lfs.mkdir(pth)
end
end
return pth, (isdir(pth) == true)
@@ -3277,7 +3594,7 @@ else
pth = pth .. "/" .. s
end
if make_indeed and not first and not isdir(pth) then
- mkdir(pth)
+ lfs.mkdir(pth)
end
end
else
@@ -3285,7 +3602,7 @@ else
for s in gmatch(str,"[^/]+") do
pth = pth .. "/" .. s
if make_indeed and not isdir(pth) then
- mkdir(pth)
+ lfs.mkdir(pth)
end
end
end
@@ -3313,10 +3630,10 @@ if onwindows then
first, last = match(str,"^([a-zA-Z]:)(.*)$")
if first and not find(last,"^/") then
local d = currentdir()
- if chdir(first) then
+ if lfs.chdir(first) then
first = dir.current()
end
- chdir(d)
+ lfs.chdir(d)
end
end
if not first then
@@ -3381,10 +3698,6 @@ local type, tonumber = type, tonumber
boolean = boolean or { }
local boolean = boolean
--- function boolean.tonumber(b)
--- return b and 1 or 0 -- test and test and return or return
--- end
-
function boolean.tonumber(b)
if b then return 1 else return 0 end -- test and return or return
end
@@ -3545,7 +3858,7 @@ local function utf16_to_utf8_be(t)
if right then
local now = 256*left + right
if more > 0 then
- now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000
+ now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000 -- the 0x10000 smells wrong
more = 0
r = r + 1
result[r] = utfchar(now)
@@ -3573,7 +3886,7 @@ local function utf16_to_utf8_le(t)
if right then
local now = 256*right + left
if more > 0 then
- now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000
+ now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000 -- the 0x10000 smells wrong
more = 0
r = r + 1
result[r] = utfchar(now)
@@ -3590,14 +3903,14 @@ local function utf16_to_utf8_le(t)
return t
end
-local function utf32_to_utf8_be(str)
+local function utf32_to_utf8_be(t)
if type(t) == "string" then
- t = utfsplitlines(str)
+ t = utfsplitlines(t)
end
local result = { } -- we reuse result
for i=1,#t do
local r, more = 0, -1
- for a,b in bytepairs(str) do
+ for a,b in bytepairs(t[i]) do
if a and b then
if more < 0 then
more = 256*256*256*a + 256*256*b
@@ -3612,17 +3925,17 @@ local function utf32_to_utf8_be(str)
end
t[i] = concat(result,"",1,r)
end
- return result
+ return t
end
-local function utf32_to_utf8_le(str)
+local function utf32_to_utf8_le(t)
if type(t) == "string" then
- t = utfsplitlines(str)
+ t = utfsplitlines(t)
end
local result = { } -- we reuse result
for i=1,#t do
local r, more = 0, -1
- for a,b in bytepairs(str) do
+ for a,b in bytepairs(t[i]) do
if a and b then
if more < 0 then
more = 256*b + a
@@ -3637,7 +3950,7 @@ local function utf32_to_utf8_le(str)
end
t[i] = concat(result,"",1,r)
end
- return result
+ return t
end
unicode.utf32_to_utf8_be = utf32_to_utf8_be
@@ -3706,15 +4019,67 @@ end
local lpegmatch = lpeg.match
-local utftype = lpeg.patterns.utftype
+local patterns = lpeg.patterns
+local utftype = patterns.utftype
function unicode.filetype(data)
return data and lpegmatch(utftype,data) or "unknown"
end
+local toentities = lpeg.Cs (
+ (
+ patterns.utf8one
+ + (
+ patterns.utf8two
+ + patterns.utf8three
+ + patterns.utf8four
+ ) / function(s) local b = utfbyte(s) if b < 127 then return s else return format("&#%X;",b) end end
+ )^0
+)
+
+patterns.toentities = toentities
+
+function utf.toentities(str)
+ return lpegmatch(toentities,str)
+end
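+
+-- toentities sketch (assumed behaviour): ascii passes through, everything else
+-- becomes a hexadecimal entity:
+--
+-- utf.toentities("a§b") -- "a&#A7;b"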
+
+local P, C, R, Cs = lpeg.P, lpeg.C, lpeg.R, lpeg.Cs
+
+local one = P(1)
+local two = C(1) * C(1)
+local four = C(R(utfchar(0xD8),utfchar(0xFF))) * C(1) * C(1) * C(1)
+
+local pattern = P("\254\255") * Cs( (
+ four / function(a,b,c,d)
+ local ab = 0xFF * byte(a) + byte(b)
+ local cd = 0xFF * byte(c) + byte(d)
+ return utfchar((ab-0xD800)*0x400 + (cd-0xDC00) + 0x10000)
+ end
+ + two / function(a,b)
+ return utfchar(byte(a)*256 + byte(b))
+ end
+ + one
+ )^1 )
+ + P("\255\254") * Cs( (
+ four / function(b,a,d,c)
+ local ab = 0xFF * byte(a) + byte(b)
+ local cd = 0xFF * byte(c) + byte(d)
+ return utfchar((ab-0xD800)*0x400 + (cd-0xDC00) + 0x10000)
+ end
+ + two / function(b,a)
+ return utfchar(byte(a)*256 + byte(b))
+ end
+ + one
+ )^1 )
+
+function string.toutf(s)
+ return lpegmatch(pattern,s) or s -- todo: utf32
+end
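+
+-- toutf sketch: a utf-16 string with a byte order mark becomes utf-8, anything
+-- else is returned as-is (illustrative, not verified):
+--
+-- string.toutf("\254\255\000A\000B") -- "AB" (big endian utf-16)
+-- string.toutf("plain ascii")        -- "plain ascii"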
+
+
end -- of closure
do -- create closure to overcome 200 locals limit
@@ -3750,8 +4115,8 @@ if not math.sind then
end
if not math.odd then
- function math.odd (n) return n % 2 == 0 end
- function math.even(n) return n % 2 ~= 0 end
+ function math.odd (n) return n % 2 ~= 0 end
+ function math.even(n) return n % 2 == 0 end
end
@@ -3771,9 +4136,10 @@ utilities = utilities or {}
utilities.tables = utilities.tables or { }
local tables = utilities.tables
-local format, gmatch = string.format, string.gmatch
+local format, gmatch, rep = string.format, string.gmatch, string.rep
local concat, insert, remove = table.concat, table.insert, table.remove
local setmetatable, getmetatable, tonumber, tostring = setmetatable, getmetatable, tonumber, tostring
+local type, next, rawset, tonumber = type, next, rawset, tonumber
function tables.definetable(target) -- defines undefined tables
local composed, t, n = nil, { }, 0
@@ -3789,14 +4155,31 @@ function tables.definetable(target) -- defines undefined tables
return concat(t,"\n")
end
-function tables.accesstable(target)
- local t = _G
+function tables.accesstable(target,root)
+ local t = root or _G
for name in gmatch(target,"([^%.]+)") do
t = t[name]
+ if not t then
+ return
+ end
end
return t
end
+function tables.migratetable(target,v,root)
+ local t = root or _G
+ local names = string.split(target,".")
+ for i=1,#names-1 do
+ local name = names[i]
+ t[name] = t[name] or { }
+ t = t[name]
+ if not t then
+ return
+ end
+ end
+ t[names[#names]] = v
+end
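+
+-- accesstable and migratetable sketch (the table and keys are hypothetical):
+--
+-- local config = { }
+-- tables.migratetable("output.pdf.level",4,config) -- creates config.output.pdf on the fly
+-- tables.accesstable("output.pdf.level",config)    -- 4
+-- tables.accesstable("output.dvi.mode",config)     -- nil : path does not exist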
+
function tables.removevalue(t,value) -- todo: n
if value then
for i=1,#t do
@@ -3840,13 +4223,19 @@ end
-- experimental
-local function toxml(t,d,result)
+local function toxml(t,d,result,step)
for k, v in table.sortedpairs(t) do
if type(v) == "table" then
- result[#result+1] = format("%s<%s>",d,k)
- toxml(v,d.." ",result)
- result[#result+1] = format("%s</%s>",d,k)
- elseif tonumber(k) then
+ if type(k) == "number" then
+ result[#result+1] = format("%s<entry n='%s'>",d,k)
+ toxml(v,d..step,result,step)
+ result[#result+1] = format("%s</entry>",d,k)
+ else
+ result[#result+1] = format("%s<%s>",d,k)
+ toxml(v,d..step,result,step)
+ result[#result+1] = format("%s</%s>",d,k)
+ end
+ elseif type(k) == "number" then
result[#result+1] = format("%s<entry n='%s'>%s</entry>",d,k,v,k)
else
result[#result+1] = format("%s<%s>%s</%s>",d,k,tostring(v),k)
@@ -3854,17 +4243,56 @@ local function toxml(t,d,result)
end
end
-function table.toxml(t,name,nobanner)
+function table.toxml(t,name,nobanner,indent,spaces)
local noroot = name == false
local result = (nobanner or noroot) and { } or { "<?xml version='1.0' standalone='yes' ?>" }
+ local indent = rep(" ",indent or 0)
+ local spaces = rep(" ",spaces or 1)
if noroot then
- toxml( t, "", result)
+        toxml( t, indent, result, spaces)
else
- toxml( { [name or "root"] = t }, "", result)
+ toxml( { [name or "root"] = t }, indent, result, spaces)
end
return concat(result,"\n")
end
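+
+-- toxml sketch (approximate output; numeric keys become <entry n='...'> elements):
+--
+-- table.toxml({ colors = { "red", "green" } },"setup")
+--
+-- <?xml version='1.0' standalone='yes' ?>
+-- <setup>
+--  <colors>
+--   <entry n='1'>red</entry>
+--   <entry n='2'>green</entry>
+--  </colors>
+-- </setup>
+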
+-- also experimental
+
+-- encapsulate(table,utilities.tables)
+-- encapsulate(table,utilities.tables,true)
+-- encapsulate(table,true)
+
+function tables.encapsulate(core,capsule,protect)
+ if type(capsule) ~= "table" then
+ protect = true
+ capsule = { }
+ end
+ for key, value in next, core do
+ if capsule[key] then
+ print(format("\ninvalid inheritance '%s' in '%s': %s",key,tostring(core)))
+ os.exit()
+ else
+ capsule[key] = value
+ end
+ end
+ if protect then
+ for key, value in next, core do
+ core[key] = nil
+ end
+ setmetatable(core, {
+ __index = capsule,
+ __newindex = function(t,key,value)
+ if capsule[key] then
+ print(format("\ninvalid overload '%s' in '%s'",key,tostring(core)))
+ os.exit()
+ else
+ rawset(t,key,value)
+ end
+ end
+ } )
+ end
+end
+
end -- of closure
@@ -3886,8 +4314,8 @@ local storage = utilities.storage
function storage.mark(t)
if not t then
- texio.write_nl("fatal error: storage '%s' cannot be marked",t)
- os.exit()
+ texio.write_nl("fatal error: storage cannot be marked")
+ return -- os.exit()
end
local m = getmetatable(t)
if not m then
@@ -3916,8 +4344,8 @@ end
function storage.checked(t)
if not t then
- texio.write_nl("fatal error: storage '%s' has not been allocated",t)
- os.exit()
+ texio.write_nl("fatal error: storage has not been allocated")
+ return -- os.exit()
end
return t
end
@@ -3946,13 +4374,13 @@ end
-- table namespace ?
-local function f_empty () return "" end -- t,k
-local function f_self (t,k) t[k] = k return k end
-local function f_ignore() end -- t,k,v
+local function f_empty () return "" end -- t,k
+local function f_self (t,k) t[k] = k return k end
+local function f_ignore() end -- t,k,v
-local t_empty = { __index = empty }
-local t_self = { __index = self }
-local t_ignore = { __newindex = ignore }
+local t_empty = { __index = f_empty }
+local t_self = { __index = f_self }
+local t_ignore = { __newindex = f_ignore }
function table.setmetatableindex(t,f)
local m = getmetatable(t)
@@ -3973,6 +4401,7 @@ function table.setmetatableindex(t,f)
setmetatable(t,{ __index = f })
end
end
+ return t
end
function table.setmetatablenewindex(t,f)
@@ -3990,6 +4419,7 @@ function table.setmetatablenewindex(t,f)
setmetatable(t,{ __newindex = f })
end
end
+ return t
end
function table.setmetatablecall(t,f)
@@ -3999,6 +4429,7 @@ function table.setmetatablecall(t,f)
else
setmetatable(t,{ __call = f })
end
+ return t
end
function table.setmetatablekey(t,key,value)
@@ -4008,6 +4439,7 @@ function table.setmetatablekey(t,key,value)
setmetatable(t,m)
end
m[key] = value
+ return t
end
function table.getmetatablekey(t,key,value)
@@ -4176,14 +4608,29 @@ utilities = utilities or {}
utilities.lua = utilities.lua or { }
utilities.report = logs and logs.reporter("system") or print
-function utilities.lua.compile(luafile,lucfile,cleanup,strip) -- defaults: cleanup=false strip=true
+local function stupidcompile(luafile,lucfile)
+ local data = io.loaddata(luafile)
+ if data and data ~= "" then
+ data = string.dump(data)
+ if data and data ~= "" then
+ io.savedata(lucfile,data)
+ end
+ end
+end
+
+function utilities.lua.compile(luafile,lucfile,cleanup,strip,fallback) -- defaults: cleanup=false strip=true
utilities.report("lua: compiling %s into %s",luafile,lucfile)
os.remove(lucfile)
local command = "-o " .. string.quoted(lucfile) .. " " .. string.quoted(luafile)
if strip ~= false then
command = "-s " .. command
end
- local done = os.spawn("texluac " .. command) == 0 or os.spawn("luac " .. command) == 0
+ local done = os.spawn("texluac " .. command) == 0 -- or os.spawn("luac " .. command) == 0
+ if not done and fallback then
+ utilities.report("lua: dumping %s into %s (unstripped)",luafile,lucfile)
+ stupidcompile(luafile,lucfile) -- maybe use the stripper we have elsewhere
+ cleanup = false -- better see how worse it is
+ end
if done and cleanup == true and lfs.isfile(lucfile) and lfs.isfile(luafile) then
utilities.report("lua: removing %s",luafile)
os.remove(luafile)
@@ -4210,9 +4657,9 @@ if not modules then modules = { } end modules ['util-prs'] = {
license = "see context related readme files"
}
-local P, R, V, C, Ct, Carg = lpeg.P, lpeg.R, lpeg.V, lpeg.C, lpeg.Ct, lpeg.Carg
+local P, R, V, C, Ct, Cs, Carg = lpeg.P, lpeg.R, lpeg.V, lpeg.C, lpeg.Ct, lpeg.Cs, lpeg.Carg
local lpegmatch = lpeg.match
-local concat, format, gmatch = table.concat, string.format, string.gmatch
+local concat, format, gmatch, find = table.concat, string.format, string.gmatch, string.find
local tostring, type, next = tostring, type, next
utilities = utilities or {}
@@ -4240,8 +4687,12 @@ local rbrace = P("}")
local nobrace = 1 - (lbrace+rbrace)
local nested = P { lbrace * (nobrace + V(1))^0 * rbrace }
local spaces = space^0
+local argument = Cs((lbrace/"") * ((nobrace + nested)^0) * (rbrace/""))
+local content = (1-P(-1))^0
-lpeg.patterns.nested = nested
+lpeg.patterns.nested = nested -- no capture
+lpeg.patterns.argument = argument -- argument after e.g. =
+lpeg.patterns.content = content -- rest after e.g =
local value = P(lbrace * C((nobrace + nested)^0) * rbrace) + C((nested + (1-comma))^0)
@@ -4322,9 +4773,15 @@ parsers.patterns.settings_to_array = pattern
-- we could use a weak table as cache
-function parsers.settings_to_array(str)
+function parsers.settings_to_array(str,strict)
if not str or str == "" then
return { }
+ elseif strict then
+ if find(str,"{") then
+ return lpegmatch(pattern,str)
+ else
+ return { str }
+ end
else
return lpegmatch(pattern,str)
end
@@ -4562,61 +5019,55 @@ local names = { }
-- one
local function hook()
- local f = getinfo(2,"f").func
- local n = getinfo(2,"Sn")
--- if n.what == "C" and n.name then print (n.namewhat .. ': ' .. n.name) end
+ local f = getinfo(2) -- "nS"
if f then
- local cf = counters[f]
- if cf == nil then
- counters[f] = 1
- names[f] = n
- else
- counters[f] = cf + 1
- end
- end
-end
-
-local function getname(func)
- local n = names[func]
- if n then
- if n.what == "C" then
- return n.name or '<anonymous>'
+ local n = "unknown"
+ if f.what == "C" then
+ n = f.name or '<anonymous>'
+ if not names[n] then
+ names[n] = format("%42s",n)
+ end
else
-- source short_src linedefined what name namewhat nups func
- local name = n.name or n.namewhat or n.what
- if not name or name == "" then name = "?" end
- return format("%s : %s : %s", n.short_src or "unknown source", n.linedefined or "--", name)
+ n = f.name or f.namewhat or f.what
+ if not n or n == "" then
+ n = "?"
+ end
+ if not names[n] then
+ names[n] = format("%42s : % 5i : %s",n,f.linedefined or 0,f.short_src or "unknown source")
+ end
end
- else
- return "unknown"
+ counters[n] = (counters[n] or 0) + 1
end
end
-function debugger.showstats(printer,threshold)
+function debugger.showstats(printer,threshold) -- hm, something has changed, rubbish now
printer = printer or texio.write or print
threshold = threshold or 0
local total, grandtotal, functions = 0, 0, 0
- printer("\n") -- ugly but ok
- -- table.sort(counters)
- for func, count in next, counters do
- if count > threshold then
- local name = getname(func)
- if not find(name,"for generator") then
- printer(format("%8i %s", count, name))
- total = total + count
- end
+ local dataset = { }
+ for name, count in next, counters do
+ dataset[#dataset+1] = { name, count }
+ end
+ table.sort(dataset,function(a,b) return a[2] == b[2] and b[1] > a[1] or a[2] > b[2] end)
+ for i=1,#dataset do
+ local d = dataset[i]
+ local name = d[1]
+ local count = d[2]
+ if count > threshold and not find(name,"for generator") then -- move up
+ printer(format("%8i %s\n", count, names[name]))
+ total = total + count
end
grandtotal = grandtotal + count
functions = functions + 1
end
- printer(format("functions: %s, total: %s, grand total: %s, threshold: %s\n", functions, total, grandtotal, threshold))
+ printer("\n")
+ printer(format("functions : % 10i\n", functions))
+ printer(format("total : % 10i\n", total))
+ printer(format("grand total: % 10i\n", grandtotal))
+ printer(format("threshold : % 10i\n", threshold))
end
--- two
-
-
--- rest
-
function debugger.savestats(filename,threshold)
local f = io.open(filename,'w')
if f then
@@ -4638,6 +5089,7 @@ end
local is_node = node and node.is_node
+local is_lpeg = lpeg and lpeg.type
function inspect(i) -- global function
local ti = type(i)
@@ -4645,11 +5097,30 @@ function inspect(i) -- global function
table.print(i,"table")
elseif is_node and is_node(i) then
table.print(nodes.astable(i),tostring(i))
+ elseif is_lpeg and is_lpeg(i) then
+ lpeg.print(i)
else
print(tostring(i))
end
end
+-- from the lua book:
+
+function traceback()
+ local level = 1
+ while true do
+ local info = debug.getinfo(level, "Sl")
+ if not info then
+ break
+ elseif info.what == "C" then
+ print(format("%3i : C function",level))
+ else
+ print(format("%3i : [%s]:%d",level,info.short_src,info.currentline))
+ end
+ level = level + 1
+ end
+end
+
end -- of closure
@@ -4668,7 +5139,7 @@ if not modules then modules = { } end modules ['trac-inf'] = {
-- get warnings about assignments. This is more efficient than using rawset
-- and rawget.
-local format = string.format
+local format, lower = string.format, string.lower
local clock = os.gettimeofday or os.clock -- should go in environment
local write_nl = texio.write_nl
@@ -4770,10 +5241,10 @@ function statistics.show(reporter)
-- this code will move
local register = statistics.register
register("luatex banner", function()
- return string.lower(status.banner)
+ return lower(status.banner)
end)
register("control sequences", function()
- return format("%s of %s", status.cs_count, status.hash_size+status.hash_extra)
+ return format("%s of %s + %s", status.cs_count, status.hash_size,status.hash_extra)
end)
register("callbacks", function()
local total, indirect = status.callbacks or 0, status.indirect_callbacks or 0
@@ -4847,7 +5318,7 @@ end
function commands.elapsedtime(name)
stoptiming(name or "whatever")
- tex.sprint(elapsedtime(name or "whatever"))
+ context(elapsedtime(name or "whatever"))
end
@@ -4931,6 +5402,10 @@ local function set(t,what,newvalue)
if type(what) ~= "table" then
return
end
+ if not done then -- catch ... why not set?
+ done = { }
+ t.done = done
+ end
for w, value in next, what do
if value == "" then
value = newvalue
@@ -5167,6 +5642,8 @@ end
if texconfig then
+ -- this happens too late in ini mode but that is no problem
+
local function set(k,v)
v = tonumber(v)
if v then
@@ -5237,7 +5714,7 @@ local report, subreport, status, settarget, setformats, settranslations
local direct, subdirect, writer, pushtarget, poptarget
-if tex and tex.jobname or tex.formatname then
+if tex and (tex.jobname or tex.formatname) then
local valueiskey = { __index = function(t,k) t[k] = k return k end } -- will be helper
@@ -5252,6 +5729,10 @@ if tex and tex.jobname or tex.formatname then
write_nl(target,...)
end
+ newline = function()
+ write_nl(target,"\n")
+ end
+
report = function(a,b,c,...)
if c then
write_nl(target,format("%-15s > %s\n",translations[a],format(formats[b],c,...)))
@@ -5357,6 +5838,10 @@ else
writer = write_nl
+ newline = function()
+ write_nl("\n")
+ end
+
report = function(a,b,c,...)
if c then
write_nl(format("%-15s | %s",a,format(b,c,...)))
@@ -5416,6 +5901,7 @@ logs.settranslations = settranslations
logs.direct = direct
logs.subdirect = subdirect
logs.writer = writer
+logs.newline = newline
-- installer
@@ -5585,21 +6071,58 @@ local real, user, sub
function logs.start_page_number()
real, user, sub = texcount.realpageno, texcount.userpageno, texcount.subpageno
+-- real, user, sub = 0, 0, 0
end
-function logs.stop_page_number()
- if real > 0 then
- if user > 0 then
- if sub > 0 then
- report_pages("flushing realpage %s, userpage %s, subpage %s",real,user,sub)
+local timing = false
+local starttime = nil
+local lasttime = nil
+
+trackers.register("pages.timing", function(v) -- only for myself (diagnostics)
+ starttime = os.clock()
+ timing = true
+end)
+
+function logs.stop_page_number() -- the first page can include the initialization so we omit it from the average
+ if timing then
+ local elapsed, average
+ local stoptime = os.clock()
+ if not lasttime or real < 2 then
+ elapsed = stoptime
+ average = stoptime
+ starttime = stoptime
+ else
+ elapsed = stoptime - lasttime
+ average = (stoptime - starttime) / (real - 1)
+ end
+ lasttime = stoptime
+ if real > 0 then
+ if user > 0 then
+ if sub > 0 then
+ report_pages("flushing realpage %s, userpage %s, subpage %s, time %0.04f / %0.04f",real,user,sub,elapsed,average)
+ else
+ report_pages("flushing realpage %s, userpage %s, time %0.04f / %0.04f",real,user,elapsed,average)
+ end
else
- report_pages("flushing realpage %s, userpage %s",real,user)
+ report_pages("flushing realpage %s, time %0.04f / %0.04f",real,elapsed,average)
end
else
- report_pages("flushing realpage %s",real)
+ report_pages("flushing page, time %0.04f / %0.04f",elapsed,average)
end
else
- report_pages("flushing page")
+ if real > 0 then
+ if user > 0 then
+ if sub > 0 then
+ report_pages("flushing realpage %s, userpage %s, subpage %s",real,user,sub)
+ else
+ report_pages("flushing realpage %s, userpage %s",real,user)
+ end
+ else
+ report_pages("flushing realpage %s",real)
+ end
+ else
+ report_pages("flushing page")
+ end
end
logs.flush()
end
@@ -5681,6 +6204,13 @@ local function reportbanner(t)
end
end
+local function reportversion(t)
+ local banner = t.banner
+ if banner then
+ t.report(banner)
+ end
+end
+
local function reporthelp(t,...)
local helpinfo = t.helpinfo
if type(helpinfo) == "string" then
@@ -5707,6 +6237,7 @@ function logs.application(t)
t.report = logs.reporter(t.name)
t.help = function(...) reportbanner(t) ; reporthelp(t,...) ; reportinfo(t) end
t.identify = function() reportbanner(t) end
+ t.version = function() reportversion(t) end
return t
end
@@ -5772,6 +6303,11 @@ else
end
end
+-- do we still need io.flush then?
+
+io.stdout:setvbuf('no')
+io.stderr:setvbuf('no')
+
end -- of closure
@@ -6277,6 +6813,10 @@ if not modules then modules = { } end modules ['lxml-tab'] = {
-- stripping spaces from e.g. cont-en.xml saves .2 sec runtime so it's not worth the
-- trouble
+-- todo: when serializing optionally remap named entities to hex (if known in char-ent.lua)
+-- maybe when letter -> utf, else name .. then we need an option to the serializer .. a bit
+-- of work so we delay this till we cleanup
+
local trace_entities = false trackers.register("xml.entities", function(v) trace_entities = v end)
local report_xml = logs and logs.reporter("xml","core") or function(...) print(format(...)) end
@@ -6301,10 +6841,11 @@ xml = xml or { }
local xml = xml
+local utf = unicode.utf8
local concat, remove, insert = table.concat, table.remove, table.insert
local type, next, setmetatable, getmetatable, tonumber = type, next, setmetatable, getmetatable, tonumber
local format, lower, find, match, gsub = string.format, string.lower, string.find, string.match, string.gsub
-local utfchar = unicode.utf8.char
+local utfchar, utffind, utfgsub = utf.char, utf.find, utf.gsub
local lpegmatch = lpeg.match
local P, S, R, C, V, C, Cs = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.V, lpeg.C, lpeg.Cs
@@ -6417,9 +6958,22 @@ element.</p>
local nsremap, resolvens = xml.xmlns, xml.resolvens
-local stack, top, dt, at, xmlns, errorstr, entities = { }, { }, { }, { }, { }, nil, { }
-local strip, cleanup, utfize, resolve, resolve_predefined, unify_predefined = false, false, false, false, false, false
-local dcache, hcache, acache = { }, { }, { }
+local stack = { }
+local top = { }
+local dt = { }
+local at = { }
+local xmlns = { }
+local errorstr = nil
+local entities = { }
+local strip = false
+local cleanup = false
+local utfize = false
+local resolve_predefined = false
+local unify_predefined = false
+
+local dcache = { }
+local hcache = { }
+local acache = { }
local mt = { }
@@ -6585,7 +7139,7 @@ local predefined_unified = {
[42] = "&quot;",
[47] = "&apos;",
[74] = "&lt;",
- [76] = "&gr;",
+ [76] = "&gt;",
}
local predefined_simplified = {
@@ -6596,6 +7150,58 @@ local predefined_simplified = {
[76] = ">", gt = ">",
}
+local nofprivates = 0xF0000 -- shared but seldom used
+
+local privates_u = { -- unescaped
+ [ [[&]] ] = "&amp;",
+ [ [["]] ] = "&quot;",
+ [ [[']] ] = "&apos;",
+ [ [[<]] ] = "&lt;",
+ [ [[>]] ] = "&gt;",
+}
+
+local privates_p = {
+}
+
+local privates_n = {
+ -- keeps track of defined ones
+}
+
+local function escaped(s)
+ if s == "" then
+ return ""
+ else -- if utffind(s,privates_u) then
+ return (utfgsub(s,".",privates_u))
+ -- else
+ -- return s
+ end
+end
+
+local function unescaped(s)
+ local p = privates_n[s]
+ if not p then
+ nofprivates = nofprivates + 1
+ p = utfchar(nofprivates)
+ privates_n[s] = p
+ s = "&" .. s .. ";" -- todo: use char-ent to map to hex
+ privates_u[p] = s
+ privates_p[p] = s
+ end
+ return p
+end
+
+local function unprivatized(s,resolve)
+ if s == "" then
+ return ""
+ else
+ return (utfgsub(s,".",privates_p))
+ end
+end
+
+xml.privatetoken = unescaped
+xml.unprivatized = unprivatized
+xml.privatecodes = privates_n
+
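+-- a minimal sketch of the round trip provided by the helpers above ("foo" is
+-- just an illustrative entity name):
+--
+-- local token = xml.privatetoken("foo")  -- a private use character replaces &foo;
+-- local back  = xml.unprivatized(token)  -- "&foo;" again (used when serializing)
+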
local function handle_hex_entity(str)
local h = hcache[str]
if not h then
@@ -6637,7 +7243,7 @@ local function handle_dec_entity(str)
if not n then
report_xml("utfize, ignoring dec entity &#%s;",str)
elseif trace_entities then
- report_xml("utfize, converting dec entity &#%s; into %s",str,h)
+ report_xml("utfize, converting dec entity &#%s; into %s",str,d)
end
else
if trace_entities then
@@ -6658,34 +7264,44 @@ local function handle_any_entity(str)
if not a then
a = resolve_predefined and predefined_simplified[str]
if a then
- -- one of the predefined
- elseif type(resolve) == "function" then
- a = resolve(str) or entities[str]
- else
- a = entities[str]
- end
- if a then
if trace_entities then
- report_xml("resolved entity &%s; -> %s (internal)",str,a)
+ report_xml("resolved entity &%s; -> %s (predefined)",str,a)
end
- a = lpegmatch(parsedentity,a) or a
else
- local unknown_any_entity = placeholders.unknown_any_entity
- if unknown_any_entity then
- a = unknown_any_entity(str) or ""
+ if type(resolve) == "function" then
+ a = resolve(str) or entities[str]
+ else
+ a = entities[str]
end
if a then
+ if type(a) == "function" then
+ if trace_entities then
+ report_xml("expanding entity &%s; (function)",str)
+ end
+ a = a(str) or ""
+ end
+ a = lpegmatch(parsedentity,a) or a -- for nested
if trace_entities then
- report_xml("resolved entity &%s; -> %s (external)",str,a)
+ report_xml("resolved entity &%s; -> %s (internal)",str,a)
end
else
- if trace_entities then
- report_xml("keeping entity &%s;",str)
+ local unknown_any_entity = placeholders.unknown_any_entity
+ if unknown_any_entity then
+ a = unknown_any_entity(str) or ""
end
- if str == "" then
- a = "&error;"
+ if a then
+ if trace_entities then
+ report_xml("resolved entity &%s; -> %s (external)",str,a)
+ end
else
- a = "&" .. str .. ";"
+ if trace_entities then
+ report_xml("keeping entity &%s;",str)
+ end
+ if str == "" then
+ a = "&error;"
+ else
+ a = "&" .. str .. ";"
+ end
end
end
end
@@ -6700,18 +7316,25 @@ local function handle_any_entity(str)
else
local a = acache[str]
if not a then
- if trace_entities then
- report_xml("found entity &%s;",str)
- end
a = resolve_predefined and predefined_simplified[str]
if a then
-- one of the predefined
acache[str] = a
+ if trace_entities then
+ report_xml("entity &%s; becomes %s",str,tostring(a))
+ end
elseif str == "" then
+ if trace_entities then
+ report_xml("invalid entity &%s;",str)
+ end
a = "&error;"
acache[str] = a
else
- a = "&" .. str .. ";"
+ if trace_entities then
+ report_xml("entity &%s; is made private",str)
+ end
+ -- a = "&" .. str .. ";"
+ a = unescaped(str)
acache[str] = a
end
end
@@ -6761,7 +7384,7 @@ local value = (squote * Cs((entity + (1 - squote))^0) * squote) + (dq
local endofattributes = slash * close + close -- recovery of flacky html
local whatever = space * name * optionalspace * equal
-local wrongvalue = C(P(1-whatever-close)^1 + P(1-close)^1) / attribute_value_error
+----- wrongvalue = C(P(1-whatever-close)^1 + P(1-close)^1) / attribute_value_error
----- wrongvalue = C(P(1-whatever-endofattributes)^1 + P(1-endofattributes)^1) / attribute_value_error
----- wrongvalue = C(P(1-space-endofattributes)^1) / attribute_value_error
local wrongvalue = Cs(P(entity + (1-space-endofattributes))^1) / attribute_value_error
@@ -6796,6 +7419,8 @@ local function normalentity(k,v ) entities[k] = v end
local function systementity(k,v,n) entities[k] = v end
local function publicentity(k,v,n) entities[k] = v end
+-- todo: separate dtd parser
+
local begindoctype = open * P("!DOCTYPE")
local enddoctype = close
local beginset = P("[")
@@ -6803,17 +7428,22 @@ local endset = P("]")
local doctypename = C((1-somespace-close)^0)
local elementdoctype = optionalspace * P("<!ELEMENT") * (1-close)^0 * close
+local basiccomment = begincomment * ((1 - endcomment)^0) * endcomment
+
local normalentitytype = (doctypename * somespace * value)/normalentity
local publicentitytype = (doctypename * somespace * P("PUBLIC") * somespace * value)/publicentity
local systementitytype = (doctypename * somespace * P("SYSTEM") * somespace * value * somespace * P("NDATA") * somespace * doctypename)/systementity
local entitydoctype = optionalspace * P("<!ENTITY") * somespace * (systementitytype + publicentitytype + normalentitytype) * optionalspace * close
-local doctypeset = beginset * optionalspace * P(elementdoctype + entitydoctype + space)^0 * optionalspace * endset
+-- we accept comments in doctypes
+
+local doctypeset = beginset * optionalspace * P(elementdoctype + entitydoctype + basiccomment + space)^0 * optionalspace * endset
local definitiondoctype= doctypename * somespace * doctypeset
local publicdoctype = doctypename * somespace * P("PUBLIC") * somespace * value * somespace * value * somespace * doctypeset
local systemdoctype = doctypename * somespace * P("SYSTEM") * somespace * value * somespace * doctypeset
local simpledoctype = (1-close)^1 -- * balanced^0
local somedoctype = C((somespace * (publicdoctype + systemdoctype + definitiondoctype + simpledoctype) * optionalspace)^0)
local instruction = (spacing * begininstruction * someinstruction * endinstruction) / function(...) add_special("@pi@",...) end
local comment = (spacing * begincomment * somecomment * endcomment ) / function(...) add_special("@cm@",...) end
@@ -6845,17 +7475,30 @@ local grammar_unparsed_text = P { "preamble",
children = unparsedtext + V("parent") + emptyelement + comment + cdata + instruction,
}
--- maybe we will add settinsg to result as well
+-- maybe we will add settings to result as well
-local function xmlconvert(data, settings)
- settings = settings or { } -- no_root strip_cm_and_dt given_entities parent_root error_handler
- strip = settings.strip_cm_and_dt
- utfize = settings.utfize_entities
- resolve = settings.resolve_entities
+local function _xmlconvert_(data, settings)
+ settings = settings or { } -- no_root strip_cm_and_dt given_entities parent_root error_handler
+ --
+ strip = settings.strip_cm_and_dt
+ utfize = settings.utfize_entities
+ resolve = settings.resolve_entities
resolve_predefined = settings.resolve_predefined_entities -- in case we have escaped entities
- unify_predefined = settings.unify_predefined_entities -- &#038; -> &amp;
- cleanup = settings.text_cleanup
- stack, top, at, xmlns, errorstr, entities = { }, { }, { }, { }, nil, settings.entities or { }
+ unify_predefined = settings.unify_predefined_entities -- &#038; -> &amp;
+ cleanup = settings.text_cleanup
+ entities = settings.entities or { }
+ --
+ if utfize == nil then
+ settings.utfize_entities = true
+ utfize = true
+ end
+ if resolve_predefined == nil then
+ settings.resolve_predefined_entities = true
+ resolve_predefined = true
+ end
+ --
+ --
+ stack, top, at, xmlns, errorstr = { }, { }, { }, { }, nil
acache, hcache, dcache = { }, { }, { } -- not stored
reported_attribute_errors = { }
if settings.parent_root then
@@ -6893,7 +7536,7 @@ local function xmlconvert(data, settings)
else
errorhandler = errorhandler or xml.errorhandler
if errorhandler then
- xml.errorhandler("load",errorstr)
+ xml.errorhandler(format("load error: %s",errorstr))
end
end
else
@@ -6907,7 +7550,7 @@ local function xmlconvert(data, settings)
local v = rdt[k]
if type(v) == "table" and not v.special then -- always table -)
result.ri = k -- rootindex
-v.__p__ = result -- new, experiment, else we cannot go back to settings, we need to test this !
+ v.__p__ = result -- new, experiment, else we cannot go back to settings, we need to test this !
break
end
end
@@ -6915,16 +7558,42 @@ v.__p__ = result -- new, experiment, else we cannot go back to settings, we nee
if errorstr and errorstr ~= "" then
result.error = true
end
+ result.statistics = {
+ entities = {
+ decimals = dcache,
+ hexadecimals = hcache,
+ names = acache,
+ }
+ }
+ strip, utfize, resolve, resolve_predefined = nil, nil, nil, nil
+ unify_predefined, cleanup, entities = nil, nil, nil
+ stack, top, at, xmlns, errorstr = nil, nil, nil, nil, nil
+ acache, hcache, dcache = nil, nil, nil
+ reported_attribute_errors, mt, errorhandler = nil, nil, nil
return result
end
+-- Because we can have a crash (stack issues) with faulty xml, we wrap this one
+-- in a protector:
+
+function xmlconvert(data,settings)
+ local ok, result = pcall(function() return _xmlconvert_(data,settings) end)
+ if ok then
+ return result
+ else
+ return _xmlconvert_("")
+ end
+end
+
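+-- in other words (sketch only): input that makes the grammar itself crash now
+-- yields an empty but valid tree, while ordinary malformed input is still just
+-- flagged, e.g. something like xml.convert([[<a><b></a>]]) typically comes back
+-- with its error field set
+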
xml.convert = xmlconvert
-function xml.inheritedconvert(data,xmldata)
+function xml.inheritedconvert(data,xmldata) -- xmldata is parent
local settings = xmldata.settings
- settings.parent_root = xmldata -- to be tested
+ if settings then
+ settings.parent_root = xmldata -- to be tested
+ end
-- settings.no_root = true
- local xc = xmlconvert(data,settings)
+ local xc = xmlconvert(data,settings) -- hm, we might need to locate settings
-- xc.settings = nil
-- xc.entities = nil
-- xc.special = nil
@@ -6953,7 +7622,7 @@ function xml.is_valid(root)
return root and not root.error
end
-xml.errorhandler = report
+xml.errorhandler = report_xml
--[[ldx--
<p>We cannot load an <l n='lpeg'/> from a filehandle so we need to load
@@ -7032,18 +7701,15 @@ alternative.</p>
function xml.checkbom(root) -- can be made faster
if root.ri then
- local dt, found = root.dt, false
+ local dt = root.dt
for k=1,#dt do
local v = dt[k]
if type(v) == "table" and v.special and v.tg == "@pi@" and find(v.dt[1],"xml.*version=") then
- found = true
- break
+ return
end
end
- if not found then
- insert(dt, 1, { special=true, ns="", tg="@pi@", dt = { "xml version='1.0' standalone='yes'"} } )
- insert(dt, 2, "\n" )
- end
+ insert(dt, 1, { special=true, ns="", tg="@pi@", dt = { "xml version='1.0' standalone='yes'"} } )
+ insert(dt, 2, "\n" )
end
end
@@ -7054,14 +7720,14 @@ and then handle the lot.</p>
-- new experimental reorganized serialize
-local function verbose_element(e,handlers)
+local function verbose_element(e,handlers) -- options
local handle = handlers.handle
local serialize = handlers.serialize
local ens, etg, eat, edt, ern = e.ns, e.tg, e.at, e.dt, e.rn
local ats = eat and next(eat) and { }
if ats then
for k,v in next, eat do
- ats[#ats+1] = format('%s=%q',k,v)
+ ats[#ats+1] = format('%s=%q',k,escaped(v))
end
end
if ern and trace_entities and ern ~= ens then
@@ -7077,7 +7743,7 @@ local function verbose_element(e,handlers)
for i=1,#edt do
local e = edt[i]
if type(e) == "string" then
- handle(e)
+ handle(escaped(e))
else
serialize(e,handlers)
end
@@ -7098,11 +7764,11 @@ local function verbose_element(e,handlers)
handle("<",etg,">")
end
for i=1,#edt do
- local ei = edt[i]
- if type(ei) == "string" then
- handle(ei)
+ local e = edt[i]
+ if type(e) == "string" then
+ handle(escaped(e)) -- option: hexify escaped entities
else
- serialize(ei,handlers)
+ serialize(e,handlers)
end
end
handle("</",etg,">")
@@ -7137,7 +7803,7 @@ local function verbose_root(e,handlers)
end
local function verbose_text(e,handlers)
- handlers.handle(e)
+ handlers.handle(escaped(e))
end
local function verbose_document(e,handlers)
@@ -7265,20 +7931,33 @@ local result
local xmlfilehandler = newhandlers {
name = "file",
- initialize = function(name) result = io.open(name,"wb") return result end,
- finalize = function() result:close() return true end,
- handle = function(...) result:write(...) end,
+ initialize = function(name)
+ result = io.open(name,"wb")
+ return result
+ end,
+ finalize = function()
+ result:close()
+ return true
+ end,
+ handle = function(...)
+ result:write(...)
+ end,
}
-- no checking on writeability here but not faster either
--
-- local xmlfilehandler = newhandlers {
--- initialize = function(name) io.output(name,"wb") return true end,
--- finalize = function() io.close() return true end,
+-- initialize = function(name)
+-- io.output(name,"wb")
+-- return true
+-- end,
+-- finalize = function()
+-- io.close()
+-- return true
+-- end,
-- handle = io.write,
-- }
-
function xml.save(root,name)
serialize(root,xmlfilehandler,name)
end
@@ -7287,28 +7966,34 @@ local result
local xmlstringhandler = newhandlers {
name = "string",
- initialize = function() result = { } return result end,
- finalize = function() return concat(result) end,
- handle = function(...) result[#result+1] = concat { ... } end
+ initialize = function()
+ result = { }
+ return result
+ end,
+ finalize = function()
+ return concat(result)
+ end,
+ handle = function(...)
+ result[#result+1] = concat { ... }
+ end,
}
local function xmltostring(root) -- 25% overhead due to collecting
- if root then
- if type(root) == 'string' then
- return root
- else -- if next(root) then -- next is faster than type (and >0 test)
- return serialize(root,xmlstringhandler) or ""
- end
+ if not root then
+ return ""
+ elseif type(root) == 'string' then
+ return root
+ else -- if next(root) then -- next is faster than type (and >0 test)
+ return serialize(root,xmlstringhandler) or ""
end
- return ""
end
-local function xmltext(root) -- inline
+local function __tostring(root) -- inline
return (root and xmltostring(root)) or ""
end
initialize_mt = function(root) -- redefinition
- mt = { __tostring = xmltext, __index = root }
+ mt = { __tostring = __tostring, __index = root }
end
xml.defaulthandlers = handlers
@@ -7428,7 +8113,7 @@ xml.tocdata(e,"error")
--ldx]]--
function xml.tocdata(e,wrapper)
- local whatever = xmltostring(e.dt)
+ local whatever = type(e) == "table" and xmltostring(e.dt) or e or ""
if wrapper then
whatever = format("<%s>%s</%s>",wrapper,whatever,wrapper)
end
@@ -7620,8 +8305,8 @@ apply_axis['child'] = function(list)
c = c + 1
collected[c] = dk
dk.ni = k -- refresh
- en = en + 1
- dk.ei = en
+ en = en + 1
+ dk.ei = en
end
end
ll.en = en
@@ -7649,6 +8334,7 @@ local function collect(list,collected,c)
end
return c
end
+
apply_axis['descendant'] = function(list)
local collected, c = { }, 0
for l=1,#list do
@@ -7940,6 +8626,12 @@ local lp_or = P("|") / " or "
local lp_and = P("&") / " and "
local lp_builtin = P (
+ P("text") / "(ll.dt[1] or '')" + -- fragile
+ P("content") / "ll.dt" +
+ -- P("name") / "(ll.ns~='' and ll.ns..':'..ll.tg)" +
+ P("name") / "((ll.ns~='' and ll.ns..':'..ll.tg) or ll.tg)" +
+ P("tag") / "ll.tg" +
+ P("position") / "l" + -- is element in finalizer
P("firstindex") / "1" +
P("lastindex") / "(#ll.__p__.dt or 1)" +
P("firstelement") / "1" +
@@ -7947,15 +8639,11 @@ local lp_builtin = P (
P("first") / "1" +
P("last") / "#list" +
P("rootposition") / "order" +
- P("position") / "l" + -- is element in finalizer
P("order") / "order" +
P("element") / "(ll.ei or 1)" +
P("index") / "(ll.ni or 1)" +
P("match") / "(ll.mi or 1)" +
- P("text") / "(ll.dt[1] or '')" +
- -- P("name") / "(ll.ns~='' and ll.ns..':'..ll.tg)" +
- P("name") / "((ll.ns~='' and ll.ns..':'..ll.tg) or ll.tg)" +
- P("tag") / "ll.tg" +
+ -- P("namespace") / "ll.ns" +
P("ns") / "ll.ns"
) * ((spaces * P("(") * spaces * P(")"))/"")
@@ -7979,11 +8667,11 @@ local lp_function = C(R("az","AZ","__")^1) * P("(") / function(t) -- todo: bett
end
end
-local lparent = lpeg.P("(")
-local rparent = lpeg.P(")")
+local lparent = P("(")
+local rparent = P(")")
local noparent = 1 - (lparent+rparent)
-local nested = lpeg.P{lparent * (noparent + lpeg.V(1))^0 * rparent}
-local value = lpeg.P(lparent * lpeg.C((noparent + nested)^0) * rparent) -- lpeg.P{"("*C(((1-S("()"))+V(1))^0)*")"}
+local nested = P{lparent * (noparent + V(1))^0 * rparent}
+local value = P(lparent * C((noparent + nested)^0) * rparent) -- P{"("*C(((1-S("()"))+V(1))^0)*")"}
local lp_child = Cc("expr.child(ll,'") * R("az","AZ","--","__")^1 * Cc("')")
local lp_number = S("+-") * R("09")^1
@@ -8135,7 +8823,10 @@ local special_1 = P("*") * Cc(register_auto_descendant) * Cc(register_all_nodes
local special_2 = P("/") * Cc(register_auto_self)
local special_3 = P("") * Cc(register_auto_self)
-local pathparser = Ct { "patterns", -- can be made a bit faster by moving pattern outside
+local no_nextcolon = P(-1) + #(1-P(":")) -- newer lpeg needs the P(-1)
+local no_nextlparent = P(-1) + #(1-P("(")) -- newer lpeg needs the P(-1)
+
+local pathparser = Ct { "patterns", -- can be made a bit faster by moving some patterns outside
patterns = spaces * V("protocol") * spaces * (
( V("special") * spaces * P(-1) ) +
@@ -8164,10 +8855,8 @@ local pathparser = Ct { "patterns", -- can be made a bit faster by moving patter
shortcuts = V("shortcuts_a") * (spaces * "/" * spaces * V("shortcuts_a"))^0,
s_descendant_or_self = (P("***/") + P("/")) * Cc(register_descendant_or_self), --- *** is a bonus
- -- s_descendant_or_self = P("/") * Cc(register_descendant_or_self),
s_descendant = P("**") * Cc(register_descendant),
- s_child = P("*") * #(1-P(":")) * Cc(register_child ),
--- s_child = P("*") * #(P("/")+P(-1)) * Cc(register_child ),
+ s_child = P("*") * no_nextcolon * Cc(register_child ),
s_parent = P("..") * Cc(register_parent ),
s_self = P("." ) * Cc(register_self ),
s_root = P("^^") * Cc(register_root ),
@@ -8194,13 +8883,13 @@ local pathparser = Ct { "patterns", -- can be made a bit faster by moving patter
expressions = expression / register_expression,
letters = R("az")^1,
- name = (1-lpeg.S("/[]()|:*!"))^1,
+ name = (1-S("/[]()|:*!"))^1, -- make inline
negate = P("!") * Cc(false),
nodefunction = V("negate") + P("not") * Cc(false) + Cc(true),
nodetest = V("negate") + Cc(true),
nodename = (V("negate") + Cc(true)) * spaces * ((V("wildnodename") * P(":") * V("wildnodename")) + (Cc(false) * V("wildnodename"))),
- wildnodename = (C(V("name")) + P("*") * Cc(false)) * #(1-P("(")),
+ wildnodename = (C(V("name")) + P("*") * Cc(false)) * no_nextlparent,
nodeset = spaces * Ct(V("nodename") * (spaces * P("|") * spaces * V("nodename"))^0) * spaces,
finalizer = (Cb("protocol") * P("/")^-1 * C(V("name")) * arguments * P(-1)) / register_finalizer,
@@ -8364,7 +9053,7 @@ local function profiled_apply(list,parsed,nofparsed,order)
elseif kind == "expression" then
collected = apply_expression(collected,pi.evaluator,order)
elseif kind == "finalizer" then
- collected = pi.finalizer(collected)
+ collected = pi.finalizer(collected) -- no check on # here
p.matched = p.matched + 1
p.finalized = p.finalized + 1
return collected
@@ -8497,9 +9186,9 @@ end
expressions.child = function(e,pattern)
return applylpath(e,pattern) -- todo: cache
end
-expressions.count = function(e,pattern)
+expressions.count = function(e,pattern) -- what if pattern == empty or nil
local collected = applylpath(e,pattern) -- todo: cache
- return (collected and #collected) or 0
+ return pattern and (collected and #collected) or 0
end
-- external
@@ -8508,7 +9197,7 @@ expressions.oneof = function(s,...) -- slow
local t = {...} for i=1,#t do if s == t[i] then return true end end return false
end
expressions.error = function(str)
- xml.errorhandler("unknown function in lpath expression",tostring(str or "?"))
+ xml.errorhandler(format("unknown function in lpath expression: %s",tostring(str or "?")))
return false
end
expressions.undefined = function(s)
@@ -8534,6 +9223,23 @@ expressions.lower = lower
expressions.number = tonumber
expressions.boolean = toboolean
+function expressions.contains(str,pattern)
+ local t = type(str)
+ if t == "string" then
+ if find(str,pattern) then
+ return true
+ end
+ elseif t == "table" then
+ for i=1,#str do
+ local d = str[i]
+ if type(d) == "string" and find(d,pattern) then
+ return true
+ end
+ end
+ end
+ return false
+end
+
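+-- usage sketch (the lpath syntax is only indicative): a pattern like
+-- "a[contains(text(),'foo')]" ends up calling the helper, which behaves as:
+--
+-- expressions.contains("foobar","foo")           -- true
+-- expressions.contains({ "x", "foobar" },"foo")  -- true (checks string entries)
+-- expressions.contains(123,"foo")                -- false
+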
-- user interface
local function traverse(root,pattern,handle)
@@ -8856,10 +9562,12 @@ local xml = xml
local xmlconvert, xmlcopy, xmlname = xml.convert, xml.copy, xml.name
local xmlinheritedconvert = xml.inheritedconvert
local xmlapplylpath = xml.applylpath
+local xmlfilter = xml.filter
local type, setmetatable, getmetatable = type, setmetatable, getmetatable
-local insert, remove, fastcopy = table.insert, table.remove, table.fastcopy
-local gmatch, gsub = string.gmatch, string.gsub
+local insert, remove, fastcopy, concat = table.insert, table.remove, table.fastcopy, table.concat
+local gmatch, gsub, format = string.gmatch, string.gsub, string.format
+local utfbyte = utf.byte
local function report(what,pattern,c,e)
report_xml("%s element '%s' (root: '%s', position: %s, index: %s, pattern: %s)",what,xmlname(e),xmlname(e.__p__),c,e.ni,pattern)
@@ -9004,7 +9712,7 @@ local function xmltoelement(whatever,root)
end
local element
if type(whatever) == "string" then
- element = xmlinheritedconvert(whatever,root)
+ element = xmlinheritedconvert(whatever,root) -- beware, not really a root
else
element = whatever -- we assume a table
end
@@ -9031,18 +9739,30 @@ local function copiedelement(element,newparent)
end
function xml.delete(root,pattern)
- local collected = xmlapplylpath(root,pattern)
- if collected then
- for c=1,#collected do
- local e = collected[c]
- local p = e.__p__
- if p then
- if trace_manipulations then
- report('deleting',pattern,c,e)
+ if not pattern or pattern == "" then
+ local p = root.__p__
+ if p then
+ if trace_manipulations then
+ report('deleting',"--",c,root)
+ end
+ local d = p.dt
+ remove(d,root.ni)
+ redo_ni(d) -- can be made faster and inlined
+ end
+ else
+ local collected = xmlapplylpath(root,pattern)
+ if collected then
+ for c=1,#collected do
+ local e = collected[c]
+ local p = e.__p__
+ if p then
+ if trace_manipulations then
+ report('deleting',pattern,c,e)
+ end
+ local d = p.dt
+ remove(d,e.ni)
+ redo_ni(d) -- can be made faster and inlined
end
- local d = p.dt
- remove(d,e.ni)
- redo_ni(d) -- can be made faster and inlined
end
end
end
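+-- so (sketch) xml.delete(root,"b") removes every match of the pattern, while a
+-- call without a pattern, xml.delete(e), detaches e itself from its parent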
@@ -9105,32 +9825,39 @@ end
local function inject_element(root,pattern,whatever,prepend)
local element = root and xmltoelement(whatever,root)
local collected = element and xmlapplylpath(root,pattern)
- if collected then
- for c=1,#collected do
- local e = collected[c]
- local r = e.__p__
- local d, k, rri = r.dt, e.ni, r.ri
- local edt = (rri and d[rri].dt) or (d and d[k] and d[k].dt)
- if edt then
- local be, af
- local cp = copiedelement(element,e)
- if prepend then
- be, af = cp, edt
- else
- be, af = edt, cp
- end
- local bn = #be
- for i=1,#af do
- bn = bn + 1
- be[bn] = af[i]
- end
- if rri then
- r.dt[rri].dt = be
- else
- d[k].dt = be
- end
- redo_ni(d)
+ local function inject_e(e)
+ local r = e.__p__
+ local d, k, rri = r.dt, e.ni, r.ri
+ local edt = (rri and d[rri].dt) or (d and d[k] and d[k].dt)
+ if edt then
+ local be, af
+ local cp = copiedelement(element,e)
+ if prepend then
+ be, af = cp, edt
+ else
+ be, af = edt, cp
end
+ local bn = #be
+ for i=1,#af do
+ bn = bn + 1
+ be[bn] = af[i]
+ end
+ if rri then
+ r.dt[rri].dt = be
+ else
+ d[k].dt = be
+ end
+ redo_ni(d)
+ end
+ end
+ if not collected then
+ -- nothing
+ elseif collected.tg then
+ -- first or so
+ inject_e(collected)
+ else
+ for c=1,#collected do
+ inject_e(collected[c])
end
end
end
@@ -9138,16 +9865,23 @@ end
local function insert_element(root,pattern,whatever,before) -- todo: element als functie
local element = root and xmltoelement(whatever,root)
local collected = element and xmlapplylpath(root,pattern)
- if collected then
+ local function insert_e(e)
+ local r = e.__p__
+ local d, k = r.dt, e.ni
+ if not before then
+ k = k + 1
+ end
+ insert(d,k,copiedelement(element,r))
+ redo_ni(d)
+ end
+ if not collected then
+ -- nothing
+ elseif collected.tg then
+ -- first or so
+ insert_e(collected)
+ else
for c=1,#collected do
- local e = collected[c]
- local r = e.__p__
- local d, k = r.dt, e.ni
- if not before then
- k = k + 1
- end
- insert(d,k,copiedelement(element,r))
- redo_ni(d)
+ insert_e(collected[c])
end
end
end
@@ -9346,6 +10080,75 @@ function xml.remapname(root, pattern, newtg, newns, newrn)
end
--[[ldx--
+<p>Helper (for q2p).</p>
+--ldx]]--
+
+function xml.cdatatotext(e)
+ local dt = e.dt
+ if #dt == 1 then
+ local first = dt[1]
+ if first.tg == "@cd@" then
+ e.dt = first.dt
+ end
+ else
+ -- maybe option
+ end
+end
+
+xml.builtinentities = table.tohash { "amp", "quot", "apos", "lt", "gt" } -- used often so share
+
+local entities = characters and characters.entities or nil
+local builtinentities = xml.builtinentities
+
+function xml.addentitiesdoctype(root,option) -- we could also have a 'resolve' i.e. inline hex
+ if not entities then
+ require("char-ent")
+ entities = characters.entities
+ end
+ if entities and root and root.tg == "@rt@" and root.statistics then
+ local list = { }
+ local hexify = option == "hexadecimal"
+ for k, v in table.sortedhash(root.statistics.entities.names) do
+ if not builtinentities[k] then
+ local e = entities[k]
+ if not e then
+ e = format("[%s]",k)
+ elseif hexify then
+ e = format("&#%05X;",utfbyte(k))
+ end
+ list[#list+1] = format(" <!ENTITY %s %q >",k,e)
+ end
+ end
+ local dt = root.dt
+ local n = dt[1].tg == "@pi@" and 2 or 1
+ if #list > 0 then
+ insert(dt, n, { "\n" })
+ insert(dt, n, {
+ tg = "@dt@", -- beware, doctype is unparsed
+ dt = { format("Something [\n%s\n] ",concat(list)) },
+ ns = "",
+ special = true,
+ })
+ insert(dt, n, { "\n\n" })
+ else
+ -- insert(dt, n, { table.serialize(root.statistics) })
+ end
+ end
+end
+
+-- local str = [==[
+-- <?xml version='1.0' standalone='yes' ?>
+-- <root>
+-- <a>test &nbsp; test &#123; test</a>
+-- <b><![CDATA[oeps]]></b>
+-- </root>
+-- ]==]
+--
+-- local x = xml.convert(str)
+-- xml.addentitiesdoctype(x,"hexadecimal")
+-- print(x)
+
+--[[ldx--
<p>Here are a few synonyms.</p>
--ldx]]--
@@ -9378,6 +10181,53 @@ xml.remap_tag = xml.remaptag obsolete.remap_tag
xml.remap_name = xml.remapname obsolete.remap_name = xml.remapname
xml.remap_namespace = xml.remapnamespace obsolete.remap_namespace = xml.remapnamespace
+-- new (probably ok)
+
+function xml.cdata(e)
+ if e then
+ local dt = e.dt
+ if dt and #dt == 1 then
+ local first = dt[1]
+ return first.tg == "@cd@" and first.dt[1] or ""
+ end
+ end
+ return ""
+end
+
+function xml.finalizers.xml.cdata(collected)
+ if collected then
+ local e = collected[1]
+ if e then
+ local dt = e.dt
+ if dt and #dt == 1 then
+ local first = dt[1]
+ return first.tg == "@cd@" and first.dt[1] or ""
+ end
+ end
+ end
+ return ""
+end
+
+function xml.insertcomment(e,str,n) -- also insertcdata
+ table.insert(e.dt,n or 1,{
+ tg = "@cm@",
+ ns = "",
+ special = true,
+ at = { },
+ dt = { str },
+ })
+end
+
+function xml.setcdata(e,str) -- also setcomment
+ e.dt = { {
+ tg = "@cd@",
+ ns = "",
+ special = true,
+ at = { },
+ dt = { str },
+ } }
+end
+
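+-- a short usage sketch of the helpers above:
+--
+-- xml.setcdata(e,"raw <data>")   -- e now holds a single @cd@ child
+-- xml.cdata(e)                   -- returns "raw <data>"
+-- xml.insertcomment(e,"remark")  -- inserts an @cm@ node at position 1 of e.dt
+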
end -- of closure
@@ -9391,15 +10241,17 @@ if not modules then modules = { } end modules ['lxml-xml'] = {
license = "see context related readme files"
}
-local concat = string.concat
+local concat = table.concat
+local find = string.find
local xml = xml
-local finalizers = xml.finalizers.xml
-local xmlfilter = xml.filter -- we could inline this one for speed
-local xmltostring = xml.tostring
-local xmlserialize = xml.serialize
-local xmlcollected = xml.collected
+local finalizers = xml.finalizers.xml
+local xmlfilter = xml.filter -- we could inline this one for speed
+local xmltostring = xml.tostring
+local xmlserialize = xml.serialize
+local xmlcollected = xml.collected
+local xmlnewhandlers = xml.newhandlers
local function first(collected) -- wrong ?
return collected and collected[1]
@@ -9413,6 +10265,21 @@ local function all(collected)
return collected
end
+-- local function reverse(collected)
+-- if collected then
+-- local nc = #collected
+-- if nc > 0 then
+-- local reversed, r = { }, 0
+-- for c=nc,1,-1 do
+-- r = r + 1
+-- reversed[r] = collected[c]
+-- end
+-- return reversed
+-- else
+-- return collected
+-- end
+-- end
+-- end
local reverse = table.reversed
@@ -9429,34 +10296,37 @@ local function att(id,name)
end
local function count(collected)
- return (collected and #collected) or 0
+ return collected and #collected or 0
end
local function position(collected,n)
- if collected then
- n = tonumber(n) or 0
- if n < 0 then
- return collected[#collected + n + 1]
- elseif n > 0 then
- return collected[n]
- else
- return collected[1].mi or 0
- end
+ if not collected then
+ return 0
+ end
+ local nc = #collected
+ if nc == 0 then
+ return 0
+ end
+ n = tonumber(n) or 0
+ if n < 0 then
+ return collected[nc + n + 1]
+ elseif n > 0 then
+ return collected[n]
+ else
+ return collected[1].mi or 0
end
end
local function match(collected)
- return (collected and collected[1].mi) or 0 -- match
+ return collected and #collected > 0 and collected[1].mi or 0 -- match
end
local function index(collected)
- if collected then
- return collected[1].ni
- end
+ return collected and #collected > 0 and collected[1].ni or 0 -- 0 is new
end
local function attributes(collected,arguments)
- if collected then
+ if collected and #collected > 0 then
local at = collected[1].at
if arguments then
return at[arguments]
@@ -9467,7 +10337,7 @@ local function attributes(collected,arguments)
end
local function chainattribute(collected,arguments) -- todo: optional levels
- if collected then
+ if collected and #collected > 0 then
local e = collected[1]
while e do
local at = e.at
@@ -9485,108 +10355,169 @@ local function chainattribute(collected,arguments) -- todo: optional levels
return ""
end
-local function raw(collected) -- hybrid
- if collected then
+local function raw(collected) -- hybrid (not much different from text so it might go)
+ if collected and #collected > 0 then
local e = collected[1] or collected
- return (e and xmlserialize(e)) or "" -- only first as we cannot concat function
+ return e and xmltostring(e) or "" -- only first as we cannot concat function
else
return ""
end
end
+--
+
+local xmltexthandler = xmlnewhandlers {
+ name = "string",
+ initialize = function()
+ result = { }
+ return result
+ end,
+ finalize = function()
+ return concat(result)
+ end,
+ handle = function(...)
+ result[#result+1] = concat { ... }
+ end,
+ escape = false,
+}
+
+local function xmltotext(root)
+ local dt = root.dt
+ if not dt then
+ return ""
+ end
+ local nt = #dt -- string or table
+ if nt == 0 then
+ return ""
+ elseif nt == 1 and type(dt[1]) == "string" then
+ return dt[1] -- no escaping of " ' < > &
+ else
+ return xmlserialize(root,xmltexthandler) or ""
+ end
+end
+
+--
+
local function text(collected) -- hybrid
- if collected then
- local e = collected[1] or collected
- return (e and xmltostring(e.dt)) or ""
+ if collected then -- no # test here !
+ local e = collected[1] or collected -- why fallback to element, how about cdata
+ return e and xmltotext(e) or ""
else
return ""
end
end
local function texts(collected)
- if collected then
- local t, n = { }, 0
- for c=1,#collected do
- local e = collected[c]
- if e and e.dt then
- n = n + 1
- t[n] = e.dt
- end
+ if not collected then
+ return { } -- why no nil
+ end
+ local nc = #collected
+ if nc == 0 then
+ return { } -- why no nil
+ end
+ local t, n = { }, 0
+ for c=1,nc do
+ local e = collected[c]
+ if e and e.dt then
+ n = n + 1
+ t[n] = e.dt
end
- return t
end
+ return t
end
local function tag(collected,n)
- if collected then
- local c
- if n == 0 or not n then
- c = collected[1]
- elseif n > 1 then
- c = collected[n]
- else
- c = collected[#collected-n+1]
- end
- return c and c.tg
+ if not collected then
+ return
+ end
+ local nc = #collected
+ if nc == 0 then
+ return
end
+ local c
+ if n == 0 or not n then
+ c = collected[1]
+ elseif n > 1 then
+ c = collected[n]
+ else
+ c = collected[nc-n+1]
+ end
+ return c and c.tg
end
local function name(collected,n)
- if collected then
- local c
- if n == 0 or not n then
- c = collected[1]
- elseif n > 1 then
- c = collected[n]
- else
- c = collected[#collected-n+1]
- end
- if c then
- if c.ns == "" then
- return c.tg
- else
- return c.ns .. ":" .. c.tg
- end
- end
+ if not collected then
+ return
+ end
+ local nc = #collected
+ if nc == 0 then
+ return
+ end
+ local c
+ if n == 0 or not n then
+ c = collected[1]
+ elseif n > 1 then
+ c = collected[n]
+ else
+ c = collected[nc-n+1]
+ end
+ if not c then
+ -- sorry
+ elseif c.ns == "" then
+ return c.tg
+ else
+ return c.ns .. ":" .. c.tg
end
end
local function tags(collected,nonamespace)
- if collected then
- local t, n = { }, 0
- for c=1,#collected do
- local e = collected[c]
- local ns, tg = e.ns, e.tg
- n = n + 1
- if nonamespace or ns == "" then
- t[n] = tg
- else
- t[n] = ns .. ":" .. tg
- end
+ if not collected then
+ return
+ end
+ local nc = #collected
+ if nc == 0 then
+ return
+ end
+ local t, n = { }, 0
+ for c=1,nc do
+ local e = collected[c]
+ local ns, tg = e.ns, e.tg
+ n = n + 1
+ if nonamespace or ns == "" then
+ t[n] = tg
+ else
+ t[n] = ns .. ":" .. tg
end
- return t
end
+ return t
end
-local function empty(collected)
- if collected then
- for c=1,#collected do
- local e = collected[c]
- if e then
- local edt = e.dt
- if edt then
- local n = #edt
- if n == 1 then
- local edk = edt[1]
- local typ = type(edk)
- if typ == "table" then
- return false
- elseif edk ~= "" then -- maybe an extra tester for spacing only
- return false
- end
- elseif n > 1 then
+local function empty(collected,spacesonly)
+ if not collected then
+ return true
+ end
+ local nc = #collected
+ if nc == 0 then
+ return true
+ end
+ for c=1,nc do
+ local e = collected[c]
+ if e then
+ local edt = e.dt
+ if edt then
+ local n = #edt
+ if n == 1 then
+ local edk = edt[1]
+ local typ = type(edk)
+ if typ == "table" then
+ return false
+ elseif edk ~= "" then
+ return false
+ elseif spacesonly and not find(edk,"%S") then
return false
end
+ elseif n > 1 then
+ return false
end
end
end
@@ -9641,14 +10572,14 @@ function xml.raw(id,pattern)
end
end
-function xml.text(id,pattern)
+function xml.text(id,pattern) -- brrr either content or element (when cdata)
if pattern then
-- return text(xmlfilter(id,pattern))
local collected = xmlfilter(id,pattern)
- return (collected and xmltostring(collected[1].dt)) or ""
+ return collected and #collected > 0 and xmltotext(collected[1]) or ""
elseif id then
-- return text(id)
- return xmltostring(id.dt) or ""
+ return xmltotext(id) or ""
else
return ""
end
@@ -9656,6 +10587,8 @@ end
xml.content = text
+--
+
function xml.position(id,pattern,n) -- element
return position(xmlfilter(id,pattern),n)
end
@@ -9664,8 +10597,8 @@ function xml.match(id,pattern) -- number
return match(xmlfilter(id,pattern))
end
-function xml.empty(id,pattern)
- return empty(xmlfilter(id,pattern))
+function xml.empty(id,pattern,spacesonly)
+ return empty(xmlfilter(id,pattern),spacesonly)
end
xml.all = xml.filter
@@ -9719,7 +10652,7 @@ if not modules then modules = { } end modules ['data-ini'] = {
license = "see context related readme files",
}
-local gsub, find, gmatch = string.gsub, string.find, string.gmatch
+local gsub, find, gmatch, char = string.gsub, string.find, string.gmatch, string.char
local concat = table.concat
local next, type = next, type
@@ -9781,7 +10714,7 @@ do
local homedir = osgetenv(ostype == "windows" and 'USERPROFILE' or 'HOME') or ''
if not homedir or homedir == "" then
- homedir = string.char(127) -- we need a value, later we wil trigger on it
+ homedir = char(127) -- we need a value, later we will trigger on it
end
homedir = file.collapsepath(homedir)
@@ -9954,7 +10887,7 @@ if not modules then modules = { } end modules ['data-exp'] = {
license = "see context related readme files",
}
-local format, find, gmatch, lower = string.format, string.find, string.gmatch, string.lower
+local format, find, gmatch, lower, char, sub = string.format, string.find, string.gmatch, string.lower, string.char, string.sub
local concat, sort = table.concat, table.sort
local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
local Ct, Cs, Cc, P, C, S = lpeg.Ct, lpeg.Cs, lpeg.Cc, lpeg.P, lpeg.C, lpeg.S
@@ -9974,19 +10907,6 @@ local resolvers = resolvers
-- all, when working on the main resolver code, I don't want to scroll
-- past this every time. See data-obs.lua for the gsub variant.
--- {a,b,c,d}
--- a,b,c/{p,q,r},d
--- a,b,c/{p,q,r}/d/{x,y,z}//
--- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
--- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
--- a{b,c}{d,e}f
--- {a,b,c,d}
--- {a,b,c/{p,q,r},d}
--- {a,b,c/{p,q,r}/d/{x,y,z}//}
--- {a,b,c/{p,q/{x,y,z}},d/{p,q,r}}
--- {a,b,c/{p,q/{x,y,z},w}v,d/{p,q,r}}
--- {$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,.local,}/web2c}
-
local function f_first(a,b)
local t, n = { }, 0
for s in gmatch(b,"[^,]+") do
@@ -10043,12 +10963,9 @@ local function splitpathexpr(str, newlist, validate) -- I couldn't resist lpeggi
str = lpegmatch(stripper_1,str)
if validate then
for s in gmatch(str,"[^,]+") do
- local restoreslashes = false
- if find(s, "//$") then restoreslashes = true end
s = validate(s)
if s then
n = n + 1 ; t[n] = s
- if restoreslashes and not find(s, "//$") then t[n] = t[n] .. '//' end
end
end
else
@@ -10081,41 +10998,56 @@ function resolvers.expandedpathfromlist(pathlist)
return newlist
end
+-- {a,b,c,d}
+-- a,b,c/{p,q,r},d
+-- a,b,c/{p,q,r}/d/{x,y,z}//
+-- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
+-- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
+-- a{b,c}{d,e}f
+-- {a,b,c,d}
+-- {a,b,c/{p,q,r},d}
+-- {a,b,c/{p,q,r}/d/{x,y,z}//}
+-- {a,b,c/{p,q/{x,y,z}},d/{p,q,r}}
+-- {a,b,c/{p,q/{x,y,z},w}v,d/{p,q,r}}
+-- {$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,.local,}/web2c}
+
local cleanup = lpeg.replacer {
{ "!" , "" },
{ "\\" , "/" },
}
-local homedir
-
-function resolvers.cleanpath(str)
- if not homedir then
- homedir = lpegmatch(cleanup,environment.homedir or "")
- if homedir == string.char(127) or homedir == "" or not lfs.isdir(homedir) then
- if trace_expansions then
- report_expansions("no home dir set, ignoring dependent paths")
- end
- function resolvers.cleanpath(str)
- if find(str,"~") then
- return "" -- special case
- else
- return str and lpegmatch(cleanup,str)
- end
- end
- else
- cleanup = lpeg.replacer {
- { "!" , "" },
- { "\\" , "/" },
- { "~" , homedir },
- }
- function resolvers.cleanpath(str)
- return str and lpegmatch(cleanup,str)
+function resolvers.cleanpath(str) -- tricky, maybe only simple paths
+ local doslashes = (P("\\")/"/" + 1)^0
+ local donegation = (P("!") /"" )^0
+ local homedir = lpegmatch(Cs(donegation * doslashes),environment.homedir or "")
+ if homedir == "~" or homedir == "" or not lfs.isdir(homedir) then
+ if trace_expansions then
+ report_expansions("no home dir set, ignoring dependent paths")
+ end
+ function resolvers.cleanpath(str)
+ if not str or find(str,"~") then
+ return "" -- special case
+ else
+ return lpegmatch(cleanup,str)
end
end
+ else
+ local dohome = ((P("~")+P("$HOME"))/homedir)^0
+ local cleanup = Cs(donegation * dohome * doslashes)
+ function resolvers.cleanpath(str)
+ return str and lpegmatch(cleanup,str) or ""
+ end
end
return resolvers.cleanpath(str)
end
+-- print(resolvers.cleanpath(""))
+-- print(resolvers.cleanpath("!"))
+-- print(resolvers.cleanpath("~"))
+-- print(resolvers.cleanpath("~/test"))
+-- print(resolvers.cleanpath("!~/test"))
+-- print(resolvers.cleanpath("~/test~test"))
+
-- This one strips quotes and funny tokens.
local expandhome = P("~") / "$HOME" -- environment.homedir
@@ -10140,8 +11072,8 @@ end
local cache = { }
----- splitter = Ct(lpeg.splitat(S(ostype == "windows" and ";" or ":;"))) -- maybe add ,
-local splitter = Ct(lpeg.splitat(";")) -- as we move towards urls, prefixes and use tables we no longer do :
+----- splitter = lpeg.tsplitat(S(ostype == "windows" and ";" or ":;")) -- maybe add ,
+local splitter = lpeg.tsplitat(";") -- as we move towards urls, prefixes and use tables we no longer do :
local backslashswapper = lpeg.replacer("\\","/")
@@ -10201,13 +11133,22 @@ end
-local weird = P(".")^1 + lpeg.anywhere(S("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t"))
+-- a lot of this caching can be stripped away when we have ssd's everywhere
+--
+-- we could cache all the (sub)paths here if needed
local attributes, directory = lfs.attributes, lfs.dir
+local weird = P(".")^1 + lpeg.anywhere(S("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t"))
+local timer = { }
+local scanned = { }
+local nofscans = 0
+local scancache = { }
+
local function scan(files,spec,path,n,m,r)
- local full = (path == "" and spec) or (spec .. path .. '/')
- local dirs, nofdirs = { }, 0
+ local full = (path == "" and spec) or (spec .. path .. '/')
+ local dirs = { }
+ local nofdirs = 0
for name in directory(full) do
if not lpegmatch(weird,name) then
local mode = attributes(full..name,'mode')
@@ -10245,25 +11186,122 @@ local function scan(files,spec,path,n,m,r)
files, n, m, r = scan(files,spec,dirs[i],n,m,r)
end
end
+ scancache[sub(full,1,-2)] = files
return files, n, m, r
end
-function resolvers.scanfiles(path,branch)
+local fullcache = { }
+
+function resolvers.scanfiles(path,branch,usecache)
+ statistics.starttiming(timer)
+ local realpath = resolvers.resolve(path) -- no shortcut
+ if usecache then
+ local files = fullcache[realpath]
+ if files then
+ if trace_locating then
+ report_expansions("using cached scan of path '%s', branch '%s'",path,branch or path)
+ end
+ return files
+ end
+ end
if trace_locating then
- report_expansions("scanning path '%s', branch '%s'",path, branch or path)
+ report_expansions("scanning path '%s', branch '%s'",path,branch or path)
end
- local realpath = resolvers.resolve(path) -- no shortcut
local files, n, m, r = scan({ },realpath .. '/',"",0,0,0)
- files.__path__ = path -- can be selfautoparent:texmf-whatever
- files.__files__ = n
- files.__directories__ = m
- files.__remappings__ = r
+ files.__path__ = path -- can be selfautoparent:texmf-whatever
+ files.__files__ = n
+ files.__directories__ = m
+ files.__remappings__ = r
if trace_locating then
report_expansions("%s files found on %s directories with %s uppercase remappings",n,m,r)
end
+ if usecache then
+ scanned[#scanned+1] = realpath
+ fullcache[realpath] = files
+ end
+ nofscans = nofscans + 1
+ statistics.stoptiming(timer)
+ return files
+end
+
+local function simplescan(files,spec,path) -- first match only, no map and such
+ local full = (path == "" and spec) or (spec .. path .. '/')
+ local dirs = { }
+ local nofdirs = 0
+ for name in directory(full) do
+ if not lpegmatch(weird,name) then
+ local mode = attributes(full..name,'mode')
+ if mode == 'file' then
+ if not files[name] then
+ -- only first match
+ files[name] = path
+ end
+ elseif mode == 'directory' then
+ nofdirs = nofdirs + 1
+ if path ~= "" then
+ dirs[nofdirs] = path..'/'..name
+ else
+ dirs[nofdirs] = name
+ end
+ end
+ end
+ end
+ if nofdirs > 0 then
+ sort(dirs)
+ for i=1,nofdirs do
+ files = simplescan(files,spec,dirs[i])
+ end
+ end
+ return files
+end
+
+local simplecache = { }
+local nofsharedscans = 0
+
+function resolvers.simplescanfiles(path,branch,usecache)
+ statistics.starttiming(timer)
+ local realpath = resolvers.resolve(path) -- no shortcut
+ if usecache then
+ local files = simplecache[realpath]
+ if not files then
+ files = scancache[realpath]
+ if files then
+ nofsharedscans = nofsharedscans + 1
+ end
+ end
+ if files then
+ if trace_locating then
+ report_expansions("using cached scan of path '%s', branch '%s'",path,branch or path)
+ end
+ return files
+ end
+ end
+ if trace_locating then
+ report_expansions("scanning path '%s', branch '%s'",path,branch or path)
+ end
+ local files = simplescan({ },realpath .. '/',"")
+ if trace_locating then
+ report_expansions("%s files found",table.count(files))
+ end
+ if usecache then
+ scanned[#scanned+1] = realpath
+ simplecache[realpath] = files
+ end
+ nofscans = nofscans + 1
+ statistics.stoptiming(timer)
return files
end
+function resolvers.scandata()
+ table.sort(scanned)
+ return {
+ n = nofscans,
+ shared = nofsharedscans,
+ time = statistics.elapsedtime(timer),
+ paths = scanned,
+ }
+end
+
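+-- usage sketch (the path is only an example): with usecache set, a second scan
+-- of the same resolved path is served from memory, and scandata reports on it:
+--
+-- local files = resolvers.scanfiles("selfautoparent:texmf-local",nil,true)
+-- local info  = resolvers.scandata() -- { n = ..., shared = ..., time = ..., paths = ... }
+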
end -- of closure
@@ -10361,12 +11399,12 @@ local relations = allocate { -- todo: handlers also here
mp = {
names = { "mp" },
variable = 'MPINPUTS',
- suffixes = { 'mp' },
+ suffixes = { 'mp', 'mpvi', 'mpiv', 'mpii' },
},
tex = {
names = { "tex" },
variable = 'TEXINPUTS',
- suffixes = { 'tex', "mkiv", "mkiv", "mkii" },
+ suffixes = { 'tex', "mkvi", "mkiv", "mkii" },
},
icc = {
names = { "icc", "icc profile", "icc profiles" },
@@ -10503,7 +11541,7 @@ end
resolvers.updaterelations() -- push this in the metatable -> newindex
local function simplified(t,k)
- return rawget(t,lower(gsub(k," ","")))
+ return k and rawget(t,lower(gsub(k," ",""))) or nil
end
setmetatableindex(formats, simplified)
@@ -10589,6 +11627,7 @@ luatools with a recache feature.</p>
--ldx]]--
local format, lower, gsub, concat = string.format, string.lower, string.gsub, table.concat
+local serialize, serializetofile = table.serialize, table.tofile
local mkdirs, isdir = dir.mkdirs, lfs.isdir
local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
@@ -10651,11 +11690,7 @@ local function identify()
end
elseif not writable and caches.force then
local cacheparent = file.dirname(cachepath)
- -- TH: added 'or true' for deeply buried non-existent caches.
- -- file.is_writable() is not really important here, since this
- -- branch is only checked for as-yet non-existent paths, and
- -- it guards against mkdirs() failing, anyway.
- if file.is_writable(cacheparent) or true then
+ if file.is_writable(cacheparent) and true then -- we go on anyway (needed for mojca's kind of paths)
if not caches.ask or io.ask(format("\nShould I create the cache path %s?",cachepath), "no", { "yes", "no" }) == "yes" then
mkdirs(cachepath)
if isdir(cachepath) and file.is_writable(cachepath) then
@@ -10746,7 +11781,7 @@ function caches.usedpaths()
end
function caches.configfiles()
- return table.concat(resolvers.instance.specification,";")
+ return concat(resolvers.instance.specification,";")
end
function caches.hashed(tree)
@@ -10870,9 +11905,9 @@ function caches.savedata(filepath,filename,data,raw)
end
data.cache_uuid = os.uuid()
if caches.direct then
- file.savedata(tmaname,table.serialize(data,true,saveoptions))
+ file.savedata(tmaname,serialize(data,true,saveoptions))
else
- table.tofile(tmaname,data,true,saveoptions)
+ serializetofile(tmaname,data,true,saveoptions)
end
utilities.lua.compile(tmaname,tmcname)
end
@@ -10939,7 +11974,7 @@ function caches.savecontent(cachename,dataname,content)
content = content,
uuid = os.uuid(),
}
- local ok = io.savedata(luaname,table.serialize(data,true))
+ local ok = io.savedata(luaname,serialize(data,true))
if ok then
if trace_locating then
report_resolvers("category '%s', cachename '%s' saved in '%s'",dataname,cachename,luaname)
@@ -11112,7 +12147,7 @@ if not modules then modules = { } end modules ['data-res'] = {
-- instance but for practical purposes we now avoid this and use a
-- instance variable. We always have one instance active (sort of global).
--- todo: cache:/// home:///
+-- todo: cache:/// home:/// selfautoparent:/// (sometime end 2012)
local format, gsub, find, lower, upper, match, gmatch = string.format, string.gsub, string.find, string.lower, string.upper, string.match, string.gmatch
local concat, insert, sortedkeys = table.concat, table.insert, table.sortedkeys
@@ -11155,7 +12190,58 @@ resolvers.criticalvars = allocate { "SELFAUTOLOC", "SELFAUTODIR", "SELFAUTOPARE
resolvers.luacnfname = 'texmfcnf.lua'
resolvers.luacnfstate = "unknown"
-resolvers.luacnfspec = '{selfautodir:,selfautoparent:};{selfautodir:,selfautoparent:}{/share,}/texmf{-local,};{selfautodir:,selfautoparent:}{/share,}/texmf{-local,}/web2c'
+-- The web2c tex binaries as well as kpse have built in paths for the configuration
+-- files and there can be a depressing truckload of them. This is actually the weak
+-- spot of a distribution. So we don't want:
+--
+-- resolvers.luacnfspec = '{$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,}/web2c}'
+--
+-- but instead use:
+--
+-- resolvers.luacnfspec = 'selfautoparent:{/texmf{-local,}{,/web2c}}'
+--
+-- which does not make texlive happy as there is a texmf-local tree one level up
+-- (sigh), so we need this. (We can assume web2c as mkiv does not run on older
+-- texlives anyway.)
+--
+-- texlive:
+--
+-- selfautodir:
+-- selfautoparent:
+-- selfautodir:share/texmf-local/web2c
+-- selfautodir:share/texmf/web2c
+-- selfautodir:texmf-local/web2c
+-- selfautodir:texmf/web2c
+-- selfautoparent:share/texmf-local/web2c
+-- selfautoparent:share/texmf/web2c
+-- selfautoparent:texmf-local/web2c
+-- selfautoparent:texmf/web2c
+--
+-- minimals:
+--
+-- home:texmf/web2c
+-- selfautoparent:texmf-local/web2c
+-- selfautoparent:texmf-context/web2c
+-- selfautoparent:texmf/web2c
+
+if this_is_texlive then
+ -- resolvers.luacnfspec = '{selfautodir:,selfautoparent:}{,{/share,}/texmf{-local,}/web2c}'
+ -- resolvers.luacnfspec = '{selfautodir:{/share,}/texmf-local/web2c,selfautoparent:{/share,}/texmf{-local,}/web2c}'
+ -- resolvers.luacnfspec = 'selfautodir:/texmf-local/web2c;selfautoparent:/texmf{-local,}/web2c'
+ resolvers.luacnfspec = 'selfautodir:;selfautoparent:;{selfautodir:,selfautoparent:}{/share,}/texmf{-local,}/web2c'
+else
+ resolvers.luacnfspec = 'home:texmf/web2c;selfautoparent:texmf{-local,-context,}/web2c'
+end
+
+-- which (as we want users to use the web2c path) can be simplified to this:
+--
+-- if environment and environment.ownpath and string.find(environment.ownpath,"[\\/]texlive[\\/]") then
+-- resolvers.luacnfspec = 'selfautodir:/texmf-local/web2c,selfautoparent:/texmf-local/web2c,selfautoparent:/texmf/web2c'
+-- else
+-- resolvers.luacnfspec = 'selfautoparent:/texmf-local/web2c,selfautoparent:/texmf/web2c'
+-- end
+
+
local unset_variable = "unset"
@@ -11177,7 +12263,7 @@ local instance = resolvers.instance or nil -- the current one (fast access)
function resolvers.setenv(key,value,raw)
if instance then
-- this one will be consulted first when we stay inside
- -- the current environment
+ -- the current environment (prefixes are not resolved here)
instance.environment[key] = value
-- we feed back into the environment, and as this is used
-- by other applications (via os.execute) we need to make
@@ -11257,6 +12343,7 @@ function resolvers.newinstance() -- todo: all vars will become lowercase and alp
remember = true,
diskcache = true,
renewcache = false,
+ renewtree = false,
loaderror = false,
savelists = true,
pattern = nil, -- lists
@@ -11348,7 +12435,7 @@ local function makepathexpression(str)
end
end
-local function reportcriticalvariables()
+local function reportcriticalvariables(cnfspec)
if trace_locating then
for i=1,#resolvers.criticalvars do
local k = resolvers.criticalvars[i]
@@ -11356,6 +12443,14 @@ local function reportcriticalvariables()
report_resolving("variable '%s' set to '%s'",k,v)
end
report_resolving()
+ if cnfspec then
+ if type(cnfspec) == "table" then
+ report_resolving("using configuration specification '%s'",concat(cnfspec,","))
+ else
+ report_resolving("using configuration specification '%s'",cnfspec)
+ end
+ end
+ report_resolving()
end
reportcriticalvariables = function() end
end
@@ -11370,7 +12465,7 @@ local function identify_configuration_files()
else
resolvers.luacnfstate = "environment"
end
- reportcriticalvariables()
+ reportcriticalvariables(cnfspec)
local cnfpaths = expandedpathfromlist(resolvers.splitpath(cnfspec))
local luacnfname = resolvers.luacnfname
for i=1,#cnfpaths do
@@ -11406,6 +12501,19 @@ local function load_configuration_files()
if blob then
local setups = instance.setups
local data = blob()
+ local parent = data and data.parent
+ if parent then
+ local filename = filejoin(pathname,parent)
+ local realname = resolvers.resolve(filename) -- no shortcut
+ local blob = loadfile(realname)
+ if blob then
+ local parentdata = blob()
+ if parentdata then
+ report_resolving("loading configuration file '%s'",filename)
+ data = table.merged(parentdata,data)
+ end
+ end
+ end
data = data and data.content
if data then
if trace_locating then
@@ -11495,11 +12603,6 @@ local function locate_file_databases()
local runtime = stripped == path
path = resolvers.cleanpath(path)
local spec = resolvers.splitmethod(stripped)
- -- TH Original did this the other way around (elseif and if tests
- -- reversed) but then the else branch was never reached, as 'runtime'
- -- would effectively be ignored. In turn, that meant that the wrong
- -- locator method was used, such that tree: and file: were treated
- -- identically (cached and no runtime search).
if runtime and (spec.noscheme or spec.scheme == "file") then
stripped = "tree:///" .. stripped
elseif spec.scheme == "cache" or spec.scheme == "file" then
@@ -11507,7 +12610,7 @@ local function locate_file_databases()
end
if trace_locating then
if runtime then
- report_resolving("locating list of '%s' (runtime)",path)
+ report_resolving("locating list of '%s' (runtime) (%s)",path,stripped)
else
report_resolving("locating list of '%s' (cached)",path)
end
@@ -11551,6 +12654,39 @@ local function save_file_databases() -- will become cachers
end
end
+function resolvers.renew(hashname)
+ if hashname and hashname ~= "" then
+ local expanded = resolvers.expansion(hashname) or ""
+ if expanded ~= "" then
+ if trace_locating then
+ report_resolving("identifying tree '%s' from '%s'",expanded,hashname)
+ end
+ hashname = expanded
+ else
+ if trace_locating then
+ report_resolving("identifying tree '%s'",hashname)
+ end
+ end
+ local realpath = resolvers.resolve(hashname)
+ if lfs.isdir(realpath) then
+ if trace_locating then
+ report_resolving("using path '%s'",realpath)
+ end
+ methodhandler('generators',hashname)
+ -- could be shared
+ local content = instance.files[hashname]
+ caches.collapsecontent(content)
+ if trace_locating then
+ report_resolving("saving tree '%s'",hashname)
+ end
+ caches.savecontent(hashname,"files",content)
+ -- till here
+ else
+ report_resolving("invalid path '%s'",realpath)
+ end
+ end
+end
+
local function load_databases()
locate_file_databases()
if instance.diskcache and not instance.renewcache then
@@ -11838,6 +12974,7 @@ local function collect_files(names)
if dname == "" or find(dname,"^%.") then
dname = false
else
+ dname = gsub(dname,"*","%.*")
dname = "/" .. dname .. "$"
end
local hashes = instance.hashes
@@ -11897,9 +13034,22 @@ local function collect_files(names)
return noffiles > 0 and filelist or nil
end
-function resolvers.registerintrees(name)
- if not find(name,"^%.") then
- instance.foundintrees[name] = (instance.foundintrees[name] or 0) + 1 -- maybe only one
+local fit = { }
+
+function resolvers.registerintrees(filename,format,filetype,usedmethod,foundname)
+ local foundintrees = instance.foundintrees
+ if usedmethod == "direct" and filename == foundname and fit[foundname] then
+ -- just an extra lookup after a test on presence
+ else
+ local t = {
+ filename = filename,
+ format = format ~= "" and format or nil,
+ filetype = filetype ~= "" and filetype or nil,
+ usedmethod = usedmethod,
+ foundname = foundname,
+ }
+ fit[foundname] = t
+ foundintrees[#foundintrees+1] = t
end
end
@@ -11919,297 +13069,423 @@ end
local preparetreepattern = Cs((P(".")/"%%." + P("-")/"%%-" + P(1))^0 * Cc("$"))
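+-- the preparetreepattern above escapes '.' and '-' and appends '$', so for
+-- example (illustrative input) "foo-1.2.tex" becomes "foo%-1%.2%.tex$"; it is
+-- used below to match the tail of names found in the tree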
--- this one is split in smaller functions but it needs testing
+-- -- -- begin of main file search routing -- -- -- needs checking as previous has been patched
-local function collect_instance_files(filename,askedformat,allresults) -- todo : plugin (scanners, checkers etc)
- local result = { }
- local stamp = nil
- askedformat = askedformat or ""
- filename = collapsepath(filename)
- -- speed up / beware: format problem
- if instance.remember and not allresults then
- stamp = filename .. "--" .. askedformat
- if instance.found[stamp] then
+local collect_instance_files
+
+local function find_analyze(filename,askedformat,allresults)
+ local filetype, wantedfiles, ext = '', { }, fileextname(filename)
+ -- too tricky as filename can be bla.1.2.3:
+ --
+ -- if not suffixmap[ext] then
+ -- wantedfiles[#wantedfiles+1] = filename
+ -- end
+ wantedfiles[#wantedfiles+1] = filename
+ if askedformat == "" then
+ if ext == "" or not suffixmap[ext] then
+ local defaultsuffixes = resolvers.defaultsuffixes
+ for i=1,#defaultsuffixes do
+ local forcedname = filename .. '.' .. defaultsuffixes[i]
+ wantedfiles[#wantedfiles+1] = forcedname
+ filetype = resolvers.formatofsuffix(forcedname)
+ if trace_locating then
+ report_resolving("forcing filetype '%s'",filetype)
+ end
+ end
+ else
+ filetype = resolvers.formatofsuffix(filename)
if trace_locating then
- report_resolving("remembered file '%s'",filename)
+ report_resolving("using suffix based filetype '%s'",filetype)
end
- resolvers.registerintrees(filename) -- for tracing used files
- return instance.found[stamp]
end
- end
- if not dangerous[askedformat] then
- if isreadable(filename) then
- if trace_detail then
- report_resolving("file '%s' found directly",filename)
- end
- if stamp then
- instance.found[stamp] = { filename }
+ else
+ if ext == "" or not suffixmap[ext] then
+ local format_suffixes = suffixes[askedformat]
+ if format_suffixes then
+ for i=1,#format_suffixes do
+ wantedfiles[#wantedfiles+1] = filename .. "." .. format_suffixes[i]
+ end
end
- return { filename }
+ end
+ filetype = askedformat
+ if trace_locating then
+ report_resolving("using given filetype '%s'",filetype)
end
end
+ return filetype, wantedfiles
+end
+
+local function find_direct(filename,allresults)
+ if not dangerous[askedformat] and isreadable(filename) then
+ if trace_detail then
+ report_resolving("file '%s' found directly",filename)
+ end
+ return "direct", { filename }
+ end
+end
+
+local function find_wildcard(filename,allresults)
if find(filename,'%*') then
if trace_locating then
report_resolving("checking wildcard '%s'", filename)
end
- result = resolvers.findwildcardfiles(filename) -- we can use th elocal
- elseif file.is_qualified_path(filename) then
- if isreadable(filename) then
- if trace_locating then
- report_resolving("qualified name '%s'", filename)
- end
- result = { filename }
- else
- local forcedname, ok, suffix = "", false, fileextname(filename)
- if suffix == "" then -- why
- local format_suffixes = askedformat == "" and resolvers.defaultsuffixes or suffixes[askedformat]
- if format_suffixes then
- for i=1,#format_suffixes do
- local s = format_suffixes[i]
- forcedname = filename .. "." .. s
- if isreadable(forcedname) then
- if trace_locating then
- report_resolving("no suffix, forcing format filetype '%s'", s)
- end
- result, ok = { forcedname }, true
- break
- end
- end
- end
- end
- if not ok and suffix ~= "" then
- -- try to find in tree (no suffix manipulation), here we search for the
- -- matching last part of the name
- local basename = filebasename(filename)
- local pattern = lpegmatch(preparetreepattern,filename)
- -- messy .. to be sorted out
- local savedformat = askedformat
- local format = savedformat or ""
- if format == "" then
- askedformat = resolvers.formatofsuffix(suffix)
- end
- if not format then
- askedformat = "othertextfiles" -- kind of everything, maybe texinput is better
- end
- --
- if basename ~= filename then
- local resolved = collect_instance_files(basename,askedformat,allresults)
- if #result == 0 then -- shouldn't this be resolved ?
- local lowered = lower(basename)
- if filename ~= lowered then
- resolved = collect_instance_files(lowered,askedformat,allresults)
- end
- end
- resolvers.format = savedformat
- --
- for r=1,#resolved do
- local rr = resolved[r]
- if find(rr,pattern) then
- result[#result+1], ok = rr, true
- end
- end
- end
- -- a real wildcard:
- --
- -- if not ok then
- -- local filelist = collect_files({basename})
- -- for f=1,#filelist do
- -- local ff = filelist[f][3] or ""
- -- if find(ff,pattern) then
- -- result[#result+1], ok = ff, true
- -- end
- -- end
- -- end
- end
- if not ok and trace_locating then
- report_resolving("qualified name '%s'", filename)
- end
+ local method, result = resolvers.findwildcardfiles(filename)
+ if result then
+ return "wildcard", result
end
- else
- -- search spec
- local filetype, done, wantedfiles, ext = '', false, { }, fileextname(filename)
- -- -- tricky as filename can be bla.1.2.3
- -- if not suffixmap[ext] then --- probably needs to be done elsewhere too
- -- wantedfiles[#wantedfiles+1] = filename
- -- end
- wantedfiles[#wantedfiles+1] = filename
- if askedformat == "" then
- if ext == "" or not suffixmap[ext] then
- local defaultsuffixes = resolvers.defaultsuffixes
- for i=1,#defaultsuffixes do
- local forcedname = filename .. '.' .. defaultsuffixes[i]
- wantedfiles[#wantedfiles+1] = forcedname
- filetype = resolvers.formatofsuffix(forcedname)
+ end
+end
+
+local function find_qualified(filename,allresults) -- this one will be split too
+ if not file.is_qualified_path(filename) then
+ return
+ end
+ if trace_locating then
+ report_resolving("checking qualified name '%s'", filename)
+ end
+ if isreadable(filename) then
+ if trace_detail then
+ report_resolving("qualified file '%s' found", filename)
+ end
+ return "qualified", { filename }
+ end
+ if trace_detail then
+ report_resolving("locating qualified file '%s'", filename)
+ end
+ local forcedname, suffix = "", fileextname(filename)
+ if suffix == "" then -- why
+ local format_suffixes = askedformat == "" and resolvers.defaultsuffixes or suffixes[askedformat]
+ if format_suffixes then
+ for i=1,#format_suffixes do
+ local s = format_suffixes[i]
+ forcedname = filename .. "." .. s
+ if isreadable(forcedname) then
if trace_locating then
- report_resolving("forcing filetype '%s'",filetype)
+ report_resolving("no suffix, forcing format filetype '%s'", s)
end
+ return "qualified", { forcedname }
end
- else
- filetype = resolvers.formatofsuffix(filename)
- if trace_locating then
- report_resolving("using suffix based filetype '%s'",filetype)
+ end
+ end
+ end
+ if suffix and suffix ~= "" then
+ -- try to find in tree (no suffix manipulation), here we search for the
+ -- matching last part of the name
+ local basename = filebasename(filename)
+ local pattern = lpegmatch(preparetreepattern,filename)
+ -- messy .. to be sorted out
+ local savedformat = askedformat
+ local format = savedformat or ""
+ if format == "" then
+ askedformat = resolvers.formatofsuffix(suffix)
+ end
+ if not format then
+ askedformat = "othertextfiles" -- kind of everything, maybe all
+ end
+ --
+ if basename ~= filename then
+ local resolved = collect_instance_files(basename,askedformat,allresults)
+ if #resolved == 0 then
+ local lowered = lower(basename)
+ if filename ~= lowered then
+ resolved = collect_instance_files(lowered,askedformat,allresults)
end
end
- else
- if ext == "" or not suffixmap[ext] then
- local format_suffixes = suffixes[askedformat]
- if format_suffixes then
- for i=1,#format_suffixes do
- wantedfiles[#wantedfiles+1] = filename .. "." .. format_suffixes[i]
+ resolvers.format = savedformat
+ --
+ if #resolved > 0 then
+ local result = { }
+ for r=1,#resolved do
+ local rr = resolved[r]
+ if find(rr,pattern) then
+ result[#result+1] = rr
end
end
+ if #result > 0 then
+ return "qualified", result
+ end
end
- filetype = askedformat
- if trace_locating then
- report_resolving("using given filetype '%s'",filetype)
- end
end
- local typespec = resolvers.variableofformat(filetype)
- local pathlist = resolvers.expandedpathlist(typespec)
- if not pathlist or #pathlist == 0 then
- -- no pathlist, access check only / todo == wildcard
- if trace_detail then
- report_resolving("checking filename '%s', filetype '%s', wanted files '%s'",filename, filetype or '?',concat(wantedfiles," | "))
+ -- a real wildcard:
+ --
+ -- local filelist = collect_files({basename})
+ -- result = { }
+ -- for f=1,#filelist do
+ -- local ff = filelist[f][3] or ""
+ -- if find(ff,pattern) then
+ -- result[#result+1], ok = ff, true
+ -- end
+ -- end
+ -- if #result > 0 then
+ -- return "qualified", result
+ -- end
+ end
+end
+
+local function check_subpath(fname)
+ if isreadable(fname) then
+ if trace_detail then
+ report_resolving("found '%s' by deep scanning",fname)
+ end
+ return fname
+ end
+end
+
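+-- find_intree walks the expanded path list for the wanted files: entries from
+-- the cached file databases are matched against a path expression first, and
+-- only when that fails the file system is consulted (paths ending in // and
+-- not prefixed by !! trigger a deep scan via simplescanfiles)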
+local function find_intree(filename,filetype,wantedfiles,allresults)
+ local typespec = resolvers.variableofformat(filetype)
+ local pathlist = resolvers.expandedpathlist(typespec)
+ local method = "intree"
+ if pathlist and #pathlist > 0 then
+ -- list search
+ local filelist = collect_files(wantedfiles)
+ local dirlist = { }
+ if filelist then
+ for i=1,#filelist do
+ dirlist[i] = filedirname(filelist[i][3]) .. "/" -- was [2] .. gamble
end
- for k=1,#wantedfiles do
- local fname = wantedfiles[k]
- if fname and isreadable(fname) then
- filename, done = fname, true
- result[#result+1] = filejoin('.',fname)
- break
+ end
+ if trace_detail then
+ report_resolving("checking filename '%s'",filename)
+ end
+ local result = { }
+ for k=1,#pathlist do
+ local path = pathlist[k]
+ local pathname = lpegmatch(inhibitstripper,path)
+ local doscan = path == pathname -- no ^!!
+ if not find (pathname,'//$') then
+ doscan = false -- we check directly on the path
+ end
+ local done = false
+ -- using file list
+ if filelist then -- database
+ -- compare list entries with permitted pattern -- /xx /xx//
+ local expression = makepathexpression(pathname)
+ if trace_detail then
+ report_resolving("using pattern '%s' for path '%s'",expression,pathname)
end
- end
- -- this is actually 'other text files' or 'any' or 'whatever'
- local filelist = collect_files(wantedfiles)
- local fl = filelist and filelist[1]
- if fl then
- filename = fl[3] -- not local?
- result[#result+1] = resolvers.resolve(filename)
- done = true
- end
- else
- -- list search
- local filelist = collect_files(wantedfiles)
- local dirlist = { }
- if filelist then
- for i=1,#filelist do
- dirlist[i] = filedirname(filelist[i][3]) .. "/" -- was [2] .. gamble
- end
- end
- if trace_detail then
- report_resolving("checking filename '%s'",filename)
- end
- for k=1,#pathlist do
- local path = pathlist[k]
- local pathname = lpegmatch(inhibitstripper,path)
- local doscan = path == pathname -- no ^!!
- if not find (pathname, '//$') then doscan = false end
- done = false
- -- using file list
- if filelist then
- -- compare list entries with permitted pattern -- /xx /xx//
- local expression = makepathexpression(pathname)
- if trace_detail then
- report_resolving("using pattern '%s' for path '%s'",expression,pathname)
- end
- for k=1,#filelist do
- local fl = filelist[k]
- local f = fl[2]
- local d = dirlist[k]
- if find(d,expression) then
- -- todo, test for readable
- result[#result+1] = resolvers.resolve(fl[3]) -- no shortcut
- done = true
- if allresults then
- if trace_detail then
- report_resolving("match to '%s' in hash for file '%s' and path '%s', continue scanning",expression,f,d)
- end
- else
- if trace_detail then
- report_resolving("match to '%s' in hash for file '%s' and path '%s', quit scanning",expression,f,d)
- end
- break
+ for k=1,#filelist do
+ local fl = filelist[k]
+ local f = fl[2]
+ local d = dirlist[k]
+ if find(d,expression) then
+ -- todo, test for readable
+ result[#result+1] = resolvers.resolve(fl[3]) -- no shortcut
+ done = true
+ if allresults then
+ if trace_detail then
+ report_resolving("match to '%s' in hash for file '%s' and path '%s', continue scanning",expression,f,d)
end
- elseif trace_detail then
- report_resolving("no match to '%s' in hash for file '%s' and path '%s'",expression,f,d)
+ else
+ if trace_detail then
+ report_resolving("match to '%s' in hash for file '%s' and path '%s', quit scanning",expression,f,d)
+ end
+ break
end
+ elseif trace_detail then
+ report_resolving("no match to '%s' in hash for file '%s' and path '%s'",expression,f,d)
end
end
- if not done then
- -- check if on disk / unchecked / does not work at all / also zips
- -- TH perhaps it did not work because of missing resolvers.resolve()
- -- and resolvers.scanfiles() calls ...
- pathname = resolvers.resolve(pathname)
- local scheme = url.hasscheme(pathname)
- if not scheme or scheme == "file" then
- local pname = gsub(pathname,"%.%*$",'')
- if not find(pname,"%*") then
- local ppname = gsub(pname,"/+$","")
- if can_be_dir(ppname) then
- local files = {}
- if doscan then files = resolvers.scanfiles(ppname) end
+ end
+ if done then
+ method = "database"
+ else
+ method = "filesystem" -- bonus, even when !! is specified
+ pathname = gsub(pathname,"/+$","")
+ pathname = resolvers.resolve(pathname)
+ local scheme = url.hasscheme(pathname)
+ if not scheme or scheme == "file" then
+ local pname = gsub(pathname,"%.%*$",'')
+ if not find(pname,"%*") then
+ if can_be_dir(pname) then
+ -- quick root scan first
+ for k=1,#wantedfiles do
+ local w = wantedfiles[k]
+ local fname = check_subpath(filejoin(pname,w))
+ if fname then
+ result[#result+1] = fname
+ done = true
+ if not allresults then
+ break
+ end
+ end
+ end
+ if not done and doscan then
+ -- collect files in path (and cache the result)
+ local files = resolvers.simplescanfiles(pname,false,true)
for k=1,#wantedfiles do
local w = wantedfiles[k]
- local subpath = ''
- if files[w] then
- subpath = type(files[w]) == "table" and files[w][1] or files[w]
- end
- local fname = filejoin(ppname,subpath,w)
- if isreadable(fname) then
- if trace_detail then
- report_resolving("found '%s' by scanning",fname)
+ local subpath = files[w]
+ if not subpath or subpath == "" then
+ -- rootscan already done
+ elseif type(subpath) == "string" then
+ local fname = check_subpath(filejoin(pname,subpath,w))
+ if fname then
+ result[#result+1] = fname
+ done = true
+ if not allresults then
+ break
+ end
+ end
+ else
+ for i=1,#subpath do
+ local sp = subpath[i]
+ if sp == "" then
+ -- roottest already done
+ else
+ local fname = check_subpath(filejoin(pname,sp,w))
+ if fname then
+ result[#result+1] = fname
+ done = true
+ if not allresults then
+ break
+ end
+ end
+ end
+ end
+ if done and not allresults then
+ break
end
- result[#result+1] = fname
- done = true
- if not allresults then break end
end
end
- else
- -- no access needed for non existing path, speedup (esp in large tree with lots of fake)
end
end
+ else
+ -- no access needed for non existing path, speedup (esp in large tree with lots of fake)
end
end
- if not done and doscan then
- -- todo: slow path scanning ... although we now have tree:// supported in $TEXMF
- end
- if done and not allresults then break end
+ end
+ -- todo recursive scanning
+ if done and not allresults then
+ break
end
end
+ if #result > 0 then
+ return method, result
+ end
end
- for k=1,#result do
- local rk = collapsepath(result[k])
- result[k] = rk
- resolvers.registerintrees(rk) -- for tracing used files
+end
+
+local function find_onpath(filename,filetype,wantedfiles,allresults)
+ if trace_detail then
+ report_resolving("checking filename '%s', filetype '%s', wanted files '%s'",filename, filetype or '?',concat(wantedfiles," | "))
+ end
+ local result = { }
+ for k=1,#wantedfiles do
+ local fname = wantedfiles[k]
+ if fname and isreadable(fname) then
+ filename = fname
+ result[#result+1] = filejoin('.',fname)
+ if not allresults then
+ break
+ end
+ end
end
- if stamp then
- instance.found[stamp] = result
+ if #result > 0 then
+ return "onpath", result
end
- return result
end
--- -- -- begin of main file search routing -- -- --
-
-
-
-
-
-
-
+local function find_otherwise(filename,filetype,wantedfiles,allresults) -- other text files | any | whatever
+ local filelist = collect_files(wantedfiles)
+ local fl = filelist and filelist[1]
+ if fl then
+ return "otherwise", { resolvers.resolve(fl[3]) } -- filename
+ end
+end
+-- we could have a loop over the 6 functions but then we'd have to
+-- always analyze
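+-- lookup order: find_direct, find_wildcard, find_qualified, find_intree,
+-- find_onpath and finally find_otherwise; the first method that returns a
+-- result wins, unless allresults is set, in which case all of them run and
+-- their results are merged (only used for tracing)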
+collect_instance_files = function(filename,askedformat,allresults) -- uses nested
+ askedformat = askedformat or ""
+ filename = collapsepath(filename)
+ if allresults then
+ -- no need for caching, only used for tracing
+ local filetype, wantedfiles = find_analyze(filename,askedformat)
+ local results = {
+ { find_direct (filename,true) },
+ { find_wildcard (filename,true) },
+ { find_qualified(filename,true) },
+ { find_intree (filename,filetype,wantedfiles,true) },
+ { find_onpath (filename,filetype,wantedfiles,true) },
+ { find_otherwise(filename,filetype,wantedfiles,true) },
+ }
+ local result, status, done = { }, { }, { }
+ for k, r in next, results do
+ local method, list = r[1], r[2]
+ if method and list then
+ for i=1,#list do
+ local c = collapsepath(list[i])
+ if not done[c] then
+ result[#result+1] = c
+ done[c] = true
+ end
+ status[#status+1] = format("%-10s: %s",method,c)
+ end
+ end
+ end
+ if trace_detail then
+ report_resolving("lookup status: %s",table.serialize(status,filename))
+ end
+ return result, status
+ else
+ local method, result, stamp, filetype, wantedfiles
+ if instance.remember then
+ stamp = format("%s--%s", filename, askedformat)
+ result = stamp and instance.found[stamp]
+ if result then
+ if trace_locating then
+ report_resolving("remembered file '%s'",filename)
+ end
+ return result
+ end
+ end
+ method, result = find_direct(filename)
+ if not result then
+ method, result = find_wildcard(filename)
+ if not result then
+ method, result = find_qualified(filename)
+ if not result then
+ filetype, wantedfiles = find_analyze(filename,askedformat)
+ method, result = find_intree(filename,filetype,wantedfiles)
+ if not result then
+ method, result = find_onpath(filename,filetype,wantedfiles)
+ if not result then
+ method, result = find_otherwise(filename,filetype,wantedfiles)
+ end
+ end
+ end
+ end
+ end
+ if result and #result > 0 then
+ local foundname = collapsepath(result[1])
+ resolvers.registerintrees(filename,askedformat,filetype,method,foundname)
+ result = { foundname }
+ else
+ result = { } -- maybe false
+ end
+ if stamp then
+ if trace_locating then
+ report_resolving("remembering file '%s'",filename)
+ end
+ instance.found[stamp] = result
+ end
+ return result
+ end
+end
-- -- -- end of main file search routing -- -- --
+
local function findfiles(filename,filetype,allresults)
- local result = collect_instance_files(filename,filetype or "",allresults)
- if #result == 0 then
+ local result, status = collect_instance_files(filename,filetype or "",allresults)
+ if not result or #result == 0 then
local lowered = lower(filename)
if filename ~= lowered then
- return collect_instance_files(lowered,filetype or "",allresults)
+ result, status = collect_instance_files(lowered,filetype or "",allresults)
end
end
- return result
+ return result or { }, status
end
function resolvers.findfiles(filename,filetype)
@@ -12370,6 +13646,10 @@ function resolvers.load(option)
return files and next(files) and true
end
+function resolvers.loadtime()
+ return statistics.elapsedtime(instance)
+end
+
local function report(str)
if trace_locating then
report_resolving(str) -- has already verbose
@@ -12383,6 +13663,9 @@ function resolvers.dowithfilesandreport(command, files, ...) -- will move
if trace_locating then
report('') -- ?
end
+ if type(files) == "string" then
+ files = { files }
+ end
for f=1,#files do
local file = files[f]
local result = command(file,...)
@@ -12540,6 +13823,8 @@ local cleanpath, findgivenfile, expansion = resolvers.cleanpath, resolvers.findg
local getenv = resolvers.getenv -- we can probably also use resolvers.expansion
local P, Cs, lpegmatch = lpeg.P, lpeg.Cs, lpeg.match
+-- getenv = function(...) return resolvers.getenv(...) end -- needs checking (definitions changes later on)
+
prefixes.environment = function(str)
return cleanpath(expansion(str))
end
@@ -12578,7 +13863,7 @@ end
prefixes.filename = function(str)
local fullname = findgivenfile(str) or ""
- return cleanpath(file.basename((fullname ~= "" and fullname) or str))
+ return cleanpath(file.basename((fullname ~= "" and fullname) or str)) -- no cleanpath needed here
end
prefixes.pathname = function(str)
@@ -12621,8 +13906,9 @@ function resolvers.allprefixes(separator)
end
local function _resolve_(method,target)
- if prefixes[method] then
- return prefixes[method](target)
+ local action = prefixes[method]
+ if action then
+ return action(target)
else
return method .. ":" .. target
end
@@ -12637,7 +13923,7 @@ end
local function resolve(str) -- use schemes, this one is then for the commandline only
local res = resolved[str]
if not res then
- res = gsub(str,"([a-z][a-z]+):([^ \"\';]*)",_resolve_)
+ res = gsub(str,"([a-z][a-z]+):([^ \"\';]*)",_resolve_) -- home:xx;selfautoparent:xx; etc
resolved[str] = res
abstract[res] = str
end
@@ -12797,7 +14083,7 @@ end
function generators.file(specification)
local path = specification.filename
- local content = resolvers.scanfiles(path)
+ local content = resolvers.scanfiles(path,false,true) -- scan once
resolvers.registerfilehash(path,content,true)
end
@@ -13152,6 +14438,18 @@ local archives = zip.archives
zip.registeredfiles = zip.registeredfiles or { }
local registeredfiles = zip.registeredfiles
+local limited = false
+
+directives.register("system.inputmode", function(v)
+ if not limited then
+ local i_limiter = io.i_limiter(v)
+ if i_limiter then
+ zip.open = i_limiter.protect(zip.open)
+ limited = true
+ end
+ end
+end)
+
local function validzip(str) -- todo: use url splitter
if not find(str,"^zip://") then
return "zip:///" .. str
@@ -13419,11 +14717,11 @@ function resolvers.finders.tree(specification)
end
function resolvers.locators.tree(specification)
- -- TH: resolvers.resolve() call was missing here
- local name = resolvers.resolve(specification.filename)
- if name ~= '' and lfs.isdir(name) then
+ local name = specification.filename
+ local realname = resolvers.resolve(name) -- no shortcut
+ if realname and realname ~= '' and lfs.isdir(realname) then
if trace_locating then
- report_trees("locator '%s' found",name)
+ report_trees("locator '%s' found",realname)
end
resolvers.appendhash('tree',name,false) -- don't cache
elseif trace_locating then
@@ -13437,6 +14735,8 @@ function resolvers.hashers.tree(specification)
report_trees("analysing '%s'",name)
end
resolvers.methodhandler("hashers",name)
+
+ resolvers.generators.file(specification)
end
resolvers.concatinators.tree = resolvers.concatinators.file
@@ -13596,8 +14896,15 @@ local function loaded(libpaths,name,simple)
end
package.loaders[2] = function(name) -- was [#package.loaders+1]
- if trace_locating then -- mode detail
- report_libraries("! locating '%s'",name)
+ if file.suffix(name) == "" then
+ name = file.addsuffix(name,"lua") -- maybe a list
+ if trace_locating then -- mode detail
+ report_libraries("! locating '%s' with forced suffix",name)
+ end
+ else
+ if trace_locating then -- mode detail
+ report_libraries("! locating '%s'",name)
+ end
end
for i=1,#libformats do
local format = libformats[i]
@@ -13804,7 +15111,13 @@ function resolvers.load_tree(tree,resolve)
-- Beware, we need to obey the relocatable autoparent so we
-- set TEXMFCNF to its raw value. This is somewhat tricky when
-- we run a mkii job from within. Therefore, in mtxrun, there
- -- is a resolve applied when we're in mkii/kpse mode.
+ -- is a resolve applied when we're in mkii/kpse mode or when
+ -- --resolve is passed to mtxrun. Maybe we should also set the
+ -- local AUTOPARENT etc. although these are always set anew.
+
+ if resolve then
+ resolvers.luacnfspec = resolvers.resolve(resolvers.luacnfspec)
+ end
setenv('SELFAUTOPARENT', newroot)
setenv('SELFAUTODIR', newtree)
@@ -13812,11 +15125,12 @@ function resolvers.load_tree(tree,resolve)
setenv('TEXROOT', newroot)
setenv('TEXOS', texos)
setenv('TEXMFOS', texmfos)
- setenv('TEXMFCNF', resolvers.luacnfspec, not resolve)
- setenv("PATH", newpath .. io.pathseparator .. getenv("PATH"))
+ setenv('TEXMFCNF', resolvers.luacnfspec,true) -- already resolved
+ setenv('PATH', newpath .. io.pathseparator .. getenv('PATH'))
report_tds("changing from root '%s' to '%s'",oldroot,newroot)
- report_tds("prepending '%s' to binary path",newpath)
+ report_tds("prepending '%s' to PATH",newpath)
+ report_tds("setting TEXMFCNF to '%s'",resolvers.luacnfspec)
report_tds()
end
end
@@ -13837,6 +15151,7 @@ if not modules then modules = { } end modules ['data-lst'] = {
-- used in mtxrun, can be loaded later .. todo
local find, concat, upper, format = string.find, table.concat, string.upper, string.format
+local fastcopy, sortedpairs = table.fastcopy, table.sortedpairs
resolvers.listers = resolvers.listers or { }
@@ -13867,10 +15182,10 @@ function resolvers.listers.variables(pattern)
end
end
end
- local env = table.fastcopy(environment)
- local var = table.fastcopy(variables)
- local exp = table.fastcopy(expansions)
- for key, value in table.sortedpairs(configured) do
+ local env = fastcopy(environment)
+ local var = fastcopy(variables)
+ local exp = fastcopy(expansions)
+ for key, value in sortedpairs(configured) do
if key ~= "" and (pattern == "" or find(upper(key),pattern)) then
report_lists(key)
report_lists(" env: %s",tabstr(rawget(environment,key)) or "unset")
@@ -13879,9 +15194,9 @@ function resolvers.listers.variables(pattern)
report_lists(" res: %s",resolvers.resolve(expansions[key]) or "unset")
end
end
- instance.environment = table.fastcopy(env)
- instance.variables = table.fastcopy(var)
- instance.expansions = table.fastcopy(exp)
+ instance.environment = fastcopy(env)
+ instance.variables = fastcopy(var)
+ instance.expansions = fastcopy(exp)
end
function resolvers.listers.configurations(report)
@@ -14168,8 +15483,8 @@ own = { } -- not local, might change
own.libs = { -- order can be made better
'l-string.lua',
- 'l-lpeg.lua',
'l-table.lua',
+ 'l-lpeg.lua',
'l-io.lua',
'l-number.lua',
'l-set.lua',
@@ -14240,7 +15555,7 @@ end
-- End of hack.
-local format, gsub, gmatch, match = string.format, string.gsub, string.gmatch, string.match
+local format, gsub, gmatch, match, find = string.format, string.gsub, string.gmatch, string.match, string.find
local concat = table.concat
own.name = (environment and environment.ownname) or arg[0] or 'mtxrun.lua'
@@ -14362,7 +15677,7 @@ local helpinfo = [[
--resolve resolve prefixed arguments
--ctxlua run internally (using preloaded libs)
--internal run script using built in libraries (same as --ctxlua)
---locate locate given filename
+--locate locate given filename in database (default) or system (--first --all --detail)
--autotree use texmf tree cf. env 'texmfstart_tree' or 'texmfstarttree'
--tree=pathtotree use given texmf tree (default file: 'setuptex.tmf')
@@ -14410,7 +15725,7 @@ local helpinfo = [[
local application = logs.application {
name = "mtxrun",
- banner = "ConTeXt TDS Runner Tool 1.30",
+ banner = "ConTeXt TDS Runner Tool 1.31",
helpinfo = helpinfo,
}
@@ -14455,12 +15770,12 @@ runners.registered = {
xmltools = { 'xmltools.rb', true },
-- luatools = { 'luatools.lua', true },
mtxtools = { 'mtxtools.rb', true },
- pdftrimwhite = { 'pdftrimwhite.pl', false }
+ pdftrimwhite = { 'pdftrimwhite.pl', false },
}
runners.launchers = {
windows = { },
- unix = { }
+ unix = { },
}
-- like runners.libpath("framework"): looks on script's subpath
@@ -14487,7 +15802,7 @@ function runners.prepare()
end
local touchname = environment.argument("iftouched")
if type(touchname) == "string" and touchname ~= "" then
- local oldname, newname = string.split(touchname, ",")
+ local oldname, newname = string.splitup(touchname, ",")
if oldname and newname and oldname ~= "" and newname ~= "" then
if not file.needs_updating(oldname,newname) then
if e_verbose then
@@ -14521,11 +15836,11 @@ function runners.execute_script(fullname,internal,nosplit)
if path ~= "" then
result = fullname
elseif name then
- name = name:gsub("^int[%a]*:",function()
+ name = gsub(name,"^int[%a]*:",function()
internal = true
return ""
end )
- name = name:gsub("^script:","")
+ name = gsub(name,"^script:","")
if suffix == "" and runners.registered[name] and runners.registered[name][1] then
name = runners.registered[name][1]
suffix = file.extname(name)
@@ -14555,13 +15870,15 @@ function runners.execute_script(fullname,internal,nosplit)
environment.ownscript = result
dofile(result)
else
- local quotedresult = result
- if string.match(result,' ') and not string.match(result,"^\".*\"$") then quotedresult = '"' .. result .. '"' end
local binary = runners.applications[file.extname(result)]
+ result = string.quoted(string.unquoted(result))
+ -- if string.match(result,' ') and not string.match(result,"^\".*\"$") then
+ -- result = '"' .. result .. '"'
+ -- end
if binary and binary ~= "" then
- quotedresult = binary .. " " .. quotedresult
+ result = binary .. " " .. result
end
- local command = quotedresult .. " " .. environment.reconstructcommandline(environment.arguments_after,noquote)
+ local command = result .. " " .. environment.reconstructcommandline(environment.arguments_after,noquote)
if e_verbose then
report()
report("executing: %s",command)
@@ -14606,7 +15923,7 @@ function runners.execute_program(fullname)
local before, after = environment.splitarguments(fullname)
for k=1,#after do after[k] = resolvers.resolve(after[k]) end
environment.initializearguments(after)
- fullname = fullname:gsub("^bin:","")
+ fullname = gsub(fullname,"^bin:","")
local command = fullname .. " " .. (environment.reconstructcommandline(after or "",noquote) or "")
report()
report("executing: %s",command)
@@ -14667,10 +15984,24 @@ function runners.resolve_string(filename)
end
end
-function runners.locate_file(filename)
- -- differs from texmfstart where locate appends .com .exe .bat ... todo
+-- differs from texmfstart where locate appends .com .exe .bat ... todo
+
+function runners.locate_file(filename) -- was given file but only searches in tree
if filename and filename ~= "" then
- runners.report_location(resolvers.findgivenfile(filename))
+ if environment.argument("first") then
+ runners.report_location(resolvers.findfile(filename))
+ -- resolvers.dowithfilesandreport(resolvers.findfile,filename)
+ elseif environment.argument("all") then
+ local result, status = resolvers.findfiles(filename)
+ if status and environment.argument("detail") then
+ runners.report_location(status)
+ else
+ runners.report_location(result)
+ end
+ else
+ runners.report_location(resolvers.findgivenfile(filename))
+ -- resolvers.dowithfilesandreport(resolvers.findgivenfile,filename)
+ end
end
end
@@ -14679,12 +16010,12 @@ function runners.locate_platform()
end
function runners.report_location(result)
- if e_verbose then
- reportline()
- if result and result ~= "" then
- report(result)
- else
- report("not found")
+ if type(result) == "table" then
+ for i=1,#result do
+ if i > 1 then
+ io.write("\n")
+ end
+ io.write(result[i])
end
else
io.write(result)
@@ -14742,8 +16073,8 @@ function resolvers.launch(str)
end
function runners.launch_file(filename)
- instance.allresults = true
trackers.enable("resolvers.locating")
+ local allresults = environment.arguments["all"]
local pattern = environment.arguments["pattern"]
if not pattern or pattern == "" then
pattern = filename
@@ -14751,15 +16082,15 @@ function runners.launch_file(filename)
if not pattern or pattern == "" then
report("provide name or --pattern=")
else
- local t = resolvers.findfiles(pattern)
+ local t = resolvers.findfiles(pattern,nil,allresults)
if not t or #t == 0 then
- t = resolvers.findfiles("*/" .. pattern)
+ t = resolvers.findfiles("*/" .. pattern,nil,allresults)
end
if not t or #t == 0 then
- t = resolvers.findfiles("*/" .. pattern .. "*")
+ t = resolvers.findfiles("*/" .. pattern .. "*",nil,allresults)
end
if t and #t > 0 then
- if environment.arguments["all"] then
+ if allresults then
for _, v in pairs(t) do
report("launching %s", v)
resolvers.launch(v)
@@ -14774,6 +16105,11 @@ function runners.launch_file(filename)
end
end
+local mtxprefixes = {
+ { "^mtx%-", "mtx-" },
+ { "^mtx%-t%-", "mtx-t-" },
+}
+
function runners.find_mtx_script(filename)
local function found(name)
local path = file.dirname(name)
@@ -14799,24 +16135,27 @@ function runners.find_mtx_script(filename)
return fullname
end
-- mtx- prefix checking
- local mtxprefix = (filename:find("^mtx%-") and "") or "mtx-"
- -- context namespace, mtx-<filename>
- fullname = mtxprefix .. filename
- fullname = found(fullname) or resolvers.findfile(fullname)
- if fullname and fullname ~= "" then
- return fullname
- end
- -- context namespace, mtx-<filename>s
- fullname = mtxprefix .. basename .. "s" .. "." .. suffix
- fullname = found(fullname) or resolvers.findfile(fullname)
- if fullname and fullname ~= "" then
- return fullname
- end
- -- context namespace, mtx-<filename minus trailing s>
- fullname = mtxprefix .. basename:gsub("s$","") .. "." .. suffix
- fullname = found(fullname) or resolvers.findfile(fullname)
- if fullname and fullname ~= "" then
- return fullname
+ for i=1,#mtxprefixes do
+ local mtxprefix = mtxprefixes[i]
+ mtxprefix = find(filename,mtxprefix[1]) and "" or mtxprefix[2]
+ -- context namespace, mtx-<filename>
+ fullname = mtxprefix .. filename
+ fullname = found(fullname) or resolvers.findfile(fullname)
+ if fullname and fullname ~= "" then
+ return fullname
+ end
+ -- context namespace, mtx-<filename>s
+ fullname = mtxprefix .. basename .. "s" .. "." .. suffix
+ fullname = found(fullname) or resolvers.findfile(fullname)
+ if fullname and fullname ~= "" then
+ return fullname
+ end
+ -- context namespace, mtx-<filename minus trailing s>
+ fullname = mtxprefix .. gsub(basename,"s$","") .. "." .. suffix
+ fullname = found(fullname) or resolvers.findfile(fullname)
+ if fullname and fullname ~= "" then
+ return fullname
+ end
end
-- context namespace, just <filename>
fullname = resolvers.findfile(filename)
@@ -14963,9 +16302,11 @@ instance.lsrmode = environment.argument("lsr") or false
local is_mkii_stub = runners.registered[file.removesuffix(file.basename(filename))]
-if environment.argument("usekpse") or environment.argument("forcekpse") or is_mkii_stub then
+local e_argument = environment.argument
+
+if e_argument("usekpse") or e_argument("forcekpse") or is_mkii_stub then
- resolvers.load_tree(environment.argument('tree'),true) -- force resolve of TEXMFCNF
+ resolvers.load_tree(e_argument('tree'),true) -- force resolve of TEXMFCNF
os.setenv("engine","")
os.setenv("progname","")
@@ -14978,7 +16319,7 @@ if environment.argument("usekpse") or environment.argument("forcekpse") or is_mk
other = "other text files",
}
- local progname = environment.argument("progname") or 'context'
+ local progname = e_argument("progname") or 'context'
local function kpse_initialized()
texconfig.kpse_init = true
@@ -14993,7 +16334,7 @@ if environment.argument("usekpse") or environment.argument("forcekpse") or is_mk
local findfile = resolvers.findfile
local showpath = resolvers.showpath
- if environment.argument("forcekpse") then
+ if e_argument("forcekpse") then
function resolvers.findfile(name,kind)
return (kpse_initialized():find_file(resolvers.cleanpath(name),(kind ~= "" and (remapper[kind] or kind)) or "tex") or "") or ""
@@ -15002,7 +16343,7 @@ if environment.argument("usekpse") or environment.argument("forcekpse") or is_mk
return (kpse_initialized():show_path(name)) or ""
end
- elseif environment.argument("usekpse") or is_mkii_stub then
+ elseif e_argument("usekpse") or is_mkii_stub then
resolvers.load()
@@ -15041,12 +16382,12 @@ else
end
end
- resolvers.load_tree(environment.argument('tree'))
+ resolvers.load_tree(e_argument('tree'),e_argument("resolve"))
end
-if environment.argument("selfmerge") then
+if e_argument("selfmerge") then
-- embed used libraries
@@ -15056,27 +16397,27 @@ if environment.argument("selfmerge") then
utilities.merger.selfmerge(own.name,own.libs,{ found })
end
-elseif environment.argument("selfclean") then
+elseif e_argument("selfclean") then
-- remove embedded libraries
runners.loadbase()
utilities.merger.selfclean(own.name)
-elseif environment.argument("selfupdate") then
+elseif e_argument("selfupdate") then
runners.loadbase()
trackers.enable("resolvers.locating")
resolvers.updatescript(own.name,"mtxrun")
-elseif environment.argument("ctxlua") or environment.argument("internal") then
+elseif e_argument("ctxlua") or e_argument("internal") then
-- run a script by loading it (using libs)
runners.loadbase()
ok = runners.execute_script(filename,true)
-elseif environment.argument("script") or environment.argument("scripts") then
+elseif e_argument("script") or e_argument("scripts") then
-- run a script by loading it (using libs), pass args
@@ -15087,109 +16428,110 @@ elseif environment.argument("script") or environment.argument("scripts") then
ok = runners.execute_ctx_script(filename)
end
-elseif environment.argument("execute") then
+elseif e_argument("execute") then
-- execute script
runners.loadbase()
ok = runners.execute_script(filename)
-elseif environment.argument("direct") then
+elseif e_argument("direct") then
-- equals bin:
runners.loadbase()
ok = runners.execute_program(filename)
-elseif environment.argument("edit") then
+elseif e_argument("edit") then
-- edit file
runners.loadbase()
runners.edit_script(filename)
-elseif environment.argument("launch") then
+elseif e_argument("launch") then
runners.loadbase()
runners.launch_file(filename)
-elseif environment.argument("makestubs") then
+elseif e_argument("makestubs") then
-- make stubs (deprecated)
runners.handle_stubs(true)
-elseif environment.argument("removestubs") then
+elseif e_argument("removestubs") then
-- remove stub (deprecated)
runners.loadbase()
runners.handle_stubs(false)
-elseif environment.argument("resolve") then
+elseif e_argument("resolve") then
-- resolve string
runners.loadbase()
runners.resolve_string(filename)
-elseif environment.argument("locate") then
+elseif e_argument("locate") then
- -- locate file
+ -- locate file (only database)
runners.loadbase()
runners.locate_file(filename)
-elseif environment.argument("platform") or environment.argument("show-platform") then
+elseif e_argument("platform") or e_argument("show-platform") then
-- locate platform
runners.loadbase()
runners.locate_platform()
-elseif environment.argument("prefixes") then
+elseif e_argument("prefixes") then
runners.loadbase()
runners.prefixes()
-elseif environment.argument("timedrun") then
+elseif e_argument("timedrun") then
-- locate platform
runners.loadbase()
runners.timedrun(filename)
-elseif environment.argument("variables") or environment.argument("show-variables") or environment.argument("expansions") or environment.argument("show-expansions") then
+elseif e_argument("variables") or e_argument("show-variables") or e_argument("expansions") or e_argument("show-expansions") then
-- luatools: runners.execute_ctx_script("mtx-base","--expansions",filename)
resolvers.load("nofiles")
- resolvers.listers.variables(environment.argument("pattern"))
+ resolvers.listers.variables(e_argument("pattern"))
-elseif environment.argument("configurations") or environment.argument("show-configurations") then
+elseif e_argument("configurations") or e_argument("show-configurations") then
-- luatools: runners.execute_ctx_script("mtx-base","--configurations",filename)
resolvers.load("nofiles")
resolvers.listers.configurations()
-elseif environment.argument("find-file") then
+elseif e_argument("find-file") then
-- luatools: runners.execute_ctx_script("mtx-base","--find-file",filename)
resolvers.load()
- local e_pattern = environment.argument("pattern")
- local e_format = environment.argument("format")
+ local e_all = e_argument("all")
+ local e_pattern = e_argument("pattern")
+ local e_format = e_argument("format")
+ local finder = e_all and resolvers.findfiles or resolvers.findfile
if not e_pattern then
runners.register_arguments(filename)
environment.initializearguments(environment.arguments_after)
- resolvers.dowithfilesandreport(resolvers.findfiles,environment.files,e_format)
+ resolvers.dowithfilesandreport(finder,environment.files,e_format)
elseif type(e_pattern) == "string" then
- instance.allresults = true -- brrrr
- resolvers.dowithfilesandreport(resolvers.findfiles,{ e_pattern }, e_format)
+ resolvers.dowithfilesandreport(finder,{ e_pattern },e_format)
end
-elseif environment.argument("find-path") then
+elseif e_argument("find-path") then
-- luatools: runners.execute_ctx_script("mtx-base","--find-path",filename)
@@ -15201,7 +16543,7 @@ elseif environment.argument("find-path") then
print(path)
end
-elseif environment.argument("expand-braces") then
+elseif e_argument("expand-braces") then
-- luatools: runners.execute_ctx_script("mtx-base","--expand-braces",filename)
@@ -15210,7 +16552,7 @@ elseif environment.argument("expand-braces") then
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.expandbraces, environment.files)
-elseif environment.argument("expand-path") then
+elseif e_argument("expand-path") then
-- luatools: runners.execute_ctx_script("mtx-base","--expand-path",filename)
@@ -15219,7 +16561,7 @@ elseif environment.argument("expand-path") then
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.expandpath, environment.files)
-elseif environment.argument("expand-var") or environment.argument("expand-variable") then
+elseif e_argument("expand-var") or e_argument("expand-variable") then
-- luatools: runners.execute_ctx_script("mtx-base","--expand-var",filename)
@@ -15228,7 +16570,7 @@ elseif environment.argument("expand-var") or environment.argument("expand-variab
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.expansion, environment.files)
-elseif environment.argument("show-path") or environment.argument("path-value") then
+elseif e_argument("show-path") or e_argument("path-value") then
-- luatools: runners.execute_ctx_script("mtx-base","--show-path",filename)
@@ -15237,7 +16579,7 @@ elseif environment.argument("show-path") or environment.argument("path-value") t
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.showpath, environment.files)
-elseif environment.argument("var-value") or environment.argument("show-value") then
+elseif e_argument("var-value") or e_argument("show-value") then
-- luatools: runners.execute_ctx_script("mtx-base","--show-value",filename)
@@ -15246,28 +16588,36 @@ elseif environment.argument("var-value") or environment.argument("show-value") t
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.variable,environment.files)
-elseif environment.argument("format-path") then
+elseif e_argument("format-path") then
-- luatools: runners.execute_ctx_script("mtx-base","--format-path",filename)
resolvers.load()
report(caches.getwritablepath("format"))
-elseif environment.argument("pattern") then
+elseif e_argument("pattern") then
-- luatools
- runners.execute_ctx_script("mtx-base","--pattern='" .. environment.argument("pattern") .. "'",filename)
+ runners.execute_ctx_script("mtx-base","--pattern='" .. e_argument("pattern") .. "'",filename)
-elseif environment.argument("generate") then
+elseif e_argument("generate") then
-- luatools
- instance.renewcache = true
- trackers.enable("resolvers.locating")
- resolvers.load()
+ if filename and filename ~= "" then
+ resolvers.load("nofiles")
+ trackers.enable("resolvers.locating")
+ resolvers.renew(filename)
+ else
+ instance.renewcache = true
+ trackers.enable("resolvers.locating")
+ resolvers.load()
+ end
-elseif environment.argument("make") or environment.argument("ini") or environment.argument("compile") then
+ e_verbose = true
+
+elseif e_argument("make") or e_argument("ini") or e_argument("compile") then
-- luatools: runners.execute_ctx_script("mtx-base","--make",filename)
@@ -15275,29 +16625,33 @@ elseif environment.argument("make") or environment.argument("ini") or environmen
trackers.enable("resolvers.locating")
environment.make_format(filename)
-elseif environment.argument("run") then
+elseif e_argument("run") then
-- luatools
runners.execute_ctx_script("mtx-base","--run",filename)
-elseif environment.argument("fmt") then
+elseif e_argument("fmt") then
-- luatools
runners.execute_ctx_script("mtx-base","--fmt",filename)
-elseif environment.argument("help") and filename=='base' then
+elseif e_argument("help") and filename=='base' then
-- luatools
runners.execute_ctx_script("mtx-base","--help")
-elseif environment.argument("help") or filename=='help' or filename == "" then
+elseif e_argument("version") then
+
+ application.version()
+
+elseif e_argument("help") or filename=='help' or filename == "" then
application.help()
-elseif filename:find("^bin:") then
+elseif find(filename,"^bin:") then
runners.loadbase()
ok = runners.execute_program(filename)
diff --git a/Master/bin/mips-irix/mtxrun b/Master/bin/mips-irix/mtxrun
index 7adcd3023a8..335c4fcb9e0 100755
--- a/Master/bin/mips-irix/mtxrun
+++ b/Master/bin/mips-irix/mtxrun
@@ -160,509 +160,6 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['l-lpeg'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local lpeg = require("lpeg")
-
-local type = type
-
--- Beware, we predefine a bunch of patterns here and one reason for doing so
--- is that we get consistent behaviour in some of the visualizers.
-
-lpeg.patterns = lpeg.patterns or { } -- so that we can share
-local patterns = lpeg.patterns
-
-local P, R, S, V, match = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.match
-local Ct, C, Cs, Cc = lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc
-local lpegtype = lpeg.type
-
-local utfcharacters = string.utfcharacters
-local utfgmatch = unicode and unicode.utf8.gmatch
-
-local anything = P(1)
-local endofstring = P(-1)
-local alwaysmatched = P(true)
-
-patterns.anything = anything
-patterns.endofstring = endofstring
-patterns.beginofstring = alwaysmatched
-patterns.alwaysmatched = alwaysmatched
-
-local digit, sign = R('09'), S('+-')
-local cr, lf, crlf = P("\r"), P("\n"), P("\r\n")
-local newline = crlf + cr + lf
-local escaped = P("\\") * anything
-local squote = P("'")
-local dquote = P('"')
-local space = P(" ")
-
-local utfbom_32_be = P('\000\000\254\255')
-local utfbom_32_le = P('\255\254\000\000')
-local utfbom_16_be = P('\255\254')
-local utfbom_16_le = P('\254\255')
-local utfbom_8 = P('\239\187\191')
-local utfbom = utfbom_32_be + utfbom_32_le
- + utfbom_16_be + utfbom_16_le
- + utfbom_8
-local utftype = utfbom_32_be / "utf-32-be" + utfbom_32_le / "utf-32-le"
- + utfbom_16_be / "utf-16-be" + utfbom_16_le / "utf-16-le"
- + utfbom_8 / "utf-8" + alwaysmatched / "unknown"
-
-local utf8next = R("\128\191")
-
-patterns.utf8one = R("\000\127")
-patterns.utf8two = R("\194\223") * utf8next
-patterns.utf8three = R("\224\239") * utf8next * utf8next
-patterns.utf8four = R("\240\244") * utf8next * utf8next * utf8next
-patterns.utfbom = utfbom
-patterns.utftype = utftype
-
-local utf8char = patterns.utf8one + patterns.utf8two + patterns.utf8three + patterns.utf8four
-local validutf8char = utf8char^0 * endofstring * Cc(true) + Cc(false)
-
-patterns.utf8 = utf8char
-patterns.utf8char = utf8char
-patterns.validutf8 = validutf8char
-patterns.validutf8char = validutf8char
-
-patterns.digit = digit
-patterns.sign = sign
-patterns.cardinal = sign^0 * digit^1
-patterns.integer = sign^0 * digit^1
-patterns.float = sign^0 * digit^0 * P('.') * digit^1
-patterns.cfloat = sign^0 * digit^0 * P(',') * digit^1
-patterns.number = patterns.float + patterns.integer
-patterns.cnumber = patterns.cfloat + patterns.integer
-patterns.oct = P("0") * R("07")^1
-patterns.octal = patterns.oct
-patterns.HEX = P("0x") * R("09","AF")^1
-patterns.hex = P("0x") * R("09","af")^1
-patterns.hexadecimal = P("0x") * R("09","AF","af")^1
-patterns.lowercase = R("az")
-patterns.uppercase = R("AZ")
-patterns.letter = patterns.lowercase + patterns.uppercase
-patterns.space = space
-patterns.tab = P("\t")
-patterns.spaceortab = patterns.space + patterns.tab
-patterns.eol = S("\n\r")
-patterns.spacer = S(" \t\f\v") -- + string.char(0xc2, 0xa0) if we want utf (cf mail roberto)
-patterns.newline = newline
-patterns.emptyline = newline^1
-patterns.nonspacer = 1 - patterns.spacer
-patterns.whitespace = patterns.eol + patterns.spacer
-patterns.nonwhitespace = 1 - patterns.whitespace
-patterns.equal = P("=")
-patterns.comma = P(",")
-patterns.commaspacer = P(",") * patterns.spacer^0
-patterns.period = P(".")
-patterns.colon = P(":")
-patterns.semicolon = P(";")
-patterns.underscore = P("_")
-patterns.escaped = escaped
-patterns.squote = squote
-patterns.dquote = dquote
-patterns.nosquote = (escaped + (1-squote))^0
-patterns.nodquote = (escaped + (1-dquote))^0
-patterns.unsingle = (squote/"") * patterns.nosquote * (squote/"")
-patterns.undouble = (dquote/"") * patterns.nodquote * (dquote/"")
-patterns.unquoted = patterns.undouble + patterns.unsingle -- more often undouble
-patterns.unspacer = ((patterns.spacer^1)/"")^0
-
-patterns.somecontent = (anything - newline - space)^1 -- (utf8char - newline - space)^1
-patterns.beginline = #(1-newline)
-
-local unquoted = Cs(patterns.unquoted * endofstring) -- not C
-
-function string.unquoted(str)
- return match(unquoted,str) or str
-end
-
-
-function lpeg.anywhere(pattern) --slightly adapted from website
- return P { P(pattern) + 1 * V(1) } -- why so complex?
-end
-
-function lpeg.splitter(pattern, action)
- return (((1-P(pattern))^1)/action+1)^0
-end
-
-local splitters_s, splitters_m = { }, { }
-
-local function splitat(separator,single)
- local splitter = (single and splitters_s[separator]) or splitters_m[separator]
- if not splitter then
- separator = P(separator)
- local other = C((1 - separator)^0)
- if single then
- local any = anything
- splitter = other * (separator * C(any^0) + "") -- ?
- splitters_s[separator] = splitter
- else
- splitter = other * (separator * other)^0
- splitters_m[separator] = splitter
- end
- end
- return splitter
-end
-
-lpeg.splitat = splitat
-
-
-local cache = { }
-
-function lpeg.split(separator,str)
- local c = cache[separator]
- if not c then
- c = Ct(splitat(separator))
- cache[separator] = c
- end
- return match(c,str)
-end
-
-function string.split(str,separator)
- local c = cache[separator]
- if not c then
- c = Ct(splitat(separator))
- cache[separator] = c
- end
- return match(c,str)
-end
-
-local spacing = patterns.spacer^0 * newline -- sort of strip
-local empty = spacing * Cc("")
-local nonempty = Cs((1-spacing)^1) * spacing^-1
-local content = (empty + nonempty)^1
-
-patterns.textline = content
-
-
-local linesplitter = Ct(splitat(newline))
-
-patterns.linesplitter = linesplitter
-
-function string.splitlines(str)
- return match(linesplitter,str)
-end
-
-local utflinesplitter = utfbom^-1 * Ct(splitat(newline))
-
-patterns.utflinesplitter = utflinesplitter
-
-function string.utfsplitlines(str)
- return match(utflinesplitter,str)
-end
-
-
-local cache = { }
-
-function lpeg.checkedsplit(separator,str)
- local c = cache[separator]
- if not c then
- separator = P(separator)
- local other = C((1 - separator)^1)
- c = Ct(separator^0 * other * (separator^1 * other)^0)
- cache[separator] = c
- end
- return match(c,str)
-end
-
-function string.checkedsplit(str,separator)
- local c = cache[separator]
- if not c then
- separator = P(separator)
- local other = C((1 - separator)^1)
- c = Ct(separator^0 * other * (separator^1 * other)^0)
- cache[separator] = c
- end
- return match(c,str)
-end
-
-
-local f1 = string.byte
-
-local function f2(s) local c1, c2 = f1(s,1,2) return c1 * 64 + c2 - 12416 end
-local function f3(s) local c1, c2, c3 = f1(s,1,3) return (c1 * 64 + c2) * 64 + c3 - 925824 end
-local function f4(s) local c1, c2, c3, c4 = f1(s,1,4) return ((c1 * 64 + c2) * 64 + c3) * 64 + c4 - 63447168 end
-
-local utf8byte = patterns.utf8one/f1 + patterns.utf8two/f2 + patterns.utf8three/f3 + patterns.utf8four/f4
-
-patterns.utf8byte = utf8byte
-
-
-
-local cache = { }
-
-function lpeg.stripper(str)
- if type(str) == "string" then
- local s = cache[str]
- if not s then
- s = Cs(((S(str)^1)/"" + 1)^0)
- cache[str] = s
- end
- return s
- else
- return Cs(((str^1)/"" + 1)^0)
- end
-end
-
-local cache = { }
-
-function lpeg.keeper(str)
- if type(str) == "string" then
- local s = cache[str]
- if not s then
- s = Cs((((1-S(str))^1)/"" + 1)^0)
- cache[str] = s
- end
- return s
- else
- return Cs((((1-str)^1)/"" + 1)^0)
- end
-end
-
-function lpeg.frontstripper(str) -- or pattern (yet undocumented)
- return (P(str) + P(true)) * Cs(P(1)^0)
-end
-
-function lpeg.endstripper(str) -- or pattern (yet undocumented)
- return Cs((1 - P(str) * P(-1))^0)
-end
-
--- Just for fun I looked at the used bytecode and
--- p = (p and p + pp) or pp gets one more (testset).
-
-function lpeg.replacer(one,two)
- if type(one) == "table" then
- local no = #one
- if no > 0 then
- local p
- for i=1,no do
- local o = one[i]
- local pp = P(o[1]) / o[2]
- if p then
- p = p + pp
- else
- p = pp
- end
- end
- return Cs((p + 1)^0)
- end
- else
- two = two or ""
- return Cs((P(one)/two + 1)^0)
- end
-end
-
-local splitters_f, splitters_s = { }, { }
-
-function lpeg.firstofsplit(separator) -- always return value
- local splitter = splitters_f[separator]
- if not splitter then
- separator = P(separator)
- splitter = C((1 - separator)^0)
- splitters_f[separator] = splitter
- end
- return splitter
-end
-
-function lpeg.secondofsplit(separator) -- nil if not split
- local splitter = splitters_s[separator]
- if not splitter then
- separator = P(separator)
- splitter = (1 - separator)^0 * separator * C(anything^0)
- splitters_s[separator] = splitter
- end
- return splitter
-end
-
-function lpeg.balancer(left,right)
- left, right = P(left), P(right)
- return P { left * ((1 - left - right) + V(1))^0 * right }
-end
-
-
-
-local nany = utf8char/""
-
-function lpeg.counter(pattern)
- pattern = Cs((P(pattern)/" " + nany)^0)
- return function(str)
- return #match(pattern,str)
- end
-end
-
-if utfgmatch then
-
- function lpeg.count(str,what) -- replaces string.count
- if type(what) == "string" then
- local n = 0
- for _ in utfgmatch(str,what) do
- n = n + 1
- end
- return n
- else -- 4 times slower but still faster than / function
- return #match(Cs((P(what)/" " + nany)^0),str)
- end
- end
-
-else
-
- local cache = { }
-
- function lpeg.count(str,what) -- replaces string.count
- if type(what) == "string" then
- local p = cache[what]
- if not p then
- p = Cs((P(what)/" " + nany)^0)
- cache[p] = p
- end
- return #match(p,str)
- else -- 4 times slower but still faster than / function
- return #match(Cs((P(what)/" " + nany)^0),str)
- end
- end
-
-end
-
-local patterns_escapes = { -- also defines in l-string
- ["%"] = "%%",
- ["."] = "%.",
- ["+"] = "%+", ["-"] = "%-", ["*"] = "%*",
- ["["] = "%[", ["]"] = "%]",
- ["("] = "%)", [")"] = "%)",
- -- ["{"] = "%{", ["}"] = "%}"
- -- ["^"] = "%^", ["$"] = "%$",
-}
-
-local simple_escapes = { -- also defines in l-string
- ["-"] = "%-",
- ["."] = "%.",
- ["?"] = ".",
- ["*"] = ".*",
-}
-
-local p = Cs((S("-.+*%()[]") / patterns_escapes + anything)^0)
-local s = Cs((S("-.+*%()[]") / simple_escapes + anything)^0)
-
-function string.escapedpattern(str,simple)
- return match(simple and s or p,str)
-end
-
--- utf extensies
-
-lpeg.UP = lpeg.P
-
-if utfcharacters then
-
- function lpeg.US(str)
- local p
- for uc in utfcharacters(str) do
- if p then
- p = p + P(uc)
- else
- p = P(uc)
- end
- end
- return p
- end
-
-
-elseif utfgmatch then
-
- function lpeg.US(str)
- local p
- for uc in utfgmatch(str,".") do
- if p then
- p = p + P(uc)
- else
- p = P(uc)
- end
- end
- return p
- end
-
-else
-
- function lpeg.US(str)
- local p
- local f = function(uc)
- if p then
- p = p + P(uc)
- else
- p = P(uc)
- end
- end
- match((utf8char/f)^0,str)
- return p
- end
-
-end
-
-local range = Cs(utf8byte) * (Cs(utf8byte) + Cc(false))
-
-local utfchar = unicode and unicode.utf8 and unicode.utf8.char
-
-function lpeg.UR(str,more)
- local first, last
- if type(str) == "number" then
- first = str
- last = more or first
- else
- first, last = match(range,str)
- if not last then
- return P(str)
- end
- end
- if first == last then
- return P(str)
- elseif utfchar and last - first < 8 then -- a somewhat arbitrary criterium
- local p
- for i=first,last do
- if p then
- p = p + P(utfchar(i))
- else
- p = P(utfchar(i))
- end
- end
- return p -- nil when invalid range
- else
- local f = function(b)
- return b >= first and b <= last
- end
- return utf8byte / f -- nil when invalid range
- end
-end
-
-
-
-function lpeg.oneof(list,...) -- lpeg.oneof("elseif","else","if","then")
- if type(list) ~= "table" then
- list = { list, ... }
- end
- -- sort(list) -- longest match first
- local p = P(list[1])
- for l=2,#list do
- p = p + P(list[l])
- end
- return p
-end
-
-function lpeg.is_lpeg(p)
- return p and lpegtype(p) == "pattern"
-end
-
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
if not modules then modules = { } end modules ['l-table'] = {
version = 1.001,
comment = "companion to luat-lib.mkiv",
@@ -675,11 +172,12 @@ local type, next, tostring, tonumber, ipairs, table, string = type, next, tostri
local concat, sort, insert, remove = table.concat, table.sort, table.insert, table.remove
local format, find, gsub, lower, dump, match = string.format, string.find, string.gsub, string.lower, string.dump, string.match
local getmetatable, setmetatable = getmetatable, setmetatable
+local getinfo = debug.getinfo
-- Starting with version 5.2 Lua no longer provides ipairs, which makes
-- sense. As we already used the for loop and # in most places the
-- impact on ConTeXt was not that large; the remaining ipairs already
--- have been replaced. In a similar fashio we also hardly used pairs.
+-- have been replaced. In a similar fashion we also hardly used pairs.
--
-- Just in case, we provide the fallbacks as discussed in Programming
-- in Lua (http://www.lua.org/pil/7.3.html):
@@ -1022,6 +520,8 @@ end
-- problem: there is no good number_to_string converter with the best resolution
+local function dummy() end
+
local function do_serialize(root,name,depth,level,indexed)
if level > 0 then
depth = depth .. " "
@@ -1214,19 +714,20 @@ local function do_serialize(root,name,depth,level,indexed)
end
elseif t == "function" then
if functions then
+ local f = getinfo(v).what == "C" and dump(dummy) or dump(v)
+ -- local f = getinfo(v).what == "C" and dump(function(...) return v(...) end) or dump(v)
if tk == "number" then -- or find(k,"^%d+$") then
if hexify then
- handle(format("%s [0x%04X]=loadstring(%q),",depth,k,dump(v)))
+ handle(format("%s [0x%04X]=loadstring(%q),",depth,k,f))
else
- handle(format("%s [%s]=loadstring(%q),",depth,k,dump(v)))
+ handle(format("%s [%s]=loadstring(%q),",depth,k,f))
end
elseif tk == "boolean" then
- handle(format("%s [%s]=loadstring(%q),",depth,tostring(k),dump(v)))
+ handle(format("%s [%s]=loadstring(%q),",depth,tostring(k),f))
elseif noquotes and not reserved[k] and find(k,"^%a[%w%_]*$") then
- handle(format("%s %s=loadstring(%q),",depth,k,dump(v)))
+ handle(format("%s %s=loadstring(%q),",depth,k,f))
else
- -- handle(format("%s [%q]=loadstring(%q),",depth,k,dump(v)))
- handle(format("%s [%q]=loadstring(%q),",depth,k,debug.getinfo(v).what == "C" and "C code" or dump(v)))
+ handle(format("%s [%q]=loadstring(%q),",depth,k,f))
end
end
else
@@ -1571,6 +1072,730 @@ function table.has_one_entry(t)
return t and not next(t,next(t))
end
+-- new
+
+function table.loweredkeys(t) -- maybe utf
+ local l = { }
+ for k, v in next, t do
+ l[lower(k)] = v
+ end
+ return l
+end
+
+-- new, might move (maybe duplicate)
+
+function table.unique(old)
+ local hash = { }
+ local new = { }
+ local n = 0
+ for i=1,#old do
+ local oi = old[i]
+ if not hash[oi] then
+ n = n + 1
+ new[n] = oi
+ hash[oi] = true
+ end
+ end
+ return new
+end
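+
+-- a quick illustration of the two helpers above (expected results shown, not verified here):
+--
+-- inspect(table.unique { "a", "b", "a", "c" })    -- { "a", "b", "c" }
+-- inspect(table.loweredkeys { Foo = 1, BAR = 2 }) -- { foo = 1, bar = 2 }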
+
+-- function table.sorted(t,...)
+-- table.sort(t,...)
+-- return t -- still sorts in-place
+-- end
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+if not modules then modules = { } end modules ['l-lpeg'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+
+-- a new lpeg fails on a #(1-P(":")) test and really needs a + P(-1)
+
+local lpeg = require("lpeg")
+
+-- tracing (only used when we encounter a problem in integration of lpeg in luatex)
+
+local report = texio and texio.write_nl or print
+
+
+
+
+local type = type
+local byte, char, gmatch = string.byte, string.char, string.gmatch
+
+-- Beware, we predefine a bunch of patterns here and one reason for doing so
+-- is that we get consistent behaviour in some of the visualizers.
+
+lpeg.patterns = lpeg.patterns or { } -- so that we can share
+local patterns = lpeg.patterns
+
+local P, R, S, V, match = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.match
+local Ct, C, Cs, Cc = lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc
+local lpegtype = lpeg.type
+
+local utfcharacters = string.utfcharacters
+local utfgmatch = unicode and unicode.utf8.gmatch
+
+local anything = P(1)
+local endofstring = P(-1)
+local alwaysmatched = P(true)
+
+patterns.anything = anything
+patterns.endofstring = endofstring
+patterns.beginofstring = alwaysmatched
+patterns.alwaysmatched = alwaysmatched
+
+local digit, sign = R('09'), S('+-')
+local cr, lf, crlf = P("\r"), P("\n"), P("\r\n")
+local newline = crlf + S("\r\n") -- cr + lf
+local escaped = P("\\") * anything
+local squote = P("'")
+local dquote = P('"')
+local space = P(" ")
+
+local utfbom_32_be = P('\000\000\254\255')
+local utfbom_32_le = P('\255\254\000\000')
+local utfbom_16_be = P('\255\254')
+local utfbom_16_le = P('\254\255')
+local utfbom_8 = P('\239\187\191')
+local utfbom = utfbom_32_be + utfbom_32_le
+ + utfbom_16_be + utfbom_16_le
+ + utfbom_8
+local utftype = utfbom_32_be / "utf-32-be" + utfbom_32_le / "utf-32-le"
+ + utfbom_16_be / "utf-16-be" + utfbom_16_le / "utf-16-le"
+ + utfbom_8 / "utf-8" + alwaysmatched / "unknown"
+
+local utf8next = R("\128\191")
+
+patterns.utf8one = R("\000\127")
+patterns.utf8two = R("\194\223") * utf8next
+patterns.utf8three = R("\224\239") * utf8next * utf8next
+patterns.utf8four = R("\240\244") * utf8next * utf8next * utf8next
+patterns.utfbom = utfbom
+patterns.utftype = utftype
+
+local utf8char = patterns.utf8one + patterns.utf8two + patterns.utf8three + patterns.utf8four
+local validutf8char = utf8char^0 * endofstring * Cc(true) + Cc(false)
+
+patterns.utf8 = utf8char
+patterns.utf8char = utf8char
+patterns.validutf8 = validutf8char
+patterns.validutf8char = validutf8char
+
+patterns.digit = digit
+patterns.sign = sign
+patterns.cardinal = sign^0 * digit^1
+patterns.integer = sign^0 * digit^1
+patterns.float = sign^0 * digit^0 * P('.') * digit^1
+patterns.cfloat = sign^0 * digit^0 * P(',') * digit^1
+patterns.number = patterns.float + patterns.integer
+patterns.cnumber = patterns.cfloat + patterns.integer
+patterns.oct = P("0") * R("07")^1
+patterns.octal = patterns.oct
+patterns.HEX = P("0x") * R("09","AF")^1
+patterns.hex = P("0x") * R("09","af")^1
+patterns.hexadecimal = P("0x") * R("09","AF","af")^1
+patterns.lowercase = R("az")
+patterns.uppercase = R("AZ")
+patterns.letter = patterns.lowercase + patterns.uppercase
+patterns.space = space
+patterns.tab = P("\t")
+patterns.spaceortab = patterns.space + patterns.tab
+patterns.eol = S("\n\r")
+patterns.spacer = S(" \t\f\v") -- + char(0xc2, 0xa0) if we want utf (cf mail roberto)
+patterns.newline = newline
+patterns.emptyline = newline^1
+patterns.nonspacer = 1 - patterns.spacer
+patterns.whitespace = patterns.eol + patterns.spacer
+patterns.nonwhitespace = 1 - patterns.whitespace
+patterns.equal = P("=")
+patterns.comma = P(",")
+patterns.commaspacer = P(",") * patterns.spacer^0
+patterns.period = P(".")
+patterns.colon = P(":")
+patterns.semicolon = P(";")
+patterns.underscore = P("_")
+patterns.escaped = escaped
+patterns.squote = squote
+patterns.dquote = dquote
+patterns.nosquote = (escaped + (1-squote))^0
+patterns.nodquote = (escaped + (1-dquote))^0
+patterns.unsingle = (squote/"") * patterns.nosquote * (squote/"")
+patterns.undouble = (dquote/"") * patterns.nodquote * (dquote/"")
+patterns.unquoted = patterns.undouble + patterns.unsingle -- more often undouble
+patterns.unspacer = ((patterns.spacer^1)/"")^0
+
+patterns.somecontent = (anything - newline - space)^1 -- (utf8char - newline - space)^1
+patterns.beginline = #(1-newline)
+
+-- print(string.unquoted("test"))
+-- print(string.unquoted([["t\"est"]]))
+-- print(string.unquoted([["t\"est"x]]))
+-- print(string.unquoted("\'test\'"))
+-- print(string.unquoted('"test"'))
+-- print(string.unquoted('"test"'))
+
+function lpeg.anywhere(pattern) --slightly adapted from website
+ return P { P(pattern) + 1 * V(1) } -- why so complex?
+end
+
+function lpeg.splitter(pattern, action)
+ return (((1-P(pattern))^1)/action+1)^0
+end
+
+function lpeg.tsplitter(pattern, action)
+ return Ct((((1-P(pattern))^1)/action+1)^0)
+end
+
+-- problem: the separator can be an lpeg pattern and that does not hash too well, but
+-- it's quite okay as the key is then not garbage collected
+
+local splitters_s, splitters_m, splitters_t = { }, { }, { }
+
+local function splitat(separator,single)
+ local splitter = (single and splitters_s[separator]) or splitters_m[separator]
+ if not splitter then
+ separator = P(separator)
+ local other = C((1 - separator)^0)
+ if single then
+ local any = anything
+ splitter = other * (separator * C(any^0) + "") -- ?
+ splitters_s[separator] = splitter
+ else
+ splitter = other * (separator * other)^0
+ splitters_m[separator] = splitter
+ end
+ end
+ return splitter
+end
+
+local function tsplitat(separator)
+ local splitter = splitters_t[separator]
+ if not splitter then
+ splitter = Ct(splitat(separator))
+ splitters_t[separator] = splitter
+ end
+ return splitter
+end
+
+lpeg.splitat = splitat
+lpeg.tsplitat = tsplitat
+
+function string.splitup(str,separator)
+ if not separator then
+ separator = ","
+ end
+ return match(splitters_m[separator] or splitat(separator),str)
+end
+
+
+local cache = { }
+
+function lpeg.split(separator,str)
+ local c = cache[separator]
+ if not c then
+ c = tsplitat(separator)
+ cache[separator] = c
+ end
+ return match(c,str)
+end
+
+function string.split(str,separator)
+ local c = cache[separator]
+ if not c then
+ c = tsplitat(separator)
+ cache[separator] = c
+ end
+ return match(c,str)
+end
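+
+-- a quick illustration of the splitters above (expected results shown):
+--
+-- inspect(lpeg.split(",","a,b,c"))    -- { "a", "b", "c" }
+-- inspect(string.split("a,b,c",",")) -- { "a", "b", "c" }
+-- print(string.splitup("a=b","="))   -- a   b  (two return values)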
+
+local spacing = patterns.spacer^0 * newline -- sort of strip
+local empty = spacing * Cc("")
+local nonempty = Cs((1-spacing)^1) * spacing^-1
+local content = (empty + nonempty)^1
+
+patterns.textline = content
+
+
+local linesplitter = tsplitat(newline)
+
+patterns.linesplitter = linesplitter
+
+function string.splitlines(str)
+ return match(linesplitter,str)
+end
+
+local utflinesplitter = utfbom^-1 * tsplitat(newline)
+
+patterns.utflinesplitter = utflinesplitter
+
+function string.utfsplitlines(str)
+ return match(utflinesplitter,str or "")
+end
+
+
+local cache = { }
+
+function lpeg.checkedsplit(separator,str)
+ local c = cache[separator]
+ if not c then
+ separator = P(separator)
+ local other = C((1 - separator)^1)
+ c = Ct(separator^0 * other * (separator^1 * other)^0)
+ cache[separator] = c
+ end
+ return match(c,str)
+end
+
+function string.checkedsplit(str,separator)
+ local c = cache[separator]
+ if not c then
+ separator = P(separator)
+ local other = C((1 - separator)^1)
+ c = Ct(separator^0 * other * (separator^1 * other)^0)
+ cache[separator] = c
+ end
+ return match(c,str)
+end
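+
+-- unlike the plain splitters, checkedsplit skips empty fields (expected results shown):
+--
+-- inspect(string.split("a,,b",","))        -- { "a", "", "b" }
+-- inspect(string.checkedsplit("a,,b",",")) -- { "a", "b" }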
+
+
+local function f2(s) local c1, c2 = byte(s,1,2) return c1 * 64 + c2 - 12416 end
+local function f3(s) local c1, c2, c3 = byte(s,1,3) return (c1 * 64 + c2) * 64 + c3 - 925824 end
+local function f4(s) local c1, c2, c3, c4 = byte(s,1,4) return ((c1 * 64 + c2) * 64 + c3) * 64 + c4 - 63447168 end
+
+local utf8byte = patterns.utf8one/byte + patterns.utf8two/f2 + patterns.utf8three/f3 + patterns.utf8four/f4
+
+patterns.utf8byte = utf8byte
+
+
+
+local cache = { }
+
+function lpeg.stripper(str)
+ if type(str) == "string" then
+ local s = cache[str]
+ if not s then
+ s = Cs(((S(str)^1)/"" + 1)^0)
+ cache[str] = s
+ end
+ return s
+ else
+ return Cs(((str^1)/"" + 1)^0)
+ end
+end
+
+local cache = { }
+
+function lpeg.keeper(str)
+ if type(str) == "string" then
+ local s = cache[str]
+ if not s then
+ s = Cs((((1-S(str))^1)/"" + 1)^0)
+ cache[str] = s
+ end
+ return s
+ else
+ return Cs((((1-str)^1)/"" + 1)^0)
+ end
+end
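+
+-- strippers remove the given characters, keepers keep only those (expected results shown):
+--
+-- print(lpeg.match(lpeg.stripper("ab"),"banana"))        -- nn
+-- print(lpeg.match(lpeg.keeper("0123456789"),"tl 2011")) -- 2011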
+
+function lpeg.frontstripper(str) -- or pattern (yet undocumented)
+ return (P(str) + P(true)) * Cs(P(1)^0)
+end
+
+function lpeg.endstripper(str) -- or pattern (yet undocumented)
+ return Cs((1 - P(str) * P(-1))^0)
+end
+
+-- Just for fun I looked at the used bytecode and
+-- p = (p and p + pp) or pp gets one more (testset).
+
+function lpeg.replacer(one,two)
+ if type(one) == "table" then
+ local no = #one
+ if no > 0 then
+ local p
+ for i=1,no do
+ local o = one[i]
+ local pp = P(o[1]) / o[2]
+ if p then
+ p = p + pp
+ else
+ p = pp
+ end
+ end
+ return Cs((p + 1)^0)
+ end
+ else
+ two = two or ""
+ return Cs((P(one)/two + 1)^0)
+ end
+end
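+
+-- a replacer substitutes a string or a list of pairs everywhere (expected results shown):
+--
+-- print(lpeg.match(lpeg.replacer("foo","bar"),"foo and foo"))           -- bar and bar
+-- print(lpeg.match(lpeg.replacer { { "a", "A" }, { "b", "B" } },"abc")) -- ABc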
+
+local splitters_f, splitters_s = { }, { }
+
+function lpeg.firstofsplit(separator) -- always return value
+ local splitter = splitters_f[separator]
+ if not splitter then
+ separator = P(separator)
+ splitter = C((1 - separator)^0)
+ splitters_f[separator] = splitter
+ end
+ return splitter
+end
+
+function lpeg.secondofsplit(separator) -- nil if not split
+ local splitter = splitters_s[separator]
+ if not splitter then
+ separator = P(separator)
+ splitter = (1 - separator)^0 * separator * C(anything^0)
+ splitters_s[separator] = splitter
+ end
+ return splitter
+end
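+
+-- these return the part before respectively after the first separator (expected results shown):
+--
+-- print(lpeg.match(lpeg.firstofsplit(":"),"foo:bar:baz"))  -- foo
+-- print(lpeg.match(lpeg.secondofsplit(":"),"foo:bar:baz")) -- bar:baz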
+
+function lpeg.balancer(left,right)
+ left, right = P(left), P(right)
+ return P { left * ((1 - left - right) + V(1))^0 * right }
+end
+
+
+
+local nany = utf8char/""
+
+function lpeg.counter(pattern)
+ pattern = Cs((P(pattern)/" " + nany)^0)
+ return function(str)
+ return #match(pattern,str)
+ end
+end
+
+if utfgmatch then
+
+ function lpeg.count(str,what) -- replaces string.count
+ if type(what) == "string" then
+ local n = 0
+ for _ in utfgmatch(str,what) do
+ n = n + 1
+ end
+ return n
+ else -- 4 times slower but still faster than / function
+ return #match(Cs((P(what)/" " + nany)^0),str)
+ end
+ end
+
+else
+
+ local cache = { }
+
+ function lpeg.count(str,what) -- replaces string.count
+ if type(what) == "string" then
+ local p = cache[what]
+ if not p then
+ p = Cs((P(what)/" " + nany)^0)
+            cache[what] = p
+ end
+ return #match(p,str)
+ else -- 4 times slower but still faster than / function
+ return #match(Cs((P(what)/" " + nany)^0),str)
+ end
+ end
+
+end
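+
+-- counting occurrences, either with a precompiled counter or directly (expected results shown):
+--
+-- local n = lpeg.counter("ab") print(n("ab cd ab"))  -- 2
+-- print(lpeg.count("ab cd ab","ab"))                 -- 2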
+
+local patterns_escapes = { -- also defined in l-string
+ ["%"] = "%%",
+ ["."] = "%.",
+ ["+"] = "%+", ["-"] = "%-", ["*"] = "%*",
+ ["["] = "%[", ["]"] = "%]",
+ ["("] = "%)", [")"] = "%)",
+ -- ["{"] = "%{", ["}"] = "%}"
+ -- ["^"] = "%^", ["$"] = "%$",
+}
+
+local simple_escapes = { -- also defined in l-string
+ ["-"] = "%-",
+ ["."] = "%.",
+ ["?"] = ".",
+ ["*"] = ".*",
+}
+
+local p = Cs((S("-.+*%()[]") / patterns_escapes + anything)^0)
+local s = Cs((S("-.+*%()[]") / simple_escapes + anything)^0)
+
+function string.escapedpattern(str,simple)
+ return match(simple and s or p,str)
+end
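+
+-- escaping a string so that it can be used as a lua (find) pattern (expected results shown):
+--
+-- print(string.escapedpattern("10.5%"))      -- 10%.5%%
+-- print(string.escapedpattern("*.lua",true)) -- .*%.lua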
+
+-- utf extensions
+
+lpeg.UP = lpeg.P
+
+if utfcharacters then
+
+ function lpeg.US(str)
+ local p
+ for uc in utfcharacters(str) do
+ if p then
+ p = p + P(uc)
+ else
+ p = P(uc)
+ end
+ end
+ return p
+ end
+
+
+elseif utfgmatch then
+
+ function lpeg.US(str)
+ local p
+ for uc in utfgmatch(str,".") do
+ if p then
+ p = p + P(uc)
+ else
+ p = P(uc)
+ end
+ end
+ return p
+ end
+
+else
+
+ function lpeg.US(str)
+ local p
+ local f = function(uc)
+ if p then
+ p = p + P(uc)
+ else
+ p = P(uc)
+ end
+ end
+ match((utf8char/f)^0,str)
+ return p
+ end
+
+end
+
+local range = Cs(utf8byte) * (Cs(utf8byte) + Cc(false))
+
+local utfchar = unicode and unicode.utf8 and unicode.utf8.char
+
+function lpeg.UR(str,more)
+ local first, last
+ if type(str) == "number" then
+ first = str
+ last = more or first
+ else
+ first, last = match(range,str)
+ if not last then
+ return P(str)
+ end
+ end
+ if first == last then
+ return P(str)
+    elseif utfchar and last - first < 8 then -- a somewhat arbitrary criterion
+ local p
+ for i=first,last do
+ if p then
+ p = p + P(utfchar(i))
+ else
+ p = P(utfchar(i))
+ end
+ end
+ return p -- nil when invalid range
+ else
+ local f = function(b)
+ return b >= first and b <= last
+ end
+ return utf8byte / f -- nil when invalid range
+ end
+end
+
+
+
+function lpeg.oneof(list,...) -- lpeg.oneof("elseif","else","if","then")
+ if type(list) ~= "table" then
+ list = { list, ... }
+ end
+ -- sort(list) -- longest match first
+ local p = P(list[1])
+ for l=2,#list do
+ p = p + P(list[l])
+ end
+ return p
+end
+
+function lpeg.is_lpeg(p)
+ return p and lpegtype(p) == "pattern"
+end
+
+-- For the moment here, but it might move to utilities. Beware, we need to
+-- have the longest keyword first, so 'aaa' comes before 'aa', which is why we
+-- loop back from the end, i.e. prepend.
+
+local sort, fastcopy, sortedkeys = table.sort, table.fastcopy, table.sortedkeys -- dependency!
+
+function lpeg.append(list,pp,delayed,checked)
+ local p = pp
+ if #list > 0 then
+ local keys = fastcopy(list)
+ sort(keys)
+ for i=#keys,1,-1 do
+ local k = keys[i]
+ if p then
+ p = P(k) + p
+ else
+ p = P(k)
+ end
+ end
+ elseif delayed then -- hm, it looks like the lpeg parser resolves anyway
+ local keys = sortedkeys(list)
+ if p then
+ for i=1,#keys,1 do
+ local k = keys[i]
+ local v = list[k]
+ p = P(k)/list + p
+ end
+ else
+ for i=1,#keys do
+ local k = keys[i]
+ local v = list[k]
+ if p then
+ p = P(k) + p
+ else
+ p = P(k)
+ end
+ end
+ if p then
+ p = p / list
+ end
+ end
+ elseif checked then
+ -- problem: substitution gives a capture
+ local keys = sortedkeys(list)
+ for i=1,#keys do
+ local k = keys[i]
+ local v = list[k]
+ if p then
+ if k == v then
+ p = P(k) + p
+ else
+ p = P(k)/v + p
+ end
+ else
+ if k == v then
+ p = P(k)
+ else
+ p = P(k)/v
+ end
+ end
+ end
+ else
+ local keys = sortedkeys(list)
+ for i=1,#keys do
+ local k = keys[i]
+ local v = list[k]
+ if p then
+ p = P(k)/v + p
+ else
+ p = P(k)/v
+ end
+ end
+ end
+ return p
+end
+
+-- inspect(lpeg.append({ a = "1", aa = "1", aaa = "1" } ,nil,true))
+-- inspect(lpeg.append({ ["degree celsius"] = "1", celsius = "1", degree = "1" } ,nil,true))
+
+-- function lpeg.exact_match(words,case_insensitive)
+-- local pattern = concat(words)
+-- if case_insensitive then
+-- local pattern = S(upper(characters)) + S(lower(characters))
+-- local list = { }
+-- for i=1,#words do
+-- list[lower(words[i])] = true
+-- end
+-- return Cmt(pattern^1, function(_,i,s)
+-- return list[lower(s)] and i
+-- end)
+-- else
+-- local pattern = S(concat(words))
+-- local list = { }
+-- for i=1,#words do
+-- list[words[i]] = true
+-- end
+-- return Cmt(pattern^1, function(_,i,s)
+-- return list[s] and i
+-- end)
+-- end
+-- end
+
+-- experiment:
+
+local function make(t)
+ local p
+-- for k, v in next, t do
+ for k, v in table.sortedhash(t) do
+ if not p then
+ if next(v) then
+ p = P(k) * make(v)
+ else
+ p = P(k)
+ end
+ else
+ if next(v) then
+ p = p + P(k) * make(v)
+ else
+ p = p + P(k)
+ end
+ end
+ end
+ return p
+end
+
+function lpeg.utfchartabletopattern(list)
+ local tree = { }
+ for i=1,#list do
+ local t = tree
+ for c in gmatch(list[i],".") do
+ if not t[c] then
+ t[c] = { }
+ end
+ t = t[c]
+ end
+ end
+ return make(tree)
+end
+
+-- inspect ( lpeg.utfchartabletopattern {
+-- utfchar(0x00A0), -- nbsp
+-- utfchar(0x2000), -- enquad
+-- utfchar(0x2001), -- emquad
+-- utfchar(0x2002), -- enspace
+-- utfchar(0x2003), -- emspace
+-- utfchar(0x2004), -- threeperemspace
+-- utfchar(0x2005), -- fourperemspace
+-- utfchar(0x2006), -- sixperemspace
+-- utfchar(0x2007), -- figurespace
+-- utfchar(0x2008), -- punctuationspace
+-- utfchar(0x2009), -- breakablethinspace
+-- utfchar(0x200A), -- hairspace
+-- utfchar(0x200B), -- zerowidthspace
+-- utfchar(0x202F), -- narrownobreakspace
+-- utfchar(0x205F), -- math thinspace
+-- } )
+
end -- of closure
@@ -1831,6 +2056,11 @@ function io.readstring(f,n,m)
return str
end
+--
+
+if not io.i_limiter then function io.i_limiter() end end -- dummy so we can test safely
+if not io.o_limiter then function io.o_limiter() end end -- dummy so we can test safely
+
end -- of closure
@@ -2061,7 +2291,7 @@ if not modules then modules = { } end modules ['l-os'] = {
-- maybe build io.flush in os.execute
local os = os
-local find, format, gsub, upper = string.find, string.format, string.gsub, string.upper
+local find, format, gsub, upper, gmatch = string.find, string.format, string.gsub, string.upper, string.gmatch
local concat = table.concat
local random, ceil = math.random, math.ceil
local rawget, rawset, type, getmetatable, setmetatable, tonumber = rawget, rawset, type, getmetatable, setmetatable, tonumber
@@ -2167,9 +2397,9 @@ os.type = os.type or (io.pathseparator == ";" and "windows") or "unix"
os.name = os.name or (os.type == "windows" and "mswin" ) or "linux"
if os.type == "windows" then
- os.libsuffix, os.binsuffix = 'dll', 'exe'
+ os.libsuffix, os.binsuffix, os.binsuffixes = 'dll', 'exe', { 'exe', 'cmd', 'bat' }
else
- os.libsuffix, os.binsuffix = 'so', ''
+ os.libsuffix, os.binsuffix, os.binsuffixes = 'so', '', { '' }
end
function os.launch(str)
@@ -2400,6 +2630,39 @@ function os.timezone(delta)
end
end
+local memory = { }
+
+local function which(filename)
+ local fullname = memory[filename]
+ if fullname == nil then
+ local suffix = file.suffix(filename)
+ local suffixes = suffix == "" and os.binsuffixes or { suffix }
+ for directory in gmatch(os.getenv("PATH"),"[^" .. io.pathseparator .."]+") do
+ local df = file.join(directory,filename)
+ for i=1,#suffixes do
+ local dfs = file.addsuffix(df,suffixes[i])
+ if io.exists(dfs) then
+ fullname = dfs
+ break
+ end
+ end
+ end
+ if not fullname then
+ fullname = false
+ end
+ memory[filename] = fullname
+ end
+ return fullname
+end
+
+os.which = which
+os.where = which
+
+-- print(os.which("inkscape.exe"))
+-- print(os.which("inkscape"))
+-- print(os.which("gs.exe"))
+-- print(os.which("ps2pdf"))
+
end -- of closure
@@ -2499,10 +2762,10 @@ end
local trick_1 = char(1)
local trick_2 = "^" .. trick_1 .. "/+"
-function file.join(...)
+function file.join(...) -- rather dirty
local lst = { ... }
local a, b = lst[1], lst[2]
- if a == "" then
+    if not a or a == "" then -- the 'not a' test is new
lst[1] = trick_1
elseif b and find(a,"^/+$") and find(b,"^/") then
lst[1] = ""
@@ -2523,6 +2786,15 @@ function file.join(...)
end
+-- We should be able to use:
+--
+-- function file.is_writable(name)
+-- local a = attributes(name) or attributes(dirname(name,"."))
+-- return a and sub(a.permissions,2,2) == "w"
+-- end
+--
+-- But after some testing Taco and I came up with:
+
function file.is_writable(name)
if lfs.isdir(name) then
name = name .. "/m_t_x_t_e_s_t.tmp"
@@ -2532,12 +2804,17 @@ function file.is_writable(name)
os.remove(name)
return true
end
- else
- local existing = lfs.isfile(name)
- f = io.open(name,"ab")
+ elseif lfs.isfile(name) then
+ local f = io.open(name,"ab")
if f then
f:close()
- if not existing then os.remove(name) end
+ return true
+ end
+ else
+ local f = io.open(name,"ab")
+ if f then
+ f:close()
+ os.remove(name)
return true
end
end
@@ -2594,7 +2871,7 @@ function file.collapsepath(str,anchor)
if element == '.' then
-- do nothing
elseif element == '..' then
- local n = i -1
+ local n = i - 1
while n > 0 do
local element = oldelements[n]
if element ~= '..' and element ~= '.' then
@@ -2690,10 +2967,42 @@ local path = C(((1-slash)^0 * slash)^0)
local suffix = period * C(P(1-period)^0 * P(-1))
local base = C((1-suffix)^0)
-local pattern = (drive + Cc("")) * (path + Cc("")) * (base + Cc("")) * (suffix + Cc(""))
+drive = drive + Cc("")
+path = path + Cc("")
+base = base + Cc("")
+suffix = suffix + Cc("")
-function file.splitname(str) -- returns drive, path, base, suffix
- return lpegmatch(pattern,str)
+local pattern_a = drive * path * base * suffix
+local pattern_b = path * base * suffix
+local pattern_c = C(drive * path) * C(base * suffix)
+
+function file.splitname(str,splitdrive)
+ if splitdrive then
+ return lpegmatch(pattern_a,str) -- returns drive, path, base, suffix
+ else
+ return lpegmatch(pattern_b,str) -- returns path, base, suffix
+ end
+end
+
+function file.nametotable(str,splitdrive) -- returns table
+ local path, drive, subpath, name, base, suffix = lpegmatch(pattern_c,str)
+ if splitdrive then
+ return {
+ path = path,
+ drive = drive,
+ subpath = subpath,
+ name = name,
+ base = base,
+ suffix = suffix,
+ }
+ else
+ return {
+ path = path,
+ name = name,
+ base = base,
+ suffix = suffix,
+ }
+ end
end
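+
+-- a quick illustration of the two splitters above (expected results shown):
+--
+-- print(file.splitname("a/b/c.lua"))     -- a/b/  c  lua
+-- inspect(file.nametotable("a/b/c.lua")) -- { path = "a/b/", name = "c.lua", base = "c", suffix = "lua" }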
-- function test(t) for k, v in next, t do print(v, "=>", file.splitname(v)) end end
@@ -2828,12 +3137,19 @@ local nothing = Cc("")
local escaped = (plus / " ") + (percent * C(hexdigit * hexdigit) / tochar)
-- we assume schemes with more than 1 character (in order to avoid problems with windows disks)
+-- we also assume that when we have a scheme, we also have an authority
+
+local schemestr = Cs((escaped+(1-colon-slash-qmark-hash))^2)
+local authoritystr = Cs((escaped+(1- slash-qmark-hash))^0)
+local pathstr = Cs((escaped+(1- qmark-hash))^0)
+local querystr = Cs((escaped+(1- hash))^0)
+local fragmentstr = Cs((escaped+(1- endofstring))^0)
-local scheme = Cs((escaped+(1-colon-slash-qmark-hash))^2) * colon * slash * slash + nothing
-local authority = Cs((escaped+(1- slash-qmark-hash))^0) + nothing
-local path = slash * Cs((escaped+(1- qmark-hash))^0) + nothing
-local query = qmark * Cs((escaped+(1- hash))^0) + nothing
-local fragment = hash * Cs((escaped+(1- endofstring))^0) + nothing
+local scheme = schemestr * colon + nothing
+local authority = slash * slash * authoritystr + nothing
+local path = slash * pathstr + nothing
+local query = qmark * querystr + nothing
+local fragment = hash * fragmentstr + nothing
local validurl = scheme * authority * path * query * fragment
local parser = Ct(validurl)
@@ -2854,11 +3170,14 @@ local function split(str)
return (type(str) == "string" and lpegmatch(parser,str)) or str
end
+local isscheme = schemestr * colon * slash * slash -- this test also assumes authority
+
local function hasscheme(str)
- local scheme = lpegmatch(scheme,str) -- at least one character
- return scheme and scheme ~= ""
+ local scheme = lpegmatch(isscheme,str) -- at least one character
+ return scheme ~= "" and scheme or false
end
+
-- todo: cache them
local rootletter = R("az","AZ")
@@ -3007,8 +3326,6 @@ local attributes = lfs.attributes
local walkdir = lfs.dir
local isdir = lfs.isdir
local isfile = lfs.isfile
-local mkdir = lfs.mkdir
-local chdir = lfs.chdir
local currentdir = lfs.currentdir
-- handy
@@ -3056,13 +3373,13 @@ local function collectpattern(path,patt,recurse,result)
local ok, scanner
result = result or { }
if path == "/" then
- ok, scanner = xpcall(function() return walkdir(path..".") end, function() end) -- kepler safe
+ ok, scanner, first = xpcall(function() return walkdir(path..".") end, function() end) -- kepler safe
else
- ok, scanner = xpcall(function() return walkdir(path) end, function() end) -- kepler safe
+ ok, scanner, first = xpcall(function() return walkdir(path) end, function() end) -- kepler safe
end
if ok and type(scanner) == "function" then
if not find(path,"/$") then path = path .. '/' end
- for name in scanner do
+ for name in scanner, first do
local full = path .. name
local attr = attributes(full)
local mode = attr.mode
@@ -3245,7 +3562,7 @@ if onwindows then
pth = pth .. "/" .. s
end
if make_indeed and not isdir(pth) then
- mkdir(pth)
+ lfs.mkdir(pth)
end
end
return pth, (isdir(pth) == true)
@@ -3277,7 +3594,7 @@ else
pth = pth .. "/" .. s
end
if make_indeed and not first and not isdir(pth) then
- mkdir(pth)
+ lfs.mkdir(pth)
end
end
else
@@ -3285,7 +3602,7 @@ else
for s in gmatch(str,"[^/]+") do
pth = pth .. "/" .. s
if make_indeed and not isdir(pth) then
- mkdir(pth)
+ lfs.mkdir(pth)
end
end
end
@@ -3313,10 +3630,10 @@ if onwindows then
first, last = match(str,"^([a-zA-Z]:)(.*)$")
if first and not find(last,"^/") then
local d = currentdir()
- if chdir(first) then
+ if lfs.chdir(first) then
first = dir.current()
end
- chdir(d)
+ lfs.chdir(d)
end
end
if not first then
@@ -3381,10 +3698,6 @@ local type, tonumber = type, tonumber
boolean = boolean or { }
local boolean = boolean
--- function boolean.tonumber(b)
--- return b and 1 or 0 -- test and test and return or return
--- end
-
function boolean.tonumber(b)
if b then return 1 else return 0 end -- test and return or return
end
@@ -3545,7 +3858,7 @@ local function utf16_to_utf8_be(t)
if right then
local now = 256*left + right
if more > 0 then
- now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000
+ now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000 -- the 0x10000 smells wrong
more = 0
r = r + 1
result[r] = utfchar(now)
@@ -3573,7 +3886,7 @@ local function utf16_to_utf8_le(t)
if right then
local now = 256*right + left
if more > 0 then
- now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000
+ now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000 -- the 0x10000 smells wrong
more = 0
r = r + 1
result[r] = utfchar(now)
@@ -3590,14 +3903,14 @@ local function utf16_to_utf8_le(t)
return t
end
-local function utf32_to_utf8_be(str)
+local function utf32_to_utf8_be(t)
if type(t) == "string" then
- t = utfsplitlines(str)
+ t = utfsplitlines(t)
end
local result = { } -- we reuse result
for i=1,#t do
local r, more = 0, -1
- for a,b in bytepairs(str) do
+ for a,b in bytepairs(t[i]) do
if a and b then
if more < 0 then
more = 256*256*256*a + 256*256*b
@@ -3612,17 +3925,17 @@ local function utf32_to_utf8_be(str)
end
t[i] = concat(result,"",1,r)
end
- return result
+ return t
end
-local function utf32_to_utf8_le(str)
+local function utf32_to_utf8_le(t)
if type(t) == "string" then
- t = utfsplitlines(str)
+ t = utfsplitlines(t)
end
local result = { } -- we reuse result
for i=1,#t do
local r, more = 0, -1
- for a,b in bytepairs(str) do
+ for a,b in bytepairs(t[i]) do
if a and b then
if more < 0 then
more = 256*b + a
@@ -3637,7 +3950,7 @@ local function utf32_to_utf8_le(str)
end
t[i] = concat(result,"",1,r)
end
- return result
+ return t
end
unicode.utf32_to_utf8_be = utf32_to_utf8_be
@@ -3706,15 +4019,67 @@ end
local lpegmatch = lpeg.match
-local utftype = lpeg.patterns.utftype
+local patterns = lpeg.patterns
+local utftype = patterns.utftype
function unicode.filetype(data)
return data and lpegmatch(utftype,data) or "unknown"
end
+local toentities = lpeg.Cs (
+ (
+ patterns.utf8one
+ + (
+ patterns.utf8two
+ + patterns.utf8three
+ + patterns.utf8four
+ ) / function(s) local b = utfbyte(s) if b < 127 then return s else return format("&#%X;",b) end end
+ )^0
+)
+
+patterns.toentities = toentities
+
+function utf.toentities(str)
+ return lpegmatch(toentities,str)
+end
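+
+-- non-ascii characters become hexadecimal numeric entities (expected result shown):
+--
+-- print(utf.toentities("a©b")) -- a&#A9;b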
+
+local P, C, R, Cs = lpeg.P, lpeg.C, lpeg.R, lpeg.Cs
+
+local one = P(1)
+local two = C(1) * C(1)
+local four = C(R(utfchar(0xD8),utfchar(0xFF))) * C(1) * C(1) * C(1)
+
+local pattern = P("\254\255") * Cs( (
+ four / function(a,b,c,d)
+ local ab = 0xFF * byte(a) + byte(b)
+ local cd = 0xFF * byte(c) + byte(d)
+ return utfchar((ab-0xD800)*0x400 + (cd-0xDC00) + 0x10000)
+ end
+ + two / function(a,b)
+ return utfchar(byte(a)*256 + byte(b))
+ end
+ + one
+ )^1 )
+ + P("\255\254") * Cs( (
+ four / function(b,a,d,c)
+ local ab = 0xFF * byte(a) + byte(b)
+ local cd = 0xFF * byte(c) + byte(d)
+ return utfchar((ab-0xD800)*0x400 + (cd-0xDC00) + 0x10000)
+ end
+ + two / function(b,a)
+ return utfchar(byte(a)*256 + byte(b))
+ end
+ + one
+ )^1 )
+
+function string.toutf(s)
+ return lpegmatch(pattern,s) or s -- todo: utf32
+end
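+
+-- a bom prefixed utf-16 string becomes utf-8, anything else is returned as is
+-- (expected result shown, big endian example):
+--
+-- print(string.toutf("\254\255\000A\000B")) -- AB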
+
+
end -- of closure
do -- create closure to overcome 200 locals limit
@@ -3750,8 +4115,8 @@ if not math.sind then
end
if not math.odd then
- function math.odd (n) return n % 2 == 0 end
- function math.even(n) return n % 2 ~= 0 end
+ function math.odd (n) return n % 2 ~= 0 end
+ function math.even(n) return n % 2 == 0 end
end
@@ -3771,9 +4136,10 @@ utilities = utilities or {}
utilities.tables = utilities.tables or { }
local tables = utilities.tables
-local format, gmatch = string.format, string.gmatch
+local format, gmatch, rep = string.format, string.gmatch, string.rep
local concat, insert, remove = table.concat, table.insert, table.remove
local setmetatable, getmetatable, tonumber, tostring = setmetatable, getmetatable, tonumber, tostring
+local type, next, rawset, tonumber = type, next, rawset, tonumber
function tables.definetable(target) -- defines undefined tables
local composed, t, n = nil, { }, 0
@@ -3789,14 +4155,31 @@ function tables.definetable(target) -- defines undefined tables
return concat(t,"\n")
end
-function tables.accesstable(target)
- local t = _G
+function tables.accesstable(target,root)
+ local t = root or _G
for name in gmatch(target,"([^%.]+)") do
t = t[name]
+ if not t then
+ return
+ end
end
return t
end
+function tables.migratetable(target,v,root)
+ local t = root or _G
+ local names = string.split(target,".")
+ for i=1,#names-1 do
+ local name = names[i]
+ t[name] = t[name] or { }
+ t = t[name]
+ if not t then
+ return
+ end
+ end
+ t[names[#names]] = v
+end
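+
+-- migratetable creates and assigns a dotted path, accesstable walks it (expected result shown):
+--
+-- tables.migratetable("a.b.c",23)
+-- print(tables.accesstable("a.b.c")) -- 23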
+
function tables.removevalue(t,value) -- todo: n
if value then
for i=1,#t do
@@ -3840,13 +4223,19 @@ end
-- experimental
-local function toxml(t,d,result)
+local function toxml(t,d,result,step)
for k, v in table.sortedpairs(t) do
if type(v) == "table" then
- result[#result+1] = format("%s<%s>",d,k)
- toxml(v,d.." ",result)
- result[#result+1] = format("%s</%s>",d,k)
- elseif tonumber(k) then
+ if type(k) == "number" then
+ result[#result+1] = format("%s<entry n='%s'>",d,k)
+ toxml(v,d..step,result,step)
+ result[#result+1] = format("%s</entry>",d,k)
+ else
+ result[#result+1] = format("%s<%s>",d,k)
+ toxml(v,d..step,result,step)
+ result[#result+1] = format("%s</%s>",d,k)
+ end
+ elseif type(k) == "number" then
result[#result+1] = format("%s<entry n='%s'>%s</entry>",d,k,v,k)
else
result[#result+1] = format("%s<%s>%s</%s>",d,k,tostring(v),k)
@@ -3854,17 +4243,56 @@ local function toxml(t,d,result)
end
end
-function table.toxml(t,name,nobanner)
+function table.toxml(t,name,nobanner,indent,spaces)
local noroot = name == false
local result = (nobanner or noroot) and { } or { "<?xml version='1.0' standalone='yes' ?>" }
+ local indent = rep(" ",indent or 0)
+ local spaces = rep(" ",spaces or 1)
if noroot then
- toxml( t, "", result)
+        toxml( t, indent, result, spaces)
else
- toxml( { [name or "root"] = t }, "", result)
+ toxml( { [name or "root"] = t }, indent, result, spaces)
end
return concat(result,"\n")
end
+-- also experimental
+
+-- encapsulate(table,utilities.tables)
+-- encapsulate(table,utilities.tables,true)
+-- encapsulate(table,true)
+
+function tables.encapsulate(core,capsule,protect)
+ if type(capsule) ~= "table" then
+ protect = true
+ capsule = { }
+ end
+ for key, value in next, core do
+ if capsule[key] then
+            print(format("\ninvalid inheritance '%s' in '%s'",key,tostring(core)))
+ os.exit()
+ else
+ capsule[key] = value
+ end
+ end
+ if protect then
+ for key, value in next, core do
+ core[key] = nil
+ end
+ setmetatable(core, {
+ __index = capsule,
+ __newindex = function(t,key,value)
+ if capsule[key] then
+ print(format("\ninvalid overload '%s' in '%s'",key,tostring(core)))
+ os.exit()
+ else
+ rawset(t,key,value)
+ end
+ end
+ } )
+ end
+end
+
end -- of closure
@@ -3886,8 +4314,8 @@ local storage = utilities.storage
function storage.mark(t)
if not t then
- texio.write_nl("fatal error: storage '%s' cannot be marked",t)
- os.exit()
+ texio.write_nl("fatal error: storage cannot be marked")
+ return -- os.exit()
end
local m = getmetatable(t)
if not m then
@@ -3916,8 +4344,8 @@ end
function storage.checked(t)
if not t then
- texio.write_nl("fatal error: storage '%s' has not been allocated",t)
- os.exit()
+ texio.write_nl("fatal error: storage has not been allocated")
+ return -- os.exit()
end
return t
end
@@ -3946,13 +4374,13 @@ end
-- table namespace ?
-local function f_empty () return "" end -- t,k
-local function f_self (t,k) t[k] = k return k end
-local function f_ignore() end -- t,k,v
+local function f_empty () return "" end -- t,k
+local function f_self (t,k) t[k] = k return k end
+local function f_ignore() end -- t,k,v
-local t_empty = { __index = empty }
-local t_self = { __index = self }
-local t_ignore = { __newindex = ignore }
+local t_empty = { __index = f_empty }
+local t_self = { __index = f_self }
+local t_ignore = { __newindex = f_ignore }
function table.setmetatableindex(t,f)
local m = getmetatable(t)
@@ -3973,6 +4401,7 @@ function table.setmetatableindex(t,f)
setmetatable(t,{ __index = f })
end
end
+ return t
end
function table.setmetatablenewindex(t,f)
@@ -3990,6 +4419,7 @@ function table.setmetatablenewindex(t,f)
setmetatable(t,{ __newindex = f })
end
end
+ return t
end
function table.setmetatablecall(t,f)
@@ -3999,6 +4429,7 @@ function table.setmetatablecall(t,f)
else
setmetatable(t,{ __call = f })
end
+ return t
end
function table.setmetatablekey(t,key,value)
@@ -4008,6 +4439,7 @@ function table.setmetatablekey(t,key,value)
setmetatable(t,m)
end
m[key] = value
+ return t
end
function table.getmetatablekey(t,key,value)
@@ -4176,14 +4608,29 @@ utilities = utilities or {}
utilities.lua = utilities.lua or { }
utilities.report = logs and logs.reporter("system") or print
-function utilities.lua.compile(luafile,lucfile,cleanup,strip) -- defaults: cleanup=false strip=true
+local function stupidcompile(luafile,lucfile)
+ local data = io.loaddata(luafile)
+ if data and data ~= "" then
+ data = string.dump(data)
+ if data and data ~= "" then
+ io.savedata(lucfile,data)
+ end
+ end
+end
+
+function utilities.lua.compile(luafile,lucfile,cleanup,strip,fallback) -- defaults: cleanup=false strip=true
utilities.report("lua: compiling %s into %s",luafile,lucfile)
os.remove(lucfile)
local command = "-o " .. string.quoted(lucfile) .. " " .. string.quoted(luafile)
if strip ~= false then
command = "-s " .. command
end
- local done = os.spawn("texluac " .. command) == 0 or os.spawn("luac " .. command) == 0
+ local done = os.spawn("texluac " .. command) == 0 -- or os.spawn("luac " .. command) == 0
+ if not done and fallback then
+ utilities.report("lua: dumping %s into %s (unstripped)",luafile,lucfile)
+ stupidcompile(luafile,lucfile) -- maybe use the stripper we have elsewhere
+ cleanup = false -- better see how worse it is
+ end
if done and cleanup == true and lfs.isfile(lucfile) and lfs.isfile(luafile) then
utilities.report("lua: removing %s",luafile)
os.remove(luafile)
@@ -4210,9 +4657,9 @@ if not modules then modules = { } end modules ['util-prs'] = {
license = "see context related readme files"
}
-local P, R, V, C, Ct, Carg = lpeg.P, lpeg.R, lpeg.V, lpeg.C, lpeg.Ct, lpeg.Carg
+local P, R, V, C, Ct, Cs, Carg = lpeg.P, lpeg.R, lpeg.V, lpeg.C, lpeg.Ct, lpeg.Cs, lpeg.Carg
local lpegmatch = lpeg.match
-local concat, format, gmatch = table.concat, string.format, string.gmatch
+local concat, format, gmatch, find = table.concat, string.format, string.gmatch, string.find
local tostring, type, next = tostring, type, next
utilities = utilities or {}
@@ -4240,8 +4687,12 @@ local rbrace = P("}")
local nobrace = 1 - (lbrace+rbrace)
local nested = P { lbrace * (nobrace + V(1))^0 * rbrace }
local spaces = space^0
+local argument = Cs((lbrace/"") * ((nobrace + nested)^0) * (rbrace/""))
+local content = (1-P(-1))^0
-lpeg.patterns.nested = nested
+lpeg.patterns.nested = nested -- no capture
+lpeg.patterns.argument = argument -- argument after e.g. =
+lpeg.patterns.content = content -- rest after e.g =
local value = P(lbrace * C((nobrace + nested)^0) * rbrace) + C((nested + (1-comma))^0)
@@ -4322,9 +4773,15 @@ parsers.patterns.settings_to_array = pattern
-- we could use a weak table as cache
-function parsers.settings_to_array(str)
+function parsers.settings_to_array(str,strict)
if not str or str == "" then
return { }
+ elseif strict then
+ if find(str,"{") then
+ return lpegmatch(pattern,str)
+ else
+ return { str }
+ end
else
return lpegmatch(pattern,str)
end
@@ -4562,61 +5019,55 @@ local names = { }
-- one
local function hook()
- local f = getinfo(2,"f").func
- local n = getinfo(2,"Sn")
--- if n.what == "C" and n.name then print (n.namewhat .. ': ' .. n.name) end
+ local f = getinfo(2) -- "nS"
if f then
- local cf = counters[f]
- if cf == nil then
- counters[f] = 1
- names[f] = n
- else
- counters[f] = cf + 1
- end
- end
-end
-
-local function getname(func)
- local n = names[func]
- if n then
- if n.what == "C" then
- return n.name or '<anonymous>'
+ local n = "unknown"
+ if f.what == "C" then
+ n = f.name or '<anonymous>'
+ if not names[n] then
+ names[n] = format("%42s",n)
+ end
else
-- source short_src linedefined what name namewhat nups func
- local name = n.name or n.namewhat or n.what
- if not name or name == "" then name = "?" end
- return format("%s : %s : %s", n.short_src or "unknown source", n.linedefined or "--", name)
+ n = f.name or f.namewhat or f.what
+ if not n or n == "" then
+ n = "?"
+ end
+ if not names[n] then
+ names[n] = format("%42s : % 5i : %s",n,f.linedefined or 0,f.short_src or "unknown source")
+ end
end
- else
- return "unknown"
+ counters[n] = (counters[n] or 0) + 1
end
end
-function debugger.showstats(printer,threshold)
+function debugger.showstats(printer,threshold) -- hm, something has changed, rubbish now
printer = printer or texio.write or print
threshold = threshold or 0
local total, grandtotal, functions = 0, 0, 0
- printer("\n") -- ugly but ok
- -- table.sort(counters)
- for func, count in next, counters do
- if count > threshold then
- local name = getname(func)
- if not find(name,"for generator") then
- printer(format("%8i %s", count, name))
- total = total + count
- end
+ local dataset = { }
+ for name, count in next, counters do
+ dataset[#dataset+1] = { name, count }
+ end
+ table.sort(dataset,function(a,b) return a[2] == b[2] and b[1] > a[1] or a[2] > b[2] end)
+ for i=1,#dataset do
+ local d = dataset[i]
+ local name = d[1]
+ local count = d[2]
+ if count > threshold and not find(name,"for generator") then -- move up
+ printer(format("%8i %s\n", count, names[name]))
+ total = total + count
end
grandtotal = grandtotal + count
functions = functions + 1
end
- printer(format("functions: %s, total: %s, grand total: %s, threshold: %s\n", functions, total, grandtotal, threshold))
+ printer("\n")
+ printer(format("functions : % 10i\n", functions))
+ printer(format("total : % 10i\n", total))
+ printer(format("grand total: % 10i\n", grandtotal))
+ printer(format("threshold : % 10i\n", threshold))
end
--- two
-
-
--- rest
-
function debugger.savestats(filename,threshold)
local f = io.open(filename,'w')
if f then
@@ -4638,6 +5089,7 @@ end
local is_node = node and node.is_node
+local is_lpeg = lpeg and lpeg.type
function inspect(i) -- global function
local ti = type(i)
@@ -4645,11 +5097,30 @@ function inspect(i) -- global function
table.print(i,"table")
elseif is_node and is_node(i) then
table.print(nodes.astable(i),tostring(i))
+ elseif is_lpeg and is_lpeg(i) then
+ lpeg.print(i)
else
print(tostring(i))
end
end
+-- from the lua book:
+
+function traceback()
+ local level = 1
+ while true do
+ local info = debug.getinfo(level, "Sl")
+ if not info then
+ break
+ elseif info.what == "C" then
+ print(format("%3i : C function",level))
+ else
+ print(format("%3i : [%s]:%d",level,info.short_src,info.currentline))
+ end
+ level = level + 1
+ end
+end
+
end -- of closure
@@ -4668,7 +5139,7 @@ if not modules then modules = { } end modules ['trac-inf'] = {
-- get warnings about assignments. This is more efficient than using rawset
-- and rawget.
-local format = string.format
+local format, lower = string.format, string.lower
local clock = os.gettimeofday or os.clock -- should go in environment
local write_nl = texio.write_nl
@@ -4770,10 +5241,10 @@ function statistics.show(reporter)
-- this code will move
local register = statistics.register
register("luatex banner", function()
- return string.lower(status.banner)
+ return lower(status.banner)
end)
register("control sequences", function()
- return format("%s of %s", status.cs_count, status.hash_size+status.hash_extra)
+ return format("%s of %s + %s", status.cs_count, status.hash_size,status.hash_extra)
end)
register("callbacks", function()
local total, indirect = status.callbacks or 0, status.indirect_callbacks or 0
@@ -4847,7 +5318,7 @@ end
function commands.elapsedtime(name)
stoptiming(name or "whatever")
- tex.sprint(elapsedtime(name or "whatever"))
+ context(elapsedtime(name or "whatever"))
end
@@ -4931,6 +5402,10 @@ local function set(t,what,newvalue)
if type(what) ~= "table" then
return
end
+ if not done then -- catch ... why not set?
+ done = { }
+ t.done = done
+ end
for w, value in next, what do
if value == "" then
value = newvalue
@@ -5167,6 +5642,8 @@ end
if texconfig then
+ -- this happens too late in ini mode but that is no problem
+
local function set(k,v)
v = tonumber(v)
if v then
@@ -5237,7 +5714,7 @@ local report, subreport, status, settarget, setformats, settranslations
local direct, subdirect, writer, pushtarget, poptarget
-if tex and tex.jobname or tex.formatname then
+if tex and (tex.jobname or tex.formatname) then
local valueiskey = { __index = function(t,k) t[k] = k return k end } -- will be helper
@@ -5252,6 +5729,10 @@ if tex and tex.jobname or tex.formatname then
write_nl(target,...)
end
+ newline = function()
+ write_nl(target,"\n")
+ end
+
report = function(a,b,c,...)
if c then
write_nl(target,format("%-15s > %s\n",translations[a],format(formats[b],c,...)))
@@ -5357,6 +5838,10 @@ else
writer = write_nl
+ newline = function()
+ write_nl("\n")
+ end
+
report = function(a,b,c,...)
if c then
write_nl(format("%-15s | %s",a,format(b,c,...)))
@@ -5416,6 +5901,7 @@ logs.settranslations = settranslations
logs.direct = direct
logs.subdirect = subdirect
logs.writer = writer
+logs.newline = newline
-- installer
@@ -5585,21 +6071,58 @@ local real, user, sub
function logs.start_page_number()
real, user, sub = texcount.realpageno, texcount.userpageno, texcount.subpageno
+-- real, user, sub = 0, 0, 0
end
-function logs.stop_page_number()
- if real > 0 then
- if user > 0 then
- if sub > 0 then
- report_pages("flushing realpage %s, userpage %s, subpage %s",real,user,sub)
+local timing = false
+local starttime = nil
+local lasttime = nil
+
+trackers.register("pages.timing", function(v) -- only for myself (diagnostics)
+ starttime = os.clock()
+ timing = true
+end)
+
+function logs.stop_page_number() -- the first page can include the initialization so we omit it from the average
+ if timing then
+ local elapsed, average
+ local stoptime = os.clock()
+ if not lasttime or real < 2 then
+ elapsed = stoptime
+ average = stoptime
+ starttime = stoptime
+ else
+ elapsed = stoptime - lasttime
+ average = (stoptime - starttime) / (real - 1)
+ end
+ lasttime = stoptime
+ if real > 0 then
+ if user > 0 then
+ if sub > 0 then
+ report_pages("flushing realpage %s, userpage %s, subpage %s, time %0.04f / %0.04f",real,user,sub,elapsed,average)
+ else
+ report_pages("flushing realpage %s, userpage %s, time %0.04f / %0.04f",real,user,elapsed,average)
+ end
else
- report_pages("flushing realpage %s, userpage %s",real,user)
+ report_pages("flushing realpage %s, time %0.04f / %0.04f",real,elapsed,average)
end
else
- report_pages("flushing realpage %s",real)
+ report_pages("flushing page, time %0.04f / %0.04f",elapsed,average)
end
else
- report_pages("flushing page")
+ if real > 0 then
+ if user > 0 then
+ if sub > 0 then
+ report_pages("flushing realpage %s, userpage %s, subpage %s",real,user,sub)
+ else
+ report_pages("flushing realpage %s, userpage %s",real,user)
+ end
+ else
+ report_pages("flushing realpage %s",real)
+ end
+ else
+ report_pages("flushing page")
+ end
end
logs.flush()
end
@@ -5681,6 +6204,13 @@ local function reportbanner(t)
end
end
+local function reportversion(t)
+ local banner = t.banner
+ if banner then
+ t.report(banner)
+ end
+end
+
local function reporthelp(t,...)
local helpinfo = t.helpinfo
if type(helpinfo) == "string" then
@@ -5707,6 +6237,7 @@ function logs.application(t)
t.report = logs.reporter(t.name)
t.help = function(...) reportbanner(t) ; reporthelp(t,...) ; reportinfo(t) end
t.identify = function() reportbanner(t) end
+ t.version = function() reportversion(t) end
return t
end
@@ -5772,6 +6303,11 @@ else
end
end
+-- do we still need io.flush then?
+
+io.stdout:setvbuf('no')
+io.stderr:setvbuf('no')
+
end -- of closure
@@ -6277,6 +6813,10 @@ if not modules then modules = { } end modules ['lxml-tab'] = {
-- stripping spaces from e.g. cont-en.xml saves .2 sec runtime so it's not worth the
-- trouble
+-- todo: when serializing optionally remap named entities to hex (if known in char-ent.lua)
+-- maybe when letter -> utf, else name .. then we need an option to the serializer .. a bit
+-- of work so we delay this till we clean up
+
local trace_entities = false trackers.register("xml.entities", function(v) trace_entities = v end)
local report_xml = logs and logs.reporter("xml","core") or function(...) print(format(...)) end
@@ -6301,10 +6841,11 @@ xml = xml or { }
local xml = xml
+local utf = unicode.utf8
local concat, remove, insert = table.concat, table.remove, table.insert
local type, next, setmetatable, getmetatable, tonumber = type, next, setmetatable, getmetatable, tonumber
local format, lower, find, match, gsub = string.format, string.lower, string.find, string.match, string.gsub
-local utfchar = unicode.utf8.char
+local utfchar, utffind, utfgsub = utf.char, utf.find, utf.gsub
local lpegmatch = lpeg.match
local P, S, R, C, V, C, Cs = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.V, lpeg.C, lpeg.Cs
@@ -6417,9 +6958,22 @@ element.</p>
local nsremap, resolvens = xml.xmlns, xml.resolvens
-local stack, top, dt, at, xmlns, errorstr, entities = { }, { }, { }, { }, { }, nil, { }
-local strip, cleanup, utfize, resolve, resolve_predefined, unify_predefined = false, false, false, false, false, false
-local dcache, hcache, acache = { }, { }, { }
+local stack = { }
+local top = { }
+local dt = { }
+local at = { }
+local xmlns = { }
+local errorstr = nil
+local entities = { }
+local strip = false
+local cleanup = false
+local utfize = false
+local resolve_predefined = false
+local unify_predefined = false
+
+local dcache = { }
+local hcache = { }
+local acache = { }
local mt = { }
@@ -6585,7 +7139,7 @@ local predefined_unified = {
[42] = "&quot;",
[47] = "&apos;",
[74] = "&lt;",
- [76] = "&gr;",
+ [76] = "&gt;",
}
local predefined_simplified = {
@@ -6596,6 +7150,58 @@ local predefined_simplified = {
[76] = ">", gt = ">",
}
+local nofprivates = 0xF0000 -- shared but seldom used
+
+local privates_u = { -- unescaped
+ [ [[&]] ] = "&amp;",
+ [ [["]] ] = "&quot;",
+ [ [[']] ] = "&apos;",
+ [ [[<]] ] = "&lt;",
+ [ [[>]] ] = "&gt;",
+}
+
+local privates_p = {
+}
+
+local privates_n = {
+ -- keeps track of defined ones
+}
+
+local function escaped(s)
+ if s == "" then
+ return ""
+ else -- if utffind(s,privates_u) then
+ return (utfgsub(s,".",privates_u))
+ -- else
+ -- return s
+ end
+end
+
+local function unescaped(s)
+ local p = privates_n[s]
+ if not p then
+ nofprivates = nofprivates + 1
+ p = utfchar(nofprivates)
+ privates_n[s] = p
+ s = "&" .. s .. ";" -- todo: use char-ent to map to hex
+ privates_u[p] = s
+ privates_p[p] = s
+ end
+ return p
+end
+
+local function unprivatized(s,resolve)
+ if s == "" then
+ return ""
+ else
+ return (utfgsub(s,".",privates_p))
+ end
+end
+
+xml.privatetoken = unescaped
+xml.unprivatized = unprivatized
+xml.privatecodes = privates_n
+
local function handle_hex_entity(str)
local h = hcache[str]
if not h then
@@ -6637,7 +7243,7 @@ local function handle_dec_entity(str)
if not n then
report_xml("utfize, ignoring dec entity &#%s;",str)
elseif trace_entities then
- report_xml("utfize, converting dec entity &#%s; into %s",str,h)
+ report_xml("utfize, converting dec entity &#%s; into %s",str,d)
end
else
if trace_entities then
@@ -6658,34 +7264,44 @@ local function handle_any_entity(str)
if not a then
a = resolve_predefined and predefined_simplified[str]
if a then
- -- one of the predefined
- elseif type(resolve) == "function" then
- a = resolve(str) or entities[str]
- else
- a = entities[str]
- end
- if a then
if trace_entities then
- report_xml("resolved entity &%s; -> %s (internal)",str,a)
+ report_xml("resolved entity &%s; -> %s (predefined)",str,a)
end
- a = lpegmatch(parsedentity,a) or a
else
- local unknown_any_entity = placeholders.unknown_any_entity
- if unknown_any_entity then
- a = unknown_any_entity(str) or ""
+ if type(resolve) == "function" then
+ a = resolve(str) or entities[str]
+ else
+ a = entities[str]
end
if a then
+ if type(a) == "function" then
+ if trace_entities then
+ report_xml("expanding entity &%s; (function)",str)
+ end
+ a = a(str) or ""
+ end
+ a = lpegmatch(parsedentity,a) or a -- for nested
if trace_entities then
- report_xml("resolved entity &%s; -> %s (external)",str,a)
+ report_xml("resolved entity &%s; -> %s (internal)",str,a)
end
else
- if trace_entities then
- report_xml("keeping entity &%s;",str)
+ local unknown_any_entity = placeholders.unknown_any_entity
+ if unknown_any_entity then
+ a = unknown_any_entity(str) or ""
end
- if str == "" then
- a = "&error;"
+ if a then
+ if trace_entities then
+ report_xml("resolved entity &%s; -> %s (external)",str,a)
+ end
else
- a = "&" .. str .. ";"
+ if trace_entities then
+ report_xml("keeping entity &%s;",str)
+ end
+ if str == "" then
+ a = "&error;"
+ else
+ a = "&" .. str .. ";"
+ end
end
end
end
@@ -6700,18 +7316,25 @@ local function handle_any_entity(str)
else
local a = acache[str]
if not a then
- if trace_entities then
- report_xml("found entity &%s;",str)
- end
a = resolve_predefined and predefined_simplified[str]
if a then
-- one of the predefined
acache[str] = a
+ if trace_entities then
+ report_xml("entity &%s; becomes %s",str,tostring(a))
+ end
elseif str == "" then
+ if trace_entities then
+ report_xml("invalid entity &%s;",str)
+ end
a = "&error;"
acache[str] = a
else
- a = "&" .. str .. ";"
+ if trace_entities then
+ report_xml("entity &%s; is made private",str)
+ end
+ -- a = "&" .. str .. ";"
+ a = unescaped(str)
acache[str] = a
end
end
@@ -6761,7 +7384,7 @@ local value = (squote * Cs((entity + (1 - squote))^0) * squote) + (dq
local endofattributes = slash * close + close -- recovery of flaky html
local whatever = space * name * optionalspace * equal
-local wrongvalue = C(P(1-whatever-close)^1 + P(1-close)^1) / attribute_value_error
+----- wrongvalue = C(P(1-whatever-close)^1 + P(1-close)^1) / attribute_value_error
----- wrongvalue = C(P(1-whatever-endofattributes)^1 + P(1-endofattributes)^1) / attribute_value_error
----- wrongvalue = C(P(1-space-endofattributes)^1) / attribute_value_error
local wrongvalue = Cs(P(entity + (1-space-endofattributes))^1) / attribute_value_error
@@ -6796,6 +7419,8 @@ local function normalentity(k,v ) entities[k] = v end
local function systementity(k,v,n) entities[k] = v end
local function publicentity(k,v,n) entities[k] = v end
+-- todo: separate dtd parser
+
local begindoctype = open * P("!DOCTYPE")
local enddoctype = close
local beginset = P("[")
@@ -6803,17 +7428,22 @@ local endset = P("]")
local doctypename = C((1-somespace-close)^0)
local elementdoctype = optionalspace * P("<!ELEMENT") * (1-close)^0 * close
+local basiccomment = begincomment * ((1 - endcomment)^0) * endcomment
+
local normalentitytype = (doctypename * somespace * value)/normalentity
local publicentitytype = (doctypename * somespace * P("PUBLIC") * somespace * value)/publicentity
local systementitytype = (doctypename * somespace * P("SYSTEM") * somespace * value * somespace * P("NDATA") * somespace * doctypename)/systementity
local entitydoctype = optionalspace * P("<!ENTITY") * somespace * (systementitytype + publicentitytype + normalentitytype) * optionalspace * close
-local doctypeset = beginset * optionalspace * P(elementdoctype + entitydoctype + space)^0 * optionalspace * endset
+-- we accept comments in doctypes
+
+local doctypeset = beginset * optionalspace * P(elementdoctype + entitydoctype + basiccomment + space)^0 * optionalspace * endset
local definitiondoctype= doctypename * somespace * doctypeset
local publicdoctype = doctypename * somespace * P("PUBLIC") * somespace * value * somespace * value * somespace * doctypeset
local systemdoctype = doctypename * somespace * P("SYSTEM") * somespace * value * somespace * doctypeset
local simpledoctype = (1-close)^1 -- * balanced^0
local somedoctype = C((somespace * (publicdoctype + systemdoctype + definitiondoctype + simpledoctype) * optionalspace)^0)
local instruction = (spacing * begininstruction * someinstruction * endinstruction) / function(...) add_special("@pi@",...) end
local comment = (spacing * begincomment * somecomment * endcomment ) / function(...) add_special("@cm@",...) end
@@ -6845,17 +7475,30 @@ local grammar_unparsed_text = P { "preamble",
children = unparsedtext + V("parent") + emptyelement + comment + cdata + instruction,
}
--- maybe we will add settinsg to result as well
+-- maybe we will add settings to result as well
-local function xmlconvert(data, settings)
- settings = settings or { } -- no_root strip_cm_and_dt given_entities parent_root error_handler
- strip = settings.strip_cm_and_dt
- utfize = settings.utfize_entities
- resolve = settings.resolve_entities
+local function _xmlconvert_(data, settings)
+ settings = settings or { } -- no_root strip_cm_and_dt given_entities parent_root error_handler
+ --
+ strip = settings.strip_cm_and_dt
+ utfize = settings.utfize_entities
+ resolve = settings.resolve_entities
resolve_predefined = settings.resolve_predefined_entities -- in case we have escaped entities
- unify_predefined = settings.unify_predefined_entities -- &#038; -> &amp;
- cleanup = settings.text_cleanup
- stack, top, at, xmlns, errorstr, entities = { }, { }, { }, { }, nil, settings.entities or { }
+ unify_predefined = settings.unify_predefined_entities -- &#038; -> &amp;
+ cleanup = settings.text_cleanup
+ entities = settings.entities or { }
+ --
+ if utfize == nil then
+ settings.utfize_entities = true
+ utfize = true
+ end
+ if resolve_predefined == nil then
+ settings.resolve_predefined_entities = true
+ resolve_predefined = true
+ end
+ --
+ --
+ stack, top, at, xmlns, errorstr = { }, { }, { }, { }, nil
acache, hcache, dcache = { }, { }, { } -- not stored
reported_attribute_errors = { }
if settings.parent_root then
@@ -6893,7 +7536,7 @@ local function xmlconvert(data, settings)
else
errorhandler = errorhandler or xml.errorhandler
if errorhandler then
- xml.errorhandler("load",errorstr)
+ xml.errorhandler(format("load error: %s",errorstr))
end
end
else
@@ -6907,7 +7550,7 @@ local function xmlconvert(data, settings)
local v = rdt[k]
if type(v) == "table" and not v.special then -- always table -)
result.ri = k -- rootindex
-v.__p__ = result -- new, experiment, else we cannot go back to settings, we need to test this !
+ v.__p__ = result -- new, experiment, else we cannot go back to settings, we need to test this !
break
end
end
@@ -6915,16 +7558,42 @@ v.__p__ = result -- new, experiment, else we cannot go back to settings, we nee
if errorstr and errorstr ~= "" then
result.error = true
end
+ result.statistics = {
+ entities = {
+ decimals = dcache,
+ hexadecimals = hcache,
+ names = acache,
+ }
+ }
+ strip, utfize, resolve, resolve_predefined = nil, nil, nil, nil
+ unify_predefined, cleanup, entities = nil, nil, nil
+ stack, top, at, xmlns, errorstr = nil, nil, nil, nil, nil
+ acache, hcache, dcache = nil, nil, nil
+ reported_attribute_errors, mt, errorhandler = nil, nil, nil
return result
end
+-- Because we can have a crash (stack issues) with faulty xml, we wrap this one
+-- in a protector:
+
+function xmlconvert(data,settings)
+ local ok, result = pcall(function() return _xmlconvert_(data,settings) end)
+ if ok then
+ return result
+ else
+ return _xmlconvert_("")
+ end
+end
+
xml.convert = xmlconvert
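-- A minimal usage sketch (made-up input); only xml.convert as defined above is
-- assumed. A malformed string no longer crashes the converter but comes back as
-- a root table with its error flag set:
--
-- local good = xml.convert("<doc><a>text</a></doc>")
-- local bad  = xml.convert("<doc><a>text")   -- deliberately unbalanced
-- print(good.error)                          -- nil
-- print(bad.error)                           -- true (parse error reported)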
-function xml.inheritedconvert(data,xmldata)
+function xml.inheritedconvert(data,xmldata) -- xmldata is parent
local settings = xmldata.settings
- settings.parent_root = xmldata -- to be tested
+ if settings then
+ settings.parent_root = xmldata -- to be tested
+ end
-- settings.no_root = true
- local xc = xmlconvert(data,settings)
+ local xc = xmlconvert(data,settings) -- hm, we might need to locate settings
-- xc.settings = nil
-- xc.entities = nil
-- xc.special = nil
@@ -6953,7 +7622,7 @@ function xml.is_valid(root)
return root and not root.error
end
-xml.errorhandler = report
+xml.errorhandler = report_xml
--[[ldx--
<p>We cannot load an <l n='lpeg'/> from a filehandle so we need to load
@@ -7032,18 +7701,15 @@ alternative.</p>
function xml.checkbom(root) -- can be made faster
if root.ri then
- local dt, found = root.dt, false
+ local dt = root.dt
for k=1,#dt do
local v = dt[k]
if type(v) == "table" and v.special and v.tg == "@pi@" and find(v.dt[1],"xml.*version=") then
- found = true
- break
+ return
end
end
- if not found then
- insert(dt, 1, { special=true, ns="", tg="@pi@", dt = { "xml version='1.0' standalone='yes'"} } )
- insert(dt, 2, "\n" )
- end
+ insert(dt, 1, { special=true, ns="", tg="@pi@", dt = { "xml version='1.0' standalone='yes'"} } )
+ insert(dt, 2, "\n" )
end
end
@@ -7054,14 +7720,14 @@ and then handle the lot.</p>
-- new experimental reorganized serialize
-local function verbose_element(e,handlers)
+local function verbose_element(e,handlers) -- options
local handle = handlers.handle
local serialize = handlers.serialize
local ens, etg, eat, edt, ern = e.ns, e.tg, e.at, e.dt, e.rn
local ats = eat and next(eat) and { }
if ats then
for k,v in next, eat do
- ats[#ats+1] = format('%s=%q',k,v)
+ ats[#ats+1] = format('%s=%q',k,escaped(v))
end
end
if ern and trace_entities and ern ~= ens then
@@ -7077,7 +7743,7 @@ local function verbose_element(e,handlers)
for i=1,#edt do
local e = edt[i]
if type(e) == "string" then
- handle(e)
+ handle(escaped(e))
else
serialize(e,handlers)
end
@@ -7098,11 +7764,11 @@ local function verbose_element(e,handlers)
handle("<",etg,">")
end
for i=1,#edt do
- local ei = edt[i]
- if type(ei) == "string" then
- handle(ei)
+ local e = edt[i]
+ if type(e) == "string" then
+ handle(escaped(e)) -- option: hexify escaped entities
else
- serialize(ei,handlers)
+ serialize(e,handlers)
end
end
handle("</",etg,">")
@@ -7137,7 +7803,7 @@ local function verbose_root(e,handlers)
end
local function verbose_text(e,handlers)
- handlers.handle(e)
+ handlers.handle(escaped(e))
end
local function verbose_document(e,handlers)
@@ -7265,20 +7931,33 @@ local result
local xmlfilehandler = newhandlers {
name = "file",
- initialize = function(name) result = io.open(name,"wb") return result end,
- finalize = function() result:close() return true end,
- handle = function(...) result:write(...) end,
+ initialize = function(name)
+ result = io.open(name,"wb")
+ return result
+ end,
+ finalize = function()
+ result:close()
+ return true
+ end,
+ handle = function(...)
+ result:write(...)
+ end,
}
-- no checking on writeability here but not faster either
--
-- local xmlfilehandler = newhandlers {
--- initialize = function(name) io.output(name,"wb") return true end,
--- finalize = function() io.close() return true end,
+-- initialize = function(name)
+-- io.output(name,"wb")
+-- return true
+-- end,
+-- finalize = function()
+-- io.close()
+-- return true
+-- end,
-- handle = io.write,
-- }
-
function xml.save(root,name)
serialize(root,xmlfilehandler,name)
end
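-- Usage sketch (hypothetical file name), mirroring the commented examples used
-- elsewhere in this file:
--
-- local x = xml.convert("<doc><a>test</a></doc>")
-- xml.save(x,"t.xml")   -- serialized through the file handler defined above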
@@ -7287,28 +7966,34 @@ local result
local xmlstringhandler = newhandlers {
name = "string",
- initialize = function() result = { } return result end,
- finalize = function() return concat(result) end,
- handle = function(...) result[#result+1] = concat { ... } end
+ initialize = function()
+ result = { }
+ return result
+ end,
+ finalize = function()
+ return concat(result)
+ end,
+ handle = function(...)
+ result[#result+1] = concat { ... }
+ end,
}
local function xmltostring(root) -- 25% overhead due to collecting
- if root then
- if type(root) == 'string' then
- return root
- else -- if next(root) then -- next is faster than type (and >0 test)
- return serialize(root,xmlstringhandler) or ""
- end
+ if not root then
+ return ""
+ elseif type(root) == 'string' then
+ return root
+ else -- if next(root) then -- next is faster than type (and >0 test)
+ return serialize(root,xmlstringhandler) or ""
end
- return ""
end
-local function xmltext(root) -- inline
+local function __tostring(root) -- inline
return (root and xmltostring(root)) or ""
end
initialize_mt = function(root) -- redefinition
- mt = { __tostring = xmltext, __index = root }
+ mt = { __tostring = __tostring, __index = root }
end
xml.defaulthandlers = handlers
@@ -7428,7 +8113,7 @@ xml.tocdata(e,"error")
--ldx]]--
function xml.tocdata(e,wrapper)
- local whatever = xmltostring(e.dt)
+ local whatever = type(e) == "table" and xmltostring(e.dt) or e or ""
if wrapper then
whatever = format("<%s>%s</%s>",wrapper,whatever,wrapper)
end
@@ -7620,8 +8305,8 @@ apply_axis['child'] = function(list)
c = c + 1
collected[c] = dk
dk.ni = k -- refresh
- en = en + 1
- dk.ei = en
+ en = en + 1
+ dk.ei = en
end
end
ll.en = en
@@ -7649,6 +8334,7 @@ local function collect(list,collected,c)
end
return c
end
+
apply_axis['descendant'] = function(list)
local collected, c = { }, 0
for l=1,#list do
@@ -7940,6 +8626,12 @@ local lp_or = P("|") / " or "
local lp_and = P("&") / " and "
local lp_builtin = P (
+ P("text") / "(ll.dt[1] or '')" + -- fragile
+ P("content") / "ll.dt" +
+ -- P("name") / "(ll.ns~='' and ll.ns..':'..ll.tg)" +
+ P("name") / "((ll.ns~='' and ll.ns..':'..ll.tg) or ll.tg)" +
+ P("tag") / "ll.tg" +
+ P("position") / "l" + -- is element in finalizer
P("firstindex") / "1" +
P("lastindex") / "(#ll.__p__.dt or 1)" +
P("firstelement") / "1" +
@@ -7947,15 +8639,11 @@ local lp_builtin = P (
P("first") / "1" +
P("last") / "#list" +
P("rootposition") / "order" +
- P("position") / "l" + -- is element in finalizer
P("order") / "order" +
P("element") / "(ll.ei or 1)" +
P("index") / "(ll.ni or 1)" +
P("match") / "(ll.mi or 1)" +
- P("text") / "(ll.dt[1] or '')" +
- -- P("name") / "(ll.ns~='' and ll.ns..':'..ll.tg)" +
- P("name") / "((ll.ns~='' and ll.ns..':'..ll.tg) or ll.tg)" +
- P("tag") / "ll.tg" +
+ -- P("namespace") / "ll.ns" +
P("ns") / "ll.ns"
) * ((spaces * P("(") * spaces * P(")"))/"")
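-- Sketch (invented markup): the builtins above can be used directly inside
-- lpath expressions; the xml.collected iterator and the xml.text helper defined
-- later in this file are assumed here:
--
-- local x = xml.convert("<doc><a>one</a><a>two</a><a>three</a></doc>")
-- for e in xml.collected(x,"doc/a[position()>1]") do
--     print(xml.text(e))   -- two, three
-- end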
@@ -7979,11 +8667,11 @@ local lp_function = C(R("az","AZ","__")^1) * P("(") / function(t) -- todo: bett
end
end
-local lparent = lpeg.P("(")
-local rparent = lpeg.P(")")
+local lparent = P("(")
+local rparent = P(")")
local noparent = 1 - (lparent+rparent)
-local nested = lpeg.P{lparent * (noparent + lpeg.V(1))^0 * rparent}
-local value = lpeg.P(lparent * lpeg.C((noparent + nested)^0) * rparent) -- lpeg.P{"("*C(((1-S("()"))+V(1))^0)*")"}
+local nested = P{lparent * (noparent + V(1))^0 * rparent}
+local value = P(lparent * C((noparent + nested)^0) * rparent) -- P{"("*C(((1-S("()"))+V(1))^0)*")"}
local lp_child = Cc("expr.child(ll,'") * R("az","AZ","--","__")^1 * Cc("')")
local lp_number = S("+-") * R("09")^1
@@ -8135,7 +8823,10 @@ local special_1 = P("*") * Cc(register_auto_descendant) * Cc(register_all_nodes
local special_2 = P("/") * Cc(register_auto_self)
local special_3 = P("") * Cc(register_auto_self)
-local pathparser = Ct { "patterns", -- can be made a bit faster by moving pattern outside
+local no_nextcolon = P(-1) + #(1-P(":")) -- newer lpeg needs the P(-1)
+local no_nextlparent = P(-1) + #(1-P("(")) -- newer lpeg needs the P(-1)
+
+local pathparser = Ct { "patterns", -- can be made a bit faster by moving some patterns outside
patterns = spaces * V("protocol") * spaces * (
( V("special") * spaces * P(-1) ) +
@@ -8164,10 +8855,8 @@ local pathparser = Ct { "patterns", -- can be made a bit faster by moving patter
shortcuts = V("shortcuts_a") * (spaces * "/" * spaces * V("shortcuts_a"))^0,
s_descendant_or_self = (P("***/") + P("/")) * Cc(register_descendant_or_self), --- *** is a bonus
- -- s_descendant_or_self = P("/") * Cc(register_descendant_or_self),
s_descendant = P("**") * Cc(register_descendant),
- s_child = P("*") * #(1-P(":")) * Cc(register_child ),
--- s_child = P("*") * #(P("/")+P(-1)) * Cc(register_child ),
+ s_child = P("*") * no_nextcolon * Cc(register_child ),
s_parent = P("..") * Cc(register_parent ),
s_self = P("." ) * Cc(register_self ),
s_root = P("^^") * Cc(register_root ),
@@ -8194,13 +8883,13 @@ local pathparser = Ct { "patterns", -- can be made a bit faster by moving patter
expressions = expression / register_expression,
letters = R("az")^1,
- name = (1-lpeg.S("/[]()|:*!"))^1,
+ name = (1-S("/[]()|:*!"))^1, -- make inline
negate = P("!") * Cc(false),
nodefunction = V("negate") + P("not") * Cc(false) + Cc(true),
nodetest = V("negate") + Cc(true),
nodename = (V("negate") + Cc(true)) * spaces * ((V("wildnodename") * P(":") * V("wildnodename")) + (Cc(false) * V("wildnodename"))),
- wildnodename = (C(V("name")) + P("*") * Cc(false)) * #(1-P("(")),
+ wildnodename = (C(V("name")) + P("*") * Cc(false)) * no_nextlparent,
nodeset = spaces * Ct(V("nodename") * (spaces * P("|") * spaces * V("nodename"))^0) * spaces,
finalizer = (Cb("protocol") * P("/")^-1 * C(V("name")) * arguments * P(-1)) / register_finalizer,
@@ -8364,7 +9053,7 @@ local function profiled_apply(list,parsed,nofparsed,order)
elseif kind == "expression" then
collected = apply_expression(collected,pi.evaluator,order)
elseif kind == "finalizer" then
- collected = pi.finalizer(collected)
+ collected = pi.finalizer(collected) -- no check on # here
p.matched = p.matched + 1
p.finalized = p.finalized + 1
return collected
@@ -8497,9 +9186,9 @@ end
expressions.child = function(e,pattern)
return applylpath(e,pattern) -- todo: cache
end
-expressions.count = function(e,pattern)
+expressions.count = function(e,pattern) -- what if pattern == empty or nil
local collected = applylpath(e,pattern) -- todo: cache
- return (collected and #collected) or 0
+ return pattern and (collected and #collected) or 0
end
-- external
@@ -8508,7 +9197,7 @@ expressions.oneof = function(s,...) -- slow
local t = {...} for i=1,#t do if s == t[i] then return true end end return false
end
expressions.error = function(str)
- xml.errorhandler("unknown function in lpath expression",tostring(str or "?"))
+ xml.errorhandler(format("unknown function in lpath expression: %s",tostring(str or "?")))
return false
end
expressions.undefined = function(s)
@@ -8534,6 +9223,23 @@ expressions.lower = lower
expressions.number = tonumber
expressions.boolean = toboolean
+function expressions.contains(str,pattern)
+ local t = type(str)
+ if t == "string" then
+ if find(str,pattern) then
+ return true
+ end
+ elseif t == "table" then
+ for i=1,#str do
+ local d = str[i]
+ if type(d) == "string" and find(d,pattern) then
+ return true
+ end
+ end
+ end
+ return false
+end
+
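+-- Sketch of intended use in an lpath expression (element names invented); the
+-- second argument is a Lua string pattern handed to find(), and the
+-- xml.collected iterator defined elsewhere is assumed:
+--
+-- local x = xml.convert("<doc><a>alpha</a><a>beta</a></doc>")
+-- for e in xml.collected(x,"a[contains(text(),'bet')]") do
+--     print(xml.text(e))   -- beta
+-- end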
-- user interface
local function traverse(root,pattern,handle)
@@ -8856,10 +9562,12 @@ local xml = xml
local xmlconvert, xmlcopy, xmlname = xml.convert, xml.copy, xml.name
local xmlinheritedconvert = xml.inheritedconvert
local xmlapplylpath = xml.applylpath
+local xmlfilter = xml.filter
local type, setmetatable, getmetatable = type, setmetatable, getmetatable
-local insert, remove, fastcopy = table.insert, table.remove, table.fastcopy
-local gmatch, gsub = string.gmatch, string.gsub
+local insert, remove, fastcopy, concat = table.insert, table.remove, table.fastcopy, table.concat
+local gmatch, gsub, format = string.gmatch, string.gsub, string.format
+local utfbyte = utf.byte
local function report(what,pattern,c,e)
report_xml("%s element '%s' (root: '%s', position: %s, index: %s, pattern: %s)",what,xmlname(e),xmlname(e.__p__),c,e.ni,pattern)
@@ -9004,7 +9712,7 @@ local function xmltoelement(whatever,root)
end
local element
if type(whatever) == "string" then
- element = xmlinheritedconvert(whatever,root)
+ element = xmlinheritedconvert(whatever,root) -- beware, not really a root
else
element = whatever -- we assume a table
end
@@ -9031,18 +9739,30 @@ local function copiedelement(element,newparent)
end
function xml.delete(root,pattern)
- local collected = xmlapplylpath(root,pattern)
- if collected then
- for c=1,#collected do
- local e = collected[c]
- local p = e.__p__
- if p then
- if trace_manipulations then
- report('deleting',pattern,c,e)
+ if not pattern or pattern == "" then
+ local p = root.__p__
+ if p then
+ if trace_manipulations then
+ report('deleting',"--",1,root) -- no pattern, so position is simply 1
+ end
+ local d = p.dt
+ remove(d,root.ni)
+ redo_ni(d) -- can be made faster and inlined
+ end
+ else
+ local collected = xmlapplylpath(root,pattern)
+ if collected then
+ for c=1,#collected do
+ local e = collected[c]
+ local p = e.__p__
+ if p then
+ if trace_manipulations then
+ report('deleting',pattern,c,e)
+ end
+ local d = p.dt
+ remove(d,e.ni)
+ redo_ni(d) -- can be made faster and inlined
end
- local d = p.dt
- remove(d,e.ni)
- redo_ni(d) -- can be made faster and inlined
end
end
end
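-- Sketch (invented markup): with a pattern the matching elements are removed,
-- and with the new no-pattern branch the element itself is detached from its
-- parent:
--
-- local x = xml.convert("<doc><a/><b/><a/></doc>")
-- xml.delete(x,"a")   -- drops both <a/> elements from <doc>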
@@ -9105,32 +9825,39 @@ end
local function inject_element(root,pattern,whatever,prepend)
local element = root and xmltoelement(whatever,root)
local collected = element and xmlapplylpath(root,pattern)
- if collected then
- for c=1,#collected do
- local e = collected[c]
- local r = e.__p__
- local d, k, rri = r.dt, e.ni, r.ri
- local edt = (rri and d[rri].dt) or (d and d[k] and d[k].dt)
- if edt then
- local be, af
- local cp = copiedelement(element,e)
- if prepend then
- be, af = cp, edt
- else
- be, af = edt, cp
- end
- local bn = #be
- for i=1,#af do
- bn = bn + 1
- be[bn] = af[i]
- end
- if rri then
- r.dt[rri].dt = be
- else
- d[k].dt = be
- end
- redo_ni(d)
+ local function inject_e(e)
+ local r = e.__p__
+ local d, k, rri = r.dt, e.ni, r.ri
+ local edt = (rri and d[rri].dt) or (d and d[k] and d[k].dt)
+ if edt then
+ local be, af
+ local cp = copiedelement(element,e)
+ if prepend then
+ be, af = cp, edt
+ else
+ be, af = edt, cp
end
+ local bn = #be
+ for i=1,#af do
+ bn = bn + 1
+ be[bn] = af[i]
+ end
+ if rri then
+ r.dt[rri].dt = be
+ else
+ d[k].dt = be
+ end
+ redo_ni(d)
+ end
+ end
+ if not collected then
+ -- nothing
+ elseif collected.tg then
+ -- first or so
+ inject_e(collected)
+ else
+ for c=1,#collected do
+ inject_e(collected[c])
end
end
end
@@ -9138,16 +9865,23 @@ end
local function insert_element(root,pattern,whatever,before) -- todo: element als functie
local element = root and xmltoelement(whatever,root)
local collected = element and xmlapplylpath(root,pattern)
- if collected then
+ local function insert_e(e)
+ local r = e.__p__
+ local d, k = r.dt, e.ni
+ if not before then
+ k = k + 1
+ end
+ insert(d,k,copiedelement(element,r))
+ redo_ni(d)
+ end
+ if not collected then
+ -- nothing
+ elseif collected.tg then
+ -- first or so
+ insert_e(collected)
+ else
for c=1,#collected do
- local e = collected[c]
- local r = e.__p__
- local d, k = r.dt, e.ni
- if not before then
- k = k + 1
- end
- insert(d,k,copiedelement(element,r))
- redo_ni(d)
+ insert_e(collected[c])
end
end
end
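-- Sketch (invented markup); the exported wrappers around insert_element and
-- inject_element (xml.insertbefore, xml.insertafter and friends, defined
-- further down in this module and not visible in this hunk) are assumed here:
--
-- local x = xml.convert("<doc><a/></doc>")
-- xml.insertafter(x,"a","<b/>")   -- <doc><a/><b/></doc>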
@@ -9346,6 +10080,75 @@ function xml.remapname(root, pattern, newtg, newns, newrn)
end
--[[ldx--
+<p>Helper (for q2p).</p>
+--ldx]]--
+
+function xml.cdatatotext(e)
+ local dt = e.dt
+ if #dt == 1 then
+ local first = dt[1]
+ if first.tg == "@cd@" then
+ e.dt = first.dt
+ end
+ else
+ -- maybe option
+ end
+end
+
+xml.builtinentities = table.tohash { "amp", "quot", "apos", "lt", "gt" } -- used often so share
+
+local entities = characters and characters.entities or nil
+local builtinentities = xml.builtinentities
+
+function xml.addentitiesdoctype(root,option) -- we could also have a 'resolve' i.e. inline hex
+ if not entities then
+ require("char-ent")
+ entities = characters.entities
+ end
+ if entities and root and root.tg == "@rt@" and root.statistics then
+ local list = { }
+ local hexify = option == "hexadecimal"
+ for k, v in table.sortedhash(root.statistics.entities.names) do
+ if not builtinentities[k] then
+ local e = entities[k]
+ if not e then
+ e = format("[%s]",k)
+ elseif hexify then
+ e = format("&#%05X;",utfbyte(k))
+ end
+ list[#list+1] = format(" <!ENTITY %s %q >",k,e)
+ end
+ end
+ local dt = root.dt
+ local n = dt[1].tg == "@pi@" and 2 or 1
+ if #list > 0 then
+ insert(dt, n, { "\n" })
+ insert(dt, n, {
+ tg = "@dt@", -- beware, doctype is unparsed
+ dt = { format("Something [\n%s\n] ",concat(list)) },
+ ns = "",
+ special = true,
+ })
+ insert(dt, n, { "\n\n" })
+ else
+ -- insert(dt, n, { table.serialize(root.statistics) })
+ end
+ end
+end
+
+-- local str = [==[
+-- <?xml version='1.0' standalone='yes' ?>
+-- <root>
+-- <a>test &nbsp; test &#123; test</a>
+-- <b><![CDATA[oeps]]></b>
+-- </root>
+-- ]==]
+--
+-- local x = xml.convert(str)
+-- xml.addentitiesdoctype(x,"hexadecimal")
+-- print(x)
+
+--[[ldx--
<p>Here are a few synonyms.</p>
--ldx]]--
@@ -9378,6 +10181,53 @@ xml.remap_tag = xml.remaptag obsolete.remap_tag
xml.remap_name = xml.remapname obsolete.remap_name = xml.remapname
xml.remap_namespace = xml.remapnamespace obsolete.remap_namespace = xml.remapnamespace
+-- new (probably ok)
+
+function xml.cdata(e)
+ if e then
+ local dt = e.dt
+ if dt and #dt == 1 then
+ local first = dt[1]
+ return first.tg == "@cd@" and first.dt[1] or ""
+ end
+ end
+ return ""
+end
+
+function xml.finalizers.xml.cdata(collected)
+ if collected then
+ local e = collected[1]
+ if e then
+ local dt = e.dt
+ if dt and #dt == 1 then
+ local first = dt[1]
+ return first.tg == "@cd@" and first.dt[1] or ""
+ end
+ end
+ end
+ return ""
+end
+
+function xml.insertcomment(e,str,n) -- also insertcdata
+ table.insert(e.dt,n or 1,{
+ tg = "@cm@",
+ ns = "",
+ special = true,
+ at = { },
+ dt = { str },
+ })
+end
+
+function xml.setcdata(e,str) -- also setcomment
+ e.dt = { {
+ tg = "@cd@",
+ ns = "",
+ special = true,
+ at = { },
+ dt = { str },
+ } }
+end
+
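+-- Usage sketch (made-up content); xml.first is the helper exported in the
+-- lxml-xml module further on and is assumed here:
+--
+-- local x = xml.convert("<doc><b><![CDATA[oeps]]></b></doc>")
+-- local b = xml.first(x,"b")
+-- print(xml.cdata(b))            -- oeps
+-- xml.setcdata(b,"new data")
+-- xml.insertcomment(b,"remark",1)
+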
end -- of closure
@@ -9391,15 +10241,17 @@ if not modules then modules = { } end modules ['lxml-xml'] = {
license = "see context related readme files"
}
-local concat = string.concat
+local concat = table.concat
+local find = string.find
local xml = xml
-local finalizers = xml.finalizers.xml
-local xmlfilter = xml.filter -- we could inline this one for speed
-local xmltostring = xml.tostring
-local xmlserialize = xml.serialize
-local xmlcollected = xml.collected
+local finalizers = xml.finalizers.xml
+local xmlfilter = xml.filter -- we could inline this one for speed
+local xmltostring = xml.tostring
+local xmlserialize = xml.serialize
+local xmlcollected = xml.collected
+local xmlnewhandlers = xml.newhandlers
local function first(collected) -- wrong ?
return collected and collected[1]
@@ -9413,6 +10265,21 @@ local function all(collected)
return collected
end
+-- local function reverse(collected)
+-- if collected then
+-- local nc = #collected
+-- if nc > 0 then
+-- local reversed, r = { }, 0
+-- for c=nc,1,-1 do
+-- r = r + 1
+-- reversed[r] = collected[c]
+-- end
+-- return reversed
+-- else
+-- return collected
+-- end
+-- end
+-- end
local reverse = table.reversed
@@ -9429,34 +10296,37 @@ local function att(id,name)
end
local function count(collected)
- return (collected and #collected) or 0
+ return collected and #collected or 0
end
local function position(collected,n)
- if collected then
- n = tonumber(n) or 0
- if n < 0 then
- return collected[#collected + n + 1]
- elseif n > 0 then
- return collected[n]
- else
- return collected[1].mi or 0
- end
+ if not collected then
+ return 0
+ end
+ local nc = #collected
+ if nc == 0 then
+ return 0
+ end
+ n = tonumber(n) or 0
+ if n < 0 then
+ return collected[nc + n + 1]
+ elseif n > 0 then
+ return collected[n]
+ else
+ return collected[1].mi or 0
end
end
local function match(collected)
- return (collected and collected[1].mi) or 0 -- match
+ return collected and #collected > 0 and collected[1].mi or 0 -- match
end
local function index(collected)
- if collected then
- return collected[1].ni
- end
+ return collected and #collected > 0 and collected[1].ni or 0 -- 0 is new
end
local function attributes(collected,arguments)
- if collected then
+ if collected and #collected > 0 then
local at = collected[1].at
if arguments then
return at[arguments]
@@ -9467,7 +10337,7 @@ local function attributes(collected,arguments)
end
local function chainattribute(collected,arguments) -- todo: optional levels
- if collected then
+ if collected and #collected > 0 then
local e = collected[1]
while e do
local at = e.at
@@ -9485,108 +10355,169 @@ local function chainattribute(collected,arguments) -- todo: optional levels
return ""
end
-local function raw(collected) -- hybrid
- if collected then
+local function raw(collected) -- hybrid (not much different from text so it might go)
+ if collected and #collected > 0 then
local e = collected[1] or collected
- return (e and xmlserialize(e)) or "" -- only first as we cannot concat function
+ return e and xmltostring(e) or "" -- only first as we cannot concat function
else
return ""
end
end
+--
+
+local xmltexthandler = xmlnewhandlers {
+ name = "string",
+ initialize = function()
+ result = { }
+ return result
+ end,
+ finalize = function()
+ return concat(result)
+ end,
+ handle = function(...)
+ result[#result+1] = concat { ... }
+ end,
+ escape = false,
+}
+
+local function xmltotext(root)
+ local dt = root.dt
+ if not dt then
+ return ""
+ end
+ local nt = #dt -- string or table
+ if nt == 0 then
+ return ""
+ elseif nt == 1 and type(dt[1]) == "string" then
+ return dt[1] -- no escaping of " ' < > &
+ else
+ return xmlserialize(root,xmltexthandler) or ""
+ end
+end
+
+--
+
local function text(collected) -- hybrid
- if collected then
- local e = collected[1] or collected
- return (e and xmltostring(e.dt)) or ""
+ if collected then -- no # test here !
+ local e = collected[1] or collected -- why fallback to element, how about cdata
+ return e and xmltotext(e) or ""
else
return ""
end
end
local function texts(collected)
- if collected then
- local t, n = { }, 0
- for c=1,#collected do
- local e = collected[c]
- if e and e.dt then
- n = n + 1
- t[n] = e.dt
- end
+ if not collected then
+ return { } -- why no nil
+ end
+ local nc = #collected
+ if nc == 0 then
+ return { } -- why no nil
+ end
+ local t, n = { }, 0
+ for c=1,nc do
+ local e = collected[c]
+ if e and e.dt then
+ n = n + 1
+ t[n] = e.dt
end
- return t
end
+ return t
end
local function tag(collected,n)
- if collected then
- local c
- if n == 0 or not n then
- c = collected[1]
- elseif n > 1 then
- c = collected[n]
- else
- c = collected[#collected-n+1]
- end
- return c and c.tg
+ if not collected then
+ return
+ end
+ local nc = #collected
+ if nc == 0 then
+ return
end
+ local c
+ if n == 0 or not n then
+ c = collected[1]
+ elseif n > 1 then
+ c = collected[n]
+ else
+ c = collected[nc-n+1]
+ end
+ return c and c.tg
end
local function name(collected,n)
- if collected then
- local c
- if n == 0 or not n then
- c = collected[1]
- elseif n > 1 then
- c = collected[n]
- else
- c = collected[#collected-n+1]
- end
- if c then
- if c.ns == "" then
- return c.tg
- else
- return c.ns .. ":" .. c.tg
- end
- end
+ if not collected then
+ return
+ end
+ local nc = #collected
+ if nc == 0 then
+ return
+ end
+ local c
+ if n == 0 or not n then
+ c = collected[1]
+ elseif n > 1 then
+ c = collected[n]
+ else
+ c = collected[nc-n+1]
+ end
+ if not c then
+ -- sorry
+ elseif c.ns == "" then
+ return c.tg
+ else
+ return c.ns .. ":" .. c.tg
end
end
local function tags(collected,nonamespace)
- if collected then
- local t, n = { }, 0
- for c=1,#collected do
- local e = collected[c]
- local ns, tg = e.ns, e.tg
- n = n + 1
- if nonamespace or ns == "" then
- t[n] = tg
- else
- t[n] = ns .. ":" .. tg
- end
+ if not collected then
+ return
+ end
+ local nc = #collected
+ if nc == 0 then
+ return
+ end
+ local t, n = { }, 0
+ for c=1,nc do
+ local e = collected[c]
+ local ns, tg = e.ns, e.tg
+ n = n + 1
+ if nonamespace or ns == "" then
+ t[n] = tg
+ else
+ t[n] = ns .. ":" .. tg
end
- return t
end
+ return t
end
-local function empty(collected)
- if collected then
- for c=1,#collected do
- local e = collected[c]
- if e then
- local edt = e.dt
- if edt then
- local n = #edt
- if n == 1 then
- local edk = edt[1]
- local typ = type(edk)
- if typ == "table" then
- return false
- elseif edk ~= "" then -- maybe an extra tester for spacing only
- return false
- end
- elseif n > 1 then
+local function empty(collected,spacesonly)
+ if not collected then
+ return true
+ end
+ local nc = #collected
+ if nc == 0 then
+ return true
+ end
+ for c=1,nc do
+ local e = collected[c]
+ if e then
+ local edt = e.dt
+ if edt then
+ local n = #edt
+ if n == 1 then
+ local edk = edt[1]
+ local typ = type(edk)
+ if typ == "table" then
+ return false
+ elseif edk ~= "" then
+ return false
+ elseif spacesonly and not find(edk,"%S") then
return false
end
+ elseif n > 1 then
+ return false
end
end
end
@@ -9641,14 +10572,14 @@ function xml.raw(id,pattern)
end
end
-function xml.text(id,pattern)
+function xml.text(id,pattern) -- brrr either content or element (when cdata)
if pattern then
-- return text(xmlfilter(id,pattern))
local collected = xmlfilter(id,pattern)
- return (collected and xmltostring(collected[1].dt)) or ""
+ return collected and #collected > 0 and xmltotext(collected[1]) or ""
elseif id then
-- return text(id)
- return xmltostring(id.dt) or ""
+ return xmltotext(id) or ""
else
return ""
end
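-- Sketch (invented markup): xml.text now runs the first match through
-- xmltotext instead of taking xmltostring of its dt:
--
-- local x = xml.convert("<doc><a>one <b>two</b> three</a></doc>")
-- print(xml.text(x,"a"))   -- text of the first matching <a>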
@@ -9656,6 +10587,8 @@ end
xml.content = text
+--
+
function xml.position(id,pattern,n) -- element
return position(xmlfilter(id,pattern),n)
end
@@ -9664,8 +10597,8 @@ function xml.match(id,pattern) -- number
return match(xmlfilter(id,pattern))
end
-function xml.empty(id,pattern)
- return empty(xmlfilter(id,pattern))
+function xml.empty(id,pattern,spacesonly)
+ return empty(xmlfilter(id,pattern),spacesonly)
end
xml.all = xml.filter
@@ -9719,7 +10652,7 @@ if not modules then modules = { } end modules ['data-ini'] = {
license = "see context related readme files",
}
-local gsub, find, gmatch = string.gsub, string.find, string.gmatch
+local gsub, find, gmatch, char = string.gsub, string.find, string.gmatch, string.char
local concat = table.concat
local next, type = next, type
@@ -9781,7 +10714,7 @@ do
local homedir = osgetenv(ostype == "windows" and 'USERPROFILE' or 'HOME') or ''
if not homedir or homedir == "" then
- homedir = string.char(127) -- we need a value, later we wil trigger on it
+ homedir = char(127) -- we need a value, later we will trigger on it
end
homedir = file.collapsepath(homedir)
@@ -9954,7 +10887,7 @@ if not modules then modules = { } end modules ['data-exp'] = {
license = "see context related readme files",
}
-local format, find, gmatch, lower = string.format, string.find, string.gmatch, string.lower
+local format, find, gmatch, lower, char, sub = string.format, string.find, string.gmatch, string.lower, string.char, string.sub
local concat, sort = table.concat, table.sort
local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
local Ct, Cs, Cc, P, C, S = lpeg.Ct, lpeg.Cs, lpeg.Cc, lpeg.P, lpeg.C, lpeg.S
@@ -9974,19 +10907,6 @@ local resolvers = resolvers
-- all, when working on the main resolver code, I don't want to scroll
-- past this every time. See data-obs.lua for the gsub variant.
--- {a,b,c,d}
--- a,b,c/{p,q,r},d
--- a,b,c/{p,q,r}/d/{x,y,z}//
--- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
--- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
--- a{b,c}{d,e}f
--- {a,b,c,d}
--- {a,b,c/{p,q,r},d}
--- {a,b,c/{p,q,r}/d/{x,y,z}//}
--- {a,b,c/{p,q/{x,y,z}},d/{p,q,r}}
--- {a,b,c/{p,q/{x,y,z},w}v,d/{p,q,r}}
--- {$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,.local,}/web2c}
-
local function f_first(a,b)
local t, n = { }, 0
for s in gmatch(b,"[^,]+") do
@@ -10043,12 +10963,9 @@ local function splitpathexpr(str, newlist, validate) -- I couldn't resist lpeggi
str = lpegmatch(stripper_1,str)
if validate then
for s in gmatch(str,"[^,]+") do
- local restoreslashes = false
- if find(s, "//$") then restoreslashes = true end
s = validate(s)
if s then
n = n + 1 ; t[n] = s
- if restoreslashes and not find(s, "//$") then t[n] = t[n] .. '//' end
end
end
else
@@ -10081,41 +10998,56 @@ function resolvers.expandedpathfromlist(pathlist)
return newlist
end
+-- {a,b,c,d}
+-- a,b,c/{p,q,r},d
+-- a,b,c/{p,q,r}/d/{x,y,z}//
+-- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
+-- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
+-- a{b,c}{d,e}f
+-- {a,b,c,d}
+-- {a,b,c/{p,q,r},d}
+-- {a,b,c/{p,q,r}/d/{x,y,z}//}
+-- {a,b,c/{p,q/{x,y,z}},d/{p,q,r}}
+-- {a,b,c/{p,q/{x,y,z},w}v,d/{p,q,r}}
+-- {$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,.local,}/web2c}
+
local cleanup = lpeg.replacer {
{ "!" , "" },
{ "\\" , "/" },
}
-local homedir
-
-function resolvers.cleanpath(str)
- if not homedir then
- homedir = lpegmatch(cleanup,environment.homedir or "")
- if homedir == string.char(127) or homedir == "" or not lfs.isdir(homedir) then
- if trace_expansions then
- report_expansions("no home dir set, ignoring dependent paths")
- end
- function resolvers.cleanpath(str)
- if find(str,"~") then
- return "" -- special case
- else
- return str and lpegmatch(cleanup,str)
- end
- end
- else
- cleanup = lpeg.replacer {
- { "!" , "" },
- { "\\" , "/" },
- { "~" , homedir },
- }
- function resolvers.cleanpath(str)
- return str and lpegmatch(cleanup,str)
+function resolvers.cleanpath(str) -- tricky, maybe only simple paths
+ local doslashes = (P("\\")/"/" + 1)^0
+ local donegation = (P("!") /"" )^0
+ local homedir = lpegmatch(Cs(donegation * doslashes),environment.homedir or "")
+ if homedir == "~" or homedir == "" or not lfs.isdir(homedir) then
+ if trace_expansions then
+ report_expansions("no home dir set, ignoring dependent paths")
+ end
+ function resolvers.cleanpath(str)
+ if not str or find(str,"~") then
+ return "" -- special case
+ else
+ return lpegmatch(cleanup,str)
end
end
+ else
+ local dohome = ((P("~")+P("$HOME"))/homedir)^0
+ local cleanup = Cs(donegation * dohome * doslashes)
+ function resolvers.cleanpath(str)
+ return str and lpegmatch(cleanup,str) or ""
+ end
end
return resolvers.cleanpath(str)
end
+-- print(resolvers.cleanpath(""))
+-- print(resolvers.cleanpath("!"))
+-- print(resolvers.cleanpath("~"))
+-- print(resolvers.cleanpath("~/test"))
+-- print(resolvers.cleanpath("!~/test"))
+-- print(resolvers.cleanpath("~/test~test"))
+
-- This one strips quotes and funny tokens.
local expandhome = P("~") / "$HOME" -- environment.homedir
@@ -10140,8 +11072,8 @@ end
local cache = { }
----- splitter = Ct(lpeg.splitat(S(ostype == "windows" and ";" or ":;"))) -- maybe add ,
-local splitter = Ct(lpeg.splitat(";")) -- as we move towards urls, prefixes and use tables we no longer do :
+----- splitter = lpeg.tsplitat(S(ostype == "windows" and ";" or ":;")) -- maybe add ,
+local splitter = lpeg.tsplitat(";") -- as we move towards urls, prefixes and use tables we no longer do :
local backslashswapper = lpeg.replacer("\\","/")
@@ -10201,13 +11133,22 @@ end
-local weird = P(".")^1 + lpeg.anywhere(S("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t"))
+-- a lot of this caching can be stripped away when we have ssd's everywhere
+--
+-- we could cache all the (sub)paths here if needed
local attributes, directory = lfs.attributes, lfs.dir
+local weird = P(".")^1 + lpeg.anywhere(S("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t"))
+local timer = { }
+local scanned = { }
+local nofscans = 0
+local scancache = { }
+
local function scan(files,spec,path,n,m,r)
- local full = (path == "" and spec) or (spec .. path .. '/')
- local dirs, nofdirs = { }, 0
+ local full = (path == "" and spec) or (spec .. path .. '/')
+ local dirs = { }
+ local nofdirs = 0
for name in directory(full) do
if not lpegmatch(weird,name) then
local mode = attributes(full..name,'mode')
@@ -10245,25 +11186,122 @@ local function scan(files,spec,path,n,m,r)
files, n, m, r = scan(files,spec,dirs[i],n,m,r)
end
end
+ scancache[sub(full,1,-2)] = files
return files, n, m, r
end
-function resolvers.scanfiles(path,branch)
+local fullcache = { }
+
+function resolvers.scanfiles(path,branch,usecache)
+ statistics.starttiming(timer)
+ local realpath = resolvers.resolve(path) -- no shortcut
+ if usecache then
+ local files = fullcache[realpath]
+ if files then
+ if trace_locating then
+ report_expansions("using caches scan of path '%s', branch '%s'",path,branch or path)
+ end
+ return files
+ end
+ end
if trace_locating then
- report_expansions("scanning path '%s', branch '%s'",path, branch or path)
+ report_expansions("scanning path '%s', branch '%s'",path,branch or path)
end
- local realpath = resolvers.resolve(path) -- no shortcut
local files, n, m, r = scan({ },realpath .. '/',"",0,0,0)
- files.__path__ = path -- can be selfautoparent:texmf-whatever
- files.__files__ = n
- files.__directories__ = m
- files.__remappings__ = r
+ files.__path__ = path -- can be selfautoparent:texmf-whatever
+ files.__files__ = n
+ files.__directories__ = m
+ files.__remappings__ = r
if trace_locating then
report_expansions("%s files found on %s directories with %s uppercase remappings",n,m,r)
end
+ if usecache then
+ scanned[#scanned+1] = realpath
+ fullcache[realpath] = files
+ end
+ nofscans = nofscans + 1
+ statistics.stoptiming(timer)
+ return files
+end
+
+local function simplescan(files,spec,path) -- first match only, no map and such
+ local full = (path == "" and spec) or (spec .. path .. '/')
+ local dirs = { }
+ local nofdirs = 0
+ for name in directory(full) do
+ if not lpegmatch(weird,name) then
+ local mode = attributes(full..name,'mode')
+ if mode == 'file' then
+ if not files[name] then
+ -- only first match
+ files[name] = path
+ end
+ elseif mode == 'directory' then
+ nofdirs = nofdirs + 1
+ if path ~= "" then
+ dirs[nofdirs] = path..'/'..name
+ else
+ dirs[nofdirs] = name
+ end
+ end
+ end
+ end
+ if nofdirs > 0 then
+ sort(dirs)
+ for i=1,nofdirs do
+ files = simplescan(files,spec,dirs[i])
+ end
+ end
+ return files
+end
+
+local simplecache = { }
+local nofsharedscans = 0
+
+function resolvers.simplescanfiles(path,branch,usecache)
+ statistics.starttiming(timer)
+ local realpath = resolvers.resolve(path) -- no shortcut
+ if usecache then
+ local files = simplecache[realpath]
+ if not files then
+ files = scancache[realpath]
+ if files then
+ nofsharedscans = nofsharedscans + 1
+ end
+ end
+ if files then
+ if trace_locating then
+ report_expansions("using caches scan of path '%s', branch '%s'",path,branch or path)
+ end
+ return files
+ end
+ end
+ if trace_locating then
+ report_expansions("scanning path '%s', branch '%s'",path,branch or path)
+ end
+ local files = simplescan({ },realpath .. '/',"")
+ if trace_locating then
+ report_expansions("%s files found",table.count(files))
+ end
+ if usecache then
+ scanned[#scanned+1] = realpath
+ simplecache[realpath] = files
+ end
+ nofscans = nofscans + 1
+ statistics.stoptiming(timer)
return files
end
+function resolvers.scandata()
+ table.sort(scanned)
+ return {
+ n = nofscans,
+ shared = nofsharedscans,
+ time = statistics.elapsedtime(timer),
+ paths = scanned,
+ }
+end
+
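+-- Usage sketch (hypothetical path): scan once with caching enabled, reuse the
+-- cached table on the second call, then inspect the bookkeeping:
+--
+-- local files = resolvers.scanfiles("/opt/tex/texmf","texmf",true)
+-- local again = resolvers.scanfiles("/opt/tex/texmf","texmf",true) -- cache hit
+-- print(files.__files__,files.__directories__,files.__remappings__)
+-- print(table.serialize(resolvers.scandata()))
+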
end -- of closure
@@ -10361,12 +11399,12 @@ local relations = allocate { -- todo: handlers also here
mp = {
names = { "mp" },
variable = 'MPINPUTS',
- suffixes = { 'mp' },
+ suffixes = { 'mp', 'mpvi', 'mpiv', 'mpii' },
},
tex = {
names = { "tex" },
variable = 'TEXINPUTS',
- suffixes = { 'tex', "mkiv", "mkiv", "mkii" },
+ suffixes = { 'tex', "mkvi", "mkiv", "mkii" },
},
icc = {
names = { "icc", "icc profile", "icc profiles" },
@@ -10503,7 +11541,7 @@ end
resolvers.updaterelations() -- push this in the metatable -> newindex
local function simplified(t,k)
- return rawget(t,lower(gsub(k," ","")))
+ return k and rawget(t,lower(gsub(k," ",""))) or nil
end
setmetatableindex(formats, simplified)
@@ -10589,6 +11627,7 @@ luatools with a recache feature.</p>
--ldx]]--
local format, lower, gsub, concat = string.format, string.lower, string.gsub, table.concat
+local serialize, serializetofile = table.serialize, table.tofile
local mkdirs, isdir = dir.mkdirs, lfs.isdir
local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
@@ -10651,11 +11690,7 @@ local function identify()
end
elseif not writable and caches.force then
local cacheparent = file.dirname(cachepath)
- -- TH: added 'or true' for deeply buried non-existent caches.
- -- file.is_writable() is not really important here, since this
- -- branch is only checked for as-yet non-existent paths, and
- -- it guards against mkdirs() failing, anyway.
- if file.is_writable(cacheparent) or true then
+ if file.is_writable(cacheparent) and true then -- we go on anyway (needed for mojca's kind of paths)
if not caches.ask or io.ask(format("\nShould I create the cache path %s?",cachepath), "no", { "yes", "no" }) == "yes" then
mkdirs(cachepath)
if isdir(cachepath) and file.is_writable(cachepath) then
@@ -10746,7 +11781,7 @@ function caches.usedpaths()
end
function caches.configfiles()
- return table.concat(resolvers.instance.specification,";")
+ return concat(resolvers.instance.specification,";")
end
function caches.hashed(tree)
@@ -10870,9 +11905,9 @@ function caches.savedata(filepath,filename,data,raw)
end
data.cache_uuid = os.uuid()
if caches.direct then
- file.savedata(tmaname,table.serialize(data,true,saveoptions))
+ file.savedata(tmaname,serialize(data,true,saveoptions))
else
- table.tofile(tmaname,data,true,saveoptions)
+ serializetofile(tmaname,data,true,saveoptions)
end
utilities.lua.compile(tmaname,tmcname)
end
@@ -10939,7 +11974,7 @@ function caches.savecontent(cachename,dataname,content)
content = content,
uuid = os.uuid(),
}
- local ok = io.savedata(luaname,table.serialize(data,true))
+ local ok = io.savedata(luaname,serialize(data,true))
if ok then
if trace_locating then
report_resolvers("category '%s', cachename '%s' saved in '%s'",dataname,cachename,luaname)
@@ -11112,7 +12147,7 @@ if not modules then modules = { } end modules ['data-res'] = {
-- instance but for practical purposes we now avoid this and use a
-- instance variable. We always have one instance active (sort of global).
--- todo: cache:/// home:///
+-- todo: cache:/// home:/// selfautoparent:/// (sometime end 2012)
local format, gsub, find, lower, upper, match, gmatch = string.format, string.gsub, string.find, string.lower, string.upper, string.match, string.gmatch
local concat, insert, sortedkeys = table.concat, table.insert, table.sortedkeys
@@ -11155,7 +12190,58 @@ resolvers.criticalvars = allocate { "SELFAUTOLOC", "SELFAUTODIR", "SELFAUTOPARE
resolvers.luacnfname = 'texmfcnf.lua'
resolvers.luacnfstate = "unknown"
-resolvers.luacnfspec = '{selfautodir:,selfautoparent:};{selfautodir:,selfautoparent:}{/share,}/texmf{-local,};{selfautodir:,selfautoparent:}{/share,}/texmf{-local,}/web2c'
+-- The web2c tex binaries as well as kpse have built in paths for the configuration
+-- files and there can be a depressing truckload of them. This is actually the weak
+-- spot of a distribution. So we don't want:
+--
+-- resolvers.luacnfspec = '{$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,}/web2c}'
+--
+-- but instead use:
+--
+-- resolvers.luacnfspec = 'selfautoparent:{/texmf{-local,}{,/web2c}}'
+--
+-- which does not make texlive happy as there is a texmf-local tree one level up
+-- (sigh), so we need this. (We can assume web2c as mkiv does not run on older
+-- texlives anyway.)
+--
+-- texlive:
+--
+-- selfautodir:
+-- selfautoparent:
+-- selfautodir:share/texmf-local/web2c
+-- selfautodir:share/texmf/web2c
+-- selfautodir:texmf-local/web2c
+-- selfautodir:texmf/web2c
+-- selfautoparent:share/texmf-local/web2c
+-- selfautoparent:share/texmf/web2c
+-- selfautoparent:texmf-local/web2c
+-- selfautoparent:texmf/web2c
+--
+-- minimals:
+--
+-- home:texmf/web2c
+-- selfautoparent:texmf-local/web2c
+-- selfautoparent:texmf-context/web2c
+-- selfautoparent:texmf/web2c
+
+if this_is_texlive then
+ -- resolvers.luacnfspec = '{selfautodir:,selfautoparent:}{,{/share,}/texmf{-local,}/web2c}'
+ -- resolvers.luacnfspec = '{selfautodir:{/share,}/texmf-local/web2c,selfautoparent:{/share,}/texmf{-local,}/web2c}'
+ -- resolvers.luacnfspec = 'selfautodir:/texmf-local/web2c;selfautoparent:/texmf{-local,}/web2c'
+ resolvers.luacnfspec = 'selfautodir:;selfautoparent:;{selfautodir:,selfautoparent:}{/share,}/texmf{-local,}/web2c'
+else
+ resolvers.luacnfspec = 'home:texmf/web2c;selfautoparent:texmf{-local,-context,}/web2c'
+end
+
+-- which (as we want users to use the web2c path) can be simplified to this:
+--
+-- if environment and environment.ownpath and string.find(environment.ownpath,"[\\/]texlive[\\/]") then
+-- resolvers.luacnfspec = 'selfautodir:/texmf-local/web2c,selfautoparent:/texmf-local/web2c,selfautoparent:/texmf/web2c'
+-- else
+-- resolvers.luacnfspec = 'selfautoparent:/texmf-local/web2c,selfautoparent:/texmf/web2c'
+-- end
+
+
local unset_variable = "unset"
@@ -11177,7 +12263,7 @@ local instance = resolvers.instance or nil -- the current one (fast access)
function resolvers.setenv(key,value,raw)
if instance then
-- this one will be consulted first when we stay inside
- -- the current environment
+ -- the current environment (prefixes are not resolved here)
instance.environment[key] = value
-- we feed back into the environment, and as this is used
-- by other applications (via os.execute) we need to make
@@ -11257,6 +12343,7 @@ function resolvers.newinstance() -- todo: all vars will become lowercase and alp
remember = true,
diskcache = true,
renewcache = false,
+ renewtree = false,
loaderror = false,
savelists = true,
pattern = nil, -- lists
@@ -11348,7 +12435,7 @@ local function makepathexpression(str)
end
end
-local function reportcriticalvariables()
+local function reportcriticalvariables(cnfspec)
if trace_locating then
for i=1,#resolvers.criticalvars do
local k = resolvers.criticalvars[i]
@@ -11356,6 +12443,14 @@ local function reportcriticalvariables()
report_resolving("variable '%s' set to '%s'",k,v)
end
report_resolving()
+ if cnfspec then
+ if type(cnfspec) == "table" then
+ report_resolving("using configuration specification '%s'",concat(cnfspec,","))
+ else
+ report_resolving("using configuration specification '%s'",cnfspec)
+ end
+ end
+ report_resolving()
end
reportcriticalvariables = function() end
end
@@ -11370,7 +12465,7 @@ local function identify_configuration_files()
else
resolvers.luacnfstate = "environment"
end
- reportcriticalvariables()
+ reportcriticalvariables(cnfspec)
local cnfpaths = expandedpathfromlist(resolvers.splitpath(cnfspec))
local luacnfname = resolvers.luacnfname
for i=1,#cnfpaths do
@@ -11406,6 +12501,19 @@ local function load_configuration_files()
if blob then
local setups = instance.setups
local data = blob()
+ local parent = data and data.parent
+ if parent then
+ local filename = filejoin(pathname,parent)
+ local realname = resolvers.resolve(filename) -- no shortcut
+ local blob = loadfile(realname)
+ if blob then
+ local parentdata = blob()
+ if parentdata then
+ report_resolving("loading configuration file '%s'",filename)
+ data = table.merged(parentdata,data)
+ end
+ end
+ end
data = data and data.content
if data then
if trace_locating then
@@ -11495,11 +12603,6 @@ local function locate_file_databases()
local runtime = stripped == path
path = resolvers.cleanpath(path)
local spec = resolvers.splitmethod(stripped)
- -- TH Original did this the other way around (elseif and if tests
- -- reversed) but then the else branch was never reached, as 'runtime'
- -- would effectively be ignored. In turn, that meant that the wrong
- -- locator method was used, such that tree: and file: were treated
- -- identically (cached and no runtime search).
if runtime and (spec.noscheme or spec.scheme == "file") then
stripped = "tree:///" .. stripped
elseif spec.scheme == "cache" or spec.scheme == "file" then
@@ -11507,7 +12610,7 @@ local function locate_file_databases()
end
if trace_locating then
if runtime then
- report_resolving("locating list of '%s' (runtime)",path)
+ report_resolving("locating list of '%s' (runtime) (%s)",path,stripped)
else
report_resolving("locating list of '%s' (cached)",path)
end
@@ -11551,6 +12654,39 @@ local function save_file_databases() -- will become cachers
end
end
+function resolvers.renew(hashname)
+ if hashname and hashname ~= "" then
+ local expanded = resolvers.expansion(hashname) or ""
+ if expanded ~= "" then
+ if trace_locating then
+ report_resolving("identifying tree '%s' from '%s'",expanded,hashname)
+ end
+ hashname = expanded
+ else
+ if trace_locating then
+ report_resolving("identifying tree '%s'",hashname)
+ end
+ end
+ local realpath = resolvers.resolve(hashname)
+ if lfs.isdir(realpath) then
+ if trace_locating then
+ report_resolving("using path '%s'",realpath)
+ end
+ methodhandler('generators',hashname)
+ -- could be shared
+ local content = instance.files[hashname]
+ caches.collapsecontent(content)
+ if trace_locating then
+ report_resolving("saving tree '%s'",hashname)
+ end
+ caches.savecontent(hashname,"files",content)
+ -- till here
+ else
+ report_resolving("invalid path '%s'",realpath)
+ end
+ end
+end
+
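+-- Usage sketch (tree specification invented): renew the files database of a
+-- single tree; the name is first expanded, then resolved to a real path:
+--
+-- resolvers.renew("selfautoparent:texmf-local")
+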
local function load_databases()
locate_file_databases()
if instance.diskcache and not instance.renewcache then
@@ -11838,6 +12974,7 @@ local function collect_files(names)
if dname == "" or find(dname,"^%.") then
dname = false
else
+dname = gsub(dname,"*","%.*")
dname = "/" .. dname .. "$"
end
local hashes = instance.hashes
@@ -11897,9 +13034,22 @@ local function collect_files(names)
return noffiles > 0 and filelist or nil
end
-function resolvers.registerintrees(name)
- if not find(name,"^%.") then
- instance.foundintrees[name] = (instance.foundintrees[name] or 0) + 1 -- maybe only one
+local fit = { }
+
+function resolvers.registerintrees(filename,format,filetype,usedmethod,foundname)
+ local foundintrees = instance.foundintrees
+ if usedmethod == "direct" and filename == foundname and fit[foundname] then
+ -- just an extra lookup after a test on presence
+ else
+ local t = {
+ filename = filename,
+ format = format ~= "" and format or nil,
+ filetype = filetype ~= "" and filetype or nil,
+ usedmethod = usedmethod,
+ foundname = foundname,
+ }
+ fit[foundname] = t
+ foundintrees[#foundintrees+1] = t
end
end
@@ -11919,297 +13069,423 @@ end
local preparetreepattern = Cs((P(".")/"%%." + P("-")/"%%-" + P(1))^0 * Cc("$"))
--- this one is split in smaller functions but it needs testing
+-- -- -- begin of main file search routing -- -- -- needs checking as previous has been patched
-local function collect_instance_files(filename,askedformat,allresults) -- todo : plugin (scanners, checkers etc)
- local result = { }
- local stamp = nil
- askedformat = askedformat or ""
- filename = collapsepath(filename)
- -- speed up / beware: format problem
- if instance.remember and not allresults then
- stamp = filename .. "--" .. askedformat
- if instance.found[stamp] then
+local collect_instance_files
+
+local function find_analyze(filename,askedformat,allresults)
+ local filetype, wantedfiles, ext = '', { }, fileextname(filename)
+ -- too tricky as filename can be bla.1.2.3:
+ --
+ -- if not suffixmap[ext] then
+ -- wantedfiles[#wantedfiles+1] = filename
+ -- end
+ wantedfiles[#wantedfiles+1] = filename
+ if askedformat == "" then
+ if ext == "" or not suffixmap[ext] then
+ local defaultsuffixes = resolvers.defaultsuffixes
+ for i=1,#defaultsuffixes do
+ local forcedname = filename .. '.' .. defaultsuffixes[i]
+ wantedfiles[#wantedfiles+1] = forcedname
+ filetype = resolvers.formatofsuffix(forcedname)
+ if trace_locating then
+ report_resolving("forcing filetype '%s'",filetype)
+ end
+ end
+ else
+ filetype = resolvers.formatofsuffix(filename)
if trace_locating then
- report_resolving("remembered file '%s'",filename)
+ report_resolving("using suffix based filetype '%s'",filetype)
end
- resolvers.registerintrees(filename) -- for tracing used files
- return instance.found[stamp]
end
- end
- if not dangerous[askedformat] then
- if isreadable(filename) then
- if trace_detail then
- report_resolving("file '%s' found directly",filename)
- end
- if stamp then
- instance.found[stamp] = { filename }
+ else
+ if ext == "" or not suffixmap[ext] then
+ local format_suffixes = suffixes[askedformat]
+ if format_suffixes then
+ for i=1,#format_suffixes do
+ wantedfiles[#wantedfiles+1] = filename .. "." .. format_suffixes[i]
+ end
end
- return { filename }
+ end
+ filetype = askedformat
+ if trace_locating then
+ report_resolving("using given filetype '%s'",filetype)
end
end
+ return filetype, wantedfiles
+end
+
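+-- Illustrative trace (file name invented); find_analyze is local, so this is
+-- only a sketch of what it computes for a name with a known suffix:
+--
+-- local filetype, wantedfiles = find_analyze("context.mkiv","")
+-- -- filetype    : "tex" (derived from the mkiv suffix)
+-- -- wantedfiles : { "context.mkiv" }
+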
+local function find_direct(filename,allresults)
+ if not dangerous[askedformat] and isreadable(filename) then
+ if trace_detail then
+ report_resolving("file '%s' found directly",filename)
+ end
+ return "direct", { filename }
+ end
+end
+
+local function find_wildcard(filename,allresults)
if find(filename,'%*') then
if trace_locating then
report_resolving("checking wildcard '%s'", filename)
end
- result = resolvers.findwildcardfiles(filename) -- we can use th elocal
- elseif file.is_qualified_path(filename) then
- if isreadable(filename) then
- if trace_locating then
- report_resolving("qualified name '%s'", filename)
- end
- result = { filename }
- else
- local forcedname, ok, suffix = "", false, fileextname(filename)
- if suffix == "" then -- why
- local format_suffixes = askedformat == "" and resolvers.defaultsuffixes or suffixes[askedformat]
- if format_suffixes then
- for i=1,#format_suffixes do
- local s = format_suffixes[i]
- forcedname = filename .. "." .. s
- if isreadable(forcedname) then
- if trace_locating then
- report_resolving("no suffix, forcing format filetype '%s'", s)
- end
- result, ok = { forcedname }, true
- break
- end
- end
- end
- end
- if not ok and suffix ~= "" then
- -- try to find in tree (no suffix manipulation), here we search for the
- -- matching last part of the name
- local basename = filebasename(filename)
- local pattern = lpegmatch(preparetreepattern,filename)
- -- messy .. to be sorted out
- local savedformat = askedformat
- local format = savedformat or ""
- if format == "" then
- askedformat = resolvers.formatofsuffix(suffix)
- end
- if not format then
- askedformat = "othertextfiles" -- kind of everything, maybe texinput is better
- end
- --
- if basename ~= filename then
- local resolved = collect_instance_files(basename,askedformat,allresults)
- if #result == 0 then -- shouldn't this be resolved ?
- local lowered = lower(basename)
- if filename ~= lowered then
- resolved = collect_instance_files(lowered,askedformat,allresults)
- end
- end
- resolvers.format = savedformat
- --
- for r=1,#resolved do
- local rr = resolved[r]
- if find(rr,pattern) then
- result[#result+1], ok = rr, true
- end
- end
- end
- -- a real wildcard:
- --
- -- if not ok then
- -- local filelist = collect_files({basename})
- -- for f=1,#filelist do
- -- local ff = filelist[f][3] or ""
- -- if find(ff,pattern) then
- -- result[#result+1], ok = ff, true
- -- end
- -- end
- -- end
- end
- if not ok and trace_locating then
- report_resolving("qualified name '%s'", filename)
- end
+ local method, result = resolvers.findwildcardfiles(filename)
+ if result then
+ return "wildcard", result
end
- else
- -- search spec
- local filetype, done, wantedfiles, ext = '', false, { }, fileextname(filename)
- -- -- tricky as filename can be bla.1.2.3
- -- if not suffixmap[ext] then --- probably needs to be done elsewhere too
- -- wantedfiles[#wantedfiles+1] = filename
- -- end
- wantedfiles[#wantedfiles+1] = filename
- if askedformat == "" then
- if ext == "" or not suffixmap[ext] then
- local defaultsuffixes = resolvers.defaultsuffixes
- for i=1,#defaultsuffixes do
- local forcedname = filename .. '.' .. defaultsuffixes[i]
- wantedfiles[#wantedfiles+1] = forcedname
- filetype = resolvers.formatofsuffix(forcedname)
+ end
+end
+
+local function find_qualified(filename,allresults) -- this one will be split too
+ if not file.is_qualified_path(filename) then
+ return
+ end
+ if trace_locating then
+ report_resolving("checking qualified name '%s'", filename)
+ end
+ if isreadable(filename) then
+ if trace_detail then
+ report_resolving("qualified file '%s' found", filename)
+ end
+ return "qualified", { filename }
+ end
+ if trace_detail then
+ report_resolving("locating qualified file '%s'", filename)
+ end
+ local forcedname, suffix = "", fileextname(filename)
+ if suffix == "" then -- why
+ local format_suffixes = askedformat == "" and resolvers.defaultsuffixes or suffixes[askedformat]
+ if format_suffixes then
+ for i=1,#format_suffixes do
+ local s = format_suffixes[i]
+ forcedname = filename .. "." .. s
+ if isreadable(forcedname) then
if trace_locating then
- report_resolving("forcing filetype '%s'",filetype)
+ report_resolving("no suffix, forcing format filetype '%s'", s)
end
+ return "qualified", { forcedname }
end
- else
- filetype = resolvers.formatofsuffix(filename)
- if trace_locating then
- report_resolving("using suffix based filetype '%s'",filetype)
+ end
+ end
+ end
+ if suffix and suffix ~= "" then
+ -- try to find in tree (no suffix manipulation), here we search for the
+ -- matching last part of the name
+ local basename = filebasename(filename)
+ local pattern = lpegmatch(preparetreepattern,filename)
+ -- messy .. to be sorted out
+ local savedformat = askedformat
+ local format = savedformat or ""
+ if format == "" then
+ askedformat = resolvers.formatofsuffix(suffix)
+ end
+ if not format then
+ askedformat = "othertextfiles" -- kind of everything, maybe all
+ end
+ --
+ if basename ~= filename then
+ local resolved = collect_instance_files(basename,askedformat,allresults)
+ if #resolved == 0 then
+ local lowered = lower(basename)
+ if filename ~= lowered then
+ resolved = collect_instance_files(lowered,askedformat,allresults)
end
end
- else
- if ext == "" or not suffixmap[ext] then
- local format_suffixes = suffixes[askedformat]
- if format_suffixes then
- for i=1,#format_suffixes do
- wantedfiles[#wantedfiles+1] = filename .. "." .. format_suffixes[i]
+ resolvers.format = savedformat
+ --
+ if #resolved > 0 then
+ local result = { }
+ for r=1,#resolved do
+ local rr = resolved[r]
+ if find(rr,pattern) then
+ result[#result+1] = rr
end
end
+ if #result > 0 then
+ return "qualified", result
+ end
end
- filetype = askedformat
- if trace_locating then
- report_resolving("using given filetype '%s'",filetype)
- end
end
- local typespec = resolvers.variableofformat(filetype)
- local pathlist = resolvers.expandedpathlist(typespec)
- if not pathlist or #pathlist == 0 then
- -- no pathlist, access check only / todo == wildcard
- if trace_detail then
- report_resolving("checking filename '%s', filetype '%s', wanted files '%s'",filename, filetype or '?',concat(wantedfiles," | "))
+ -- a real wildcard:
+ --
+ -- local filelist = collect_files({basename})
+ -- result = { }
+ -- for f=1,#filelist do
+ -- local ff = filelist[f][3] or ""
+ -- if find(ff,pattern) then
+ -- result[#result+1], ok = ff, true
+ -- end
+ -- end
+ -- if #result > 0 then
+ -- return "qualified", result
+ -- end
+ end
+end
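
The suffix loop above can be read as a small helper on its own: given a name without a suffix and the suffix list of the asked format, return the first readable candidate. A minimal sketch, with isreadable passed in because the real helper lives elsewhere in this file:

local function forcesuffixes(filename,format_suffixes,isreadable)
    for i=1,#format_suffixes do
        local forcedname = filename .. "." .. format_suffixes[i]
        if isreadable(forcedname) then
            return forcedname -- first readable candidate wins
        end
    end
end
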
+
+local function check_subpath(fname)
+ if isreadable(fname) then
+ if trace_detail then
+ report_resolving("found '%s' by deep scanning",fname)
+ end
+ return fname
+ end
+end
+
+local function find_intree(filename,filetype,wantedfiles,allresults)
+ local typespec = resolvers.variableofformat(filetype)
+ local pathlist = resolvers.expandedpathlist(typespec)
+ local method = "intree"
+ if pathlist and #pathlist > 0 then
+ -- list search
+ local filelist = collect_files(wantedfiles)
+ local dirlist = { }
+ if filelist then
+ for i=1,#filelist do
+ dirlist[i] = filedirname(filelist[i][3]) .. "/" -- was [2] .. gamble
end
- for k=1,#wantedfiles do
- local fname = wantedfiles[k]
- if fname and isreadable(fname) then
- filename, done = fname, true
- result[#result+1] = filejoin('.',fname)
- break
+ end
+ if trace_detail then
+ report_resolving("checking filename '%s'",filename)
+ end
+ local result = { }
+ for k=1,#pathlist do
+ local path = pathlist[k]
+ local pathname = lpegmatch(inhibitstripper,path)
+ local doscan = path == pathname -- no ^!!
+ if not find (pathname,'//$') then
+ doscan = false -- we check directly on the path
+ end
+ local done = false
+ -- using file list
+ if filelist then -- database
+ -- compare list entries with permitted pattern -- /xx /xx//
+ local expression = makepathexpression(pathname)
+ if trace_detail then
+ report_resolving("using pattern '%s' for path '%s'",expression,pathname)
end
- end
- -- this is actually 'other text files' or 'any' or 'whatever'
- local filelist = collect_files(wantedfiles)
- local fl = filelist and filelist[1]
- if fl then
- filename = fl[3] -- not local?
- result[#result+1] = resolvers.resolve(filename)
- done = true
- end
- else
- -- list search
- local filelist = collect_files(wantedfiles)
- local dirlist = { }
- if filelist then
- for i=1,#filelist do
- dirlist[i] = filedirname(filelist[i][3]) .. "/" -- was [2] .. gamble
- end
- end
- if trace_detail then
- report_resolving("checking filename '%s'",filename)
- end
- for k=1,#pathlist do
- local path = pathlist[k]
- local pathname = lpegmatch(inhibitstripper,path)
- local doscan = path == pathname -- no ^!!
- if not find (pathname, '//$') then doscan = false end
- done = false
- -- using file list
- if filelist then
- -- compare list entries with permitted pattern -- /xx /xx//
- local expression = makepathexpression(pathname)
- if trace_detail then
- report_resolving("using pattern '%s' for path '%s'",expression,pathname)
- end
- for k=1,#filelist do
- local fl = filelist[k]
- local f = fl[2]
- local d = dirlist[k]
- if find(d,expression) then
- -- todo, test for readable
- result[#result+1] = resolvers.resolve(fl[3]) -- no shortcut
- done = true
- if allresults then
- if trace_detail then
- report_resolving("match to '%s' in hash for file '%s' and path '%s', continue scanning",expression,f,d)
- end
- else
- if trace_detail then
- report_resolving("match to '%s' in hash for file '%s' and path '%s', quit scanning",expression,f,d)
- end
- break
+ for k=1,#filelist do
+ local fl = filelist[k]
+ local f = fl[2]
+ local d = dirlist[k]
+ if find(d,expression) then
+ -- todo, test for readable
+ result[#result+1] = resolvers.resolve(fl[3]) -- no shortcut
+ done = true
+ if allresults then
+ if trace_detail then
+ report_resolving("match to '%s' in hash for file '%s' and path '%s', continue scanning",expression,f,d)
end
- elseif trace_detail then
- report_resolving("no match to '%s' in hash for file '%s' and path '%s'",expression,f,d)
+ else
+ if trace_detail then
+ report_resolving("match to '%s' in hash for file '%s' and path '%s', quit scanning",expression,f,d)
+ end
+ break
end
+ elseif trace_detail then
+ report_resolving("no match to '%s' in hash for file '%s' and path '%s'",expression,f,d)
end
end
- if not done then
- -- check if on disk / unchecked / does not work at all / also zips
- -- TH perhaps it did not work because of missing resolvers.resolve()
- -- and resolvers.scanfiles() calls ...
- pathname = resolvers.resolve(pathname)
- local scheme = url.hasscheme(pathname)
- if not scheme or scheme == "file" then
- local pname = gsub(pathname,"%.%*$",'')
- if not find(pname,"%*") then
- local ppname = gsub(pname,"/+$","")
- if can_be_dir(ppname) then
- local files = {}
- if doscan then files = resolvers.scanfiles(ppname) end
+ end
+ if done then
+ method = "database"
+ else
+ method = "filesystem" -- bonus, even when !! is specified
+ pathname = gsub(pathname,"/+$","")
+ pathname = resolvers.resolve(pathname)
+ local scheme = url.hasscheme(pathname)
+ if not scheme or scheme == "file" then
+ local pname = gsub(pathname,"%.%*$",'')
+ if not find(pname,"%*") then
+ if can_be_dir(pname) then
+ -- quick root scan first
+ for k=1,#wantedfiles do
+ local w = wantedfiles[k]
+ local fname = check_subpath(filejoin(pname,w))
+ if fname then
+ result[#result+1] = fname
+ done = true
+ if not allresults then
+ break
+ end
+ end
+ end
+ if not done and doscan then
+ -- collect files in path (and cache the result)
+ local files = resolvers.simplescanfiles(pname,false,true)
for k=1,#wantedfiles do
local w = wantedfiles[k]
- local subpath = ''
- if files[w] then
- subpath = type(files[w]) == "table" and files[w][1] or files[w]
- end
- local fname = filejoin(ppname,subpath,w)
- if isreadable(fname) then
- if trace_detail then
- report_resolving("found '%s' by scanning",fname)
+ local subpath = files[w]
+ if not subpath or subpath == "" then
+ -- rootscan already done
+ elseif type(subpath) == "string" then
+ local fname = check_subpath(filejoin(pname,subpath,w))
+ if fname then
+ result[#result+1] = fname
+ done = true
+ if not allresults then
+ break
+ end
+ end
+ else
+ for i=1,#subpath do
+ local sp = subpath[i]
+ if sp == "" then
+ -- roottest already done
+ else
+ local fname = check_subpath(filejoin(pname,sp,w))
+ if fname then
+ result[#result+1] = fname
+ done = true
+ if not allresults then
+ break
+ end
+ end
+ end
+ end
+ if done and not allresults then
+ break
end
- result[#result+1] = fname
- done = true
- if not allresults then break end
end
end
- else
- -- no access needed for non existing path, speedup (esp in large tree with lots of fake)
end
end
+ else
+ -- no access needed for non existing path, speedup (esp in large tree with lots of fake)
end
end
- if not done and doscan then
- -- todo: slow path scanning ... although we now have tree:// supported in $TEXMF
- end
- if done and not allresults then break end
+ end
+ -- todo recursive scanning
+ if done and not allresults then
+ break
end
end
+ if #result > 0 then
+ return method, result
+ end
end
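
find_intree thus works in two phases: first it matches candidates against the file database (method "database"), then it falls back to probing the filesystem (method "filesystem"). A compressed sketch of that order with stand-in names; the real database match uses an lpeg expression built by makepathexpression, for which a plain substring test stands in here:

local function lookup(pathname,filelist,wantedfiles,isreadable,filejoin)
    -- phase 1: entries already known from the file database
    for i=1,#filelist do
        local entry = filelist[i]
        if string.find(entry.dirname,pathname,1,true) then
            return "database", entry.fullname
        end
    end
    -- phase 2: probe the directory on disk
    for i=1,#wantedfiles do
        local candidate = filejoin(pathname,wantedfiles[i])
        if isreadable(candidate) then
            return "filesystem", candidate
        end
    end
end
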
- for k=1,#result do
- local rk = collapsepath(result[k])
- result[k] = rk
- resolvers.registerintrees(rk) -- for tracing used files
+end
+
+local function find_onpath(filename,filetype,wantedfiles,allresults)
+ if trace_detail then
+ report_resolving("checking filename '%s', filetype '%s', wanted files '%s'",filename, filetype or '?',concat(wantedfiles," | "))
+ end
+ local result = { }
+ for k=1,#wantedfiles do
+ local fname = wantedfiles[k]
+ if fname and isreadable(fname) then
+ filename = fname
+ result[#result+1] = filejoin('.',fname)
+ if not allresults then
+ break
+ end
+ end
end
- if stamp then
- instance.found[stamp] = result
+ if #result > 0 then
+ return "onpath", result
end
- return result
end
--- -- -- begin of main file search routing -- -- --
-
-
-
-
-
-
-
+local function find_otherwise(filename,filetype,wantedfiles,allresults) -- other text files | any | whatever
+ local filelist = collect_files(wantedfiles)
+ local fl = filelist and filelist[1]
+ if fl then
+ return "otherwise", { resolvers.resolve(fl[3]) } -- filename
+ end
+end
+-- we could have a loop over the 6 find functions, but then we'd always
+-- have to run the analyze step first
+collect_instance_files = function(filename,askedformat,allresults) -- uses nested
+ askedformat = askedformat or ""
+ filename = collapsepath(filename)
+ if allresults then
+ -- no need for caching, only used for tracing
+ local filetype, wantedfiles = find_analyze(filename,askedformat)
+ local results = {
+ { find_direct (filename,true) },
+ { find_wildcard (filename,true) },
+ { find_qualified(filename,true) },
+ { find_intree (filename,filetype,wantedfiles,true) },
+ { find_onpath (filename,filetype,wantedfiles,true) },
+ { find_otherwise(filename,filetype,wantedfiles,true) },
+ }
+ local result, status, done = { }, { }, { }
+ for k, r in next, results do
+ local method, list = r[1], r[2]
+ if method and list then
+ for i=1,#list do
+ local c = collapsepath(list[i])
+ if not done[c] then
+ result[#result+1] = c
+ done[c] = true
+ end
+ status[#status+1] = format("%-10s: %s",method,c)
+ end
+ end
+ end
+ if trace_detail then
+ report_resolving("lookup status: %s",table.serialize(status,filename))
+ end
+ return result, status
+ else
+ local method, result, stamp, filetype, wantedfiles
+ if instance.remember then
+ stamp = format("%s--%s", filename, askedformat)
+ result = stamp and instance.found[stamp]
+ if result then
+ if trace_locating then
+ report_resolving("remembered file '%s'",filename)
+ end
+ return result
+ end
+ end
+ method, result = find_direct(filename)
+ if not result then
+ method, result = find_wildcard(filename)
+ if not result then
+ method, result = find_qualified(filename)
+ if not result then
+ filetype, wantedfiles = find_analyze(filename,askedformat)
+ method, result = find_intree(filename,filetype,wantedfiles)
+ if not result then
+ method, result = find_onpath(filename,filetype,wantedfiles)
+ if not result then
+ method, result = find_otherwise(filename,filetype,wantedfiles)
+ end
+ end
+ end
+ end
+ end
+ if result and #result > 0 then
+ local foundname = collapsepath(result[1])
+ resolvers.registerintrees(filename,askedformat,filetype,method,foundname)
+ result = { foundname }
+ else
+ result = { } -- maybe false
+ end
+ if stamp then
+ if trace_locating then
+ report_resolving("remembering file '%s'",filename)
+ end
+ instance.found[stamp] = result
+ end
+ return result
+ end
+end
-- -- -- end of main file search routing -- -- --
+
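
collect_instance_files tries the six find_* functions in a fixed order and, when instance.remember is set, caches the outcome (including misses) under a "filename--format" stamp. A generic sketch of that memoized dispatch, assuming finders that all share one signature (the real find_* functions differ, some also taking filetype and wantedfiles):

local function makesearcher(finders)
    local found = { }
    return function(filename,askedformat)
        local stamp  = filename .. "--" .. (askedformat or "")
        local result = found[stamp]
        if result then
            return result -- remembered hit or miss
        end
        for i=1,#finders do
            local method, list = finders[i](filename,askedformat)
            if list and #list > 0 then
                found[stamp] = list
                return list, method
            end
        end
        found[stamp] = { } -- remember misses too, as the code above does
        return found[stamp]
    end
end
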
local function findfiles(filename,filetype,allresults)
- local result = collect_instance_files(filename,filetype or "",allresults)
- if #result == 0 then
+ local result, status = collect_instance_files(filename,filetype or "",allresults)
+ if not result or #result == 0 then
local lowered = lower(filename)
if filename ~= lowered then
- return collect_instance_files(lowered,filetype or "",allresults)
+ result, status = collect_instance_files(lowered,filetype or "",allresults)
end
end
- return result
+ return result or { }, status
end
function resolvers.findfiles(filename,filetype)
@@ -12370,6 +13646,10 @@ function resolvers.load(option)
return files and next(files) and true
end
+function resolvers.loadtime()
+ return statistics.elapsedtime(instance)
+end
+
local function report(str)
if trace_locating then
report_resolving(str) -- has already verbose
@@ -12383,6 +13663,9 @@ function resolvers.dowithfilesandreport(command, files, ...) -- will move
if trace_locating then
report('') -- ?
end
+ if type(files) == "string" then
+ files = { files }
+ end
for f=1,#files do
local file = files[f]
local result = command(file,...)
@@ -12540,6 +13823,8 @@ local cleanpath, findgivenfile, expansion = resolvers.cleanpath, resolvers.findg
local getenv = resolvers.getenv -- we can probably also use resolvers.expansion
local P, Cs, lpegmatch = lpeg.P, lpeg.Cs, lpeg.match
+-- getenv = function(...) return resolvers.getenv(...) end -- needs checking (definitions changes later on)
+
prefixes.environment = function(str)
return cleanpath(expansion(str))
end
@@ -12578,7 +13863,7 @@ end
prefixes.filename = function(str)
local fullname = findgivenfile(str) or ""
- return cleanpath(file.basename((fullname ~= "" and fullname) or str))
+ return cleanpath(file.basename((fullname ~= "" and fullname) or str)) -- no cleanpath needed here
end
prefixes.pathname = function(str)
@@ -12621,8 +13906,9 @@ function resolvers.allprefixes(separator)
end
local function _resolve_(method,target)
- if prefixes[method] then
- return prefixes[method](target)
+ local action = prefixes[method]
+ if action then
+ return action(target)
else
return method .. ":" .. target
end
@@ -12637,7 +13923,7 @@ end
local function resolve(str) -- use schemes, this one is then for the commandline only
local res = resolved[str]
if not res then
- res = gsub(str,"([a-z][a-z]+):([^ \"\';]*)",_resolve_)
+ res = gsub(str,"([a-z][a-z]+):([^ \"\';]*)",_resolve_) -- home:xx;selfautoparent:xx; etc
resolved[str] = res
abstract[res] = str
end
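
_resolve_ dispatches a "prefix:value" pair to its handler and leaves unknown prefixes untouched, and resolve applies that to every such pair in a string. A self-contained sketch with two made-up handlers; the real table is resolvers.prefixes:

local prefixes = {
    upper = function(s) return string.upper(s)   end, -- hypothetical
    rev   = function(s) return string.reverse(s) end, -- hypothetical
}

local function _resolve_(method,target)
    local action = prefixes[method]
    if action then
        return action(target)
    else
        return method .. ":" .. target -- unknown prefix stays as-is
    end
end

-- print(string.gsub("upper:abc rev:abc zzz:abc","([a-z][a-z]+):([^ \"';]*)",_resolve_))
-- --> ABC cba zzz:abc   (3 replacements)
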
@@ -12797,7 +14083,7 @@ end
function generators.file(specification)
local path = specification.filename
- local content = resolvers.scanfiles(path)
+ local content = resolvers.scanfiles(path,false,true) -- scan once
resolvers.registerfilehash(path,content,true)
end
@@ -13152,6 +14438,18 @@ local archives = zip.archives
zip.registeredfiles = zip.registeredfiles or { }
local registeredfiles = zip.registeredfiles
+local limited = false
+
+directives.register("system.inputmode", function(v)
+ if not limited then
+ local i_limiter = io.i_limiter(v)
+ if i_limiter then
+ zip.open = i_limiter.protect(zip.open)
+ limited = true
+ end
+ end
+end)
+
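
The directive above wraps zip.open in an i/o limiter at most once, however often the directive fires. A sketch of that guard; io.i_limiter itself is not part of this diff, so the limiter here is only assumed to expose protect(fn):

local limited = false

local function installlimiter(zip,limiter)
    if not limited and limiter then
        zip.open = limiter.protect(zip.open) -- wrap the original opener once
        limited  = true                      -- guard against double wrapping
    end
end
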
local function validzip(str) -- todo: use url splitter
if not find(str,"^zip://") then
return "zip:///" .. str
@@ -13419,11 +14717,11 @@ function resolvers.finders.tree(specification)
end
function resolvers.locators.tree(specification)
- -- TH: resolvers.resolve() call was missing here
- local name = resolvers.resolve(specification.filename)
- if name ~= '' and lfs.isdir(name) then
+ local name = specification.filename
+ local realname = resolvers.resolve(name) -- no shortcut
+ if realname and realname ~= '' and lfs.isdir(realname) then
if trace_locating then
- report_trees("locator '%s' found",name)
+ report_trees("locator '%s' found",realname)
end
resolvers.appendhash('tree',name,false) -- don't cache
elseif trace_locating then
@@ -13437,6 +14735,8 @@ function resolvers.hashers.tree(specification)
report_trees("analysing '%s'",name)
end
resolvers.methodhandler("hashers",name)
+
+ resolvers.generators.file(specification)
end
resolvers.concatinators.tree = resolvers.concatinators.file
@@ -13596,8 +14896,15 @@ local function loaded(libpaths,name,simple)
end
package.loaders[2] = function(name) -- was [#package.loaders+1]
- if trace_locating then -- mode detail
- report_libraries("! locating '%s'",name)
+ if file.suffix(name) == "" then
+ name = file.addsuffix(name,"lua") -- maybe a list
+ if trace_locating then -- mode detail
+ report_libraries("! locating '%s' with forced suffix",name)
+ end
+ else
+ if trace_locating then -- mode detail
+ report_libraries("! locating '%s'",name)
+ end
end
for i=1,#libformats do
local format = libformats[i]
@@ -13804,7 +15111,13 @@ function resolvers.load_tree(tree,resolve)
-- Beware, we need to obey the relocatable autoparent so we
-- set TEXMFCNF to its raw value. This is somewhat tricky when
-- we run a mkii job from within. Therefore, in mtxrun, there
- -- is a resolve applied when we're in mkii/kpse mode.
+ -- is a resolve applied when we're in mkii/kpse mode or when
+ -- --resolve is passed to mtxrun. Maybe we should also set the
+    -- local AUTOPARENT etc. although these are always set anew.
+
+ if resolve then
+ resolvers.luacnfspec = resolvers.resolve(resolvers.luacnfspec)
+ end
setenv('SELFAUTOPARENT', newroot)
setenv('SELFAUTODIR', newtree)
@@ -13812,11 +15125,12 @@ function resolvers.load_tree(tree,resolve)
setenv('TEXROOT', newroot)
setenv('TEXOS', texos)
setenv('TEXMFOS', texmfos)
- setenv('TEXMFCNF', resolvers.luacnfspec, not resolve)
- setenv("PATH", newpath .. io.pathseparator .. getenv("PATH"))
+ setenv('TEXMFCNF', resolvers.luacnfspec,true) -- already resolved
+ setenv('PATH', newpath .. io.pathseparator .. getenv('PATH'))
report_tds("changing from root '%s' to '%s'",oldroot,newroot)
- report_tds("prepending '%s' to binary path",newpath)
+ report_tds("prepending '%s' to PATH",newpath)
+ report_tds("setting TEXMFCNF to '%s'",resolvers.luacnfspec)
report_tds()
end
end
@@ -13837,6 +15151,7 @@ if not modules then modules = { } end modules ['data-lst'] = {
-- used in mtxrun, can be loaded later .. todo
local find, concat, upper, format = string.find, table.concat, string.upper, string.format
+local fastcopy, sortedpairs = table.fastcopy, table.sortedpairs
resolvers.listers = resolvers.listers or { }
@@ -13867,10 +15182,10 @@ function resolvers.listers.variables(pattern)
end
end
end
- local env = table.fastcopy(environment)
- local var = table.fastcopy(variables)
- local exp = table.fastcopy(expansions)
- for key, value in table.sortedpairs(configured) do
+ local env = fastcopy(environment)
+ local var = fastcopy(variables)
+ local exp = fastcopy(expansions)
+ for key, value in sortedpairs(configured) do
if key ~= "" and (pattern == "" or find(upper(key),pattern)) then
report_lists(key)
report_lists(" env: %s",tabstr(rawget(environment,key)) or "unset")
@@ -13879,9 +15194,9 @@ function resolvers.listers.variables(pattern)
report_lists(" res: %s",resolvers.resolve(expansions[key]) or "unset")
end
end
- instance.environment = table.fastcopy(env)
- instance.variables = table.fastcopy(var)
- instance.expansions = table.fastcopy(exp)
+ instance.environment = fastcopy(env)
+ instance.variables = fastcopy(var)
+ instance.expansions = fastcopy(exp)
end
function resolvers.listers.configurations(report)
@@ -14168,8 +15483,8 @@ own = { } -- not local, might change
own.libs = { -- order can be made better
'l-string.lua',
- 'l-lpeg.lua',
'l-table.lua',
+ 'l-lpeg.lua',
'l-io.lua',
'l-number.lua',
'l-set.lua',
@@ -14240,7 +15555,7 @@ end
-- End of hack.
-local format, gsub, gmatch, match = string.format, string.gsub, string.gmatch, string.match
+local format, gsub, gmatch, match, find = string.format, string.gsub, string.gmatch, string.match, string.find
local concat = table.concat
own.name = (environment and environment.ownname) or arg[0] or 'mtxrun.lua'
@@ -14362,7 +15677,7 @@ local helpinfo = [[
--resolve resolve prefixed arguments
--ctxlua run internally (using preloaded libs)
--internal run script using built in libraries (same as --ctxlua)
---locate locate given filename
+--locate locate given filename in database (default) or system (--first --all --detail)
--autotree use texmf tree cf. env 'texmfstart_tree' or 'texmfstarttree'
--tree=pathtotree use given texmf tree (default file: 'setuptex.tmf')
@@ -14410,7 +15725,7 @@ local helpinfo = [[
local application = logs.application {
name = "mtxrun",
- banner = "ConTeXt TDS Runner Tool 1.30",
+ banner = "ConTeXt TDS Runner Tool 1.31",
helpinfo = helpinfo,
}
@@ -14455,12 +15770,12 @@ runners.registered = {
xmltools = { 'xmltools.rb', true },
-- luatools = { 'luatools.lua', true },
mtxtools = { 'mtxtools.rb', true },
- pdftrimwhite = { 'pdftrimwhite.pl', false }
+ pdftrimwhite = { 'pdftrimwhite.pl', false },
}
runners.launchers = {
windows = { },
- unix = { }
+ unix = { },
}
-- like runners.libpath("framework"): looks on script's subpath
@@ -14487,7 +15802,7 @@ function runners.prepare()
end
local touchname = environment.argument("iftouched")
if type(touchname) == "string" and touchname ~= "" then
- local oldname, newname = string.split(touchname, ",")
+ local oldname, newname = string.splitup(touchname, ",")
if oldname and newname and oldname ~= "" and newname ~= "" then
if not file.needs_updating(oldname,newname) then
if e_verbose then
@@ -14521,11 +15836,11 @@ function runners.execute_script(fullname,internal,nosplit)
if path ~= "" then
result = fullname
elseif name then
- name = name:gsub("^int[%a]*:",function()
+ name = gsub(name,"^int[%a]*:",function()
internal = true
return ""
end )
- name = name:gsub("^script:","")
+ name = gsub(name,"^script:","")
if suffix == "" and runners.registered[name] and runners.registered[name][1] then
name = runners.registered[name][1]
suffix = file.extname(name)
@@ -14555,13 +15870,15 @@ function runners.execute_script(fullname,internal,nosplit)
environment.ownscript = result
dofile(result)
else
- local quotedresult = result
- if string.match(result,' ') and not string.match(result,"^\".*\"$") then quotedresult = '"' .. result .. '"' end
local binary = runners.applications[file.extname(result)]
+ result = string.quoted(string.unquoted(result))
+ -- if string.match(result,' ') and not string.match(result,"^\".*\"$") then
+ -- result = '"' .. result .. '"'
+ -- end
if binary and binary ~= "" then
- quotedresult = binary .. " " .. quotedresult
+ result = binary .. " " .. result
end
- local command = quotedresult .. " " .. environment.reconstructcommandline(environment.arguments_after,noquote)
+ local command = result .. " " .. environment.reconstructcommandline(environment.arguments_after,noquote)
if e_verbose then
report()
report("executing: %s",command)
@@ -14606,7 +15923,7 @@ function runners.execute_program(fullname)
local before, after = environment.splitarguments(fullname)
for k=1,#after do after[k] = resolvers.resolve(after[k]) end
environment.initializearguments(after)
- fullname = fullname:gsub("^bin:","")
+ fullname = gsub(fullname,"^bin:","")
local command = fullname .. " " .. (environment.reconstructcommandline(after or "",noquote) or "")
report()
report("executing: %s",command)
@@ -14667,10 +15984,24 @@ function runners.resolve_string(filename)
end
end
-function runners.locate_file(filename)
- -- differs from texmfstart where locate appends .com .exe .bat ... todo
+-- differs from texmfstart where locate appends .com .exe .bat ... todo
+
+function runners.locate_file(filename) -- was given file but only searches in tree
if filename and filename ~= "" then
- runners.report_location(resolvers.findgivenfile(filename))
+ if environment.argument("first") then
+ runners.report_location(resolvers.findfile(filename))
+ -- resolvers.dowithfilesandreport(resolvers.findfile,filename)
+ elseif environment.argument("all") then
+ local result, status = resolvers.findfiles(filename)
+ if status and environment.argument("detail") then
+ runners.report_location(status)
+ else
+ runners.report_location(result)
+ end
+ else
+ runners.report_location(resolvers.findgivenfile(filename))
+ -- resolvers.dowithfilesandreport(resolvers.findgivenfile,filename)
+ end
end
end
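
Taken together with the helpinfo change earlier in this diff, --locate now resolves via findgivenfile by default, via findfile with --first, and via findfiles with --all, optionally reporting the per-method status with --detail. A condensed, side-effect-free sketch of that dispatch with the dependencies passed in:

local function locate(filename,flags,findfile,findfiles,findgivenfile)
    if flags.first then
        return findfile(filename)                 -- first match in the tree
    elseif flags.all then
        local result, status = findfiles(filename)
        return flags.detail and status or result  -- all matches, optionally with method info
    else
        return findgivenfile(filename)            -- database lookup (default)
    end
end
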
@@ -14679,12 +16010,12 @@ function runners.locate_platform()
end
function runners.report_location(result)
- if e_verbose then
- reportline()
- if result and result ~= "" then
- report(result)
- else
- report("not found")
+ if type(result) == "table" then
+ for i=1,#result do
+ if i > 1 then
+ io.write("\n")
+ end
+ io.write(result[i])
end
else
io.write(result)
@@ -14742,8 +16073,8 @@ function resolvers.launch(str)
end
function runners.launch_file(filename)
- instance.allresults = true
trackers.enable("resolvers.locating")
+ local allresults = environment.arguments["all"]
local pattern = environment.arguments["pattern"]
if not pattern or pattern == "" then
pattern = filename
@@ -14751,15 +16082,15 @@ function runners.launch_file(filename)
if not pattern or pattern == "" then
report("provide name or --pattern=")
else
- local t = resolvers.findfiles(pattern)
+ local t = resolvers.findfiles(pattern,nil,allresults)
if not t or #t == 0 then
- t = resolvers.findfiles("*/" .. pattern)
+ t = resolvers.findfiles("*/" .. pattern,nil,allresults)
end
if not t or #t == 0 then
- t = resolvers.findfiles("*/" .. pattern .. "*")
+ t = resolvers.findfiles("*/" .. pattern .. "*",nil,allresults)
end
if t and #t > 0 then
- if environment.arguments["all"] then
+ if allresults then
for _, v in pairs(t) do
report("launching %s", v)
resolvers.launch(v)
@@ -14774,6 +16105,11 @@ function runners.launch_file(filename)
end
end
+local mtxprefixes = {
+ { "^mtx%-", "mtx-" },
+ { "^mtx%-t%-", "mtx-t-" },
+}
+
function runners.find_mtx_script(filename)
local function found(name)
local path = file.dirname(name)
@@ -14799,24 +16135,27 @@ function runners.find_mtx_script(filename)
return fullname
end
-- mtx- prefix checking
- local mtxprefix = (filename:find("^mtx%-") and "") or "mtx-"
- -- context namespace, mtx-<filename>
- fullname = mtxprefix .. filename
- fullname = found(fullname) or resolvers.findfile(fullname)
- if fullname and fullname ~= "" then
- return fullname
- end
- -- context namespace, mtx-<filename>s
- fullname = mtxprefix .. basename .. "s" .. "." .. suffix
- fullname = found(fullname) or resolvers.findfile(fullname)
- if fullname and fullname ~= "" then
- return fullname
- end
- -- context namespace, mtx-<filename minus trailing s>
- fullname = mtxprefix .. basename:gsub("s$","") .. "." .. suffix
- fullname = found(fullname) or resolvers.findfile(fullname)
- if fullname and fullname ~= "" then
- return fullname
+ for i=1,#mtxprefixes do
+ local mtxprefix = mtxprefixes[i]
+ mtxprefix = find(filename,mtxprefix[1]) and "" or mtxprefix[2]
+ -- context namespace, mtx-<filename>
+ fullname = mtxprefix .. filename
+ fullname = found(fullname) or resolvers.findfile(fullname)
+ if fullname and fullname ~= "" then
+ return fullname
+ end
+ -- context namespace, mtx-<filename>s
+ fullname = mtxprefix .. basename .. "s" .. "." .. suffix
+ fullname = found(fullname) or resolvers.findfile(fullname)
+ if fullname and fullname ~= "" then
+ return fullname
+ end
+ -- context namespace, mtx-<filename minus trailing s>
+ fullname = mtxprefix .. gsub(basename,"s$","") .. "." .. suffix
+ fullname = found(fullname) or resolvers.findfile(fullname)
+ if fullname and fullname ~= "" then
+ return fullname
+ end
end
-- context namespace, just <filename>
fullname = resolvers.findfile(filename)
@@ -14963,9 +16302,11 @@ instance.lsrmode = environment.argument("lsr") or false
local is_mkii_stub = runners.registered[file.removesuffix(file.basename(filename))]
-if environment.argument("usekpse") or environment.argument("forcekpse") or is_mkii_stub then
+local e_argument = environment.argument
+
+if e_argument("usekpse") or e_argument("forcekpse") or is_mkii_stub then
- resolvers.load_tree(environment.argument('tree'),true) -- force resolve of TEXMFCNF
+ resolvers.load_tree(e_argument('tree'),true) -- force resolve of TEXMFCNF
os.setenv("engine","")
os.setenv("progname","")
@@ -14978,7 +16319,7 @@ if environment.argument("usekpse") or environment.argument("forcekpse") or is_mk
other = "other text files",
}
- local progname = environment.argument("progname") or 'context'
+ local progname = e_argument("progname") or 'context'
local function kpse_initialized()
texconfig.kpse_init = true
@@ -14993,7 +16334,7 @@ if environment.argument("usekpse") or environment.argument("forcekpse") or is_mk
local findfile = resolvers.findfile
local showpath = resolvers.showpath
- if environment.argument("forcekpse") then
+ if e_argument("forcekpse") then
function resolvers.findfile(name,kind)
return (kpse_initialized():find_file(resolvers.cleanpath(name),(kind ~= "" and (remapper[kind] or kind)) or "tex") or "") or ""
@@ -15002,7 +16343,7 @@ if environment.argument("usekpse") or environment.argument("forcekpse") or is_mk
return (kpse_initialized():show_path(name)) or ""
end
- elseif environment.argument("usekpse") or is_mkii_stub then
+ elseif e_argument("usekpse") or is_mkii_stub then
resolvers.load()
@@ -15041,12 +16382,12 @@ else
end
end
- resolvers.load_tree(environment.argument('tree'))
+ resolvers.load_tree(e_argument('tree'),e_argument("resolve"))
end
-if environment.argument("selfmerge") then
+if e_argument("selfmerge") then
-- embed used libraries
@@ -15056,27 +16397,27 @@ if environment.argument("selfmerge") then
utilities.merger.selfmerge(own.name,own.libs,{ found })
end
-elseif environment.argument("selfclean") then
+elseif e_argument("selfclean") then
-- remove embedded libraries
runners.loadbase()
utilities.merger.selfclean(own.name)
-elseif environment.argument("selfupdate") then
+elseif e_argument("selfupdate") then
runners.loadbase()
trackers.enable("resolvers.locating")
resolvers.updatescript(own.name,"mtxrun")
-elseif environment.argument("ctxlua") or environment.argument("internal") then
+elseif e_argument("ctxlua") or e_argument("internal") then
-- run a script by loading it (using libs)
runners.loadbase()
ok = runners.execute_script(filename,true)
-elseif environment.argument("script") or environment.argument("scripts") then
+elseif e_argument("script") or e_argument("scripts") then
-- run a script by loading it (using libs), pass args
@@ -15087,109 +16428,110 @@ elseif environment.argument("script") or environment.argument("scripts") then
ok = runners.execute_ctx_script(filename)
end
-elseif environment.argument("execute") then
+elseif e_argument("execute") then
-- execute script
runners.loadbase()
ok = runners.execute_script(filename)
-elseif environment.argument("direct") then
+elseif e_argument("direct") then
-- equals bin:
runners.loadbase()
ok = runners.execute_program(filename)
-elseif environment.argument("edit") then
+elseif e_argument("edit") then
-- edit file
runners.loadbase()
runners.edit_script(filename)
-elseif environment.argument("launch") then
+elseif e_argument("launch") then
runners.loadbase()
runners.launch_file(filename)
-elseif environment.argument("makestubs") then
+elseif e_argument("makestubs") then
    -- make stubs (deprecated)
runners.handle_stubs(true)
-elseif environment.argument("removestubs") then
+elseif e_argument("removestubs") then
    -- remove stubs (deprecated)
runners.loadbase()
runners.handle_stubs(false)
-elseif environment.argument("resolve") then
+elseif e_argument("resolve") then
-- resolve string
runners.loadbase()
runners.resolve_string(filename)
-elseif environment.argument("locate") then
+elseif e_argument("locate") then
- -- locate file
+ -- locate file (only database)
runners.loadbase()
runners.locate_file(filename)
-elseif environment.argument("platform") or environment.argument("show-platform") then
+elseif e_argument("platform") or e_argument("show-platform") then
-- locate platform
runners.loadbase()
runners.locate_platform()
-elseif environment.argument("prefixes") then
+elseif e_argument("prefixes") then
runners.loadbase()
runners.prefixes()
-elseif environment.argument("timedrun") then
+elseif e_argument("timedrun") then
-- locate platform
runners.loadbase()
runners.timedrun(filename)
-elseif environment.argument("variables") or environment.argument("show-variables") or environment.argument("expansions") or environment.argument("show-expansions") then
+elseif e_argument("variables") or e_argument("show-variables") or e_argument("expansions") or e_argument("show-expansions") then
-- luatools: runners.execute_ctx_script("mtx-base","--expansions",filename)
resolvers.load("nofiles")
- resolvers.listers.variables(environment.argument("pattern"))
+ resolvers.listers.variables(e_argument("pattern"))
-elseif environment.argument("configurations") or environment.argument("show-configurations") then
+elseif e_argument("configurations") or e_argument("show-configurations") then
-- luatools: runners.execute_ctx_script("mtx-base","--configurations",filename)
resolvers.load("nofiles")
resolvers.listers.configurations()
-elseif environment.argument("find-file") then
+elseif e_argument("find-file") then
-- luatools: runners.execute_ctx_script("mtx-base","--find-file",filename)
resolvers.load()
- local e_pattern = environment.argument("pattern")
- local e_format = environment.argument("format")
+ local e_all = e_argument("all")
+ local e_pattern = e_argument("pattern")
+ local e_format = e_argument("format")
+ local finder = e_all and resolvers.findfiles or resolvers.findfile
if not e_pattern then
runners.register_arguments(filename)
environment.initializearguments(environment.arguments_after)
- resolvers.dowithfilesandreport(resolvers.findfiles,environment.files,e_format)
+ resolvers.dowithfilesandreport(finder,environment.files,e_format)
elseif type(e_pattern) == "string" then
- instance.allresults = true -- brrrr
- resolvers.dowithfilesandreport(resolvers.findfiles,{ e_pattern }, e_format)
+ resolvers.dowithfilesandreport(finder,{ e_pattern },e_format)
end
-elseif environment.argument("find-path") then
+elseif e_argument("find-path") then
-- luatools: runners.execute_ctx_script("mtx-base","--find-path",filename)
@@ -15201,7 +16543,7 @@ elseif environment.argument("find-path") then
print(path)
end
-elseif environment.argument("expand-braces") then
+elseif e_argument("expand-braces") then
-- luatools: runners.execute_ctx_script("mtx-base","--expand-braces",filename)
@@ -15210,7 +16552,7 @@ elseif environment.argument("expand-braces") then
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.expandbraces, environment.files)
-elseif environment.argument("expand-path") then
+elseif e_argument("expand-path") then
-- luatools: runners.execute_ctx_script("mtx-base","--expand-path",filename)
@@ -15219,7 +16561,7 @@ elseif environment.argument("expand-path") then
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.expandpath, environment.files)
-elseif environment.argument("expand-var") or environment.argument("expand-variable") then
+elseif e_argument("expand-var") or e_argument("expand-variable") then
-- luatools: runners.execute_ctx_script("mtx-base","--expand-var",filename)
@@ -15228,7 +16570,7 @@ elseif environment.argument("expand-var") or environment.argument("expand-variab
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.expansion, environment.files)
-elseif environment.argument("show-path") or environment.argument("path-value") then
+elseif e_argument("show-path") or e_argument("path-value") then
-- luatools: runners.execute_ctx_script("mtx-base","--show-path",filename)
@@ -15237,7 +16579,7 @@ elseif environment.argument("show-path") or environment.argument("path-value") t
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.showpath, environment.files)
-elseif environment.argument("var-value") or environment.argument("show-value") then
+elseif e_argument("var-value") or e_argument("show-value") then
-- luatools: runners.execute_ctx_script("mtx-base","--show-value",filename)
@@ -15246,28 +16588,36 @@ elseif environment.argument("var-value") or environment.argument("show-value") t
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.variable,environment.files)
-elseif environment.argument("format-path") then
+elseif e_argument("format-path") then
-- luatools: runners.execute_ctx_script("mtx-base","--format-path",filename)
resolvers.load()
report(caches.getwritablepath("format"))
-elseif environment.argument("pattern") then
+elseif e_argument("pattern") then
-- luatools
- runners.execute_ctx_script("mtx-base","--pattern='" .. environment.argument("pattern") .. "'",filename)
+ runners.execute_ctx_script("mtx-base","--pattern='" .. e_argument("pattern") .. "'",filename)
-elseif environment.argument("generate") then
+elseif e_argument("generate") then
-- luatools
- instance.renewcache = true
- trackers.enable("resolvers.locating")
- resolvers.load()
+ if filename and filename ~= "" then
+ resolvers.load("nofiles")
+ trackers.enable("resolvers.locating")
+ resolvers.renew(filename)
+ else
+ instance.renewcache = true
+ trackers.enable("resolvers.locating")
+ resolvers.load()
+ end
-elseif environment.argument("make") or environment.argument("ini") or environment.argument("compile") then
+ e_verbose = true
+
+elseif e_argument("make") or e_argument("ini") or e_argument("compile") then
-- luatools: runners.execute_ctx_script("mtx-base","--make",filename)
@@ -15275,29 +16625,33 @@ elseif environment.argument("make") or environment.argument("ini") or environmen
trackers.enable("resolvers.locating")
environment.make_format(filename)
-elseif environment.argument("run") then
+elseif e_argument("run") then
-- luatools
runners.execute_ctx_script("mtx-base","--run",filename)
-elseif environment.argument("fmt") then
+elseif e_argument("fmt") then
-- luatools
runners.execute_ctx_script("mtx-base","--fmt",filename)
-elseif environment.argument("help") and filename=='base' then
+elseif e_argument("help") and filename=='base' then
-- luatools
runners.execute_ctx_script("mtx-base","--help")
-elseif environment.argument("help") or filename=='help' or filename == "" then
+elseif e_argument("version") then
+
+ application.version()
+
+elseif e_argument("help") or filename=='help' or filename == "" then
application.help()
-elseif filename:find("^bin:") then
+elseif find(filename,"^bin:") then
runners.loadbase()
ok = runners.execute_program(filename)
diff --git a/Master/bin/powerpc-aix/mtxrun b/Master/bin/powerpc-aix/mtxrun
index 7adcd3023a8..335c4fcb9e0 100755
--- a/Master/bin/powerpc-aix/mtxrun
+++ b/Master/bin/powerpc-aix/mtxrun
@@ -160,509 +160,6 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['l-lpeg'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local lpeg = require("lpeg")
-
-local type = type
-
--- Beware, we predefine a bunch of patterns here and one reason for doing so
--- is that we get consistent behaviour in some of the visualizers.
-
-lpeg.patterns = lpeg.patterns or { } -- so that we can share
-local patterns = lpeg.patterns
-
-local P, R, S, V, match = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.match
-local Ct, C, Cs, Cc = lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc
-local lpegtype = lpeg.type
-
-local utfcharacters = string.utfcharacters
-local utfgmatch = unicode and unicode.utf8.gmatch
-
-local anything = P(1)
-local endofstring = P(-1)
-local alwaysmatched = P(true)
-
-patterns.anything = anything
-patterns.endofstring = endofstring
-patterns.beginofstring = alwaysmatched
-patterns.alwaysmatched = alwaysmatched
-
-local digit, sign = R('09'), S('+-')
-local cr, lf, crlf = P("\r"), P("\n"), P("\r\n")
-local newline = crlf + cr + lf
-local escaped = P("\\") * anything
-local squote = P("'")
-local dquote = P('"')
-local space = P(" ")
-
-local utfbom_32_be = P('\000\000\254\255')
-local utfbom_32_le = P('\255\254\000\000')
-local utfbom_16_be = P('\255\254')
-local utfbom_16_le = P('\254\255')
-local utfbom_8 = P('\239\187\191')
-local utfbom = utfbom_32_be + utfbom_32_le
- + utfbom_16_be + utfbom_16_le
- + utfbom_8
-local utftype = utfbom_32_be / "utf-32-be" + utfbom_32_le / "utf-32-le"
- + utfbom_16_be / "utf-16-be" + utfbom_16_le / "utf-16-le"
- + utfbom_8 / "utf-8" + alwaysmatched / "unknown"
-
-local utf8next = R("\128\191")
-
-patterns.utf8one = R("\000\127")
-patterns.utf8two = R("\194\223") * utf8next
-patterns.utf8three = R("\224\239") * utf8next * utf8next
-patterns.utf8four = R("\240\244") * utf8next * utf8next * utf8next
-patterns.utfbom = utfbom
-patterns.utftype = utftype
-
-local utf8char = patterns.utf8one + patterns.utf8two + patterns.utf8three + patterns.utf8four
-local validutf8char = utf8char^0 * endofstring * Cc(true) + Cc(false)
-
-patterns.utf8 = utf8char
-patterns.utf8char = utf8char
-patterns.validutf8 = validutf8char
-patterns.validutf8char = validutf8char
-
-patterns.digit = digit
-patterns.sign = sign
-patterns.cardinal = sign^0 * digit^1
-patterns.integer = sign^0 * digit^1
-patterns.float = sign^0 * digit^0 * P('.') * digit^1
-patterns.cfloat = sign^0 * digit^0 * P(',') * digit^1
-patterns.number = patterns.float + patterns.integer
-patterns.cnumber = patterns.cfloat + patterns.integer
-patterns.oct = P("0") * R("07")^1
-patterns.octal = patterns.oct
-patterns.HEX = P("0x") * R("09","AF")^1
-patterns.hex = P("0x") * R("09","af")^1
-patterns.hexadecimal = P("0x") * R("09","AF","af")^1
-patterns.lowercase = R("az")
-patterns.uppercase = R("AZ")
-patterns.letter = patterns.lowercase + patterns.uppercase
-patterns.space = space
-patterns.tab = P("\t")
-patterns.spaceortab = patterns.space + patterns.tab
-patterns.eol = S("\n\r")
-patterns.spacer = S(" \t\f\v") -- + string.char(0xc2, 0xa0) if we want utf (cf mail roberto)
-patterns.newline = newline
-patterns.emptyline = newline^1
-patterns.nonspacer = 1 - patterns.spacer
-patterns.whitespace = patterns.eol + patterns.spacer
-patterns.nonwhitespace = 1 - patterns.whitespace
-patterns.equal = P("=")
-patterns.comma = P(",")
-patterns.commaspacer = P(",") * patterns.spacer^0
-patterns.period = P(".")
-patterns.colon = P(":")
-patterns.semicolon = P(";")
-patterns.underscore = P("_")
-patterns.escaped = escaped
-patterns.squote = squote
-patterns.dquote = dquote
-patterns.nosquote = (escaped + (1-squote))^0
-patterns.nodquote = (escaped + (1-dquote))^0
-patterns.unsingle = (squote/"") * patterns.nosquote * (squote/"")
-patterns.undouble = (dquote/"") * patterns.nodquote * (dquote/"")
-patterns.unquoted = patterns.undouble + patterns.unsingle -- more often undouble
-patterns.unspacer = ((patterns.spacer^1)/"")^0
-
-patterns.somecontent = (anything - newline - space)^1 -- (utf8char - newline - space)^1
-patterns.beginline = #(1-newline)
-
-local unquoted = Cs(patterns.unquoted * endofstring) -- not C
-
-function string.unquoted(str)
- return match(unquoted,str) or str
-end
-
-
-function lpeg.anywhere(pattern) --slightly adapted from website
- return P { P(pattern) + 1 * V(1) } -- why so complex?
-end
-
-function lpeg.splitter(pattern, action)
- return (((1-P(pattern))^1)/action+1)^0
-end
-
-local splitters_s, splitters_m = { }, { }
-
-local function splitat(separator,single)
- local splitter = (single and splitters_s[separator]) or splitters_m[separator]
- if not splitter then
- separator = P(separator)
- local other = C((1 - separator)^0)
- if single then
- local any = anything
- splitter = other * (separator * C(any^0) + "") -- ?
- splitters_s[separator] = splitter
- else
- splitter = other * (separator * other)^0
- splitters_m[separator] = splitter
- end
- end
- return splitter
-end
-
-lpeg.splitat = splitat
-
-
-local cache = { }
-
-function lpeg.split(separator,str)
- local c = cache[separator]
- if not c then
- c = Ct(splitat(separator))
- cache[separator] = c
- end
- return match(c,str)
-end
-
-function string.split(str,separator)
- local c = cache[separator]
- if not c then
- c = Ct(splitat(separator))
- cache[separator] = c
- end
- return match(c,str)
-end
-
-local spacing = patterns.spacer^0 * newline -- sort of strip
-local empty = spacing * Cc("")
-local nonempty = Cs((1-spacing)^1) * spacing^-1
-local content = (empty + nonempty)^1
-
-patterns.textline = content
-
-
-local linesplitter = Ct(splitat(newline))
-
-patterns.linesplitter = linesplitter
-
-function string.splitlines(str)
- return match(linesplitter,str)
-end
-
-local utflinesplitter = utfbom^-1 * Ct(splitat(newline))
-
-patterns.utflinesplitter = utflinesplitter
-
-function string.utfsplitlines(str)
- return match(utflinesplitter,str)
-end
-
-
-local cache = { }
-
-function lpeg.checkedsplit(separator,str)
- local c = cache[separator]
- if not c then
- separator = P(separator)
- local other = C((1 - separator)^1)
- c = Ct(separator^0 * other * (separator^1 * other)^0)
- cache[separator] = c
- end
- return match(c,str)
-end
-
-function string.checkedsplit(str,separator)
- local c = cache[separator]
- if not c then
- separator = P(separator)
- local other = C((1 - separator)^1)
- c = Ct(separator^0 * other * (separator^1 * other)^0)
- cache[separator] = c
- end
- return match(c,str)
-end
-
-
-local f1 = string.byte
-
-local function f2(s) local c1, c2 = f1(s,1,2) return c1 * 64 + c2 - 12416 end
-local function f3(s) local c1, c2, c3 = f1(s,1,3) return (c1 * 64 + c2) * 64 + c3 - 925824 end
-local function f4(s) local c1, c2, c3, c4 = f1(s,1,4) return ((c1 * 64 + c2) * 64 + c3) * 64 + c4 - 63447168 end
-
-local utf8byte = patterns.utf8one/f1 + patterns.utf8two/f2 + patterns.utf8three/f3 + patterns.utf8four/f4
-
-patterns.utf8byte = utf8byte
-
-
-
-local cache = { }
-
-function lpeg.stripper(str)
- if type(str) == "string" then
- local s = cache[str]
- if not s then
- s = Cs(((S(str)^1)/"" + 1)^0)
- cache[str] = s
- end
- return s
- else
- return Cs(((str^1)/"" + 1)^0)
- end
-end
-
-local cache = { }
-
-function lpeg.keeper(str)
- if type(str) == "string" then
- local s = cache[str]
- if not s then
- s = Cs((((1-S(str))^1)/"" + 1)^0)
- cache[str] = s
- end
- return s
- else
- return Cs((((1-str)^1)/"" + 1)^0)
- end
-end
-
-function lpeg.frontstripper(str) -- or pattern (yet undocumented)
- return (P(str) + P(true)) * Cs(P(1)^0)
-end
-
-function lpeg.endstripper(str) -- or pattern (yet undocumented)
- return Cs((1 - P(str) * P(-1))^0)
-end
-
--- Just for fun I looked at the used bytecode and
--- p = (p and p + pp) or pp gets one more (testset).
-
-function lpeg.replacer(one,two)
- if type(one) == "table" then
- local no = #one
- if no > 0 then
- local p
- for i=1,no do
- local o = one[i]
- local pp = P(o[1]) / o[2]
- if p then
- p = p + pp
- else
- p = pp
- end
- end
- return Cs((p + 1)^0)
- end
- else
- two = two or ""
- return Cs((P(one)/two + 1)^0)
- end
-end
-
-local splitters_f, splitters_s = { }, { }
-
-function lpeg.firstofsplit(separator) -- always return value
- local splitter = splitters_f[separator]
- if not splitter then
- separator = P(separator)
- splitter = C((1 - separator)^0)
- splitters_f[separator] = splitter
- end
- return splitter
-end
-
-function lpeg.secondofsplit(separator) -- nil if not split
- local splitter = splitters_s[separator]
- if not splitter then
- separator = P(separator)
- splitter = (1 - separator)^0 * separator * C(anything^0)
- splitters_s[separator] = splitter
- end
- return splitter
-end
-
-function lpeg.balancer(left,right)
- left, right = P(left), P(right)
- return P { left * ((1 - left - right) + V(1))^0 * right }
-end
-
-
-
-local nany = utf8char/""
-
-function lpeg.counter(pattern)
- pattern = Cs((P(pattern)/" " + nany)^0)
- return function(str)
- return #match(pattern,str)
- end
-end
-
-if utfgmatch then
-
- function lpeg.count(str,what) -- replaces string.count
- if type(what) == "string" then
- local n = 0
- for _ in utfgmatch(str,what) do
- n = n + 1
- end
- return n
- else -- 4 times slower but still faster than / function
- return #match(Cs((P(what)/" " + nany)^0),str)
- end
- end
-
-else
-
- local cache = { }
-
- function lpeg.count(str,what) -- replaces string.count
- if type(what) == "string" then
- local p = cache[what]
- if not p then
- p = Cs((P(what)/" " + nany)^0)
- cache[p] = p
- end
- return #match(p,str)
- else -- 4 times slower but still faster than / function
- return #match(Cs((P(what)/" " + nany)^0),str)
- end
- end
-
-end
-
-local patterns_escapes = { -- also defines in l-string
- ["%"] = "%%",
- ["."] = "%.",
- ["+"] = "%+", ["-"] = "%-", ["*"] = "%*",
- ["["] = "%[", ["]"] = "%]",
- ["("] = "%)", [")"] = "%)",
- -- ["{"] = "%{", ["}"] = "%}"
- -- ["^"] = "%^", ["$"] = "%$",
-}
-
-local simple_escapes = { -- also defines in l-string
- ["-"] = "%-",
- ["."] = "%.",
- ["?"] = ".",
- ["*"] = ".*",
-}
-
-local p = Cs((S("-.+*%()[]") / patterns_escapes + anything)^0)
-local s = Cs((S("-.+*%()[]") / simple_escapes + anything)^0)
-
-function string.escapedpattern(str,simple)
- return match(simple and s or p,str)
-end
-
--- utf extensies
-
-lpeg.UP = lpeg.P
-
-if utfcharacters then
-
- function lpeg.US(str)
- local p
- for uc in utfcharacters(str) do
- if p then
- p = p + P(uc)
- else
- p = P(uc)
- end
- end
- return p
- end
-
-
-elseif utfgmatch then
-
- function lpeg.US(str)
- local p
- for uc in utfgmatch(str,".") do
- if p then
- p = p + P(uc)
- else
- p = P(uc)
- end
- end
- return p
- end
-
-else
-
- function lpeg.US(str)
- local p
- local f = function(uc)
- if p then
- p = p + P(uc)
- else
- p = P(uc)
- end
- end
- match((utf8char/f)^0,str)
- return p
- end
-
-end
-
-local range = Cs(utf8byte) * (Cs(utf8byte) + Cc(false))
-
-local utfchar = unicode and unicode.utf8 and unicode.utf8.char
-
-function lpeg.UR(str,more)
- local first, last
- if type(str) == "number" then
- first = str
- last = more or first
- else
- first, last = match(range,str)
- if not last then
- return P(str)
- end
- end
- if first == last then
- return P(str)
- elseif utfchar and last - first < 8 then -- a somewhat arbitrary criterium
- local p
- for i=first,last do
- if p then
- p = p + P(utfchar(i))
- else
- p = P(utfchar(i))
- end
- end
- return p -- nil when invalid range
- else
- local f = function(b)
- return b >= first and b <= last
- end
- return utf8byte / f -- nil when invalid range
- end
-end
-
-
-
-function lpeg.oneof(list,...) -- lpeg.oneof("elseif","else","if","then")
- if type(list) ~= "table" then
- list = { list, ... }
- end
- -- sort(list) -- longest match first
- local p = P(list[1])
- for l=2,#list do
- p = p + P(list[l])
- end
- return p
-end
-
-function lpeg.is_lpeg(p)
- return p and lpegtype(p) == "pattern"
-end
-
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
if not modules then modules = { } end modules ['l-table'] = {
version = 1.001,
comment = "companion to luat-lib.mkiv",
@@ -675,11 +172,12 @@ local type, next, tostring, tonumber, ipairs, table, string = type, next, tostri
local concat, sort, insert, remove = table.concat, table.sort, table.insert, table.remove
local format, find, gsub, lower, dump, match = string.format, string.find, string.gsub, string.lower, string.dump, string.match
local getmetatable, setmetatable = getmetatable, setmetatable
+local getinfo = debug.getinfo
-- Starting with version 5.2 Lua no longer provide ipairs, which makes
-- sense. As we already used the for loop and # in most places the
-- impact on ConTeXt was not that large; the remaining ipairs already
--- have been replaced. In a similar fashio we also hardly used pairs.
+-- have been replaced. In a similar fashion we also hardly used pairs.
--
-- Just in case, we provide the fallbacks as discussed in Programming
-- in Lua (http://www.lua.org/pil/7.3.html):
@@ -1022,6 +520,8 @@ end
-- problem: there is no good number_to_string converter with the best resolution
+local function dummy() end
+
local function do_serialize(root,name,depth,level,indexed)
if level > 0 then
depth = depth .. " "
@@ -1214,19 +714,20 @@ local function do_serialize(root,name,depth,level,indexed)
end
elseif t == "function" then
if functions then
+ local f = getinfo(v).what == "C" and dump(dummy) or dump(v)
+ -- local f = getinfo(v).what == "C" and dump(function(...) return v(...) end) or dump(v)
if tk == "number" then -- or find(k,"^%d+$") then
if hexify then
- handle(format("%s [0x%04X]=loadstring(%q),",depth,k,dump(v)))
+ handle(format("%s [0x%04X]=loadstring(%q),",depth,k,f))
else
- handle(format("%s [%s]=loadstring(%q),",depth,k,dump(v)))
+ handle(format("%s [%s]=loadstring(%q),",depth,k,f))
end
elseif tk == "boolean" then
- handle(format("%s [%s]=loadstring(%q),",depth,tostring(k),dump(v)))
+ handle(format("%s [%s]=loadstring(%q),",depth,tostring(k),f))
elseif noquotes and not reserved[k] and find(k,"^%a[%w%_]*$") then
- handle(format("%s %s=loadstring(%q),",depth,k,dump(v)))
+ handle(format("%s %s=loadstring(%q),",depth,k,f))
else
- -- handle(format("%s [%q]=loadstring(%q),",depth,k,dump(v)))
- handle(format("%s [%q]=loadstring(%q),",depth,k,debug.getinfo(v).what == "C" and "C code" or dump(v)))
+ handle(format("%s [%q]=loadstring(%q),",depth,k,f))
end
end
else
@@ -1571,6 +1072,730 @@ function table.has_one_entry(t)
return t and not next(t,next(t))
end
+-- new
+
+function table.loweredkeys(t) -- maybe utf
+ local l = { }
+ for k, v in next, t do
+ l[lower(k)] = v
+ end
+ return l
+end
+
+-- new, might move (maybe duplicate)
+
+function table.unique(old)
+ local hash = { }
+ local new = { }
+ local n = 0
+ for i=1,#old do
+ local oi = old[i]
+ if not hash[oi] then
+ n = n + 1
+ new[n] = oi
+ hash[oi] = true
+ end
+ end
+ return new
+end
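
A short usage sketch for the two helpers above (the input values are made up): table.unique keeps the first occurrence of each value in order, table.loweredkeys re-keys a hash with lowercased keys.

local u = table.unique { "a", "b", "a", "c", "b" }
-- u == { "a", "b", "c" }

local l = table.loweredkeys { Foo = 1, BAR = 2 }
-- l.foo == 1 and l.bar == 2
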
+
+-- function table.sorted(t,...)
+-- table.sort(t,...)
+-- return t -- still sorts in-place
+-- end
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+if not modules then modules = { } end modules ['l-lpeg'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+
+-- a new lpeg fails on a #(1-P(":")) test and really needs a + P(-1)
+
+local lpeg = require("lpeg")
+
+-- tracing (only used when we encounter a problem in the integration of lpeg into luatex)
+
+local report = texio and texio.write_nl or print
+
+
+
+
+local type = type
+local byte, char, gmatch = string.byte, string.char, string.gmatch
+
+-- Beware, we predefine a bunch of patterns here and one reason for doing so
+-- is that we get consistent behaviour in some of the visualizers.
+
+lpeg.patterns = lpeg.patterns or { } -- so that we can share
+local patterns = lpeg.patterns
+
+local P, R, S, V, match = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.match
+local Ct, C, Cs, Cc = lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc
+local lpegtype = lpeg.type
+
+local utfcharacters = string.utfcharacters
+local utfgmatch = unicode and unicode.utf8.gmatch
+
+local anything = P(1)
+local endofstring = P(-1)
+local alwaysmatched = P(true)
+
+patterns.anything = anything
+patterns.endofstring = endofstring
+patterns.beginofstring = alwaysmatched
+patterns.alwaysmatched = alwaysmatched
+
+local digit, sign = R('09'), S('+-')
+local cr, lf, crlf = P("\r"), P("\n"), P("\r\n")
+local newline = crlf + S("\r\n") -- cr + lf
+local escaped = P("\\") * anything
+local squote = P("'")
+local dquote = P('"')
+local space = P(" ")
+
+local utfbom_32_be = P('\000\000\254\255')
+local utfbom_32_le = P('\255\254\000\000')
+local utfbom_16_be = P('\255\254')
+local utfbom_16_le = P('\254\255')
+local utfbom_8 = P('\239\187\191')
+local utfbom = utfbom_32_be + utfbom_32_le
+ + utfbom_16_be + utfbom_16_le
+ + utfbom_8
+local utftype = utfbom_32_be / "utf-32-be" + utfbom_32_le / "utf-32-le"
+ + utfbom_16_be / "utf-16-be" + utfbom_16_le / "utf-16-le"
+ + utfbom_8 / "utf-8" + alwaysmatched / "unknown"
+
+local utf8next = R("\128\191")
+
+patterns.utf8one = R("\000\127")
+patterns.utf8two = R("\194\223") * utf8next
+patterns.utf8three = R("\224\239") * utf8next * utf8next
+patterns.utf8four = R("\240\244") * utf8next * utf8next * utf8next
+patterns.utfbom = utfbom
+patterns.utftype = utftype
+
+local utf8char = patterns.utf8one + patterns.utf8two + patterns.utf8three + patterns.utf8four
+local validutf8char = utf8char^0 * endofstring * Cc(true) + Cc(false)
+
+patterns.utf8 = utf8char
+patterns.utf8char = utf8char
+patterns.validutf8 = validutf8char
+patterns.validutf8char = validutf8char
+
+patterns.digit = digit
+patterns.sign = sign
+patterns.cardinal = sign^0 * digit^1
+patterns.integer = sign^0 * digit^1
+patterns.float = sign^0 * digit^0 * P('.') * digit^1
+patterns.cfloat = sign^0 * digit^0 * P(',') * digit^1
+patterns.number = patterns.float + patterns.integer
+patterns.cnumber = patterns.cfloat + patterns.integer
+patterns.oct = P("0") * R("07")^1
+patterns.octal = patterns.oct
+patterns.HEX = P("0x") * R("09","AF")^1
+patterns.hex = P("0x") * R("09","af")^1
+patterns.hexadecimal = P("0x") * R("09","AF","af")^1
+patterns.lowercase = R("az")
+patterns.uppercase = R("AZ")
+patterns.letter = patterns.lowercase + patterns.uppercase
+patterns.space = space
+patterns.tab = P("\t")
+patterns.spaceortab = patterns.space + patterns.tab
+patterns.eol = S("\n\r")
+patterns.spacer = S(" \t\f\v") -- + char(0xc2, 0xa0) if we want utf (cf mail roberto)
+patterns.newline = newline
+patterns.emptyline = newline^1
+patterns.nonspacer = 1 - patterns.spacer
+patterns.whitespace = patterns.eol + patterns.spacer
+patterns.nonwhitespace = 1 - patterns.whitespace
+patterns.equal = P("=")
+patterns.comma = P(",")
+patterns.commaspacer = P(",") * patterns.spacer^0
+patterns.period = P(".")
+patterns.colon = P(":")
+patterns.semicolon = P(";")
+patterns.underscore = P("_")
+patterns.escaped = escaped
+patterns.squote = squote
+patterns.dquote = dquote
+patterns.nosquote = (escaped + (1-squote))^0
+patterns.nodquote = (escaped + (1-dquote))^0
+patterns.unsingle = (squote/"") * patterns.nosquote * (squote/"")
+patterns.undouble = (dquote/"") * patterns.nodquote * (dquote/"")
+patterns.unquoted = patterns.undouble + patterns.unsingle -- more often undouble
+patterns.unspacer = ((patterns.spacer^1)/"")^0
+
+patterns.somecontent = (anything - newline - space)^1 -- (utf8char - newline - space)^1
+patterns.beginline = #(1-newline)
+
+-- print(string.unquoted("test"))
+-- print(string.unquoted([["t\"est"]]))
+-- print(string.unquoted([["t\"est"x]]))
+-- print(string.unquoted("\'test\'"))
+-- print(string.unquoted('"test"'))
+-- print(string.unquoted('"test"'))
+
+function lpeg.anywhere(pattern) --slightly adapted from website
+ return P { P(pattern) + 1 * V(1) } -- why so complex?
+end
+
+function lpeg.splitter(pattern, action)
+ return (((1-P(pattern))^1)/action+1)^0
+end
+
+function lpeg.tsplitter(pattern, action)
+ return Ct((((1-P(pattern))^1)/action+1)^0)
+end
+
+-- problem: the separator can be an lpeg pattern and such a pattern does not hash
+-- too well, but that is acceptable as the key is then not garbage collected
+
+local splitters_s, splitters_m, splitters_t = { }, { }, { }
+
+local function splitat(separator,single)
+ local splitter = (single and splitters_s[separator]) or splitters_m[separator]
+ if not splitter then
+ separator = P(separator)
+ local other = C((1 - separator)^0)
+ if single then
+ local any = anything
+ splitter = other * (separator * C(any^0) + "") -- ?
+ splitters_s[separator] = splitter
+ else
+ splitter = other * (separator * other)^0
+ splitters_m[separator] = splitter
+ end
+ end
+ return splitter
+end
+
+local function tsplitat(separator)
+ local splitter = splitters_t[separator]
+ if not splitter then
+ splitter = Ct(splitat(separator))
+ splitters_t[separator] = splitter
+ end
+ return splitter
+end
+
+lpeg.splitat = splitat
+lpeg.tsplitat = tsplitat
+
+function string.splitup(str,separator)
+ if not separator then
+ separator = ","
+ end
+ return match(splitters_m[separator] or splitat(separator),str)
+end
+
+
+local cache = { }
+
+function lpeg.split(separator,str)
+ local c = cache[separator]
+ if not c then
+ c = tsplitat(separator)
+ cache[separator] = c
+ end
+ return match(c,str)
+end
+
+function string.split(str,separator)
+ local c = cache[separator]
+ if not c then
+ c = tsplitat(separator)
+ cache[separator] = c
+ end
+ return match(c,str)
+end
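+
+-- for example (expected results shown as comments):
+--
+-- inspect(string.split("a,b,c",","))  -- { "a", "b", "c" }
+-- inspect(string.split("a,,b",","))   -- { "a", "", "b" } (empty fields are kept)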
+
+local spacing = patterns.spacer^0 * newline -- sort of strip
+local empty = spacing * Cc("")
+local nonempty = Cs((1-spacing)^1) * spacing^-1
+local content = (empty + nonempty)^1
+
+patterns.textline = content
+
+
+local linesplitter = tsplitat(newline)
+
+patterns.linesplitter = linesplitter
+
+function string.splitlines(str)
+ return match(linesplitter,str)
+end
+
+local utflinesplitter = utfbom^-1 * tsplitat(newline)
+
+patterns.utflinesplitter = utflinesplitter
+
+function string.utfsplitlines(str)
+ return match(utflinesplitter,str or "")
+end
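+
+-- for example:
+--
+-- inspect(string.splitlines("first\nsecond\n")) -- { "first", "second", "" }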
+
+
+local cache = { }
+
+function lpeg.checkedsplit(separator,str)
+ local c = cache[separator]
+ if not c then
+ separator = P(separator)
+ local other = C((1 - separator)^1)
+ c = Ct(separator^0 * other * (separator^1 * other)^0)
+ cache[separator] = c
+ end
+ return match(c,str)
+end
+
+function string.checkedsplit(str,separator)
+ local c = cache[separator]
+ if not c then
+ separator = P(separator)
+ local other = C((1 - separator)^1)
+ c = Ct(separator^0 * other * (separator^1 * other)^0)
+ cache[separator] = c
+ end
+ return match(c,str)
+end
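+
+-- the checked variant skips empty fields, compare:
+--
+-- inspect(lpeg.split(",","a,,b"))        -- { "a", "", "b" }
+-- inspect(lpeg.checkedsplit(",","a,,b")) -- { "a", "b" }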
+
+
+local function f2(s) local c1, c2 = byte(s,1,2) return c1 * 64 + c2 - 12416 end
+local function f3(s) local c1, c2, c3 = byte(s,1,3) return (c1 * 64 + c2) * 64 + c3 - 925824 end
+local function f4(s) local c1, c2, c3, c4 = byte(s,1,4) return ((c1 * 64 + c2) * 64 + c3) * 64 + c4 - 63447168 end
+
+local utf8byte = patterns.utf8one/byte + patterns.utf8two/f2 + patterns.utf8three/f3 + patterns.utf8four/f4
+
+patterns.utf8byte = utf8byte
+
+
+
+local cache = { }
+
+function lpeg.stripper(str)
+ if type(str) == "string" then
+ local s = cache[str]
+ if not s then
+ s = Cs(((S(str)^1)/"" + 1)^0)
+ cache[str] = s
+ end
+ return s
+ else
+ return Cs(((str^1)/"" + 1)^0)
+ end
+end
+
+local cache = { }
+
+function lpeg.keeper(str)
+ if type(str) == "string" then
+ local s = cache[str]
+ if not s then
+ s = Cs((((1-S(str))^1)/"" + 1)^0)
+ cache[str] = s
+ end
+ return s
+ else
+ return Cs((((1-str)^1)/"" + 1)^0)
+ end
+end
+
+function lpeg.frontstripper(str) -- or pattern (yet undocumented)
+ return (P(str) + P(true)) * Cs(P(1)^0)
+end
+
+function lpeg.endstripper(str) -- or pattern (yet undocumented)
+ return Cs((1 - P(str) * P(-1))^0)
+end
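+
+-- some quick checks (expected results shown as comments):
+--
+-- print(lpeg.match(lpeg.stripper("ab"),"abcab"))      -- "c"
+-- print(lpeg.match(lpeg.keeper("ab"),"abcab"))        -- "abab"
+-- print(lpeg.match(lpeg.frontstripper("--"),"--foo")) -- "foo"
+-- print(lpeg.match(lpeg.endstripper(";"),"foo;"))     -- "foo"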
+
+-- Just for fun I looked at the generated bytecode:
+-- p = (p and p + pp) or pp gets one extra instruction (testset).
+
+function lpeg.replacer(one,two)
+ if type(one) == "table" then
+ local no = #one
+ if no > 0 then
+ local p
+ for i=1,no do
+ local o = one[i]
+ local pp = P(o[1]) / o[2]
+ if p then
+ p = p + pp
+ else
+ p = pp
+ end
+ end
+ return Cs((p + 1)^0)
+ end
+ else
+ two = two or ""
+ return Cs((P(one)/two + 1)^0)
+ end
+end
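+
+-- for example:
+--
+-- print(lpeg.match(lpeg.replacer("l","L"),"hello"))                     -- "heLLo"
+-- print(lpeg.match(lpeg.replacer { { "a", "A" }, { "b", "B" } },"abc")) -- "ABc"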
+
+local splitters_f, splitters_s = { }, { }
+
+function lpeg.firstofsplit(separator) -- always return value
+ local splitter = splitters_f[separator]
+ if not splitter then
+ separator = P(separator)
+ splitter = C((1 - separator)^0)
+ splitters_f[separator] = splitter
+ end
+ return splitter
+end
+
+function lpeg.secondofsplit(separator) -- nil if not split
+ local splitter = splitters_s[separator]
+ if not splitter then
+ separator = P(separator)
+ splitter = (1 - separator)^0 * separator * C(anything^0)
+ splitters_s[separator] = splitter
+ end
+ return splitter
+end
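+
+-- for example:
+--
+-- print(lpeg.match(lpeg.firstofsplit(":"),"foo:bar:baz"))  -- "foo"
+-- print(lpeg.match(lpeg.secondofsplit(":"),"foo:bar:baz")) -- "bar:baz"
+-- print(lpeg.match(lpeg.secondofsplit(":"),"foo"))         -- nil (no separator)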
+
+function lpeg.balancer(left,right)
+ left, right = P(left), P(right)
+ return P { left * ((1 - left - right) + V(1))^0 * right }
+end
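+
+-- for example (no captures, so the position after the balanced group is returned):
+--
+-- print(lpeg.match(lpeg.balancer("(",")"),"(a(b)c) d")) -- 8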
+
+
+
+local nany = utf8char/""
+
+function lpeg.counter(pattern)
+ pattern = Cs((P(pattern)/" " + nany)^0)
+ return function(str)
+ return #match(pattern,str)
+ end
+end
+
+if utfgmatch then
+
+ function lpeg.count(str,what) -- replaces string.count
+ if type(what) == "string" then
+ local n = 0
+ for _ in utfgmatch(str,what) do
+ n = n + 1
+ end
+ return n
+ else -- 4 times slower but still faster than / function
+ return #match(Cs((P(what)/" " + nany)^0),str)
+ end
+ end
+
+else
+
+ local cache = { }
+
+ function lpeg.count(str,what) -- replaces string.count
+ if type(what) == "string" then
+ local p = cache[what]
+ if not p then
+ p = Cs((P(what)/" " + nany)^0)
+                cache[what] = p
+ end
+ return #match(p,str)
+ else -- 4 times slower but still faster than / function
+ return #match(Cs((P(what)/" " + nany)^0),str)
+ end
+ end
+
+end
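+
+-- for example (a string argument is counted as a substring, a pattern as a pattern):
+--
+-- print(lpeg.count("banana","an"))        -- 2
+-- print(lpeg.count("banana",lpeg.P("a"))) -- 3
+-- print(lpeg.counter("a")("banana"))      -- 3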
+
+local patterns_escapes = { -- also defined in l-string
+ ["%"] = "%%",
+ ["."] = "%.",
+ ["+"] = "%+", ["-"] = "%-", ["*"] = "%*",
+ ["["] = "%[", ["]"] = "%]",
+    ["("] = "%(", [")"] = "%)",
+ -- ["{"] = "%{", ["}"] = "%}"
+ -- ["^"] = "%^", ["$"] = "%$",
+}
+
+local simple_escapes = { -- also defined in l-string
+ ["-"] = "%-",
+ ["."] = "%.",
+ ["?"] = ".",
+ ["*"] = ".*",
+}
+
+local p = Cs((S("-.+*%()[]") / patterns_escapes + anything)^0)
+local s = Cs((S("-.+*%()[]") / simple_escapes + anything)^0)
+
+function string.escapedpattern(str,simple)
+ return match(simple and s or p,str)
+end
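+
+-- for example:
+--
+-- print(string.escapedpattern("1.5%"))       -- "1%.5%%"
+-- print(string.escapedpattern("*.lua",true)) -- ".*%.lua"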
+
+-- utf extensions
+
+lpeg.UP = lpeg.P
+
+if utfcharacters then
+
+ function lpeg.US(str)
+ local p
+ for uc in utfcharacters(str) do
+ if p then
+ p = p + P(uc)
+ else
+ p = P(uc)
+ end
+ end
+ return p
+ end
+
+
+elseif utfgmatch then
+
+ function lpeg.US(str)
+ local p
+ for uc in utfgmatch(str,".") do
+ if p then
+ p = p + P(uc)
+ else
+ p = P(uc)
+ end
+ end
+ return p
+ end
+
+else
+
+ function lpeg.US(str)
+ local p
+ local f = function(uc)
+ if p then
+ p = p + P(uc)
+ else
+ p = P(uc)
+ end
+ end
+ match((utf8char/f)^0,str)
+ return p
+ end
+
+end
+
+local range = Cs(utf8byte) * (Cs(utf8byte) + Cc(false))
+
+local utfchar = unicode and unicode.utf8 and unicode.utf8.char
+
+function lpeg.UR(str,more)
+ local first, last
+ if type(str) == "number" then
+ first = str
+ last = more or first
+ else
+ first, last = match(range,str)
+ if not last then
+ return P(str)
+ end
+ end
+ if first == last then
+ return P(str)
+    elseif utfchar and last - first < 8 then -- a somewhat arbitrary criterion
+ local p
+ for i=first,last do
+ if p then
+ p = p + P(utfchar(i))
+ else
+ p = P(utfchar(i))
+ end
+ end
+ return p -- nil when invalid range
+ else
+ local f = function(b)
+ return b >= first and b <= last
+ end
+ return utf8byte / f -- nil when invalid range
+ end
+end
+
+
+
+function lpeg.oneof(list,...) -- lpeg.oneof("elseif","else","if","then")
+ if type(list) ~= "table" then
+ list = { list, ... }
+ end
+ -- sort(list) -- longest match first
+ local p = P(list[1])
+ for l=2,#list do
+ p = p + P(list[l])
+ end
+ return p
+end
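+
+-- for example:
+--
+-- print(lpeg.match(lpeg.oneof("elseif","else","if"),"elseif")) -- 7
+-- print(lpeg.match(lpeg.oneof { "red", "green" },"green"))     -- 6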
+
+function lpeg.is_lpeg(p)
+ return p and lpegtype(p) == "pattern"
+end
+
+-- For the moment here, but it might move to utilities. Beware, we need to
+-- have the longest keyword first, so 'aaa' comes before 'aa', which is why we
+-- loop back from the end, i.e. prepend.
+
+local sort, fastcopy, sortedkeys = table.sort, table.fastcopy, table.sortedkeys -- dependency!
+
+function lpeg.append(list,pp,delayed,checked)
+ local p = pp
+ if #list > 0 then
+ local keys = fastcopy(list)
+ sort(keys)
+ for i=#keys,1,-1 do
+ local k = keys[i]
+ if p then
+ p = P(k) + p
+ else
+ p = P(k)
+ end
+ end
+ elseif delayed then -- hm, it looks like the lpeg parser resolves anyway
+ local keys = sortedkeys(list)
+ if p then
+ for i=1,#keys,1 do
+ local k = keys[i]
+ local v = list[k]
+ p = P(k)/list + p
+ end
+ else
+ for i=1,#keys do
+ local k = keys[i]
+ local v = list[k]
+ if p then
+ p = P(k) + p
+ else
+ p = P(k)
+ end
+ end
+ if p then
+ p = p / list
+ end
+ end
+ elseif checked then
+ -- problem: substitution gives a capture
+ local keys = sortedkeys(list)
+ for i=1,#keys do
+ local k = keys[i]
+ local v = list[k]
+ if p then
+ if k == v then
+ p = P(k) + p
+ else
+ p = P(k)/v + p
+ end
+ else
+ if k == v then
+ p = P(k)
+ else
+ p = P(k)/v
+ end
+ end
+ end
+ else
+ local keys = sortedkeys(list)
+ for i=1,#keys do
+ local k = keys[i]
+ local v = list[k]
+ if p then
+ p = P(k)/v + p
+ else
+ p = P(k)/v
+ end
+ end
+ end
+ return p
+end
+
+-- inspect(lpeg.append({ a = "1", aa = "1", aaa = "1" } ,nil,true))
+-- inspect(lpeg.append({ ["degree celsius"] = "1", celsius = "1", degree = "1" } ,nil,true))
+
+-- function lpeg.exact_match(words,case_insensitive)
+-- local pattern = concat(words)
+-- if case_insensitive then
+-- local pattern = S(upper(characters)) + S(lower(characters))
+-- local list = { }
+-- for i=1,#words do
+-- list[lower(words[i])] = true
+-- end
+-- return Cmt(pattern^1, function(_,i,s)
+-- return list[lower(s)] and i
+-- end)
+-- else
+-- local pattern = S(concat(words))
+-- local list = { }
+-- for i=1,#words do
+-- list[words[i]] = true
+-- end
+-- return Cmt(pattern^1, function(_,i,s)
+-- return list[s] and i
+-- end)
+-- end
+-- end
+
+-- experiment:
+
+local function make(t)
+ local p
+-- for k, v in next, t do
+ for k, v in table.sortedhash(t) do
+ if not p then
+ if next(v) then
+ p = P(k) * make(v)
+ else
+ p = P(k)
+ end
+ else
+ if next(v) then
+ p = p + P(k) * make(v)
+ else
+ p = p + P(k)
+ end
+ end
+ end
+ return p
+end
+
+function lpeg.utfchartabletopattern(list)
+ local tree = { }
+ for i=1,#list do
+ local t = tree
+ for c in gmatch(list[i],".") do
+ if not t[c] then
+ t[c] = { }
+ end
+ t = t[c]
+ end
+ end
+ return make(tree)
+end
+
+-- inspect ( lpeg.utfchartabletopattern {
+-- utfchar(0x00A0), -- nbsp
+-- utfchar(0x2000), -- enquad
+-- utfchar(0x2001), -- emquad
+-- utfchar(0x2002), -- enspace
+-- utfchar(0x2003), -- emspace
+-- utfchar(0x2004), -- threeperemspace
+-- utfchar(0x2005), -- fourperemspace
+-- utfchar(0x2006), -- sixperemspace
+-- utfchar(0x2007), -- figurespace
+-- utfchar(0x2008), -- punctuationspace
+-- utfchar(0x2009), -- breakablethinspace
+-- utfchar(0x200A), -- hairspace
+-- utfchar(0x200B), -- zerowidthspace
+-- utfchar(0x202F), -- narrownobreakspace
+-- utfchar(0x205F), -- math thinspace
+-- } )
+
end -- of closure
@@ -1831,6 +2056,11 @@ function io.readstring(f,n,m)
return str
end
+--
+
+if not io.i_limiter then function io.i_limiter() end end -- dummy so we can test safely
+if not io.o_limiter then function io.o_limiter() end end -- dummy so we can test safely
+
end -- of closure
@@ -2061,7 +2291,7 @@ if not modules then modules = { } end modules ['l-os'] = {
-- maybe build io.flush in os.execute
local os = os
-local find, format, gsub, upper = string.find, string.format, string.gsub, string.upper
+local find, format, gsub, upper, gmatch = string.find, string.format, string.gsub, string.upper, string.gmatch
local concat = table.concat
local random, ceil = math.random, math.ceil
local rawget, rawset, type, getmetatable, setmetatable, tonumber = rawget, rawset, type, getmetatable, setmetatable, tonumber
@@ -2167,9 +2397,9 @@ os.type = os.type or (io.pathseparator == ";" and "windows") or "unix"
os.name = os.name or (os.type == "windows" and "mswin" ) or "linux"
if os.type == "windows" then
- os.libsuffix, os.binsuffix = 'dll', 'exe'
+ os.libsuffix, os.binsuffix, os.binsuffixes = 'dll', 'exe', { 'exe', 'cmd', 'bat' }
else
- os.libsuffix, os.binsuffix = 'so', ''
+ os.libsuffix, os.binsuffix, os.binsuffixes = 'so', '', { '' }
end
function os.launch(str)
@@ -2400,6 +2630,39 @@ function os.timezone(delta)
end
end
+local memory = { }
+
+local function which(filename)
+ local fullname = memory[filename]
+ if fullname == nil then
+ local suffix = file.suffix(filename)
+ local suffixes = suffix == "" and os.binsuffixes or { suffix }
+ for directory in gmatch(os.getenv("PATH"),"[^" .. io.pathseparator .."]+") do
+ local df = file.join(directory,filename)
+ for i=1,#suffixes do
+ local dfs = file.addsuffix(df,suffixes[i])
+ if io.exists(dfs) then
+ fullname = dfs
+ break
+ end
+ end
+ end
+ if not fullname then
+ fullname = false
+ end
+ memory[filename] = fullname
+ end
+ return fullname
+end
+
+os.which = which
+os.where = which
+
+-- print(os.which("inkscape.exe"))
+-- print(os.which("inkscape"))
+-- print(os.which("gs.exe"))
+-- print(os.which("ps2pdf"))
+
end -- of closure
@@ -2499,10 +2762,10 @@ end
local trick_1 = char(1)
local trick_2 = "^" .. trick_1 .. "/+"
-function file.join(...)
+function file.join(...) -- rather dirty
local lst = { ... }
local a, b = lst[1], lst[2]
- if a == "" then
+    if not a or a == "" then -- the "not a" check is new
lst[1] = trick_1
elseif b and find(a,"^/+$") and find(b,"^/") then
lst[1] = ""
@@ -2523,6 +2786,15 @@ function file.join(...)
end
+-- We should be able to use:
+--
+-- function file.is_writable(name)
+-- local a = attributes(name) or attributes(dirname(name,"."))
+-- return a and sub(a.permissions,2,2) == "w"
+-- end
+--
+-- But after some testing Taco and I came up with:
+
function file.is_writable(name)
if lfs.isdir(name) then
name = name .. "/m_t_x_t_e_s_t.tmp"
@@ -2532,12 +2804,17 @@ function file.is_writable(name)
os.remove(name)
return true
end
- else
- local existing = lfs.isfile(name)
- f = io.open(name,"ab")
+ elseif lfs.isfile(name) then
+ local f = io.open(name,"ab")
if f then
f:close()
- if not existing then os.remove(name) end
+ return true
+ end
+ else
+ local f = io.open(name,"ab")
+ if f then
+ f:close()
+ os.remove(name)
return true
end
end
@@ -2594,7 +2871,7 @@ function file.collapsepath(str,anchor)
if element == '.' then
-- do nothing
elseif element == '..' then
- local n = i -1
+ local n = i - 1
while n > 0 do
local element = oldelements[n]
if element ~= '..' and element ~= '.' then
@@ -2690,10 +2967,42 @@ local path = C(((1-slash)^0 * slash)^0)
local suffix = period * C(P(1-period)^0 * P(-1))
local base = C((1-suffix)^0)
-local pattern = (drive + Cc("")) * (path + Cc("")) * (base + Cc("")) * (suffix + Cc(""))
+drive = drive + Cc("")
+path = path + Cc("")
+base = base + Cc("")
+suffix = suffix + Cc("")
-function file.splitname(str) -- returns drive, path, base, suffix
- return lpegmatch(pattern,str)
+local pattern_a = drive * path * base * suffix
+local pattern_b = path * base * suffix
+local pattern_c = C(drive * path) * C(base * suffix)
+
+function file.splitname(str,splitdrive)
+ if splitdrive then
+ return lpegmatch(pattern_a,str) -- returns drive, path, base, suffix
+ else
+ return lpegmatch(pattern_b,str) -- returns path, base, suffix
+ end
+end
+
+function file.nametotable(str,splitdrive) -- returns table
+ local path, drive, subpath, name, base, suffix = lpegmatch(pattern_c,str)
+ if splitdrive then
+ return {
+ path = path,
+ drive = drive,
+ subpath = subpath,
+ name = name,
+ base = base,
+ suffix = suffix,
+ }
+ else
+ return {
+ path = path,
+ name = name,
+ base = base,
+ suffix = suffix,
+ }
+ end
end
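+
+-- for example (without drive splitting, roughly):
+--
+-- print(file.splitname("a/b/c.lua"))     -- "a/b/"  "c"  "lua"
+-- inspect(file.nametotable("a/b/c.lua")) -- { path = "a/b/", name = "c.lua", base = "c", suffix = "lua" }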
-- function test(t) for k, v in next, t do print(v, "=>", file.splitname(v)) end end
@@ -2828,12 +3137,19 @@ local nothing = Cc("")
local escaped = (plus / " ") + (percent * C(hexdigit * hexdigit) / tochar)
-- we assume schemes with more than 1 character (in order to avoid problems with windows disks)
+-- we also assume that when we have a scheme, we also have an authority
+
+local schemestr = Cs((escaped+(1-colon-slash-qmark-hash))^2)
+local authoritystr = Cs((escaped+(1- slash-qmark-hash))^0)
+local pathstr = Cs((escaped+(1- qmark-hash))^0)
+local querystr = Cs((escaped+(1- hash))^0)
+local fragmentstr = Cs((escaped+(1- endofstring))^0)
-local scheme = Cs((escaped+(1-colon-slash-qmark-hash))^2) * colon * slash * slash + nothing
-local authority = Cs((escaped+(1- slash-qmark-hash))^0) + nothing
-local path = slash * Cs((escaped+(1- qmark-hash))^0) + nothing
-local query = qmark * Cs((escaped+(1- hash))^0) + nothing
-local fragment = hash * Cs((escaped+(1- endofstring))^0) + nothing
+local scheme = schemestr * colon + nothing
+local authority = slash * slash * authoritystr + nothing
+local path = slash * pathstr + nothing
+local query = qmark * querystr + nothing
+local fragment = hash * fragmentstr + nothing
local validurl = scheme * authority * path * query * fragment
local parser = Ct(validurl)
@@ -2854,11 +3170,14 @@ local function split(str)
return (type(str) == "string" and lpegmatch(parser,str)) or str
end
+local isscheme = schemestr * colon * slash * slash -- this test also assumes authority
+
local function hasscheme(str)
- local scheme = lpegmatch(scheme,str) -- at least one character
- return scheme and scheme ~= ""
+ local scheme = lpegmatch(isscheme,str) -- at least one character
+ return scheme ~= "" and scheme or false
end
+
-- todo: cache them
local rootletter = R("az","AZ")
@@ -3007,8 +3326,6 @@ local attributes = lfs.attributes
local walkdir = lfs.dir
local isdir = lfs.isdir
local isfile = lfs.isfile
-local mkdir = lfs.mkdir
-local chdir = lfs.chdir
local currentdir = lfs.currentdir
-- handy
@@ -3056,13 +3373,13 @@ local function collectpattern(path,patt,recurse,result)
local ok, scanner
result = result or { }
if path == "/" then
- ok, scanner = xpcall(function() return walkdir(path..".") end, function() end) -- kepler safe
+ ok, scanner, first = xpcall(function() return walkdir(path..".") end, function() end) -- kepler safe
else
- ok, scanner = xpcall(function() return walkdir(path) end, function() end) -- kepler safe
+ ok, scanner, first = xpcall(function() return walkdir(path) end, function() end) -- kepler safe
end
if ok and type(scanner) == "function" then
if not find(path,"/$") then path = path .. '/' end
- for name in scanner do
+ for name in scanner, first do
local full = path .. name
local attr = attributes(full)
local mode = attr.mode
@@ -3245,7 +3562,7 @@ if onwindows then
pth = pth .. "/" .. s
end
if make_indeed and not isdir(pth) then
- mkdir(pth)
+ lfs.mkdir(pth)
end
end
return pth, (isdir(pth) == true)
@@ -3277,7 +3594,7 @@ else
pth = pth .. "/" .. s
end
if make_indeed and not first and not isdir(pth) then
- mkdir(pth)
+ lfs.mkdir(pth)
end
end
else
@@ -3285,7 +3602,7 @@ else
for s in gmatch(str,"[^/]+") do
pth = pth .. "/" .. s
if make_indeed and not isdir(pth) then
- mkdir(pth)
+ lfs.mkdir(pth)
end
end
end
@@ -3313,10 +3630,10 @@ if onwindows then
first, last = match(str,"^([a-zA-Z]:)(.*)$")
if first and not find(last,"^/") then
local d = currentdir()
- if chdir(first) then
+ if lfs.chdir(first) then
first = dir.current()
end
- chdir(d)
+ lfs.chdir(d)
end
end
if not first then
@@ -3381,10 +3698,6 @@ local type, tonumber = type, tonumber
boolean = boolean or { }
local boolean = boolean
--- function boolean.tonumber(b)
--- return b and 1 or 0 -- test and test and return or return
--- end
-
function boolean.tonumber(b)
if b then return 1 else return 0 end -- test and return or return
end
@@ -3545,7 +3858,7 @@ local function utf16_to_utf8_be(t)
if right then
local now = 256*left + right
if more > 0 then
- now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000
+ now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000 -- the 0x10000 smells wrong
more = 0
r = r + 1
result[r] = utfchar(now)
@@ -3573,7 +3886,7 @@ local function utf16_to_utf8_le(t)
if right then
local now = 256*right + left
if more > 0 then
- now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000
+ now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000 -- the 0x10000 smells wrong
more = 0
r = r + 1
result[r] = utfchar(now)
@@ -3590,14 +3903,14 @@ local function utf16_to_utf8_le(t)
return t
end
-local function utf32_to_utf8_be(str)
+local function utf32_to_utf8_be(t)
if type(t) == "string" then
- t = utfsplitlines(str)
+ t = utfsplitlines(t)
end
local result = { } -- we reuse result
for i=1,#t do
local r, more = 0, -1
- for a,b in bytepairs(str) do
+ for a,b in bytepairs(t[i]) do
if a and b then
if more < 0 then
more = 256*256*256*a + 256*256*b
@@ -3612,17 +3925,17 @@ local function utf32_to_utf8_be(str)
end
t[i] = concat(result,"",1,r)
end
- return result
+ return t
end
-local function utf32_to_utf8_le(str)
+local function utf32_to_utf8_le(t)
if type(t) == "string" then
- t = utfsplitlines(str)
+ t = utfsplitlines(t)
end
local result = { } -- we reuse result
for i=1,#t do
local r, more = 0, -1
- for a,b in bytepairs(str) do
+ for a,b in bytepairs(t[i]) do
if a and b then
if more < 0 then
more = 256*b + a
@@ -3637,7 +3950,7 @@ local function utf32_to_utf8_le(str)
end
t[i] = concat(result,"",1,r)
end
- return result
+ return t
end
unicode.utf32_to_utf8_be = utf32_to_utf8_be
@@ -3706,15 +4019,67 @@ end
local lpegmatch = lpeg.match
-local utftype = lpeg.patterns.utftype
+local patterns = lpeg.patterns
+local utftype = patterns.utftype
function unicode.filetype(data)
return data and lpegmatch(utftype,data) or "unknown"
end
+local toentities = lpeg.Cs (
+ (
+ patterns.utf8one
+ + (
+ patterns.utf8two
+ + patterns.utf8three
+ + patterns.utf8four
+ ) / function(s) local b = utfbyte(s) if b < 127 then return s else return format("&#%X;",b) end end
+ )^0
+)
+
+patterns.toentities = toentities
+
+function utf.toentities(str)
+ return lpegmatch(toentities,str)
+end
+
+local P, C, R, Cs = lpeg.P, lpeg.C, lpeg.R, lpeg.Cs
+
+local one = P(1)
+local two = C(1) * C(1)
+local four = C(R(utfchar(0xD8),utfchar(0xFF))) * C(1) * C(1) * C(1)
+
+local pattern = P("\254\255") * Cs( (
+ four / function(a,b,c,d)
+                                local ab = 0x100 * byte(a) + byte(b)
+                                local cd = 0x100 * byte(c) + byte(d)
+ return utfchar((ab-0xD800)*0x400 + (cd-0xDC00) + 0x10000)
+ end
+ + two / function(a,b)
+ return utfchar(byte(a)*256 + byte(b))
+ end
+ + one
+ )^1 )
+ + P("\255\254") * Cs( (
+ four / function(b,a,d,c)
+                                local ab = 0x100 * byte(a) + byte(b)
+                                local cd = 0x100 * byte(c) + byte(d)
+ return utfchar((ab-0xD800)*0x400 + (cd-0xDC00) + 0x10000)
+ end
+ + two / function(b,a)
+ return utfchar(byte(a)*256 + byte(b))
+ end
+ + one
+ )^1 )
+
+function string.toutf(s)
+ return lpegmatch(pattern,s) or s -- todo: utf32
+end
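+
+-- a quick check (utf-16 big endian input with a bom):
+--
+-- print(string.toutf("\254\255\000A\000B")) -- "AB"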
+
+
end -- of closure
do -- create closure to overcome 200 locals limit
@@ -3750,8 +4115,8 @@ if not math.sind then
end
if not math.odd then
- function math.odd (n) return n % 2 == 0 end
- function math.even(n) return n % 2 ~= 0 end
+ function math.odd (n) return n % 2 ~= 0 end
+ function math.even(n) return n % 2 == 0 end
end
@@ -3771,9 +4136,10 @@ utilities = utilities or {}
utilities.tables = utilities.tables or { }
local tables = utilities.tables
-local format, gmatch = string.format, string.gmatch
+local format, gmatch, rep = string.format, string.gmatch, string.rep
local concat, insert, remove = table.concat, table.insert, table.remove
local setmetatable, getmetatable, tonumber, tostring = setmetatable, getmetatable, tonumber, tostring
+local type, next, rawset, tonumber = type, next, rawset, tonumber
function tables.definetable(target) -- defines undefined tables
local composed, t, n = nil, { }, 0
@@ -3789,14 +4155,31 @@ function tables.definetable(target) -- defines undefined tables
return concat(t,"\n")
end
-function tables.accesstable(target)
- local t = _G
+function tables.accesstable(target,root)
+ local t = root or _G
for name in gmatch(target,"([^%.]+)") do
t = t[name]
+ if not t then
+ return
+ end
end
return t
end
+function tables.migratetable(target,v,root)
+ local t = root or _G
+ local names = string.split(target,".")
+ for i=1,#names-1 do
+ local name = names[i]
+ t[name] = t[name] or { }
+ t = t[name]
+ if not t then
+ return
+ end
+ end
+ t[names[#names]] = v
+end
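+
+-- for example:
+--
+-- local t = { a = { b = { c = 42 } } }
+-- print(tables.accesstable("a.b.c",t)) -- 42
+-- tables.migratetable("a.b.d",99,t)    -- now t.a.b.d == 99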
+
function tables.removevalue(t,value) -- todo: n
if value then
for i=1,#t do
@@ -3840,13 +4223,19 @@ end
-- experimental
-local function toxml(t,d,result)
+local function toxml(t,d,result,step)
for k, v in table.sortedpairs(t) do
if type(v) == "table" then
- result[#result+1] = format("%s<%s>",d,k)
- toxml(v,d.." ",result)
- result[#result+1] = format("%s</%s>",d,k)
- elseif tonumber(k) then
+ if type(k) == "number" then
+ result[#result+1] = format("%s<entry n='%s'>",d,k)
+ toxml(v,d..step,result,step)
+ result[#result+1] = format("%s</entry>",d,k)
+ else
+ result[#result+1] = format("%s<%s>",d,k)
+ toxml(v,d..step,result,step)
+ result[#result+1] = format("%s</%s>",d,k)
+ end
+ elseif type(k) == "number" then
result[#result+1] = format("%s<entry n='%s'>%s</entry>",d,k,v,k)
else
result[#result+1] = format("%s<%s>%s</%s>",d,k,tostring(v),k)
@@ -3854,17 +4243,56 @@ local function toxml(t,d,result)
end
end
-function table.toxml(t,name,nobanner)
+function table.toxml(t,name,nobanner,indent,spaces)
local noroot = name == false
local result = (nobanner or noroot) and { } or { "<?xml version='1.0' standalone='yes' ?>" }
+ local indent = rep(" ",indent or 0)
+ local spaces = rep(" ",spaces or 1)
if noroot then
- toxml( t, "", result)
+        toxml( t, indent, result, spaces)
else
- toxml( { [name or "root"] = t }, "", result)
+ toxml( { [name or "root"] = t }, indent, result, spaces)
end
return concat(result,"\n")
end
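+
+-- for example, table.toxml( { b = "x" }, "a") gives (roughly):
+--
+-- <?xml version='1.0' standalone='yes' ?>
+-- <a>
+--  <b>x</b>
+-- </a>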
+-- also experimental
+
+-- encapsulate(table,utilities.tables)
+-- encapsulate(table,utilities.tables,true)
+-- encapsulate(table,true)
+
+function tables.encapsulate(core,capsule,protect)
+ if type(capsule) ~= "table" then
+ protect = true
+ capsule = { }
+ end
+ for key, value in next, core do
+ if capsule[key] then
+            print(format("\ninvalid inheritance '%s' in '%s'",key,tostring(core)))
+ os.exit()
+ else
+ capsule[key] = value
+ end
+ end
+ if protect then
+ for key, value in next, core do
+ core[key] = nil
+ end
+ setmetatable(core, {
+ __index = capsule,
+ __newindex = function(t,key,value)
+ if capsule[key] then
+ print(format("\ninvalid overload '%s' in '%s'",key,tostring(core)))
+ os.exit()
+ else
+ rawset(t,key,value)
+ end
+ end
+ } )
+ end
+end
+
end -- of closure
@@ -3886,8 +4314,8 @@ local storage = utilities.storage
function storage.mark(t)
if not t then
- texio.write_nl("fatal error: storage '%s' cannot be marked",t)
- os.exit()
+ texio.write_nl("fatal error: storage cannot be marked")
+ return -- os.exit()
end
local m = getmetatable(t)
if not m then
@@ -3916,8 +4344,8 @@ end
function storage.checked(t)
if not t then
- texio.write_nl("fatal error: storage '%s' has not been allocated",t)
- os.exit()
+ texio.write_nl("fatal error: storage has not been allocated")
+ return -- os.exit()
end
return t
end
@@ -3946,13 +4374,13 @@ end
-- table namespace ?
-local function f_empty () return "" end -- t,k
-local function f_self (t,k) t[k] = k return k end
-local function f_ignore() end -- t,k,v
+local function f_empty () return "" end -- t,k
+local function f_self (t,k) t[k] = k return k end
+local function f_ignore() end -- t,k,v
-local t_empty = { __index = empty }
-local t_self = { __index = self }
-local t_ignore = { __newindex = ignore }
+local t_empty = { __index = f_empty }
+local t_self = { __index = f_self }
+local t_ignore = { __newindex = f_ignore }
function table.setmetatableindex(t,f)
local m = getmetatable(t)
@@ -3973,6 +4401,7 @@ function table.setmetatableindex(t,f)
setmetatable(t,{ __index = f })
end
end
+ return t
end
function table.setmetatablenewindex(t,f)
@@ -3990,6 +4419,7 @@ function table.setmetatablenewindex(t,f)
setmetatable(t,{ __newindex = f })
end
end
+ return t
end
function table.setmetatablecall(t,f)
@@ -3999,6 +4429,7 @@ function table.setmetatablecall(t,f)
else
setmetatable(t,{ __call = f })
end
+ return t
end
function table.setmetatablekey(t,key,value)
@@ -4008,6 +4439,7 @@ function table.setmetatablekey(t,key,value)
setmetatable(t,m)
end
m[key] = value
+ return t
end
function table.getmetatablekey(t,key,value)
@@ -4176,14 +4608,29 @@ utilities = utilities or {}
utilities.lua = utilities.lua or { }
utilities.report = logs and logs.reporter("system") or print
-function utilities.lua.compile(luafile,lucfile,cleanup,strip) -- defaults: cleanup=false strip=true
+local function stupidcompile(luafile,lucfile)
+ local data = io.loaddata(luafile)
+ if data and data ~= "" then
+ data = string.dump(data)
+ if data and data ~= "" then
+ io.savedata(lucfile,data)
+ end
+ end
+end
+
+function utilities.lua.compile(luafile,lucfile,cleanup,strip,fallback) -- defaults: cleanup=false strip=true
utilities.report("lua: compiling %s into %s",luafile,lucfile)
os.remove(lucfile)
local command = "-o " .. string.quoted(lucfile) .. " " .. string.quoted(luafile)
if strip ~= false then
command = "-s " .. command
end
- local done = os.spawn("texluac " .. command) == 0 or os.spawn("luac " .. command) == 0
+ local done = os.spawn("texluac " .. command) == 0 -- or os.spawn("luac " .. command) == 0
+ if not done and fallback then
+ utilities.report("lua: dumping %s into %s (unstripped)",luafile,lucfile)
+ stupidcompile(luafile,lucfile) -- maybe use the stripper we have elsewhere
+        cleanup = false -- better to see how much worse it is
+ end
if done and cleanup == true and lfs.isfile(lucfile) and lfs.isfile(luafile) then
utilities.report("lua: removing %s",luafile)
os.remove(luafile)
@@ -4210,9 +4657,9 @@ if not modules then modules = { } end modules ['util-prs'] = {
license = "see context related readme files"
}
-local P, R, V, C, Ct, Carg = lpeg.P, lpeg.R, lpeg.V, lpeg.C, lpeg.Ct, lpeg.Carg
+local P, R, V, C, Ct, Cs, Carg = lpeg.P, lpeg.R, lpeg.V, lpeg.C, lpeg.Ct, lpeg.Cs, lpeg.Carg
local lpegmatch = lpeg.match
-local concat, format, gmatch = table.concat, string.format, string.gmatch
+local concat, format, gmatch, find = table.concat, string.format, string.gmatch, string.find
local tostring, type, next = tostring, type, next
utilities = utilities or {}
@@ -4240,8 +4687,12 @@ local rbrace = P("}")
local nobrace = 1 - (lbrace+rbrace)
local nested = P { lbrace * (nobrace + V(1))^0 * rbrace }
local spaces = space^0
+local argument = Cs((lbrace/"") * ((nobrace + nested)^0) * (rbrace/""))
+local content = (1-P(-1))^0
-lpeg.patterns.nested = nested
+lpeg.patterns.nested = nested -- no capture
+lpeg.patterns.argument = argument -- argument after e.g. =
+lpeg.patterns.content = content -- rest after e.g =
local value = P(lbrace * C((nobrace + nested)^0) * rbrace) + C((nested + (1-comma))^0)
@@ -4322,9 +4773,15 @@ parsers.patterns.settings_to_array = pattern
-- we could use a weak table as cache
-function parsers.settings_to_array(str)
+function parsers.settings_to_array(str,strict)
if not str or str == "" then
return { }
+ elseif strict then
+ if find(str,"{") then
+ return lpegmatch(pattern,str)
+ else
+ return { str }
+ end
else
return lpegmatch(pattern,str)
end
@@ -4562,61 +5019,55 @@ local names = { }
-- one
local function hook()
- local f = getinfo(2,"f").func
- local n = getinfo(2,"Sn")
--- if n.what == "C" and n.name then print (n.namewhat .. ': ' .. n.name) end
+ local f = getinfo(2) -- "nS"
if f then
- local cf = counters[f]
- if cf == nil then
- counters[f] = 1
- names[f] = n
- else
- counters[f] = cf + 1
- end
- end
-end
-
-local function getname(func)
- local n = names[func]
- if n then
- if n.what == "C" then
- return n.name or '<anonymous>'
+ local n = "unknown"
+ if f.what == "C" then
+ n = f.name or '<anonymous>'
+ if not names[n] then
+ names[n] = format("%42s",n)
+ end
else
-- source short_src linedefined what name namewhat nups func
- local name = n.name or n.namewhat or n.what
- if not name or name == "" then name = "?" end
- return format("%s : %s : %s", n.short_src or "unknown source", n.linedefined or "--", name)
+ n = f.name or f.namewhat or f.what
+ if not n or n == "" then
+ n = "?"
+ end
+ if not names[n] then
+ names[n] = format("%42s : % 5i : %s",n,f.linedefined or 0,f.short_src or "unknown source")
+ end
end
- else
- return "unknown"
+ counters[n] = (counters[n] or 0) + 1
end
end
-function debugger.showstats(printer,threshold)
+function debugger.showstats(printer,threshold) -- hm, something has changed, rubbish now
printer = printer or texio.write or print
threshold = threshold or 0
local total, grandtotal, functions = 0, 0, 0
- printer("\n") -- ugly but ok
- -- table.sort(counters)
- for func, count in next, counters do
- if count > threshold then
- local name = getname(func)
- if not find(name,"for generator") then
- printer(format("%8i %s", count, name))
- total = total + count
- end
+ local dataset = { }
+ for name, count in next, counters do
+ dataset[#dataset+1] = { name, count }
+ end
+ table.sort(dataset,function(a,b) return a[2] == b[2] and b[1] > a[1] or a[2] > b[2] end)
+ for i=1,#dataset do
+ local d = dataset[i]
+ local name = d[1]
+ local count = d[2]
+ if count > threshold and not find(name,"for generator") then -- move up
+ printer(format("%8i %s\n", count, names[name]))
+ total = total + count
end
grandtotal = grandtotal + count
functions = functions + 1
end
- printer(format("functions: %s, total: %s, grand total: %s, threshold: %s\n", functions, total, grandtotal, threshold))
+ printer("\n")
+ printer(format("functions : % 10i\n", functions))
+ printer(format("total : % 10i\n", total))
+ printer(format("grand total: % 10i\n", grandtotal))
+ printer(format("threshold : % 10i\n", threshold))
end
--- two
-
-
--- rest
-
function debugger.savestats(filename,threshold)
local f = io.open(filename,'w')
if f then
@@ -4638,6 +5089,7 @@ end
local is_node = node and node.is_node
+local is_lpeg = lpeg and lpeg.type
function inspect(i) -- global function
local ti = type(i)
@@ -4645,11 +5097,30 @@ function inspect(i) -- global function
table.print(i,"table")
elseif is_node and is_node(i) then
table.print(nodes.astable(i),tostring(i))
+ elseif is_lpeg and is_lpeg(i) then
+ lpeg.print(i)
else
print(tostring(i))
end
end
+-- from the lua book:
+
+function traceback()
+ local level = 1
+ while true do
+ local info = debug.getinfo(level, "Sl")
+ if not info then
+ break
+ elseif info.what == "C" then
+ print(format("%3i : C function",level))
+ else
+ print(format("%3i : [%s]:%d",level,info.short_src,info.currentline))
+ end
+ level = level + 1
+ end
+end
+
end -- of closure
@@ -4668,7 +5139,7 @@ if not modules then modules = { } end modules ['trac-inf'] = {
-- get warnings about assignments. This is more efficient than using rawset
-- and rawget.
-local format = string.format
+local format, lower = string.format, string.lower
local clock = os.gettimeofday or os.clock -- should go in environment
local write_nl = texio.write_nl
@@ -4770,10 +5241,10 @@ function statistics.show(reporter)
-- this code will move
local register = statistics.register
register("luatex banner", function()
- return string.lower(status.banner)
+ return lower(status.banner)
end)
register("control sequences", function()
- return format("%s of %s", status.cs_count, status.hash_size+status.hash_extra)
+ return format("%s of %s + %s", status.cs_count, status.hash_size,status.hash_extra)
end)
register("callbacks", function()
local total, indirect = status.callbacks or 0, status.indirect_callbacks or 0
@@ -4847,7 +5318,7 @@ end
function commands.elapsedtime(name)
stoptiming(name or "whatever")
- tex.sprint(elapsedtime(name or "whatever"))
+ context(elapsedtime(name or "whatever"))
end
@@ -4931,6 +5402,10 @@ local function set(t,what,newvalue)
if type(what) ~= "table" then
return
end
+ if not done then -- catch ... why not set?
+ done = { }
+ t.done = done
+ end
for w, value in next, what do
if value == "" then
value = newvalue
@@ -5167,6 +5642,8 @@ end
if texconfig then
+ -- this happens too late in ini mode but that is no problem
+
local function set(k,v)
v = tonumber(v)
if v then
@@ -5237,7 +5714,7 @@ local report, subreport, status, settarget, setformats, settranslations
local direct, subdirect, writer, pushtarget, poptarget
-if tex and tex.jobname or tex.formatname then
+if tex and (tex.jobname or tex.formatname) then
local valueiskey = { __index = function(t,k) t[k] = k return k end } -- will be helper
@@ -5252,6 +5729,10 @@ if tex and tex.jobname or tex.formatname then
write_nl(target,...)
end
+ newline = function()
+ write_nl(target,"\n")
+ end
+
report = function(a,b,c,...)
if c then
write_nl(target,format("%-15s > %s\n",translations[a],format(formats[b],c,...)))
@@ -5357,6 +5838,10 @@ else
writer = write_nl
+ newline = function()
+ write_nl("\n")
+ end
+
report = function(a,b,c,...)
if c then
write_nl(format("%-15s | %s",a,format(b,c,...)))
@@ -5416,6 +5901,7 @@ logs.settranslations = settranslations
logs.direct = direct
logs.subdirect = subdirect
logs.writer = writer
+logs.newline = newline
-- installer
@@ -5585,21 +6071,58 @@ local real, user, sub
function logs.start_page_number()
real, user, sub = texcount.realpageno, texcount.userpageno, texcount.subpageno
+-- real, user, sub = 0, 0, 0
end
-function logs.stop_page_number()
- if real > 0 then
- if user > 0 then
- if sub > 0 then
- report_pages("flushing realpage %s, userpage %s, subpage %s",real,user,sub)
+local timing = false
+local starttime = nil
+local lasttime = nil
+
+trackers.register("pages.timing", function(v) -- only for myself (diagnostics)
+ starttime = os.clock()
+ timing = true
+end)
+
+function logs.stop_page_number() -- the first page can include the initialization so we omit it from the average
+ if timing then
+ local elapsed, average
+ local stoptime = os.clock()
+ if not lasttime or real < 2 then
+ elapsed = stoptime
+ average = stoptime
+ starttime = stoptime
+ else
+ elapsed = stoptime - lasttime
+ average = (stoptime - starttime) / (real - 1)
+ end
+ lasttime = stoptime
+ if real > 0 then
+ if user > 0 then
+ if sub > 0 then
+ report_pages("flushing realpage %s, userpage %s, subpage %s, time %0.04f / %0.04f",real,user,sub,elapsed,average)
+ else
+ report_pages("flushing realpage %s, userpage %s, time %0.04f / %0.04f",real,user,elapsed,average)
+ end
else
- report_pages("flushing realpage %s, userpage %s",real,user)
+ report_pages("flushing realpage %s, time %0.04f / %0.04f",real,elapsed,average)
end
else
- report_pages("flushing realpage %s",real)
+ report_pages("flushing page, time %0.04f / %0.04f",elapsed,average)
end
else
- report_pages("flushing page")
+ if real > 0 then
+ if user > 0 then
+ if sub > 0 then
+ report_pages("flushing realpage %s, userpage %s, subpage %s",real,user,sub)
+ else
+ report_pages("flushing realpage %s, userpage %s",real,user)
+ end
+ else
+ report_pages("flushing realpage %s",real)
+ end
+ else
+ report_pages("flushing page")
+ end
end
logs.flush()
end
@@ -5681,6 +6204,13 @@ local function reportbanner(t)
end
end
+local function reportversion(t)
+ local banner = t.banner
+ if banner then
+ t.report(banner)
+ end
+end
+
local function reporthelp(t,...)
local helpinfo = t.helpinfo
if type(helpinfo) == "string" then
@@ -5707,6 +6237,7 @@ function logs.application(t)
t.report = logs.reporter(t.name)
t.help = function(...) reportbanner(t) ; reporthelp(t,...) ; reportinfo(t) end
t.identify = function() reportbanner(t) end
+ t.version = function() reportversion(t) end
return t
end
@@ -5772,6 +6303,11 @@ else
end
end
+-- do we still need io.flush then?
+
+io.stdout:setvbuf('no')
+io.stderr:setvbuf('no')
+
end -- of closure
@@ -6277,6 +6813,10 @@ if not modules then modules = { } end modules ['lxml-tab'] = {
-- stripping spaces from e.g. cont-en.xml saves .2 sec runtime so it's not worth the
-- trouble
+-- todo: when serializing optionally remap named entities to hex (if known in char-ent.lua)
+-- maybe when letter -> utf, else name .. then we need an option to the serializer .. a bit
+-- of work so we delay this till we clean up
+
local trace_entities = false trackers.register("xml.entities", function(v) trace_entities = v end)
local report_xml = logs and logs.reporter("xml","core") or function(...) print(format(...)) end
@@ -6301,10 +6841,11 @@ xml = xml or { }
local xml = xml
+local utf = unicode.utf8
local concat, remove, insert = table.concat, table.remove, table.insert
local type, next, setmetatable, getmetatable, tonumber = type, next, setmetatable, getmetatable, tonumber
local format, lower, find, match, gsub = string.format, string.lower, string.find, string.match, string.gsub
-local utfchar = unicode.utf8.char
+local utfchar, utffind, utfgsub = utf.char, utf.find, utf.gsub
local lpegmatch = lpeg.match
local P, S, R, C, V, C, Cs = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.V, lpeg.C, lpeg.Cs
@@ -6417,9 +6958,22 @@ element.</p>
local nsremap, resolvens = xml.xmlns, xml.resolvens
-local stack, top, dt, at, xmlns, errorstr, entities = { }, { }, { }, { }, { }, nil, { }
-local strip, cleanup, utfize, resolve, resolve_predefined, unify_predefined = false, false, false, false, false, false
-local dcache, hcache, acache = { }, { }, { }
+local stack = { }
+local top = { }
+local dt = { }
+local at = { }
+local xmlns = { }
+local errorstr = nil
+local entities = { }
+local strip = false
+local cleanup = false
+local utfize = false
+local resolve_predefined = false
+local unify_predefined = false
+
+local dcache = { }
+local hcache = { }
+local acache = { }
local mt = { }
@@ -6585,7 +7139,7 @@ local predefined_unified = {
[42] = "&quot;",
[47] = "&apos;",
[74] = "&lt;",
- [76] = "&gr;",
+ [76] = "&gt;",
}
local predefined_simplified = {
@@ -6596,6 +7150,58 @@ local predefined_simplified = {
[76] = ">", gt = ">",
}
+local nofprivates = 0xF0000 -- shared but seldom used
+
+local privates_u = { -- unescaped
+ [ [[&]] ] = "&amp;",
+ [ [["]] ] = "&quot;",
+ [ [[']] ] = "&apos;",
+ [ [[<]] ] = "&lt;",
+ [ [[>]] ] = "&gt;",
+}
+
+local privates_p = {
+}
+
+local privates_n = {
+ -- keeps track of defined ones
+}
+
+local function escaped(s)
+ if s == "" then
+ return ""
+ else -- if utffind(s,privates_u) then
+ return (utfgsub(s,".",privates_u))
+ -- else
+ -- return s
+ end
+end
+
+local function unescaped(s)
+ local p = privates_n[s]
+ if not p then
+ nofprivates = nofprivates + 1
+ p = utfchar(nofprivates)
+ privates_n[s] = p
+ s = "&" .. s .. ";" -- todo: use char-ent to map to hex
+ privates_u[p] = s
+ privates_p[p] = s
+ end
+ return p
+end
+
+local function unprivatized(s,resolve)
+ if s == "" then
+ return ""
+ else
+ return (utfgsub(s,".",privates_p))
+ end
+end
+
+xml.privatetoken = unescaped
+xml.unprivatized = unprivatized
+xml.privatecodes = privates_n
+
local function handle_hex_entity(str)
local h = hcache[str]
if not h then
@@ -6637,7 +7243,7 @@ local function handle_dec_entity(str)
if not n then
report_xml("utfize, ignoring dec entity &#%s;",str)
elseif trace_entities then
- report_xml("utfize, converting dec entity &#%s; into %s",str,h)
+ report_xml("utfize, converting dec entity &#%s; into %s",str,d)
end
else
if trace_entities then
@@ -6658,34 +7264,44 @@ local function handle_any_entity(str)
if not a then
a = resolve_predefined and predefined_simplified[str]
if a then
- -- one of the predefined
- elseif type(resolve) == "function" then
- a = resolve(str) or entities[str]
- else
- a = entities[str]
- end
- if a then
if trace_entities then
- report_xml("resolved entity &%s; -> %s (internal)",str,a)
+ report_xml("resolved entity &%s; -> %s (predefined)",str,a)
end
- a = lpegmatch(parsedentity,a) or a
else
- local unknown_any_entity = placeholders.unknown_any_entity
- if unknown_any_entity then
- a = unknown_any_entity(str) or ""
+ if type(resolve) == "function" then
+ a = resolve(str) or entities[str]
+ else
+ a = entities[str]
end
if a then
+ if type(a) == "function" then
+ if trace_entities then
+ report_xml("expanding entity &%s; (function)",str)
+ end
+ a = a(str) or ""
+ end
+ a = lpegmatch(parsedentity,a) or a -- for nested
if trace_entities then
- report_xml("resolved entity &%s; -> %s (external)",str,a)
+ report_xml("resolved entity &%s; -> %s (internal)",str,a)
end
else
- if trace_entities then
- report_xml("keeping entity &%s;",str)
+ local unknown_any_entity = placeholders.unknown_any_entity
+ if unknown_any_entity then
+ a = unknown_any_entity(str) or ""
end
- if str == "" then
- a = "&error;"
+ if a then
+ if trace_entities then
+ report_xml("resolved entity &%s; -> %s (external)",str,a)
+ end
else
- a = "&" .. str .. ";"
+ if trace_entities then
+ report_xml("keeping entity &%s;",str)
+ end
+ if str == "" then
+ a = "&error;"
+ else
+ a = "&" .. str .. ";"
+ end
end
end
end
@@ -6700,18 +7316,25 @@ local function handle_any_entity(str)
else
local a = acache[str]
if not a then
- if trace_entities then
- report_xml("found entity &%s;",str)
- end
a = resolve_predefined and predefined_simplified[str]
if a then
-- one of the predefined
acache[str] = a
+ if trace_entities then
+ report_xml("entity &%s; becomes %s",str,tostring(a))
+ end
elseif str == "" then
+ if trace_entities then
+ report_xml("invalid entity &%s;",str)
+ end
a = "&error;"
acache[str] = a
else
- a = "&" .. str .. ";"
+ if trace_entities then
+ report_xml("entity &%s; is made private",str)
+ end
+ -- a = "&" .. str .. ";"
+ a = unescaped(str)
acache[str] = a
end
end
@@ -6761,7 +7384,7 @@ local value = (squote * Cs((entity + (1 - squote))^0) * squote) + (dq
 local endofattributes = slash * close + close -- recovery of flaky html
local whatever = space * name * optionalspace * equal
-local wrongvalue = C(P(1-whatever-close)^1 + P(1-close)^1) / attribute_value_error
+----- wrongvalue = C(P(1-whatever-close)^1 + P(1-close)^1) / attribute_value_error
----- wrongvalue = C(P(1-whatever-endofattributes)^1 + P(1-endofattributes)^1) / attribute_value_error
----- wrongvalue = C(P(1-space-endofattributes)^1) / attribute_value_error
local wrongvalue = Cs(P(entity + (1-space-endofattributes))^1) / attribute_value_error
@@ -6796,6 +7419,8 @@ local function normalentity(k,v ) entities[k] = v end
local function systementity(k,v,n) entities[k] = v end
local function publicentity(k,v,n) entities[k] = v end
+-- todo: separate dtd parser
+
local begindoctype = open * P("!DOCTYPE")
local enddoctype = close
local beginset = P("[")
@@ -6803,17 +7428,22 @@ local endset = P("]")
local doctypename = C((1-somespace-close)^0)
local elementdoctype = optionalspace * P("<!ELEMENT") * (1-close)^0 * close
+local basiccomment = begincomment * ((1 - endcomment)^0) * endcomment
+
local normalentitytype = (doctypename * somespace * value)/normalentity
local publicentitytype = (doctypename * somespace * P("PUBLIC") * somespace * value)/publicentity
local systementitytype = (doctypename * somespace * P("SYSTEM") * somespace * value * somespace * P("NDATA") * somespace * doctypename)/systementity
local entitydoctype = optionalspace * P("<!ENTITY") * somespace * (systementitytype + publicentitytype + normalentitytype) * optionalspace * close
-local doctypeset = beginset * optionalspace * P(elementdoctype + entitydoctype + space)^0 * optionalspace * endset
+-- we accept comments in doctypes
+
+local doctypeset = beginset * optionalspace * P(elementdoctype + entitydoctype + basiccomment + space)^0 * optionalspace * endset
local definitiondoctype= doctypename * somespace * doctypeset
local publicdoctype = doctypename * somespace * P("PUBLIC") * somespace * value * somespace * value * somespace * doctypeset
local systemdoctype = doctypename * somespace * P("SYSTEM") * somespace * value * somespace * doctypeset
local simpledoctype = (1-close)^1 -- * balanced^0
local somedoctype = C((somespace * (publicdoctype + systemdoctype + definitiondoctype + simpledoctype) * optionalspace)^0)
+local somedoctype = C((somespace * (publicdoctype + systemdoctype + definitiondoctype + simpledoctype) * optionalspace)^0)
local instruction = (spacing * begininstruction * someinstruction * endinstruction) / function(...) add_special("@pi@",...) end
local comment = (spacing * begincomment * somecomment * endcomment ) / function(...) add_special("@cm@",...) end
@@ -6845,17 +7475,30 @@ local grammar_unparsed_text = P { "preamble",
children = unparsedtext + V("parent") + emptyelement + comment + cdata + instruction,
}
--- maybe we will add settinsg to result as well
+-- maybe we will add settings to result as well
-local function xmlconvert(data, settings)
- settings = settings or { } -- no_root strip_cm_and_dt given_entities parent_root error_handler
- strip = settings.strip_cm_and_dt
- utfize = settings.utfize_entities
- resolve = settings.resolve_entities
+local function _xmlconvert_(data, settings)
+ settings = settings or { } -- no_root strip_cm_and_dt given_entities parent_root error_handler
+ --
+ strip = settings.strip_cm_and_dt
+ utfize = settings.utfize_entities
+ resolve = settings.resolve_entities
resolve_predefined = settings.resolve_predefined_entities -- in case we have escaped entities
- unify_predefined = settings.unify_predefined_entities -- &#038; -> &amp;
- cleanup = settings.text_cleanup
- stack, top, at, xmlns, errorstr, entities = { }, { }, { }, { }, nil, settings.entities or { }
+ unify_predefined = settings.unify_predefined_entities -- &#038; -> &amp;
+ cleanup = settings.text_cleanup
+ entities = settings.entities or { }
+ --
+ if utfize == nil then
+ settings.utfize_entities = true
+ utfize = true
+ end
+ if resolve_predefined == nil then
+ settings.resolve_predefined_entities = true
+ resolve_predefined = true
+ end
+ --
+ --
+ stack, top, at, xmlns, errorstr = { }, { }, { }, { }, nil
acache, hcache, dcache = { }, { }, { } -- not stored
reported_attribute_errors = { }
if settings.parent_root then
@@ -6893,7 +7536,7 @@ local function xmlconvert(data, settings)
else
errorhandler = errorhandler or xml.errorhandler
if errorhandler then
- xml.errorhandler("load",errorstr)
+ xml.errorhandler(format("load error: %s",errorstr))
end
end
else
@@ -6907,7 +7550,7 @@ local function xmlconvert(data, settings)
local v = rdt[k]
if type(v) == "table" and not v.special then -- always table -)
result.ri = k -- rootindex
-v.__p__ = result -- new, experiment, else we cannot go back to settings, we need to test this !
+ v.__p__ = result -- new, experiment, else we cannot go back to settings, we need to test this !
break
end
end
@@ -6915,16 +7558,42 @@ v.__p__ = result -- new, experiment, else we cannot go back to settings, we nee
if errorstr and errorstr ~= "" then
result.error = true
end
+ result.statistics = {
+ entities = {
+ decimals = dcache,
+ hexadecimals = hcache,
+ names = acache,
+ }
+ }
+ strip, utfize, resolve, resolve_predefined = nil, nil, nil, nil
+ unify_predefined, cleanup, entities = nil, nil, nil
+ stack, top, at, xmlns, errorstr = nil, nil, nil, nil, nil
+ acache, hcache, dcache = nil, nil, nil
+ reported_attribute_errors, mt, errorhandler = nil, nil, nil
return result
end
+-- Because we can have a crash (stack issues) with faulty xml, we wrap this one
+-- in a protector:
+
+function xmlconvert(data,settings)
+ local ok, result = pcall(function() return _xmlconvert_(data,settings) end)
+ if ok then
+ return result
+ else
+ return _xmlconvert_("")
+ end
+end
+
xml.convert = xmlconvert
-function xml.inheritedconvert(data,xmldata)
+function xml.inheritedconvert(data,xmldata) -- xmldata is parent
local settings = xmldata.settings
- settings.parent_root = xmldata -- to be tested
+ if settings then
+ settings.parent_root = xmldata -- to be tested
+ end
-- settings.no_root = true
- local xc = xmlconvert(data,settings)
+ local xc = xmlconvert(data,settings) -- hm, we might need to locate settings
-- xc.settings = nil
-- xc.entities = nil
-- xc.special = nil
@@ -6953,7 +7622,7 @@ function xml.is_valid(root)
return root and not root.error
end
-xml.errorhandler = report
+xml.errorhandler = report_xml
--[[ldx--
<p>We cannot load an <l n='lpeg'/> from a filehandle so we need to load
@@ -7032,18 +7701,15 @@ alternative.</p>
function xml.checkbom(root) -- can be made faster
if root.ri then
- local dt, found = root.dt, false
+ local dt = root.dt
for k=1,#dt do
local v = dt[k]
if type(v) == "table" and v.special and v.tg == "@pi@" and find(v.dt[1],"xml.*version=") then
- found = true
- break
+ return
end
end
- if not found then
- insert(dt, 1, { special=true, ns="", tg="@pi@", dt = { "xml version='1.0' standalone='yes'"} } )
- insert(dt, 2, "\n" )
- end
+ insert(dt, 1, { special=true, ns="", tg="@pi@", dt = { "xml version='1.0' standalone='yes'"} } )
+ insert(dt, 2, "\n" )
end
end
@@ -7054,14 +7720,14 @@ and then handle the lot.</p>
-- new experimental reorganized serialize
-local function verbose_element(e,handlers)
+local function verbose_element(e,handlers) -- options
local handle = handlers.handle
local serialize = handlers.serialize
local ens, etg, eat, edt, ern = e.ns, e.tg, e.at, e.dt, e.rn
local ats = eat and next(eat) and { }
if ats then
for k,v in next, eat do
- ats[#ats+1] = format('%s=%q',k,v)
+ ats[#ats+1] = format('%s=%q',k,escaped(v))
end
end
if ern and trace_entities and ern ~= ens then
@@ -7077,7 +7743,7 @@ local function verbose_element(e,handlers)
for i=1,#edt do
local e = edt[i]
if type(e) == "string" then
- handle(e)
+ handle(escaped(e))
else
serialize(e,handlers)
end
@@ -7098,11 +7764,11 @@ local function verbose_element(e,handlers)
handle("<",etg,">")
end
for i=1,#edt do
- local ei = edt[i]
- if type(ei) == "string" then
- handle(ei)
+ local e = edt[i]
+ if type(e) == "string" then
+ handle(escaped(e)) -- option: hexify escaped entities
else
- serialize(ei,handlers)
+ serialize(e,handlers)
end
end
handle("</",etg,">")
@@ -7137,7 +7803,7 @@ local function verbose_root(e,handlers)
end
local function verbose_text(e,handlers)
- handlers.handle(e)
+ handlers.handle(escaped(e))
end
local function verbose_document(e,handlers)
@@ -7265,20 +7931,33 @@ local result
local xmlfilehandler = newhandlers {
name = "file",
- initialize = function(name) result = io.open(name,"wb") return result end,
- finalize = function() result:close() return true end,
- handle = function(...) result:write(...) end,
+ initialize = function(name)
+ result = io.open(name,"wb")
+ return result
+ end,
+ finalize = function()
+ result:close()
+ return true
+ end,
+ handle = function(...)
+ result:write(...)
+ end,
}
-- no checking on writeability here but not faster either
--
-- local xmlfilehandler = newhandlers {
--- initialize = function(name) io.output(name,"wb") return true end,
--- finalize = function() io.close() return true end,
+-- initialize = function(name)
+-- io.output(name,"wb")
+-- return true
+-- end,
+-- finalize = function()
+-- io.close()
+-- return true
+-- end,
-- handle = io.write,
-- }
-
function xml.save(root,name)
serialize(root,xmlfilehandler,name)
end
@@ -7287,28 +7966,34 @@ local result
local xmlstringhandler = newhandlers {
name = "string",
- initialize = function() result = { } return result end,
- finalize = function() return concat(result) end,
- handle = function(...) result[#result+1] = concat { ... } end
+ initialize = function()
+ result = { }
+ return result
+ end,
+ finalize = function()
+ return concat(result)
+ end,
+ handle = function(...)
+ result[#result+1] = concat { ... }
+ end,
}
local function xmltostring(root) -- 25% overhead due to collecting
- if root then
- if type(root) == 'string' then
- return root
- else -- if next(root) then -- next is faster than type (and >0 test)
- return serialize(root,xmlstringhandler) or ""
- end
+ if not root then
+ return ""
+ elseif type(root) == 'string' then
+ return root
+ else -- if next(root) then -- next is faster than type (and >0 test)
+ return serialize(root,xmlstringhandler) or ""
end
- return ""
end
-local function xmltext(root) -- inline
+local function __tostring(root) -- inline
return (root and xmltostring(root)) or ""
end
initialize_mt = function(root) -- redefinition
- mt = { __tostring = xmltext, __index = root }
+ mt = { __tostring = __tostring, __index = root }
end
xml.defaulthandlers = handlers
@@ -7428,7 +8113,7 @@ xml.tocdata(e,"error")
--ldx]]--
function xml.tocdata(e,wrapper)
- local whatever = xmltostring(e.dt)
+ local whatever = type(e) == "table" and xmltostring(e.dt) or e or ""
if wrapper then
whatever = format("<%s>%s</%s>",wrapper,whatever,wrapper)
end
@@ -7620,8 +8305,8 @@ apply_axis['child'] = function(list)
c = c + 1
collected[c] = dk
dk.ni = k -- refresh
- en = en + 1
- dk.ei = en
+ en = en + 1
+ dk.ei = en
end
end
ll.en = en
@@ -7649,6 +8334,7 @@ local function collect(list,collected,c)
end
return c
end
+
apply_axis['descendant'] = function(list)
local collected, c = { }, 0
for l=1,#list do
@@ -7940,6 +8626,12 @@ local lp_or = P("|") / " or "
local lp_and = P("&") / " and "
local lp_builtin = P (
+ P("text") / "(ll.dt[1] or '')" + -- fragile
+ P("content") / "ll.dt" +
+ -- P("name") / "(ll.ns~='' and ll.ns..':'..ll.tg)" +
+ P("name") / "((ll.ns~='' and ll.ns..':'..ll.tg) or ll.tg)" +
+ P("tag") / "ll.tg" +
+ P("position") / "l" + -- is element in finalizer
P("firstindex") / "1" +
P("lastindex") / "(#ll.__p__.dt or 1)" +
P("firstelement") / "1" +
@@ -7947,15 +8639,11 @@ local lp_builtin = P (
P("first") / "1" +
P("last") / "#list" +
P("rootposition") / "order" +
- P("position") / "l" + -- is element in finalizer
P("order") / "order" +
P("element") / "(ll.ei or 1)" +
P("index") / "(ll.ni or 1)" +
P("match") / "(ll.mi or 1)" +
- P("text") / "(ll.dt[1] or '')" +
- -- P("name") / "(ll.ns~='' and ll.ns..':'..ll.tg)" +
- P("name") / "((ll.ns~='' and ll.ns..':'..ll.tg) or ll.tg)" +
- P("tag") / "ll.tg" +
+ -- P("namespace") / "ll.ns" +
P("ns") / "ll.ns"
) * ((spaces * P("(") * spaces * P(")"))/"")
@@ -7979,11 +8667,11 @@ local lp_function = C(R("az","AZ","__")^1) * P("(") / function(t) -- todo: bett
end
end
-local lparent = lpeg.P("(")
-local rparent = lpeg.P(")")
+local lparent = P("(")
+local rparent = P(")")
local noparent = 1 - (lparent+rparent)
-local nested = lpeg.P{lparent * (noparent + lpeg.V(1))^0 * rparent}
-local value = lpeg.P(lparent * lpeg.C((noparent + nested)^0) * rparent) -- lpeg.P{"("*C(((1-S("()"))+V(1))^0)*")"}
+local nested = P{lparent * (noparent + V(1))^0 * rparent}
+local value = P(lparent * C((noparent + nested)^0) * rparent) -- P{"("*C(((1-S("()"))+V(1))^0)*")"}
local lp_child = Cc("expr.child(ll,'") * R("az","AZ","--","__")^1 * Cc("')")
local lp_number = S("+-") * R("09")^1
@@ -8135,7 +8823,10 @@ local special_1 = P("*") * Cc(register_auto_descendant) * Cc(register_all_nodes
local special_2 = P("/") * Cc(register_auto_self)
local special_3 = P("") * Cc(register_auto_self)
-local pathparser = Ct { "patterns", -- can be made a bit faster by moving pattern outside
+local no_nextcolon = P(-1) + #(1-P(":")) -- newer lpeg needs the P(-1)
+local no_nextlparent = P(-1) + #(1-P("(")) -- newer lpeg needs the P(-1)
+
+local pathparser = Ct { "patterns", -- can be made a bit faster by moving some patterns outside
patterns = spaces * V("protocol") * spaces * (
( V("special") * spaces * P(-1) ) +
@@ -8164,10 +8855,8 @@ local pathparser = Ct { "patterns", -- can be made a bit faster by moving patter
shortcuts = V("shortcuts_a") * (spaces * "/" * spaces * V("shortcuts_a"))^0,
s_descendant_or_self = (P("***/") + P("/")) * Cc(register_descendant_or_self), --- *** is a bonus
- -- s_descendant_or_self = P("/") * Cc(register_descendant_or_self),
s_descendant = P("**") * Cc(register_descendant),
- s_child = P("*") * #(1-P(":")) * Cc(register_child ),
--- s_child = P("*") * #(P("/")+P(-1)) * Cc(register_child ),
+ s_child = P("*") * no_nextcolon * Cc(register_child ),
s_parent = P("..") * Cc(register_parent ),
s_self = P("." ) * Cc(register_self ),
s_root = P("^^") * Cc(register_root ),
@@ -8194,13 +8883,13 @@ local pathparser = Ct { "patterns", -- can be made a bit faster by moving patter
expressions = expression / register_expression,
letters = R("az")^1,
- name = (1-lpeg.S("/[]()|:*!"))^1,
+ name = (1-S("/[]()|:*!"))^1, -- make inline
negate = P("!") * Cc(false),
nodefunction = V("negate") + P("not") * Cc(false) + Cc(true),
nodetest = V("negate") + Cc(true),
nodename = (V("negate") + Cc(true)) * spaces * ((V("wildnodename") * P(":") * V("wildnodename")) + (Cc(false) * V("wildnodename"))),
- wildnodename = (C(V("name")) + P("*") * Cc(false)) * #(1-P("(")),
+ wildnodename = (C(V("name")) + P("*") * Cc(false)) * no_nextlparent,
nodeset = spaces * Ct(V("nodename") * (spaces * P("|") * spaces * V("nodename"))^0) * spaces,
finalizer = (Cb("protocol") * P("/")^-1 * C(V("name")) * arguments * P(-1)) / register_finalizer,
@@ -8364,7 +9053,7 @@ local function profiled_apply(list,parsed,nofparsed,order)
elseif kind == "expression" then
collected = apply_expression(collected,pi.evaluator,order)
elseif kind == "finalizer" then
- collected = pi.finalizer(collected)
+ collected = pi.finalizer(collected) -- no check on # here
p.matched = p.matched + 1
p.finalized = p.finalized + 1
return collected
@@ -8497,9 +9186,9 @@ end
expressions.child = function(e,pattern)
return applylpath(e,pattern) -- todo: cache
end
-expressions.count = function(e,pattern)
+expressions.count = function(e,pattern) -- what if pattern == empty or nil
local collected = applylpath(e,pattern) -- todo: cache
- return (collected and #collected) or 0
+ return pattern and (collected and #collected) or 0
end
-- external
@@ -8508,7 +9197,7 @@ expressions.oneof = function(s,...) -- slow
local t = {...} for i=1,#t do if s == t[i] then return true end end return false
end
expressions.error = function(str)
- xml.errorhandler("unknown function in lpath expression",tostring(str or "?"))
+ xml.errorhandler(format("unknown function in lpath expression: %s",tostring(str or "?")))
return false
end
expressions.undefined = function(s)
@@ -8534,6 +9223,23 @@ expressions.lower = lower
expressions.number = tonumber
expressions.boolean = toboolean
+function expressions.contains(str,pattern)
+ local t = type(str)
+ if t == "string" then
+ if find(str,pattern) then
+ return true
+ end
+ elseif t == "table" then
+ for i=1,#str do
+ local d = str[i]
+ if type(d) == "string" and find(d,pattern) then
+ return true
+ end
+ end
+ end
+ return false
+end
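A hedged sketch of how this new predicate helper might be called from an lpath expression; the sample document, the use of xml.collected, and the exact predicate syntax are illustrative assumptions, not taken from this changeset:

    local root = xml.convert("<doc><a>alpha</a><a>beta</a></doc>")
    -- contains(text(),'alp') presumably compiles to expressions.contains((ll.dt[1] or ''),'alp')
    for e in xml.collected(root,"/doc/a[contains(text(),'alp')]") do
        print(xml.text(e)) -- expected: alpha
    end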
+
-- user interface
local function traverse(root,pattern,handle)
@@ -8856,10 +9562,12 @@ local xml = xml
local xmlconvert, xmlcopy, xmlname = xml.convert, xml.copy, xml.name
local xmlinheritedconvert = xml.inheritedconvert
local xmlapplylpath = xml.applylpath
+local xmlfilter = xml.filter
local type, setmetatable, getmetatable = type, setmetatable, getmetatable
-local insert, remove, fastcopy = table.insert, table.remove, table.fastcopy
-local gmatch, gsub = string.gmatch, string.gsub
+local insert, remove, fastcopy, concat = table.insert, table.remove, table.fastcopy, table.concat
+local gmatch, gsub, format = string.gmatch, string.gsub, string.format
+local utfbyte = utf.byte
local function report(what,pattern,c,e)
report_xml("%s element '%s' (root: '%s', position: %s, index: %s, pattern: %s)",what,xmlname(e),xmlname(e.__p__),c,e.ni,pattern)
@@ -9004,7 +9712,7 @@ local function xmltoelement(whatever,root)
end
local element
if type(whatever) == "string" then
- element = xmlinheritedconvert(whatever,root)
+ element = xmlinheritedconvert(whatever,root) -- beware, not really a root
else
element = whatever -- we assume a table
end
@@ -9031,18 +9739,30 @@ local function copiedelement(element,newparent)
end
function xml.delete(root,pattern)
- local collected = xmlapplylpath(root,pattern)
- if collected then
- for c=1,#collected do
- local e = collected[c]
- local p = e.__p__
- if p then
- if trace_manipulations then
- report('deleting',pattern,c,e)
+ if not pattern or pattern == "" then
+ local p = root.__p__
+ if p then
+ if trace_manipulations then
+ report('deleting',"--",c,root)
+ end
+ local d = p.dt
+ remove(d,root.ni)
+ redo_ni(d) -- can be made faster and inlined
+ end
+ else
+ local collected = xmlapplylpath(root,pattern)
+ if collected then
+ for c=1,#collected do
+ local e = collected[c]
+ local p = e.__p__
+ if p then
+ if trace_manipulations then
+ report('deleting',pattern,c,e)
+ end
+ local d = p.dt
+ remove(d,e.ni)
+ redo_ni(d) -- can be made faster and inlined
end
- local d = p.dt
- remove(d,e.ni)
- redo_ni(d) -- can be made faster and inlined
end
end
end
@@ -9105,32 +9825,39 @@ end
local function inject_element(root,pattern,whatever,prepend)
local element = root and xmltoelement(whatever,root)
local collected = element and xmlapplylpath(root,pattern)
- if collected then
- for c=1,#collected do
- local e = collected[c]
- local r = e.__p__
- local d, k, rri = r.dt, e.ni, r.ri
- local edt = (rri and d[rri].dt) or (d and d[k] and d[k].dt)
- if edt then
- local be, af
- local cp = copiedelement(element,e)
- if prepend then
- be, af = cp, edt
- else
- be, af = edt, cp
- end
- local bn = #be
- for i=1,#af do
- bn = bn + 1
- be[bn] = af[i]
- end
- if rri then
- r.dt[rri].dt = be
- else
- d[k].dt = be
- end
- redo_ni(d)
+ local function inject_e(e)
+ local r = e.__p__
+ local d, k, rri = r.dt, e.ni, r.ri
+ local edt = (rri and d[rri].dt) or (d and d[k] and d[k].dt)
+ if edt then
+ local be, af
+ local cp = copiedelement(element,e)
+ if prepend then
+ be, af = cp, edt
+ else
+ be, af = edt, cp
end
+ local bn = #be
+ for i=1,#af do
+ bn = bn + 1
+ be[bn] = af[i]
+ end
+ if rri then
+ r.dt[rri].dt = be
+ else
+ d[k].dt = be
+ end
+ redo_ni(d)
+ end
+ end
+ if not collected then
+ -- nothing
+ elseif collected.tg then
+ -- first or so
+ inject_e(collected)
+ else
+ for c=1,#collected do
+ inject_e(collected[c])
end
end
end
@@ -9138,16 +9865,23 @@ end
local function insert_element(root,pattern,whatever,before) -- todo: element as function
local element = root and xmltoelement(whatever,root)
local collected = element and xmlapplylpath(root,pattern)
- if collected then
+ local function insert_e(e)
+ local r = e.__p__
+ local d, k = r.dt, e.ni
+ if not before then
+ k = k + 1
+ end
+ insert(d,k,copiedelement(element,r))
+ redo_ni(d)
+ end
+ if not collected then
+ -- nothing
+ elseif collected.tg then
+ -- first or so
+ insert_e(collected)
+ else
for c=1,#collected do
- local e = collected[c]
- local r = e.__p__
- local d, k = r.dt, e.ni
- if not before then
- k = k + 1
- end
- insert(d,k,copiedelement(element,r))
- redo_ni(d)
+ insert_e(collected[c])
end
end
end
@@ -9346,6 +10080,75 @@ function xml.remapname(root, pattern, newtg, newns, newrn)
end
--[[ldx--
+<p>Helper (for q2p).</p>
+--ldx]]--
+
+function xml.cdatatotext(e)
+ local dt = e.dt
+ if #dt == 1 then
+ local first = dt[1]
+ if first.tg == "@cd@" then
+ e.dt = first.dt
+ end
+ else
+ -- maybe option
+ end
+end
+
+xml.builtinentities = table.tohash { "amp", "quot", "apos", "lt", "gt" } -- used often so share
+
+local entities = characters and characters.entities or nil
+local builtinentities = xml.builtinentities
+
+function xml.addentitiesdoctype(root,option) -- we could also have a 'resolve' i.e. inline hex
+ if not entities then
+ require("char-ent")
+ entities = characters.entities
+ end
+ if entities and root and root.tg == "@rt@" and root.statistics then
+ local list = { }
+ local hexify = option == "hexadecimal"
+ for k, v in table.sortedhash(root.statistics.entities.names) do
+ if not builtinentities[k] then
+ local e = entities[k]
+ if not e then
+ e = format("[%s]",k)
+ elseif hexify then
+ e = format("&#%05X;",utfbyte(k))
+ end
+ list[#list+1] = format(" <!ENTITY %s %q >",k,e)
+ end
+ end
+ local dt = root.dt
+ local n = dt[1].tg == "@pi@" and 2 or 1
+ if #list > 0 then
+ insert(dt, n, { "\n" })
+ insert(dt, n, {
+ tg = "@dt@", -- beware, doctype is unparsed
+ dt = { format("Something [\n%s\n] ",concat(list)) },
+ ns = "",
+ special = true,
+ })
+ insert(dt, n, { "\n\n" })
+ else
+ -- insert(dt, n, { table.serialize(root.statistics) })
+ end
+ end
+end
+
+-- local str = [==[
+-- <?xml version='1.0' standalone='yes' ?>
+-- <root>
+-- <a>test &nbsp; test &#123; test</a>
+-- <b><![CDATA[oeps]]></b>
+-- </root>
+-- ]==]
+--
+-- local x = xml.convert(str)
+-- xml.addentitiesdoctype(x,"hexadecimal")
+-- print(x)
+
+--[[ldx--
<p>Here are a few synonyms.</p>
--ldx]]--
@@ -9378,6 +10181,53 @@ xml.remap_tag = xml.remaptag obsolete.remap_tag
xml.remap_name = xml.remapname obsolete.remap_name = xml.remapname
xml.remap_namespace = xml.remapnamespace obsolete.remap_namespace = xml.remapnamespace
+-- new (probably ok)
+
+function xml.cdata(e)
+ if e then
+ local dt = e.dt
+ if dt and #dt == 1 then
+ local first = dt[1]
+ return first.tg == "@cd@" and first.dt[1] or ""
+ end
+ end
+ return ""
+end
+
+function xml.finalizers.xml.cdata(collected)
+ if collected then
+ local e = collected[1]
+ if e then
+ local dt = e.dt
+ if dt and #dt == 1 then
+ local first = dt[1]
+ return first.tg == "@cd@" and first.dt[1] or ""
+ end
+ end
+ end
+ return ""
+end
+
+function xml.insertcomment(e,str,n) -- also insertcdata
+ table.insert(e.dt,n or 1,{
+ tg = "@cm@",
+ ns = "",
+ special = true,
+ at = { },
+ dt = { str },
+ })
+end
+
+function xml.setcdata(e,str) -- also setcomment
+ e.dt = { {
+ tg = "@cd@",
+ ns = "",
+ special = true,
+ at = { },
+ dt = { str },
+ } }
+end
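A minimal usage sketch for these new helpers; the sample tree, xml.first and the pattern are illustrative assumptions, not part of the commit:

    local root = xml.convert("<doc><b>old</b></doc>")
    local b    = xml.first(root,"/doc/b")
    xml.setcdata(b,"verbatim <stuff> & more")    -- b.dt now holds a single @cd@ node
    print(xml.cdata(b))                          -- presumably prints: verbatim <stuff> & more
    xml.insertcomment(root,"added by a script")  -- prepends an @cm@ node to the root's dt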
+
end -- of closure
@@ -9391,15 +10241,17 @@ if not modules then modules = { } end modules ['lxml-xml'] = {
license = "see context related readme files"
}
-local concat = string.concat
+local concat = table.concat
+local find = string.find
local xml = xml
-local finalizers = xml.finalizers.xml
-local xmlfilter = xml.filter -- we could inline this one for speed
-local xmltostring = xml.tostring
-local xmlserialize = xml.serialize
-local xmlcollected = xml.collected
+local finalizers = xml.finalizers.xml
+local xmlfilter = xml.filter -- we could inline this one for speed
+local xmltostring = xml.tostring
+local xmlserialize = xml.serialize
+local xmlcollected = xml.collected
+local xmlnewhandlers = xml.newhandlers
local function first(collected) -- wrong ?
return collected and collected[1]
@@ -9413,6 +10265,21 @@ local function all(collected)
return collected
end
+-- local function reverse(collected)
+-- if collected then
+-- local nc = #collected
+-- if nc > 0 then
+-- local reversed, r = { }, 0
+-- for c=nc,1,-1 do
+-- r = r + 1
+-- reversed[r] = collected[c]
+-- end
+-- return reversed
+-- else
+-- return collected
+-- end
+-- end
+-- end
local reverse = table.reversed
@@ -9429,34 +10296,37 @@ local function att(id,name)
end
local function count(collected)
- return (collected and #collected) or 0
+ return collected and #collected or 0
end
local function position(collected,n)
- if collected then
- n = tonumber(n) or 0
- if n < 0 then
- return collected[#collected + n + 1]
- elseif n > 0 then
- return collected[n]
- else
- return collected[1].mi or 0
- end
+ if not collected then
+ return 0
+ end
+ local nc = #collected
+ if nc == 0 then
+ return 0
+ end
+ n = tonumber(n) or 0
+ if n < 0 then
+ return collected[nc + n + 1]
+ elseif n > 0 then
+ return collected[n]
+ else
+ return collected[1].mi or 0
end
end
local function match(collected)
- return (collected and collected[1].mi) or 0 -- match
+ return collected and #collected > 0 and collected[1].mi or 0 -- match
end
local function index(collected)
- if collected then
- return collected[1].ni
- end
+ return collected and #collected > 0 and collected[1].ni or 0 -- 0 is new
end
local function attributes(collected,arguments)
- if collected then
+ if collected and #collected > 0 then
local at = collected[1].at
if arguments then
return at[arguments]
@@ -9467,7 +10337,7 @@ local function attributes(collected,arguments)
end
local function chainattribute(collected,arguments) -- todo: optional levels
- if collected then
+ if collected and #collected > 0 then
local e = collected[1]
while e do
local at = e.at
@@ -9485,108 +10355,169 @@ local function chainattribute(collected,arguments) -- todo: optional levels
return ""
end
-local function raw(collected) -- hybrid
- if collected then
+local function raw(collected) -- hybrid (not much different from text so it might go)
+ if collected and #collected > 0 then
local e = collected[1] or collected
- return (e and xmlserialize(e)) or "" -- only first as we cannot concat function
+ return e and xmltostring(e) or "" -- only first as we cannot concat function
else
return ""
end
end
+--
+
+local xmltexthandler = xmlnewhandlers {
+ name = "string",
+ initialize = function()
+ result = { }
+ return result
+ end,
+ finalize = function()
+ return concat(result)
+ end,
+ handle = function(...)
+ result[#result+1] = concat { ... }
+ end,
+ escape = false,
+}
+
+local function xmltotext(root)
+ local dt = root.dt
+ if not dt then
+ return ""
+ end
+ local nt = #dt -- string or table
+ if nt == 0 then
+ return ""
+ elseif nt == 1 and type(dt[1]) == "string" then
+ return dt[1] -- no escaping of " ' < > &
+ else
+ return xmlserialize(root,xmltexthandler) or ""
+ end
+end
+
+--
+
local function text(collected) -- hybrid
- if collected then
- local e = collected[1] or collected
- return (e and xmltostring(e.dt)) or ""
+ if collected then -- no # test here !
+ local e = collected[1] or collected -- why fallback to element, how about cdata
+ return e and xmltotext(e) or ""
else
return ""
end
end
local function texts(collected)
- if collected then
- local t, n = { }, 0
- for c=1,#collected do
- local e = collected[c]
- if e and e.dt then
- n = n + 1
- t[n] = e.dt
- end
+ if not collected then
+ return { } -- why no nil
+ end
+ local nc = #collected
+ if nc == 0 then
+ return { } -- why no nil
+ end
+ local t, n = { }, 0
+ for c=1,nc do
+ local e = collected[c]
+ if e and e.dt then
+ n = n + 1
+ t[n] = e.dt
end
- return t
end
+ return t
end
local function tag(collected,n)
- if collected then
- local c
- if n == 0 or not n then
- c = collected[1]
- elseif n > 1 then
- c = collected[n]
- else
- c = collected[#collected-n+1]
- end
- return c and c.tg
+ if not collected then
+ return
+ end
+ local nc = #collected
+ if nc == 0 then
+ return
end
+ local c
+ if n == 0 or not n then
+ c = collected[1]
+ elseif n > 1 then
+ c = collected[n]
+ else
+ c = collected[nc-n+1]
+ end
+ return c and c.tg
end
local function name(collected,n)
- if collected then
- local c
- if n == 0 or not n then
- c = collected[1]
- elseif n > 1 then
- c = collected[n]
- else
- c = collected[#collected-n+1]
- end
- if c then
- if c.ns == "" then
- return c.tg
- else
- return c.ns .. ":" .. c.tg
- end
- end
+ if not collected then
+ return
+ end
+ local nc = #collected
+ if nc == 0 then
+ return
+ end
+ local c
+ if n == 0 or not n then
+ c = collected[1]
+ elseif n > 1 then
+ c = collected[n]
+ else
+ c = collected[nc-n+1]
+ end
+ if not c then
+ -- sorry
+ elseif c.ns == "" then
+ return c.tg
+ else
+ return c.ns .. ":" .. c.tg
end
end
local function tags(collected,nonamespace)
- if collected then
- local t, n = { }, 0
- for c=1,#collected do
- local e = collected[c]
- local ns, tg = e.ns, e.tg
- n = n + 1
- if nonamespace or ns == "" then
- t[n] = tg
- else
- t[n] = ns .. ":" .. tg
- end
+ if not collected then
+ return
+ end
+ local nc = #collected
+ if nc == 0 then
+ return
+ end
+ local t, n = { }, 0
+ for c=1,nc do
+ local e = collected[c]
+ local ns, tg = e.ns, e.tg
+ n = n + 1
+ if nonamespace or ns == "" then
+ t[n] = tg
+ else
+ t[n] = ns .. ":" .. tg
end
- return t
end
+ return t
end
-local function empty(collected)
- if collected then
- for c=1,#collected do
- local e = collected[c]
- if e then
- local edt = e.dt
- if edt then
- local n = #edt
- if n == 1 then
- local edk = edt[1]
- local typ = type(edk)
- if typ == "table" then
- return false
- elseif edk ~= "" then -- maybe an extra tester for spacing only
- return false
- end
- elseif n > 1 then
+local function empty(collected,spacesonly)
+ if not collected then
+ return true
+ end
+ local nc = #collected
+ if nc == 0 then
+ return true
+ end
+ for c=1,nc do
+ local e = collected[c]
+ if e then
+ local edt = e.dt
+ if edt then
+ local n = #edt
+ if n == 1 then
+ local edk = edt[1]
+ local typ = type(edk)
+ if typ == "table" then
+ return false
+ elseif edk ~= "" then
+ return false
+ elseif spacesonly and not find(edk,"%S") then
return false
end
+ elseif n > 1 then
+ return false
end
end
end
@@ -9641,14 +10572,14 @@ function xml.raw(id,pattern)
end
end
-function xml.text(id,pattern)
+function xml.text(id,pattern) -- brrr either content or element (when cdata)
if pattern then
-- return text(xmlfilter(id,pattern))
local collected = xmlfilter(id,pattern)
- return (collected and xmltostring(collected[1].dt)) or ""
+ return collected and #collected > 0 and xmltotext(collected[1]) or ""
elseif id then
-- return text(id)
- return xmltostring(id.dt) or ""
+ return xmltotext(id) or ""
else
return ""
end
@@ -9656,6 +10587,8 @@ end
xml.content = text
+--
+
function xml.position(id,pattern,n) -- element
return position(xmlfilter(id,pattern),n)
end
@@ -9664,8 +10597,8 @@ function xml.match(id,pattern) -- number
return match(xmlfilter(id,pattern))
end
-function xml.empty(id,pattern)
- return empty(xmlfilter(id,pattern))
+function xml.empty(id,pattern,spacesonly)
+ return empty(xmlfilter(id,pattern),spacesonly)
end
xml.all = xml.filter
@@ -9719,7 +10652,7 @@ if not modules then modules = { } end modules ['data-ini'] = {
license = "see context related readme files",
}
-local gsub, find, gmatch = string.gsub, string.find, string.gmatch
+local gsub, find, gmatch, char = string.gsub, string.find, string.gmatch, string.char
local concat = table.concat
local next, type = next, type
@@ -9781,7 +10714,7 @@ do
local homedir = osgetenv(ostype == "windows" and 'USERPROFILE' or 'HOME') or ''
if not homedir or homedir == "" then
- homedir = string.char(127) -- we need a value, later we wil trigger on it
+ homedir = char(127) -- we need a value, later we will trigger on it
end
homedir = file.collapsepath(homedir)
@@ -9954,7 +10887,7 @@ if not modules then modules = { } end modules ['data-exp'] = {
license = "see context related readme files",
}
-local format, find, gmatch, lower = string.format, string.find, string.gmatch, string.lower
+local format, find, gmatch, lower, char, sub = string.format, string.find, string.gmatch, string.lower, string.char, string.sub
local concat, sort = table.concat, table.sort
local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
local Ct, Cs, Cc, P, C, S = lpeg.Ct, lpeg.Cs, lpeg.Cc, lpeg.P, lpeg.C, lpeg.S
@@ -9974,19 +10907,6 @@ local resolvers = resolvers
-- all, when working on the main resolver code, I don't want to scroll
-- past this every time. See data-obs.lua for the gsub variant.
--- {a,b,c,d}
--- a,b,c/{p,q,r},d
--- a,b,c/{p,q,r}/d/{x,y,z}//
--- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
--- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
--- a{b,c}{d,e}f
--- {a,b,c,d}
--- {a,b,c/{p,q,r},d}
--- {a,b,c/{p,q,r}/d/{x,y,z}//}
--- {a,b,c/{p,q/{x,y,z}},d/{p,q,r}}
--- {a,b,c/{p,q/{x,y,z},w}v,d/{p,q,r}}
--- {$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,.local,}/web2c}
-
local function f_first(a,b)
local t, n = { }, 0
for s in gmatch(b,"[^,]+") do
@@ -10043,12 +10963,9 @@ local function splitpathexpr(str, newlist, validate) -- I couldn't resist lpeggi
str = lpegmatch(stripper_1,str)
if validate then
for s in gmatch(str,"[^,]+") do
- local restoreslashes = false
- if find(s, "//$") then restoreslashes = true end
s = validate(s)
if s then
n = n + 1 ; t[n] = s
- if restoreslashes and not find(s, "//$") then t[n] = t[n] .. '//' end
end
end
else
@@ -10081,41 +10998,56 @@ function resolvers.expandedpathfromlist(pathlist)
return newlist
end
+-- {a,b,c,d}
+-- a,b,c/{p,q,r},d
+-- a,b,c/{p,q,r}/d/{x,y,z}//
+-- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
+-- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
+-- a{b,c}{d,e}f
+-- {a,b,c,d}
+-- {a,b,c/{p,q,r},d}
+-- {a,b,c/{p,q,r}/d/{x,y,z}//}
+-- {a,b,c/{p,q/{x,y,z}},d/{p,q,r}}
+-- {a,b,c/{p,q/{x,y,z},w}v,d/{p,q,r}}
+-- {$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,.local,}/web2c}
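For orientation, a hedged sketch of how one of the expressions above is meant to expand; the call chain mirrors helpers defined in this file, and the exact output shape is an assumption (kpse-style brace expansion):

    local list = resolvers.expandedpathfromlist(resolvers.splitpath("a,b,c/{p,q,r},d"))
    -- list would be something like { "a", "b", "c/p", "c/q", "c/r", "d" }
    for i=1,#list do print(list[i]) end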
+
local cleanup = lpeg.replacer {
{ "!" , "" },
{ "\\" , "/" },
}
-local homedir
-
-function resolvers.cleanpath(str)
- if not homedir then
- homedir = lpegmatch(cleanup,environment.homedir or "")
- if homedir == string.char(127) or homedir == "" or not lfs.isdir(homedir) then
- if trace_expansions then
- report_expansions("no home dir set, ignoring dependent paths")
- end
- function resolvers.cleanpath(str)
- if find(str,"~") then
- return "" -- special case
- else
- return str and lpegmatch(cleanup,str)
- end
- end
- else
- cleanup = lpeg.replacer {
- { "!" , "" },
- { "\\" , "/" },
- { "~" , homedir },
- }
- function resolvers.cleanpath(str)
- return str and lpegmatch(cleanup,str)
+function resolvers.cleanpath(str) -- tricky, maybe only simple paths
+ local doslashes = (P("\\")/"/" + 1)^0
+ local donegation = (P("!") /"" )^0
+ local homedir = lpegmatch(Cs(donegation * doslashes),environment.homedir or "")
+ if homedir == "~" or homedir == "" or not lfs.isdir(homedir) then
+ if trace_expansions then
+ report_expansions("no home dir set, ignoring dependent paths")
+ end
+ function resolvers.cleanpath(str)
+ if not str or find(str,"~") then
+ return "" -- special case
+ else
+ return lpegmatch(cleanup,str)
end
end
+ else
+ local dohome = ((P("~")+P("$HOME"))/homedir)^0
+ local cleanup = Cs(donegation * dohome * doslashes)
+ function resolvers.cleanpath(str)
+ return str and lpegmatch(cleanup,str) or ""
+ end
end
return resolvers.cleanpath(str)
end
+-- print(resolvers.cleanpath(""))
+-- print(resolvers.cleanpath("!"))
+-- print(resolvers.cleanpath("~"))
+-- print(resolvers.cleanpath("~/test"))
+-- print(resolvers.cleanpath("!~/test"))
+-- print(resolvers.cleanpath("~/test~test"))
+
-- This one strips quotes and funny tokens.
local expandhome = P("~") / "$HOME" -- environment.homedir
@@ -10140,8 +11072,8 @@ end
local cache = { }
----- splitter = Ct(lpeg.splitat(S(ostype == "windows" and ";" or ":;"))) -- maybe add ,
-local splitter = Ct(lpeg.splitat(";")) -- as we move towards urls, prefixes and use tables we no longer do :
+----- splitter = lpeg.tsplitat(S(ostype == "windows" and ";" or ":;")) -- maybe add ,
+local splitter = lpeg.tsplitat(";") -- as we move towards urls, prefixes and use tables we no longer do :
local backslashswapper = lpeg.replacer("\\","/")
@@ -10201,13 +11133,22 @@ end
-local weird = P(".")^1 + lpeg.anywhere(S("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t"))
+-- a lot of this caching can be stripped away when we have ssd's everywhere
+--
+-- we could cache all the (sub)paths here if needed
local attributes, directory = lfs.attributes, lfs.dir
+local weird = P(".")^1 + lpeg.anywhere(S("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t"))
+local timer = { }
+local scanned = { }
+local nofscans = 0
+local scancache = { }
+
local function scan(files,spec,path,n,m,r)
- local full = (path == "" and spec) or (spec .. path .. '/')
- local dirs, nofdirs = { }, 0
+ local full = (path == "" and spec) or (spec .. path .. '/')
+ local dirs = { }
+ local nofdirs = 0
for name in directory(full) do
if not lpegmatch(weird,name) then
local mode = attributes(full..name,'mode')
@@ -10245,25 +11186,122 @@ local function scan(files,spec,path,n,m,r)
files, n, m, r = scan(files,spec,dirs[i],n,m,r)
end
end
+ scancache[sub(full,1,-2)] = files
return files, n, m, r
end
-function resolvers.scanfiles(path,branch)
+local fullcache = { }
+
+function resolvers.scanfiles(path,branch,usecache)
+ statistics.starttiming(timer)
+ local realpath = resolvers.resolve(path) -- no shortcut
+ if usecache then
+ local files = fullcache[realpath]
+ if files then
+ if trace_locating then
+ report_expansions("using caches scan of path '%s', branch '%s'",path,branch or path)
+ end
+ return files
+ end
+ end
if trace_locating then
- report_expansions("scanning path '%s', branch '%s'",path, branch or path)
+ report_expansions("scanning path '%s', branch '%s'",path,branch or path)
end
- local realpath = resolvers.resolve(path) -- no shortcut
local files, n, m, r = scan({ },realpath .. '/',"",0,0,0)
- files.__path__ = path -- can be selfautoparent:texmf-whatever
- files.__files__ = n
- files.__directories__ = m
- files.__remappings__ = r
+ files.__path__ = path -- can be selfautoparent:texmf-whatever
+ files.__files__ = n
+ files.__directories__ = m
+ files.__remappings__ = r
if trace_locating then
report_expansions("%s files found on %s directories with %s uppercase remappings",n,m,r)
end
+ if usecache then
+ scanned[#scanned+1] = realpath
+ fullcache[realpath] = files
+ end
+ nofscans = nofscans + 1
+ statistics.stoptiming(timer)
+ return files
+end
+
+local function simplescan(files,spec,path) -- first match only, no map and such
+ local full = (path == "" and spec) or (spec .. path .. '/')
+ local dirs = { }
+ local nofdirs = 0
+ for name in directory(full) do
+ if not lpegmatch(weird,name) then
+ local mode = attributes(full..name,'mode')
+ if mode == 'file' then
+ if not files[name] then
+ -- only first match
+ files[name] = path
+ end
+ elseif mode == 'directory' then
+ nofdirs = nofdirs + 1
+ if path ~= "" then
+ dirs[nofdirs] = path..'/'..name
+ else
+ dirs[nofdirs] = name
+ end
+ end
+ end
+ end
+ if nofdirs > 0 then
+ sort(dirs)
+ for i=1,nofdirs do
+ files = simplescan(files,spec,dirs[i])
+ end
+ end
+ return files
+end
+
+local simplecache = { }
+local nofsharedscans = 0
+
+function resolvers.simplescanfiles(path,branch,usecache)
+ statistics.starttiming(timer)
+ local realpath = resolvers.resolve(path) -- no shortcut
+ if usecache then
+ local files = simplecache[realpath]
+ if not files then
+ files = scancache[realpath]
+ if files then
+ nofsharedscans = nofsharedscans + 1
+ end
+ end
+ if files then
+ if trace_locating then
+ report_expansions("using caches scan of path '%s', branch '%s'",path,branch or path)
+ end
+ return files
+ end
+ end
+ if trace_locating then
+ report_expansions("scanning path '%s', branch '%s'",path,branch or path)
+ end
+ local files = simplescan({ },realpath .. '/',"")
+ if trace_locating then
+ report_expansions("%s files found",table.count(files))
+ end
+ if usecache then
+ scanned[#scanned+1] = realpath
+ simplecache[realpath] = files
+ end
+ nofscans = nofscans + 1
+ statistics.stoptiming(timer)
return files
end
+function resolvers.scandata()
+ table.sort(scanned)
+ return {
+ n = nofscans,
+ shared = nofsharedscans,
+ time = statistics.elapsedtime(timer),
+ paths = scanned,
+ }
+end
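A hedged sketch of the new caching path and the statistics hook; the directory is a placeholder, and the printed fields are taken from the code above:

    local files = resolvers.scanfiles("/some/texmf/tree",nil,true)   -- usecache = true, fills fullcache
    print(files.__files__, files.__directories__, files.__remappings__)
    local again = resolvers.scanfiles("/some/texmf/tree",nil,true)   -- second call is served from the cache
    local stats = resolvers.scandata()
    print(stats.n, stats.shared, stats.time)                         -- scans, shared scans, elapsed time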
+
end -- of closure
@@ -10361,12 +11399,12 @@ local relations = allocate { -- todo: handlers also here
mp = {
names = { "mp" },
variable = 'MPINPUTS',
- suffixes = { 'mp' },
+ suffixes = { 'mp', 'mpvi', 'mpiv', 'mpii' },
},
tex = {
names = { "tex" },
variable = 'TEXINPUTS',
- suffixes = { 'tex', "mkiv", "mkiv", "mkii" },
+ suffixes = { 'tex', "mkvi", "mkiv", "mkii" },
},
icc = {
names = { "icc", "icc profile", "icc profiles" },
@@ -10503,7 +11541,7 @@ end
resolvers.updaterelations() -- push this in the metatable -> newindex
local function simplified(t,k)
- return rawget(t,lower(gsub(k," ","")))
+ return k and rawget(t,lower(gsub(k," ",""))) or nil
end
setmetatableindex(formats, simplified)
@@ -10589,6 +11627,7 @@ luatools with a recache feature.</p>
--ldx]]--
local format, lower, gsub, concat = string.format, string.lower, string.gsub, table.concat
+local serialize, serializetofile = table.serialize, table.tofile
local mkdirs, isdir = dir.mkdirs, lfs.isdir
local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
@@ -10651,11 +11690,7 @@ local function identify()
end
elseif not writable and caches.force then
local cacheparent = file.dirname(cachepath)
- -- TH: added 'or true' for deeply buried non-existent caches.
- -- file.is_writable() is not really important here, since this
- -- branch is only checked for as-yet non-existent paths, and
- -- it guards against mkdirs() failing, anyway.
- if file.is_writable(cacheparent) or true then
+ if file.is_writable(cacheparent) and true then -- we go on anyway (needed for mojca's kind of paths)
if not caches.ask or io.ask(format("\nShould I create the cache path %s?",cachepath), "no", { "yes", "no" }) == "yes" then
mkdirs(cachepath)
if isdir(cachepath) and file.is_writable(cachepath) then
@@ -10746,7 +11781,7 @@ function caches.usedpaths()
end
function caches.configfiles()
- return table.concat(resolvers.instance.specification,";")
+ return concat(resolvers.instance.specification,";")
end
function caches.hashed(tree)
@@ -10870,9 +11905,9 @@ function caches.savedata(filepath,filename,data,raw)
end
data.cache_uuid = os.uuid()
if caches.direct then
- file.savedata(tmaname,table.serialize(data,true,saveoptions))
+ file.savedata(tmaname,serialize(data,true,saveoptions))
else
- table.tofile(tmaname,data,true,saveoptions)
+ serializetofile(tmaname,data,true,saveoptions)
end
utilities.lua.compile(tmaname,tmcname)
end
@@ -10939,7 +11974,7 @@ function caches.savecontent(cachename,dataname,content)
content = content,
uuid = os.uuid(),
}
- local ok = io.savedata(luaname,table.serialize(data,true))
+ local ok = io.savedata(luaname,serialize(data,true))
if ok then
if trace_locating then
report_resolvers("category '%s', cachename '%s' saved in '%s'",dataname,cachename,luaname)
@@ -11112,7 +12147,7 @@ if not modules then modules = { } end modules ['data-res'] = {
-- instance but for practical purposes we now avoid this and use a
-- instance variable. We always have one instance active (sort of global).
--- todo: cache:/// home:///
+-- todo: cache:/// home:/// selfautoparent:/// (sometime end 2012)
local format, gsub, find, lower, upper, match, gmatch = string.format, string.gsub, string.find, string.lower, string.upper, string.match, string.gmatch
local concat, insert, sortedkeys = table.concat, table.insert, table.sortedkeys
@@ -11155,7 +12190,58 @@ resolvers.criticalvars = allocate { "SELFAUTOLOC", "SELFAUTODIR", "SELFAUTOPARE
resolvers.luacnfname = 'texmfcnf.lua'
resolvers.luacnfstate = "unknown"
-resolvers.luacnfspec = '{selfautodir:,selfautoparent:};{selfautodir:,selfautoparent:}{/share,}/texmf{-local,};{selfautodir:,selfautoparent:}{/share,}/texmf{-local,}/web2c'
+-- The web2c tex binaries as well as kpse have built in paths for the configuration
+-- files and there can be a depressing truckload of them. This is actually the weak
+-- spot of a distribution. So we don't want:
+--
+-- resolvers.luacnfspec = '{$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,}/web2c}'
+--
+-- but instead use:
+--
+-- resolvers.luacnfspec = 'selfautoparent:{/texmf{-local,}{,/web2c}}'
+--
+-- which does not make texlive happy as there is a texmf-local tree one level up
+-- (sigh), so we need this. (We can assume web2c as mkiv does not run on older
+-- texlives anyway.)
+--
+-- texlive:
+--
+-- selfautodir:
+-- selfautoparent:
+-- selfautodir:share/texmf-local/web2c
+-- selfautodir:share/texmf/web2c
+-- selfautodir:texmf-local/web2c
+-- selfautodir:texmf/web2c
+-- selfautoparent:share/texmf-local/web2c
+-- selfautoparent:share/texmf/web2c
+-- selfautoparent:texmf-local/web2c
+-- selfautoparent:texmf/web2c
+--
+-- minimals:
+--
+-- home:texmf/web2c
+-- selfautoparent:texmf-local/web2c
+-- selfautoparent:texmf-context/web2c
+-- selfautoparent:texmf/web2c
+
+if this_is_texlive then
+ -- resolvers.luacnfspec = '{selfautodir:,selfautoparent:}{,{/share,}/texmf{-local,}/web2c}'
+ -- resolvers.luacnfspec = '{selfautodir:{/share,}/texmf-local/web2c,selfautoparent:{/share,}/texmf{-local,}/web2c}'
+ -- resolvers.luacnfspec = 'selfautodir:/texmf-local/web2c;selfautoparent:/texmf{-local,}/web2c'
+ resolvers.luacnfspec = 'selfautodir:;selfautoparent:;{selfautodir:,selfautoparent:}{/share,}/texmf{-local,}/web2c'
+else
+ resolvers.luacnfspec = 'home:texmf/web2c;selfautoparent:texmf{-local,-context,}/web2c'
+end
+
+-- which (as we want users to use the web2c path) can be simplified to this:
+--
+-- if environment and environment.ownpath and string.find(environment.ownpath,"[\\/]texlive[\\/]") then
+-- resolvers.luacnfspec = 'selfautodir:/texmf-local/web2c,selfautoparent:/texmf-local/web2c,selfautoparent:/texmf/web2c'
+-- else
+-- resolvers.luacnfspec = 'selfautoparent:/texmf-local/web2c,selfautoparent:/texmf/web2c'
+-- end
+
+
local unset_variable = "unset"
@@ -11177,7 +12263,7 @@ local instance = resolvers.instance or nil -- the current one (fast access)
function resolvers.setenv(key,value,raw)
if instance then
-- this one will be consulted first when we stay inside
- -- the current environment
+ -- the current environment (prefixes are not resolved here)
instance.environment[key] = value
-- we feed back into the environment, and as this is used
-- by other applications (via os.execute) we need to make
@@ -11257,6 +12343,7 @@ function resolvers.newinstance() -- todo: all vars will become lowercase and alp
remember = true,
diskcache = true,
renewcache = false,
+ renewtree = false,
loaderror = false,
savelists = true,
pattern = nil, -- lists
@@ -11348,7 +12435,7 @@ local function makepathexpression(str)
end
end
-local function reportcriticalvariables()
+local function reportcriticalvariables(cnfspec)
if trace_locating then
for i=1,#resolvers.criticalvars do
local k = resolvers.criticalvars[i]
@@ -11356,6 +12443,14 @@ local function reportcriticalvariables()
report_resolving("variable '%s' set to '%s'",k,v)
end
report_resolving()
+ if cnfspec then
+ if type(cnfspec) == "table" then
+ report_resolving("using configuration specification '%s'",concat(cnfspec,","))
+ else
+ report_resolving("using configuration specification '%s'",cnfspec)
+ end
+ end
+ report_resolving()
end
reportcriticalvariables = function() end
end
@@ -11370,7 +12465,7 @@ local function identify_configuration_files()
else
resolvers.luacnfstate = "environment"
end
- reportcriticalvariables()
+ reportcriticalvariables(cnfspec)
local cnfpaths = expandedpathfromlist(resolvers.splitpath(cnfspec))
local luacnfname = resolvers.luacnfname
for i=1,#cnfpaths do
@@ -11406,6 +12501,19 @@ local function load_configuration_files()
if blob then
local setups = instance.setups
local data = blob()
+ local parent = data and data.parent
+ if parent then
+ local filename = filejoin(pathname,parent)
+ local realname = resolvers.resolve(filename) -- no shortcut
+ local blob = loadfile(realname)
+ if blob then
+ local parentdata = blob()
+ if parentdata then
+ report_resolving("loading configuration file '%s'",filename)
+ data = table.merged(parentdata,data)
+ end
+ end
+ end
data = data and data.content
if data then
if trace_locating then
@@ -11495,11 +12603,6 @@ local function locate_file_databases()
local runtime = stripped == path
path = resolvers.cleanpath(path)
local spec = resolvers.splitmethod(stripped)
- -- TH Original did this the other way around (elseif and if tests
- -- reversed) but then the else branch was never reached, as 'runtime'
- -- would effectively be ignored. In turn, that meant that the wrong
- -- locator method was used, such that tree: and file: were treated
- -- identically (cached and no runtime search).
if runtime and (spec.noscheme or spec.scheme == "file") then
stripped = "tree:///" .. stripped
elseif spec.scheme == "cache" or spec.scheme == "file" then
@@ -11507,7 +12610,7 @@ local function locate_file_databases()
end
if trace_locating then
if runtime then
- report_resolving("locating list of '%s' (runtime)",path)
+ report_resolving("locating list of '%s' (runtime) (%s)",path,stripped)
else
report_resolving("locating list of '%s' (cached)",path)
end
@@ -11551,6 +12654,39 @@ local function save_file_databases() -- will become cachers
end
end
+function resolvers.renew(hashname)
+ if hashname and hashname ~= "" then
+ local expanded = resolvers.expansion(hashname) or ""
+ if expanded ~= "" then
+ if trace_locating then
+ report_resolving("identifying tree '%s' from '%s'",expanded,hashname)
+ end
+ hashname = expanded
+ else
+ if trace_locating then
+ report_resolving("identifying tree '%s'",hashname)
+ end
+ end
+ local realpath = resolvers.resolve(hashname)
+ if lfs.isdir(realpath) then
+ if trace_locating then
+ report_resolving("using path '%s'",realpath)
+ end
+ methodhandler('generators',hashname)
+ -- could be shared
+ local content = instance.files[hashname]
+ caches.collapsecontent(content)
+ if trace_locating then
+ report_resolving("saving tree '%s'",hashname)
+ end
+ caches.savecontent(hashname,"files",content)
+ -- till here
+ else
+ report_resolving("invalid path '%s'",realpath)
+ end
+ end
+end
+
local function load_databases()
locate_file_databases()
if instance.diskcache and not instance.renewcache then
@@ -11838,6 +12974,7 @@ local function collect_files(names)
if dname == "" or find(dname,"^%.") then
dname = false
else
+dname = gsub(dname,"*","%.*")
dname = "/" .. dname .. "$"
end
local hashes = instance.hashes
@@ -11897,9 +13034,22 @@ local function collect_files(names)
return noffiles > 0 and filelist or nil
end
-function resolvers.registerintrees(name)
- if not find(name,"^%.") then
- instance.foundintrees[name] = (instance.foundintrees[name] or 0) + 1 -- maybe only one
+local fit = { }
+
+function resolvers.registerintrees(filename,format,filetype,usedmethod,foundname)
+ local foundintrees = instance.foundintrees
+ if usedmethod == "direct" and filename == foundname and fit[foundname] then
+ -- just an extra lookup after a test on presence
+ else
+ local t = {
+ filename = filename,
+ format = format ~= "" and format or nil,
+ filetype = filetype ~= "" and filetype or nil,
+ usedmethod = usedmethod,
+ foundname = foundname,
+ }
+ fit[foundname] = t
+ foundintrees[#foundintrees+1] = t
end
end
@@ -11919,297 +13069,423 @@ end
local preparetreepattern = Cs((P(".")/"%%." + P("-")/"%%-" + P(1))^0 * Cc("$"))
--- this one is split in smaller functions but it needs testing
+-- -- -- beginning of main file search routing -- -- -- needs checking as previous has been patched
-local function collect_instance_files(filename,askedformat,allresults) -- todo : plugin (scanners, checkers etc)
- local result = { }
- local stamp = nil
- askedformat = askedformat or ""
- filename = collapsepath(filename)
- -- speed up / beware: format problem
- if instance.remember and not allresults then
- stamp = filename .. "--" .. askedformat
- if instance.found[stamp] then
+local collect_instance_files
+
+local function find_analyze(filename,askedformat,allresults)
+ local filetype, wantedfiles, ext = '', { }, fileextname(filename)
+ -- too tricky as filename can be bla.1.2.3:
+ --
+ -- if not suffixmap[ext] then
+ -- wantedfiles[#wantedfiles+1] = filename
+ -- end
+ wantedfiles[#wantedfiles+1] = filename
+ if askedformat == "" then
+ if ext == "" or not suffixmap[ext] then
+ local defaultsuffixes = resolvers.defaultsuffixes
+ for i=1,#defaultsuffixes do
+ local forcedname = filename .. '.' .. defaultsuffixes[i]
+ wantedfiles[#wantedfiles+1] = forcedname
+ filetype = resolvers.formatofsuffix(forcedname)
+ if trace_locating then
+ report_resolving("forcing filetype '%s'",filetype)
+ end
+ end
+ else
+ filetype = resolvers.formatofsuffix(filename)
if trace_locating then
- report_resolving("remembered file '%s'",filename)
+ report_resolving("using suffix based filetype '%s'",filetype)
end
- resolvers.registerintrees(filename) -- for tracing used files
- return instance.found[stamp]
end
- end
- if not dangerous[askedformat] then
- if isreadable(filename) then
- if trace_detail then
- report_resolving("file '%s' found directly",filename)
- end
- if stamp then
- instance.found[stamp] = { filename }
+ else
+ if ext == "" or not suffixmap[ext] then
+ local format_suffixes = suffixes[askedformat]
+ if format_suffixes then
+ for i=1,#format_suffixes do
+ wantedfiles[#wantedfiles+1] = filename .. "." .. format_suffixes[i]
+ end
end
- return { filename }
+ end
+ filetype = askedformat
+ if trace_locating then
+ report_resolving("using given filetype '%s'",filetype)
end
end
+ return filetype, wantedfiles
+end
+
+local function find_direct(filename,allresults)
+ if not dangerous[askedformat] and isreadable(filename) then
+ if trace_detail then
+ report_resolving("file '%s' found directly",filename)
+ end
+ return "direct", { filename }
+ end
+end
+
+local function find_wildcard(filename,allresults)
if find(filename,'%*') then
if trace_locating then
report_resolving("checking wildcard '%s'", filename)
end
- result = resolvers.findwildcardfiles(filename) -- we can use th elocal
- elseif file.is_qualified_path(filename) then
- if isreadable(filename) then
- if trace_locating then
- report_resolving("qualified name '%s'", filename)
- end
- result = { filename }
- else
- local forcedname, ok, suffix = "", false, fileextname(filename)
- if suffix == "" then -- why
- local format_suffixes = askedformat == "" and resolvers.defaultsuffixes or suffixes[askedformat]
- if format_suffixes then
- for i=1,#format_suffixes do
- local s = format_suffixes[i]
- forcedname = filename .. "." .. s
- if isreadable(forcedname) then
- if trace_locating then
- report_resolving("no suffix, forcing format filetype '%s'", s)
- end
- result, ok = { forcedname }, true
- break
- end
- end
- end
- end
- if not ok and suffix ~= "" then
- -- try to find in tree (no suffix manipulation), here we search for the
- -- matching last part of the name
- local basename = filebasename(filename)
- local pattern = lpegmatch(preparetreepattern,filename)
- -- messy .. to be sorted out
- local savedformat = askedformat
- local format = savedformat or ""
- if format == "" then
- askedformat = resolvers.formatofsuffix(suffix)
- end
- if not format then
- askedformat = "othertextfiles" -- kind of everything, maybe texinput is better
- end
- --
- if basename ~= filename then
- local resolved = collect_instance_files(basename,askedformat,allresults)
- if #result == 0 then -- shouldn't this be resolved ?
- local lowered = lower(basename)
- if filename ~= lowered then
- resolved = collect_instance_files(lowered,askedformat,allresults)
- end
- end
- resolvers.format = savedformat
- --
- for r=1,#resolved do
- local rr = resolved[r]
- if find(rr,pattern) then
- result[#result+1], ok = rr, true
- end
- end
- end
- -- a real wildcard:
- --
- -- if not ok then
- -- local filelist = collect_files({basename})
- -- for f=1,#filelist do
- -- local ff = filelist[f][3] or ""
- -- if find(ff,pattern) then
- -- result[#result+1], ok = ff, true
- -- end
- -- end
- -- end
- end
- if not ok and trace_locating then
- report_resolving("qualified name '%s'", filename)
- end
+ local method, result = resolvers.findwildcardfiles(filename)
+ if result then
+ return "wildcard", result
end
- else
- -- search spec
- local filetype, done, wantedfiles, ext = '', false, { }, fileextname(filename)
- -- -- tricky as filename can be bla.1.2.3
- -- if not suffixmap[ext] then --- probably needs to be done elsewhere too
- -- wantedfiles[#wantedfiles+1] = filename
- -- end
- wantedfiles[#wantedfiles+1] = filename
- if askedformat == "" then
- if ext == "" or not suffixmap[ext] then
- local defaultsuffixes = resolvers.defaultsuffixes
- for i=1,#defaultsuffixes do
- local forcedname = filename .. '.' .. defaultsuffixes[i]
- wantedfiles[#wantedfiles+1] = forcedname
- filetype = resolvers.formatofsuffix(forcedname)
+ end
+end
+
+local function find_qualified(filename,allresults) -- this one will be split too
+ if not file.is_qualified_path(filename) then
+ return
+ end
+ if trace_locating then
+ report_resolving("checking qualified name '%s'", filename)
+ end
+ if isreadable(filename) then
+ if trace_detail then
+ report_resolving("qualified file '%s' found", filename)
+ end
+ return "qualified", { filename }
+ end
+ if trace_detail then
+ report_resolving("locating qualified file '%s'", filename)
+ end
+ local forcedname, suffix = "", fileextname(filename)
+ if suffix == "" then -- why
+ local format_suffixes = askedformat == "" and resolvers.defaultsuffixes or suffixes[askedformat]
+ if format_suffixes then
+ for i=1,#format_suffixes do
+ local s = format_suffixes[i]
+ forcedname = filename .. "." .. s
+ if isreadable(forcedname) then
if trace_locating then
- report_resolving("forcing filetype '%s'",filetype)
+ report_resolving("no suffix, forcing format filetype '%s'", s)
end
+ return "qualified", { forcedname }
end
- else
- filetype = resolvers.formatofsuffix(filename)
- if trace_locating then
- report_resolving("using suffix based filetype '%s'",filetype)
+ end
+ end
+ end
+ if suffix and suffix ~= "" then
+ -- try to find in tree (no suffix manipulation), here we search for the
+ -- matching last part of the name
+ local basename = filebasename(filename)
+ local pattern = lpegmatch(preparetreepattern,filename)
+ -- messy .. to be sorted out
+ local savedformat = askedformat
+ local format = savedformat or ""
+ if format == "" then
+ askedformat = resolvers.formatofsuffix(suffix)
+ end
+ if not format then
+ askedformat = "othertextfiles" -- kind of everything, maybe all
+ end
+ --
+ if basename ~= filename then
+ local resolved = collect_instance_files(basename,askedformat,allresults)
+ if #resolved == 0 then
+ local lowered = lower(basename)
+ if filename ~= lowered then
+ resolved = collect_instance_files(lowered,askedformat,allresults)
end
end
- else
- if ext == "" or not suffixmap[ext] then
- local format_suffixes = suffixes[askedformat]
- if format_suffixes then
- for i=1,#format_suffixes do
- wantedfiles[#wantedfiles+1] = filename .. "." .. format_suffixes[i]
+ resolvers.format = savedformat
+ --
+ if #resolved > 0 then
+ local result = { }
+ for r=1,#resolved do
+ local rr = resolved[r]
+ if find(rr,pattern) then
+ result[#result+1] = rr
end
end
+ if #result > 0 then
+ return "qualified", result
+ end
end
- filetype = askedformat
- if trace_locating then
- report_resolving("using given filetype '%s'",filetype)
- end
end
- local typespec = resolvers.variableofformat(filetype)
- local pathlist = resolvers.expandedpathlist(typespec)
- if not pathlist or #pathlist == 0 then
- -- no pathlist, access check only / todo == wildcard
- if trace_detail then
- report_resolving("checking filename '%s', filetype '%s', wanted files '%s'",filename, filetype or '?',concat(wantedfiles," | "))
+ -- a real wildcard:
+ --
+ -- local filelist = collect_files({basename})
+ -- result = { }
+ -- for f=1,#filelist do
+ -- local ff = filelist[f][3] or ""
+ -- if find(ff,pattern) then
+ -- result[#result+1], ok = ff, true
+ -- end
+ -- end
+ -- if #result > 0 then
+ -- return "qualified", result
+ -- end
+ end
+end
+
+local function check_subpath(fname)
+ if isreadable(fname) then
+ if trace_detail then
+ report_resolving("found '%s' by deep scanning",fname)
+ end
+ return fname
+ end
+end
+
+local function find_intree(filename,filetype,wantedfiles,allresults)
+ local typespec = resolvers.variableofformat(filetype)
+ local pathlist = resolvers.expandedpathlist(typespec)
+ local method = "intree"
+ if pathlist and #pathlist > 0 then
+ -- list search
+ local filelist = collect_files(wantedfiles)
+ local dirlist = { }
+ if filelist then
+ for i=1,#filelist do
+ dirlist[i] = filedirname(filelist[i][3]) .. "/" -- was [2] .. gamble
end
- for k=1,#wantedfiles do
- local fname = wantedfiles[k]
- if fname and isreadable(fname) then
- filename, done = fname, true
- result[#result+1] = filejoin('.',fname)
- break
+ end
+ if trace_detail then
+ report_resolving("checking filename '%s'",filename)
+ end
+ local result = { }
+ for k=1,#pathlist do
+ local path = pathlist[k]
+ local pathname = lpegmatch(inhibitstripper,path)
+ local doscan = path == pathname -- no ^!!
+ if not find (pathname,'//$') then
+ doscan = false -- we check directly on the path
+ end
+ local done = false
+ -- using file list
+ if filelist then -- database
+ -- compare list entries with permitted pattern -- /xx /xx//
+ local expression = makepathexpression(pathname)
+ if trace_detail then
+ report_resolving("using pattern '%s' for path '%s'",expression,pathname)
end
- end
- -- this is actually 'other text files' or 'any' or 'whatever'
- local filelist = collect_files(wantedfiles)
- local fl = filelist and filelist[1]
- if fl then
- filename = fl[3] -- not local?
- result[#result+1] = resolvers.resolve(filename)
- done = true
- end
- else
- -- list search
- local filelist = collect_files(wantedfiles)
- local dirlist = { }
- if filelist then
- for i=1,#filelist do
- dirlist[i] = filedirname(filelist[i][3]) .. "/" -- was [2] .. gamble
- end
- end
- if trace_detail then
- report_resolving("checking filename '%s'",filename)
- end
- for k=1,#pathlist do
- local path = pathlist[k]
- local pathname = lpegmatch(inhibitstripper,path)
- local doscan = path == pathname -- no ^!!
- if not find (pathname, '//$') then doscan = false end
- done = false
- -- using file list
- if filelist then
- -- compare list entries with permitted pattern -- /xx /xx//
- local expression = makepathexpression(pathname)
- if trace_detail then
- report_resolving("using pattern '%s' for path '%s'",expression,pathname)
- end
- for k=1,#filelist do
- local fl = filelist[k]
- local f = fl[2]
- local d = dirlist[k]
- if find(d,expression) then
- -- todo, test for readable
- result[#result+1] = resolvers.resolve(fl[3]) -- no shortcut
- done = true
- if allresults then
- if trace_detail then
- report_resolving("match to '%s' in hash for file '%s' and path '%s', continue scanning",expression,f,d)
- end
- else
- if trace_detail then
- report_resolving("match to '%s' in hash for file '%s' and path '%s', quit scanning",expression,f,d)
- end
- break
+ for k=1,#filelist do
+ local fl = filelist[k]
+ local f = fl[2]
+ local d = dirlist[k]
+ if find(d,expression) then
+ -- todo, test for readable
+ result[#result+1] = resolvers.resolve(fl[3]) -- no shortcut
+ done = true
+ if allresults then
+ if trace_detail then
+ report_resolving("match to '%s' in hash for file '%s' and path '%s', continue scanning",expression,f,d)
end
- elseif trace_detail then
- report_resolving("no match to '%s' in hash for file '%s' and path '%s'",expression,f,d)
+ else
+ if trace_detail then
+ report_resolving("match to '%s' in hash for file '%s' and path '%s', quit scanning",expression,f,d)
+ end
+ break
end
+ elseif trace_detail then
+ report_resolving("no match to '%s' in hash for file '%s' and path '%s'",expression,f,d)
end
end
- if not done then
- -- check if on disk / unchecked / does not work at all / also zips
- -- TH perhaps it did not work because of missing resolvers.resolve()
- -- and resolvers.scanfiles() calls ...
- pathname = resolvers.resolve(pathname)
- local scheme = url.hasscheme(pathname)
- if not scheme or scheme == "file" then
- local pname = gsub(pathname,"%.%*$",'')
- if not find(pname,"%*") then
- local ppname = gsub(pname,"/+$","")
- if can_be_dir(ppname) then
- local files = {}
- if doscan then files = resolvers.scanfiles(ppname) end
+ end
+ if done then
+ method = "database"
+ else
+ method = "filesystem" -- bonus, even when !! is specified
+ pathname = gsub(pathname,"/+$","")
+ pathname = resolvers.resolve(pathname)
+ local scheme = url.hasscheme(pathname)
+ if not scheme or scheme == "file" then
+ local pname = gsub(pathname,"%.%*$",'')
+ if not find(pname,"%*") then
+ if can_be_dir(pname) then
+ -- quick root scan first
+ for k=1,#wantedfiles do
+ local w = wantedfiles[k]
+ local fname = check_subpath(filejoin(pname,w))
+ if fname then
+ result[#result+1] = fname
+ done = true
+ if not allresults then
+ break
+ end
+ end
+ end
+ if not done and doscan then
+ -- collect files in path (and cache the result)
+ local files = resolvers.simplescanfiles(pname,false,true)
for k=1,#wantedfiles do
local w = wantedfiles[k]
- local subpath = ''
- if files[w] then
- subpath = type(files[w]) == "table" and files[w][1] or files[w]
- end
- local fname = filejoin(ppname,subpath,w)
- if isreadable(fname) then
- if trace_detail then
- report_resolving("found '%s' by scanning",fname)
+ local subpath = files[w]
+ if not subpath or subpath == "" then
+ -- rootscan already done
+ elseif type(subpath) == "string" then
+ local fname = check_subpath(filejoin(pname,subpath,w))
+ if fname then
+ result[#result+1] = fname
+ done = true
+ if not allresults then
+ break
+ end
+ end
+ else
+ for i=1,#subpath do
+ local sp = subpath[i]
+ if sp == "" then
+ -- roottest already done
+ else
+ local fname = check_subpath(filejoin(pname,sp,w))
+ if fname then
+ result[#result+1] = fname
+ done = true
+ if not allresults then
+ break
+ end
+ end
+ end
+ end
+ if done and not allresults then
+ break
end
- result[#result+1] = fname
- done = true
- if not allresults then break end
end
end
- else
- -- no access needed for non existing path, speedup (esp in large tree with lots of fake)
end
end
+ else
+            -- no access needed for a non-existing path, speedup (esp in a large tree with lots of fakes)
end
end
- if not done and doscan then
- -- todo: slow path scanning ... although we now have tree:// supported in $TEXMF
- end
- if done and not allresults then break end
+ end
+ -- todo recursive scanning
+ if done and not allresults then
+ break
end
end
+ if #result > 0 then
+ return method, result
+ end
end
- for k=1,#result do
- local rk = collapsepath(result[k])
- result[k] = rk
- resolvers.registerintrees(rk) -- for tracing used files
+end
+
+local function find_onpath(filename,filetype,wantedfiles,allresults)
+ if trace_detail then
+ report_resolving("checking filename '%s', filetype '%s', wanted files '%s'",filename, filetype or '?',concat(wantedfiles," | "))
+ end
+ local result = { }
+ for k=1,#wantedfiles do
+ local fname = wantedfiles[k]
+ if fname and isreadable(fname) then
+ filename = fname
+ result[#result+1] = filejoin('.',fname)
+ if not allresults then
+ break
+ end
+ end
end
- if stamp then
- instance.found[stamp] = result
+ if #result > 0 then
+ return "onpath", result
end
- return result
end
--- -- -- begin of main file search routing -- -- --
-
-
-
-
-
-
-
+local function find_otherwise(filename,filetype,wantedfiles,allresults) -- other text files | any | whatever
+ local filelist = collect_files(wantedfiles)
+ local fl = filelist and filelist[1]
+ if fl then
+ return "otherwise", { resolvers.resolve(fl[3]) } -- filename
+ end
+end
+-- we could have a loop over the 6 find functions but then we'd have to
+-- always run the analyzer first
+collect_instance_files = function(filename,askedformat,allresults) -- uses nested
+ askedformat = askedformat or ""
+ filename = collapsepath(filename)
+ if allresults then
+ -- no need for caching, only used for tracing
+ local filetype, wantedfiles = find_analyze(filename,askedformat)
+ local results = {
+ { find_direct (filename,true) },
+ { find_wildcard (filename,true) },
+ { find_qualified(filename,true) },
+ { find_intree (filename,filetype,wantedfiles,true) },
+ { find_onpath (filename,filetype,wantedfiles,true) },
+ { find_otherwise(filename,filetype,wantedfiles,true) },
+ }
+ local result, status, done = { }, { }, { }
+ for k, r in next, results do
+ local method, list = r[1], r[2]
+ if method and list then
+ for i=1,#list do
+ local c = collapsepath(list[i])
+ if not done[c] then
+ result[#result+1] = c
+ done[c] = true
+ end
+ status[#status+1] = format("%-10s: %s",method,c)
+ end
+ end
+ end
+ if trace_detail then
+ report_resolving("lookup status: %s",table.serialize(status,filename))
+ end
+ return result, status
+ else
+ local method, result, stamp, filetype, wantedfiles
+ if instance.remember then
+ stamp = format("%s--%s", filename, askedformat)
+ result = stamp and instance.found[stamp]
+ if result then
+ if trace_locating then
+ report_resolving("remembered file '%s'",filename)
+ end
+ return result
+ end
+ end
+ method, result = find_direct(filename)
+ if not result then
+ method, result = find_wildcard(filename)
+ if not result then
+ method, result = find_qualified(filename)
+ if not result then
+ filetype, wantedfiles = find_analyze(filename,askedformat)
+ method, result = find_intree(filename,filetype,wantedfiles)
+ if not result then
+ method, result = find_onpath(filename,filetype,wantedfiles)
+ if not result then
+ method, result = find_otherwise(filename,filetype,wantedfiles)
+ end
+ end
+ end
+ end
+ end
+ if result and #result > 0 then
+ local foundname = collapsepath(result[1])
+ resolvers.registerintrees(filename,askedformat,filetype,method,foundname)
+ result = { foundname }
+ else
+ result = { } -- maybe false
+ end
+ if stamp then
+ if trace_locating then
+ report_resolving("remembering file '%s'",filename)
+ end
+ instance.found[stamp] = result
+ end
+ return result
+ end
+end
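+
+-- a short summary of the lookup order implemented above: find_direct,
+-- find_wildcard, find_qualified, find_intree, find_onpath and finally
+-- find_otherwise; the first finder that yields a result wins and, unless
+-- allresults is asked for, the single hit is remembered in instance.found
+-- (when instance.remember is set) under the "<filename>--<askedformat>" stamp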
-- -- -- end of main file search routing -- -- --
+
local function findfiles(filename,filetype,allresults)
- local result = collect_instance_files(filename,filetype or "",allresults)
- if #result == 0 then
+ local result, status = collect_instance_files(filename,filetype or "",allresults)
+ if not result or #result == 0 then
local lowered = lower(filename)
if filename ~= lowered then
- return collect_instance_files(lowered,filetype or "",allresults)
+ result, status = collect_instance_files(lowered,filetype or "",allresults)
end
end
- return result
+ return result or { }, status
end
function resolvers.findfiles(filename,filetype)
@@ -12370,6 +13646,10 @@ function resolvers.load(option)
return files and next(files) and true
end
+function resolvers.loadtime()
+ return statistics.elapsedtime(instance)
+end
+
local function report(str)
if trace_locating then
report_resolving(str) -- has already verbose
@@ -12383,6 +13663,9 @@ function resolvers.dowithfilesandreport(command, files, ...) -- will move
if trace_locating then
report('') -- ?
end
+ if type(files) == "string" then
+ files = { files }
+ end
for f=1,#files do
local file = files[f]
local result = command(file,...)
@@ -12540,6 +13823,8 @@ local cleanpath, findgivenfile, expansion = resolvers.cleanpath, resolvers.findg
local getenv = resolvers.getenv -- we can probably also use resolvers.expansion
local P, Cs, lpegmatch = lpeg.P, lpeg.Cs, lpeg.match
+-- getenv = function(...) return resolvers.getenv(...) end -- needs checking (definitions changes later on)
+
prefixes.environment = function(str)
return cleanpath(expansion(str))
end
@@ -12578,7 +13863,7 @@ end
prefixes.filename = function(str)
local fullname = findgivenfile(str) or ""
- return cleanpath(file.basename((fullname ~= "" and fullname) or str))
+ return cleanpath(file.basename((fullname ~= "" and fullname) or str)) -- no cleanpath needed here
end
prefixes.pathname = function(str)
@@ -12621,8 +13906,9 @@ function resolvers.allprefixes(separator)
end
local function _resolve_(method,target)
- if prefixes[method] then
- return prefixes[method](target)
+ local action = prefixes[method]
+ if action then
+ return action(target)
else
return method .. ":" .. target
end
@@ -12637,7 +13923,7 @@ end
local function resolve(str) -- use schemes, this one is then for the commandline only
local res = resolved[str]
if not res then
- res = gsub(str,"([a-z][a-z]+):([^ \"\';]*)",_resolve_)
+ res = gsub(str,"([a-z][a-z]+):([^ \"\';]*)",_resolve_) -- home:xx;selfautoparent:xx; etc
resolved[str] = res
abstract[res] = str
end
@@ -12797,7 +14083,7 @@ end
function generators.file(specification)
local path = specification.filename
- local content = resolvers.scanfiles(path)
+ local content = resolvers.scanfiles(path,false,true) -- scan once
resolvers.registerfilehash(path,content,true)
end
@@ -13152,6 +14438,18 @@ local archives = zip.archives
zip.registeredfiles = zip.registeredfiles or { }
local registeredfiles = zip.registeredfiles
+local limited = false
+
+directives.register("system.inputmode", function(v)
+ if not limited then
+ local i_limiter = io.i_limiter(v)
+ if i_limiter then
+ zip.open = i_limiter.protect(zip.open)
+ limited = true
+ end
+ end
+end)
+
local function validzip(str) -- todo: use url splitter
if not find(str,"^zip://") then
return "zip:///" .. str
@@ -13419,11 +14717,11 @@ function resolvers.finders.tree(specification)
end
function resolvers.locators.tree(specification)
- -- TH: resolvers.resolve() call was missing here
- local name = resolvers.resolve(specification.filename)
- if name ~= '' and lfs.isdir(name) then
+ local name = specification.filename
+ local realname = resolvers.resolve(name) -- no shortcut
+ if realname and realname ~= '' and lfs.isdir(realname) then
if trace_locating then
- report_trees("locator '%s' found",name)
+ report_trees("locator '%s' found",realname)
end
resolvers.appendhash('tree',name,false) -- don't cache
elseif trace_locating then
@@ -13437,6 +14735,8 @@ function resolvers.hashers.tree(specification)
report_trees("analysing '%s'",name)
end
resolvers.methodhandler("hashers",name)
+
+ resolvers.generators.file(specification)
end
resolvers.concatinators.tree = resolvers.concatinators.file
@@ -13596,8 +14896,15 @@ local function loaded(libpaths,name,simple)
end
package.loaders[2] = function(name) -- was [#package.loaders+1]
- if trace_locating then -- mode detail
- report_libraries("! locating '%s'",name)
+ if file.suffix(name) == "" then
+ name = file.addsuffix(name,"lua") -- maybe a list
+ if trace_locating then -- mode detail
+ report_libraries("! locating '%s' with forced suffix",name)
+ end
+ else
+ if trace_locating then -- mode detail
+ report_libraries("! locating '%s'",name)
+ end
end
for i=1,#libformats do
local format = libformats[i]
@@ -13804,7 +15111,13 @@ function resolvers.load_tree(tree,resolve)
-- Beware, we need to obey the relocatable autoparent so we
-- set TEXMFCNF to its raw value. This is somewhat tricky when
-- we run a mkii job from within. Therefore, in mtxrun, there
- -- is a resolve applied when we're in mkii/kpse mode.
+ -- is a resolve applied when we're in mkii/kpse mode or when
+ -- --resolve is passed to mtxrun. Maybe we should also set the
+    -- local AUTOPARENT etc. although these are always set anew.
+
+ if resolve then
+ resolvers.luacnfspec = resolvers.resolve(resolvers.luacnfspec)
+ end
setenv('SELFAUTOPARENT', newroot)
setenv('SELFAUTODIR', newtree)
@@ -13812,11 +15125,12 @@ function resolvers.load_tree(tree,resolve)
setenv('TEXROOT', newroot)
setenv('TEXOS', texos)
setenv('TEXMFOS', texmfos)
- setenv('TEXMFCNF', resolvers.luacnfspec, not resolve)
- setenv("PATH", newpath .. io.pathseparator .. getenv("PATH"))
+ setenv('TEXMFCNF', resolvers.luacnfspec,true) -- already resolved
+ setenv('PATH', newpath .. io.pathseparator .. getenv('PATH'))
report_tds("changing from root '%s' to '%s'",oldroot,newroot)
- report_tds("prepending '%s' to binary path",newpath)
+ report_tds("prepending '%s' to PATH",newpath)
+ report_tds("setting TEXMFCNF to '%s'",resolvers.luacnfspec)
report_tds()
end
end
@@ -13837,6 +15151,7 @@ if not modules then modules = { } end modules ['data-lst'] = {
-- used in mtxrun, can be loaded later .. todo
local find, concat, upper, format = string.find, table.concat, string.upper, string.format
+local fastcopy, sortedpairs = table.fastcopy, table.sortedpairs
resolvers.listers = resolvers.listers or { }
@@ -13867,10 +15182,10 @@ function resolvers.listers.variables(pattern)
end
end
end
- local env = table.fastcopy(environment)
- local var = table.fastcopy(variables)
- local exp = table.fastcopy(expansions)
- for key, value in table.sortedpairs(configured) do
+ local env = fastcopy(environment)
+ local var = fastcopy(variables)
+ local exp = fastcopy(expansions)
+ for key, value in sortedpairs(configured) do
if key ~= "" and (pattern == "" or find(upper(key),pattern)) then
report_lists(key)
report_lists(" env: %s",tabstr(rawget(environment,key)) or "unset")
@@ -13879,9 +15194,9 @@ function resolvers.listers.variables(pattern)
report_lists(" res: %s",resolvers.resolve(expansions[key]) or "unset")
end
end
- instance.environment = table.fastcopy(env)
- instance.variables = table.fastcopy(var)
- instance.expansions = table.fastcopy(exp)
+ instance.environment = fastcopy(env)
+ instance.variables = fastcopy(var)
+ instance.expansions = fastcopy(exp)
end
function resolvers.listers.configurations(report)
@@ -14168,8 +15483,8 @@ own = { } -- not local, might change
own.libs = { -- order can be made better
'l-string.lua',
- 'l-lpeg.lua',
'l-table.lua',
+ 'l-lpeg.lua',
'l-io.lua',
'l-number.lua',
'l-set.lua',
@@ -14240,7 +15555,7 @@ end
-- End of hack.
-local format, gsub, gmatch, match = string.format, string.gsub, string.gmatch, string.match
+local format, gsub, gmatch, match, find = string.format, string.gsub, string.gmatch, string.match, string.find
local concat = table.concat
own.name = (environment and environment.ownname) or arg[0] or 'mtxrun.lua'
@@ -14362,7 +15677,7 @@ local helpinfo = [[
--resolve resolve prefixed arguments
--ctxlua run internally (using preloaded libs)
--internal run script using built in libraries (same as --ctxlua)
---locate locate given filename
+--locate locate given filename in database (default) or system (--first --all --detail)
--autotree use texmf tree cf. env 'texmfstart_tree' or 'texmfstarttree'
--tree=pathtotree use given texmf tree (default file: 'setuptex.tmf')
@@ -14410,7 +15725,7 @@ local helpinfo = [[
local application = logs.application {
name = "mtxrun",
- banner = "ConTeXt TDS Runner Tool 1.30",
+ banner = "ConTeXt TDS Runner Tool 1.31",
helpinfo = helpinfo,
}
@@ -14455,12 +15770,12 @@ runners.registered = {
xmltools = { 'xmltools.rb', true },
-- luatools = { 'luatools.lua', true },
mtxtools = { 'mtxtools.rb', true },
- pdftrimwhite = { 'pdftrimwhite.pl', false }
+ pdftrimwhite = { 'pdftrimwhite.pl', false },
}
runners.launchers = {
windows = { },
- unix = { }
+ unix = { },
}
-- like runners.libpath("framework"): looks on script's subpath
@@ -14487,7 +15802,7 @@ function runners.prepare()
end
local touchname = environment.argument("iftouched")
if type(touchname) == "string" and touchname ~= "" then
- local oldname, newname = string.split(touchname, ",")
+ local oldname, newname = string.splitup(touchname, ",")
if oldname and newname and oldname ~= "" and newname ~= "" then
if not file.needs_updating(oldname,newname) then
if e_verbose then
@@ -14521,11 +15836,11 @@ function runners.execute_script(fullname,internal,nosplit)
if path ~= "" then
result = fullname
elseif name then
- name = name:gsub("^int[%a]*:",function()
+ name = gsub(name,"^int[%a]*:",function()
internal = true
return ""
end )
- name = name:gsub("^script:","")
+ name = gsub(name,"^script:","")
if suffix == "" and runners.registered[name] and runners.registered[name][1] then
name = runners.registered[name][1]
suffix = file.extname(name)
@@ -14555,13 +15870,15 @@ function runners.execute_script(fullname,internal,nosplit)
environment.ownscript = result
dofile(result)
else
- local quotedresult = result
- if string.match(result,' ') and not string.match(result,"^\".*\"$") then quotedresult = '"' .. result .. '"' end
local binary = runners.applications[file.extname(result)]
+ result = string.quoted(string.unquoted(result))
+ -- if string.match(result,' ') and not string.match(result,"^\".*\"$") then
+ -- result = '"' .. result .. '"'
+ -- end
if binary and binary ~= "" then
- quotedresult = binary .. " " .. quotedresult
+ result = binary .. " " .. result
end
- local command = quotedresult .. " " .. environment.reconstructcommandline(environment.arguments_after,noquote)
+ local command = result .. " " .. environment.reconstructcommandline(environment.arguments_after,noquote)
if e_verbose then
report()
report("executing: %s",command)
@@ -14606,7 +15923,7 @@ function runners.execute_program(fullname)
local before, after = environment.splitarguments(fullname)
for k=1,#after do after[k] = resolvers.resolve(after[k]) end
environment.initializearguments(after)
- fullname = fullname:gsub("^bin:","")
+ fullname = gsub(fullname,"^bin:","")
local command = fullname .. " " .. (environment.reconstructcommandline(after or "",noquote) or "")
report()
report("executing: %s",command)
@@ -14667,10 +15984,24 @@ function runners.resolve_string(filename)
end
end
-function runners.locate_file(filename)
- -- differs from texmfstart where locate appends .com .exe .bat ... todo
+-- differs from texmfstart where locate appends .com .exe .bat ... todo
+
+function runners.locate_file(filename) -- was given file but only searches in tree
if filename and filename ~= "" then
- runners.report_location(resolvers.findgivenfile(filename))
+ if environment.argument("first") then
+ runners.report_location(resolvers.findfile(filename))
+ -- resolvers.dowithfilesandreport(resolvers.findfile,filename)
+ elseif environment.argument("all") then
+ local result, status = resolvers.findfiles(filename)
+ if status and environment.argument("detail") then
+ runners.report_location(status)
+ else
+ runners.report_location(result)
+ end
+ else
+ runners.report_location(resolvers.findgivenfile(filename))
+ -- resolvers.dowithfilesandreport(resolvers.findgivenfile,filename)
+ end
end
end
@@ -14679,12 +16010,12 @@ function runners.locate_platform()
end
function runners.report_location(result)
- if e_verbose then
- reportline()
- if result and result ~= "" then
- report(result)
- else
- report("not found")
+ if type(result) == "table" then
+ for i=1,#result do
+ if i > 1 then
+ io.write("\n")
+ end
+ io.write(result[i])
end
else
io.write(result)
@@ -14742,8 +16073,8 @@ function resolvers.launch(str)
end
function runners.launch_file(filename)
- instance.allresults = true
trackers.enable("resolvers.locating")
+ local allresults = environment.arguments["all"]
local pattern = environment.arguments["pattern"]
if not pattern or pattern == "" then
pattern = filename
@@ -14751,15 +16082,15 @@ function runners.launch_file(filename)
if not pattern or pattern == "" then
report("provide name or --pattern=")
else
- local t = resolvers.findfiles(pattern)
+ local t = resolvers.findfiles(pattern,nil,allresults)
if not t or #t == 0 then
- t = resolvers.findfiles("*/" .. pattern)
+ t = resolvers.findfiles("*/" .. pattern,nil,allresults)
end
if not t or #t == 0 then
- t = resolvers.findfiles("*/" .. pattern .. "*")
+ t = resolvers.findfiles("*/" .. pattern .. "*",nil,allresults)
end
if t and #t > 0 then
- if environment.arguments["all"] then
+ if allresults then
for _, v in pairs(t) do
report("launching %s", v)
resolvers.launch(v)
@@ -14774,6 +16105,11 @@ function runners.launch_file(filename)
end
end
+local mtxprefixes = {
+ { "^mtx%-", "mtx-" },
+ { "^mtx%-t%-", "mtx-t-" },
+}
+
function runners.find_mtx_script(filename)
local function found(name)
local path = file.dirname(name)
@@ -14799,24 +16135,27 @@ function runners.find_mtx_script(filename)
return fullname
end
-- mtx- prefix checking
- local mtxprefix = (filename:find("^mtx%-") and "") or "mtx-"
- -- context namespace, mtx-<filename>
- fullname = mtxprefix .. filename
- fullname = found(fullname) or resolvers.findfile(fullname)
- if fullname and fullname ~= "" then
- return fullname
- end
- -- context namespace, mtx-<filename>s
- fullname = mtxprefix .. basename .. "s" .. "." .. suffix
- fullname = found(fullname) or resolvers.findfile(fullname)
- if fullname and fullname ~= "" then
- return fullname
- end
- -- context namespace, mtx-<filename minus trailing s>
- fullname = mtxprefix .. basename:gsub("s$","") .. "." .. suffix
- fullname = found(fullname) or resolvers.findfile(fullname)
- if fullname and fullname ~= "" then
- return fullname
+ for i=1,#mtxprefixes do
+ local mtxprefix = mtxprefixes[i]
+ mtxprefix = find(filename,mtxprefix[1]) and "" or mtxprefix[2]
+ -- context namespace, mtx-<filename>
+ fullname = mtxprefix .. filename
+ fullname = found(fullname) or resolvers.findfile(fullname)
+ if fullname and fullname ~= "" then
+ return fullname
+ end
+ -- context namespace, mtx-<filename>s
+ fullname = mtxprefix .. basename .. "s" .. "." .. suffix
+ fullname = found(fullname) or resolvers.findfile(fullname)
+ if fullname and fullname ~= "" then
+ return fullname
+ end
+ -- context namespace, mtx-<filename minus trailing s>
+ fullname = mtxprefix .. gsub(basename,"s$","") .. "." .. suffix
+ fullname = found(fullname) or resolvers.findfile(fullname)
+ if fullname and fullname ~= "" then
+ return fullname
+ end
end
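+  -- the loop above tries, for a plain name, the mtx- prefixed variant, the
+  -- variant with a trailing 's' and the one without it, and then the same
+  -- three with the mtx-t- prefix; a name that already carries the prefix is
+  -- used as is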
-- context namespace, just <filename>
fullname = resolvers.findfile(filename)
@@ -14963,9 +16302,11 @@ instance.lsrmode = environment.argument("lsr") or false
local is_mkii_stub = runners.registered[file.removesuffix(file.basename(filename))]
-if environment.argument("usekpse") or environment.argument("forcekpse") or is_mkii_stub then
+local e_argument = environment.argument
+
+if e_argument("usekpse") or e_argument("forcekpse") or is_mkii_stub then
- resolvers.load_tree(environment.argument('tree'),true) -- force resolve of TEXMFCNF
+ resolvers.load_tree(e_argument('tree'),true) -- force resolve of TEXMFCNF
os.setenv("engine","")
os.setenv("progname","")
@@ -14978,7 +16319,7 @@ if environment.argument("usekpse") or environment.argument("forcekpse") or is_mk
other = "other text files",
}
- local progname = environment.argument("progname") or 'context'
+ local progname = e_argument("progname") or 'context'
local function kpse_initialized()
texconfig.kpse_init = true
@@ -14993,7 +16334,7 @@ if environment.argument("usekpse") or environment.argument("forcekpse") or is_mk
local findfile = resolvers.findfile
local showpath = resolvers.showpath
- if environment.argument("forcekpse") then
+ if e_argument("forcekpse") then
function resolvers.findfile(name,kind)
return (kpse_initialized():find_file(resolvers.cleanpath(name),(kind ~= "" and (remapper[kind] or kind)) or "tex") or "") or ""
@@ -15002,7 +16343,7 @@ if environment.argument("usekpse") or environment.argument("forcekpse") or is_mk
return (kpse_initialized():show_path(name)) or ""
end
- elseif environment.argument("usekpse") or is_mkii_stub then
+ elseif e_argument("usekpse") or is_mkii_stub then
resolvers.load()
@@ -15041,12 +16382,12 @@ else
end
end
- resolvers.load_tree(environment.argument('tree'))
+ resolvers.load_tree(e_argument('tree'),e_argument("resolve"))
end
-if environment.argument("selfmerge") then
+if e_argument("selfmerge") then
-- embed used libraries
@@ -15056,27 +16397,27 @@ if environment.argument("selfmerge") then
utilities.merger.selfmerge(own.name,own.libs,{ found })
end
-elseif environment.argument("selfclean") then
+elseif e_argument("selfclean") then
-- remove embedded libraries
runners.loadbase()
utilities.merger.selfclean(own.name)
-elseif environment.argument("selfupdate") then
+elseif e_argument("selfupdate") then
runners.loadbase()
trackers.enable("resolvers.locating")
resolvers.updatescript(own.name,"mtxrun")
-elseif environment.argument("ctxlua") or environment.argument("internal") then
+elseif e_argument("ctxlua") or e_argument("internal") then
-- run a script by loading it (using libs)
runners.loadbase()
ok = runners.execute_script(filename,true)
-elseif environment.argument("script") or environment.argument("scripts") then
+elseif e_argument("script") or e_argument("scripts") then
-- run a script by loading it (using libs), pass args
@@ -15087,109 +16428,110 @@ elseif environment.argument("script") or environment.argument("scripts") then
ok = runners.execute_ctx_script(filename)
end
-elseif environment.argument("execute") then
+elseif e_argument("execute") then
-- execute script
runners.loadbase()
ok = runners.execute_script(filename)
-elseif environment.argument("direct") then
+elseif e_argument("direct") then
-- equals bin:
runners.loadbase()
ok = runners.execute_program(filename)
-elseif environment.argument("edit") then
+elseif e_argument("edit") then
-- edit file
runners.loadbase()
runners.edit_script(filename)
-elseif environment.argument("launch") then
+elseif e_argument("launch") then
runners.loadbase()
runners.launch_file(filename)
-elseif environment.argument("makestubs") then
+elseif e_argument("makestubs") then
    -- make stubs (deprecated)
runners.handle_stubs(true)
-elseif environment.argument("removestubs") then
+elseif e_argument("removestubs") then
    -- remove stubs (deprecated)
runners.loadbase()
runners.handle_stubs(false)
-elseif environment.argument("resolve") then
+elseif e_argument("resolve") then
-- resolve string
runners.loadbase()
runners.resolve_string(filename)
-elseif environment.argument("locate") then
+elseif e_argument("locate") then
- -- locate file
+ -- locate file (only database)
runners.loadbase()
runners.locate_file(filename)
-elseif environment.argument("platform") or environment.argument("show-platform") then
+elseif e_argument("platform") or e_argument("show-platform") then
-- locate platform
runners.loadbase()
runners.locate_platform()
-elseif environment.argument("prefixes") then
+elseif e_argument("prefixes") then
runners.loadbase()
runners.prefixes()
-elseif environment.argument("timedrun") then
+elseif e_argument("timedrun") then
-- locate platform
runners.loadbase()
runners.timedrun(filename)
-elseif environment.argument("variables") or environment.argument("show-variables") or environment.argument("expansions") or environment.argument("show-expansions") then
+elseif e_argument("variables") or e_argument("show-variables") or e_argument("expansions") or e_argument("show-expansions") then
-- luatools: runners.execute_ctx_script("mtx-base","--expansions",filename)
resolvers.load("nofiles")
- resolvers.listers.variables(environment.argument("pattern"))
+ resolvers.listers.variables(e_argument("pattern"))
-elseif environment.argument("configurations") or environment.argument("show-configurations") then
+elseif e_argument("configurations") or e_argument("show-configurations") then
-- luatools: runners.execute_ctx_script("mtx-base","--configurations",filename)
resolvers.load("nofiles")
resolvers.listers.configurations()
-elseif environment.argument("find-file") then
+elseif e_argument("find-file") then
-- luatools: runners.execute_ctx_script("mtx-base","--find-file",filename)
resolvers.load()
- local e_pattern = environment.argument("pattern")
- local e_format = environment.argument("format")
+ local e_all = e_argument("all")
+ local e_pattern = e_argument("pattern")
+ local e_format = e_argument("format")
+ local finder = e_all and resolvers.findfiles or resolvers.findfile
if not e_pattern then
runners.register_arguments(filename)
environment.initializearguments(environment.arguments_after)
- resolvers.dowithfilesandreport(resolvers.findfiles,environment.files,e_format)
+ resolvers.dowithfilesandreport(finder,environment.files,e_format)
elseif type(e_pattern) == "string" then
- instance.allresults = true -- brrrr
- resolvers.dowithfilesandreport(resolvers.findfiles,{ e_pattern }, e_format)
+ resolvers.dowithfilesandreport(finder,{ e_pattern },e_format)
end
-elseif environment.argument("find-path") then
+elseif e_argument("find-path") then
-- luatools: runners.execute_ctx_script("mtx-base","--find-path",filename)
@@ -15201,7 +16543,7 @@ elseif environment.argument("find-path") then
print(path)
end
-elseif environment.argument("expand-braces") then
+elseif e_argument("expand-braces") then
-- luatools: runners.execute_ctx_script("mtx-base","--expand-braces",filename)
@@ -15210,7 +16552,7 @@ elseif environment.argument("expand-braces") then
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.expandbraces, environment.files)
-elseif environment.argument("expand-path") then
+elseif e_argument("expand-path") then
-- luatools: runners.execute_ctx_script("mtx-base","--expand-path",filename)
@@ -15219,7 +16561,7 @@ elseif environment.argument("expand-path") then
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.expandpath, environment.files)
-elseif environment.argument("expand-var") or environment.argument("expand-variable") then
+elseif e_argument("expand-var") or e_argument("expand-variable") then
-- luatools: runners.execute_ctx_script("mtx-base","--expand-var",filename)
@@ -15228,7 +16570,7 @@ elseif environment.argument("expand-var") or environment.argument("expand-variab
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.expansion, environment.files)
-elseif environment.argument("show-path") or environment.argument("path-value") then
+elseif e_argument("show-path") or e_argument("path-value") then
-- luatools: runners.execute_ctx_script("mtx-base","--show-path",filename)
@@ -15237,7 +16579,7 @@ elseif environment.argument("show-path") or environment.argument("path-value") t
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.showpath, environment.files)
-elseif environment.argument("var-value") or environment.argument("show-value") then
+elseif e_argument("var-value") or e_argument("show-value") then
-- luatools: runners.execute_ctx_script("mtx-base","--show-value",filename)
@@ -15246,28 +16588,36 @@ elseif environment.argument("var-value") or environment.argument("show-value") t
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.variable,environment.files)
-elseif environment.argument("format-path") then
+elseif e_argument("format-path") then
-- luatools: runners.execute_ctx_script("mtx-base","--format-path",filename)
resolvers.load()
report(caches.getwritablepath("format"))
-elseif environment.argument("pattern") then
+elseif e_argument("pattern") then
-- luatools
- runners.execute_ctx_script("mtx-base","--pattern='" .. environment.argument("pattern") .. "'",filename)
+ runners.execute_ctx_script("mtx-base","--pattern='" .. e_argument("pattern") .. "'",filename)
-elseif environment.argument("generate") then
+elseif e_argument("generate") then
-- luatools
- instance.renewcache = true
- trackers.enable("resolvers.locating")
- resolvers.load()
+ if filename and filename ~= "" then
+ resolvers.load("nofiles")
+ trackers.enable("resolvers.locating")
+ resolvers.renew(filename)
+ else
+ instance.renewcache = true
+ trackers.enable("resolvers.locating")
+ resolvers.load()
+ end
-elseif environment.argument("make") or environment.argument("ini") or environment.argument("compile") then
+ e_verbose = true
+
+elseif e_argument("make") or e_argument("ini") or e_argument("compile") then
-- luatools: runners.execute_ctx_script("mtx-base","--make",filename)
@@ -15275,29 +16625,33 @@ elseif environment.argument("make") or environment.argument("ini") or environmen
trackers.enable("resolvers.locating")
environment.make_format(filename)
-elseif environment.argument("run") then
+elseif e_argument("run") then
-- luatools
runners.execute_ctx_script("mtx-base","--run",filename)
-elseif environment.argument("fmt") then
+elseif e_argument("fmt") then
-- luatools
runners.execute_ctx_script("mtx-base","--fmt",filename)
-elseif environment.argument("help") and filename=='base' then
+elseif e_argument("help") and filename=='base' then
-- luatools
runners.execute_ctx_script("mtx-base","--help")
-elseif environment.argument("help") or filename=='help' or filename == "" then
+elseif e_argument("version") then
+
+ application.version()
+
+elseif e_argument("help") or filename=='help' or filename == "" then
application.help()
-elseif filename:find("^bin:") then
+elseif find(filename,"^bin:") then
runners.loadbase()
ok = runners.execute_program(filename)
diff --git a/Master/bin/powerpc-linux/mtxrun b/Master/bin/powerpc-linux/mtxrun
index 7adcd3023a8..335c4fcb9e0 100755
--- a/Master/bin/powerpc-linux/mtxrun
+++ b/Master/bin/powerpc-linux/mtxrun
@@ -160,509 +160,6 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['l-lpeg'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local lpeg = require("lpeg")
-
-local type = type
-
--- Beware, we predefine a bunch of patterns here and one reason for doing so
--- is that we get consistent behaviour in some of the visualizers.
-
-lpeg.patterns = lpeg.patterns or { } -- so that we can share
-local patterns = lpeg.patterns
-
-local P, R, S, V, match = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.match
-local Ct, C, Cs, Cc = lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc
-local lpegtype = lpeg.type
-
-local utfcharacters = string.utfcharacters
-local utfgmatch = unicode and unicode.utf8.gmatch
-
-local anything = P(1)
-local endofstring = P(-1)
-local alwaysmatched = P(true)
-
-patterns.anything = anything
-patterns.endofstring = endofstring
-patterns.beginofstring = alwaysmatched
-patterns.alwaysmatched = alwaysmatched
-
-local digit, sign = R('09'), S('+-')
-local cr, lf, crlf = P("\r"), P("\n"), P("\r\n")
-local newline = crlf + cr + lf
-local escaped = P("\\") * anything
-local squote = P("'")
-local dquote = P('"')
-local space = P(" ")
-
-local utfbom_32_be = P('\000\000\254\255')
-local utfbom_32_le = P('\255\254\000\000')
-local utfbom_16_be = P('\255\254')
-local utfbom_16_le = P('\254\255')
-local utfbom_8 = P('\239\187\191')
-local utfbom = utfbom_32_be + utfbom_32_le
- + utfbom_16_be + utfbom_16_le
- + utfbom_8
-local utftype = utfbom_32_be / "utf-32-be" + utfbom_32_le / "utf-32-le"
- + utfbom_16_be / "utf-16-be" + utfbom_16_le / "utf-16-le"
- + utfbom_8 / "utf-8" + alwaysmatched / "unknown"
-
-local utf8next = R("\128\191")
-
-patterns.utf8one = R("\000\127")
-patterns.utf8two = R("\194\223") * utf8next
-patterns.utf8three = R("\224\239") * utf8next * utf8next
-patterns.utf8four = R("\240\244") * utf8next * utf8next * utf8next
-patterns.utfbom = utfbom
-patterns.utftype = utftype
-
-local utf8char = patterns.utf8one + patterns.utf8two + patterns.utf8three + patterns.utf8four
-local validutf8char = utf8char^0 * endofstring * Cc(true) + Cc(false)
-
-patterns.utf8 = utf8char
-patterns.utf8char = utf8char
-patterns.validutf8 = validutf8char
-patterns.validutf8char = validutf8char
-
-patterns.digit = digit
-patterns.sign = sign
-patterns.cardinal = sign^0 * digit^1
-patterns.integer = sign^0 * digit^1
-patterns.float = sign^0 * digit^0 * P('.') * digit^1
-patterns.cfloat = sign^0 * digit^0 * P(',') * digit^1
-patterns.number = patterns.float + patterns.integer
-patterns.cnumber = patterns.cfloat + patterns.integer
-patterns.oct = P("0") * R("07")^1
-patterns.octal = patterns.oct
-patterns.HEX = P("0x") * R("09","AF")^1
-patterns.hex = P("0x") * R("09","af")^1
-patterns.hexadecimal = P("0x") * R("09","AF","af")^1
-patterns.lowercase = R("az")
-patterns.uppercase = R("AZ")
-patterns.letter = patterns.lowercase + patterns.uppercase
-patterns.space = space
-patterns.tab = P("\t")
-patterns.spaceortab = patterns.space + patterns.tab
-patterns.eol = S("\n\r")
-patterns.spacer = S(" \t\f\v") -- + string.char(0xc2, 0xa0) if we want utf (cf mail roberto)
-patterns.newline = newline
-patterns.emptyline = newline^1
-patterns.nonspacer = 1 - patterns.spacer
-patterns.whitespace = patterns.eol + patterns.spacer
-patterns.nonwhitespace = 1 - patterns.whitespace
-patterns.equal = P("=")
-patterns.comma = P(",")
-patterns.commaspacer = P(",") * patterns.spacer^0
-patterns.period = P(".")
-patterns.colon = P(":")
-patterns.semicolon = P(";")
-patterns.underscore = P("_")
-patterns.escaped = escaped
-patterns.squote = squote
-patterns.dquote = dquote
-patterns.nosquote = (escaped + (1-squote))^0
-patterns.nodquote = (escaped + (1-dquote))^0
-patterns.unsingle = (squote/"") * patterns.nosquote * (squote/"")
-patterns.undouble = (dquote/"") * patterns.nodquote * (dquote/"")
-patterns.unquoted = patterns.undouble + patterns.unsingle -- more often undouble
-patterns.unspacer = ((patterns.spacer^1)/"")^0
-
-patterns.somecontent = (anything - newline - space)^1 -- (utf8char - newline - space)^1
-patterns.beginline = #(1-newline)
-
-local unquoted = Cs(patterns.unquoted * endofstring) -- not C
-
-function string.unquoted(str)
- return match(unquoted,str) or str
-end
-
-
-function lpeg.anywhere(pattern) --slightly adapted from website
- return P { P(pattern) + 1 * V(1) } -- why so complex?
-end
-
-function lpeg.splitter(pattern, action)
- return (((1-P(pattern))^1)/action+1)^0
-end
-
-local splitters_s, splitters_m = { }, { }
-
-local function splitat(separator,single)
- local splitter = (single and splitters_s[separator]) or splitters_m[separator]
- if not splitter then
- separator = P(separator)
- local other = C((1 - separator)^0)
- if single then
- local any = anything
- splitter = other * (separator * C(any^0) + "") -- ?
- splitters_s[separator] = splitter
- else
- splitter = other * (separator * other)^0
- splitters_m[separator] = splitter
- end
- end
- return splitter
-end
-
-lpeg.splitat = splitat
-
-
-local cache = { }
-
-function lpeg.split(separator,str)
- local c = cache[separator]
- if not c then
- c = Ct(splitat(separator))
- cache[separator] = c
- end
- return match(c,str)
-end
-
-function string.split(str,separator)
- local c = cache[separator]
- if not c then
- c = Ct(splitat(separator))
- cache[separator] = c
- end
- return match(c,str)
-end
-
-local spacing = patterns.spacer^0 * newline -- sort of strip
-local empty = spacing * Cc("")
-local nonempty = Cs((1-spacing)^1) * spacing^-1
-local content = (empty + nonempty)^1
-
-patterns.textline = content
-
-
-local linesplitter = Ct(splitat(newline))
-
-patterns.linesplitter = linesplitter
-
-function string.splitlines(str)
- return match(linesplitter,str)
-end
-
-local utflinesplitter = utfbom^-1 * Ct(splitat(newline))
-
-patterns.utflinesplitter = utflinesplitter
-
-function string.utfsplitlines(str)
- return match(utflinesplitter,str)
-end
-
-
-local cache = { }
-
-function lpeg.checkedsplit(separator,str)
- local c = cache[separator]
- if not c then
- separator = P(separator)
- local other = C((1 - separator)^1)
- c = Ct(separator^0 * other * (separator^1 * other)^0)
- cache[separator] = c
- end
- return match(c,str)
-end
-
-function string.checkedsplit(str,separator)
- local c = cache[separator]
- if not c then
- separator = P(separator)
- local other = C((1 - separator)^1)
- c = Ct(separator^0 * other * (separator^1 * other)^0)
- cache[separator] = c
- end
- return match(c,str)
-end
-
-
-local f1 = string.byte
-
-local function f2(s) local c1, c2 = f1(s,1,2) return c1 * 64 + c2 - 12416 end
-local function f3(s) local c1, c2, c3 = f1(s,1,3) return (c1 * 64 + c2) * 64 + c3 - 925824 end
-local function f4(s) local c1, c2, c3, c4 = f1(s,1,4) return ((c1 * 64 + c2) * 64 + c3) * 64 + c4 - 63447168 end
-
-local utf8byte = patterns.utf8one/f1 + patterns.utf8two/f2 + patterns.utf8three/f3 + patterns.utf8four/f4
-
-patterns.utf8byte = utf8byte
-
-
-
-local cache = { }
-
-function lpeg.stripper(str)
- if type(str) == "string" then
- local s = cache[str]
- if not s then
- s = Cs(((S(str)^1)/"" + 1)^0)
- cache[str] = s
- end
- return s
- else
- return Cs(((str^1)/"" + 1)^0)
- end
-end
-
-local cache = { }
-
-function lpeg.keeper(str)
- if type(str) == "string" then
- local s = cache[str]
- if not s then
- s = Cs((((1-S(str))^1)/"" + 1)^0)
- cache[str] = s
- end
- return s
- else
- return Cs((((1-str)^1)/"" + 1)^0)
- end
-end
-
-function lpeg.frontstripper(str) -- or pattern (yet undocumented)
- return (P(str) + P(true)) * Cs(P(1)^0)
-end
-
-function lpeg.endstripper(str) -- or pattern (yet undocumented)
- return Cs((1 - P(str) * P(-1))^0)
-end
-
--- Just for fun I looked at the used bytecode and
--- p = (p and p + pp) or pp gets one more (testset).
-
-function lpeg.replacer(one,two)
- if type(one) == "table" then
- local no = #one
- if no > 0 then
- local p
- for i=1,no do
- local o = one[i]
- local pp = P(o[1]) / o[2]
- if p then
- p = p + pp
- else
- p = pp
- end
- end
- return Cs((p + 1)^0)
- end
- else
- two = two or ""
- return Cs((P(one)/two + 1)^0)
- end
-end
-
-local splitters_f, splitters_s = { }, { }
-
-function lpeg.firstofsplit(separator) -- always return value
- local splitter = splitters_f[separator]
- if not splitter then
- separator = P(separator)
- splitter = C((1 - separator)^0)
- splitters_f[separator] = splitter
- end
- return splitter
-end
-
-function lpeg.secondofsplit(separator) -- nil if not split
- local splitter = splitters_s[separator]
- if not splitter then
- separator = P(separator)
- splitter = (1 - separator)^0 * separator * C(anything^0)
- splitters_s[separator] = splitter
- end
- return splitter
-end
-
-function lpeg.balancer(left,right)
- left, right = P(left), P(right)
- return P { left * ((1 - left - right) + V(1))^0 * right }
-end
-
-
-
-local nany = utf8char/""
-
-function lpeg.counter(pattern)
- pattern = Cs((P(pattern)/" " + nany)^0)
- return function(str)
- return #match(pattern,str)
- end
-end
-
-if utfgmatch then
-
- function lpeg.count(str,what) -- replaces string.count
- if type(what) == "string" then
- local n = 0
- for _ in utfgmatch(str,what) do
- n = n + 1
- end
- return n
- else -- 4 times slower but still faster than / function
- return #match(Cs((P(what)/" " + nany)^0),str)
- end
- end
-
-else
-
- local cache = { }
-
- function lpeg.count(str,what) -- replaces string.count
- if type(what) == "string" then
- local p = cache[what]
- if not p then
- p = Cs((P(what)/" " + nany)^0)
- cache[p] = p
- end
- return #match(p,str)
- else -- 4 times slower but still faster than / function
- return #match(Cs((P(what)/" " + nany)^0),str)
- end
- end
-
-end
-
-local patterns_escapes = { -- also defines in l-string
- ["%"] = "%%",
- ["."] = "%.",
- ["+"] = "%+", ["-"] = "%-", ["*"] = "%*",
- ["["] = "%[", ["]"] = "%]",
- ["("] = "%)", [")"] = "%)",
- -- ["{"] = "%{", ["}"] = "%}"
- -- ["^"] = "%^", ["$"] = "%$",
-}
-
-local simple_escapes = { -- also defines in l-string
- ["-"] = "%-",
- ["."] = "%.",
- ["?"] = ".",
- ["*"] = ".*",
-}
-
-local p = Cs((S("-.+*%()[]") / patterns_escapes + anything)^0)
-local s = Cs((S("-.+*%()[]") / simple_escapes + anything)^0)
-
-function string.escapedpattern(str,simple)
- return match(simple and s or p,str)
-end
-
--- utf extensies
-
-lpeg.UP = lpeg.P
-
-if utfcharacters then
-
- function lpeg.US(str)
- local p
- for uc in utfcharacters(str) do
- if p then
- p = p + P(uc)
- else
- p = P(uc)
- end
- end
- return p
- end
-
-
-elseif utfgmatch then
-
- function lpeg.US(str)
- local p
- for uc in utfgmatch(str,".") do
- if p then
- p = p + P(uc)
- else
- p = P(uc)
- end
- end
- return p
- end
-
-else
-
- function lpeg.US(str)
- local p
- local f = function(uc)
- if p then
- p = p + P(uc)
- else
- p = P(uc)
- end
- end
- match((utf8char/f)^0,str)
- return p
- end
-
-end
-
-local range = Cs(utf8byte) * (Cs(utf8byte) + Cc(false))
-
-local utfchar = unicode and unicode.utf8 and unicode.utf8.char
-
-function lpeg.UR(str,more)
- local first, last
- if type(str) == "number" then
- first = str
- last = more or first
- else
- first, last = match(range,str)
- if not last then
- return P(str)
- end
- end
- if first == last then
- return P(str)
- elseif utfchar and last - first < 8 then -- a somewhat arbitrary criterium
- local p
- for i=first,last do
- if p then
- p = p + P(utfchar(i))
- else
- p = P(utfchar(i))
- end
- end
- return p -- nil when invalid range
- else
- local f = function(b)
- return b >= first and b <= last
- end
- return utf8byte / f -- nil when invalid range
- end
-end
-
-
-
-function lpeg.oneof(list,...) -- lpeg.oneof("elseif","else","if","then")
- if type(list) ~= "table" then
- list = { list, ... }
- end
- -- sort(list) -- longest match first
- local p = P(list[1])
- for l=2,#list do
- p = p + P(list[l])
- end
- return p
-end
-
-function lpeg.is_lpeg(p)
- return p and lpegtype(p) == "pattern"
-end
-
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
if not modules then modules = { } end modules ['l-table'] = {
version = 1.001,
comment = "companion to luat-lib.mkiv",
@@ -675,11 +172,12 @@ local type, next, tostring, tonumber, ipairs, table, string = type, next, tostri
local concat, sort, insert, remove = table.concat, table.sort, table.insert, table.remove
local format, find, gsub, lower, dump, match = string.format, string.find, string.gsub, string.lower, string.dump, string.match
local getmetatable, setmetatable = getmetatable, setmetatable
+local getinfo = debug.getinfo
-- Starting with version 5.2 Lua no longer provides ipairs, which makes
-- sense. As we already used the for loop and # in most places the
-- impact on ConTeXt was not that large; the remaining ipairs already
--- have been replaced. In a similar fashio we also hardly used pairs.
+-- have been replaced. In a similar fashion we also hardly used pairs.
--
-- Just in case, we provide the fallbacks as discussed in Programming
-- in Lua (http://www.lua.org/pil/7.3.html):
@@ -1022,6 +520,8 @@ end
-- problem: there is no good number_to_string converter with the best resolution
+local function dummy() end
+
local function do_serialize(root,name,depth,level,indexed)
if level > 0 then
depth = depth .. " "
@@ -1214,19 +714,20 @@ local function do_serialize(root,name,depth,level,indexed)
end
elseif t == "function" then
if functions then
+ local f = getinfo(v).what == "C" and dump(dummy) or dump(v)
+ -- local f = getinfo(v).what == "C" and dump(function(...) return v(...) end) or dump(v)
if tk == "number" then -- or find(k,"^%d+$") then
if hexify then
- handle(format("%s [0x%04X]=loadstring(%q),",depth,k,dump(v)))
+ handle(format("%s [0x%04X]=loadstring(%q),",depth,k,f))
else
- handle(format("%s [%s]=loadstring(%q),",depth,k,dump(v)))
+ handle(format("%s [%s]=loadstring(%q),",depth,k,f))
end
elseif tk == "boolean" then
- handle(format("%s [%s]=loadstring(%q),",depth,tostring(k),dump(v)))
+ handle(format("%s [%s]=loadstring(%q),",depth,tostring(k),f))
elseif noquotes and not reserved[k] and find(k,"^%a[%w%_]*$") then
- handle(format("%s %s=loadstring(%q),",depth,k,dump(v)))
+ handle(format("%s %s=loadstring(%q),",depth,k,f))
else
- -- handle(format("%s [%q]=loadstring(%q),",depth,k,dump(v)))
- handle(format("%s [%q]=loadstring(%q),",depth,k,debug.getinfo(v).what == "C" and "C code" or dump(v)))
+ handle(format("%s [%q]=loadstring(%q),",depth,k,f))
end
end
else
@@ -1571,6 +1072,730 @@ function table.has_one_entry(t)
return t and not next(t,next(t))
end
+-- new
+
+function table.loweredkeys(t) -- maybe utf
+ local l = { }
+ for k, v in next, t do
+ l[lower(k)] = v
+ end
+ return l
+end
+
+-- new, might move (maybe duplicate)
+
+function table.unique(old)
+ local hash = { }
+ local new = { }
+ local n = 0
+ for i=1,#old do
+ local oi = old[i]
+ if not hash[oi] then
+ n = n + 1
+ new[n] = oi
+ hash[oi] = true
+ end
+ end
+ return new
+end
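+
+-- a minimal usage sketch of the two helpers above:
+--
+--   table.unique { "a", "b", "a", "c" }    -- { "a", "b", "c" }
+--   table.loweredkeys { Foo = 1, Bar = 2 } -- { foo = 1, bar = 2 }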
+
+-- function table.sorted(t,...)
+-- table.sort(t,...)
+-- return t -- still sorts in-place
+-- end
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+if not modules then modules = { } end modules ['l-lpeg'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+
+-- a new lpeg fails on a #(1-P(":")) test and really needs a + P(-1)
+
+local lpeg = require("lpeg")
+
+-- tracing (only used when we encounter a problem in integration of lpeg in luatex)
+
+local report = texio and texio.write_nl or print
+
+
+
+
+local type = type
+local byte, char, gmatch = string.byte, string.char, string.gmatch
+
+-- Beware, we predefine a bunch of patterns here and one reason for doing so
+-- is that we get consistent behaviour in some of the visualizers.
+
+lpeg.patterns = lpeg.patterns or { } -- so that we can share
+local patterns = lpeg.patterns
+
+local P, R, S, V, match = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.match
+local Ct, C, Cs, Cc = lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc
+local lpegtype = lpeg.type
+
+local utfcharacters = string.utfcharacters
+local utfgmatch = unicode and unicode.utf8.gmatch
+
+local anything = P(1)
+local endofstring = P(-1)
+local alwaysmatched = P(true)
+
+patterns.anything = anything
+patterns.endofstring = endofstring
+patterns.beginofstring = alwaysmatched
+patterns.alwaysmatched = alwaysmatched
+
+local digit, sign = R('09'), S('+-')
+local cr, lf, crlf = P("\r"), P("\n"), P("\r\n")
+local newline = crlf + S("\r\n") -- cr + lf
+local escaped = P("\\") * anything
+local squote = P("'")
+local dquote = P('"')
+local space = P(" ")
+
+local utfbom_32_be = P('\000\000\254\255')
+local utfbom_32_le = P('\255\254\000\000')
+local utfbom_16_be = P('\255\254')
+local utfbom_16_le = P('\254\255')
+local utfbom_8 = P('\239\187\191')
+local utfbom = utfbom_32_be + utfbom_32_le
+ + utfbom_16_be + utfbom_16_le
+ + utfbom_8
+local utftype = utfbom_32_be / "utf-32-be" + utfbom_32_le / "utf-32-le"
+ + utfbom_16_be / "utf-16-be" + utfbom_16_le / "utf-16-le"
+ + utfbom_8 / "utf-8" + alwaysmatched / "unknown"
+
+local utf8next = R("\128\191")
+
+patterns.utf8one = R("\000\127")
+patterns.utf8two = R("\194\223") * utf8next
+patterns.utf8three = R("\224\239") * utf8next * utf8next
+patterns.utf8four = R("\240\244") * utf8next * utf8next * utf8next
+patterns.utfbom = utfbom
+patterns.utftype = utftype
+
+local utf8char = patterns.utf8one + patterns.utf8two + patterns.utf8three + patterns.utf8four
+local validutf8char = utf8char^0 * endofstring * Cc(true) + Cc(false)
+
+patterns.utf8 = utf8char
+patterns.utf8char = utf8char
+patterns.validutf8 = validutf8char
+patterns.validutf8char = validutf8char
+
+patterns.digit = digit
+patterns.sign = sign
+patterns.cardinal = sign^0 * digit^1
+patterns.integer = sign^0 * digit^1
+patterns.float = sign^0 * digit^0 * P('.') * digit^1
+patterns.cfloat = sign^0 * digit^0 * P(',') * digit^1
+patterns.number = patterns.float + patterns.integer
+patterns.cnumber = patterns.cfloat + patterns.integer
+patterns.oct = P("0") * R("07")^1
+patterns.octal = patterns.oct
+patterns.HEX = P("0x") * R("09","AF")^1
+patterns.hex = P("0x") * R("09","af")^1
+patterns.hexadecimal = P("0x") * R("09","AF","af")^1
+patterns.lowercase = R("az")
+patterns.uppercase = R("AZ")
+patterns.letter = patterns.lowercase + patterns.uppercase
+patterns.space = space
+patterns.tab = P("\t")
+patterns.spaceortab = patterns.space + patterns.tab
+patterns.eol = S("\n\r")
+patterns.spacer = S(" \t\f\v") -- + char(0xc2, 0xa0) if we want utf (cf mail roberto)
+patterns.newline = newline
+patterns.emptyline = newline^1
+patterns.nonspacer = 1 - patterns.spacer
+patterns.whitespace = patterns.eol + patterns.spacer
+patterns.nonwhitespace = 1 - patterns.whitespace
+patterns.equal = P("=")
+patterns.comma = P(",")
+patterns.commaspacer = P(",") * patterns.spacer^0
+patterns.period = P(".")
+patterns.colon = P(":")
+patterns.semicolon = P(";")
+patterns.underscore = P("_")
+patterns.escaped = escaped
+patterns.squote = squote
+patterns.dquote = dquote
+patterns.nosquote = (escaped + (1-squote))^0
+patterns.nodquote = (escaped + (1-dquote))^0
+patterns.unsingle = (squote/"") * patterns.nosquote * (squote/"")
+patterns.undouble = (dquote/"") * patterns.nodquote * (dquote/"")
+patterns.unquoted = patterns.undouble + patterns.unsingle -- more often undouble
+patterns.unspacer = ((patterns.spacer^1)/"")^0
+
+patterns.somecontent = (anything - newline - space)^1 -- (utf8char - newline - space)^1
+patterns.beginline = #(1-newline)
+
+-- print(string.unquoted("test"))
+-- print(string.unquoted([["t\"est"]]))
+-- print(string.unquoted([["t\"est"x]]))
+-- print(string.unquoted("\'test\'"))
+-- print(string.unquoted('"test"'))
+-- print(string.unquoted('"test"'))
+
+function lpeg.anywhere(pattern) --slightly adapted from website
+ return P { P(pattern) + 1 * V(1) } -- why so complex?
+end
+
+function lpeg.splitter(pattern, action)
+ return (((1-P(pattern))^1)/action+1)^0
+end
+
+function lpeg.tsplitter(pattern, action)
+ return Ct((((1-P(pattern))^1)/action+1)^0)
+end
+
+-- problem: the separator can be an lpeg pattern and that does not hash too well,
+-- but it's quite okay as the key is then not garbage collected
+
+local splitters_s, splitters_m, splitters_t = { }, { }, { }
+
+local function splitat(separator,single)
+ local splitter = (single and splitters_s[separator]) or splitters_m[separator]
+ if not splitter then
+ separator = P(separator)
+ local other = C((1 - separator)^0)
+ if single then
+ local any = anything
+ splitter = other * (separator * C(any^0) + "") -- ?
+ splitters_s[separator] = splitter
+ else
+ splitter = other * (separator * other)^0
+ splitters_m[separator] = splitter
+ end
+ end
+ return splitter
+end
+
+local function tsplitat(separator)
+ local splitter = splitters_t[separator]
+ if not splitter then
+ splitter = Ct(splitat(separator))
+ splitters_t[separator] = splitter
+ end
+ return splitter
+end
+
+lpeg.splitat = splitat
+lpeg.tsplitat = tsplitat
+
+function string.splitup(str,separator)
+ if not separator then
+ separator = ","
+ end
+ return match(splitters_m[separator] or splitat(separator),str)
+end
+
+
+local cache = { }
+
+function lpeg.split(separator,str)
+ local c = cache[separator]
+ if not c then
+ c = tsplitat(separator)
+ cache[separator] = c
+ end
+ return match(c,str)
+end
+
+function string.split(str,separator)
+ local c = cache[separator]
+ if not c then
+ c = tsplitat(separator)
+ cache[separator] = c
+ end
+ return match(c,str)
+end
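+
+-- a minimal usage sketch of the splitters above:
+--
+--   local old, new = string.splitup("old.tex,new.tex")  -- "old.tex", "new.tex"
+--   local parts    = string.split("a,b,c",",")          -- { "a", "b", "c" }
+--   local same     = lpeg.split(",","a,b,c")            -- { "a", "b", "c" }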
+
+local spacing = patterns.spacer^0 * newline -- sort of strip
+local empty = spacing * Cc("")
+local nonempty = Cs((1-spacing)^1) * spacing^-1
+local content = (empty + nonempty)^1
+
+patterns.textline = content
+
+
+local linesplitter = tsplitat(newline)
+
+patterns.linesplitter = linesplitter
+
+function string.splitlines(str)
+ return match(linesplitter,str)
+end
+
+local utflinesplitter = utfbom^-1 * tsplitat(newline)
+
+patterns.utflinesplitter = utflinesplitter
+
+function string.utfsplitlines(str)
+ return match(utflinesplitter,str or "")
+end
+
+
+local cache = { }
+
+function lpeg.checkedsplit(separator,str)
+ local c = cache[separator]
+ if not c then
+ separator = P(separator)
+ local other = C((1 - separator)^1)
+ c = Ct(separator^0 * other * (separator^1 * other)^0)
+ cache[separator] = c
+ end
+ return match(c,str)
+end
+
+function string.checkedsplit(str,separator)
+ local c = cache[separator]
+ if not c then
+ separator = P(separator)
+ local other = C((1 - separator)^1)
+ c = Ct(separator^0 * other * (separator^1 * other)^0)
+ cache[separator] = c
+ end
+ return match(c,str)
+end
+
+
+local function f2(s) local c1, c2 = byte(s,1,2) return c1 * 64 + c2 - 12416 end
+local function f3(s) local c1, c2, c3 = byte(s,1,3) return (c1 * 64 + c2) * 64 + c3 - 925824 end
+local function f4(s) local c1, c2, c3, c4 = byte(s,1,4) return ((c1 * 64 + c2) * 64 + c3) * 64 + c4 - 63447168 end
+
+local utf8byte = patterns.utf8one/byte + patterns.utf8two/f2 + patterns.utf8three/f3 + patterns.utf8four/f4
+
+patterns.utf8byte = utf8byte
+
+
+
+local cache = { }
+
+function lpeg.stripper(str)
+ if type(str) == "string" then
+ local s = cache[str]
+ if not s then
+ s = Cs(((S(str)^1)/"" + 1)^0)
+ cache[str] = s
+ end
+ return s
+ else
+ return Cs(((str^1)/"" + 1)^0)
+ end
+end
+
+local cache = { }
+
+function lpeg.keeper(str)
+ if type(str) == "string" then
+ local s = cache[str]
+ if not s then
+ s = Cs((((1-S(str))^1)/"" + 1)^0)
+ cache[str] = s
+ end
+ return s
+ else
+ return Cs((((1-str)^1)/"" + 1)^0)
+ end
+end
+
+function lpeg.frontstripper(str) -- or pattern (yet undocumented)
+ return (P(str) + P(true)) * Cs(P(1)^0)
+end
+
+function lpeg.endstripper(str) -- or pattern (yet undocumented)
+ return Cs((1 - P(str) * P(-1))^0)
+end
+
+-- Just for fun I looked at the used bytecode and
+-- p = (p and p + pp) or pp gets one more (testset).
+
+function lpeg.replacer(one,two)
+ if type(one) == "table" then
+ local no = #one
+ if no > 0 then
+ local p
+ for i=1,no do
+ local o = one[i]
+ local pp = P(o[1]) / o[2]
+ if p then
+ p = p + pp
+ else
+ p = pp
+ end
+ end
+ return Cs((p + 1)^0)
+ end
+ else
+ two = two or ""
+ return Cs((P(one)/two + 1)^0)
+ end
+end
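+
+-- a minimal usage sketch, string and table variants:
+--
+--   lpeg.match(lpeg.replacer("a","b"),"banana")                               -- "bbnbnb"
+--   lpeg.match(lpeg.replacer { { "foo", "bar" }, { "baz", "qux" } },"foobaz") -- "barqux"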
+
+local splitters_f, splitters_s = { }, { }
+
+function lpeg.firstofsplit(separator) -- always return value
+ local splitter = splitters_f[separator]
+ if not splitter then
+ separator = P(separator)
+ splitter = C((1 - separator)^0)
+ splitters_f[separator] = splitter
+ end
+ return splitter
+end
+
+function lpeg.secondofsplit(separator) -- nil if not split
+ local splitter = splitters_s[separator]
+ if not splitter then
+ separator = P(separator)
+ splitter = (1 - separator)^0 * separator * C(anything^0)
+ splitters_s[separator] = splitter
+ end
+ return splitter
+end
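+
+-- a minimal usage sketch:
+--
+--   lpeg.match(lpeg.firstofsplit(":"),"foo:bar")  -- "foo"
+--   lpeg.match(lpeg.secondofsplit(":"),"foo:bar") -- "bar"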
+
+function lpeg.balancer(left,right)
+ left, right = P(left), P(right)
+ return P { left * ((1 - left - right) + V(1))^0 * right }
+end
+
+
+
+local nany = utf8char/""
+
+function lpeg.counter(pattern)
+ pattern = Cs((P(pattern)/" " + nany)^0)
+ return function(str)
+ return #match(pattern,str)
+ end
+end
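+
+-- a minimal usage sketch:
+--
+--   local howmany = lpeg.counter("na")
+--   print(howmany("banana")) -- 2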
+
+if utfgmatch then
+
+ function lpeg.count(str,what) -- replaces string.count
+ if type(what) == "string" then
+ local n = 0
+ for _ in utfgmatch(str,what) do
+ n = n + 1
+ end
+ return n
+ else -- 4 times slower but still faster than / function
+ return #match(Cs((P(what)/" " + nany)^0),str)
+ end
+ end
+
+else
+
+ local cache = { }
+
+ function lpeg.count(str,what) -- replaces string.count
+ if type(what) == "string" then
+ local p = cache[what]
+ if not p then
+ p = Cs((P(what)/" " + nany)^0)
+ cache[what] = p
+ end
+ return #match(p,str)
+ else -- 4 times slower but still faster than / function
+ return #match(Cs((P(what)/" " + nany)^0),str)
+ end
+ end
+
+end
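+
+-- usage sketch (illustrative): the string and the pattern variant give the
+-- same count
+--
+-- lpeg.count("banana","an") -- 2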
+
+local patterns_escapes = { -- also defined in l-string
+ ["%"] = "%%",
+ ["."] = "%.",
+ ["+"] = "%+", ["-"] = "%-", ["*"] = "%*",
+ ["["] = "%[", ["]"] = "%]",
+ ["("] = "%)", [")"] = "%)",
+ -- ["{"] = "%{", ["}"] = "%}"
+ -- ["^"] = "%^", ["$"] = "%$",
+}
+
+local simple_escapes = { -- also defined in l-string
+ ["-"] = "%-",
+ ["."] = "%.",
+ ["?"] = ".",
+ ["*"] = ".*",
+}
+
+local p = Cs((S("-.+*%()[]") / patterns_escapes + anything)^0)
+local s = Cs((S("-.+*%()[]") / simple_escapes + anything)^0)
+
+function string.escapedpattern(str,simple)
+ return match(simple and s or p,str)
+end
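+
+-- usage sketch (illustrative): magic characters are escaped, the simple
+-- variant also maps the wildcards ? and *
+--
+-- string.escapedpattern("a.b") -- "a%.b"
+-- string.escapedpattern("*.lua",true) -- ".*%.lua"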
+
+-- utf extensions
+
+lpeg.UP = lpeg.P
+
+if utfcharacters then
+
+ function lpeg.US(str)
+ local p
+ for uc in utfcharacters(str) do
+ if p then
+ p = p + P(uc)
+ else
+ p = P(uc)
+ end
+ end
+ return p
+ end
+
+
+elseif utfgmatch then
+
+ function lpeg.US(str)
+ local p
+ for uc in utfgmatch(str,".") do
+ if p then
+ p = p + P(uc)
+ else
+ p = P(uc)
+ end
+ end
+ return p
+ end
+
+else
+
+ function lpeg.US(str)
+ local p
+ local f = function(uc)
+ if p then
+ p = p + P(uc)
+ else
+ p = P(uc)
+ end
+ end
+ match((utf8char/f)^0,str)
+ return p
+ end
+
+end
+
+local range = Cs(utf8byte) * (Cs(utf8byte) + Cc(false))
+
+local utfchar = unicode and unicode.utf8 and unicode.utf8.char
+
+function lpeg.UR(str,more)
+ local first, last
+ if type(str) == "number" then
+ first = str
+ last = more or first
+ else
+ first, last = match(range,str)
+ if not last then
+ return P(str)
+ end
+ end
+ if first == last then
+ return P(str)
+ elseif utfchar and last - first < 8 then -- a somewhat arbitrary criterion
+ local p
+ for i=first,last do
+ if p then
+ p = p + P(utfchar(i))
+ else
+ p = P(utfchar(i))
+ end
+ end
+ return p -- nil when invalid range
+ else
+ local f = function(b)
+ return b >= first and b <= last
+ end
+ return utf8byte / f -- nil when invalid range
+ end
+end
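+
+-- usage sketch (illustrative, assuming utfchar is available): a range can be
+-- given as two code points
+--
+-- lpeg.match(lpeg.UR(0x41,0x44),"C") -- matches one character in A..D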
+
+
+
+function lpeg.oneof(list,...) -- lpeg.oneof("elseif","else","if","then")
+ if type(list) ~= "table" then
+ list = { list, ... }
+ end
+ -- sort(list) -- longest match first
+ local p = P(list[1])
+ for l=2,#list do
+ p = p + P(list[l])
+ end
+ return p
+end
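+
+-- usage sketch (illustrative): no sorting is done, so pass the longest first
+--
+-- lpeg.match(lpeg.oneof("elseif","else","if"),"elseif") -- 7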
+
+function lpeg.is_lpeg(p)
+ return p and lpegtype(p) == "pattern"
+end
+
+-- For the moment here, but it might move to utilities. Beware, we need to
+-- have the longest keyword first, so 'aaa' comes before 'aa', which is why we
+-- loop back from the end, i.e. prepend.
+
+local sort, fastcopy, sortedkeys = table.sort, table.fastcopy, table.sortedkeys -- dependency!
+
+function lpeg.append(list,pp,delayed,checked)
+ local p = pp
+ if #list > 0 then
+ local keys = fastcopy(list)
+ sort(keys)
+ for i=#keys,1,-1 do
+ local k = keys[i]
+ if p then
+ p = P(k) + p
+ else
+ p = P(k)
+ end
+ end
+ elseif delayed then -- hm, it looks like the lpeg parser resolves anyway
+ local keys = sortedkeys(list)
+ if p then
+ for i=1,#keys,1 do
+ local k = keys[i]
+ local v = list[k]
+ p = P(k)/list + p
+ end
+ else
+ for i=1,#keys do
+ local k = keys[i]
+ local v = list[k]
+ if p then
+ p = P(k) + p
+ else
+ p = P(k)
+ end
+ end
+ if p then
+ p = p / list
+ end
+ end
+ elseif checked then
+ -- problem: substitution gives a capture
+ local keys = sortedkeys(list)
+ for i=1,#keys do
+ local k = keys[i]
+ local v = list[k]
+ if p then
+ if k == v then
+ p = P(k) + p
+ else
+ p = P(k)/v + p
+ end
+ else
+ if k == v then
+ p = P(k)
+ else
+ p = P(k)/v
+ end
+ end
+ end
+ else
+ local keys = sortedkeys(list)
+ for i=1,#keys do
+ local k = keys[i]
+ local v = list[k]
+ if p then
+ p = P(k)/v + p
+ else
+ p = P(k)/v
+ end
+ end
+ end
+ return p
+end
+
+-- inspect(lpeg.append({ a = "1", aa = "1", aaa = "1" } ,nil,true))
+-- inspect(lpeg.append({ ["degree celsius"] = "1", celsius = "1", degree = "1" } ,nil,true))
+
+-- function lpeg.exact_match(words,case_insensitive)
+-- local pattern = concat(words)
+-- if case_insensitive then
+-- local pattern = S(upper(characters)) + S(lower(characters))
+-- local list = { }
+-- for i=1,#words do
+-- list[lower(words[i])] = true
+-- end
+-- return Cmt(pattern^1, function(_,i,s)
+-- return list[lower(s)] and i
+-- end)
+-- else
+-- local pattern = S(concat(words))
+-- local list = { }
+-- for i=1,#words do
+-- list[words[i]] = true
+-- end
+-- return Cmt(pattern^1, function(_,i,s)
+-- return list[s] and i
+-- end)
+-- end
+-- end
+
+-- experiment:
+
+local function make(t)
+ local p
+-- for k, v in next, t do
+ for k, v in table.sortedhash(t) do
+ if not p then
+ if next(v) then
+ p = P(k) * make(v)
+ else
+ p = P(k)
+ end
+ else
+ if next(v) then
+ p = p + P(k) * make(v)
+ else
+ p = p + P(k)
+ end
+ end
+ end
+ return p
+end
+
+function lpeg.utfchartabletopattern(list)
+ local tree = { }
+ for i=1,#list do
+ local t = tree
+ for c in gmatch(list[i],".") do
+ if not t[c] then
+ t[c] = { }
+ end
+ t = t[c]
+ end
+ end
+ return make(tree)
+end
+
+-- inspect ( lpeg.utfchartabletopattern {
+-- utfchar(0x00A0), -- nbsp
+-- utfchar(0x2000), -- enquad
+-- utfchar(0x2001), -- emquad
+-- utfchar(0x2002), -- enspace
+-- utfchar(0x2003), -- emspace
+-- utfchar(0x2004), -- threeperemspace
+-- utfchar(0x2005), -- fourperemspace
+-- utfchar(0x2006), -- sixperemspace
+-- utfchar(0x2007), -- figurespace
+-- utfchar(0x2008), -- punctuationspace
+-- utfchar(0x2009), -- breakablethinspace
+-- utfchar(0x200A), -- hairspace
+-- utfchar(0x200B), -- zerowidthspace
+-- utfchar(0x202F), -- narrownobreakspace
+-- utfchar(0x205F), -- math thinspace
+-- } )
+
end -- of closure
@@ -1831,6 +2056,11 @@ function io.readstring(f,n,m)
return str
end
+--
+
+if not io.i_limiter then function io.i_limiter() end end -- dummy so we can test safely
+if not io.o_limiter then function io.o_limiter() end end -- dummy so we can test safely
+
end -- of closure
@@ -2061,7 +2291,7 @@ if not modules then modules = { } end modules ['l-os'] = {
-- maybe build io.flush in os.execute
local os = os
-local find, format, gsub, upper = string.find, string.format, string.gsub, string.upper
+local find, format, gsub, upper, gmatch = string.find, string.format, string.gsub, string.upper, string.gmatch
local concat = table.concat
local random, ceil = math.random, math.ceil
local rawget, rawset, type, getmetatable, setmetatable, tonumber = rawget, rawset, type, getmetatable, setmetatable, tonumber
@@ -2167,9 +2397,9 @@ os.type = os.type or (io.pathseparator == ";" and "windows") or "unix"
os.name = os.name or (os.type == "windows" and "mswin" ) or "linux"
if os.type == "windows" then
- os.libsuffix, os.binsuffix = 'dll', 'exe'
+ os.libsuffix, os.binsuffix, os.binsuffixes = 'dll', 'exe', { 'exe', 'cmd', 'bat' }
else
- os.libsuffix, os.binsuffix = 'so', ''
+ os.libsuffix, os.binsuffix, os.binsuffixes = 'so', '', { '' }
end
function os.launch(str)
@@ -2400,6 +2630,39 @@ function os.timezone(delta)
end
end
+local memory = { }
+
+local function which(filename)
+ local fullname = memory[filename]
+ if fullname == nil then
+ local suffix = file.suffix(filename)
+ local suffixes = suffix == "" and os.binsuffixes or { suffix }
+ for directory in gmatch(os.getenv("PATH"),"[^" .. io.pathseparator .."]+") do
+ local df = file.join(directory,filename)
+ for i=1,#suffixes do
+ local dfs = file.addsuffix(df,suffixes[i])
+ if io.exists(dfs) then
+ fullname = dfs
+ break
+ end
+ end
+ if fullname then
+ break -- the first match in PATH order wins
+ end
+ end
+ if not fullname then
+ fullname = false
+ end
+ memory[filename] = fullname
+ end
+ return fullname
+end
+
+os.which = which
+os.where = which
+
+-- print(os.which("inkscape.exe"))
+-- print(os.which("inkscape"))
+-- print(os.which("gs.exe"))
+-- print(os.which("ps2pdf"))
+
end -- of closure
@@ -2499,10 +2762,10 @@ end
local trick_1 = char(1)
local trick_2 = "^" .. trick_1 .. "/+"
-function file.join(...)
+function file.join(...) -- rather dirty
local lst = { ... }
local a, b = lst[1], lst[2]
- if a == "" then
+ if not a or a == "" then -- not a added
lst[1] = trick_1
elseif b and find(a,"^/+$") and find(b,"^/") then
lst[1] = ""
@@ -2523,6 +2786,15 @@ function file.join(...)
end
+-- We should be able to use:
+--
+-- function file.is_writable(name)
+-- local a = attributes(name) or attributes(dirname(name,"."))
+-- return a and sub(a.permissions,2,2) == "w"
+-- end
+--
+-- But after some testing Taco and I came up with:
+
function file.is_writable(name)
if lfs.isdir(name) then
name = name .. "/m_t_x_t_e_s_t.tmp"
@@ -2532,12 +2804,17 @@ function file.is_writable(name)
os.remove(name)
return true
end
- else
- local existing = lfs.isfile(name)
- f = io.open(name,"ab")
+ elseif lfs.isfile(name) then
+ local f = io.open(name,"ab")
if f then
f:close()
- if not existing then os.remove(name) end
+ return true
+ end
+ else
+ local f = io.open(name,"ab")
+ if f then
+ f:close()
+ os.remove(name)
return true
end
end
@@ -2594,7 +2871,7 @@ function file.collapsepath(str,anchor)
if element == '.' then
-- do nothing
elseif element == '..' then
- local n = i -1
+ local n = i - 1
while n > 0 do
local element = oldelements[n]
if element ~= '..' and element ~= '.' then
@@ -2690,10 +2967,42 @@ local path = C(((1-slash)^0 * slash)^0)
local suffix = period * C(P(1-period)^0 * P(-1))
local base = C((1-suffix)^0)
-local pattern = (drive + Cc("")) * (path + Cc("")) * (base + Cc("")) * (suffix + Cc(""))
+drive = drive + Cc("")
+path = path + Cc("")
+base = base + Cc("")
+suffix = suffix + Cc("")
-function file.splitname(str) -- returns drive, path, base, suffix
- return lpegmatch(pattern,str)
+local pattern_a = drive * path * base * suffix
+local pattern_b = path * base * suffix
+local pattern_c = C(drive * path) * C(base * suffix)
+
+function file.splitname(str,splitdrive)
+ if splitdrive then
+ return lpegmatch(pattern_a,str) -- returns drive, path, base, suffix
+ else
+ return lpegmatch(pattern_b,str) -- returns path, base, suffix
+ end
+end
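+
+-- usage sketch (illustrative):
+--
+-- file.splitname("/foo/bar.tex") -- "/foo/", "bar", "tex"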
+
+function file.nametotable(str,splitdrive) -- returns table
+ local path, drive, subpath, name, base, suffix = lpegmatch(pattern_c,str)
+ if splitdrive then
+ return {
+ path = path,
+ drive = drive,
+ subpath = subpath,
+ name = name,
+ base = base,
+ suffix = suffix,
+ }
+ else
+ return {
+ path = path,
+ name = name,
+ base = base,
+ suffix = suffix,
+ }
+ end
end
-- function test(t) for k, v in next, t do print(v, "=>", file.splitname(v)) end end
@@ -2828,12 +3137,19 @@ local nothing = Cc("")
local escaped = (plus / " ") + (percent * C(hexdigit * hexdigit) / tochar)
-- we assume schemes with more than 1 character (in order to avoid problems with windows disks)
+-- we also assume that when we have a scheme, we also have an authority
+
+local schemestr = Cs((escaped+(1-colon-slash-qmark-hash))^2)
+local authoritystr = Cs((escaped+(1- slash-qmark-hash))^0)
+local pathstr = Cs((escaped+(1- qmark-hash))^0)
+local querystr = Cs((escaped+(1- hash))^0)
+local fragmentstr = Cs((escaped+(1- endofstring))^0)
-local scheme = Cs((escaped+(1-colon-slash-qmark-hash))^2) * colon * slash * slash + nothing
-local authority = Cs((escaped+(1- slash-qmark-hash))^0) + nothing
-local path = slash * Cs((escaped+(1- qmark-hash))^0) + nothing
-local query = qmark * Cs((escaped+(1- hash))^0) + nothing
-local fragment = hash * Cs((escaped+(1- endofstring))^0) + nothing
+local scheme = schemestr * colon + nothing
+local authority = slash * slash * authoritystr + nothing
+local path = slash * pathstr + nothing
+local query = qmark * querystr + nothing
+local fragment = hash * fragmentstr + nothing
local validurl = scheme * authority * path * query * fragment
local parser = Ct(validurl)
@@ -2854,11 +3170,14 @@ local function split(str)
return (type(str) == "string" and lpegmatch(parser,str)) or str
end
+local isscheme = schemestr * colon * slash * slash -- this test also assumes authority
+
local function hasscheme(str)
- local scheme = lpegmatch(scheme,str) -- at least one character
- return scheme and scheme ~= ""
+ local scheme = lpegmatch(isscheme,str) -- at least one character
+ return scheme ~= "" and scheme or false
end
+
-- todo: cache them
local rootletter = R("az","AZ")
@@ -3007,8 +3326,6 @@ local attributes = lfs.attributes
local walkdir = lfs.dir
local isdir = lfs.isdir
local isfile = lfs.isfile
-local mkdir = lfs.mkdir
-local chdir = lfs.chdir
local currentdir = lfs.currentdir
-- handy
@@ -3056,13 +3373,13 @@ local function collectpattern(path,patt,recurse,result)
local ok, scanner
result = result or { }
if path == "/" then
- ok, scanner = xpcall(function() return walkdir(path..".") end, function() end) -- kepler safe
+ ok, scanner, first = xpcall(function() return walkdir(path..".") end, function() end) -- kepler safe
else
- ok, scanner = xpcall(function() return walkdir(path) end, function() end) -- kepler safe
+ ok, scanner, first = xpcall(function() return walkdir(path) end, function() end) -- kepler safe
end
if ok and type(scanner) == "function" then
if not find(path,"/$") then path = path .. '/' end
- for name in scanner do
+ for name in scanner, first do
local full = path .. name
local attr = attributes(full)
local mode = attr.mode
@@ -3245,7 +3562,7 @@ if onwindows then
pth = pth .. "/" .. s
end
if make_indeed and not isdir(pth) then
- mkdir(pth)
+ lfs.mkdir(pth)
end
end
return pth, (isdir(pth) == true)
@@ -3277,7 +3594,7 @@ else
pth = pth .. "/" .. s
end
if make_indeed and not first and not isdir(pth) then
- mkdir(pth)
+ lfs.mkdir(pth)
end
end
else
@@ -3285,7 +3602,7 @@ else
for s in gmatch(str,"[^/]+") do
pth = pth .. "/" .. s
if make_indeed and not isdir(pth) then
- mkdir(pth)
+ lfs.mkdir(pth)
end
end
end
@@ -3313,10 +3630,10 @@ if onwindows then
first, last = match(str,"^([a-zA-Z]:)(.*)$")
if first and not find(last,"^/") then
local d = currentdir()
- if chdir(first) then
+ if lfs.chdir(first) then
first = dir.current()
end
- chdir(d)
+ lfs.chdir(d)
end
end
if not first then
@@ -3381,10 +3698,6 @@ local type, tonumber = type, tonumber
boolean = boolean or { }
local boolean = boolean
--- function boolean.tonumber(b)
--- return b and 1 or 0 -- test and test and return or return
--- end
-
function boolean.tonumber(b)
if b then return 1 else return 0 end -- test and return or return
end
@@ -3545,7 +3858,7 @@ local function utf16_to_utf8_be(t)
if right then
local now = 256*left + right
if more > 0 then
- now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000
+ now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000 -- the 0x10000 smells wrong
more = 0
r = r + 1
result[r] = utfchar(now)
@@ -3573,7 +3886,7 @@ local function utf16_to_utf8_le(t)
if right then
local now = 256*right + left
if more > 0 then
- now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000
+ now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000 -- the 0x10000 smells wrong
more = 0
r = r + 1
result[r] = utfchar(now)
@@ -3590,14 +3903,14 @@ local function utf16_to_utf8_le(t)
return t
end
-local function utf32_to_utf8_be(str)
+local function utf32_to_utf8_be(t)
if type(t) == "string" then
- t = utfsplitlines(str)
+ t = utfsplitlines(t)
end
local result = { } -- we reuse result
for i=1,#t do
local r, more = 0, -1
- for a,b in bytepairs(str) do
+ for a,b in bytepairs(t[i]) do
if a and b then
if more < 0 then
more = 256*256*256*a + 256*256*b
@@ -3612,17 +3925,17 @@ local function utf32_to_utf8_be(str)
end
t[i] = concat(result,"",1,r)
end
- return result
+ return t
end
-local function utf32_to_utf8_le(str)
+local function utf32_to_utf8_le(t)
if type(t) == "string" then
- t = utfsplitlines(str)
+ t = utfsplitlines(t)
end
local result = { } -- we reuse result
for i=1,#t do
local r, more = 0, -1
- for a,b in bytepairs(str) do
+ for a,b in bytepairs(t[i]) do
if a and b then
if more < 0 then
more = 256*b + a
@@ -3637,7 +3950,7 @@ local function utf32_to_utf8_le(str)
end
t[i] = concat(result,"",1,r)
end
- return result
+ return t
end
unicode.utf32_to_utf8_be = utf32_to_utf8_be
@@ -3706,15 +4019,67 @@ end
local lpegmatch = lpeg.match
-local utftype = lpeg.patterns.utftype
+local patterns = lpeg.patterns
+local utftype = patterns.utftype
function unicode.filetype(data)
return data and lpegmatch(utftype,data) or "unknown"
end
+local toentities = lpeg.Cs (
+ (
+ patterns.utf8one
+ + (
+ patterns.utf8two
+ + patterns.utf8three
+ + patterns.utf8four
+ ) / function(s) local b = utfbyte(s) if b < 127 then return s else return format("&#%X;",b) end end
+ )^0
+)
+
+patterns.toentities = toentities
+
+function utf.toentities(str)
+ return lpegmatch(toentities,str)
+end
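+
+-- usage sketch (illustrative, utf presumably being the unicode.utf8 table):
+-- ascii is kept, the rest becomes hex entities
+--
+-- utf.toentities("a§b") -- "a&#A7;b" (section sign, U+00A7)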
+
+local P, C, R, Cs = lpeg.P, lpeg.C, lpeg.R, lpeg.Cs
+
+local one = P(1)
+local two = C(1) * C(1)
+local four = C(R(utfchar(0xD8),utfchar(0xFF))) * C(1) * C(1) * C(1)
+
+local pattern = P("\254\255") * Cs( (
+ four / function(a,b,c,d)
+ local ab = 0xFF * byte(a) + byte(b)
+ local cd = 0xFF * byte(c) + byte(d)
+ return utfchar((ab-0xD800)*0x400 + (cd-0xDC00) + 0x10000)
+ end
+ + two / function(a,b)
+ return utfchar(byte(a)*256 + byte(b))
+ end
+ + one
+ )^1 )
+ + P("\255\254") * Cs( (
+ four / function(b,a,d,c)
+ local ab = 0xFF * byte(a) + byte(b)
+ local cd = 0xFF * byte(c) + byte(d)
+ return utfchar((ab-0xD800)*0x400 + (cd-0xDC00) + 0x10000)
+ end
+ + two / function(b,a)
+ return utfchar(byte(a)*256 + byte(b))
+ end
+ + one
+ )^1 )
+
+function string.toutf(s)
+ return lpegmatch(pattern,s) or s -- todo: utf32
+end
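+
+-- usage sketch (illustrative): utf-16 input with a bom becomes utf-8
+--
+-- string.toutf("\254\255\0\72\0\105") -- "Hi"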
+
+
end -- of closure
do -- create closure to overcome 200 locals limit
@@ -3750,8 +4115,8 @@ if not math.sind then
end
if not math.odd then
- function math.odd (n) return n % 2 == 0 end
- function math.even(n) return n % 2 ~= 0 end
+ function math.odd (n) return n % 2 ~= 0 end
+ function math.even(n) return n % 2 == 0 end
end
@@ -3771,9 +4136,10 @@ utilities = utilities or {}
utilities.tables = utilities.tables or { }
local tables = utilities.tables
-local format, gmatch = string.format, string.gmatch
+local format, gmatch, rep = string.format, string.gmatch, string.rep
local concat, insert, remove = table.concat, table.insert, table.remove
local setmetatable, getmetatable, tonumber, tostring = setmetatable, getmetatable, tonumber, tostring
+local type, next, rawset, tonumber = type, next, rawset, tonumber
function tables.definetable(target) -- defines undefined tables
local composed, t, n = nil, { }, 0
@@ -3789,14 +4155,31 @@ function tables.definetable(target) -- defines undefined tables
return concat(t,"\n")
end
-function tables.accesstable(target)
- local t = _G
+function tables.accesstable(target,root)
+ local t = root or _G
for name in gmatch(target,"([^%.]+)") do
t = t[name]
+ if not t then
+ return
+ end
end
return t
end
+function tables.migratetable(target,v,root)
+ local t = root or _G
+ local names = string.split(target,".")
+ for i=1,#names-1 do
+ local name = names[i]
+ t[name] = t[name] or { }
+ t = t[name]
+ if not t then
+ return
+ end
+ end
+ t[names[#names]] = v
+end
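+
+-- usage sketch (illustrative):
+--
+-- local t = { }
+-- utilities.tables.migratetable("a.b.c",123,t)
+-- utilities.tables.accesstable("a.b.c",t) -- 123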
+
function tables.removevalue(t,value) -- todo: n
if value then
for i=1,#t do
@@ -3840,13 +4223,19 @@ end
-- experimental
-local function toxml(t,d,result)
+local function toxml(t,d,result,step)
for k, v in table.sortedpairs(t) do
if type(v) == "table" then
- result[#result+1] = format("%s<%s>",d,k)
- toxml(v,d.." ",result)
- result[#result+1] = format("%s</%s>",d,k)
- elseif tonumber(k) then
+ if type(k) == "number" then
+ result[#result+1] = format("%s<entry n='%s'>",d,k)
+ toxml(v,d..step,result,step)
+ result[#result+1] = format("%s</entry>",d,k)
+ else
+ result[#result+1] = format("%s<%s>",d,k)
+ toxml(v,d..step,result,step)
+ result[#result+1] = format("%s</%s>",d,k)
+ end
+ elseif type(k) == "number" then
result[#result+1] = format("%s<entry n='%s'>%s</entry>",d,k,v,k)
else
result[#result+1] = format("%s<%s>%s</%s>",d,k,tostring(v),k)
@@ -3854,17 +4243,56 @@ local function toxml(t,d,result)
end
end
-function table.toxml(t,name,nobanner)
+function table.toxml(t,name,nobanner,indent,spaces)
local noroot = name == false
local result = (nobanner or noroot) and { } or { "<?xml version='1.0' standalone='yes' ?>" }
+ local indent = rep(" ",indent or 0)
+ local spaces = rep(" ",spaces or 1)
if noroot then
- toxml( t, "", result)
+ toxml( t, indent, result, spaces)
else
- toxml( { [name or "root"] = t }, "", result)
+ toxml( { [name or "root"] = t }, indent, result, spaces)
end
return concat(result,"\n")
end
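+
+-- usage sketch (illustrative):
+--
+-- table.toxml({ name = "foo" },"data",true)
+--
+-- <data>
+--  <name>foo</name>
+-- </data>
+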
+-- also experimental
+
+-- encapsulate(table,utilities.tables)
+-- encapsulate(table,utilities.tables,true)
+-- encapsulate(table,true)
+
+function tables.encapsulate(core,capsule,protect)
+ if type(capsule) ~= "table" then
+ protect = true
+ capsule = { }
+ end
+ for key, value in next, core do
+ if capsule[key] then
+ print(format("\ninvalid inheritance '%s' in '%s': %s",key,tostring(core)))
+ os.exit()
+ else
+ capsule[key] = value
+ end
+ end
+ if protect then
+ for key, value in next, core do
+ core[key] = nil
+ end
+ setmetatable(core, {
+ __index = capsule,
+ __newindex = function(t,key,value)
+ if capsule[key] then
+ print(format("\ninvalid overload '%s' in '%s'",key,tostring(core)))
+ os.exit()
+ else
+ rawset(t,key,value)
+ end
+ end
+ } )
+ end
+end
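+
+-- usage sketch (illustrative): the core keys move into the capsule and the
+-- core is protected against overloading
+--
+-- local core, capsule = { foo = 1 }, { }
+-- tables.encapsulate(core,capsule,true)
+-- print(core.foo) -- 1 (looked up via the capsule)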
+
end -- of closure
@@ -3886,8 +4314,8 @@ local storage = utilities.storage
function storage.mark(t)
if not t then
- texio.write_nl("fatal error: storage '%s' cannot be marked",t)
- os.exit()
+ texio.write_nl("fatal error: storage cannot be marked")
+ return -- os.exit()
end
local m = getmetatable(t)
if not m then
@@ -3916,8 +4344,8 @@ end
function storage.checked(t)
if not t then
- texio.write_nl("fatal error: storage '%s' has not been allocated",t)
- os.exit()
+ texio.write_nl("fatal error: storage has not been allocated")
+ return -- os.exit()
end
return t
end
@@ -3946,13 +4374,13 @@ end
-- table namespace ?
-local function f_empty () return "" end -- t,k
-local function f_self (t,k) t[k] = k return k end
-local function f_ignore() end -- t,k,v
+local function f_empty () return "" end -- t,k
+local function f_self (t,k) t[k] = k return k end
+local function f_ignore() end -- t,k,v
-local t_empty = { __index = empty }
-local t_self = { __index = self }
-local t_ignore = { __newindex = ignore }
+local t_empty = { __index = f_empty }
+local t_self = { __index = f_self }
+local t_ignore = { __newindex = f_ignore }
function table.setmetatableindex(t,f)
local m = getmetatable(t)
@@ -3973,6 +4401,7 @@ function table.setmetatableindex(t,f)
setmetatable(t,{ __index = f })
end
end
+ return t
end
function table.setmetatablenewindex(t,f)
@@ -3990,6 +4419,7 @@ function table.setmetatablenewindex(t,f)
setmetatable(t,{ __newindex = f })
end
end
+ return t
end
function table.setmetatablecall(t,f)
@@ -3999,6 +4429,7 @@ function table.setmetatablecall(t,f)
else
setmetatable(t,{ __call = f })
end
+ return t
end
function table.setmetatablekey(t,key,value)
@@ -4008,6 +4439,7 @@ function table.setmetatablekey(t,key,value)
setmetatable(t,m)
end
m[key] = value
+ return t
end
function table.getmetatablekey(t,key,value)
@@ -4176,14 +4608,29 @@ utilities = utilities or {}
utilities.lua = utilities.lua or { }
utilities.report = logs and logs.reporter("system") or print
-function utilities.lua.compile(luafile,lucfile,cleanup,strip) -- defaults: cleanup=false strip=true
+local function stupidcompile(luafile,lucfile)
+ local data = io.loaddata(luafile)
+ if data and data ~= "" then
+ local chunk = loadstring(data) -- string.dump expects a function, not source text
+ data = chunk and string.dump(chunk)
+ if data and data ~= "" then
+ io.savedata(lucfile,data)
+ end
+ end
+end
+
+function utilities.lua.compile(luafile,lucfile,cleanup,strip,fallback) -- defaults: cleanup=false strip=true
utilities.report("lua: compiling %s into %s",luafile,lucfile)
os.remove(lucfile)
local command = "-o " .. string.quoted(lucfile) .. " " .. string.quoted(luafile)
if strip ~= false then
command = "-s " .. command
end
- local done = os.spawn("texluac " .. command) == 0 or os.spawn("luac " .. command) == 0
+ local done = os.spawn("texluac " .. command) == 0 -- or os.spawn("luac " .. command) == 0
+ if not done and fallback then
+ utilities.report("lua: dumping %s into %s (unstripped)",luafile,lucfile)
+ stupidcompile(luafile,lucfile) -- maybe use the stripper we have elsewhere
+ cleanup = false -- better to see how much worse it is
+ end
if done and cleanup == true and lfs.isfile(lucfile) and lfs.isfile(luafile) then
utilities.report("lua: removing %s",luafile)
os.remove(luafile)
@@ -4210,9 +4657,9 @@ if not modules then modules = { } end modules ['util-prs'] = {
license = "see context related readme files"
}
-local P, R, V, C, Ct, Carg = lpeg.P, lpeg.R, lpeg.V, lpeg.C, lpeg.Ct, lpeg.Carg
+local P, R, V, C, Ct, Cs, Carg = lpeg.P, lpeg.R, lpeg.V, lpeg.C, lpeg.Ct, lpeg.Cs, lpeg.Carg
local lpegmatch = lpeg.match
-local concat, format, gmatch = table.concat, string.format, string.gmatch
+local concat, format, gmatch, find = table.concat, string.format, string.gmatch, string.find
local tostring, type, next = tostring, type, next
utilities = utilities or {}
@@ -4240,8 +4687,12 @@ local rbrace = P("}")
local nobrace = 1 - (lbrace+rbrace)
local nested = P { lbrace * (nobrace + V(1))^0 * rbrace }
local spaces = space^0
+local argument = Cs((lbrace/"") * ((nobrace + nested)^0) * (rbrace/""))
+local content = (1-P(-1))^0
-lpeg.patterns.nested = nested
+lpeg.patterns.nested = nested -- no capture
+lpeg.patterns.argument = argument -- argument after e.g. =
+lpeg.patterns.content = content -- rest after e.g =
local value = P(lbrace * C((nobrace + nested)^0) * rbrace) + C((nested + (1-comma))^0)
@@ -4322,9 +4773,15 @@ parsers.patterns.settings_to_array = pattern
-- we could use a weak table as cache
-function parsers.settings_to_array(str)
+function parsers.settings_to_array(str,strict)
if not str or str == "" then
return { }
+ elseif strict then
+ if find(str,"{") then
+ return lpegmatch(pattern,str)
+ else
+ return { str }
+ end
else
return lpegmatch(pattern,str)
end
@@ -4562,61 +5019,55 @@ local names = { }
-- one
local function hook()
- local f = getinfo(2,"f").func
- local n = getinfo(2,"Sn")
--- if n.what == "C" and n.name then print (n.namewhat .. ': ' .. n.name) end
+ local f = getinfo(2) -- "nS"
if f then
- local cf = counters[f]
- if cf == nil then
- counters[f] = 1
- names[f] = n
- else
- counters[f] = cf + 1
- end
- end
-end
-
-local function getname(func)
- local n = names[func]
- if n then
- if n.what == "C" then
- return n.name or '<anonymous>'
+ local n = "unknown"
+ if f.what == "C" then
+ n = f.name or '<anonymous>'
+ if not names[n] then
+ names[n] = format("%42s",n)
+ end
else
-- source short_src linedefined what name namewhat nups func
- local name = n.name or n.namewhat or n.what
- if not name or name == "" then name = "?" end
- return format("%s : %s : %s", n.short_src or "unknown source", n.linedefined or "--", name)
+ n = f.name or f.namewhat or f.what
+ if not n or n == "" then
+ n = "?"
+ end
+ if not names[n] then
+ names[n] = format("%42s : % 5i : %s",n,f.linedefined or 0,f.short_src or "unknown source")
+ end
end
- else
- return "unknown"
+ counters[n] = (counters[n] or 0) + 1
end
end
-function debugger.showstats(printer,threshold)
+function debugger.showstats(printer,threshold) -- hm, something has changed, this is rubbish now
printer = printer or texio.write or print
threshold = threshold or 0
local total, grandtotal, functions = 0, 0, 0
- printer("\n") -- ugly but ok
- -- table.sort(counters)
- for func, count in next, counters do
- if count > threshold then
- local name = getname(func)
- if not find(name,"for generator") then
- printer(format("%8i %s", count, name))
- total = total + count
- end
+ local dataset = { }
+ for name, count in next, counters do
+ dataset[#dataset+1] = { name, count }
+ end
+ table.sort(dataset,function(a,b) return a[2] == b[2] and b[1] > a[1] or a[2] > b[2] end)
+ for i=1,#dataset do
+ local d = dataset[i]
+ local name = d[1]
+ local count = d[2]
+ if count > threshold and not find(name,"for generator") then -- move up
+ printer(format("%8i %s\n", count, names[name]))
+ total = total + count
end
grandtotal = grandtotal + count
functions = functions + 1
end
- printer(format("functions: %s, total: %s, grand total: %s, threshold: %s\n", functions, total, grandtotal, threshold))
+ printer("\n")
+ printer(format("functions : % 10i\n", functions))
+ printer(format("total : % 10i\n", total))
+ printer(format("grand total: % 10i\n", grandtotal))
+ printer(format("threshold : % 10i\n", threshold))
end
--- two
-
-
--- rest
-
function debugger.savestats(filename,threshold)
local f = io.open(filename,'w')
if f then
@@ -4638,6 +5089,7 @@ end
local is_node = node and node.is_node
+local is_lpeg = lpeg and lpeg.type
function inspect(i) -- global function
local ti = type(i)
@@ -4645,11 +5097,30 @@ function inspect(i) -- global function
table.print(i,"table")
elseif is_node and is_node(i) then
table.print(nodes.astable(i),tostring(i))
+ elseif is_lpeg and is_lpeg(i) then
+ lpeg.print(i)
else
print(tostring(i))
end
end
+-- from the lua book:
+
+function traceback()
+ local level = 1
+ while true do
+ local info = debug.getinfo(level, "Sl")
+ if not info then
+ break
+ elseif info.what == "C" then
+ print(format("%3i : C function",level))
+ else
+ print(format("%3i : [%s]:%d",level,info.short_src,info.currentline))
+ end
+ level = level + 1
+ end
+end
+
end -- of closure
@@ -4668,7 +5139,7 @@ if not modules then modules = { } end modules ['trac-inf'] = {
-- get warnings about assignments. This is more efficient than using rawset
-- and rawget.
-local format = string.format
+local format, lower = string.format, string.lower
local clock = os.gettimeofday or os.clock -- should go in environment
local write_nl = texio.write_nl
@@ -4770,10 +5241,10 @@ function statistics.show(reporter)
-- this code will move
local register = statistics.register
register("luatex banner", function()
- return string.lower(status.banner)
+ return lower(status.banner)
end)
register("control sequences", function()
- return format("%s of %s", status.cs_count, status.hash_size+status.hash_extra)
+ return format("%s of %s + %s", status.cs_count, status.hash_size,status.hash_extra)
end)
register("callbacks", function()
local total, indirect = status.callbacks or 0, status.indirect_callbacks or 0
@@ -4847,7 +5318,7 @@ end
function commands.elapsedtime(name)
stoptiming(name or "whatever")
- tex.sprint(elapsedtime(name or "whatever"))
+ context(elapsedtime(name or "whatever"))
end
@@ -4931,6 +5402,10 @@ local function set(t,what,newvalue)
if type(what) ~= "table" then
return
end
+ if not done then -- catch ... why not set?
+ done = { }
+ t.done = done
+ end
for w, value in next, what do
if value == "" then
value = newvalue
@@ -5167,6 +5642,8 @@ end
if texconfig then
+ -- this happens too late in ini mode but that is no problem
+
local function set(k,v)
v = tonumber(v)
if v then
@@ -5237,7 +5714,7 @@ local report, subreport, status, settarget, setformats, settranslations
local direct, subdirect, writer, pushtarget, poptarget
-if tex and tex.jobname or tex.formatname then
+if tex and (tex.jobname or tex.formatname) then
local valueiskey = { __index = function(t,k) t[k] = k return k end } -- will be helper
@@ -5252,6 +5729,10 @@ if tex and tex.jobname or tex.formatname then
write_nl(target,...)
end
+ newline = function()
+ write_nl(target,"\n")
+ end
+
report = function(a,b,c,...)
if c then
write_nl(target,format("%-15s > %s\n",translations[a],format(formats[b],c,...)))
@@ -5357,6 +5838,10 @@ else
writer = write_nl
+ newline = function()
+ write_nl("\n")
+ end
+
report = function(a,b,c,...)
if c then
write_nl(format("%-15s | %s",a,format(b,c,...)))
@@ -5416,6 +5901,7 @@ logs.settranslations = settranslations
logs.direct = direct
logs.subdirect = subdirect
logs.writer = writer
+logs.newline = newline
-- installer
@@ -5585,21 +6071,58 @@ local real, user, sub
function logs.start_page_number()
real, user, sub = texcount.realpageno, texcount.userpageno, texcount.subpageno
+-- real, user, sub = 0, 0, 0
end
-function logs.stop_page_number()
- if real > 0 then
- if user > 0 then
- if sub > 0 then
- report_pages("flushing realpage %s, userpage %s, subpage %s",real,user,sub)
+local timing = false
+local starttime = nil
+local lasttime = nil
+
+trackers.register("pages.timing", function(v) -- only for myself (diagnostics)
+ starttime = os.clock()
+ timing = true
+end)
+
+function logs.stop_page_number() -- the first page can include the initialization so we omit it from the average
+ if timing then
+ local elapsed, average
+ local stoptime = os.clock()
+ if not lasttime or real < 2 then
+ elapsed = stoptime
+ average = stoptime
+ starttime = stoptime
+ else
+ elapsed = stoptime - lasttime
+ average = (stoptime - starttime) / (real - 1)
+ end
+ lasttime = stoptime
+ if real > 0 then
+ if user > 0 then
+ if sub > 0 then
+ report_pages("flushing realpage %s, userpage %s, subpage %s, time %0.04f / %0.04f",real,user,sub,elapsed,average)
+ else
+ report_pages("flushing realpage %s, userpage %s, time %0.04f / %0.04f",real,user,elapsed,average)
+ end
else
- report_pages("flushing realpage %s, userpage %s",real,user)
+ report_pages("flushing realpage %s, time %0.04f / %0.04f",real,elapsed,average)
end
else
- report_pages("flushing realpage %s",real)
+ report_pages("flushing page, time %0.04f / %0.04f",elapsed,average)
end
else
- report_pages("flushing page")
+ if real > 0 then
+ if user > 0 then
+ if sub > 0 then
+ report_pages("flushing realpage %s, userpage %s, subpage %s",real,user,sub)
+ else
+ report_pages("flushing realpage %s, userpage %s",real,user)
+ end
+ else
+ report_pages("flushing realpage %s",real)
+ end
+ else
+ report_pages("flushing page")
+ end
end
logs.flush()
end
@@ -5681,6 +6204,13 @@ local function reportbanner(t)
end
end
+local function reportversion(t)
+ local banner = t.banner
+ if banner then
+ t.report(banner)
+ end
+end
+
local function reporthelp(t,...)
local helpinfo = t.helpinfo
if type(helpinfo) == "string" then
@@ -5707,6 +6237,7 @@ function logs.application(t)
t.report = logs.reporter(t.name)
t.help = function(...) reportbanner(t) ; reporthelp(t,...) ; reportinfo(t) end
t.identify = function() reportbanner(t) end
+ t.version = function() reportversion(t) end
return t
end
@@ -5772,6 +6303,11 @@ else
end
end
+-- do we still need io.flush then?
+
+io.stdout:setvbuf('no')
+io.stderr:setvbuf('no')
+
end -- of closure
@@ -6277,6 +6813,10 @@ if not modules then modules = { } end modules ['lxml-tab'] = {
-- stripping spaces from e.g. cont-en.xml saves .2 sec runtime so it's not worth the
-- trouble
+-- todo: when serializing optionally remap named entities to hex (if known in char-ent.lua)
+-- maybe when letter -> utf, else name .. then we need an option to the serializer .. a bit
+-- of work so we delay this till we cleanup
+
local trace_entities = false trackers.register("xml.entities", function(v) trace_entities = v end)
local report_xml = logs and logs.reporter("xml","core") or function(...) print(format(...)) end
@@ -6301,10 +6841,11 @@ xml = xml or { }
local xml = xml
+local utf = unicode.utf8
local concat, remove, insert = table.concat, table.remove, table.insert
local type, next, setmetatable, getmetatable, tonumber = type, next, setmetatable, getmetatable, tonumber
local format, lower, find, match, gsub = string.format, string.lower, string.find, string.match, string.gsub
-local utfchar = unicode.utf8.char
+local utfchar, utffind, utfgsub = utf.char, utf.find, utf.gsub
local lpegmatch = lpeg.match
local P, S, R, C, V, C, Cs = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.V, lpeg.C, lpeg.Cs
@@ -6417,9 +6958,22 @@ element.</p>
local nsremap, resolvens = xml.xmlns, xml.resolvens
-local stack, top, dt, at, xmlns, errorstr, entities = { }, { }, { }, { }, { }, nil, { }
-local strip, cleanup, utfize, resolve, resolve_predefined, unify_predefined = false, false, false, false, false, false
-local dcache, hcache, acache = { }, { }, { }
+local stack = { }
+local top = { }
+local dt = { }
+local at = { }
+local xmlns = { }
+local errorstr = nil
+local entities = { }
+local strip = false
+local cleanup = false
+local utfize = false
+local resolve_predefined = false
+local unify_predefined = false
+
+local dcache = { }
+local hcache = { }
+local acache = { }
local mt = { }
@@ -6585,7 +7139,7 @@ local predefined_unified = {
[42] = "&quot;",
[47] = "&apos;",
[74] = "&lt;",
- [76] = "&gr;",
+ [76] = "&gt;",
}
local predefined_simplified = {
@@ -6596,6 +7150,58 @@ local predefined_simplified = {
[76] = ">", gt = ">",
}
+local nofprivates = 0xF0000 -- shared but seldom used
+
+local privates_u = { -- unescaped
+ [ [[&]] ] = "&amp;",
+ [ [["]] ] = "&quot;",
+ [ [[']] ] = "&apos;",
+ [ [[<]] ] = "&lt;",
+ [ [[>]] ] = "&gt;",
+}
+
+local privates_p = {
+}
+
+local privates_n = {
+ -- keeps track of defined ones
+}
+
+local function escaped(s)
+ if s == "" then
+ return ""
+ else -- if utffind(s,privates_u) then
+ return (utfgsub(s,".",privates_u))
+ -- else
+ -- return s
+ end
+end
+
+local function unescaped(s)
+ local p = privates_n[s]
+ if not p then
+ nofprivates = nofprivates + 1
+ p = utfchar(nofprivates)
+ privates_n[s] = p
+ s = "&" .. s .. ";" -- todo: use char-ent to map to hex
+ privates_u[p] = s
+ privates_p[p] = s
+ end
+ return p
+end
+
+local function unprivatized(s,resolve)
+ if s == "" then
+ return ""
+ else
+ return (utfgsub(s,".",privates_p))
+ end
+end
+
+xml.privatetoken = unescaped
+xml.unprivatized = unprivatized
+xml.privatecodes = privates_n
+
local function handle_hex_entity(str)
local h = hcache[str]
if not h then
@@ -6637,7 +7243,7 @@ local function handle_dec_entity(str)
if not n then
report_xml("utfize, ignoring dec entity &#%s;",str)
elseif trace_entities then
- report_xml("utfize, converting dec entity &#%s; into %s",str,h)
+ report_xml("utfize, converting dec entity &#%s; into %s",str,d)
end
else
if trace_entities then
@@ -6658,34 +7264,44 @@ local function handle_any_entity(str)
if not a then
a = resolve_predefined and predefined_simplified[str]
if a then
- -- one of the predefined
- elseif type(resolve) == "function" then
- a = resolve(str) or entities[str]
- else
- a = entities[str]
- end
- if a then
if trace_entities then
- report_xml("resolved entity &%s; -> %s (internal)",str,a)
+ report_xml("resolved entity &%s; -> %s (predefined)",str,a)
end
- a = lpegmatch(parsedentity,a) or a
else
- local unknown_any_entity = placeholders.unknown_any_entity
- if unknown_any_entity then
- a = unknown_any_entity(str) or ""
+ if type(resolve) == "function" then
+ a = resolve(str) or entities[str]
+ else
+ a = entities[str]
end
if a then
+ if type(a) == "function" then
+ if trace_entities then
+ report_xml("expanding entity &%s; (function)",str)
+ end
+ a = a(str) or ""
+ end
+ a = lpegmatch(parsedentity,a) or a -- for nested
if trace_entities then
- report_xml("resolved entity &%s; -> %s (external)",str,a)
+ report_xml("resolved entity &%s; -> %s (internal)",str,a)
end
else
- if trace_entities then
- report_xml("keeping entity &%s;",str)
+ local unknown_any_entity = placeholders.unknown_any_entity
+ if unknown_any_entity then
+ a = unknown_any_entity(str) or ""
end
- if str == "" then
- a = "&error;"
+ if a then
+ if trace_entities then
+ report_xml("resolved entity &%s; -> %s (external)",str,a)
+ end
else
- a = "&" .. str .. ";"
+ if trace_entities then
+ report_xml("keeping entity &%s;",str)
+ end
+ if str == "" then
+ a = "&error;"
+ else
+ a = "&" .. str .. ";"
+ end
end
end
end
@@ -6700,18 +7316,25 @@ local function handle_any_entity(str)
else
local a = acache[str]
if not a then
- if trace_entities then
- report_xml("found entity &%s;",str)
- end
a = resolve_predefined and predefined_simplified[str]
if a then
-- one of the predefined
acache[str] = a
+ if trace_entities then
+ report_xml("entity &%s; becomes %s",str,tostring(a))
+ end
elseif str == "" then
+ if trace_entities then
+ report_xml("invalid entity &%s;",str)
+ end
a = "&error;"
acache[str] = a
else
- a = "&" .. str .. ";"
+ if trace_entities then
+ report_xml("entity &%s; is made private",str)
+ end
+ -- a = "&" .. str .. ";"
+ a = unescaped(str)
acache[str] = a
end
end
@@ -6761,7 +7384,7 @@ local value = (squote * Cs((entity + (1 - squote))^0) * squote) + (dq
local endofattributes = slash * close + close -- recovery of flacky html
local whatever = space * name * optionalspace * equal
-local wrongvalue = C(P(1-whatever-close)^1 + P(1-close)^1) / attribute_value_error
+----- wrongvalue = C(P(1-whatever-close)^1 + P(1-close)^1) / attribute_value_error
----- wrongvalue = C(P(1-whatever-endofattributes)^1 + P(1-endofattributes)^1) / attribute_value_error
----- wrongvalue = C(P(1-space-endofattributes)^1) / attribute_value_error
local wrongvalue = Cs(P(entity + (1-space-endofattributes))^1) / attribute_value_error
@@ -6796,6 +7419,8 @@ local function normalentity(k,v ) entities[k] = v end
local function systementity(k,v,n) entities[k] = v end
local function publicentity(k,v,n) entities[k] = v end
+-- todo: separate dtd parser
+
local begindoctype = open * P("!DOCTYPE")
local enddoctype = close
local beginset = P("[")
@@ -6803,17 +7428,22 @@ local endset = P("]")
local doctypename = C((1-somespace-close)^0)
local elementdoctype = optionalspace * P("<!ELEMENT") * (1-close)^0 * close
+local basiccomment = begincomment * ((1 - endcomment)^0) * endcomment
+
local normalentitytype = (doctypename * somespace * value)/normalentity
local publicentitytype = (doctypename * somespace * P("PUBLIC") * somespace * value)/publicentity
local systementitytype = (doctypename * somespace * P("SYSTEM") * somespace * value * somespace * P("NDATA") * somespace * doctypename)/systementity
local entitydoctype = optionalspace * P("<!ENTITY") * somespace * (systementitytype + publicentitytype + normalentitytype) * optionalspace * close
-local doctypeset = beginset * optionalspace * P(elementdoctype + entitydoctype + space)^0 * optionalspace * endset
+-- we accept comments in doctypes
+
+local doctypeset = beginset * optionalspace * P(elementdoctype + entitydoctype + basiccomment + space)^0 * optionalspace * endset
local definitiondoctype= doctypename * somespace * doctypeset
local publicdoctype = doctypename * somespace * P("PUBLIC") * somespace * value * somespace * value * somespace * doctypeset
local systemdoctype = doctypename * somespace * P("SYSTEM") * somespace * value * somespace * doctypeset
local simpledoctype = (1-close)^1 -- * balanced^0
local somedoctype = C((somespace * (publicdoctype + systemdoctype + definitiondoctype + simpledoctype) * optionalspace)^0)
+local somedoctype = C((somespace * (publicdoctype + systemdoctype + definitiondoctype + simpledoctype) * optionalspace)^0)
local instruction = (spacing * begininstruction * someinstruction * endinstruction) / function(...) add_special("@pi@",...) end
local comment = (spacing * begincomment * somecomment * endcomment ) / function(...) add_special("@cm@",...) end
@@ -6845,17 +7475,30 @@ local grammar_unparsed_text = P { "preamble",
children = unparsedtext + V("parent") + emptyelement + comment + cdata + instruction,
}
--- maybe we will add settinsg to result as well
+-- maybe we will add settings to result as well
-local function xmlconvert(data, settings)
- settings = settings or { } -- no_root strip_cm_and_dt given_entities parent_root error_handler
- strip = settings.strip_cm_and_dt
- utfize = settings.utfize_entities
- resolve = settings.resolve_entities
+local function _xmlconvert_(data, settings)
+ settings = settings or { } -- no_root strip_cm_and_dt given_entities parent_root error_handler
+ --
+ strip = settings.strip_cm_and_dt
+ utfize = settings.utfize_entities
+ resolve = settings.resolve_entities
resolve_predefined = settings.resolve_predefined_entities -- in case we have escaped entities
- unify_predefined = settings.unify_predefined_entities -- &#038; -> &amp;
- cleanup = settings.text_cleanup
- stack, top, at, xmlns, errorstr, entities = { }, { }, { }, { }, nil, settings.entities or { }
+ unify_predefined = settings.unify_predefined_entities -- &#038; -> &amp;
+ cleanup = settings.text_cleanup
+ entities = settings.entities or { }
+ --
+ if utfize == nil then
+ settings.utfize_entities = true
+ utfize = true
+ end
+ if resolve_predefined == nil then
+ settings.resolve_predefined_entities = true
+ resolve_predefined = true
+ end
+ --
+ --
+ stack, top, at, xmlns, errorstr = { }, { }, { }, { }, nil
acache, hcache, dcache = { }, { }, { } -- not stored
reported_attribute_errors = { }
if settings.parent_root then
@@ -6893,7 +7536,7 @@ local function xmlconvert(data, settings)
else
errorhandler = errorhandler or xml.errorhandler
if errorhandler then
- xml.errorhandler("load",errorstr)
+ xml.errorhandler(format("load error: %s",errorstr))
end
end
else
@@ -6907,7 +7550,7 @@ local function xmlconvert(data, settings)
local v = rdt[k]
if type(v) == "table" and not v.special then -- always table -)
result.ri = k -- rootindex
-v.__p__ = result -- new, experiment, else we cannot go back to settings, we need to test this !
+ v.__p__ = result -- new, experiment, else we cannot go back to settings, we need to test this !
break
end
end
@@ -6915,16 +7558,42 @@ v.__p__ = result -- new, experiment, else we cannot go back to settings, we nee
if errorstr and errorstr ~= "" then
result.error = true
end
+ result.statistics = {
+ entities = {
+ decimals = dcache,
+ hexadecimals = hcache,
+ names = acache,
+ }
+ }
+ strip, utfize, resolve, resolve_predefined = nil, nil, nil, nil
+ unify_predefined, cleanup, entities = nil, nil, nil
+ stack, top, at, xmlns, errorstr = nil, nil, nil, nil, nil
+ acache, hcache, dcache = nil, nil, nil
+ reported_attribute_errors, mt, errorhandler = nil, nil, nil
return result
end
+-- Because we can have a crash (stack issues) with faulty xml, we wrap this one
+-- in a protector:
+
+function xmlconvert(data,settings)
+ local ok, result = pcall(function() return _xmlconvert_(data,settings) end)
+ if ok then
+ return result
+ else
+ return _xmlconvert_("")
+ end
+end
+
xml.convert = xmlconvert
-function xml.inheritedconvert(data,xmldata)
+function xml.inheritedconvert(data,xmldata) -- xmldata is parent
local settings = xmldata.settings
- settings.parent_root = xmldata -- to be tested
+ if settings then
+ settings.parent_root = xmldata -- to be tested
+ end
-- settings.no_root = true
- local xc = xmlconvert(data,settings)
+ local xc = xmlconvert(data,settings) -- hm, we might need to locate settings
-- xc.settings = nil
-- xc.entities = nil
-- xc.special = nil
@@ -6953,7 +7622,7 @@ function xml.is_valid(root)
return root and not root.error
end
-xml.errorhandler = report
+xml.errorhandler = report_xml
--[[ldx--
<p>We cannot load an <l n='lpeg'/> from a filehandle so we need to load
@@ -7032,18 +7701,15 @@ alternative.</p>
function xml.checkbom(root) -- can be made faster
if root.ri then
- local dt, found = root.dt, false
+ local dt = root.dt
for k=1,#dt do
local v = dt[k]
if type(v) == "table" and v.special and v.tg == "@pi@" and find(v.dt[1],"xml.*version=") then
- found = true
- break
+ return
end
end
- if not found then
- insert(dt, 1, { special=true, ns="", tg="@pi@", dt = { "xml version='1.0' standalone='yes'"} } )
- insert(dt, 2, "\n" )
- end
+ insert(dt, 1, { special=true, ns="", tg="@pi@", dt = { "xml version='1.0' standalone='yes'"} } )
+ insert(dt, 2, "\n" )
end
end
@@ -7054,14 +7720,14 @@ and then handle the lot.</p>
-- new experimental reorganized serialize
-local function verbose_element(e,handlers)
+local function verbose_element(e,handlers) -- options
local handle = handlers.handle
local serialize = handlers.serialize
local ens, etg, eat, edt, ern = e.ns, e.tg, e.at, e.dt, e.rn
local ats = eat and next(eat) and { }
if ats then
for k,v in next, eat do
- ats[#ats+1] = format('%s=%q',k,v)
+ ats[#ats+1] = format('%s=%q',k,escaped(v))
end
end
if ern and trace_entities and ern ~= ens then
@@ -7077,7 +7743,7 @@ local function verbose_element(e,handlers)
for i=1,#edt do
local e = edt[i]
if type(e) == "string" then
- handle(e)
+ handle(escaped(e))
else
serialize(e,handlers)
end
@@ -7098,11 +7764,11 @@ local function verbose_element(e,handlers)
handle("<",etg,">")
end
for i=1,#edt do
- local ei = edt[i]
- if type(ei) == "string" then
- handle(ei)
+ local e = edt[i]
+ if type(e) == "string" then
+ handle(escaped(e)) -- option: hexify escaped entities
else
- serialize(ei,handlers)
+ serialize(e,handlers)
end
end
handle("</",etg,">")
@@ -7137,7 +7803,7 @@ local function verbose_root(e,handlers)
end
local function verbose_text(e,handlers)
- handlers.handle(e)
+ handlers.handle(escaped(e))
end
local function verbose_document(e,handlers)
@@ -7265,20 +7931,33 @@ local result
local xmlfilehandler = newhandlers {
name = "file",
- initialize = function(name) result = io.open(name,"wb") return result end,
- finalize = function() result:close() return true end,
- handle = function(...) result:write(...) end,
+ initialize = function(name)
+ result = io.open(name,"wb")
+ return result
+ end,
+ finalize = function()
+ result:close()
+ return true
+ end,
+ handle = function(...)
+ result:write(...)
+ end,
}
-- no checking on writeability here but not faster either
--
-- local xmlfilehandler = newhandlers {
--- initialize = function(name) io.output(name,"wb") return true end,
--- finalize = function() io.close() return true end,
+-- initialize = function(name)
+-- io.output(name,"wb")
+-- return true
+-- end,
+-- finalize = function()
+-- io.close()
+-- return true
+-- end,
-- handle = io.write,
-- }
-
function xml.save(root,name)
serialize(root,xmlfilehandler,name)
end
@@ -7287,28 +7966,34 @@ local result
local xmlstringhandler = newhandlers {
name = "string",
- initialize = function() result = { } return result end,
- finalize = function() return concat(result) end,
- handle = function(...) result[#result+1] = concat { ... } end
+ initialize = function()
+ result = { }
+ return result
+ end,
+ finalize = function()
+ return concat(result)
+ end,
+ handle = function(...)
+ result[#result+1] = concat { ... }
+ end,
}
local function xmltostring(root) -- 25% overhead due to collecting
- if root then
- if type(root) == 'string' then
- return root
- else -- if next(root) then -- next is faster than type (and >0 test)
- return serialize(root,xmlstringhandler) or ""
- end
+ if not root then
+ return ""
+ elseif type(root) == 'string' then
+ return root
+ else -- if next(root) then -- next is faster than type (and >0 test)
+ return serialize(root,xmlstringhandler) or ""
end
- return ""
end
-local function xmltext(root) -- inline
+local function __tostring(root) -- inline
return (root and xmltostring(root)) or ""
end
initialize_mt = function(root) -- redefinition
- mt = { __tostring = xmltext, __index = root }
+ mt = { __tostring = __tostring, __index = root }
end
xml.defaulthandlers = handlers
@@ -7428,7 +8113,7 @@ xml.tocdata(e,"error")
--ldx]]--
function xml.tocdata(e,wrapper)
- local whatever = xmltostring(e.dt)
+ local whatever = type(e) == "table" and xmltostring(e.dt) or e or ""
if wrapper then
whatever = format("<%s>%s</%s>",wrapper,whatever,wrapper)
end
@@ -7620,8 +8305,8 @@ apply_axis['child'] = function(list)
c = c + 1
collected[c] = dk
dk.ni = k -- refresh
- en = en + 1
- dk.ei = en
+ en = en + 1
+ dk.ei = en
end
end
ll.en = en
@@ -7649,6 +8334,7 @@ local function collect(list,collected,c)
end
return c
end
+
apply_axis['descendant'] = function(list)
local collected, c = { }, 0
for l=1,#list do
@@ -7940,6 +8626,12 @@ local lp_or = P("|") / " or "
local lp_and = P("&") / " and "
local lp_builtin = P (
+ P("text") / "(ll.dt[1] or '')" + -- fragile
+ P("content") / "ll.dt" +
+ -- P("name") / "(ll.ns~='' and ll.ns..':'..ll.tg)" +
+ P("name") / "((ll.ns~='' and ll.ns..':'..ll.tg) or ll.tg)" +
+ P("tag") / "ll.tg" +
+ P("position") / "l" + -- is element in finalizer
P("firstindex") / "1" +
P("lastindex") / "(#ll.__p__.dt or 1)" +
P("firstelement") / "1" +
@@ -7947,15 +8639,11 @@ local lp_builtin = P (
P("first") / "1" +
P("last") / "#list" +
P("rootposition") / "order" +
- P("position") / "l" + -- is element in finalizer
P("order") / "order" +
P("element") / "(ll.ei or 1)" +
P("index") / "(ll.ni or 1)" +
P("match") / "(ll.mi or 1)" +
- P("text") / "(ll.dt[1] or '')" +
- -- P("name") / "(ll.ns~='' and ll.ns..':'..ll.tg)" +
- P("name") / "((ll.ns~='' and ll.ns..':'..ll.tg) or ll.tg)" +
- P("tag") / "ll.tg" +
+ -- P("namespace") / "ll.ns" +
P("ns") / "ll.ns"
) * ((spaces * P("(") * spaces * P(")"))/"")
@@ -7979,11 +8667,11 @@ local lp_function = C(R("az","AZ","__")^1) * P("(") / function(t) -- todo: bett
end
end
-local lparent = lpeg.P("(")
-local rparent = lpeg.P(")")
+local lparent = P("(")
+local rparent = P(")")
local noparent = 1 - (lparent+rparent)
-local nested = lpeg.P{lparent * (noparent + lpeg.V(1))^0 * rparent}
-local value = lpeg.P(lparent * lpeg.C((noparent + nested)^0) * rparent) -- lpeg.P{"("*C(((1-S("()"))+V(1))^0)*")"}
+local nested = P{lparent * (noparent + V(1))^0 * rparent}
+local value = P(lparent * C((noparent + nested)^0) * rparent) -- P{"("*C(((1-S("()"))+V(1))^0)*")"}
local lp_child = Cc("expr.child(ll,'") * R("az","AZ","--","__")^1 * Cc("')")
local lp_number = S("+-") * R("09")^1
@@ -8135,7 +8823,10 @@ local special_1 = P("*") * Cc(register_auto_descendant) * Cc(register_all_nodes
local special_2 = P("/") * Cc(register_auto_self)
local special_3 = P("") * Cc(register_auto_self)
-local pathparser = Ct { "patterns", -- can be made a bit faster by moving pattern outside
+local no_nextcolon = P(-1) + #(1-P(":")) -- newer lpeg needs the P(-1)
+local no_nextlparent = P(-1) + #(1-P("(")) -- newer lpeg needs the P(-1)
+
+local pathparser = Ct { "patterns", -- can be made a bit faster by moving some patterns outside
patterns = spaces * V("protocol") * spaces * (
( V("special") * spaces * P(-1) ) +
@@ -8164,10 +8855,8 @@ local pathparser = Ct { "patterns", -- can be made a bit faster by moving patter
shortcuts = V("shortcuts_a") * (spaces * "/" * spaces * V("shortcuts_a"))^0,
s_descendant_or_self = (P("***/") + P("/")) * Cc(register_descendant_or_self), --- *** is a bonus
- -- s_descendant_or_self = P("/") * Cc(register_descendant_or_self),
s_descendant = P("**") * Cc(register_descendant),
- s_child = P("*") * #(1-P(":")) * Cc(register_child ),
--- s_child = P("*") * #(P("/")+P(-1)) * Cc(register_child ),
+ s_child = P("*") * no_nextcolon * Cc(register_child ),
s_parent = P("..") * Cc(register_parent ),
s_self = P("." ) * Cc(register_self ),
s_root = P("^^") * Cc(register_root ),
@@ -8194,13 +8883,13 @@ local pathparser = Ct { "patterns", -- can be made a bit faster by moving patter
expressions = expression / register_expression,
letters = R("az")^1,
- name = (1-lpeg.S("/[]()|:*!"))^1,
+ name = (1-S("/[]()|:*!"))^1, -- make inline
negate = P("!") * Cc(false),
nodefunction = V("negate") + P("not") * Cc(false) + Cc(true),
nodetest = V("negate") + Cc(true),
nodename = (V("negate") + Cc(true)) * spaces * ((V("wildnodename") * P(":") * V("wildnodename")) + (Cc(false) * V("wildnodename"))),
- wildnodename = (C(V("name")) + P("*") * Cc(false)) * #(1-P("(")),
+ wildnodename = (C(V("name")) + P("*") * Cc(false)) * no_nextlparent,
nodeset = spaces * Ct(V("nodename") * (spaces * P("|") * spaces * V("nodename"))^0) * spaces,
finalizer = (Cb("protocol") * P("/")^-1 * C(V("name")) * arguments * P(-1)) / register_finalizer,
@@ -8364,7 +9053,7 @@ local function profiled_apply(list,parsed,nofparsed,order)
elseif kind == "expression" then
collected = apply_expression(collected,pi.evaluator,order)
elseif kind == "finalizer" then
- collected = pi.finalizer(collected)
+ collected = pi.finalizer(collected) -- no check on # here
p.matched = p.matched + 1
p.finalized = p.finalized + 1
return collected
@@ -8497,9 +9186,9 @@ end
expressions.child = function(e,pattern)
return applylpath(e,pattern) -- todo: cache
end
-expressions.count = function(e,pattern)
+expressions.count = function(e,pattern) -- what if pattern == empty or nil
local collected = applylpath(e,pattern) -- todo: cache
- return (collected and #collected) or 0
+ return pattern and (collected and #collected) or 0
end
-- external
@@ -8508,7 +9197,7 @@ expressions.oneof = function(s,...) -- slow
local t = {...} for i=1,#t do if s == t[i] then return true end end return false
end
expressions.error = function(str)
- xml.errorhandler("unknown function in lpath expression",tostring(str or "?"))
+ xml.errorhandler(format("unknown function in lpath expression: %s",tostring(str or "?")))
return false
end
expressions.undefined = function(s)
@@ -8534,6 +9223,23 @@ expressions.lower = lower
expressions.number = tonumber
expressions.boolean = toboolean
+function expressions.contains(str,pattern)
+ local t = type(str)
+ if t == "string" then
+ if find(str,pattern) then
+ return true
+ end
+ elseif t == "table" then
+ for i=1,#str do
+ local d = str[i]
+ if type(d) == "string" and find(d,pattern) then
+ return true
+ end
+ end
+ end
+ return false
+end
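+
+-- usage sketch (illustrative): works on a string or a table of strings
+--
+-- expressions.contains("foobar","oba") -- true
+-- expressions.contains({ "foo", "bar" },"ar") -- true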
+
-- user interface
local function traverse(root,pattern,handle)
@@ -8856,10 +9562,12 @@ local xml = xml
local xmlconvert, xmlcopy, xmlname = xml.convert, xml.copy, xml.name
local xmlinheritedconvert = xml.inheritedconvert
local xmlapplylpath = xml.applylpath
+local xmlfilter = xml.filter
local type, setmetatable, getmetatable = type, setmetatable, getmetatable
-local insert, remove, fastcopy = table.insert, table.remove, table.fastcopy
-local gmatch, gsub = string.gmatch, string.gsub
+local insert, remove, fastcopy, concat = table.insert, table.remove, table.fastcopy, table.concat
+local gmatch, gsub, format = string.gmatch, string.gsub, string.format
+local utfbyte = utf.byte
local function report(what,pattern,c,e)
report_xml("%s element '%s' (root: '%s', position: %s, index: %s, pattern: %s)",what,xmlname(e),xmlname(e.__p__),c,e.ni,pattern)
@@ -9004,7 +9712,7 @@ local function xmltoelement(whatever,root)
end
local element
if type(whatever) == "string" then
- element = xmlinheritedconvert(whatever,root)
+ element = xmlinheritedconvert(whatever,root) -- beware, not really a root
else
element = whatever -- we assume a table
end
@@ -9031,18 +9739,30 @@ local function copiedelement(element,newparent)
end
function xml.delete(root,pattern)
- local collected = xmlapplylpath(root,pattern)
- if collected then
- for c=1,#collected do
- local e = collected[c]
- local p = e.__p__
- if p then
- if trace_manipulations then
- report('deleting',pattern,c,e)
+ if not pattern or pattern == "" then
+ local p = root.__p__
+ if p then
+ if trace_manipulations then
+ report('deleting',"--",1,root) -- no match index here, only the root itself is deleted
+ end
+ local d = p.dt
+ remove(d,root.ni)
+ redo_ni(d) -- can be made faster and inlined
+ end
+ else
+ local collected = xmlapplylpath(root,pattern)
+ if collected then
+ for c=1,#collected do
+ local e = collected[c]
+ local p = e.__p__
+ if p then
+ if trace_manipulations then
+ report('deleting',pattern,c,e)
+ end
+ local d = p.dt
+ remove(d,e.ni)
+ redo_ni(d) -- can be made faster and inlined
end
- local d = p.dt
- remove(d,e.ni)
- redo_ni(d) -- can be made faster and inlined
end
end
end
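+
+-- usage sketch (not part of this patch): with a missing or empty pattern the element
+-- itself is now detached from its parent, otherwise all lpath matches are removed, e.g.
+--
+-- xml.delete(root,"a/b") -- remove every b below a
+-- xml.delete(child)      -- detach child from its parent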
@@ -9105,32 +9825,39 @@ end
local function inject_element(root,pattern,whatever,prepend)
local element = root and xmltoelement(whatever,root)
local collected = element and xmlapplylpath(root,pattern)
- if collected then
- for c=1,#collected do
- local e = collected[c]
- local r = e.__p__
- local d, k, rri = r.dt, e.ni, r.ri
- local edt = (rri and d[rri].dt) or (d and d[k] and d[k].dt)
- if edt then
- local be, af
- local cp = copiedelement(element,e)
- if prepend then
- be, af = cp, edt
- else
- be, af = edt, cp
- end
- local bn = #be
- for i=1,#af do
- bn = bn + 1
- be[bn] = af[i]
- end
- if rri then
- r.dt[rri].dt = be
- else
- d[k].dt = be
- end
- redo_ni(d)
+ local function inject_e(e)
+ local r = e.__p__
+ local d, k, rri = r.dt, e.ni, r.ri
+ local edt = (rri and d[rri].dt) or (d and d[k] and d[k].dt)
+ if edt then
+ local be, af
+ local cp = copiedelement(element,e)
+ if prepend then
+ be, af = cp, edt
+ else
+ be, af = edt, cp
end
+ local bn = #be
+ for i=1,#af do
+ bn = bn + 1
+ be[bn] = af[i]
+ end
+ if rri then
+ r.dt[rri].dt = be
+ else
+ d[k].dt = be
+ end
+ redo_ni(d)
+ end
+ end
+ if not collected then
+ -- nothing
+ elseif collected.tg then
+ -- first or so
+ inject_e(collected)
+ else
+ for c=1,#collected do
+ inject_e(collected[c])
end
end
end
@@ -9138,16 +9865,23 @@ end
local function insert_element(root,pattern,whatever,before) -- todo: element als functie
local element = root and xmltoelement(whatever,root)
local collected = element and xmlapplylpath(root,pattern)
- if collected then
+ local function insert_e(e)
+ local r = e.__p__
+ local d, k = r.dt, e.ni
+ if not before then
+ k = k + 1
+ end
+ insert(d,k,copiedelement(element,r))
+ redo_ni(d)
+ end
+ if not collected then
+ -- nothing
+ elseif collected.tg then
+ -- first or so
+ insert_e(collected)
+ else
for c=1,#collected do
- local e = collected[c]
- local r = e.__p__
- local d, k = r.dt, e.ni
- if not before then
- k = k + 1
- end
- insert(d,k,copiedelement(element,r))
- redo_ni(d)
+ insert_e(collected[c])
end
end
end
@@ -9346,6 +10080,75 @@ function xml.remapname(root, pattern, newtg, newns, newrn)
end
--[[ldx--
+<p>Helper (for q2p).</p>
+--ldx]]--
+
+function xml.cdatatotext(e)
+ local dt = e.dt
+ if #dt == 1 then
+ local first = dt[1]
+ if first.tg == "@cd@" then
+ e.dt = first.dt
+ end
+ else
+ -- maybe option
+ end
+end
+
+xml.builtinentities = table.tohash { "amp", "quot", "apos", "lt", "gt" } -- used often so share
+
+local entities = characters and characters.entities or nil
+local builtinentities = xml.builtinentities
+
+function xml.addentitiesdoctype(root,option) -- we could also have a 'resolve' i.e. inline hex
+ if not entities then
+ require("char-ent")
+ entities = characters.entities
+ end
+ if entities and root and root.tg == "@rt@" and root.statistics then
+ local list = { }
+ local hexify = option == "hexadecimal"
+ for k, v in table.sortedhash(root.statistics.entities.names) do
+ if not builtinentities[k] then
+ local e = entities[k]
+ if not e then
+ e = format("[%s]",k)
+ elseif hexify then
+ e = format("&#%05X;",utfbyte(k))
+ end
+ list[#list+1] = format(" <!ENTITY %s %q >",k,e)
+ end
+ end
+ local dt = root.dt
+ local n = dt[1].tg == "@pi@" and 2 or 1
+ if #list > 0 then
+ insert(dt, n, { "\n" })
+ insert(dt, n, {
+ tg = "@dt@", -- beware, doctype is unparsed
+ dt = { format("Something [\n%s\n] ",concat(list)) },
+ ns = "",
+ special = true,
+ })
+ insert(dt, n, { "\n\n" })
+ else
+ -- insert(dt, n, { table.serialize(root.statistics) })
+ end
+ end
+end
+
+-- local str = [==[
+-- <?xml version='1.0' standalone='yes' ?>
+-- <root>
+-- <a>test &nbsp; test &#123; test</a>
+-- <b><![CDATA[oeps]]></b>
+-- </root>
+-- ]==]
+--
+-- local x = xml.convert(str)
+-- xml.addentitiesdoctype(x,"hexadecimal")
+-- print(x)
+
+--[[ldx--
<p>Here are a few synonyms.</p>
--ldx]]--
@@ -9378,6 +10181,53 @@ xml.remap_tag = xml.remaptag obsolete.remap_tag
xml.remap_name = xml.remapname obsolete.remap_name = xml.remapname
xml.remap_namespace = xml.remapnamespace obsolete.remap_namespace = xml.remapnamespace
+-- new (probably ok)
+
+function xml.cdata(e)
+ if e then
+ local dt = e.dt
+ if dt and #dt == 1 then
+ local first = dt[1]
+ return first.tg == "@cd@" and first.dt[1] or ""
+ end
+ end
+ return ""
+end
+
+function xml.finalizers.xml.cdata(collected)
+ if collected then
+ local e = collected[1]
+ if e then
+ local dt = e.dt
+ if dt and #dt == 1 then
+ local first = dt[1]
+ return first.tg == "@cd@" and first.dt[1] or ""
+ end
+ end
+ end
+ return ""
+end
+
+function xml.insertcomment(e,str,n) -- also insertcdata
+ table.insert(e.dt,n or 1,{
+ tg = "@cm@",
+ ns = "",
+ special = true,
+ at = { },
+ dt = { str },
+ })
+end
+
+function xml.setcdata(e,str) -- also setcomment
+ e.dt = { {
+ tg = "@cd@",
+ ns = "",
+ special = true,
+ at = { },
+ dt = { str },
+ } }
+end
+
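+-- illustrative round trip (not part of this patch), assuming e is a parsed element:
+--
+-- xml.setcdata(e,"raw <data>")       -- wrap the content in a single @cd@ node
+-- xml.cdata(e)                       -- returns "raw <data>"
+-- xml.insertcomment(e,"generated",1) -- prepends a @cm@ node (cdata() then sees #dt > 1)
+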
end -- of closure
@@ -9391,15 +10241,17 @@ if not modules then modules = { } end modules ['lxml-xml'] = {
license = "see context related readme files"
}
-local concat = string.concat
+local concat = table.concat
+local find = string.find
local xml = xml
-local finalizers = xml.finalizers.xml
-local xmlfilter = xml.filter -- we could inline this one for speed
-local xmltostring = xml.tostring
-local xmlserialize = xml.serialize
-local xmlcollected = xml.collected
+local finalizers = xml.finalizers.xml
+local xmlfilter = xml.filter -- we could inline this one for speed
+local xmltostring = xml.tostring
+local xmlserialize = xml.serialize
+local xmlcollected = xml.collected
+local xmlnewhandlers = xml.newhandlers
local function first(collected) -- wrong ?
return collected and collected[1]
@@ -9413,6 +10265,21 @@ local function all(collected)
return collected
end
+-- local function reverse(collected)
+-- if collected then
+-- local nc = #collected
+-- if nc > 0 then
+-- local reversed, r = { }, 0
+-- for c=nc,1,-1 do
+-- r = r + 1
+-- reversed[r] = collected[c]
+-- end
+-- return reversed
+-- else
+-- return collected
+-- end
+-- end
+-- end
local reverse = table.reversed
@@ -9429,34 +10296,37 @@ local function att(id,name)
end
local function count(collected)
- return (collected and #collected) or 0
+ return collected and #collected or 0
end
local function position(collected,n)
- if collected then
- n = tonumber(n) or 0
- if n < 0 then
- return collected[#collected + n + 1]
- elseif n > 0 then
- return collected[n]
- else
- return collected[1].mi or 0
- end
+ if not collected then
+ return 0
+ end
+ local nc = #collected
+ if nc == 0 then
+ return 0
+ end
+ n = tonumber(n) or 0
+ if n < 0 then
+ return collected[nc + n + 1]
+ elseif n > 0 then
+ return collected[n]
+ else
+ return collected[1].mi or 0
end
end
local function match(collected)
- return (collected and collected[1].mi) or 0 -- match
+ return collected and #collected > 0 and collected[1].mi or 0 -- match
end
local function index(collected)
- if collected then
- return collected[1].ni
- end
+ return collected and #collected > 0 and collected[1].ni or 0 -- 0 is new
end
local function attributes(collected,arguments)
- if collected then
+ if collected and #collected > 0 then
local at = collected[1].at
if arguments then
return at[arguments]
@@ -9467,7 +10337,7 @@ local function attributes(collected,arguments)
end
local function chainattribute(collected,arguments) -- todo: optional levels
- if collected then
+ if collected and #collected > 0 then
local e = collected[1]
while e do
local at = e.at
@@ -9485,108 +10355,169 @@ local function chainattribute(collected,arguments) -- todo: optional levels
return ""
end
-local function raw(collected) -- hybrid
- if collected then
+local function raw(collected) -- hybrid (not much different from text so it might go)
+ if collected and #collected > 0 then
local e = collected[1] or collected
- return (e and xmlserialize(e)) or "" -- only first as we cannot concat function
+ return e and xmltostring(e) or "" -- only first as we cannot concat function
else
return ""
end
end
+--
+
+local xmltexthandler = xmlnewhandlers {
+ name = "string",
+ initialize = function()
+ result = { }
+ return result
+ end,
+ finalize = function()
+ return concat(result)
+ end,
+ handle = function(...)
+ result[#result+1] = concat { ... }
+ end,
+ escape = false,
+}
+
+local function xmltotext(root)
+ local dt = root.dt
+ if not dt then
+ return ""
+ end
+ local nt = #dt -- string or table
+ if nt == 0 then
+ return ""
+ elseif nt == 1 and type(dt[1]) == "string" then
+ return dt[1] -- no escaping of " ' < > &
+ else
+ return xmlserialize(root,xmltexthandler) or ""
+ end
+end
+
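+-- behaviour sketch (not part of this patch): for <a>foo <b>bar</b></a> the dt holds
+-- mixed content, so xmltotext is expected to fall back on the string handler and
+-- return "foo bar"; a dt holding a single string is returned as-is, unescaped.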
+--
+
local function text(collected) -- hybrid
- if collected then
- local e = collected[1] or collected
- return (e and xmltostring(e.dt)) or ""
+ if collected then -- no # test here !
+ local e = collected[1] or collected -- why fallback to element, how about cdata
+ return e and xmltotext(e) or ""
else
return ""
end
end
local function texts(collected)
- if collected then
- local t, n = { }, 0
- for c=1,#collected do
- local e = collected[c]
- if e and e.dt then
- n = n + 1
- t[n] = e.dt
- end
+ if not collected then
+ return { } -- why no nil
+ end
+ local nc = #collected
+ if nc == 0 then
+ return { } -- why no nil
+ end
+ local t, n = { }, 0
+ for c=1,nc do
+ local e = collected[c]
+ if e and e.dt then
+ n = n + 1
+ t[n] = e.dt
end
- return t
end
+ return t
end
local function tag(collected,n)
- if collected then
- local c
- if n == 0 or not n then
- c = collected[1]
- elseif n > 1 then
- c = collected[n]
- else
- c = collected[#collected-n+1]
- end
- return c and c.tg
+ if not collected then
+ return
+ end
+ local nc = #collected
+ if nc == 0 then
+ return
end
+ local c
+ if n == 0 or not n then
+ c = collected[1]
+ elseif n > 1 then
+ c = collected[n]
+ else
+ c = collected[nc-n+1]
+ end
+ return c and c.tg
end
local function name(collected,n)
- if collected then
- local c
- if n == 0 or not n then
- c = collected[1]
- elseif n > 1 then
- c = collected[n]
- else
- c = collected[#collected-n+1]
- end
- if c then
- if c.ns == "" then
- return c.tg
- else
- return c.ns .. ":" .. c.tg
- end
- end
+ if not collected then
+ return
+ end
+ local nc = #collected
+ if nc == 0 then
+ return
+ end
+ local c
+ if n == 0 or not n then
+ c = collected[1]
+ elseif n > 1 then
+ c = collected[n]
+ else
+ c = collected[nc-n+1]
+ end
+ if not c then
+ -- sorry
+ elseif c.ns == "" then
+ return c.tg
+ else
+ return c.ns .. ":" .. c.tg
end
end
local function tags(collected,nonamespace)
- if collected then
- local t, n = { }, 0
- for c=1,#collected do
- local e = collected[c]
- local ns, tg = e.ns, e.tg
- n = n + 1
- if nonamespace or ns == "" then
- t[n] = tg
- else
- t[n] = ns .. ":" .. tg
- end
+ if not collected then
+ return
+ end
+ local nc = #collected
+ if nc == 0 then
+ return
+ end
+ local t, n = { }, 0
+ for c=1,nc do
+ local e = collected[c]
+ local ns, tg = e.ns, e.tg
+ n = n + 1
+ if nonamespace or ns == "" then
+ t[n] = tg
+ else
+ t[n] = ns .. ":" .. tg
end
- return t
end
+ return t
end
-local function empty(collected)
- if collected then
- for c=1,#collected do
- local e = collected[c]
- if e then
- local edt = e.dt
- if edt then
- local n = #edt
- if n == 1 then
- local edk = edt[1]
- local typ = type(edk)
- if typ == "table" then
- return false
- elseif edk ~= "" then -- maybe an extra tester for spacing only
- return false
- end
- elseif n > 1 then
+local function empty(collected,spacesonly)
+ if not collected then
+ return true
+ end
+ local nc = #collected
+ if nc == 0 then
+ return true
+ end
+ for c=1,nc do
+ local e = collected[c]
+ if e then
+ local edt = e.dt
+ if edt then
+ local n = #edt
+ if n == 1 then
+ local edk = edt[1]
+ local typ = type(edk)
+ if typ == "table" then
+ return false
+ elseif edk ~= "" then
+ return false
+ elseif spacesonly and not find(edk,"%S") then
return false
end
+ elseif n > 1 then
+ return false
end
end
end
@@ -9641,14 +10572,14 @@ function xml.raw(id,pattern)
end
end
-function xml.text(id,pattern)
+function xml.text(id,pattern) -- brrr either content or element (when cdata)
if pattern then
-- return text(xmlfilter(id,pattern))
local collected = xmlfilter(id,pattern)
- return (collected and xmltostring(collected[1].dt)) or ""
+ return collected and #collected > 0 and xmltotext(collected[1]) or ""
elseif id then
-- return text(id)
- return xmltostring(id.dt) or ""
+ return xmltotext(id) or ""
else
return ""
end
@@ -9656,6 +10587,8 @@ end
xml.content = text
+--
+
function xml.position(id,pattern,n) -- element
return position(xmlfilter(id,pattern),n)
end
@@ -9664,8 +10597,8 @@ function xml.match(id,pattern) -- number
return match(xmlfilter(id,pattern))
end
-function xml.empty(id,pattern)
- return empty(xmlfilter(id,pattern))
+function xml.empty(id,pattern,spacesonly)
+ return empty(xmlfilter(id,pattern),spacesonly)
end
xml.all = xml.filter
@@ -9719,7 +10652,7 @@ if not modules then modules = { } end modules ['data-ini'] = {
license = "see context related readme files",
}
-local gsub, find, gmatch = string.gsub, string.find, string.gmatch
+local gsub, find, gmatch, char = string.gsub, string.find, string.gmatch, string.char
local concat = table.concat
local next, type = next, type
@@ -9781,7 +10714,7 @@ do
local homedir = osgetenv(ostype == "windows" and 'USERPROFILE' or 'HOME') or ''
if not homedir or homedir == "" then
- homedir = string.char(127) -- we need a value, later we wil trigger on it
+ homedir = char(127) -- we need a value, later we will trigger on it
end
homedir = file.collapsepath(homedir)
@@ -9954,7 +10887,7 @@ if not modules then modules = { } end modules ['data-exp'] = {
license = "see context related readme files",
}
-local format, find, gmatch, lower = string.format, string.find, string.gmatch, string.lower
+local format, find, gmatch, lower, char, sub = string.format, string.find, string.gmatch, string.lower, string.char, string.sub
local concat, sort = table.concat, table.sort
local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
local Ct, Cs, Cc, P, C, S = lpeg.Ct, lpeg.Cs, lpeg.Cc, lpeg.P, lpeg.C, lpeg.S
@@ -9974,19 +10907,6 @@ local resolvers = resolvers
-- all, when working on the main resolver code, I don't want to scroll
-- past this every time. See data-obs.lua for the gsub variant.
--- {a,b,c,d}
--- a,b,c/{p,q,r},d
--- a,b,c/{p,q,r}/d/{x,y,z}//
--- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
--- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
--- a{b,c}{d,e}f
--- {a,b,c,d}
--- {a,b,c/{p,q,r},d}
--- {a,b,c/{p,q,r}/d/{x,y,z}//}
--- {a,b,c/{p,q/{x,y,z}},d/{p,q,r}}
--- {a,b,c/{p,q/{x,y,z},w}v,d/{p,q,r}}
--- {$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,.local,}/web2c}
-
local function f_first(a,b)
local t, n = { }, 0
for s in gmatch(b,"[^,]+") do
@@ -10043,12 +10963,9 @@ local function splitpathexpr(str, newlist, validate) -- I couldn't resist lpeggi
str = lpegmatch(stripper_1,str)
if validate then
for s in gmatch(str,"[^,]+") do
- local restoreslashes = false
- if find(s, "//$") then restoreslashes = true end
s = validate(s)
if s then
n = n + 1 ; t[n] = s
- if restoreslashes and not find(s, "//$") then t[n] = t[n] .. '//' end
end
end
else
@@ -10081,41 +10998,56 @@ function resolvers.expandedpathfromlist(pathlist)
return newlist
end
+-- {a,b,c,d}
+-- a,b,c/{p,q,r},d
+-- a,b,c/{p,q,r}/d/{x,y,z}//
+-- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
+-- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
+-- a{b,c}{d,e}f
+-- {a,b,c,d}
+-- {a,b,c/{p,q,r},d}
+-- {a,b,c/{p,q,r}/d/{x,y,z}//}
+-- {a,b,c/{p,q/{x,y,z}},d/{p,q,r}}
+-- {a,b,c/{p,q/{x,y,z},w}v,d/{p,q,r}}
+-- {$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,.local,}/web2c}
+
local cleanup = lpeg.replacer {
{ "!" , "" },
{ "\\" , "/" },
}
-local homedir
-
-function resolvers.cleanpath(str)
- if not homedir then
- homedir = lpegmatch(cleanup,environment.homedir or "")
- if homedir == string.char(127) or homedir == "" or not lfs.isdir(homedir) then
- if trace_expansions then
- report_expansions("no home dir set, ignoring dependent paths")
- end
- function resolvers.cleanpath(str)
- if find(str,"~") then
- return "" -- special case
- else
- return str and lpegmatch(cleanup,str)
- end
- end
- else
- cleanup = lpeg.replacer {
- { "!" , "" },
- { "\\" , "/" },
- { "~" , homedir },
- }
- function resolvers.cleanpath(str)
- return str and lpegmatch(cleanup,str)
+function resolvers.cleanpath(str) -- tricky, maybe only simple paths
+ local doslashes = (P("\\")/"/" + 1)^0
+ local donegation = (P("!") /"" )^0
+ local homedir = lpegmatch(Cs(donegation * doslashes),environment.homedir or "")
+ if homedir == "~" or homedir == "" or not lfs.isdir(homedir) then
+ if trace_expansions then
+ report_expansions("no home dir set, ignoring dependent paths")
+ end
+ function resolvers.cleanpath(str)
+ if not str or find(str,"~") then
+ return "" -- special case
+ else
+ return lpegmatch(cleanup,str)
end
end
+ else
+ local dohome = ((P("~")+P("$HOME"))/homedir)^0
+ local cleanup = Cs(donegation * dohome * doslashes)
+ function resolvers.cleanpath(str)
+ return str and lpegmatch(cleanup,str) or ""
+ end
end
return resolvers.cleanpath(str)
end
+-- print(resolvers.cleanpath(""))
+-- print(resolvers.cleanpath("!"))
+-- print(resolvers.cleanpath("~"))
+-- print(resolvers.cleanpath("~/test"))
+-- print(resolvers.cleanpath("!~/test"))
+-- print(resolvers.cleanpath("~/test~test"))
+
-- This one strips quotes and funny tokens.
local expandhome = P("~") / "$HOME" -- environment.homedir
@@ -10140,8 +11072,8 @@ end
local cache = { }
----- splitter = Ct(lpeg.splitat(S(ostype == "windows" and ";" or ":;"))) -- maybe add ,
-local splitter = Ct(lpeg.splitat(";")) -- as we move towards urls, prefixes and use tables we no longer do :
+----- splitter = lpeg.tsplitat(S(ostype == "windows" and ";" or ":;")) -- maybe add ,
+local splitter = lpeg.tsplitat(";") -- as we move towards urls, prefixes and use tables we no longer do :
local backslashswapper = lpeg.replacer("\\","/")
@@ -10201,13 +11133,22 @@ end
-local weird = P(".")^1 + lpeg.anywhere(S("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t"))
+-- a lot of this caching can be stripped away when we have ssd's everywhere
+--
+-- we could cache all the (sub)paths here if needed
local attributes, directory = lfs.attributes, lfs.dir
+local weird = P(".")^1 + lpeg.anywhere(S("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t"))
+local timer = { }
+local scanned = { }
+local nofscans = 0
+local scancache = { }
+
local function scan(files,spec,path,n,m,r)
- local full = (path == "" and spec) or (spec .. path .. '/')
- local dirs, nofdirs = { }, 0
+ local full = (path == "" and spec) or (spec .. path .. '/')
+ local dirs = { }
+ local nofdirs = 0
for name in directory(full) do
if not lpegmatch(weird,name) then
local mode = attributes(full..name,'mode')
@@ -10245,25 +11186,122 @@ local function scan(files,spec,path,n,m,r)
files, n, m, r = scan(files,spec,dirs[i],n,m,r)
end
end
+ scancache[sub(full,1,-2)] = files
return files, n, m, r
end
-function resolvers.scanfiles(path,branch)
+local fullcache = { }
+
+function resolvers.scanfiles(path,branch,usecache)
+ statistics.starttiming(timer)
+ local realpath = resolvers.resolve(path) -- no shortcut
+ if usecache then
+ local files = fullcache[realpath]
+ if files then
+ if trace_locating then
+ report_expansions("using caches scan of path '%s', branch '%s'",path,branch or path)
+ end
+ return files
+ end
+ end
if trace_locating then
- report_expansions("scanning path '%s', branch '%s'",path, branch or path)
+ report_expansions("scanning path '%s', branch '%s'",path,branch or path)
end
- local realpath = resolvers.resolve(path) -- no shortcut
local files, n, m, r = scan({ },realpath .. '/',"",0,0,0)
- files.__path__ = path -- can be selfautoparent:texmf-whatever
- files.__files__ = n
- files.__directories__ = m
- files.__remappings__ = r
+ files.__path__ = path -- can be selfautoparent:texmf-whatever
+ files.__files__ = n
+ files.__directories__ = m
+ files.__remappings__ = r
if trace_locating then
report_expansions("%s files found on %s directories with %s uppercase remappings",n,m,r)
end
+ if usecache then
+ scanned[#scanned+1] = realpath
+ fullcache[realpath] = files
+ end
+ nofscans = nofscans + 1
+ statistics.stoptiming(timer)
+ return files
+end
+
+local function simplescan(files,spec,path) -- first match only, no map and such
+ local full = (path == "" and spec) or (spec .. path .. '/')
+ local dirs = { }
+ local nofdirs = 0
+ for name in directory(full) do
+ if not lpegmatch(weird,name) then
+ local mode = attributes(full..name,'mode')
+ if mode == 'file' then
+ if not files[name] then
+ -- only first match
+ files[name] = path
+ end
+ elseif mode == 'directory' then
+ nofdirs = nofdirs + 1
+ if path ~= "" then
+ dirs[nofdirs] = path..'/'..name
+ else
+ dirs[nofdirs] = name
+ end
+ end
+ end
+ end
+ if nofdirs > 0 then
+ sort(dirs)
+ for i=1,nofdirs do
+ files = simplescan(files,spec,dirs[i])
+ end
+ end
+ return files
+end
+
+local simplecache = { }
+local nofsharedscans = 0
+
+function resolvers.simplescanfiles(path,branch,usecache)
+ statistics.starttiming(timer)
+ local realpath = resolvers.resolve(path) -- no shortcut
+ if usecache then
+ local files = simplecache[realpath]
+ if not files then
+ files = scancache[realpath]
+ if files then
+ nofsharedscans = nofsharedscans + 1
+ end
+ end
+ if files then
+ if trace_locating then
+ report_expansions("using caches scan of path '%s', branch '%s'",path,branch or path)
+ end
+ return files
+ end
+ end
+ if trace_locating then
+ report_expansions("scanning path '%s', branch '%s'",path,branch or path)
+ end
+ local files = simplescan({ },realpath .. '/',"")
+ if trace_locating then
+ report_expansions("%s files found",table.count(files))
+ end
+ if usecache then
+ scanned[#scanned+1] = realpath
+ simplecache[realpath] = files
+ end
+ nofscans = nofscans + 1
+ statistics.stoptiming(timer)
return files
end
+function resolvers.scandata()
+ table.sort(scanned)
+ return {
+ n = nofscans,
+ shared = nofsharedscans,
+ time = statistics.elapsedtime(timer),
+ paths = scanned,
+ }
+end
+
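+-- illustrative sketch (not part of this patch; the path is just an example): with
+-- usecache set, a second scan of the same resolved path is served from the cache,
+-- and scandata() reports the totals:
+--
+-- local files = resolvers.scanfiles("selfautoparent:texmf-context",nil,true)
+-- local again = resolvers.scanfiles("selfautoparent:texmf-context",nil,true) -- cache hit
+-- print(table.serialize(resolvers.scandata()))
+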
end -- of closure
@@ -10361,12 +11399,12 @@ local relations = allocate { -- todo: handlers also here
mp = {
names = { "mp" },
variable = 'MPINPUTS',
- suffixes = { 'mp' },
+ suffixes = { 'mp', 'mpvi', 'mpiv', 'mpii' },
},
tex = {
names = { "tex" },
variable = 'TEXINPUTS',
- suffixes = { 'tex', "mkiv", "mkiv", "mkii" },
+ suffixes = { 'tex', "mkvi", "mkiv", "mkii" },
},
icc = {
names = { "icc", "icc profile", "icc profiles" },
@@ -10503,7 +11541,7 @@ end
resolvers.updaterelations() -- push this in the metatable -> newindex
local function simplified(t,k)
- return rawget(t,lower(gsub(k," ","")))
+ return k and rawget(t,lower(gsub(k," ",""))) or nil
end
setmetatableindex(formats, simplified)
@@ -10589,6 +11627,7 @@ luatools with a recache feature.</p>
--ldx]]--
local format, lower, gsub, concat = string.format, string.lower, string.gsub, table.concat
+local serialize, serializetofile = table.serialize, table.tofile
local mkdirs, isdir = dir.mkdirs, lfs.isdir
local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
@@ -10651,11 +11690,7 @@ local function identify()
end
elseif not writable and caches.force then
local cacheparent = file.dirname(cachepath)
- -- TH: added 'or true' for deeply buried non-existent caches.
- -- file.is_writable() is not really important here, since this
- -- branch is only checked for as-yet non-existent paths, and
- -- it guards against mkdirs() failing, anyway.
- if file.is_writable(cacheparent) or true then
+ if file.is_writable(cacheparent) and true then -- we go on anyway (needed for mojca's kind of paths)
if not caches.ask or io.ask(format("\nShould I create the cache path %s?",cachepath), "no", { "yes", "no" }) == "yes" then
mkdirs(cachepath)
if isdir(cachepath) and file.is_writable(cachepath) then
@@ -10746,7 +11781,7 @@ function caches.usedpaths()
end
function caches.configfiles()
- return table.concat(resolvers.instance.specification,";")
+ return concat(resolvers.instance.specification,";")
end
function caches.hashed(tree)
@@ -10870,9 +11905,9 @@ function caches.savedata(filepath,filename,data,raw)
end
data.cache_uuid = os.uuid()
if caches.direct then
- file.savedata(tmaname,table.serialize(data,true,saveoptions))
+ file.savedata(tmaname,serialize(data,true,saveoptions))
else
- table.tofile(tmaname,data,true,saveoptions)
+ serializetofile(tmaname,data,true,saveoptions)
end
utilities.lua.compile(tmaname,tmcname)
end
@@ -10939,7 +11974,7 @@ function caches.savecontent(cachename,dataname,content)
content = content,
uuid = os.uuid(),
}
- local ok = io.savedata(luaname,table.serialize(data,true))
+ local ok = io.savedata(luaname,serialize(data,true))
if ok then
if trace_locating then
report_resolvers("category '%s', cachename '%s' saved in '%s'",dataname,cachename,luaname)
@@ -11112,7 +12147,7 @@ if not modules then modules = { } end modules ['data-res'] = {
-- instance but for practical purposes we now avoid this and use a
-- instance variable. We always have one instance active (sort of global).
--- todo: cache:/// home:///
+-- todo: cache:/// home:/// selfautoparent:/// (sometime end 2012)
local format, gsub, find, lower, upper, match, gmatch = string.format, string.gsub, string.find, string.lower, string.upper, string.match, string.gmatch
local concat, insert, sortedkeys = table.concat, table.insert, table.sortedkeys
@@ -11155,7 +12190,58 @@ resolvers.criticalvars = allocate { "SELFAUTOLOC", "SELFAUTODIR", "SELFAUTOPARE
resolvers.luacnfname = 'texmfcnf.lua'
resolvers.luacnfstate = "unknown"
-resolvers.luacnfspec = '{selfautodir:,selfautoparent:};{selfautodir:,selfautoparent:}{/share,}/texmf{-local,};{selfautodir:,selfautoparent:}{/share,}/texmf{-local,}/web2c'
+-- The web2c tex binaries as well as kpse have built in paths for the configuration
+-- files and there can be a depressing truckload of them. This is actually the weak
+-- spot of a distribution. So we don't want:
+--
+-- resolvers.luacnfspec = '{$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,}/web2c}'
+--
+-- but instead use:
+--
+-- resolvers.luacnfspec = 'selfautoparent:{/texmf{-local,}{,/web2c}}'
+--
+-- which does not make texlive happy as there is a texmf-local tree one level up
+-- (sigh), so we need this. (We can assume web2c as mkiv does not run on older
+-- texlives anyway.)
+--
+-- texlive:
+--
+-- selfautodir:
+-- selfautoparent:
+-- selfautodir:share/texmf-local/web2c
+-- selfautodir:share/texmf/web2c
+-- selfautodir:texmf-local/web2c
+-- selfautodir:texmf/web2c
+-- selfautoparent:share/texmf-local/web2c
+-- selfautoparent:share/texmf/web2c
+-- selfautoparent:texmf-local/web2c
+-- selfautoparent:texmf/web2c
+--
+-- minimals:
+--
+-- home:texmf/web2c
+-- selfautoparent:texmf-local/web2c
+-- selfautoparent:texmf-context/web2c
+-- selfautoparent:texmf/web2c
+
+if this_is_texlive then
+ -- resolvers.luacnfspec = '{selfautodir:,selfautoparent:}{,{/share,}/texmf{-local,}/web2c}'
+ -- resolvers.luacnfspec = '{selfautodir:{/share,}/texmf-local/web2c,selfautoparent:{/share,}/texmf{-local,}/web2c}'
+ -- resolvers.luacnfspec = 'selfautodir:/texmf-local/web2c;selfautoparent:/texmf{-local,}/web2c'
+ resolvers.luacnfspec = 'selfautodir:;selfautoparent:;{selfautodir:,selfautoparent:}{/share,}/texmf{-local,}/web2c'
+else
+ resolvers.luacnfspec = 'home:texmf/web2c;selfautoparent:texmf{-local,-context,}/web2c'
+end
+
+-- which (as we want users to use the web2c path) can be simplified to this:
+--
+-- if environment and environment.ownpath and string.find(environment.ownpath,"[\\/]texlive[\\/]") then
+-- resolvers.luacnfspec = 'selfautodir:/texmf-local/web2c,selfautoparent:/texmf-local/web2c,selfautoparent:/texmf/web2c'
+-- else
+-- resolvers.luacnfspec = 'selfautoparent:/texmf-local/web2c,selfautoparent:/texmf/web2c'
+-- end
+
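+-- illustrative expansion (not part of this patch): splitting and brace-expanding the
+-- minimals spec above should yield the four paths listed earlier, e.g.
+--
+-- local spec  = 'home:texmf/web2c;selfautoparent:texmf{-local,-context,}/web2c'
+-- local paths = resolvers.expandedpathfromlist(resolvers.splitpath(spec))
+-- -- { "home:texmf/web2c",
+-- --   "selfautoparent:texmf-local/web2c",
+-- --   "selfautoparent:texmf-context/web2c",
+-- --   "selfautoparent:texmf/web2c" }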
+
local unset_variable = "unset"
@@ -11177,7 +12263,7 @@ local instance = resolvers.instance or nil -- the current one (fast access)
function resolvers.setenv(key,value,raw)
if instance then
-- this one will be consulted first when we stay inside
- -- the current environment
+ -- the current environment (prefixes are not resolved here)
instance.environment[key] = value
-- we feed back into the environment, and as this is used
-- by other applications (via os.execute) we need to make
@@ -11257,6 +12343,7 @@ function resolvers.newinstance() -- todo: all vars will become lowercase and alp
remember = true,
diskcache = true,
renewcache = false,
+ renewtree = false,
loaderror = false,
savelists = true,
pattern = nil, -- lists
@@ -11348,7 +12435,7 @@ local function makepathexpression(str)
end
end
-local function reportcriticalvariables()
+local function reportcriticalvariables(cnfspec)
if trace_locating then
for i=1,#resolvers.criticalvars do
local k = resolvers.criticalvars[i]
@@ -11356,6 +12443,14 @@ local function reportcriticalvariables()
report_resolving("variable '%s' set to '%s'",k,v)
end
report_resolving()
+ if cnfspec then
+ if type(cnfspec) == "table" then
+ report_resolving("using configuration specification '%s'",concat(cnfspec,","))
+ else
+ report_resolving("using configuration specification '%s'",cnfspec)
+ end
+ end
+ report_resolving()
end
reportcriticalvariables = function() end
end
@@ -11370,7 +12465,7 @@ local function identify_configuration_files()
else
resolvers.luacnfstate = "environment"
end
- reportcriticalvariables()
+ reportcriticalvariables(cnfspec)
local cnfpaths = expandedpathfromlist(resolvers.splitpath(cnfspec))
local luacnfname = resolvers.luacnfname
for i=1,#cnfpaths do
@@ -11406,6 +12501,19 @@ local function load_configuration_files()
if blob then
local setups = instance.setups
local data = blob()
+ local parent = data and data.parent
+ if parent then
+ local filename = filejoin(pathname,parent)
+ local realname = resolvers.resolve(filename) -- no shortcut
+ local blob = loadfile(realname)
+ if blob then
+ local parentdata = blob()
+ if parentdata then
+ report_resolving("loading configuration file '%s'",filename)
+ data = table.merged(parentdata,data)
+ end
+ end
+ end
data = data and data.content
if data then
if trace_locating then
@@ -11495,11 +12603,6 @@ local function locate_file_databases()
local runtime = stripped == path
path = resolvers.cleanpath(path)
local spec = resolvers.splitmethod(stripped)
- -- TH Original did this the other way around (elseif and if tests
- -- reversed) but then the else branch was never reached, as 'runtime'
- -- would effectively be ignored. In turn, that meant that the wrong
- -- locator method was used, such that tree: and file: were treated
- -- identically (cached and no runtime search).
if runtime and (spec.noscheme or spec.scheme == "file") then
stripped = "tree:///" .. stripped
elseif spec.scheme == "cache" or spec.scheme == "file" then
@@ -11507,7 +12610,7 @@ local function locate_file_databases()
end
if trace_locating then
if runtime then
- report_resolving("locating list of '%s' (runtime)",path)
+ report_resolving("locating list of '%s' (runtime) (%s)",path,stripped)
else
report_resolving("locating list of '%s' (cached)",path)
end
@@ -11551,6 +12654,39 @@ local function save_file_databases() -- will become cachers
end
end
+function resolvers.renew(hashname)
+ if hashname and hashname ~= "" then
+ local expanded = resolvers.expansion(hashname) or ""
+ if expanded ~= "" then
+ if trace_locating then
+ report_resolving("identifying tree '%s' from '%s'",expanded,hashname)
+ end
+ hashname = expanded
+ else
+ if trace_locating then
+ report_resolving("identifying tree '%s'",hashname)
+ end
+ end
+ local realpath = resolvers.resolve(hashname)
+ if lfs.isdir(realpath) then
+ if trace_locating then
+ report_resolving("using path '%s'",realpath)
+ end
+ methodhandler('generators',hashname)
+ -- could be shared
+ local content = instance.files[hashname]
+ caches.collapsecontent(content)
+ if trace_locating then
+ report_resolving("saving tree '%s'",hashname)
+ end
+ caches.savecontent(hashname,"files",content)
+ -- till here
+ else
+ report_resolving("invalid path '%s'",realpath)
+ end
+ end
+end
+
local function load_databases()
locate_file_databases()
if instance.diskcache and not instance.renewcache then
@@ -11838,6 +12974,7 @@ local function collect_files(names)
if dname == "" or find(dname,"^%.") then
dname = false
else
+dname = gsub(dname,"*","%.*")
dname = "/" .. dname .. "$"
end
local hashes = instance.hashes
@@ -11897,9 +13034,22 @@ local function collect_files(names)
return noffiles > 0 and filelist or nil
end
-function resolvers.registerintrees(name)
- if not find(name,"^%.") then
- instance.foundintrees[name] = (instance.foundintrees[name] or 0) + 1 -- maybe only one
+local fit = { }
+
+function resolvers.registerintrees(filename,format,filetype,usedmethod,foundname)
+ local foundintrees = instance.foundintrees
+ if usedmethod == "direct" and filename == foundname and fit[foundname] then
+ -- just an extra lookup after a test on presence
+ else
+ local t = {
+ filename = filename,
+ format = format ~= "" and format or nil,
+ filetype = filetype ~= "" and filetype or nil,
+ usedmethod = usedmethod,
+ foundname = foundname,
+ }
+ fit[foundname] = t
+ foundintrees[#foundintrees+1] = t
end
end
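+
+-- shape of a registered entry (not part of this patch; the values are hypothetical):
+--
+-- foundintrees[#foundintrees] == {
+--   filename   = "context.mkiv",
+--   format     = "tex",        -- nil when the asked format was ""
+--   filetype   = "tex",
+--   usedmethod = "database",
+--   foundname  = ".../texmf-context/tex/context/base/context.mkiv",
+-- }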
@@ -11919,297 +13069,423 @@ end
local preparetreepattern = Cs((P(".")/"%%." + P("-")/"%%-" + P(1))^0 * Cc("$"))
--- this one is split in smaller functions but it needs testing
+-- -- -- begin of main file search routing -- -- -- needs checking as previous has been patched
-local function collect_instance_files(filename,askedformat,allresults) -- todo : plugin (scanners, checkers etc)
- local result = { }
- local stamp = nil
- askedformat = askedformat or ""
- filename = collapsepath(filename)
- -- speed up / beware: format problem
- if instance.remember and not allresults then
- stamp = filename .. "--" .. askedformat
- if instance.found[stamp] then
+local collect_instance_files
+
+local function find_analyze(filename,askedformat,allresults)
+ local filetype, wantedfiles, ext = '', { }, fileextname(filename)
+ -- too tricky as filename can be bla.1.2.3:
+ --
+ -- if not suffixmap[ext] then
+ -- wantedfiles[#wantedfiles+1] = filename
+ -- end
+ wantedfiles[#wantedfiles+1] = filename
+ if askedformat == "" then
+ if ext == "" or not suffixmap[ext] then
+ local defaultsuffixes = resolvers.defaultsuffixes
+ for i=1,#defaultsuffixes do
+ local forcedname = filename .. '.' .. defaultsuffixes[i]
+ wantedfiles[#wantedfiles+1] = forcedname
+ filetype = resolvers.formatofsuffix(forcedname)
+ if trace_locating then
+ report_resolving("forcing filetype '%s'",filetype)
+ end
+ end
+ else
+ filetype = resolvers.formatofsuffix(filename)
if trace_locating then
- report_resolving("remembered file '%s'",filename)
+ report_resolving("using suffix based filetype '%s'",filetype)
end
- resolvers.registerintrees(filename) -- for tracing used files
- return instance.found[stamp]
end
- end
- if not dangerous[askedformat] then
- if isreadable(filename) then
- if trace_detail then
- report_resolving("file '%s' found directly",filename)
- end
- if stamp then
- instance.found[stamp] = { filename }
+ else
+ if ext == "" or not suffixmap[ext] then
+ local format_suffixes = suffixes[askedformat]
+ if format_suffixes then
+ for i=1,#format_suffixes do
+ wantedfiles[#wantedfiles+1] = filename .. "." .. format_suffixes[i]
+ end
end
- return { filename }
+ end
+ filetype = askedformat
+ if trace_locating then
+ report_resolving("using given filetype '%s'",filetype)
end
end
+ return filetype, wantedfiles
+end
+
+local function find_direct(filename,allresults)
+ if not dangerous[askedformat] and isreadable(filename) then
+ if trace_detail then
+ report_resolving("file '%s' found directly",filename)
+ end
+ return "direct", { filename }
+ end
+end
+
+local function find_wildcard(filename,allresults)
if find(filename,'%*') then
if trace_locating then
report_resolving("checking wildcard '%s'", filename)
end
- result = resolvers.findwildcardfiles(filename) -- we can use th elocal
- elseif file.is_qualified_path(filename) then
- if isreadable(filename) then
- if trace_locating then
- report_resolving("qualified name '%s'", filename)
- end
- result = { filename }
- else
- local forcedname, ok, suffix = "", false, fileextname(filename)
- if suffix == "" then -- why
- local format_suffixes = askedformat == "" and resolvers.defaultsuffixes or suffixes[askedformat]
- if format_suffixes then
- for i=1,#format_suffixes do
- local s = format_suffixes[i]
- forcedname = filename .. "." .. s
- if isreadable(forcedname) then
- if trace_locating then
- report_resolving("no suffix, forcing format filetype '%s'", s)
- end
- result, ok = { forcedname }, true
- break
- end
- end
- end
- end
- if not ok and suffix ~= "" then
- -- try to find in tree (no suffix manipulation), here we search for the
- -- matching last part of the name
- local basename = filebasename(filename)
- local pattern = lpegmatch(preparetreepattern,filename)
- -- messy .. to be sorted out
- local savedformat = askedformat
- local format = savedformat or ""
- if format == "" then
- askedformat = resolvers.formatofsuffix(suffix)
- end
- if not format then
- askedformat = "othertextfiles" -- kind of everything, maybe texinput is better
- end
- --
- if basename ~= filename then
- local resolved = collect_instance_files(basename,askedformat,allresults)
- if #result == 0 then -- shouldn't this be resolved ?
- local lowered = lower(basename)
- if filename ~= lowered then
- resolved = collect_instance_files(lowered,askedformat,allresults)
- end
- end
- resolvers.format = savedformat
- --
- for r=1,#resolved do
- local rr = resolved[r]
- if find(rr,pattern) then
- result[#result+1], ok = rr, true
- end
- end
- end
- -- a real wildcard:
- --
- -- if not ok then
- -- local filelist = collect_files({basename})
- -- for f=1,#filelist do
- -- local ff = filelist[f][3] or ""
- -- if find(ff,pattern) then
- -- result[#result+1], ok = ff, true
- -- end
- -- end
- -- end
- end
- if not ok and trace_locating then
- report_resolving("qualified name '%s'", filename)
- end
+ local method, result = resolvers.findwildcardfiles(filename)
+ if result then
+ return "wildcard", result
end
- else
- -- search spec
- local filetype, done, wantedfiles, ext = '', false, { }, fileextname(filename)
- -- -- tricky as filename can be bla.1.2.3
- -- if not suffixmap[ext] then --- probably needs to be done elsewhere too
- -- wantedfiles[#wantedfiles+1] = filename
- -- end
- wantedfiles[#wantedfiles+1] = filename
- if askedformat == "" then
- if ext == "" or not suffixmap[ext] then
- local defaultsuffixes = resolvers.defaultsuffixes
- for i=1,#defaultsuffixes do
- local forcedname = filename .. '.' .. defaultsuffixes[i]
- wantedfiles[#wantedfiles+1] = forcedname
- filetype = resolvers.formatofsuffix(forcedname)
+ end
+end
+
+local function find_qualified(filename,allresults) -- this one will be split too
+ if not file.is_qualified_path(filename) then
+ return
+ end
+ if trace_locating then
+ report_resolving("checking qualified name '%s'", filename)
+ end
+ if isreadable(filename) then
+ if trace_detail then
+ report_resolving("qualified file '%s' found", filename)
+ end
+ return "qualified", { filename }
+ end
+ if trace_detail then
+ report_resolving("locating qualified file '%s'", filename)
+ end
+ local forcedname, suffix = "", fileextname(filename)
+ if suffix == "" then -- why
+ local format_suffixes = askedformat == "" and resolvers.defaultsuffixes or suffixes[askedformat]
+ if format_suffixes then
+ for i=1,#format_suffixes do
+ local s = format_suffixes[i]
+ forcedname = filename .. "." .. s
+ if isreadable(forcedname) then
if trace_locating then
- report_resolving("forcing filetype '%s'",filetype)
+ report_resolving("no suffix, forcing format filetype '%s'", s)
end
+ return "qualified", { forcedname }
end
- else
- filetype = resolvers.formatofsuffix(filename)
- if trace_locating then
- report_resolving("using suffix based filetype '%s'",filetype)
+ end
+ end
+ end
+ if suffix and suffix ~= "" then
+ -- try to find in tree (no suffix manipulation), here we search for the
+ -- matching last part of the name
+ local basename = filebasename(filename)
+ local pattern = lpegmatch(preparetreepattern,filename)
+ -- messy .. to be sorted out
+ local savedformat = askedformat
+ local format = savedformat or ""
+ if format == "" then
+ askedformat = resolvers.formatofsuffix(suffix)
+ end
+ if not format then
+ askedformat = "othertextfiles" -- kind of everything, maybe all
+ end
+ --
+ if basename ~= filename then
+ local resolved = collect_instance_files(basename,askedformat,allresults)
+ if #resolved == 0 then
+ local lowered = lower(basename)
+ if filename ~= lowered then
+ resolved = collect_instance_files(lowered,askedformat,allresults)
end
end
- else
- if ext == "" or not suffixmap[ext] then
- local format_suffixes = suffixes[askedformat]
- if format_suffixes then
- for i=1,#format_suffixes do
- wantedfiles[#wantedfiles+1] = filename .. "." .. format_suffixes[i]
+ resolvers.format = savedformat
+ --
+ if #resolved > 0 then
+ local result = { }
+ for r=1,#resolved do
+ local rr = resolved[r]
+ if find(rr,pattern) then
+ result[#result+1] = rr
end
end
+ if #result > 0 then
+ return "qualified", result
+ end
end
- filetype = askedformat
- if trace_locating then
- report_resolving("using given filetype '%s'",filetype)
- end
end
- local typespec = resolvers.variableofformat(filetype)
- local pathlist = resolvers.expandedpathlist(typespec)
- if not pathlist or #pathlist == 0 then
- -- no pathlist, access check only / todo == wildcard
- if trace_detail then
- report_resolving("checking filename '%s', filetype '%s', wanted files '%s'",filename, filetype or '?',concat(wantedfiles," | "))
+ -- a real wildcard:
+ --
+ -- local filelist = collect_files({basename})
+ -- result = { }
+ -- for f=1,#filelist do
+ -- local ff = filelist[f][3] or ""
+ -- if find(ff,pattern) then
+ -- result[#result+1], ok = ff, true
+ -- end
+ -- end
+ -- if #result > 0 then
+ -- return "qualified", result
+ -- end
+ end
+end
+
+local function check_subpath(fname)
+ if isreadable(fname) then
+ if trace_detail then
+ report_resolving("found '%s' by deep scanning",fname)
+ end
+ return fname
+ end
+end
+
+local function find_intree(filename,filetype,wantedfiles,allresults)
+ local typespec = resolvers.variableofformat(filetype)
+ local pathlist = resolvers.expandedpathlist(typespec)
+ local method = "intree"
+ if pathlist and #pathlist > 0 then
+ -- list search
+ local filelist = collect_files(wantedfiles)
+ local dirlist = { }
+ if filelist then
+ for i=1,#filelist do
+ dirlist[i] = filedirname(filelist[i][3]) .. "/" -- was [2] .. gamble
end
- for k=1,#wantedfiles do
- local fname = wantedfiles[k]
- if fname and isreadable(fname) then
- filename, done = fname, true
- result[#result+1] = filejoin('.',fname)
- break
+ end
+ if trace_detail then
+ report_resolving("checking filename '%s'",filename)
+ end
+ local result = { }
+ for k=1,#pathlist do
+ local path = pathlist[k]
+ local pathname = lpegmatch(inhibitstripper,path)
+ local doscan = path == pathname -- no ^!!
+ if not find (pathname,'//$') then
+ doscan = false -- we check directly on the path
+ end
+ local done = false
+ -- using file list
+ if filelist then -- database
+ -- compare list entries with permitted pattern -- /xx /xx//
+ local expression = makepathexpression(pathname)
+ if trace_detail then
+ report_resolving("using pattern '%s' for path '%s'",expression,pathname)
end
- end
- -- this is actually 'other text files' or 'any' or 'whatever'
- local filelist = collect_files(wantedfiles)
- local fl = filelist and filelist[1]
- if fl then
- filename = fl[3] -- not local?
- result[#result+1] = resolvers.resolve(filename)
- done = true
- end
- else
- -- list search
- local filelist = collect_files(wantedfiles)
- local dirlist = { }
- if filelist then
- for i=1,#filelist do
- dirlist[i] = filedirname(filelist[i][3]) .. "/" -- was [2] .. gamble
- end
- end
- if trace_detail then
- report_resolving("checking filename '%s'",filename)
- end
- for k=1,#pathlist do
- local path = pathlist[k]
- local pathname = lpegmatch(inhibitstripper,path)
- local doscan = path == pathname -- no ^!!
- if not find (pathname, '//$') then doscan = false end
- done = false
- -- using file list
- if filelist then
- -- compare list entries with permitted pattern -- /xx /xx//
- local expression = makepathexpression(pathname)
- if trace_detail then
- report_resolving("using pattern '%s' for path '%s'",expression,pathname)
- end
- for k=1,#filelist do
- local fl = filelist[k]
- local f = fl[2]
- local d = dirlist[k]
- if find(d,expression) then
- -- todo, test for readable
- result[#result+1] = resolvers.resolve(fl[3]) -- no shortcut
- done = true
- if allresults then
- if trace_detail then
- report_resolving("match to '%s' in hash for file '%s' and path '%s', continue scanning",expression,f,d)
- end
- else
- if trace_detail then
- report_resolving("match to '%s' in hash for file '%s' and path '%s', quit scanning",expression,f,d)
- end
- break
+ for k=1,#filelist do
+ local fl = filelist[k]
+ local f = fl[2]
+ local d = dirlist[k]
+ if find(d,expression) then
+ -- todo, test for readable
+ result[#result+1] = resolvers.resolve(fl[3]) -- no shortcut
+ done = true
+ if allresults then
+ if trace_detail then
+ report_resolving("match to '%s' in hash for file '%s' and path '%s', continue scanning",expression,f,d)
end
- elseif trace_detail then
- report_resolving("no match to '%s' in hash for file '%s' and path '%s'",expression,f,d)
+ else
+ if trace_detail then
+ report_resolving("match to '%s' in hash for file '%s' and path '%s', quit scanning",expression,f,d)
+ end
+ break
end
+ elseif trace_detail then
+ report_resolving("no match to '%s' in hash for file '%s' and path '%s'",expression,f,d)
end
end
- if not done then
- -- check if on disk / unchecked / does not work at all / also zips
- -- TH perhaps it did not work because of missing resolvers.resolve()
- -- and resolvers.scanfiles() calls ...
- pathname = resolvers.resolve(pathname)
- local scheme = url.hasscheme(pathname)
- if not scheme or scheme == "file" then
- local pname = gsub(pathname,"%.%*$",'')
- if not find(pname,"%*") then
- local ppname = gsub(pname,"/+$","")
- if can_be_dir(ppname) then
- local files = {}
- if doscan then files = resolvers.scanfiles(ppname) end
+ end
+ if done then
+ method = "database"
+ else
+ method = "filesystem" -- bonus, even when !! is specified
+ pathname = gsub(pathname,"/+$","")
+ pathname = resolvers.resolve(pathname)
+ local scheme = url.hasscheme(pathname)
+ if not scheme or scheme == "file" then
+ local pname = gsub(pathname,"%.%*$",'')
+ if not find(pname,"%*") then
+ if can_be_dir(pname) then
+ -- quick root scan first
+ for k=1,#wantedfiles do
+ local w = wantedfiles[k]
+ local fname = check_subpath(filejoin(pname,w))
+ if fname then
+ result[#result+1] = fname
+ done = true
+ if not allresults then
+ break
+ end
+ end
+ end
+ if not done and doscan then
+ -- collect files in path (and cache the result)
+ local files = resolvers.simplescanfiles(pname,false,true)
for k=1,#wantedfiles do
local w = wantedfiles[k]
- local subpath = ''
- if files[w] then
- subpath = type(files[w]) == "table" and files[w][1] or files[w]
- end
- local fname = filejoin(ppname,subpath,w)
- if isreadable(fname) then
- if trace_detail then
- report_resolving("found '%s' by scanning",fname)
+ local subpath = files[w]
+ if not subpath or subpath == "" then
+ -- rootscan already done
+ elseif type(subpath) == "string" then
+ local fname = check_subpath(filejoin(pname,subpath,w))
+ if fname then
+ result[#result+1] = fname
+ done = true
+ if not allresults then
+ break
+ end
+ end
+ else
+ for i=1,#subpath do
+ local sp = subpath[i]
+ if sp == "" then
+ -- roottest already done
+ else
+ local fname = check_subpath(filejoin(pname,sp,w))
+ if fname then
+ result[#result+1] = fname
+ done = true
+ if not allresults then
+ break
+ end
+ end
+ end
+ end
+ if done and not allresults then
+ break
end
- result[#result+1] = fname
- done = true
- if not allresults then break end
end
end
- else
- -- no access needed for non existing path, speedup (esp in large tree with lots of fake)
end
end
+ else
+ -- no access needed for a non-existing path, speedup (esp. in a large tree with lots of fake paths)
end
end
- if not done and doscan then
- -- todo: slow path scanning ... although we now have tree:// supported in $TEXMF
- end
- if done and not allresults then break end
+ end
+ -- todo recursive scanning
+ if done and not allresults then
+ break
end
end
+ if #result > 0 then
+ return method, result
+ end
end
- for k=1,#result do
- local rk = collapsepath(result[k])
- result[k] = rk
- resolvers.registerintrees(rk) -- for tracing used files
+end
+
+local function find_onpath(filename,filetype,wantedfiles,allresults)
+ if trace_detail then
+ report_resolving("checking filename '%s', filetype '%s', wanted files '%s'",filename, filetype or '?',concat(wantedfiles," | "))
+ end
+ local result = { }
+ for k=1,#wantedfiles do
+ local fname = wantedfiles[k]
+ if fname and isreadable(fname) then
+ filename = fname
+ result[#result+1] = filejoin('.',fname)
+ if not allresults then
+ break
+ end
+ end
end
- if stamp then
- instance.found[stamp] = result
+ if #result > 0 then
+ return "onpath", result
end
- return result
end
--- -- -- begin of main file search routing -- -- --
-
-
-
-
-
-
-
+local function find_otherwise(filename,filetype,wantedfiles,allresults) -- other text files | any | whatever
+ local filelist = collect_files(wantedfiles)
+ local fl = filelist and filelist[1]
+ if fl then
+ return "otherwise", { resolvers.resolve(fl[3]) } -- filename
+ end
+end
+-- we could have a loop over the 6 functions but then we'd have to
+-- always analyze
+collect_instance_files = function(filename,askedformat,allresults) -- uses nested
+ askedformat = askedformat or ""
+ filename = collapsepath(filename)
+ if allresults then
+ -- no need for caching, only used for tracing
+ local filetype, wantedfiles = find_analyze(filename,askedformat)
+ local results = {
+ { find_direct (filename,true) },
+ { find_wildcard (filename,true) },
+ { find_qualified(filename,true) },
+ { find_intree (filename,filetype,wantedfiles,true) },
+ { find_onpath (filename,filetype,wantedfiles,true) },
+ { find_otherwise(filename,filetype,wantedfiles,true) },
+ }
+ local result, status, done = { }, { }, { }
+ for k, r in next, results do
+ local method, list = r[1], r[2]
+ if method and list then
+ for i=1,#list do
+ local c = collapsepath(list[i])
+ if not done[c] then
+ result[#result+1] = c
+ done[c] = true
+ end
+ status[#status+1] = format("%-10s: %s",method,c)
+ end
+ end
+ end
+ if trace_detail then
+ report_resolving("lookup status: %s",table.serialize(status,filename))
+ end
+ return result, status
+ else
+ local method, result, stamp, filetype, wantedfiles
+ if instance.remember then
+ stamp = format("%s--%s", filename, askedformat)
+ result = stamp and instance.found[stamp]
+ if result then
+ if trace_locating then
+ report_resolving("remembered file '%s'",filename)
+ end
+ return result
+ end
+ end
+ method, result = find_direct(filename)
+ if not result then
+ method, result = find_wildcard(filename)
+ if not result then
+ method, result = find_qualified(filename)
+ if not result then
+ filetype, wantedfiles = find_analyze(filename,askedformat)
+ method, result = find_intree(filename,filetype,wantedfiles)
+ if not result then
+ method, result = find_onpath(filename,filetype,wantedfiles)
+ if not result then
+ method, result = find_otherwise(filename,filetype,wantedfiles)
+ end
+ end
+ end
+ end
+ end
+ if result and #result > 0 then
+ local foundname = collapsepath(result[1])
+ resolvers.registerintrees(filename,askedformat,filetype,method,foundname)
+ result = { foundname }
+ else
+ result = { } -- maybe false
+ end
+ if stamp then
+ if trace_locating then
+ report_resolving("remembering file '%s'",filename)
+ end
+ instance.found[stamp] = result
+ end
+ return result
+ end
+end
-- -- -- end of main file search routing -- -- --
+
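+-- lookup order sketch (not part of this patch): a plain request such as
+--
+-- resolvers.findfiles("context.mkiv")
+--
+-- is expected to try direct, wildcard, qualified, intree, onpath and otherwise in
+-- that order; the winning method is what registerintrees records as usedmethod.
+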
local function findfiles(filename,filetype,allresults)
- local result = collect_instance_files(filename,filetype or "",allresults)
- if #result == 0 then
+ local result, status = collect_instance_files(filename,filetype or "",allresults)
+ if not result or #result == 0 then
local lowered = lower(filename)
if filename ~= lowered then
- return collect_instance_files(lowered,filetype or "",allresults)
+ result, status = collect_instance_files(lowered,filetype or "",allresults)
end
end
- return result
+ return result or { }, status
end
function resolvers.findfiles(filename,filetype)
@@ -12370,6 +13646,10 @@ function resolvers.load(option)
return files and next(files) and true
end
+function resolvers.loadtime()
+ return statistics.elapsedtime(instance)
+end
+
local function report(str)
if trace_locating then
report_resolving(str) -- has already verbose
@@ -12383,6 +13663,9 @@ function resolvers.dowithfilesandreport(command, files, ...) -- will move
if trace_locating then
report('') -- ?
end
+ if type(files) == "string" then
+ files = { files }
+ end
for f=1,#files do
local file = files[f]
local result = command(file,...)
@@ -12540,6 +13823,8 @@ local cleanpath, findgivenfile, expansion = resolvers.cleanpath, resolvers.findg
local getenv = resolvers.getenv -- we can probably also use resolvers.expansion
local P, Cs, lpegmatch = lpeg.P, lpeg.Cs, lpeg.match
+-- getenv = function(...) return resolvers.getenv(...) end -- needs checking (definition changes later on)
+
prefixes.environment = function(str)
return cleanpath(expansion(str))
end
@@ -12578,7 +13863,7 @@ end
prefixes.filename = function(str)
local fullname = findgivenfile(str) or ""
- return cleanpath(file.basename((fullname ~= "" and fullname) or str))
+ return cleanpath(file.basename((fullname ~= "" and fullname) or str)) -- no cleanpath needed here
end
prefixes.pathname = function(str)
@@ -12621,8 +13906,9 @@ function resolvers.allprefixes(separator)
end
local function _resolve_(method,target)
- if prefixes[method] then
- return prefixes[method](target)
+ local action = prefixes[method]
+ if action then
+ return action(target)
else
return method .. ":" .. target
end
@@ -12637,7 +13923,7 @@ end
local function resolve(str) -- use schemes, this one is then for the commandline only
local res = resolved[str]
if not res then
- res = gsub(str,"([a-z][a-z]+):([^ \"\';]*)",_resolve_)
+ res = gsub(str,"([a-z][a-z]+):([^ \"\';]*)",_resolve_) -- home:xx;selfautoparent:xx; etc
resolved[str] = res
abstract[res] = str
end
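+
+-- illustrative calls (not part of this patch): prefixed chunks are rewritten one by
+-- one via the prefixes table and unknown prefixes fall through unchanged, e.g.
+--
+-- resolve("environment:TEXMFCNF") -- runs prefixes.environment on "TEXMFCNF"
+-- resolve("nosuch:foo")           -- stays "nosuch:foo"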
@@ -12797,7 +14083,7 @@ end
function generators.file(specification)
local path = specification.filename
- local content = resolvers.scanfiles(path)
+ local content = resolvers.scanfiles(path,false,true) -- scan once
resolvers.registerfilehash(path,content,true)
end
@@ -13152,6 +14438,18 @@ local archives = zip.archives
zip.registeredfiles = zip.registeredfiles or { }
local registeredfiles = zip.registeredfiles
+local limited = false
+
+directives.register("system.inputmode", function(v)
+ if not limited then
+ local i_limiter = io.i_limiter(v)
+ if i_limiter then
+ zip.open = i_limiter.protect(zip.open)
+ limited = true
+ end
+ end
+end)
+
local function validzip(str) -- todo: use url splitter
if not find(str,"^zip://") then
return "zip:///" .. str
@@ -13419,11 +14717,11 @@ function resolvers.finders.tree(specification)
end
function resolvers.locators.tree(specification)
- -- TH: resolvers.resolve() call was missing here
- local name = resolvers.resolve(specification.filename)
- if name ~= '' and lfs.isdir(name) then
+ local name = specification.filename
+ local realname = resolvers.resolve(name) -- no shortcut
+ if realname and realname ~= '' and lfs.isdir(realname) then
if trace_locating then
- report_trees("locator '%s' found",name)
+ report_trees("locator '%s' found",realname)
end
resolvers.appendhash('tree',name,false) -- don't cache
elseif trace_locating then
@@ -13437,6 +14735,8 @@ function resolvers.hashers.tree(specification)
report_trees("analysing '%s'",name)
end
resolvers.methodhandler("hashers",name)
+
+ resolvers.generators.file(specification)
end
resolvers.concatinators.tree = resolvers.concatinators.file
@@ -13596,8 +14896,15 @@ local function loaded(libpaths,name,simple)
end
package.loaders[2] = function(name) -- was [#package.loaders+1]
- if trace_locating then -- mode detail
- report_libraries("! locating '%s'",name)
+ if file.suffix(name) == "" then
+ name = file.addsuffix(name,"lua") -- maybe a list
+ if trace_locating then -- mode detail
+ report_libraries("! locating '%s' with forced suffix",name)
+ end
+ else
+ if trace_locating then -- mode detail
+ report_libraries("! locating '%s'",name)
+ end
end
for i=1,#libformats do
local format = libformats[i]
@@ -13804,7 +15111,13 @@ function resolvers.load_tree(tree,resolve)
-- Beware, we need to obey the relocatable autoparent so we
-- set TEXMFCNF to its raw value. This is somewhat tricky when
-- we run a mkii job from within. Therefore, in mtxrun, there
- -- is a resolve applied when we're in mkii/kpse mode.
+ -- is a resolve applied when we're in mkii/kpse mode or when
+ -- --resolve is passed to mtxrun. Maybe we should also set the
+    -- local AUTOPARENT etc. although these are always set anew.
+
+ if resolve then
+ resolvers.luacnfspec = resolvers.resolve(resolvers.luacnfspec)
+ end
setenv('SELFAUTOPARENT', newroot)
setenv('SELFAUTODIR', newtree)
@@ -13812,11 +15125,12 @@ function resolvers.load_tree(tree,resolve)
setenv('TEXROOT', newroot)
setenv('TEXOS', texos)
setenv('TEXMFOS', texmfos)
- setenv('TEXMFCNF', resolvers.luacnfspec, not resolve)
- setenv("PATH", newpath .. io.pathseparator .. getenv("PATH"))
+ setenv('TEXMFCNF', resolvers.luacnfspec,true) -- already resolved
+ setenv('PATH', newpath .. io.pathseparator .. getenv('PATH'))
report_tds("changing from root '%s' to '%s'",oldroot,newroot)
- report_tds("prepending '%s' to binary path",newpath)
+ report_tds("prepending '%s' to PATH",newpath)
+ report_tds("setting TEXMFCNF to '%s'",resolvers.luacnfspec)
report_tds()
end
end
@@ -13837,6 +15151,7 @@ if not modules then modules = { } end modules ['data-lst'] = {
-- used in mtxrun, can be loaded later .. todo
local find, concat, upper, format = string.find, table.concat, string.upper, string.format
+local fastcopy, sortedpairs = table.fastcopy, table.sortedpairs
resolvers.listers = resolvers.listers or { }
@@ -13867,10 +15182,10 @@ function resolvers.listers.variables(pattern)
end
end
end
- local env = table.fastcopy(environment)
- local var = table.fastcopy(variables)
- local exp = table.fastcopy(expansions)
- for key, value in table.sortedpairs(configured) do
+ local env = fastcopy(environment)
+ local var = fastcopy(variables)
+ local exp = fastcopy(expansions)
+ for key, value in sortedpairs(configured) do
if key ~= "" and (pattern == "" or find(upper(key),pattern)) then
report_lists(key)
report_lists(" env: %s",tabstr(rawget(environment,key)) or "unset")
@@ -13879,9 +15194,9 @@ function resolvers.listers.variables(pattern)
report_lists(" res: %s",resolvers.resolve(expansions[key]) or "unset")
end
end
- instance.environment = table.fastcopy(env)
- instance.variables = table.fastcopy(var)
- instance.expansions = table.fastcopy(exp)
+ instance.environment = fastcopy(env)
+ instance.variables = fastcopy(var)
+ instance.expansions = fastcopy(exp)
end
function resolvers.listers.configurations(report)
@@ -14168,8 +15483,8 @@ own = { } -- not local, might change
own.libs = { -- order can be made better
'l-string.lua',
- 'l-lpeg.lua',
'l-table.lua',
+ 'l-lpeg.lua',
'l-io.lua',
'l-number.lua',
'l-set.lua',
@@ -14240,7 +15555,7 @@ end
-- End of hack.
-local format, gsub, gmatch, match = string.format, string.gsub, string.gmatch, string.match
+local format, gsub, gmatch, match, find = string.format, string.gsub, string.gmatch, string.match, string.find
local concat = table.concat
own.name = (environment and environment.ownname) or arg[0] or 'mtxrun.lua'
@@ -14362,7 +15677,7 @@ local helpinfo = [[
--resolve resolve prefixed arguments
--ctxlua run internally (using preloaded libs)
--internal run script using built in libraries (same as --ctxlua)
---locate locate given filename
+--locate locate given filename in database (default) or system (--first --all --detail)
--autotree use texmf tree cf. env 'texmfstart_tree' or 'texmfstarttree'
--tree=pathtotree use given texmf tree (default file: 'setuptex.tmf')
@@ -14410,7 +15725,7 @@ local helpinfo = [[
local application = logs.application {
name = "mtxrun",
- banner = "ConTeXt TDS Runner Tool 1.30",
+ banner = "ConTeXt TDS Runner Tool 1.31",
helpinfo = helpinfo,
}
@@ -14455,12 +15770,12 @@ runners.registered = {
xmltools = { 'xmltools.rb', true },
-- luatools = { 'luatools.lua', true },
mtxtools = { 'mtxtools.rb', true },
- pdftrimwhite = { 'pdftrimwhite.pl', false }
+ pdftrimwhite = { 'pdftrimwhite.pl', false },
}
runners.launchers = {
windows = { },
- unix = { }
+ unix = { },
}
-- like runners.libpath("framework"): looks on script's subpath
@@ -14487,7 +15802,7 @@ function runners.prepare()
end
local touchname = environment.argument("iftouched")
if type(touchname) == "string" and touchname ~= "" then
- local oldname, newname = string.split(touchname, ",")
+ local oldname, newname = string.splitup(touchname, ",")
if oldname and newname and oldname ~= "" and newname ~= "" then
if not file.needs_updating(oldname,newname) then
if e_verbose then
@@ -14521,11 +15836,11 @@ function runners.execute_script(fullname,internal,nosplit)
if path ~= "" then
result = fullname
elseif name then
- name = name:gsub("^int[%a]*:",function()
+ name = gsub(name,"^int[%a]*:",function()
internal = true
return ""
end )
- name = name:gsub("^script:","")
+ name = gsub(name,"^script:","")
if suffix == "" and runners.registered[name] and runners.registered[name][1] then
name = runners.registered[name][1]
suffix = file.extname(name)
@@ -14555,13 +15870,15 @@ function runners.execute_script(fullname,internal,nosplit)
environment.ownscript = result
dofile(result)
else
- local quotedresult = result
- if string.match(result,' ') and not string.match(result,"^\".*\"$") then quotedresult = '"' .. result .. '"' end
local binary = runners.applications[file.extname(result)]
+ result = string.quoted(string.unquoted(result))
+ -- if string.match(result,' ') and not string.match(result,"^\".*\"$") then
+ -- result = '"' .. result .. '"'
+ -- end
if binary and binary ~= "" then
- quotedresult = binary .. " " .. quotedresult
+ result = binary .. " " .. result
end
- local command = quotedresult .. " " .. environment.reconstructcommandline(environment.arguments_after,noquote)
+ local command = result .. " " .. environment.reconstructcommandline(environment.arguments_after,noquote)
if e_verbose then
report()
report("executing: %s",command)
@@ -14606,7 +15923,7 @@ function runners.execute_program(fullname)
local before, after = environment.splitarguments(fullname)
for k=1,#after do after[k] = resolvers.resolve(after[k]) end
environment.initializearguments(after)
- fullname = fullname:gsub("^bin:","")
+ fullname = gsub(fullname,"^bin:","")
local command = fullname .. " " .. (environment.reconstructcommandline(after or "",noquote) or "")
report()
report("executing: %s",command)
@@ -14667,10 +15984,24 @@ function runners.resolve_string(filename)
end
end
-function runners.locate_file(filename)
- -- differs from texmfstart where locate appends .com .exe .bat ... todo
+-- differs from texmfstart where locate appends .com .exe .bat ... todo
+
+function runners.locate_file(filename) -- was given file but only searches in tree
if filename and filename ~= "" then
- runners.report_location(resolvers.findgivenfile(filename))
+ if environment.argument("first") then
+ runners.report_location(resolvers.findfile(filename))
+ -- resolvers.dowithfilesandreport(resolvers.findfile,filename)
+ elseif environment.argument("all") then
+ local result, status = resolvers.findfiles(filename)
+ if status and environment.argument("detail") then
+ runners.report_location(status)
+ else
+ runners.report_location(result)
+ end
+ else
+ runners.report_location(resolvers.findgivenfile(filename))
+ -- resolvers.dowithfilesandreport(resolvers.findgivenfile,filename)
+ end
end
end
@@ -14679,12 +16010,12 @@ function runners.locate_platform()
end
function runners.report_location(result)
- if e_verbose then
- reportline()
- if result and result ~= "" then
- report(result)
- else
- report("not found")
+ if type(result) == "table" then
+ for i=1,#result do
+ if i > 1 then
+ io.write("\n")
+ end
+ io.write(result[i])
end
else
io.write(result)
@@ -14742,8 +16073,8 @@ function resolvers.launch(str)
end
function runners.launch_file(filename)
- instance.allresults = true
trackers.enable("resolvers.locating")
+ local allresults = environment.arguments["all"]
local pattern = environment.arguments["pattern"]
if not pattern or pattern == "" then
pattern = filename
@@ -14751,15 +16082,15 @@ function runners.launch_file(filename)
if not pattern or pattern == "" then
report("provide name or --pattern=")
else
- local t = resolvers.findfiles(pattern)
+ local t = resolvers.findfiles(pattern,nil,allresults)
if not t or #t == 0 then
- t = resolvers.findfiles("*/" .. pattern)
+ t = resolvers.findfiles("*/" .. pattern,nil,allresults)
end
if not t or #t == 0 then
- t = resolvers.findfiles("*/" .. pattern .. "*")
+ t = resolvers.findfiles("*/" .. pattern .. "*",nil,allresults)
end
if t and #t > 0 then
- if environment.arguments["all"] then
+ if allresults then
for _, v in pairs(t) do
report("launching %s", v)
resolvers.launch(v)
@@ -14774,6 +16105,11 @@ function runners.launch_file(filename)
end
end
+local mtxprefixes = {
+ { "^mtx%-", "mtx-" },
+ { "^mtx%-t%-", "mtx-t-" },
+}
+
function runners.find_mtx_script(filename)
local function found(name)
local path = file.dirname(name)
@@ -14799,24 +16135,27 @@ function runners.find_mtx_script(filename)
return fullname
end
-- mtx- prefix checking
- local mtxprefix = (filename:find("^mtx%-") and "") or "mtx-"
- -- context namespace, mtx-<filename>
- fullname = mtxprefix .. filename
- fullname = found(fullname) or resolvers.findfile(fullname)
- if fullname and fullname ~= "" then
- return fullname
- end
- -- context namespace, mtx-<filename>s
- fullname = mtxprefix .. basename .. "s" .. "." .. suffix
- fullname = found(fullname) or resolvers.findfile(fullname)
- if fullname and fullname ~= "" then
- return fullname
- end
- -- context namespace, mtx-<filename minus trailing s>
- fullname = mtxprefix .. basename:gsub("s$","") .. "." .. suffix
- fullname = found(fullname) or resolvers.findfile(fullname)
- if fullname and fullname ~= "" then
- return fullname
+ for i=1,#mtxprefixes do
+ local mtxprefix = mtxprefixes[i]
+ mtxprefix = find(filename,mtxprefix[1]) and "" or mtxprefix[2]
+ -- context namespace, mtx-<filename>
+ fullname = mtxprefix .. filename
+ fullname = found(fullname) or resolvers.findfile(fullname)
+ if fullname and fullname ~= "" then
+ return fullname
+ end
+ -- context namespace, mtx-<filename>s
+ fullname = mtxprefix .. basename .. "s" .. "." .. suffix
+ fullname = found(fullname) or resolvers.findfile(fullname)
+ if fullname and fullname ~= "" then
+ return fullname
+ end
+ -- context namespace, mtx-<filename minus trailing s>
+ fullname = mtxprefix .. gsub(basename,"s$","") .. "." .. suffix
+ fullname = found(fullname) or resolvers.findfile(fullname)
+ if fullname and fullname ~= "" then
+ return fullname
+ end
end
-- context namespace, just <filename>
fullname = resolvers.findfile(filename)
@@ -14963,9 +16302,11 @@ instance.lsrmode = environment.argument("lsr") or false
local is_mkii_stub = runners.registered[file.removesuffix(file.basename(filename))]
-if environment.argument("usekpse") or environment.argument("forcekpse") or is_mkii_stub then
+local e_argument = environment.argument
+
+if e_argument("usekpse") or e_argument("forcekpse") or is_mkii_stub then
- resolvers.load_tree(environment.argument('tree'),true) -- force resolve of TEXMFCNF
+ resolvers.load_tree(e_argument('tree'),true) -- force resolve of TEXMFCNF
os.setenv("engine","")
os.setenv("progname","")
@@ -14978,7 +16319,7 @@ if environment.argument("usekpse") or environment.argument("forcekpse") or is_mk
other = "other text files",
}
- local progname = environment.argument("progname") or 'context'
+ local progname = e_argument("progname") or 'context'
local function kpse_initialized()
texconfig.kpse_init = true
@@ -14993,7 +16334,7 @@ if environment.argument("usekpse") or environment.argument("forcekpse") or is_mk
local findfile = resolvers.findfile
local showpath = resolvers.showpath
- if environment.argument("forcekpse") then
+ if e_argument("forcekpse") then
function resolvers.findfile(name,kind)
return (kpse_initialized():find_file(resolvers.cleanpath(name),(kind ~= "" and (remapper[kind] or kind)) or "tex") or "") or ""
@@ -15002,7 +16343,7 @@ if environment.argument("usekpse") or environment.argument("forcekpse") or is_mk
return (kpse_initialized():show_path(name)) or ""
end
- elseif environment.argument("usekpse") or is_mkii_stub then
+ elseif e_argument("usekpse") or is_mkii_stub then
resolvers.load()
@@ -15041,12 +16382,12 @@ else
end
end
- resolvers.load_tree(environment.argument('tree'))
+ resolvers.load_tree(e_argument('tree'),e_argument("resolve"))
end
-if environment.argument("selfmerge") then
+if e_argument("selfmerge") then
-- embed used libraries
@@ -15056,27 +16397,27 @@ if environment.argument("selfmerge") then
utilities.merger.selfmerge(own.name,own.libs,{ found })
end
-elseif environment.argument("selfclean") then
+elseif e_argument("selfclean") then
-- remove embedded libraries
runners.loadbase()
utilities.merger.selfclean(own.name)
-elseif environment.argument("selfupdate") then
+elseif e_argument("selfupdate") then
runners.loadbase()
trackers.enable("resolvers.locating")
resolvers.updatescript(own.name,"mtxrun")
-elseif environment.argument("ctxlua") or environment.argument("internal") then
+elseif e_argument("ctxlua") or e_argument("internal") then
-- run a script by loading it (using libs)
runners.loadbase()
ok = runners.execute_script(filename,true)
-elseif environment.argument("script") or environment.argument("scripts") then
+elseif e_argument("script") or e_argument("scripts") then
-- run a script by loading it (using libs), pass args
@@ -15087,109 +16428,110 @@ elseif environment.argument("script") or environment.argument("scripts") then
ok = runners.execute_ctx_script(filename)
end
-elseif environment.argument("execute") then
+elseif e_argument("execute") then
-- execute script
runners.loadbase()
ok = runners.execute_script(filename)
-elseif environment.argument("direct") then
+elseif e_argument("direct") then
-- equals bin:
runners.loadbase()
ok = runners.execute_program(filename)
-elseif environment.argument("edit") then
+elseif e_argument("edit") then
-- edit file
runners.loadbase()
runners.edit_script(filename)
-elseif environment.argument("launch") then
+elseif e_argument("launch") then
runners.loadbase()
runners.launch_file(filename)
-elseif environment.argument("makestubs") then
+elseif e_argument("makestubs") then
    -- make stubs (deprecated)
runners.handle_stubs(true)
-elseif environment.argument("removestubs") then
+elseif e_argument("removestubs") then
    -- remove stubs (deprecated)
runners.loadbase()
runners.handle_stubs(false)
-elseif environment.argument("resolve") then
+elseif e_argument("resolve") then
-- resolve string
runners.loadbase()
runners.resolve_string(filename)
-elseif environment.argument("locate") then
+elseif e_argument("locate") then
- -- locate file
+ -- locate file (only database)
runners.loadbase()
runners.locate_file(filename)
-elseif environment.argument("platform") or environment.argument("show-platform") then
+elseif e_argument("platform") or e_argument("show-platform") then
-- locate platform
runners.loadbase()
runners.locate_platform()
-elseif environment.argument("prefixes") then
+elseif e_argument("prefixes") then
runners.loadbase()
runners.prefixes()
-elseif environment.argument("timedrun") then
+elseif e_argument("timedrun") then
-- locate platform
runners.loadbase()
runners.timedrun(filename)
-elseif environment.argument("variables") or environment.argument("show-variables") or environment.argument("expansions") or environment.argument("show-expansions") then
+elseif e_argument("variables") or e_argument("show-variables") or e_argument("expansions") or e_argument("show-expansions") then
-- luatools: runners.execute_ctx_script("mtx-base","--expansions",filename)
resolvers.load("nofiles")
- resolvers.listers.variables(environment.argument("pattern"))
+ resolvers.listers.variables(e_argument("pattern"))
-elseif environment.argument("configurations") or environment.argument("show-configurations") then
+elseif e_argument("configurations") or e_argument("show-configurations") then
-- luatools: runners.execute_ctx_script("mtx-base","--configurations",filename)
resolvers.load("nofiles")
resolvers.listers.configurations()
-elseif environment.argument("find-file") then
+elseif e_argument("find-file") then
-- luatools: runners.execute_ctx_script("mtx-base","--find-file",filename)
resolvers.load()
- local e_pattern = environment.argument("pattern")
- local e_format = environment.argument("format")
+ local e_all = e_argument("all")
+ local e_pattern = e_argument("pattern")
+ local e_format = e_argument("format")
+ local finder = e_all and resolvers.findfiles or resolvers.findfile
if not e_pattern then
runners.register_arguments(filename)
environment.initializearguments(environment.arguments_after)
- resolvers.dowithfilesandreport(resolvers.findfiles,environment.files,e_format)
+ resolvers.dowithfilesandreport(finder,environment.files,e_format)
elseif type(e_pattern) == "string" then
- instance.allresults = true -- brrrr
- resolvers.dowithfilesandreport(resolvers.findfiles,{ e_pattern }, e_format)
+ resolvers.dowithfilesandreport(finder,{ e_pattern },e_format)
end
-elseif environment.argument("find-path") then
+elseif e_argument("find-path") then
-- luatools: runners.execute_ctx_script("mtx-base","--find-path",filename)
@@ -15201,7 +16543,7 @@ elseif environment.argument("find-path") then
print(path)
end
-elseif environment.argument("expand-braces") then
+elseif e_argument("expand-braces") then
-- luatools: runners.execute_ctx_script("mtx-base","--expand-braces",filename)
@@ -15210,7 +16552,7 @@ elseif environment.argument("expand-braces") then
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.expandbraces, environment.files)
-elseif environment.argument("expand-path") then
+elseif e_argument("expand-path") then
-- luatools: runners.execute_ctx_script("mtx-base","--expand-path",filename)
@@ -15219,7 +16561,7 @@ elseif environment.argument("expand-path") then
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.expandpath, environment.files)
-elseif environment.argument("expand-var") or environment.argument("expand-variable") then
+elseif e_argument("expand-var") or e_argument("expand-variable") then
-- luatools: runners.execute_ctx_script("mtx-base","--expand-var",filename)
@@ -15228,7 +16570,7 @@ elseif environment.argument("expand-var") or environment.argument("expand-variab
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.expansion, environment.files)
-elseif environment.argument("show-path") or environment.argument("path-value") then
+elseif e_argument("show-path") or e_argument("path-value") then
-- luatools: runners.execute_ctx_script("mtx-base","--show-path",filename)
@@ -15237,7 +16579,7 @@ elseif environment.argument("show-path") or environment.argument("path-value") t
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.showpath, environment.files)
-elseif environment.argument("var-value") or environment.argument("show-value") then
+elseif e_argument("var-value") or e_argument("show-value") then
-- luatools: runners.execute_ctx_script("mtx-base","--show-value",filename)
@@ -15246,28 +16588,36 @@ elseif environment.argument("var-value") or environment.argument("show-value") t
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.variable,environment.files)
-elseif environment.argument("format-path") then
+elseif e_argument("format-path") then
-- luatools: runners.execute_ctx_script("mtx-base","--format-path",filename)
resolvers.load()
report(caches.getwritablepath("format"))
-elseif environment.argument("pattern") then
+elseif e_argument("pattern") then
-- luatools
- runners.execute_ctx_script("mtx-base","--pattern='" .. environment.argument("pattern") .. "'",filename)
+ runners.execute_ctx_script("mtx-base","--pattern='" .. e_argument("pattern") .. "'",filename)
-elseif environment.argument("generate") then
+elseif e_argument("generate") then
-- luatools
- instance.renewcache = true
- trackers.enable("resolvers.locating")
- resolvers.load()
+ if filename and filename ~= "" then
+ resolvers.load("nofiles")
+ trackers.enable("resolvers.locating")
+ resolvers.renew(filename)
+ else
+ instance.renewcache = true
+ trackers.enable("resolvers.locating")
+ resolvers.load()
+ end
-elseif environment.argument("make") or environment.argument("ini") or environment.argument("compile") then
+ e_verbose = true
+
+elseif e_argument("make") or e_argument("ini") or e_argument("compile") then
-- luatools: runners.execute_ctx_script("mtx-base","--make",filename)
@@ -15275,29 +16625,33 @@ elseif environment.argument("make") or environment.argument("ini") or environmen
trackers.enable("resolvers.locating")
environment.make_format(filename)
-elseif environment.argument("run") then
+elseif e_argument("run") then
-- luatools
runners.execute_ctx_script("mtx-base","--run",filename)
-elseif environment.argument("fmt") then
+elseif e_argument("fmt") then
-- luatools
runners.execute_ctx_script("mtx-base","--fmt",filename)
-elseif environment.argument("help") and filename=='base' then
+elseif e_argument("help") and filename=='base' then
-- luatools
runners.execute_ctx_script("mtx-base","--help")
-elseif environment.argument("help") or filename=='help' or filename == "" then
+elseif e_argument("version") then
+
+ application.version()
+
+elseif e_argument("help") or filename=='help' or filename == "" then
application.help()
-elseif filename:find("^bin:") then
+elseif find(filename,"^bin:") then
runners.loadbase()
ok = runners.execute_program(filename)
diff --git a/Master/bin/sparc-linux/mtxrun b/Master/bin/sparc-linux/mtxrun
index 7adcd3023a8..335c4fcb9e0 100755
--- a/Master/bin/sparc-linux/mtxrun
+++ b/Master/bin/sparc-linux/mtxrun
@@ -160,509 +160,6 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['l-lpeg'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local lpeg = require("lpeg")
-
-local type = type
-
--- Beware, we predefine a bunch of patterns here and one reason for doing so
--- is that we get consistent behaviour in some of the visualizers.
-
-lpeg.patterns = lpeg.patterns or { } -- so that we can share
-local patterns = lpeg.patterns
-
-local P, R, S, V, match = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.match
-local Ct, C, Cs, Cc = lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc
-local lpegtype = lpeg.type
-
-local utfcharacters = string.utfcharacters
-local utfgmatch = unicode and unicode.utf8.gmatch
-
-local anything = P(1)
-local endofstring = P(-1)
-local alwaysmatched = P(true)
-
-patterns.anything = anything
-patterns.endofstring = endofstring
-patterns.beginofstring = alwaysmatched
-patterns.alwaysmatched = alwaysmatched
-
-local digit, sign = R('09'), S('+-')
-local cr, lf, crlf = P("\r"), P("\n"), P("\r\n")
-local newline = crlf + cr + lf
-local escaped = P("\\") * anything
-local squote = P("'")
-local dquote = P('"')
-local space = P(" ")
-
-local utfbom_32_be = P('\000\000\254\255')
-local utfbom_32_le = P('\255\254\000\000')
-local utfbom_16_be = P('\255\254')
-local utfbom_16_le = P('\254\255')
-local utfbom_8 = P('\239\187\191')
-local utfbom = utfbom_32_be + utfbom_32_le
- + utfbom_16_be + utfbom_16_le
- + utfbom_8
-local utftype = utfbom_32_be / "utf-32-be" + utfbom_32_le / "utf-32-le"
- + utfbom_16_be / "utf-16-be" + utfbom_16_le / "utf-16-le"
- + utfbom_8 / "utf-8" + alwaysmatched / "unknown"
-
-local utf8next = R("\128\191")
-
-patterns.utf8one = R("\000\127")
-patterns.utf8two = R("\194\223") * utf8next
-patterns.utf8three = R("\224\239") * utf8next * utf8next
-patterns.utf8four = R("\240\244") * utf8next * utf8next * utf8next
-patterns.utfbom = utfbom
-patterns.utftype = utftype
-
-local utf8char = patterns.utf8one + patterns.utf8two + patterns.utf8three + patterns.utf8four
-local validutf8char = utf8char^0 * endofstring * Cc(true) + Cc(false)
-
-patterns.utf8 = utf8char
-patterns.utf8char = utf8char
-patterns.validutf8 = validutf8char
-patterns.validutf8char = validutf8char
-
-patterns.digit = digit
-patterns.sign = sign
-patterns.cardinal = sign^0 * digit^1
-patterns.integer = sign^0 * digit^1
-patterns.float = sign^0 * digit^0 * P('.') * digit^1
-patterns.cfloat = sign^0 * digit^0 * P(',') * digit^1
-patterns.number = patterns.float + patterns.integer
-patterns.cnumber = patterns.cfloat + patterns.integer
-patterns.oct = P("0") * R("07")^1
-patterns.octal = patterns.oct
-patterns.HEX = P("0x") * R("09","AF")^1
-patterns.hex = P("0x") * R("09","af")^1
-patterns.hexadecimal = P("0x") * R("09","AF","af")^1
-patterns.lowercase = R("az")
-patterns.uppercase = R("AZ")
-patterns.letter = patterns.lowercase + patterns.uppercase
-patterns.space = space
-patterns.tab = P("\t")
-patterns.spaceortab = patterns.space + patterns.tab
-patterns.eol = S("\n\r")
-patterns.spacer = S(" \t\f\v") -- + string.char(0xc2, 0xa0) if we want utf (cf mail roberto)
-patterns.newline = newline
-patterns.emptyline = newline^1
-patterns.nonspacer = 1 - patterns.spacer
-patterns.whitespace = patterns.eol + patterns.spacer
-patterns.nonwhitespace = 1 - patterns.whitespace
-patterns.equal = P("=")
-patterns.comma = P(",")
-patterns.commaspacer = P(",") * patterns.spacer^0
-patterns.period = P(".")
-patterns.colon = P(":")
-patterns.semicolon = P(";")
-patterns.underscore = P("_")
-patterns.escaped = escaped
-patterns.squote = squote
-patterns.dquote = dquote
-patterns.nosquote = (escaped + (1-squote))^0
-patterns.nodquote = (escaped + (1-dquote))^0
-patterns.unsingle = (squote/"") * patterns.nosquote * (squote/"")
-patterns.undouble = (dquote/"") * patterns.nodquote * (dquote/"")
-patterns.unquoted = patterns.undouble + patterns.unsingle -- more often undouble
-patterns.unspacer = ((patterns.spacer^1)/"")^0
-
-patterns.somecontent = (anything - newline - space)^1 -- (utf8char - newline - space)^1
-patterns.beginline = #(1-newline)
-
-local unquoted = Cs(patterns.unquoted * endofstring) -- not C
-
-function string.unquoted(str)
- return match(unquoted,str) or str
-end
-
-
-function lpeg.anywhere(pattern) --slightly adapted from website
- return P { P(pattern) + 1 * V(1) } -- why so complex?
-end
-
-function lpeg.splitter(pattern, action)
- return (((1-P(pattern))^1)/action+1)^0
-end
-
-local splitters_s, splitters_m = { }, { }
-
-local function splitat(separator,single)
- local splitter = (single and splitters_s[separator]) or splitters_m[separator]
- if not splitter then
- separator = P(separator)
- local other = C((1 - separator)^0)
- if single then
- local any = anything
- splitter = other * (separator * C(any^0) + "") -- ?
- splitters_s[separator] = splitter
- else
- splitter = other * (separator * other)^0
- splitters_m[separator] = splitter
- end
- end
- return splitter
-end
-
-lpeg.splitat = splitat
-
-
-local cache = { }
-
-function lpeg.split(separator,str)
- local c = cache[separator]
- if not c then
- c = Ct(splitat(separator))
- cache[separator] = c
- end
- return match(c,str)
-end
-
-function string.split(str,separator)
- local c = cache[separator]
- if not c then
- c = Ct(splitat(separator))
- cache[separator] = c
- end
- return match(c,str)
-end
-
-local spacing = patterns.spacer^0 * newline -- sort of strip
-local empty = spacing * Cc("")
-local nonempty = Cs((1-spacing)^1) * spacing^-1
-local content = (empty + nonempty)^1
-
-patterns.textline = content
-
-
-local linesplitter = Ct(splitat(newline))
-
-patterns.linesplitter = linesplitter
-
-function string.splitlines(str)
- return match(linesplitter,str)
-end
-
-local utflinesplitter = utfbom^-1 * Ct(splitat(newline))
-
-patterns.utflinesplitter = utflinesplitter
-
-function string.utfsplitlines(str)
- return match(utflinesplitter,str)
-end
-
-
-local cache = { }
-
-function lpeg.checkedsplit(separator,str)
- local c = cache[separator]
- if not c then
- separator = P(separator)
- local other = C((1 - separator)^1)
- c = Ct(separator^0 * other * (separator^1 * other)^0)
- cache[separator] = c
- end
- return match(c,str)
-end
-
-function string.checkedsplit(str,separator)
- local c = cache[separator]
- if not c then
- separator = P(separator)
- local other = C((1 - separator)^1)
- c = Ct(separator^0 * other * (separator^1 * other)^0)
- cache[separator] = c
- end
- return match(c,str)
-end
-
-
-local f1 = string.byte
-
-local function f2(s) local c1, c2 = f1(s,1,2) return c1 * 64 + c2 - 12416 end
-local function f3(s) local c1, c2, c3 = f1(s,1,3) return (c1 * 64 + c2) * 64 + c3 - 925824 end
-local function f4(s) local c1, c2, c3, c4 = f1(s,1,4) return ((c1 * 64 + c2) * 64 + c3) * 64 + c4 - 63447168 end
-
-local utf8byte = patterns.utf8one/f1 + patterns.utf8two/f2 + patterns.utf8three/f3 + patterns.utf8four/f4
-
-patterns.utf8byte = utf8byte
-
-
-
-local cache = { }
-
-function lpeg.stripper(str)
- if type(str) == "string" then
- local s = cache[str]
- if not s then
- s = Cs(((S(str)^1)/"" + 1)^0)
- cache[str] = s
- end
- return s
- else
- return Cs(((str^1)/"" + 1)^0)
- end
-end
-
-local cache = { }
-
-function lpeg.keeper(str)
- if type(str) == "string" then
- local s = cache[str]
- if not s then
- s = Cs((((1-S(str))^1)/"" + 1)^0)
- cache[str] = s
- end
- return s
- else
- return Cs((((1-str)^1)/"" + 1)^0)
- end
-end
-
-function lpeg.frontstripper(str) -- or pattern (yet undocumented)
- return (P(str) + P(true)) * Cs(P(1)^0)
-end
-
-function lpeg.endstripper(str) -- or pattern (yet undocumented)
- return Cs((1 - P(str) * P(-1))^0)
-end
-
--- Just for fun I looked at the used bytecode and
--- p = (p and p + pp) or pp gets one more (testset).
-
-function lpeg.replacer(one,two)
- if type(one) == "table" then
- local no = #one
- if no > 0 then
- local p
- for i=1,no do
- local o = one[i]
- local pp = P(o[1]) / o[2]
- if p then
- p = p + pp
- else
- p = pp
- end
- end
- return Cs((p + 1)^0)
- end
- else
- two = two or ""
- return Cs((P(one)/two + 1)^0)
- end
-end
-
-local splitters_f, splitters_s = { }, { }
-
-function lpeg.firstofsplit(separator) -- always return value
- local splitter = splitters_f[separator]
- if not splitter then
- separator = P(separator)
- splitter = C((1 - separator)^0)
- splitters_f[separator] = splitter
- end
- return splitter
-end
-
-function lpeg.secondofsplit(separator) -- nil if not split
- local splitter = splitters_s[separator]
- if not splitter then
- separator = P(separator)
- splitter = (1 - separator)^0 * separator * C(anything^0)
- splitters_s[separator] = splitter
- end
- return splitter
-end
-
-function lpeg.balancer(left,right)
- left, right = P(left), P(right)
- return P { left * ((1 - left - right) + V(1))^0 * right }
-end
-
-
-
-local nany = utf8char/""
-
-function lpeg.counter(pattern)
- pattern = Cs((P(pattern)/" " + nany)^0)
- return function(str)
- return #match(pattern,str)
- end
-end
-
-if utfgmatch then
-
- function lpeg.count(str,what) -- replaces string.count
- if type(what) == "string" then
- local n = 0
- for _ in utfgmatch(str,what) do
- n = n + 1
- end
- return n
- else -- 4 times slower but still faster than / function
- return #match(Cs((P(what)/" " + nany)^0),str)
- end
- end
-
-else
-
- local cache = { }
-
- function lpeg.count(str,what) -- replaces string.count
- if type(what) == "string" then
- local p = cache[what]
- if not p then
- p = Cs((P(what)/" " + nany)^0)
- cache[p] = p
- end
- return #match(p,str)
- else -- 4 times slower but still faster than / function
- return #match(Cs((P(what)/" " + nany)^0),str)
- end
- end
-
-end
-
-local patterns_escapes = { -- also defines in l-string
- ["%"] = "%%",
- ["."] = "%.",
- ["+"] = "%+", ["-"] = "%-", ["*"] = "%*",
- ["["] = "%[", ["]"] = "%]",
- ["("] = "%)", [")"] = "%)",
- -- ["{"] = "%{", ["}"] = "%}"
- -- ["^"] = "%^", ["$"] = "%$",
-}
-
-local simple_escapes = { -- also defines in l-string
- ["-"] = "%-",
- ["."] = "%.",
- ["?"] = ".",
- ["*"] = ".*",
-}
-
-local p = Cs((S("-.+*%()[]") / patterns_escapes + anything)^0)
-local s = Cs((S("-.+*%()[]") / simple_escapes + anything)^0)
-
-function string.escapedpattern(str,simple)
- return match(simple and s or p,str)
-end
-
--- utf extensies
-
-lpeg.UP = lpeg.P
-
-if utfcharacters then
-
- function lpeg.US(str)
- local p
- for uc in utfcharacters(str) do
- if p then
- p = p + P(uc)
- else
- p = P(uc)
- end
- end
- return p
- end
-
-
-elseif utfgmatch then
-
- function lpeg.US(str)
- local p
- for uc in utfgmatch(str,".") do
- if p then
- p = p + P(uc)
- else
- p = P(uc)
- end
- end
- return p
- end
-
-else
-
- function lpeg.US(str)
- local p
- local f = function(uc)
- if p then
- p = p + P(uc)
- else
- p = P(uc)
- end
- end
- match((utf8char/f)^0,str)
- return p
- end
-
-end
-
-local range = Cs(utf8byte) * (Cs(utf8byte) + Cc(false))
-
-local utfchar = unicode and unicode.utf8 and unicode.utf8.char
-
-function lpeg.UR(str,more)
- local first, last
- if type(str) == "number" then
- first = str
- last = more or first
- else
- first, last = match(range,str)
- if not last then
- return P(str)
- end
- end
- if first == last then
- return P(str)
- elseif utfchar and last - first < 8 then -- a somewhat arbitrary criterium
- local p
- for i=first,last do
- if p then
- p = p + P(utfchar(i))
- else
- p = P(utfchar(i))
- end
- end
- return p -- nil when invalid range
- else
- local f = function(b)
- return b >= first and b <= last
- end
- return utf8byte / f -- nil when invalid range
- end
-end
-
-
-
-function lpeg.oneof(list,...) -- lpeg.oneof("elseif","else","if","then")
- if type(list) ~= "table" then
- list = { list, ... }
- end
- -- sort(list) -- longest match first
- local p = P(list[1])
- for l=2,#list do
- p = p + P(list[l])
- end
- return p
-end
-
-function lpeg.is_lpeg(p)
- return p and lpegtype(p) == "pattern"
-end
-
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
if not modules then modules = { } end modules ['l-table'] = {
version = 1.001,
comment = "companion to luat-lib.mkiv",
@@ -675,11 +172,12 @@ local type, next, tostring, tonumber, ipairs, table, string = type, next, tostri
local concat, sort, insert, remove = table.concat, table.sort, table.insert, table.remove
local format, find, gsub, lower, dump, match = string.format, string.find, string.gsub, string.lower, string.dump, string.match
local getmetatable, setmetatable = getmetatable, setmetatable
+local getinfo = debug.getinfo
-- Starting with version 5.2 Lua no longer provide ipairs, which makes
-- sense. As we already used the for loop and # in most places the
-- impact on ConTeXt was not that large; the remaining ipairs already
--- have been replaced. In a similar fashio we also hardly used pairs.
+-- have been replaced. In a similar fashion we also hardly used pairs.
--
-- Just in case, we provide the fallbacks as discussed in Programming
-- in Lua (http://www.lua.org/pil/7.3.html):
@@ -1022,6 +520,8 @@ end
-- problem: there no good number_to_string converter with the best resolution
+local function dummy() end
+
local function do_serialize(root,name,depth,level,indexed)
if level > 0 then
depth = depth .. " "
@@ -1214,19 +714,20 @@ local function do_serialize(root,name,depth,level,indexed)
end
elseif t == "function" then
if functions then
+ local f = getinfo(v).what == "C" and dump(dummy) or dump(v)
+ -- local f = getinfo(v).what == "C" and dump(function(...) return v(...) end) or dump(v)
if tk == "number" then -- or find(k,"^%d+$") then
if hexify then
- handle(format("%s [0x%04X]=loadstring(%q),",depth,k,dump(v)))
+ handle(format("%s [0x%04X]=loadstring(%q),",depth,k,f))
else
- handle(format("%s [%s]=loadstring(%q),",depth,k,dump(v)))
+ handle(format("%s [%s]=loadstring(%q),",depth,k,f))
end
elseif tk == "boolean" then
- handle(format("%s [%s]=loadstring(%q),",depth,tostring(k),dump(v)))
+ handle(format("%s [%s]=loadstring(%q),",depth,tostring(k),f))
elseif noquotes and not reserved[k] and find(k,"^%a[%w%_]*$") then
- handle(format("%s %s=loadstring(%q),",depth,k,dump(v)))
+ handle(format("%s %s=loadstring(%q),",depth,k,f))
else
- -- handle(format("%s [%q]=loadstring(%q),",depth,k,dump(v)))
- handle(format("%s [%q]=loadstring(%q),",depth,k,debug.getinfo(v).what == "C" and "C code" or dump(v)))
+ handle(format("%s [%q]=loadstring(%q),",depth,k,f))
end
end
else
@@ -1571,6 +1072,730 @@ function table.has_one_entry(t)
return t and not next(t,next(t))
end
+-- new
+
+function table.loweredkeys(t) -- maybe utf
+ local l = { }
+ for k, v in next, t do
+ l[lower(k)] = v
+ end
+ return l
+end
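+
+-- usage sketch (hypothetical input, assuming plain ascii keys):
+--
+-- table.loweredkeys { Foo = 1, BAR = 2 } --> { foo = 1, bar = 2 }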
+
+-- new, might move (maybe duplicate)
+
+function table.unique(old)
+ local hash = { }
+ local new = { }
+ local n = 0
+ for i=1,#old do
+ local oi = old[i]
+ if not hash[oi] then
+ n = n + 1
+ new[n] = oi
+ hash[oi] = true
+ end
+ end
+ return new
+end
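+
+-- usage sketch (hypothetical input), first occurrences win and order is kept:
+--
+-- table.unique { "a", "b", "a", "c", "b" } --> { "a", "b", "c" }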
+
+-- function table.sorted(t,...)
+-- table.sort(t,...)
+-- return t -- still sorts in-place
+-- end
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+if not modules then modules = { } end modules ['l-lpeg'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+
+-- a new lpeg fails on a #(1-P(":")) test and really needs a + P(-1)
+
+local lpeg = require("lpeg")
+
+-- tracing (only used when we encounter a problem in integration of lpeg in luatex)
+
+local report = texio and texio.write_nl or print
+
+
+
+
+local type = type
+local byte, char, gmatch = string.byte, string.char, string.gmatch
+
+-- Beware, we predefine a bunch of patterns here and one reason for doing so
+-- is that we get consistent behaviour in some of the visualizers.
+
+lpeg.patterns = lpeg.patterns or { } -- so that we can share
+local patterns = lpeg.patterns
+
+local P, R, S, V, match = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.match
+local Ct, C, Cs, Cc = lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc
+local lpegtype = lpeg.type
+
+local utfcharacters = string.utfcharacters
+local utfgmatch = unicode and unicode.utf8.gmatch
+
+local anything = P(1)
+local endofstring = P(-1)
+local alwaysmatched = P(true)
+
+patterns.anything = anything
+patterns.endofstring = endofstring
+patterns.beginofstring = alwaysmatched
+patterns.alwaysmatched = alwaysmatched
+
+local digit, sign = R('09'), S('+-')
+local cr, lf, crlf = P("\r"), P("\n"), P("\r\n")
+local newline = crlf + S("\r\n") -- cr + lf
+local escaped = P("\\") * anything
+local squote = P("'")
+local dquote = P('"')
+local space = P(" ")
+
+local utfbom_32_be = P('\000\000\254\255')
+local utfbom_32_le = P('\255\254\000\000')
+local utfbom_16_be = P('\255\254')
+local utfbom_16_le = P('\254\255')
+local utfbom_8 = P('\239\187\191')
+local utfbom = utfbom_32_be + utfbom_32_le
+ + utfbom_16_be + utfbom_16_le
+ + utfbom_8
+local utftype = utfbom_32_be / "utf-32-be" + utfbom_32_le / "utf-32-le"
+ + utfbom_16_be / "utf-16-be" + utfbom_16_le / "utf-16-le"
+ + utfbom_8 / "utf-8" + alwaysmatched / "unknown"
+
+local utf8next = R("\128\191")
+
+patterns.utf8one = R("\000\127")
+patterns.utf8two = R("\194\223") * utf8next
+patterns.utf8three = R("\224\239") * utf8next * utf8next
+patterns.utf8four = R("\240\244") * utf8next * utf8next * utf8next
+patterns.utfbom = utfbom
+patterns.utftype = utftype
+
+local utf8char = patterns.utf8one + patterns.utf8two + patterns.utf8three + patterns.utf8four
+local validutf8char = utf8char^0 * endofstring * Cc(true) + Cc(false)
+
+patterns.utf8 = utf8char
+patterns.utf8char = utf8char
+patterns.validutf8 = validutf8char
+patterns.validutf8char = validutf8char
+
+patterns.digit = digit
+patterns.sign = sign
+patterns.cardinal = sign^0 * digit^1
+patterns.integer = sign^0 * digit^1
+patterns.float = sign^0 * digit^0 * P('.') * digit^1
+patterns.cfloat = sign^0 * digit^0 * P(',') * digit^1
+patterns.number = patterns.float + patterns.integer
+patterns.cnumber = patterns.cfloat + patterns.integer
+patterns.oct = P("0") * R("07")^1
+patterns.octal = patterns.oct
+patterns.HEX = P("0x") * R("09","AF")^1
+patterns.hex = P("0x") * R("09","af")^1
+patterns.hexadecimal = P("0x") * R("09","AF","af")^1
+patterns.lowercase = R("az")
+patterns.uppercase = R("AZ")
+patterns.letter = patterns.lowercase + patterns.uppercase
+patterns.space = space
+patterns.tab = P("\t")
+patterns.spaceortab = patterns.space + patterns.tab
+patterns.eol = S("\n\r")
+patterns.spacer = S(" \t\f\v") -- + char(0xc2, 0xa0) if we want utf (cf mail roberto)
+patterns.newline = newline
+patterns.emptyline = newline^1
+patterns.nonspacer = 1 - patterns.spacer
+patterns.whitespace = patterns.eol + patterns.spacer
+patterns.nonwhitespace = 1 - patterns.whitespace
+patterns.equal = P("=")
+patterns.comma = P(",")
+patterns.commaspacer = P(",") * patterns.spacer^0
+patterns.period = P(".")
+patterns.colon = P(":")
+patterns.semicolon = P(";")
+patterns.underscore = P("_")
+patterns.escaped = escaped
+patterns.squote = squote
+patterns.dquote = dquote
+patterns.nosquote = (escaped + (1-squote))^0
+patterns.nodquote = (escaped + (1-dquote))^0
+patterns.unsingle = (squote/"") * patterns.nosquote * (squote/"")
+patterns.undouble = (dquote/"") * patterns.nodquote * (dquote/"")
+patterns.unquoted = patterns.undouble + patterns.unsingle -- more often undouble
+patterns.unspacer = ((patterns.spacer^1)/"")^0
+
+patterns.somecontent = (anything - newline - space)^1 -- (utf8char - newline - space)^1
+patterns.beginline = #(1-newline)
+
+-- print(string.unquoted("test"))
+-- print(string.unquoted([["t\"est"]]))
+-- print(string.unquoted([["t\"est"x]]))
+-- print(string.unquoted("\'test\'"))
+-- print(string.unquoted('"test"'))
+-- print(string.unquoted('"test"'))
+
+function lpeg.anywhere(pattern) --slightly adapted from website
+ return P { P(pattern) + 1 * V(1) } -- why so complex?
+end
+
+function lpeg.splitter(pattern, action)
+ return (((1-P(pattern))^1)/action+1)^0
+end
+
+function lpeg.tsplitter(pattern, action)
+ return Ct((((1-P(pattern))^1)/action+1)^0)
+end
+
+-- problem: separator can be an lpeg pattern and that does not hash too well, but
+-- it's quite okay as the key is then not garbage collected
+
+local splitters_s, splitters_m, splitters_t = { }, { }, { }
+
+local function splitat(separator,single)
+ local splitter = (single and splitters_s[separator]) or splitters_m[separator]
+ if not splitter then
+ separator = P(separator)
+ local other = C((1 - separator)^0)
+ if single then
+ local any = anything
+ splitter = other * (separator * C(any^0) + "") -- ?
+ splitters_s[separator] = splitter
+ else
+ splitter = other * (separator * other)^0
+ splitters_m[separator] = splitter
+ end
+ end
+ return splitter
+end
+
+local function tsplitat(separator)
+ local splitter = splitters_t[separator]
+ if not splitter then
+ splitter = Ct(splitat(separator))
+ splitters_t[separator] = splitter
+ end
+ return splitter
+end
+
+lpeg.splitat = splitat
+lpeg.tsplitat = tsplitat
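+
+-- usage sketch (hypothetical input): splitat returns the parts as multiple
+-- values, tsplitat wraps them in a table:
+--
+-- lpeg.match(lpeg.splitat(","), "a,b,c") --> "a", "b", "c"
+-- lpeg.match(lpeg.tsplitat(","), "a,b,c") --> { "a", "b", "c" }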
+
+function string.splitup(str,separator)
+ if not separator then
+ separator = ","
+ end
+ return match(splitters_m[separator] or splitat(separator),str)
+end
+
+
+local cache = { }
+
+function lpeg.split(separator,str)
+ local c = cache[separator]
+ if not c then
+ c = tsplitat(separator)
+ cache[separator] = c
+ end
+ return match(c,str)
+end
+
+function string.split(str,separator)
+ local c = cache[separator]
+ if not c then
+ c = tsplitat(separator)
+ cache[separator] = c
+ end
+ return match(c,str)
+end
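+
+-- usage sketch (hypothetical input), empty fields are kept:
+--
+-- string.split("a,,b", ",") --> { "a", "", "b" }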
+
+local spacing = patterns.spacer^0 * newline -- sort of strip
+local empty = spacing * Cc("")
+local nonempty = Cs((1-spacing)^1) * spacing^-1
+local content = (empty + nonempty)^1
+
+patterns.textline = content
+
+
+local linesplitter = tsplitat(newline)
+
+patterns.linesplitter = linesplitter
+
+function string.splitlines(str)
+ return match(linesplitter,str)
+end
+
+local utflinesplitter = utfbom^-1 * tsplitat(newline)
+
+patterns.utflinesplitter = utflinesplitter
+
+function string.utfsplitlines(str)
+ return match(utflinesplitter,str or "")
+end
+
+
+local cache = { }
+
+function lpeg.checkedsplit(separator,str)
+ local c = cache[separator]
+ if not c then
+ separator = P(separator)
+ local other = C((1 - separator)^1)
+ c = Ct(separator^0 * other * (separator^1 * other)^0)
+ cache[separator] = c
+ end
+ return match(c,str)
+end
+
+function string.checkedsplit(str,separator)
+ local c = cache[separator]
+ if not c then
+ separator = P(separator)
+ local other = C((1 - separator)^1)
+ c = Ct(separator^0 * other * (separator^1 * other)^0)
+ cache[separator] = c
+ end
+ return match(c,str)
+end
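+
+-- usage sketch (hypothetical input): unlike split, checkedsplit requires at
+-- least one character between separators, so empty fields are dropped:
+--
+-- string.checkedsplit(",a,,b,", ",") --> { "a", "b" }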
+
+
+local function f2(s) local c1, c2 = byte(s,1,2) return c1 * 64 + c2 - 12416 end
+local function f3(s) local c1, c2, c3 = byte(s,1,3) return (c1 * 64 + c2) * 64 + c3 - 925824 end
+local function f4(s) local c1, c2, c3, c4 = byte(s,1,4) return ((c1 * 64 + c2) * 64 + c3) * 64 + c4 - 63447168 end
+
+local utf8byte = patterns.utf8one/byte + patterns.utf8two/f2 + patterns.utf8three/f3 + patterns.utf8four/f4
+
+patterns.utf8byte = utf8byte
+
+
+
+local cache = { }
+
+function lpeg.stripper(str)
+ if type(str) == "string" then
+ local s = cache[str]
+ if not s then
+ s = Cs(((S(str)^1)/"" + 1)^0)
+ cache[str] = s
+ end
+ return s
+ else
+ return Cs(((str^1)/"" + 1)^0)
+ end
+end
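+
+-- usage sketch (hypothetical input): a stripper removes the given characters:
+--
+-- lpeg.match(lpeg.stripper("123"), "a1b2c3") --> "abc"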
+
+local cache = { }
+
+function lpeg.keeper(str)
+ if type(str) == "string" then
+ local s = cache[str]
+ if not s then
+ s = Cs((((1-S(str))^1)/"" + 1)^0)
+ cache[str] = s
+ end
+ return s
+ else
+ return Cs((((1-str)^1)/"" + 1)^0)
+ end
+end
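+
+-- usage sketch (hypothetical input): a keeper is the complement and only
+-- keeps the given characters:
+--
+-- lpeg.match(lpeg.keeper("123"), "a1b2c3") --> "123"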
+
+function lpeg.frontstripper(str) -- or pattern (yet undocumented)
+ return (P(str) + P(true)) * Cs(P(1)^0)
+end
+
+function lpeg.endstripper(str) -- or pattern (yet undocumented)
+ return Cs((1 - P(str) * P(-1))^0)
+end
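+
+-- usage sketch (hypothetical input):
+--
+-- lpeg.match(lpeg.frontstripper("--"), "--foo") --> "foo"
+-- lpeg.match(lpeg.endstripper(";"), "foo;") --> "foo"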
+
+-- Just for fun I looked at the used bytecode and
+-- p = (p and p + pp) or pp gets one more (testset).
+
+function lpeg.replacer(one,two)
+ if type(one) == "table" then
+ local no = #one
+ if no > 0 then
+ local p
+ for i=1,no do
+ local o = one[i]
+ local pp = P(o[1]) / o[2]
+ if p then
+ p = p + pp
+ else
+ p = pp
+ end
+ end
+ return Cs((p + 1)^0)
+ end
+ else
+ two = two or ""
+ return Cs((P(one)/two + 1)^0)
+ end
+end
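+
+-- usage sketch (hypothetical input): a single pair or a list of pairs:
+--
+-- lpeg.match(lpeg.replacer("ab","xy"), "aabb") --> "axyb"
+-- lpeg.match(lpeg.replacer { { "a", "1" }, { "b", "2" } }, "abc") --> "12c"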
+
+local splitters_f, splitters_s = { }, { }
+
+function lpeg.firstofsplit(separator) -- always return value
+ local splitter = splitters_f[separator]
+ if not splitter then
+ separator = P(separator)
+ splitter = C((1 - separator)^0)
+ splitters_f[separator] = splitter
+ end
+ return splitter
+end
+
+function lpeg.secondofsplit(separator) -- nil if not split
+ local splitter = splitters_s[separator]
+ if not splitter then
+ separator = P(separator)
+ splitter = (1 - separator)^0 * separator * C(anything^0)
+ splitters_s[separator] = splitter
+ end
+ return splitter
+end
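+
+-- usage sketch (hypothetical input):
+--
+-- lpeg.match(lpeg.firstofsplit(":"), "foo:bar") --> "foo"
+-- lpeg.match(lpeg.secondofsplit(":"), "foo:bar") --> "bar"
+-- lpeg.match(lpeg.secondofsplit(":"), "foobar") --> nil (no separator found)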
+
+function lpeg.balancer(left,right)
+ left, right = P(left), P(right)
+ return P { left * ((1 - left - right) + V(1))^0 * right }
+end
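+
+-- usage sketch (hypothetical input): matches one balanced group, fails (nil)
+-- when the group is not properly closed:
+--
+-- lpeg.match(lpeg.balancer("(",")"), "(a(b))") --> 7
+-- lpeg.match(lpeg.balancer("(",")"), "(a(b)")  --> nil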
+
+
+
+local nany = utf8char/""
+
+function lpeg.counter(pattern)
+ pattern = Cs((P(pattern)/" " + nany)^0)
+ return function(str)
+ return #match(pattern,str)
+ end
+end
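+
+-- usage sketch (hypothetical input):
+--
+-- lpeg.counter("a")("banana") --> 3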
+
+if utfgmatch then
+
+ function lpeg.count(str,what) -- replaces string.count
+ if type(what) == "string" then
+ local n = 0
+ for _ in utfgmatch(str,what) do
+ n = n + 1
+ end
+ return n
+ else -- 4 times slower but still faster than / function
+ return #match(Cs((P(what)/" " + nany)^0),str)
+ end
+ end
+
+else
+
+ local cache = { }
+
+ function lpeg.count(str,what) -- replaces string.count
+ if type(what) == "string" then
+ local p = cache[what]
+ if not p then
+ p = Cs((P(what)/" " + nany)^0)
+ cache[p] = p
+ end
+ return #match(p,str)
+ else -- 4 times slower but still faster than / function
+ return #match(Cs((P(what)/" " + nany)^0),str)
+ end
+ end
+
+end
+
+local patterns_escapes = { -- also defined in l-string
+ ["%"] = "%%",
+ ["."] = "%.",
+ ["+"] = "%+", ["-"] = "%-", ["*"] = "%*",
+ ["["] = "%[", ["]"] = "%]",
+ ["("] = "%)", [")"] = "%)",
+ -- ["{"] = "%{", ["}"] = "%}"
+ -- ["^"] = "%^", ["$"] = "%$",
+}
+
+local simple_escapes = { -- also defined in l-string
+ ["-"] = "%-",
+ ["."] = "%.",
+ ["?"] = ".",
+ ["*"] = ".*",
+}
+
+local p = Cs((S("-.+*%()[]") / patterns_escapes + anything)^0)
+local s = Cs((S("-.+*%()[]") / simple_escapes + anything)^0)
+
+function string.escapedpattern(str,simple)
+ return match(simple and s or p,str)
+end
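+
+-- usage sketch (hypothetical input): the simple variant also maps the * and ?
+-- wildcards onto lua pattern equivalents:
+--
+-- string.escapedpattern("*.lua", true) --> ".*%.lua"
+-- string.escapedpattern("1+1")         --> "1%+1"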
+
+-- utf extensions
+
+lpeg.UP = lpeg.P
+
+if utfcharacters then
+
+ function lpeg.US(str)
+ local p
+ for uc in utfcharacters(str) do
+ if p then
+ p = p + P(uc)
+ else
+ p = P(uc)
+ end
+ end
+ return p
+ end
+
+
+elseif utfgmatch then
+
+ function lpeg.US(str)
+ local p
+ for uc in utfgmatch(str,".") do
+ if p then
+ p = p + P(uc)
+ else
+ p = P(uc)
+ end
+ end
+ return p
+ end
+
+else
+
+ function lpeg.US(str)
+ local p
+ local f = function(uc)
+ if p then
+ p = p + P(uc)
+ else
+ p = P(uc)
+ end
+ end
+ match((utf8char/f)^0,str)
+ return p
+ end
+
+end
+
+local range = Cs(utf8byte) * (Cs(utf8byte) + Cc(false))
+
+local utfchar = unicode and unicode.utf8 and unicode.utf8.char
+
+function lpeg.UR(str,more)
+ local first, last
+ if type(str) == "number" then
+ first = str
+ last = more or first
+ else
+ first, last = match(range,str)
+ if not last then
+ return P(str)
+ end
+ end
+ if first == last then
+ return P(str)
+  elseif utfchar and last - first < 8 then -- a somewhat arbitrary criterion
+ local p
+ for i=first,last do
+ if p then
+ p = p + P(utfchar(i))
+ else
+ p = P(utfchar(i))
+ end
+ end
+ return p -- nil when invalid range
+ else
+ local f = function(b)
+ return b >= first and b <= last
+ end
+ return utf8byte / f -- nil when invalid range
+ end
+end
+
+
+
+function lpeg.oneof(list,...) -- lpeg.oneof("elseif","else","if","then")
+ if type(list) ~= "table" then
+ list = { list, ... }
+ end
+ -- sort(list) -- longest match first
+ local p = P(list[1])
+ for l=2,#list do
+ p = p + P(list[l])
+ end
+ return p
+end
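+
+-- usage sketch (hypothetical input): an ordered choice over the given strings;
+-- without captures lpeg.match returns the position after the match:
+--
+-- lpeg.match(lpeg.oneof("elseif","else","if"), "elseif") --> 7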
+
+function lpeg.is_lpeg(p)
+ return p and lpegtype(p) == "pattern"
+end
+
+-- For the moment here, but it might move to utilities. Beware, we need to
+-- have the longest keyword first, so 'aaa' comes before 'aa' which is why we
+-- loop back from the end cq. prepend.
+
+local sort, fastcopy, sortedkeys = table.sort, table.fastcopy, table.sortedkeys -- dependency!
+
+function lpeg.append(list,pp,delayed,checked)
+ local p = pp
+ if #list > 0 then
+ local keys = fastcopy(list)
+ sort(keys)
+ for i=#keys,1,-1 do
+ local k = keys[i]
+ if p then
+ p = P(k) + p
+ else
+ p = P(k)
+ end
+ end
+ elseif delayed then -- hm, it looks like the lpeg parser resolves anyway
+ local keys = sortedkeys(list)
+ if p then
+ for i=1,#keys,1 do
+ local k = keys[i]
+ local v = list[k]
+ p = P(k)/list + p
+ end
+ else
+ for i=1,#keys do
+ local k = keys[i]
+ local v = list[k]
+ if p then
+ p = P(k) + p
+ else
+ p = P(k)
+ end
+ end
+ if p then
+ p = p / list
+ end
+ end
+ elseif checked then
+ -- problem: substitution gives a capture
+ local keys = sortedkeys(list)
+ for i=1,#keys do
+ local k = keys[i]
+ local v = list[k]
+ if p then
+ if k == v then
+ p = P(k) + p
+ else
+ p = P(k)/v + p
+ end
+ else
+ if k == v then
+ p = P(k)
+ else
+ p = P(k)/v
+ end
+ end
+ end
+ else
+ local keys = sortedkeys(list)
+ for i=1,#keys do
+ local k = keys[i]
+ local v = list[k]
+ if p then
+ p = P(k)/v + p
+ else
+ p = P(k)/v
+ end
+ end
+ end
+ return p
+end
+
+-- inspect(lpeg.append({ a = "1", aa = "1", aaa = "1" } ,nil,true))
+-- inspect(lpeg.append({ ["degree celsius"] = "1", celsius = "1", degree = "1" } ,nil,true))
+
+-- function lpeg.exact_match(words,case_insensitive)
+-- local pattern = concat(words)
+-- if case_insensitive then
+-- local pattern = S(upper(characters)) + S(lower(characters))
+-- local list = { }
+-- for i=1,#words do
+-- list[lower(words[i])] = true
+-- end
+-- return Cmt(pattern^1, function(_,i,s)
+-- return list[lower(s)] and i
+-- end)
+-- else
+-- local pattern = S(concat(words))
+-- local list = { }
+-- for i=1,#words do
+-- list[words[i]] = true
+-- end
+-- return Cmt(pattern^1, function(_,i,s)
+-- return list[s] and i
+-- end)
+-- end
+-- end
+
+-- experiment:
+
+local function make(t)
+ local p
+-- for k, v in next, t do
+ for k, v in table.sortedhash(t) do
+ if not p then
+ if next(v) then
+ p = P(k) * make(v)
+ else
+ p = P(k)
+ end
+ else
+ if next(v) then
+ p = p + P(k) * make(v)
+ else
+ p = p + P(k)
+ end
+ end
+ end
+ return p
+end
+
+function lpeg.utfchartabletopattern(list)
+ local tree = { }
+ for i=1,#list do
+ local t = tree
+ for c in gmatch(list[i],".") do
+ if not t[c] then
+ t[c] = { }
+ end
+ t = t[c]
+ end
+ end
+ return make(tree)
+end
+
+-- inspect ( lpeg.utfchartabletopattern {
+-- utfchar(0x00A0), -- nbsp
+-- utfchar(0x2000), -- enquad
+-- utfchar(0x2001), -- emquad
+-- utfchar(0x2002), -- enspace
+-- utfchar(0x2003), -- emspace
+-- utfchar(0x2004), -- threeperemspace
+-- utfchar(0x2005), -- fourperemspace
+-- utfchar(0x2006), -- sixperemspace
+-- utfchar(0x2007), -- figurespace
+-- utfchar(0x2008), -- punctuationspace
+-- utfchar(0x2009), -- breakablethinspace
+-- utfchar(0x200A), -- hairspace
+-- utfchar(0x200B), -- zerowidthspace
+-- utfchar(0x202F), -- narrownobreakspace
+-- utfchar(0x205F), -- math thinspace
+-- } )
+
end -- of closure
@@ -1831,6 +2056,11 @@ function io.readstring(f,n,m)
return str
end
+--
+
+if not io.i_limiter then function io.i_limiter() end end -- dummy so we can test safely
+if not io.o_limiter then function io.o_limiter() end end -- dummy so we can test safely
+
end -- of closure
@@ -2061,7 +2291,7 @@ if not modules then modules = { } end modules ['l-os'] = {
-- maybe build io.flush in os.execute
local os = os
-local find, format, gsub, upper = string.find, string.format, string.gsub, string.upper
+local find, format, gsub, upper, gmatch = string.find, string.format, string.gsub, string.upper, string.gmatch
local concat = table.concat
local random, ceil = math.random, math.ceil
local rawget, rawset, type, getmetatable, setmetatable, tonumber = rawget, rawset, type, getmetatable, setmetatable, tonumber
@@ -2167,9 +2397,9 @@ os.type = os.type or (io.pathseparator == ";" and "windows") or "unix"
os.name = os.name or (os.type == "windows" and "mswin" ) or "linux"
if os.type == "windows" then
- os.libsuffix, os.binsuffix = 'dll', 'exe'
+ os.libsuffix, os.binsuffix, os.binsuffixes = 'dll', 'exe', { 'exe', 'cmd', 'bat' }
else
- os.libsuffix, os.binsuffix = 'so', ''
+ os.libsuffix, os.binsuffix, os.binsuffixes = 'so', '', { '' }
end
function os.launch(str)
@@ -2400,6 +2630,39 @@ function os.timezone(delta)
end
end
+local memory = { }
+
+local function which(filename)
+ local fullname = memory[filename]
+ if fullname == nil then
+ local suffix = file.suffix(filename)
+ local suffixes = suffix == "" and os.binsuffixes or { suffix }
+ for directory in gmatch(os.getenv("PATH"),"[^" .. io.pathseparator .."]+") do
+ local df = file.join(directory,filename)
+ for i=1,#suffixes do
+ local dfs = file.addsuffix(df,suffixes[i])
+ if io.exists(dfs) then
+ fullname = dfs
+ break
+ end
+ end
+ end
+ if not fullname then
+ fullname = false
+ end
+ memory[filename] = fullname
+ end
+ return fullname
+end
+
+os.which = which
+os.where = which
+
+-- print(os.which("inkscape.exe"))
+-- print(os.which("inkscape"))
+-- print(os.which("gs.exe"))
+-- print(os.which("ps2pdf"))
+
end -- of closure
@@ -2499,10 +2762,10 @@ end
local trick_1 = char(1)
local trick_2 = "^" .. trick_1 .. "/+"
-function file.join(...)
+function file.join(...) -- rather dirty
local lst = { ... }
local a, b = lst[1], lst[2]
- if a == "" then
+ if not a or a == "" then -- not a added
lst[1] = trick_1
elseif b and find(a,"^/+$") and find(b,"^/") then
lst[1] = ""
@@ -2523,6 +2786,15 @@ function file.join(...)
end
+-- We should be able to use:
+--
+-- function file.is_writable(name)
+-- local a = attributes(name) or attributes(dirname(name,"."))
+-- return a and sub(a.permissions,2,2) == "w"
+-- end
+--
+-- But after some testing Taco and I came up with:
+
function file.is_writable(name)
if lfs.isdir(name) then
name = name .. "/m_t_x_t_e_s_t.tmp"
@@ -2532,12 +2804,17 @@ function file.is_writable(name)
os.remove(name)
return true
end
- else
- local existing = lfs.isfile(name)
- f = io.open(name,"ab")
+ elseif lfs.isfile(name) then
+ local f = io.open(name,"ab")
if f then
f:close()
- if not existing then os.remove(name) end
+ return true
+ end
+ else
+ local f = io.open(name,"ab")
+ if f then
+ f:close()
+ os.remove(name)
return true
end
end
@@ -2594,7 +2871,7 @@ function file.collapsepath(str,anchor)
if element == '.' then
-- do nothing
elseif element == '..' then
- local n = i -1
+ local n = i - 1
while n > 0 do
local element = oldelements[n]
if element ~= '..' and element ~= '.' then
@@ -2690,10 +2967,42 @@ local path = C(((1-slash)^0 * slash)^0)
local suffix = period * C(P(1-period)^0 * P(-1))
local base = C((1-suffix)^0)
-local pattern = (drive + Cc("")) * (path + Cc("")) * (base + Cc("")) * (suffix + Cc(""))
+drive = drive + Cc("")
+path = path + Cc("")
+base = base + Cc("")
+suffix = suffix + Cc("")
-function file.splitname(str) -- returns drive, path, base, suffix
- return lpegmatch(pattern,str)
+local pattern_a = drive * path * base * suffix
+local pattern_b = path * base * suffix
+local pattern_c = C(drive * path) * C(base * suffix)
+
+function file.splitname(str,splitdrive)
+ if splitdrive then
+ return lpegmatch(pattern_a,str) -- returns drive, path, base, suffix
+ else
+ return lpegmatch(pattern_b,str) -- returns path, base, suffix
+ end
+end
+
+function file.nametotable(str,splitdrive) -- returns table
+ local path, drive, subpath, name, base, suffix = lpegmatch(pattern_c,str)
+ if splitdrive then
+ return {
+ path = path,
+ drive = drive,
+ subpath = subpath,
+ name = name,
+ base = base,
+ suffix = suffix,
+ }
+ else
+ return {
+ path = path,
+ name = name,
+ base = base,
+ suffix = suffix,
+ }
+ end
end
-- function test(t) for k, v in next, t do print(v, "=>", file.splitname(v)) end end
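+
+-- a few illustrative calls (made up input, not from the original test):
+--
+-- print(file.splitname("/data/test.tex"))         -- path, base, suffix
+-- print(file.splitname("c:/data/test.tex",true))  -- drive, path, base, suffix
+-- inspect(file.nametotable("c:/data/test.tex",true))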
@@ -2828,12 +3137,19 @@ local nothing = Cc("")
local escaped = (plus / " ") + (percent * C(hexdigit * hexdigit) / tochar)
-- we assume schemes with more than 1 character (in order to avoid problems with windows disks)
+-- we also assume that when we have a scheme, we also have an authority
+
+local schemestr = Cs((escaped+(1-colon-slash-qmark-hash))^2)
+local authoritystr = Cs((escaped+(1- slash-qmark-hash))^0)
+local pathstr = Cs((escaped+(1- qmark-hash))^0)
+local querystr = Cs((escaped+(1- hash))^0)
+local fragmentstr = Cs((escaped+(1- endofstring))^0)
-local scheme = Cs((escaped+(1-colon-slash-qmark-hash))^2) * colon * slash * slash + nothing
-local authority = Cs((escaped+(1- slash-qmark-hash))^0) + nothing
-local path = slash * Cs((escaped+(1- qmark-hash))^0) + nothing
-local query = qmark * Cs((escaped+(1- hash))^0) + nothing
-local fragment = hash * Cs((escaped+(1- endofstring))^0) + nothing
+local scheme = schemestr * colon + nothing
+local authority = slash * slash * authoritystr + nothing
+local path = slash * pathstr + nothing
+local query = qmark * querystr + nothing
+local fragment = hash * fragmentstr + nothing
local validurl = scheme * authority * path * query * fragment
local parser = Ct(validurl)
@@ -2854,11 +3170,14 @@ local function split(str)
return (type(str) == "string" and lpegmatch(parser,str)) or str
end
+local isscheme = schemestr * colon * slash * slash -- this test also assumes authority
+
local function hasscheme(str)
- local scheme = lpegmatch(scheme,str) -- at least one character
- return scheme and scheme ~= ""
+ local scheme = lpegmatch(isscheme,str) -- at least one character
+ return scheme ~= "" and scheme or false
end
+
-- todo: cache them
local rootletter = R("az","AZ")
@@ -3007,8 +3326,6 @@ local attributes = lfs.attributes
local walkdir = lfs.dir
local isdir = lfs.isdir
local isfile = lfs.isfile
-local mkdir = lfs.mkdir
-local chdir = lfs.chdir
local currentdir = lfs.currentdir
-- handy
@@ -3056,13 +3373,13 @@ local function collectpattern(path,patt,recurse,result)
local ok, scanner
result = result or { }
if path == "/" then
- ok, scanner = xpcall(function() return walkdir(path..".") end, function() end) -- kepler safe
+ ok, scanner, first = xpcall(function() return walkdir(path..".") end, function() end) -- kepler safe
else
- ok, scanner = xpcall(function() return walkdir(path) end, function() end) -- kepler safe
+ ok, scanner, first = xpcall(function() return walkdir(path) end, function() end) -- kepler safe
end
if ok and type(scanner) == "function" then
if not find(path,"/$") then path = path .. '/' end
- for name in scanner do
+ for name in scanner, first do
local full = path .. name
local attr = attributes(full)
local mode = attr.mode
@@ -3245,7 +3562,7 @@ if onwindows then
pth = pth .. "/" .. s
end
if make_indeed and not isdir(pth) then
- mkdir(pth)
+ lfs.mkdir(pth)
end
end
return pth, (isdir(pth) == true)
@@ -3277,7 +3594,7 @@ else
pth = pth .. "/" .. s
end
if make_indeed and not first and not isdir(pth) then
- mkdir(pth)
+ lfs.mkdir(pth)
end
end
else
@@ -3285,7 +3602,7 @@ else
for s in gmatch(str,"[^/]+") do
pth = pth .. "/" .. s
if make_indeed and not isdir(pth) then
- mkdir(pth)
+ lfs.mkdir(pth)
end
end
end
@@ -3313,10 +3630,10 @@ if onwindows then
first, last = match(str,"^([a-zA-Z]:)(.*)$")
if first and not find(last,"^/") then
local d = currentdir()
- if chdir(first) then
+ if lfs.chdir(first) then
first = dir.current()
end
- chdir(d)
+ lfs.chdir(d)
end
end
if not first then
@@ -3381,10 +3698,6 @@ local type, tonumber = type, tonumber
boolean = boolean or { }
local boolean = boolean
--- function boolean.tonumber(b)
--- return b and 1 or 0 -- test and test and return or return
--- end
-
function boolean.tonumber(b)
if b then return 1 else return 0 end -- test and return or return
end
@@ -3545,7 +3858,7 @@ local function utf16_to_utf8_be(t)
if right then
local now = 256*left + right
if more > 0 then
- now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000
+ now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000 -- the 0x10000 smells wrong
more = 0
r = r + 1
result[r] = utfchar(now)
@@ -3573,7 +3886,7 @@ local function utf16_to_utf8_le(t)
if right then
local now = 256*right + left
if more > 0 then
- now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000
+ now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000 -- the 0x10000 smells wrong
more = 0
r = r + 1
result[r] = utfchar(now)
@@ -3590,14 +3903,14 @@ local function utf16_to_utf8_le(t)
return t
end
-local function utf32_to_utf8_be(str)
+local function utf32_to_utf8_be(t)
if type(t) == "string" then
- t = utfsplitlines(str)
+ t = utfsplitlines(t)
end
local result = { } -- we reuse result
for i=1,#t do
local r, more = 0, -1
- for a,b in bytepairs(str) do
+ for a,b in bytepairs(t[i]) do
if a and b then
if more < 0 then
more = 256*256*256*a + 256*256*b
@@ -3612,17 +3925,17 @@ local function utf32_to_utf8_be(str)
end
t[i] = concat(result,"",1,r)
end
- return result
+ return t
end
-local function utf32_to_utf8_le(str)
+local function utf32_to_utf8_le(t)
if type(t) == "string" then
- t = utfsplitlines(str)
+ t = utfsplitlines(t)
end
local result = { } -- we reuse result
for i=1,#t do
local r, more = 0, -1
- for a,b in bytepairs(str) do
+ for a,b in bytepairs(t[i]) do
if a and b then
if more < 0 then
more = 256*b + a
@@ -3637,7 +3950,7 @@ local function utf32_to_utf8_le(str)
end
t[i] = concat(result,"",1,r)
end
- return result
+ return t
end
unicode.utf32_to_utf8_be = utf32_to_utf8_be
@@ -3706,15 +4019,67 @@ end
local lpegmatch = lpeg.match
-local utftype = lpeg.patterns.utftype
+local patterns = lpeg.patterns
+local utftype = patterns.utftype
function unicode.filetype(data)
return data and lpegmatch(utftype,data) or "unknown"
end
+local toentities = lpeg.Cs (
+ (
+ patterns.utf8one
+ + (
+ patterns.utf8two
+ + patterns.utf8three
+ + patterns.utf8four
+ ) / function(s) local b = utfbyte(s) if b < 127 then return s else return format("&#%X;",b) end end
+ )^0
+)
+
+patterns.toentities = toentities
+
+function utf.toentities(str)
+ return lpegmatch(toentities,str)
+end
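+
+-- a made up example: ascii stays as it is, anything beyond it becomes a hex entity
+--
+-- print(utf.toentities("café")) -- caf&#E9;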
+
+local P, C, R, Cs = lpeg.P, lpeg.C, lpeg.R, lpeg.Cs
+
+local one = P(1)
+local two = C(1) * C(1)
+local four = C(R(utfchar(0xD8),utfchar(0xFF))) * C(1) * C(1) * C(1)
+
+local pattern = P("\254\255") * Cs( (
+ four / function(a,b,c,d)
+                        local ab = 0x100 * byte(a) + byte(b)
+                        local cd = 0x100 * byte(c) + byte(d)
+ return utfchar((ab-0xD800)*0x400 + (cd-0xDC00) + 0x10000)
+ end
+ + two / function(a,b)
+ return utfchar(byte(a)*256 + byte(b))
+ end
+ + one
+ )^1 )
+ + P("\255\254") * Cs( (
+ four / function(b,a,d,c)
+                        local ab = 0x100 * byte(a) + byte(b)
+                        local cd = 0x100 * byte(c) + byte(d)
+ return utfchar((ab-0xD800)*0x400 + (cd-0xDC00) + 0x10000)
+ end
+ + two / function(b,a)
+ return utfchar(byte(a)*256 + byte(b))
+ end
+ + one
+ )^1 )
+
+function string.toutf(s)
+ return lpegmatch(pattern,s) or s -- todo: utf32
+end
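+
+-- a quick (hypothetical) check: utf-16 input with a byte order mark comes out as utf-8
+--
+-- print(string.toutf("\254\255\000A\000B")) -- AB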
+
+
end -- of closure
do -- create closure to overcome 200 locals limit
@@ -3750,8 +4115,8 @@ if not math.sind then
end
if not math.odd then
- function math.odd (n) return n % 2 == 0 end
- function math.even(n) return n % 2 ~= 0 end
+ function math.odd (n) return n % 2 ~= 0 end
+ function math.even(n) return n % 2 == 0 end
end
@@ -3771,9 +4136,10 @@ utilities = utilities or {}
utilities.tables = utilities.tables or { }
local tables = utilities.tables
-local format, gmatch = string.format, string.gmatch
+local format, gmatch, rep = string.format, string.gmatch, string.rep
local concat, insert, remove = table.concat, table.insert, table.remove
local setmetatable, getmetatable, tonumber, tostring = setmetatable, getmetatable, tonumber, tostring
+local type, next, rawset, tonumber = type, next, rawset, tonumber
function tables.definetable(target) -- defines undefined tables
local composed, t, n = nil, { }, 0
@@ -3789,14 +4155,31 @@ function tables.definetable(target) -- defines undefined tables
return concat(t,"\n")
end
-function tables.accesstable(target)
- local t = _G
+function tables.accesstable(target,root)
+ local t = root or _G
for name in gmatch(target,"([^%.]+)") do
t = t[name]
+ if not t then
+ return
+ end
end
return t
end
+function tables.migratetable(target,v,root)
+ local t = root or _G
+ local names = string.split(target,".")
+ for i=1,#names-1 do
+ local name = names[i]
+ t[name] = t[name] or { }
+ t = t[name]
+ if not t then
+ return
+ end
+ end
+ t[names[#names]] = v
+end
+
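+-- a small sketch of these two helpers (the table and key are made up):
+--
+-- local config = { }
+-- tables.migratetable("output.pdf.level",4,config)
+-- print(tables.accesstable("output.pdf.level",config)) -- 4
+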
function tables.removevalue(t,value) -- todo: n
if value then
for i=1,#t do
@@ -3840,13 +4223,19 @@ end
-- experimental
-local function toxml(t,d,result)
+local function toxml(t,d,result,step)
for k, v in table.sortedpairs(t) do
if type(v) == "table" then
- result[#result+1] = format("%s<%s>",d,k)
- toxml(v,d.." ",result)
- result[#result+1] = format("%s</%s>",d,k)
- elseif tonumber(k) then
+ if type(k) == "number" then
+ result[#result+1] = format("%s<entry n='%s'>",d,k)
+ toxml(v,d..step,result,step)
+ result[#result+1] = format("%s</entry>",d,k)
+ else
+ result[#result+1] = format("%s<%s>",d,k)
+ toxml(v,d..step,result,step)
+ result[#result+1] = format("%s</%s>",d,k)
+ end
+ elseif type(k) == "number" then
result[#result+1] = format("%s<entry n='%s'>%s</entry>",d,k,v,k)
else
result[#result+1] = format("%s<%s>%s</%s>",d,k,tostring(v),k)
@@ -3854,17 +4243,56 @@ local function toxml(t,d,result)
end
end
-function table.toxml(t,name,nobanner)
+function table.toxml(t,name,nobanner,indent,spaces)
local noroot = name == false
local result = (nobanner or noroot) and { } or { "<?xml version='1.0' standalone='yes' ?>" }
+ local indent = rep(" ",indent or 0)
+ local spaces = rep(" ",spaces or 1)
if noroot then
- toxml( t, "", result)
+        toxml( t, indent, result, spaces)
else
- toxml( { [name or "root"] = t }, "", result)
+ toxml( { [name or "root"] = t }, indent, result, spaces)
end
return concat(result,"\n")
end
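+
+-- for instance (made up data, showing the new indent and spaces arguments):
+--
+-- print(table.toxml({ title = "test", list = { "a", "b" } },"root",true,0,2))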
+-- also experimental
+
+-- encapsulate(table,utilities.tables)
+-- encapsulate(table,utilities.tables,true)
+-- encapsulate(table,true)
+
+function tables.encapsulate(core,capsule,protect)
+ if type(capsule) ~= "table" then
+ protect = true
+ capsule = { }
+ end
+ for key, value in next, core do
+ if capsule[key] then
+            print(format("\ninvalid inheritance '%s' in '%s'",key,tostring(core)))
+ os.exit()
+ else
+ capsule[key] = value
+ end
+ end
+ if protect then
+ for key, value in next, core do
+ core[key] = nil
+ end
+ setmetatable(core, {
+ __index = capsule,
+ __newindex = function(t,key,value)
+ if capsule[key] then
+ print(format("\ninvalid overload '%s' in '%s'",key,tostring(core)))
+ os.exit()
+ else
+ rawset(t,key,value)
+ end
+ end
+ } )
+ end
+end
+
end -- of closure
@@ -3886,8 +4314,8 @@ local storage = utilities.storage
function storage.mark(t)
if not t then
- texio.write_nl("fatal error: storage '%s' cannot be marked",t)
- os.exit()
+ texio.write_nl("fatal error: storage cannot be marked")
+ return -- os.exit()
end
local m = getmetatable(t)
if not m then
@@ -3916,8 +4344,8 @@ end
function storage.checked(t)
if not t then
- texio.write_nl("fatal error: storage '%s' has not been allocated",t)
- os.exit()
+ texio.write_nl("fatal error: storage has not been allocated")
+ return -- os.exit()
end
return t
end
@@ -3946,13 +4374,13 @@ end
-- table namespace ?
-local function f_empty () return "" end -- t,k
-local function f_self (t,k) t[k] = k return k end
-local function f_ignore() end -- t,k,v
+local function f_empty () return "" end -- t,k
+local function f_self (t,k) t[k] = k return k end
+local function f_ignore() end -- t,k,v
-local t_empty = { __index = empty }
-local t_self = { __index = self }
-local t_ignore = { __newindex = ignore }
+local t_empty = { __index = f_empty }
+local t_self = { __index = f_self }
+local t_ignore = { __newindex = f_ignore }
function table.setmetatableindex(t,f)
local m = getmetatable(t)
@@ -3973,6 +4401,7 @@ function table.setmetatableindex(t,f)
setmetatable(t,{ __index = f })
end
end
+ return t
end
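+
+-- now that the table is returned, calls can be chained or inlined, e.g. (made up):
+--
+-- local cache = table.setmetatableindex({ }, function(t,k) local v = { } t[k] = v return v end)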
function table.setmetatablenewindex(t,f)
@@ -3990,6 +4419,7 @@ function table.setmetatablenewindex(t,f)
setmetatable(t,{ __newindex = f })
end
end
+ return t
end
function table.setmetatablecall(t,f)
@@ -3999,6 +4429,7 @@ function table.setmetatablecall(t,f)
else
setmetatable(t,{ __call = f })
end
+ return t
end
function table.setmetatablekey(t,key,value)
@@ -4008,6 +4439,7 @@ function table.setmetatablekey(t,key,value)
setmetatable(t,m)
end
m[key] = value
+ return t
end
function table.getmetatablekey(t,key,value)
@@ -4176,14 +4608,29 @@ utilities = utilities or {}
utilities.lua = utilities.lua or { }
utilities.report = logs and logs.reporter("system") or print
-function utilities.lua.compile(luafile,lucfile,cleanup,strip) -- defaults: cleanup=false strip=true
+local function stupidcompile(luafile,lucfile)
+ local data = io.loaddata(luafile)
+ if data and data ~= "" then
+        local chunk = loadstring(data)
+        data = chunk and string.dump(chunk) or ""
+ if data and data ~= "" then
+ io.savedata(lucfile,data)
+ end
+ end
+end
+
+function utilities.lua.compile(luafile,lucfile,cleanup,strip,fallback) -- defaults: cleanup=false strip=true
utilities.report("lua: compiling %s into %s",luafile,lucfile)
os.remove(lucfile)
local command = "-o " .. string.quoted(lucfile) .. " " .. string.quoted(luafile)
if strip ~= false then
command = "-s " .. command
end
- local done = os.spawn("texluac " .. command) == 0 or os.spawn("luac " .. command) == 0
+ local done = os.spawn("texluac " .. command) == 0 -- or os.spawn("luac " .. command) == 0
+ if not done and fallback then
+ utilities.report("lua: dumping %s into %s (unstripped)",luafile,lucfile)
+ stupidcompile(luafile,lucfile) -- maybe use the stripper we have elsewhere
+        cleanup = false -- better to see how much worse it is
+ end
if done and cleanup == true and lfs.isfile(lucfile) and lfs.isfile(luafile) then
utilities.report("lua: removing %s",luafile)
os.remove(luafile)
@@ -4210,9 +4657,9 @@ if not modules then modules = { } end modules ['util-prs'] = {
license = "see context related readme files"
}
-local P, R, V, C, Ct, Carg = lpeg.P, lpeg.R, lpeg.V, lpeg.C, lpeg.Ct, lpeg.Carg
+local P, R, V, C, Ct, Cs, Carg = lpeg.P, lpeg.R, lpeg.V, lpeg.C, lpeg.Ct, lpeg.Cs, lpeg.Carg
local lpegmatch = lpeg.match
-local concat, format, gmatch = table.concat, string.format, string.gmatch
+local concat, format, gmatch, find = table.concat, string.format, string.gmatch, string.find
local tostring, type, next = tostring, type, next
utilities = utilities or {}
@@ -4240,8 +4687,12 @@ local rbrace = P("}")
local nobrace = 1 - (lbrace+rbrace)
local nested = P { lbrace * (nobrace + V(1))^0 * rbrace }
local spaces = space^0
+local argument = Cs((lbrace/"") * ((nobrace + nested)^0) * (rbrace/""))
+local content = (1-P(-1))^0
-lpeg.patterns.nested = nested
+lpeg.patterns.nested = nested -- no capture
+lpeg.patterns.argument = argument -- argument after e.g. =
+lpeg.patterns.content = content -- rest after e.g =
local value = P(lbrace * C((nobrace + nested)^0) * rbrace) + C((nested + (1-comma))^0)
@@ -4322,9 +4773,15 @@ parsers.patterns.settings_to_array = pattern
-- we could use a weak table as cache
-function parsers.settings_to_array(str)
+function parsers.settings_to_array(str,strict)
if not str or str == "" then
return { }
+ elseif strict then
+ if find(str,"{") then
+ return lpegmatch(pattern,str)
+ else
+ return { str }
+ end
else
return lpegmatch(pattern,str)
end
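+
+-- the strict variant only splits when braces are present (hypothetical input):
+--
+-- inspect(utilities.parsers.settings_to_array("a,b,c"))      -- { "a", "b", "c" }
+-- inspect(utilities.parsers.settings_to_array("a,b,c",true)) -- { "a,b,c" }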
@@ -4562,61 +5019,55 @@ local names = { }
-- one
local function hook()
- local f = getinfo(2,"f").func
- local n = getinfo(2,"Sn")
--- if n.what == "C" and n.name then print (n.namewhat .. ': ' .. n.name) end
+ local f = getinfo(2) -- "nS"
if f then
- local cf = counters[f]
- if cf == nil then
- counters[f] = 1
- names[f] = n
- else
- counters[f] = cf + 1
- end
- end
-end
-
-local function getname(func)
- local n = names[func]
- if n then
- if n.what == "C" then
- return n.name or '<anonymous>'
+ local n = "unknown"
+ if f.what == "C" then
+ n = f.name or '<anonymous>'
+ if not names[n] then
+ names[n] = format("%42s",n)
+ end
else
-- source short_src linedefined what name namewhat nups func
- local name = n.name or n.namewhat or n.what
- if not name or name == "" then name = "?" end
- return format("%s : %s : %s", n.short_src or "unknown source", n.linedefined or "--", name)
+ n = f.name or f.namewhat or f.what
+ if not n or n == "" then
+ n = "?"
+ end
+ if not names[n] then
+ names[n] = format("%42s : % 5i : %s",n,f.linedefined or 0,f.short_src or "unknown source")
+ end
end
- else
- return "unknown"
+ counters[n] = (counters[n] or 0) + 1
end
end
-function debugger.showstats(printer,threshold)
+function debugger.showstats(printer,threshold) -- hm, something has changed, rubbish now
printer = printer or texio.write or print
threshold = threshold or 0
local total, grandtotal, functions = 0, 0, 0
- printer("\n") -- ugly but ok
- -- table.sort(counters)
- for func, count in next, counters do
- if count > threshold then
- local name = getname(func)
- if not find(name,"for generator") then
- printer(format("%8i %s", count, name))
- total = total + count
- end
+ local dataset = { }
+ for name, count in next, counters do
+ dataset[#dataset+1] = { name, count }
+ end
+ table.sort(dataset,function(a,b) return a[2] == b[2] and b[1] > a[1] or a[2] > b[2] end)
+ for i=1,#dataset do
+ local d = dataset[i]
+ local name = d[1]
+ local count = d[2]
+ if count > threshold and not find(name,"for generator") then -- move up
+ printer(format("%8i %s\n", count, names[name]))
+ total = total + count
end
grandtotal = grandtotal + count
functions = functions + 1
end
- printer(format("functions: %s, total: %s, grand total: %s, threshold: %s\n", functions, total, grandtotal, threshold))
+ printer("\n")
+ printer(format("functions : % 10i\n", functions))
+ printer(format("total : % 10i\n", total))
+ printer(format("grand total: % 10i\n", grandtotal))
+ printer(format("threshold : % 10i\n", threshold))
end
--- two
-
-
--- rest
-
function debugger.savestats(filename,threshold)
local f = io.open(filename,'w')
if f then
@@ -4638,6 +5089,7 @@ end
local is_node = node and node.is_node
+local is_lpeg = lpeg and lpeg.type
function inspect(i) -- global function
local ti = type(i)
@@ -4645,11 +5097,30 @@ function inspect(i) -- global function
table.print(i,"table")
elseif is_node and is_node(i) then
table.print(nodes.astable(i),tostring(i))
+ elseif is_lpeg and is_lpeg(i) then
+ lpeg.print(i)
else
print(tostring(i))
end
end
+-- from the lua book:
+
+function traceback()
+ local level = 1
+ while true do
+ local info = debug.getinfo(level, "Sl")
+ if not info then
+ break
+ elseif info.what == "C" then
+ print(format("%3i : C function",level))
+ else
+ print(format("%3i : [%s]:%d",level,info.short_src,info.currentline))
+ end
+ level = level + 1
+ end
+end
+
end -- of closure
@@ -4668,7 +5139,7 @@ if not modules then modules = { } end modules ['trac-inf'] = {
-- get warnings about assignments. This is more efficient than using rawset
-- and rawget.
-local format = string.format
+local format, lower = string.format, string.lower
local clock = os.gettimeofday or os.clock -- should go in environment
local write_nl = texio.write_nl
@@ -4770,10 +5241,10 @@ function statistics.show(reporter)
-- this code will move
local register = statistics.register
register("luatex banner", function()
- return string.lower(status.banner)
+ return lower(status.banner)
end)
register("control sequences", function()
- return format("%s of %s", status.cs_count, status.hash_size+status.hash_extra)
+ return format("%s of %s + %s", status.cs_count, status.hash_size,status.hash_extra)
end)
register("callbacks", function()
local total, indirect = status.callbacks or 0, status.indirect_callbacks or 0
@@ -4847,7 +5318,7 @@ end
function commands.elapsedtime(name)
stoptiming(name or "whatever")
- tex.sprint(elapsedtime(name or "whatever"))
+ context(elapsedtime(name or "whatever"))
end
@@ -4931,6 +5402,10 @@ local function set(t,what,newvalue)
if type(what) ~= "table" then
return
end
+ if not done then -- catch ... why not set?
+ done = { }
+ t.done = done
+ end
for w, value in next, what do
if value == "" then
value = newvalue
@@ -5167,6 +5642,8 @@ end
if texconfig then
+ -- this happens too late in ini mode but that is no problem
+
local function set(k,v)
v = tonumber(v)
if v then
@@ -5237,7 +5714,7 @@ local report, subreport, status, settarget, setformats, settranslations
local direct, subdirect, writer, pushtarget, poptarget
-if tex and tex.jobname or tex.formatname then
+if tex and (tex.jobname or tex.formatname) then
local valueiskey = { __index = function(t,k) t[k] = k return k end } -- will be helper
@@ -5252,6 +5729,10 @@ if tex and tex.jobname or tex.formatname then
write_nl(target,...)
end
+ newline = function()
+ write_nl(target,"\n")
+ end
+
report = function(a,b,c,...)
if c then
write_nl(target,format("%-15s > %s\n",translations[a],format(formats[b],c,...)))
@@ -5357,6 +5838,10 @@ else
writer = write_nl
+ newline = function()
+ write_nl("\n")
+ end
+
report = function(a,b,c,...)
if c then
write_nl(format("%-15s | %s",a,format(b,c,...)))
@@ -5416,6 +5901,7 @@ logs.settranslations = settranslations
logs.direct = direct
logs.subdirect = subdirect
logs.writer = writer
+logs.newline = newline
-- installer
@@ -5585,21 +6071,58 @@ local real, user, sub
function logs.start_page_number()
real, user, sub = texcount.realpageno, texcount.userpageno, texcount.subpageno
+-- real, user, sub = 0, 0, 0
end
-function logs.stop_page_number()
- if real > 0 then
- if user > 0 then
- if sub > 0 then
- report_pages("flushing realpage %s, userpage %s, subpage %s",real,user,sub)
+local timing = false
+local starttime = nil
+local lasttime = nil
+
+trackers.register("pages.timing", function(v) -- only for myself (diagnostics)
+ starttime = os.clock()
+ timing = true
+end)
+
+function logs.stop_page_number() -- the first page can include the initialization so we omit it from the average
+ if timing then
+ local elapsed, average
+ local stoptime = os.clock()
+ if not lasttime or real < 2 then
+ elapsed = stoptime
+ average = stoptime
+ starttime = stoptime
+ else
+ elapsed = stoptime - lasttime
+ average = (stoptime - starttime) / (real - 1)
+ end
+ lasttime = stoptime
+ if real > 0 then
+ if user > 0 then
+ if sub > 0 then
+ report_pages("flushing realpage %s, userpage %s, subpage %s, time %0.04f / %0.04f",real,user,sub,elapsed,average)
+ else
+ report_pages("flushing realpage %s, userpage %s, time %0.04f / %0.04f",real,user,elapsed,average)
+ end
else
- report_pages("flushing realpage %s, userpage %s",real,user)
+ report_pages("flushing realpage %s, time %0.04f / %0.04f",real,elapsed,average)
end
else
- report_pages("flushing realpage %s",real)
+ report_pages("flushing page, time %0.04f / %0.04f",elapsed,average)
end
else
- report_pages("flushing page")
+ if real > 0 then
+ if user > 0 then
+ if sub > 0 then
+ report_pages("flushing realpage %s, userpage %s, subpage %s",real,user,sub)
+ else
+ report_pages("flushing realpage %s, userpage %s",real,user)
+ end
+ else
+ report_pages("flushing realpage %s",real)
+ end
+ else
+ report_pages("flushing page")
+ end
end
logs.flush()
end
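+
+-- the timing is only reported when the tracker is enabled, for instance with something
+-- like --trackers=pages.timing on the command line (or trackers.enable("pages.timing"))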
@@ -5681,6 +6204,13 @@ local function reportbanner(t)
end
end
+local function reportversion(t)
+ local banner = t.banner
+ if banner then
+ t.report(banner)
+ end
+end
+
local function reporthelp(t,...)
local helpinfo = t.helpinfo
if type(helpinfo) == "string" then
@@ -5707,6 +6237,7 @@ function logs.application(t)
t.report = logs.reporter(t.name)
t.help = function(...) reportbanner(t) ; reporthelp(t,...) ; reportinfo(t) end
t.identify = function() reportbanner(t) end
+ t.version = function() reportversion(t) end
return t
end
@@ -5772,6 +6303,11 @@ else
end
end
+-- do we still need io.flush then?
+
+io.stdout:setvbuf('no')
+io.stderr:setvbuf('no')
+
end -- of closure
@@ -6277,6 +6813,10 @@ if not modules then modules = { } end modules ['lxml-tab'] = {
-- stripping spaces from e.g. cont-en.xml saves .2 sec runtime so it's not worth the
-- trouble
+-- todo: when serializing optionally remap named entities to hex (if known in char-ent.lua)
+-- maybe when letter -> utf, else name .. then we need an option to the serializer .. a bit
+-- of work so we delay this till we cleanup
+
local trace_entities = false trackers.register("xml.entities", function(v) trace_entities = v end)
local report_xml = logs and logs.reporter("xml","core") or function(...) print(format(...)) end
@@ -6301,10 +6841,11 @@ xml = xml or { }
local xml = xml
+local utf = unicode.utf8
local concat, remove, insert = table.concat, table.remove, table.insert
local type, next, setmetatable, getmetatable, tonumber = type, next, setmetatable, getmetatable, tonumber
local format, lower, find, match, gsub = string.format, string.lower, string.find, string.match, string.gsub
-local utfchar = unicode.utf8.char
+local utfchar, utffind, utfgsub = utf.char, utf.find, utf.gsub
local lpegmatch = lpeg.match
local P, S, R, C, V, C, Cs = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.V, lpeg.C, lpeg.Cs
@@ -6417,9 +6958,22 @@ element.</p>
local nsremap, resolvens = xml.xmlns, xml.resolvens
-local stack, top, dt, at, xmlns, errorstr, entities = { }, { }, { }, { }, { }, nil, { }
-local strip, cleanup, utfize, resolve, resolve_predefined, unify_predefined = false, false, false, false, false, false
-local dcache, hcache, acache = { }, { }, { }
+local stack = { }
+local top = { }
+local dt = { }
+local at = { }
+local xmlns = { }
+local errorstr = nil
+local entities = { }
+local strip = false
+local cleanup = false
+local utfize = false
+local resolve_predefined = false
+local unify_predefined = false
+
+local dcache = { }
+local hcache = { }
+local acache = { }
local mt = { }
@@ -6585,7 +7139,7 @@ local predefined_unified = {
[42] = "&quot;",
[47] = "&apos;",
[74] = "&lt;",
- [76] = "&gr;",
+ [76] = "&gt;",
}
local predefined_simplified = {
@@ -6596,6 +7150,58 @@ local predefined_simplified = {
[76] = ">", gt = ">",
}
+local nofprivates = 0xF0000 -- shared but seldom used
+
+local privates_u = { -- unescaped
+ [ [[&]] ] = "&amp;",
+ [ [["]] ] = "&quot;",
+ [ [[']] ] = "&apos;",
+ [ [[<]] ] = "&lt;",
+ [ [[>]] ] = "&gt;",
+}
+
+local privates_p = {
+}
+
+local privates_n = {
+ -- keeps track of defined ones
+}
+
+local function escaped(s)
+ if s == "" then
+ return ""
+ else -- if utffind(s,privates_u) then
+ return (utfgsub(s,".",privates_u))
+ -- else
+ -- return s
+ end
+end
+
+local function unescaped(s)
+ local p = privates_n[s]
+ if not p then
+ nofprivates = nofprivates + 1
+ p = utfchar(nofprivates)
+ privates_n[s] = p
+ s = "&" .. s .. ";" -- todo: use char-ent to map to hex
+ privates_u[p] = s
+ privates_p[p] = s
+ end
+ return p
+end
+
+local function unprivatized(s,resolve)
+ if s == "" then
+ return ""
+ else
+ return (utfgsub(s,".",privates_p))
+ end
+end
+
+xml.privatetoken = unescaped
+xml.unprivatized = unprivatized
+xml.privatecodes = privates_n
+
local function handle_hex_entity(str)
local h = hcache[str]
if not h then
@@ -6637,7 +7243,7 @@ local function handle_dec_entity(str)
if not n then
report_xml("utfize, ignoring dec entity &#%s;",str)
elseif trace_entities then
- report_xml("utfize, converting dec entity &#%s; into %s",str,h)
+ report_xml("utfize, converting dec entity &#%s; into %s",str,d)
end
else
if trace_entities then
@@ -6658,34 +7264,44 @@ local function handle_any_entity(str)
if not a then
a = resolve_predefined and predefined_simplified[str]
if a then
- -- one of the predefined
- elseif type(resolve) == "function" then
- a = resolve(str) or entities[str]
- else
- a = entities[str]
- end
- if a then
if trace_entities then
- report_xml("resolved entity &%s; -> %s (internal)",str,a)
+ report_xml("resolved entity &%s; -> %s (predefined)",str,a)
end
- a = lpegmatch(parsedentity,a) or a
else
- local unknown_any_entity = placeholders.unknown_any_entity
- if unknown_any_entity then
- a = unknown_any_entity(str) or ""
+ if type(resolve) == "function" then
+ a = resolve(str) or entities[str]
+ else
+ a = entities[str]
end
if a then
+ if type(a) == "function" then
+ if trace_entities then
+ report_xml("expanding entity &%s; (function)",str)
+ end
+ a = a(str) or ""
+ end
+ a = lpegmatch(parsedentity,a) or a -- for nested
if trace_entities then
- report_xml("resolved entity &%s; -> %s (external)",str,a)
+ report_xml("resolved entity &%s; -> %s (internal)",str,a)
end
else
- if trace_entities then
- report_xml("keeping entity &%s;",str)
+ local unknown_any_entity = placeholders.unknown_any_entity
+ if unknown_any_entity then
+ a = unknown_any_entity(str) or ""
end
- if str == "" then
- a = "&error;"
+ if a then
+ if trace_entities then
+ report_xml("resolved entity &%s; -> %s (external)",str,a)
+ end
else
- a = "&" .. str .. ";"
+ if trace_entities then
+ report_xml("keeping entity &%s;",str)
+ end
+ if str == "" then
+ a = "&error;"
+ else
+ a = "&" .. str .. ";"
+ end
end
end
end
@@ -6700,18 +7316,25 @@ local function handle_any_entity(str)
else
local a = acache[str]
if not a then
- if trace_entities then
- report_xml("found entity &%s;",str)
- end
a = resolve_predefined and predefined_simplified[str]
if a then
-- one of the predefined
acache[str] = a
+ if trace_entities then
+ report_xml("entity &%s; becomes %s",str,tostring(a))
+ end
elseif str == "" then
+ if trace_entities then
+ report_xml("invalid entity &%s;",str)
+ end
a = "&error;"
acache[str] = a
else
- a = "&" .. str .. ";"
+ if trace_entities then
+ report_xml("entity &%s; is made private",str)
+ end
+ -- a = "&" .. str .. ";"
+ a = unescaped(str)
acache[str] = a
end
end
@@ -6761,7 +7384,7 @@ local value = (squote * Cs((entity + (1 - squote))^0) * squote) + (dq
local endofattributes = slash * close + close -- recovery of flaky html
local whatever = space * name * optionalspace * equal
-local wrongvalue = C(P(1-whatever-close)^1 + P(1-close)^1) / attribute_value_error
+----- wrongvalue = C(P(1-whatever-close)^1 + P(1-close)^1) / attribute_value_error
----- wrongvalue = C(P(1-whatever-endofattributes)^1 + P(1-endofattributes)^1) / attribute_value_error
----- wrongvalue = C(P(1-space-endofattributes)^1) / attribute_value_error
local wrongvalue = Cs(P(entity + (1-space-endofattributes))^1) / attribute_value_error
@@ -6796,6 +7419,8 @@ local function normalentity(k,v ) entities[k] = v end
local function systementity(k,v,n) entities[k] = v end
local function publicentity(k,v,n) entities[k] = v end
+-- todo: separate dtd parser
+
local begindoctype = open * P("!DOCTYPE")
local enddoctype = close
local beginset = P("[")
@@ -6803,17 +7428,22 @@ local endset = P("]")
local doctypename = C((1-somespace-close)^0)
local elementdoctype = optionalspace * P("<!ELEMENT") * (1-close)^0 * close
+local basiccomment = begincomment * ((1 - endcomment)^0) * endcomment
+
local normalentitytype = (doctypename * somespace * value)/normalentity
local publicentitytype = (doctypename * somespace * P("PUBLIC") * somespace * value)/publicentity
local systementitytype = (doctypename * somespace * P("SYSTEM") * somespace * value * somespace * P("NDATA") * somespace * doctypename)/systementity
local entitydoctype = optionalspace * P("<!ENTITY") * somespace * (systementitytype + publicentitytype + normalentitytype) * optionalspace * close
-local doctypeset = beginset * optionalspace * P(elementdoctype + entitydoctype + space)^0 * optionalspace * endset
+-- we accept comments in doctypes
+
+local doctypeset = beginset * optionalspace * P(elementdoctype + entitydoctype + basiccomment + space)^0 * optionalspace * endset
local definitiondoctype= doctypename * somespace * doctypeset
local publicdoctype = doctypename * somespace * P("PUBLIC") * somespace * value * somespace * value * somespace * doctypeset
local systemdoctype = doctypename * somespace * P("SYSTEM") * somespace * value * somespace * doctypeset
local simpledoctype = (1-close)^1 -- * balanced^0
local somedoctype = C((somespace * (publicdoctype + systemdoctype + definitiondoctype + simpledoctype) * optionalspace)^0)
local instruction = (spacing * begininstruction * someinstruction * endinstruction) / function(...) add_special("@pi@",...) end
local comment = (spacing * begincomment * somecomment * endcomment ) / function(...) add_special("@cm@",...) end
@@ -6845,17 +7475,30 @@ local grammar_unparsed_text = P { "preamble",
children = unparsedtext + V("parent") + emptyelement + comment + cdata + instruction,
}
--- maybe we will add settinsg to result as well
+-- maybe we will add settings to result as well
-local function xmlconvert(data, settings)
- settings = settings or { } -- no_root strip_cm_and_dt given_entities parent_root error_handler
- strip = settings.strip_cm_and_dt
- utfize = settings.utfize_entities
- resolve = settings.resolve_entities
+local function _xmlconvert_(data, settings)
+ settings = settings or { } -- no_root strip_cm_and_dt given_entities parent_root error_handler
+ --
+ strip = settings.strip_cm_and_dt
+ utfize = settings.utfize_entities
+ resolve = settings.resolve_entities
resolve_predefined = settings.resolve_predefined_entities -- in case we have escaped entities
- unify_predefined = settings.unify_predefined_entities -- &#038; -> &amp;
- cleanup = settings.text_cleanup
- stack, top, at, xmlns, errorstr, entities = { }, { }, { }, { }, nil, settings.entities or { }
+ unify_predefined = settings.unify_predefined_entities -- &#038; -> &amp;
+ cleanup = settings.text_cleanup
+ entities = settings.entities or { }
+ --
+ if utfize == nil then
+ settings.utfize_entities = true
+ utfize = true
+ end
+ if resolve_predefined == nil then
+ settings.resolve_predefined_entities = true
+ resolve_predefined = true
+ end
+ --
+ --
+ stack, top, at, xmlns, errorstr = { }, { }, { }, { }, nil
acache, hcache, dcache = { }, { }, { } -- not stored
reported_attribute_errors = { }
if settings.parent_root then
@@ -6893,7 +7536,7 @@ local function xmlconvert(data, settings)
else
errorhandler = errorhandler or xml.errorhandler
if errorhandler then
- xml.errorhandler("load",errorstr)
+ xml.errorhandler(format("load error: %s",errorstr))
end
end
else
@@ -6907,7 +7550,7 @@ local function xmlconvert(data, settings)
local v = rdt[k]
if type(v) == "table" and not v.special then -- always table -)
result.ri = k -- rootindex
-v.__p__ = result -- new, experiment, else we cannot go back to settings, we need to test this !
+ v.__p__ = result -- new, experiment, else we cannot go back to settings, we need to test this !
break
end
end
@@ -6915,16 +7558,42 @@ v.__p__ = result -- new, experiment, else we cannot go back to settings, we nee
if errorstr and errorstr ~= "" then
result.error = true
end
+ result.statistics = {
+ entities = {
+ decimals = dcache,
+ hexadecimals = hcache,
+ names = acache,
+ }
+ }
+ strip, utfize, resolve, resolve_predefined = nil, nil, nil, nil
+ unify_predefined, cleanup, entities = nil, nil, nil
+ stack, top, at, xmlns, errorstr = nil, nil, nil, nil, nil
+ acache, hcache, dcache = nil, nil, nil
+ reported_attribute_errors, mt, errorhandler = nil, nil, nil
return result
end
+-- Because we can have a crash (stack issues) with faulty xml, we wrap this one
+-- in a protector:
+
+function xmlconvert(data,settings)
+ local ok, result = pcall(function() return _xmlconvert_(data,settings) end)
+ if ok then
+ return result
+ else
+ return _xmlconvert_("")
+ end
+end
+
xml.convert = xmlconvert
-function xml.inheritedconvert(data,xmldata)
+function xml.inheritedconvert(data,xmldata) -- xmldata is parent
local settings = xmldata.settings
- settings.parent_root = xmldata -- to be tested
+ if settings then
+ settings.parent_root = xmldata -- to be tested
+ end
-- settings.no_root = true
- local xc = xmlconvert(data,settings)
+ local xc = xmlconvert(data,settings) -- hm, we might need to locate settings
-- xc.settings = nil
-- xc.entities = nil
-- xc.special = nil
@@ -6953,7 +7622,7 @@ function xml.is_valid(root)
return root and not root.error
end
-xml.errorhandler = report
+xml.errorhandler = report_xml
--[[ldx--
<p>We cannot load an <l n='lpeg'/> from a filehandle so we need to load
@@ -7032,18 +7701,15 @@ alternative.</p>
function xml.checkbom(root) -- can be made faster
if root.ri then
- local dt, found = root.dt, false
+ local dt = root.dt
for k=1,#dt do
local v = dt[k]
if type(v) == "table" and v.special and v.tg == "@pi@" and find(v.dt[1],"xml.*version=") then
- found = true
- break
+ return
end
end
- if not found then
- insert(dt, 1, { special=true, ns="", tg="@pi@", dt = { "xml version='1.0' standalone='yes'"} } )
- insert(dt, 2, "\n" )
- end
+ insert(dt, 1, { special=true, ns="", tg="@pi@", dt = { "xml version='1.0' standalone='yes'"} } )
+ insert(dt, 2, "\n" )
end
end
@@ -7054,14 +7720,14 @@ and then handle the lot.</p>
-- new experimental reorganized serialize
-local function verbose_element(e,handlers)
+local function verbose_element(e,handlers) -- options
local handle = handlers.handle
local serialize = handlers.serialize
local ens, etg, eat, edt, ern = e.ns, e.tg, e.at, e.dt, e.rn
local ats = eat and next(eat) and { }
if ats then
for k,v in next, eat do
- ats[#ats+1] = format('%s=%q',k,v)
+ ats[#ats+1] = format('%s=%q',k,escaped(v))
end
end
if ern and trace_entities and ern ~= ens then
@@ -7077,7 +7743,7 @@ local function verbose_element(e,handlers)
for i=1,#edt do
local e = edt[i]
if type(e) == "string" then
- handle(e)
+ handle(escaped(e))
else
serialize(e,handlers)
end
@@ -7098,11 +7764,11 @@ local function verbose_element(e,handlers)
handle("<",etg,">")
end
for i=1,#edt do
- local ei = edt[i]
- if type(ei) == "string" then
- handle(ei)
+ local e = edt[i]
+ if type(e) == "string" then
+ handle(escaped(e)) -- option: hexify escaped entities
else
- serialize(ei,handlers)
+ serialize(e,handlers)
end
end
handle("</",etg,">")
@@ -7137,7 +7803,7 @@ local function verbose_root(e,handlers)
end
local function verbose_text(e,handlers)
- handlers.handle(e)
+ handlers.handle(escaped(e))
end
local function verbose_document(e,handlers)
@@ -7265,20 +7931,33 @@ local result
local xmlfilehandler = newhandlers {
name = "file",
- initialize = function(name) result = io.open(name,"wb") return result end,
- finalize = function() result:close() return true end,
- handle = function(...) result:write(...) end,
+ initialize = function(name)
+ result = io.open(name,"wb")
+ return result
+ end,
+ finalize = function()
+ result:close()
+ return true
+ end,
+ handle = function(...)
+ result:write(...)
+ end,
}
-- no checking on writeability here but not faster either
--
-- local xmlfilehandler = newhandlers {
--- initialize = function(name) io.output(name,"wb") return true end,
--- finalize = function() io.close() return true end,
+-- initialize = function(name)
+-- io.output(name,"wb")
+-- return true
+-- end,
+-- finalize = function()
+-- io.close()
+-- return true
+-- end,
-- handle = io.write,
-- }
-
function xml.save(root,name)
serialize(root,xmlfilehandler,name)
end
@@ -7287,28 +7966,34 @@ local result
local xmlstringhandler = newhandlers {
name = "string",
- initialize = function() result = { } return result end,
- finalize = function() return concat(result) end,
- handle = function(...) result[#result+1] = concat { ... } end
+ initialize = function()
+ result = { }
+ return result
+ end,
+ finalize = function()
+ return concat(result)
+ end,
+ handle = function(...)
+ result[#result+1] = concat { ... }
+ end,
}
local function xmltostring(root) -- 25% overhead due to collecting
- if root then
- if type(root) == 'string' then
- return root
- else -- if next(root) then -- next is faster than type (and >0 test)
- return serialize(root,xmlstringhandler) or ""
- end
+ if not root then
+ return ""
+ elseif type(root) == 'string' then
+ return root
+ else -- if next(root) then -- next is faster than type (and >0 test)
+ return serialize(root,xmlstringhandler) or ""
end
- return ""
end
-local function xmltext(root) -- inline
+local function __tostring(root) -- inline
return (root and xmltostring(root)) or ""
end
initialize_mt = function(root) -- redefinition
- mt = { __tostring = xmltext, __index = root }
+ mt = { __tostring = __tostring, __index = root }
end
xml.defaulthandlers = handlers
@@ -7428,7 +8113,7 @@ xml.tocdata(e,"error")
--ldx]]--
function xml.tocdata(e,wrapper)
- local whatever = xmltostring(e.dt)
+ local whatever = type(e) == "table" and xmltostring(e.dt) or e or ""
if wrapper then
whatever = format("<%s>%s</%s>",wrapper,whatever,wrapper)
end
@@ -7620,8 +8305,8 @@ apply_axis['child'] = function(list)
c = c + 1
collected[c] = dk
dk.ni = k -- refresh
- en = en + 1
- dk.ei = en
+ en = en + 1
+ dk.ei = en
end
end
ll.en = en
@@ -7649,6 +8334,7 @@ local function collect(list,collected,c)
end
return c
end
+
apply_axis['descendant'] = function(list)
local collected, c = { }, 0
for l=1,#list do
@@ -7940,6 +8626,12 @@ local lp_or = P("|") / " or "
local lp_and = P("&") / " and "
local lp_builtin = P (
+ P("text") / "(ll.dt[1] or '')" + -- fragile
+ P("content") / "ll.dt" +
+ -- P("name") / "(ll.ns~='' and ll.ns..':'..ll.tg)" +
+ P("name") / "((ll.ns~='' and ll.ns..':'..ll.tg) or ll.tg)" +
+ P("tag") / "ll.tg" +
+ P("position") / "l" + -- is element in finalizer
P("firstindex") / "1" +
P("lastindex") / "(#ll.__p__.dt or 1)" +
P("firstelement") / "1" +
@@ -7947,15 +8639,11 @@ local lp_builtin = P (
P("first") / "1" +
P("last") / "#list" +
P("rootposition") / "order" +
- P("position") / "l" + -- is element in finalizer
P("order") / "order" +
P("element") / "(ll.ei or 1)" +
P("index") / "(ll.ni or 1)" +
P("match") / "(ll.mi or 1)" +
- P("text") / "(ll.dt[1] or '')" +
- -- P("name") / "(ll.ns~='' and ll.ns..':'..ll.tg)" +
- P("name") / "((ll.ns~='' and ll.ns..':'..ll.tg) or ll.tg)" +
- P("tag") / "ll.tg" +
+ -- P("namespace") / "ll.ns" +
P("ns") / "ll.ns"
) * ((spaces * P("(") * spaces * P(")"))/"")
@@ -7979,11 +8667,11 @@ local lp_function = C(R("az","AZ","__")^1) * P("(") / function(t) -- todo: bett
end
end
-local lparent = lpeg.P("(")
-local rparent = lpeg.P(")")
+local lparent = P("(")
+local rparent = P(")")
local noparent = 1 - (lparent+rparent)
-local nested = lpeg.P{lparent * (noparent + lpeg.V(1))^0 * rparent}
-local value = lpeg.P(lparent * lpeg.C((noparent + nested)^0) * rparent) -- lpeg.P{"("*C(((1-S("()"))+V(1))^0)*")"}
+local nested = P{lparent * (noparent + V(1))^0 * rparent}
+local value = P(lparent * C((noparent + nested)^0) * rparent) -- P{"("*C(((1-S("()"))+V(1))^0)*")"}
local lp_child = Cc("expr.child(ll,'") * R("az","AZ","--","__")^1 * Cc("')")
local lp_number = S("+-") * R("09")^1
@@ -8135,7 +8823,10 @@ local special_1 = P("*") * Cc(register_auto_descendant) * Cc(register_all_nodes
local special_2 = P("/") * Cc(register_auto_self)
local special_3 = P("") * Cc(register_auto_self)
-local pathparser = Ct { "patterns", -- can be made a bit faster by moving pattern outside
+local no_nextcolon = P(-1) + #(1-P(":")) -- newer lpeg needs the P(-1)
+local no_nextlparent = P(-1) + #(1-P("(")) -- newer lpeg needs the P(-1)
+
+local pathparser = Ct { "patterns", -- can be made a bit faster by moving some patterns outside
patterns = spaces * V("protocol") * spaces * (
( V("special") * spaces * P(-1) ) +
@@ -8164,10 +8855,8 @@ local pathparser = Ct { "patterns", -- can be made a bit faster by moving patter
shortcuts = V("shortcuts_a") * (spaces * "/" * spaces * V("shortcuts_a"))^0,
s_descendant_or_self = (P("***/") + P("/")) * Cc(register_descendant_or_self), --- *** is a bonus
- -- s_descendant_or_self = P("/") * Cc(register_descendant_or_self),
s_descendant = P("**") * Cc(register_descendant),
- s_child = P("*") * #(1-P(":")) * Cc(register_child ),
--- s_child = P("*") * #(P("/")+P(-1)) * Cc(register_child ),
+ s_child = P("*") * no_nextcolon * Cc(register_child ),
s_parent = P("..") * Cc(register_parent ),
s_self = P("." ) * Cc(register_self ),
s_root = P("^^") * Cc(register_root ),
@@ -8194,13 +8883,13 @@ local pathparser = Ct { "patterns", -- can be made a bit faster by moving patter
expressions = expression / register_expression,
letters = R("az")^1,
- name = (1-lpeg.S("/[]()|:*!"))^1,
+ name = (1-S("/[]()|:*!"))^1, -- make inline
negate = P("!") * Cc(false),
nodefunction = V("negate") + P("not") * Cc(false) + Cc(true),
nodetest = V("negate") + Cc(true),
nodename = (V("negate") + Cc(true)) * spaces * ((V("wildnodename") * P(":") * V("wildnodename")) + (Cc(false) * V("wildnodename"))),
- wildnodename = (C(V("name")) + P("*") * Cc(false)) * #(1-P("(")),
+ wildnodename = (C(V("name")) + P("*") * Cc(false)) * no_nextlparent,
nodeset = spaces * Ct(V("nodename") * (spaces * P("|") * spaces * V("nodename"))^0) * spaces,
finalizer = (Cb("protocol") * P("/")^-1 * C(V("name")) * arguments * P(-1)) / register_finalizer,
@@ -8364,7 +9053,7 @@ local function profiled_apply(list,parsed,nofparsed,order)
elseif kind == "expression" then
collected = apply_expression(collected,pi.evaluator,order)
elseif kind == "finalizer" then
- collected = pi.finalizer(collected)
+ collected = pi.finalizer(collected) -- no check on # here
p.matched = p.matched + 1
p.finalized = p.finalized + 1
return collected
@@ -8497,9 +9186,9 @@ end
expressions.child = function(e,pattern)
return applylpath(e,pattern) -- todo: cache
end
-expressions.count = function(e,pattern)
+expressions.count = function(e,pattern) -- what if pattern == empty or nil
local collected = applylpath(e,pattern) -- todo: cache
- return (collected and #collected) or 0
+ return pattern and (collected and #collected) or 0
end
-- external
@@ -8508,7 +9197,7 @@ expressions.oneof = function(s,...) -- slow
local t = {...} for i=1,#t do if s == t[i] then return true end end return false
end
expressions.error = function(str)
- xml.errorhandler("unknown function in lpath expression",tostring(str or "?"))
+ xml.errorhandler(format("unknown function in lpath expression: %s",tostring(str or "?")))
return false
end
expressions.undefined = function(s)
@@ -8534,6 +9223,23 @@ expressions.lower = lower
expressions.number = tonumber
expressions.boolean = toboolean
+function expressions.contains(str,pattern)
+ local t = type(str)
+ if t == "string" then
+ if find(str,pattern) then
+ return true
+ end
+ elseif t == "table" then
+ for i=1,#str do
+ local d = str[i]
+ if type(d) == "string" and find(d,pattern) then
+ return true
+ end
+ end
+ end
+ return false
+end
+
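+-- in an lpath expression this becomes available as a test, along these lines
+-- (a made up pattern, not checked against the full grammar):
+--
+-- xml.filter(root,"a/b[contains(text(),'test')]")
+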
-- user interface
local function traverse(root,pattern,handle)
@@ -8856,10 +9562,12 @@ local xml = xml
local xmlconvert, xmlcopy, xmlname = xml.convert, xml.copy, xml.name
local xmlinheritedconvert = xml.inheritedconvert
local xmlapplylpath = xml.applylpath
+local xmlfilter = xml.filter
local type, setmetatable, getmetatable = type, setmetatable, getmetatable
-local insert, remove, fastcopy = table.insert, table.remove, table.fastcopy
-local gmatch, gsub = string.gmatch, string.gsub
+local insert, remove, fastcopy, concat = table.insert, table.remove, table.fastcopy, table.concat
+local gmatch, gsub, format = string.gmatch, string.gsub, string.format
+local utfbyte = utf.byte
local function report(what,pattern,c,e)
report_xml("%s element '%s' (root: '%s', position: %s, index: %s, pattern: %s)",what,xmlname(e),xmlname(e.__p__),c,e.ni,pattern)
@@ -9004,7 +9712,7 @@ local function xmltoelement(whatever,root)
end
local element
if type(whatever) == "string" then
- element = xmlinheritedconvert(whatever,root)
+ element = xmlinheritedconvert(whatever,root) -- beware, not really a root
else
element = whatever -- we assume a table
end
@@ -9031,18 +9739,30 @@ local function copiedelement(element,newparent)
end
function xml.delete(root,pattern)
- local collected = xmlapplylpath(root,pattern)
- if collected then
- for c=1,#collected do
- local e = collected[c]
- local p = e.__p__
- if p then
- if trace_manipulations then
- report('deleting',pattern,c,e)
+ if not pattern or pattern == "" then
+ local p = root.__p__
+ if p then
+ if trace_manipulations then
+                report('deleting',"--",1,root)
+ end
+ local d = p.dt
+ remove(d,root.ni)
+ redo_ni(d) -- can be made faster and inlined
+ end
+ else
+ local collected = xmlapplylpath(root,pattern)
+ if collected then
+ for c=1,#collected do
+ local e = collected[c]
+ local p = e.__p__
+ if p then
+ if trace_manipulations then
+ report('deleting',pattern,c,e)
+ end
+ local d = p.dt
+ remove(d,e.ni)
+ redo_ni(d) -- can be made faster and inlined
end
- local d = p.dt
- remove(d,e.ni)
- redo_ni(d) -- can be made faster and inlined
end
end
end
@@ -9105,32 +9825,39 @@ end
local function inject_element(root,pattern,whatever,prepend)
local element = root and xmltoelement(whatever,root)
local collected = element and xmlapplylpath(root,pattern)
- if collected then
- for c=1,#collected do
- local e = collected[c]
- local r = e.__p__
- local d, k, rri = r.dt, e.ni, r.ri
- local edt = (rri and d[rri].dt) or (d and d[k] and d[k].dt)
- if edt then
- local be, af
- local cp = copiedelement(element,e)
- if prepend then
- be, af = cp, edt
- else
- be, af = edt, cp
- end
- local bn = #be
- for i=1,#af do
- bn = bn + 1
- be[bn] = af[i]
- end
- if rri then
- r.dt[rri].dt = be
- else
- d[k].dt = be
- end
- redo_ni(d)
+ local function inject_e(e)
+ local r = e.__p__
+ local d, k, rri = r.dt, e.ni, r.ri
+ local edt = (rri and d[rri].dt) or (d and d[k] and d[k].dt)
+ if edt then
+ local be, af
+ local cp = copiedelement(element,e)
+ if prepend then
+ be, af = cp, edt
+ else
+ be, af = edt, cp
end
+ local bn = #be
+ for i=1,#af do
+ bn = bn + 1
+ be[bn] = af[i]
+ end
+ if rri then
+ r.dt[rri].dt = be
+ else
+ d[k].dt = be
+ end
+ redo_ni(d)
+ end
+ end
+ if not collected then
+ -- nothing
+ elseif collected.tg then
+ -- first or so
+ inject_e(collected)
+ else
+ for c=1,#collected do
+ inject_e(collected[c])
end
end
end
@@ -9138,16 +9865,23 @@ end
local function insert_element(root,pattern,whatever,before) -- todo: element als functie
local element = root and xmltoelement(whatever,root)
local collected = element and xmlapplylpath(root,pattern)
- if collected then
+ local function insert_e(e)
+ local r = e.__p__
+ local d, k = r.dt, e.ni
+ if not before then
+ k = k + 1
+ end
+ insert(d,k,copiedelement(element,r))
+ redo_ni(d)
+ end
+ if not collected then
+ -- nothing
+ elseif collected.tg then
+ -- first or so
+ insert_e(collected)
+ else
for c=1,#collected do
- local e = collected[c]
- local r = e.__p__
- local d, k = r.dt, e.ni
- if not before then
- k = k + 1
- end
- insert(d,k,copiedelement(element,r))
- redo_ni(d)
+ insert_e(collected[c])
end
end
end
@@ -9346,6 +10080,75 @@ function xml.remapname(root, pattern, newtg, newns, newrn)
end
--[[ldx--
+<p>Helper (for q2p).</p>
+--ldx]]--
+
+function xml.cdatatotext(e)
+ local dt = e.dt
+ if #dt == 1 then
+ local first = dt[1]
+ if first.tg == "@cd@" then
+ e.dt = first.dt
+ end
+ else
+ -- maybe option
+ end
+end
+
+xml.builtinentities = table.tohash { "amp", "quot", "apos", "lt", "gt" } -- used often so share
+
+local entities = characters and characters.entities or nil
+local builtinentities = xml.builtinentities
+
+function xml.addentitiesdoctype(root,option) -- we could also have a 'resolve' i.e. inline hex
+ if not entities then
+ require("char-ent")
+ entities = characters.entities
+ end
+ if entities and root and root.tg == "@rt@" and root.statistics then
+ local list = { }
+ local hexify = option == "hexadecimal"
+ for k, v in table.sortedhash(root.statistics.entities.names) do
+ if not builtinentities[k] then
+ local e = entities[k]
+ if not e then
+ e = format("[%s]",k)
+ elseif hexify then
+ e = format("&#%05X;",utfbyte(k))
+ end
+ list[#list+1] = format(" <!ENTITY %s %q >",k,e)
+ end
+ end
+ local dt = root.dt
+ local n = dt[1].tg == "@pi@" and 2 or 1
+ if #list > 0 then
+ insert(dt, n, { "\n" })
+ insert(dt, n, {
+ tg = "@dt@", -- beware, doctype is unparsed
+ dt = { format("Something [\n%s\n] ",concat(list)) },
+ ns = "",
+ special = true,
+ })
+ insert(dt, n, { "\n\n" })
+ else
+ -- insert(dt, n, { table.serialize(root.statistics) })
+ end
+ end
+end
+
+-- local str = [==[
+-- <?xml version='1.0' standalone='yes' ?>
+-- <root>
+-- <a>test &nbsp; test &#123; test</a>
+-- <b><![CDATA[oeps]]></b>
+-- </root>
+-- ]==]
+--
+-- local x = xml.convert(str)
+-- xml.addentitiesdoctype(x,"hexadecimal")
+-- print(x)
+
+--[[ldx--
<p>Here are a few synonyms.</p>
--ldx]]--
@@ -9378,6 +10181,53 @@ xml.remap_tag = xml.remaptag obsolete.remap_tag
xml.remap_name = xml.remapname obsolete.remap_name = xml.remapname
xml.remap_namespace = xml.remapnamespace obsolete.remap_namespace = xml.remapnamespace
+-- new (probably ok)
+
+function xml.cdata(e)
+ if e then
+ local dt = e.dt
+ if dt and #dt == 1 then
+ local first = dt[1]
+ return first.tg == "@cd@" and first.dt[1] or ""
+ end
+ end
+ return ""
+end
+
+function xml.finalizers.xml.cdata(collected)
+ if collected then
+ local e = collected[1]
+ if e then
+ local dt = e.dt
+ if dt and #dt == 1 then
+ local first = dt[1]
+ return first.tg == "@cd@" and first.dt[1] or ""
+ end
+ end
+ end
+ return ""
+end
+
+function xml.insertcomment(e,str,n) -- also insertcdata
+ table.insert(e.dt,n or 1,{
+ tg = "@cm@",
+ ns = "",
+ special = true,
+ at = { },
+ dt = { str },
+ })
+end
+
+function xml.setcdata(e,str) -- also setcomment
+ e.dt = { {
+ tg = "@cd@",
+ ns = "",
+ special = true,
+ at = { },
+ dt = { str },
+ } }
+end
+
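+-- given some element e these could be used as follows (made up content):
+--
+-- xml.setcdata(e,"some <raw> text")  -- replace the content of e by a single cdata node
+-- print(xml.cdata(e))                -- some <raw> text
+-- xml.insertcomment(e,"added later") -- inject a comment node at the start of e
+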
end -- of closure
@@ -9391,15 +10241,17 @@ if not modules then modules = { } end modules ['lxml-xml'] = {
license = "see context related readme files"
}
-local concat = string.concat
+local concat = table.concat
+local find = string.find
local xml = xml
-local finalizers = xml.finalizers.xml
-local xmlfilter = xml.filter -- we could inline this one for speed
-local xmltostring = xml.tostring
-local xmlserialize = xml.serialize
-local xmlcollected = xml.collected
+local finalizers = xml.finalizers.xml
+local xmlfilter = xml.filter -- we could inline this one for speed
+local xmltostring = xml.tostring
+local xmlserialize = xml.serialize
+local xmlcollected = xml.collected
+local xmlnewhandlers = xml.newhandlers
local function first(collected) -- wrong ?
return collected and collected[1]
@@ -9413,6 +10265,21 @@ local function all(collected)
return collected
end
+-- local function reverse(collected)
+-- if collected then
+-- local nc = #collected
+-- if nc > 0 then
+-- local reversed, r = { }, 0
+-- for c=nc,1,-1 do
+-- r = r + 1
+-- reversed[r] = collected[c]
+-- end
+-- return reversed
+-- else
+-- return collected
+-- end
+-- end
+-- end
local reverse = table.reversed
@@ -9429,34 +10296,37 @@ local function att(id,name)
end
local function count(collected)
- return (collected and #collected) or 0
+ return collected and #collected or 0
end
local function position(collected,n)
- if collected then
- n = tonumber(n) or 0
- if n < 0 then
- return collected[#collected + n + 1]
- elseif n > 0 then
- return collected[n]
- else
- return collected[1].mi or 0
- end
+ if not collected then
+ return 0
+ end
+ local nc = #collected
+ if nc == 0 then
+ return 0
+ end
+ n = tonumber(n) or 0
+ if n < 0 then
+ return collected[nc + n + 1]
+ elseif n > 0 then
+ return collected[n]
+ else
+ return collected[1].mi or 0
end
end
local function match(collected)
- return (collected and collected[1].mi) or 0 -- match
+ return collected and #collected > 0 and collected[1].mi or 0 -- match
end
local function index(collected)
- if collected then
- return collected[1].ni
- end
+ return collected and #collected > 0 and collected[1].ni or 0 -- 0 is new
end
local function attributes(collected,arguments)
- if collected then
+ if collected and #collected > 0 then
local at = collected[1].at
if arguments then
return at[arguments]
@@ -9467,7 +10337,7 @@ local function attributes(collected,arguments)
end
local function chainattribute(collected,arguments) -- todo: optional levels
- if collected then
+ if collected and #collected > 0 then
local e = collected[1]
while e do
local at = e.at
@@ -9485,108 +10355,169 @@ local function chainattribute(collected,arguments) -- todo: optional levels
return ""
end
-local function raw(collected) -- hybrid
- if collected then
+local function raw(collected) -- hybrid (not much different from text so it might go)
+ if collected and #collected > 0 then
local e = collected[1] or collected
- return (e and xmlserialize(e)) or "" -- only first as we cannot concat function
+ return e and xmltostring(e) or "" -- only first as we cannot concat function
else
return ""
end
end
+--
+
+local xmltexthandler = xmlnewhandlers {
+ name = "string",
+ initialize = function()
+ result = { }
+ return result
+ end,
+ finalize = function()
+ return concat(result)
+ end,
+ handle = function(...)
+ result[#result+1] = concat { ... }
+ end,
+ escape = false,
+}
+
+local function xmltotext(root)
+ local dt = root.dt
+ if not dt then
+ return ""
+ end
+ local nt = #dt -- string or table
+ if nt == 0 then
+ return ""
+ elseif nt == 1 and type(dt[1]) == "string" then
+ return dt[1] -- no escaping of " ' < > &
+ else
+ return xmlserialize(root,xmltexthandler) or ""
+ end
+end
+
+--
+
local function text(collected) -- hybrid
- if collected then
- local e = collected[1] or collected
- return (e and xmltostring(e.dt)) or ""
+ if collected then -- no # test here !
+ local e = collected[1] or collected -- why fallback to element, how about cdata
+ return e and xmltotext(e) or ""
else
return ""
end
end
local function texts(collected)
- if collected then
- local t, n = { }, 0
- for c=1,#collected do
- local e = collected[c]
- if e and e.dt then
- n = n + 1
- t[n] = e.dt
- end
+ if not collected then
+ return { } -- why no nil
+ end
+ local nc = #collected
+ if nc == 0 then
+ return { } -- why no nil
+ end
+ local t, n = { }, 0
+ for c=1,nc do
+ local e = collected[c]
+ if e and e.dt then
+ n = n + 1
+ t[n] = e.dt
end
- return t
end
+ return t
end
local function tag(collected,n)
- if collected then
- local c
- if n == 0 or not n then
- c = collected[1]
- elseif n > 1 then
- c = collected[n]
- else
- c = collected[#collected-n+1]
- end
- return c and c.tg
+ if not collected then
+ return
+ end
+ local nc = #collected
+ if nc == 0 then
+ return
end
+ local c
+ if n == 0 or not n then
+ c = collected[1]
+ elseif n > 1 then
+ c = collected[n]
+ else
+ c = collected[nc-n+1]
+ end
+ return c and c.tg
end
local function name(collected,n)
- if collected then
- local c
- if n == 0 or not n then
- c = collected[1]
- elseif n > 1 then
- c = collected[n]
- else
- c = collected[#collected-n+1]
- end
- if c then
- if c.ns == "" then
- return c.tg
- else
- return c.ns .. ":" .. c.tg
- end
- end
+ if not collected then
+ return
+ end
+ local nc = #collected
+ if nc == 0 then
+ return
+ end
+ local c
+ if n == 0 or not n then
+ c = collected[1]
+ elseif n > 1 then
+ c = collected[n]
+ else
+ c = collected[nc-n+1]
+ end
+ if not c then
+ -- sorry
+ elseif c.ns == "" then
+ return c.tg
+ else
+ return c.ns .. ":" .. c.tg
end
end
local function tags(collected,nonamespace)
- if collected then
- local t, n = { }, 0
- for c=1,#collected do
- local e = collected[c]
- local ns, tg = e.ns, e.tg
- n = n + 1
- if nonamespace or ns == "" then
- t[n] = tg
- else
- t[n] = ns .. ":" .. tg
- end
+ if not collected then
+ return
+ end
+ local nc = #collected
+ if nc == 0 then
+ return
+ end
+ local t, n = { }, 0
+ for c=1,nc do
+ local e = collected[c]
+ local ns, tg = e.ns, e.tg
+ n = n + 1
+ if nonamespace or ns == "" then
+ t[n] = tg
+ else
+ t[n] = ns .. ":" .. tg
end
- return t
end
+ return t
end
-local function empty(collected)
- if collected then
- for c=1,#collected do
- local e = collected[c]
- if e then
- local edt = e.dt
- if edt then
- local n = #edt
- if n == 1 then
- local edk = edt[1]
- local typ = type(edk)
- if typ == "table" then
- return false
- elseif edk ~= "" then -- maybe an extra tester for spacing only
- return false
- end
- elseif n > 1 then
+local function empty(collected,spacesonly)
+ if not collected then
+ return true
+ end
+ local nc = #collected
+ if nc == 0 then
+ return true
+ end
+ for c=1,nc do
+ local e = collected[c]
+ if e then
+ local edt = e.dt
+ if edt then
+ local n = #edt
+ if n == 1 then
+ local edk = edt[1]
+ local typ = type(edk)
+ if typ == "table" then
+ return false
+ elseif edk ~= "" then
+ return false
+ elseif spacesonly and not find(edk,"%S") then
return false
end
+ elseif n > 1 then
+ return false
end
end
end
@@ -9641,14 +10572,14 @@ function xml.raw(id,pattern)
end
end
-function xml.text(id,pattern)
+function xml.text(id,pattern) -- brrr either content or element (when cdata)
if pattern then
-- return text(xmlfilter(id,pattern))
local collected = xmlfilter(id,pattern)
- return (collected and xmltostring(collected[1].dt)) or ""
+ return collected and #collected > 0 and xmltotext(collected[1]) or ""
elseif id then
-- return text(id)
- return xmltostring(id.dt) or ""
+ return xmltotext(id) or ""
else
return ""
end
@@ -9656,6 +10587,8 @@ end
xml.content = text
+--
+
function xml.position(id,pattern,n) -- element
return position(xmlfilter(id,pattern),n)
end
@@ -9664,8 +10597,8 @@ function xml.match(id,pattern) -- number
return match(xmlfilter(id,pattern))
end
-function xml.empty(id,pattern)
- return empty(xmlfilter(id,pattern))
+function xml.empty(id,pattern,spacesonly)
+ return empty(xmlfilter(id,pattern),spacesonly)
end
xml.all = xml.filter
@@ -9719,7 +10652,7 @@ if not modules then modules = { } end modules ['data-ini'] = {
license = "see context related readme files",
}
-local gsub, find, gmatch = string.gsub, string.find, string.gmatch
+local gsub, find, gmatch, char = string.gsub, string.find, string.gmatch, string.char
local concat = table.concat
local next, type = next, type
@@ -9781,7 +10714,7 @@ do
local homedir = osgetenv(ostype == "windows" and 'USERPROFILE' or 'HOME') or ''
if not homedir or homedir == "" then
- homedir = string.char(127) -- we need a value, later we wil trigger on it
+ homedir = char(127) -- we need a value, later we will trigger on it
end
homedir = file.collapsepath(homedir)
@@ -9954,7 +10887,7 @@ if not modules then modules = { } end modules ['data-exp'] = {
license = "see context related readme files",
}
-local format, find, gmatch, lower = string.format, string.find, string.gmatch, string.lower
+local format, find, gmatch, lower, char, sub = string.format, string.find, string.gmatch, string.lower, string.char, string.sub
local concat, sort = table.concat, table.sort
local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
local Ct, Cs, Cc, P, C, S = lpeg.Ct, lpeg.Cs, lpeg.Cc, lpeg.P, lpeg.C, lpeg.S
@@ -9974,19 +10907,6 @@ local resolvers = resolvers
-- all, when working on the main resolver code, I don't want to scroll
-- past this every time. See data-obs.lua for the gsub variant.
--- {a,b,c,d}
--- a,b,c/{p,q,r},d
--- a,b,c/{p,q,r}/d/{x,y,z}//
--- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
--- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
--- a{b,c}{d,e}f
--- {a,b,c,d}
--- {a,b,c/{p,q,r},d}
--- {a,b,c/{p,q,r}/d/{x,y,z}//}
--- {a,b,c/{p,q/{x,y,z}},d/{p,q,r}}
--- {a,b,c/{p,q/{x,y,z},w}v,d/{p,q,r}}
--- {$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,.local,}/web2c}
-
local function f_first(a,b)
local t, n = { }, 0
for s in gmatch(b,"[^,]+") do
@@ -10043,12 +10963,9 @@ local function splitpathexpr(str, newlist, validate) -- I couldn't resist lpeggi
str = lpegmatch(stripper_1,str)
if validate then
for s in gmatch(str,"[^,]+") do
- local restoreslashes = false
- if find(s, "//$") then restoreslashes = true end
s = validate(s)
if s then
n = n + 1 ; t[n] = s
- if restoreslashes and not find(s, "//$") then t[n] = t[n] .. '//' end
end
end
else
@@ -10081,41 +10998,56 @@ function resolvers.expandedpathfromlist(pathlist)
return newlist
end
+-- {a,b,c,d}
+-- a,b,c/{p,q,r},d
+-- a,b,c/{p,q,r}/d/{x,y,z}//
+-- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
+-- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
+-- a{b,c}{d,e}f
+-- {a,b,c,d}
+-- {a,b,c/{p,q,r},d}
+-- {a,b,c/{p,q,r}/d/{x,y,z}//}
+-- {a,b,c/{p,q/{x,y,z}},d/{p,q,r}}
+-- {a,b,c/{p,q/{x,y,z},w}v,d/{p,q,r}}
+-- {$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,.local,}/web2c}
+
local cleanup = lpeg.replacer {
{ "!" , "" },
{ "\\" , "/" },
}
-local homedir
-
-function resolvers.cleanpath(str)
- if not homedir then
- homedir = lpegmatch(cleanup,environment.homedir or "")
- if homedir == string.char(127) or homedir == "" or not lfs.isdir(homedir) then
- if trace_expansions then
- report_expansions("no home dir set, ignoring dependent paths")
- end
- function resolvers.cleanpath(str)
- if find(str,"~") then
- return "" -- special case
- else
- return str and lpegmatch(cleanup,str)
- end
- end
- else
- cleanup = lpeg.replacer {
- { "!" , "" },
- { "\\" , "/" },
- { "~" , homedir },
- }
- function resolvers.cleanpath(str)
- return str and lpegmatch(cleanup,str)
+function resolvers.cleanpath(str) -- tricky, maybe only simple paths
+ local doslashes = (P("\\")/"/" + 1)^0
+ local donegation = (P("!") /"" )^0
+ local homedir = lpegmatch(Cs(donegation * doslashes),environment.homedir or "")
+ if homedir == "~" or homedir == "" or not lfs.isdir(homedir) then
+ if trace_expansions then
+ report_expansions("no home dir set, ignoring dependent paths")
+ end
+ function resolvers.cleanpath(str)
+ if not str or find(str,"~") then
+ return "" -- special case
+ else
+ return lpegmatch(cleanup,str)
end
end
+ else
+ local dohome = ((P("~")+P("$HOME"))/homedir)^0
+ local cleanup = Cs(donegation * dohome * doslashes)
+ function resolvers.cleanpath(str)
+ return str and lpegmatch(cleanup,str) or ""
+ end
end
return resolvers.cleanpath(str)
end
+-- print(resolvers.cleanpath(""))
+-- print(resolvers.cleanpath("!"))
+-- print(resolvers.cleanpath("~"))
+-- print(resolvers.cleanpath("~/test"))
+-- print(resolvers.cleanpath("!~/test"))
+-- print(resolvers.cleanpath("~/test~test"))
+
-- This one strips quotes and funny tokens.
local expandhome = P("~") / "$HOME" -- environment.homedir
@@ -10140,8 +11072,8 @@ end
local cache = { }
----- splitter = Ct(lpeg.splitat(S(ostype == "windows" and ";" or ":;"))) -- maybe add ,
-local splitter = Ct(lpeg.splitat(";")) -- as we move towards urls, prefixes and use tables we no longer do :
+----- splitter = lpeg.tsplitat(S(ostype == "windows" and ";" or ":;")) -- maybe add ,
+local splitter = lpeg.tsplitat(";") -- as we move towards urls, prefixes and use tables we no longer do :
local backslashswapper = lpeg.replacer("\\","/")
@@ -10201,13 +11133,22 @@ end
-local weird = P(".")^1 + lpeg.anywhere(S("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t"))
+-- a lot of this caching can be stripped away when we have ssd's everywhere
+--
+-- we could cache all the (sub)paths here if needed
local attributes, directory = lfs.attributes, lfs.dir
+local weird = P(".")^1 + lpeg.anywhere(S("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t"))
+local timer = { }
+local scanned = { }
+local nofscans = 0
+local scancache = { }
+
local function scan(files,spec,path,n,m,r)
- local full = (path == "" and spec) or (spec .. path .. '/')
- local dirs, nofdirs = { }, 0
+ local full = (path == "" and spec) or (spec .. path .. '/')
+ local dirs = { }
+ local nofdirs = 0
for name in directory(full) do
if not lpegmatch(weird,name) then
local mode = attributes(full..name,'mode')
@@ -10245,25 +11186,122 @@ local function scan(files,spec,path,n,m,r)
files, n, m, r = scan(files,spec,dirs[i],n,m,r)
end
end
+ scancache[sub(full,1,-2)] = files
return files, n, m, r
end
-function resolvers.scanfiles(path,branch)
+local fullcache = { }
+
+function resolvers.scanfiles(path,branch,usecache)
+ statistics.starttiming(timer)
+ local realpath = resolvers.resolve(path) -- no shortcut
+ if usecache then
+ local files = fullcache[realpath]
+ if files then
+ if trace_locating then
+ report_expansions("using caches scan of path '%s', branch '%s'",path,branch or path)
+ end
+ return files
+ end
+ end
if trace_locating then
- report_expansions("scanning path '%s', branch '%s'",path, branch or path)
+ report_expansions("scanning path '%s', branch '%s'",path,branch or path)
end
- local realpath = resolvers.resolve(path) -- no shortcut
local files, n, m, r = scan({ },realpath .. '/',"",0,0,0)
- files.__path__ = path -- can be selfautoparent:texmf-whatever
- files.__files__ = n
- files.__directories__ = m
- files.__remappings__ = r
+ files.__path__ = path -- can be selfautoparent:texmf-whatever
+ files.__files__ = n
+ files.__directories__ = m
+ files.__remappings__ = r
if trace_locating then
report_expansions("%s files found on %s directories with %s uppercase remappings",n,m,r)
end
+ if usecache then
+ scanned[#scanned+1] = realpath
+ fullcache[realpath] = files
+ end
+ nofscans = nofscans + 1
+ statistics.stoptiming(timer)
+ return files
+end
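+
+-- A minimal usage sketch (the path below is made up, and we assume the resolver
+-- has already been initialized): the third argument switches on the scan cache
+-- introduced above, so repeated scans of the same tree are shared.
+--
+-- local files = resolvers.scanfiles("selfautoparent:texmf-local",nil,true)
+-- print(files.__files__,files.__directories__,files.__remappings__)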
+
+local function simplescan(files,spec,path) -- first match only, no map and such
+ local full = (path == "" and spec) or (spec .. path .. '/')
+ local dirs = { }
+ local nofdirs = 0
+ for name in directory(full) do
+ if not lpegmatch(weird,name) then
+ local mode = attributes(full..name,'mode')
+ if mode == 'file' then
+ if not files[name] then
+ -- only first match
+ files[name] = path
+ end
+ elseif mode == 'directory' then
+ nofdirs = nofdirs + 1
+ if path ~= "" then
+ dirs[nofdirs] = path..'/'..name
+ else
+ dirs[nofdirs] = name
+ end
+ end
+ end
+ end
+ if nofdirs > 0 then
+ sort(dirs)
+ for i=1,nofdirs do
+ files = simplescan(files,spec,dirs[i])
+ end
+ end
+ return files
+end
+
+local simplecache = { }
+local nofsharedscans = 0
+
+function resolvers.simplescanfiles(path,branch,usecache)
+ statistics.starttiming(timer)
+ local realpath = resolvers.resolve(path) -- no shortcut
+ if usecache then
+ local files = simplecache[realpath]
+ if not files then
+ files = scancache[realpath]
+ if files then
+ nofsharedscans = nofsharedscans + 1
+ end
+ end
+ if files then
+ if trace_locating then
+ report_expansions("using caches scan of path '%s', branch '%s'",path,branch or path)
+ end
+ return files
+ end
+ end
+ if trace_locating then
+ report_expansions("scanning path '%s', branch '%s'",path,branch or path)
+ end
+ local files = simplescan({ },realpath .. '/',"")
+ if trace_locating then
+ report_expansions("%s files found",table.count(files))
+ end
+ if usecache then
+ scanned[#scanned+1] = realpath
+ simplecache[realpath] = files
+ end
+ nofscans = nofscans + 1
+ statistics.stoptiming(timer)
return files
end
+function resolvers.scandata()
+ table.sort(scanned)
+ return {
+ n = nofscans,
+ shared = nofsharedscans,
+ time = statistics.elapsedtime(timer),
+ paths = scanned,
+ }
+end
+
end -- of closure
@@ -10361,12 +11399,12 @@ local relations = allocate { -- todo: handlers also here
mp = {
names = { "mp" },
variable = 'MPINPUTS',
- suffixes = { 'mp' },
+ suffixes = { 'mp', 'mpvi', 'mpiv', 'mpii' },
},
tex = {
names = { "tex" },
variable = 'TEXINPUTS',
- suffixes = { 'tex', "mkiv", "mkiv", "mkii" },
+ suffixes = { 'tex', "mkvi", "mkiv", "mkii" },
},
icc = {
names = { "icc", "icc profile", "icc profiles" },
@@ -10503,7 +11541,7 @@ end
resolvers.updaterelations() -- push this in the metatable -> newindex
local function simplified(t,k)
- return rawget(t,lower(gsub(k," ","")))
+ return k and rawget(t,lower(gsub(k," ",""))) or nil
end
setmetatableindex(formats, simplified)
@@ -10589,6 +11627,7 @@ luatools with a recache feature.</p>
--ldx]]--
local format, lower, gsub, concat = string.format, string.lower, string.gsub, table.concat
+local serialize, serializetofile = table.serialize, table.tofile
local mkdirs, isdir = dir.mkdirs, lfs.isdir
local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
@@ -10651,11 +11690,7 @@ local function identify()
end
elseif not writable and caches.force then
local cacheparent = file.dirname(cachepath)
- -- TH: added 'or true' for deeply buried non-existent caches.
- -- file.is_writable() is not really important here, since this
- -- branch is only checked for as-yet non-existent paths, and
- -- it guards against mkdirs() failing, anyway.
- if file.is_writable(cacheparent) or true then
+ if file.is_writable(cacheparent) and true then -- we go on anyway (needed for mojca's kind of paths)
if not caches.ask or io.ask(format("\nShould I create the cache path %s?",cachepath), "no", { "yes", "no" }) == "yes" then
mkdirs(cachepath)
if isdir(cachepath) and file.is_writable(cachepath) then
@@ -10746,7 +11781,7 @@ function caches.usedpaths()
end
function caches.configfiles()
- return table.concat(resolvers.instance.specification,";")
+ return concat(resolvers.instance.specification,";")
end
function caches.hashed(tree)
@@ -10870,9 +11905,9 @@ function caches.savedata(filepath,filename,data,raw)
end
data.cache_uuid = os.uuid()
if caches.direct then
- file.savedata(tmaname,table.serialize(data,true,saveoptions))
+ file.savedata(tmaname,serialize(data,true,saveoptions))
else
- table.tofile(tmaname,data,true,saveoptions)
+ serializetofile(tmaname,data,true,saveoptions)
end
utilities.lua.compile(tmaname,tmcname)
end
@@ -10939,7 +11974,7 @@ function caches.savecontent(cachename,dataname,content)
content = content,
uuid = os.uuid(),
}
- local ok = io.savedata(luaname,table.serialize(data,true))
+ local ok = io.savedata(luaname,serialize(data,true))
if ok then
if trace_locating then
report_resolvers("category '%s', cachename '%s' saved in '%s'",dataname,cachename,luaname)
@@ -11112,7 +12147,7 @@ if not modules then modules = { } end modules ['data-res'] = {
-- instance but for practical purposes we now avoid this and use a
-- instance variable. We always have one instance active (sort of global).
--- todo: cache:/// home:///
+-- todo: cache:/// home:/// selfautoparent:/// (sometime end 2012)
local format, gsub, find, lower, upper, match, gmatch = string.format, string.gsub, string.find, string.lower, string.upper, string.match, string.gmatch
local concat, insert, sortedkeys = table.concat, table.insert, table.sortedkeys
@@ -11155,7 +12190,58 @@ resolvers.criticalvars = allocate { "SELFAUTOLOC", "SELFAUTODIR", "SELFAUTOPARE
resolvers.luacnfname = 'texmfcnf.lua'
resolvers.luacnfstate = "unknown"
-resolvers.luacnfspec = '{selfautodir:,selfautoparent:};{selfautodir:,selfautoparent:}{/share,}/texmf{-local,};{selfautodir:,selfautoparent:}{/share,}/texmf{-local,}/web2c'
+-- The web2c tex binaries as well as kpse have built in paths for the configuration
+-- files and there can be a depressing truckload of them. This is actually the weak
+-- spot of a distribution. So we don't want:
+--
+-- resolvers.luacnfspec = '{$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,}/web2c}'
+--
+-- but instead use:
+--
+-- resolvers.luacnfspec = 'selfautoparent:{/texmf{-local,}{,/web2c}}'
+--
+-- which does not make texlive happy as there is a texmf-local tree one level up
+-- (sigh), so we need this. (We can assume web2c as mkiv does not run on older
+-- texlives anyway.)
+--
+-- texlive:
+--
+-- selfautodir:
+-- selfautoparent:
+-- selfautodir:share/texmf-local/web2c
+-- selfautodir:share/texmf/web2c
+-- selfautodir:texmf-local/web2c
+-- selfautodir:texmf/web2c
+-- selfautoparent:share/texmf-local/web2c
+-- selfautoparent:share/texmf/web2c
+-- selfautoparent:texmf-local/web2c
+-- selfautoparent:texmf/web2c
+--
+-- minimals:
+--
+-- home:texmf/web2c
+-- selfautoparent:texmf-local/web2c
+-- selfautoparent:texmf-context/web2c
+-- selfautoparent:texmf/web2c
+
+if this_is_texlive then
+ -- resolvers.luacnfspec = '{selfautodir:,selfautoparent:}{,{/share,}/texmf{-local,}/web2c}'
+ -- resolvers.luacnfspec = '{selfautodir:{/share,}/texmf-local/web2c,selfautoparent:{/share,}/texmf{-local,}/web2c}'
+ -- resolvers.luacnfspec = 'selfautodir:/texmf-local/web2c;selfautoparent:/texmf{-local,}/web2c'
+ resolvers.luacnfspec = 'selfautodir:;selfautoparent:;{selfautodir:,selfautoparent:}{/share,}/texmf{-local,}/web2c'
+else
+ resolvers.luacnfspec = 'home:texmf/web2c;selfautoparent:texmf{-local,-context,}/web2c'
+end
+
+-- which (as we want users to use the web2c path) can be simplified to this:
+--
+-- if environment and environment.ownpath and string.find(environment.ownpath,"[\\/]texlive[\\/]") then
+-- resolvers.luacnfspec = 'selfautodir:/texmf-local/web2c,selfautoparent:/texmf-local/web2c,selfautoparent:/texmf/web2c'
+-- else
+-- resolvers.luacnfspec = 'selfautoparent:/texmf-local/web2c,selfautoparent:/texmf/web2c'
+-- end
+
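+-- A rough way to inspect what such a spec expands to (a sketch, reusing the
+-- splitpath/expandedpathfromlist helpers that are also used further down when
+-- the configuration files are identified):
+--
+-- local paths = resolvers.expandedpathfromlist(resolvers.splitpath(resolvers.luacnfspec))
+-- for i=1,#paths do print(paths[i]) end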
+
local unset_variable = "unset"
@@ -11177,7 +12263,7 @@ local instance = resolvers.instance or nil -- the current one (fast access)
function resolvers.setenv(key,value,raw)
if instance then
-- this one will be consulted first when we stay inside
- -- the current environment
+ -- the current environment (prefixes are not resolved here)
instance.environment[key] = value
-- we feed back into the environment, and as this is used
-- by other applications (via os.execute) we need to make
@@ -11257,6 +12343,7 @@ function resolvers.newinstance() -- todo: all vars will become lowercase and alp
remember = true,
diskcache = true,
renewcache = false,
+ renewtree = false,
loaderror = false,
savelists = true,
pattern = nil, -- lists
@@ -11348,7 +12435,7 @@ local function makepathexpression(str)
end
end
-local function reportcriticalvariables()
+local function reportcriticalvariables(cnfspec)
if trace_locating then
for i=1,#resolvers.criticalvars do
local k = resolvers.criticalvars[i]
@@ -11356,6 +12443,14 @@ local function reportcriticalvariables()
report_resolving("variable '%s' set to '%s'",k,v)
end
report_resolving()
+ if cnfspec then
+ if type(cnfspec) == "table" then
+ report_resolving("using configuration specification '%s'",concat(cnfspec,","))
+ else
+ report_resolving("using configuration specification '%s'",cnfspec)
+ end
+ end
+ report_resolving()
end
reportcriticalvariables = function() end
end
@@ -11370,7 +12465,7 @@ local function identify_configuration_files()
else
resolvers.luacnfstate = "environment"
end
- reportcriticalvariables()
+ reportcriticalvariables(cnfspec)
local cnfpaths = expandedpathfromlist(resolvers.splitpath(cnfspec))
local luacnfname = resolvers.luacnfname
for i=1,#cnfpaths do
@@ -11406,6 +12501,19 @@ local function load_configuration_files()
if blob then
local setups = instance.setups
local data = blob()
+ local parent = data and data.parent
+ if parent then
+ local filename = filejoin(pathname,parent)
+ local realname = resolvers.resolve(filename) -- no shortcut
+ local blob = loadfile(realname)
+ if blob then
+ local parentdata = blob()
+ if parentdata then
+ report_resolving("loading configuration file '%s'",filename)
+ data = table.merged(parentdata,data)
+ end
+ end
+ end
data = data and data.content
if data then
if trace_locating then
@@ -11495,11 +12603,6 @@ local function locate_file_databases()
local runtime = stripped == path
path = resolvers.cleanpath(path)
local spec = resolvers.splitmethod(stripped)
- -- TH Original did this the other way around (elseif and if tests
- -- reversed) but then the else branch was never reached, as 'runtime'
- -- would effectively be ignored. In turn, that meant that the wrong
- -- locator method was used, such that tree: and file: were treated
- -- identically (cached and no runtime search).
if runtime and (spec.noscheme or spec.scheme == "file") then
stripped = "tree:///" .. stripped
elseif spec.scheme == "cache" or spec.scheme == "file" then
@@ -11507,7 +12610,7 @@ local function locate_file_databases()
end
if trace_locating then
if runtime then
- report_resolving("locating list of '%s' (runtime)",path)
+ report_resolving("locating list of '%s' (runtime) (%s)",path,stripped)
else
report_resolving("locating list of '%s' (cached)",path)
end
@@ -11551,6 +12654,39 @@ local function save_file_databases() -- will become cachers
end
end
+function resolvers.renew(hashname)
+ if hashname and hashname ~= "" then
+ local expanded = resolvers.expansion(hashname) or ""
+ if expanded ~= "" then
+ if trace_locating then
+ report_resolving("identifying tree '%s' from '%s'",expanded,hashname)
+ end
+ hashname = expanded
+ else
+ if trace_locating then
+ report_resolving("identifying tree '%s'",hashname)
+ end
+ end
+ local realpath = resolvers.resolve(hashname)
+ if lfs.isdir(realpath) then
+ if trace_locating then
+ report_resolving("using path '%s'",realpath)
+ end
+ methodhandler('generators',hashname)
+ -- could be shared
+ local content = instance.files[hashname]
+ caches.collapsecontent(content)
+ if trace_locating then
+ report_resolving("saving tree '%s'",hashname)
+ end
+ caches.savecontent(hashname,"files",content)
+ -- till here
+ else
+ report_resolving("invalid path '%s'",realpath)
+ end
+ end
+end
+
local function load_databases()
locate_file_databases()
if instance.diskcache and not instance.renewcache then
@@ -11838,6 +12974,7 @@ local function collect_files(names)
if dname == "" or find(dname,"^%.") then
dname = false
else
+dname = gsub(dname,"*","%.*")
dname = "/" .. dname .. "$"
end
local hashes = instance.hashes
@@ -11897,9 +13034,22 @@ local function collect_files(names)
return noffiles > 0 and filelist or nil
end
-function resolvers.registerintrees(name)
- if not find(name,"^%.") then
- instance.foundintrees[name] = (instance.foundintrees[name] or 0) + 1 -- maybe only one
+local fit = { }
+
+function resolvers.registerintrees(filename,format,filetype,usedmethod,foundname)
+ local foundintrees = instance.foundintrees
+ if usedmethod == "direct" and filename == foundname and fit[foundname] then
+ -- just an extra lookup after a test on presence
+ else
+ local t = {
+ filename = filename,
+ format = format ~= "" and format or nil,
+ filetype = filetype ~= "" and filetype or nil,
+ usedmethod = usedmethod,
+ foundname = foundname,
+ }
+ fit[foundname] = t
+ foundintrees[#foundintrees+1] = t
end
end
@@ -11919,297 +13069,423 @@ end
local preparetreepattern = Cs((P(".")/"%%." + P("-")/"%%-" + P(1))^0 * Cc("$"))
--- this one is split in smaller functions but it needs testing
+-- -- -- begin of main file search routing -- -- -- needs checking as previous has been patched
-local function collect_instance_files(filename,askedformat,allresults) -- todo : plugin (scanners, checkers etc)
- local result = { }
- local stamp = nil
- askedformat = askedformat or ""
- filename = collapsepath(filename)
- -- speed up / beware: format problem
- if instance.remember and not allresults then
- stamp = filename .. "--" .. askedformat
- if instance.found[stamp] then
+local collect_instance_files
+
+local function find_analyze(filename,askedformat,allresults)
+ local filetype, wantedfiles, ext = '', { }, fileextname(filename)
+ -- too tricky as filename can be bla.1.2.3:
+ --
+ -- if not suffixmap[ext] then
+ -- wantedfiles[#wantedfiles+1] = filename
+ -- end
+ wantedfiles[#wantedfiles+1] = filename
+ if askedformat == "" then
+ if ext == "" or not suffixmap[ext] then
+ local defaultsuffixes = resolvers.defaultsuffixes
+ for i=1,#defaultsuffixes do
+ local forcedname = filename .. '.' .. defaultsuffixes[i]
+ wantedfiles[#wantedfiles+1] = forcedname
+ filetype = resolvers.formatofsuffix(forcedname)
+ if trace_locating then
+ report_resolving("forcing filetype '%s'",filetype)
+ end
+ end
+ else
+ filetype = resolvers.formatofsuffix(filename)
if trace_locating then
- report_resolving("remembered file '%s'",filename)
+ report_resolving("using suffix based filetype '%s'",filetype)
end
- resolvers.registerintrees(filename) -- for tracing used files
- return instance.found[stamp]
end
- end
- if not dangerous[askedformat] then
- if isreadable(filename) then
- if trace_detail then
- report_resolving("file '%s' found directly",filename)
- end
- if stamp then
- instance.found[stamp] = { filename }
+ else
+ if ext == "" or not suffixmap[ext] then
+ local format_suffixes = suffixes[askedformat]
+ if format_suffixes then
+ for i=1,#format_suffixes do
+ wantedfiles[#wantedfiles+1] = filename .. "." .. format_suffixes[i]
+ end
end
- return { filename }
+ end
+ filetype = askedformat
+ if trace_locating then
+ report_resolving("using given filetype '%s'",filetype)
end
end
+ return filetype, wantedfiles
+end
+
+local function find_direct(filename,allresults)
+ if not dangerous[askedformat] and isreadable(filename) then
+ if trace_detail then
+ report_resolving("file '%s' found directly",filename)
+ end
+ return "direct", { filename }
+ end
+end
+
+local function find_wildcard(filename,allresults)
if find(filename,'%*') then
if trace_locating then
report_resolving("checking wildcard '%s'", filename)
end
- result = resolvers.findwildcardfiles(filename) -- we can use th elocal
- elseif file.is_qualified_path(filename) then
- if isreadable(filename) then
- if trace_locating then
- report_resolving("qualified name '%s'", filename)
- end
- result = { filename }
- else
- local forcedname, ok, suffix = "", false, fileextname(filename)
- if suffix == "" then -- why
- local format_suffixes = askedformat == "" and resolvers.defaultsuffixes or suffixes[askedformat]
- if format_suffixes then
- for i=1,#format_suffixes do
- local s = format_suffixes[i]
- forcedname = filename .. "." .. s
- if isreadable(forcedname) then
- if trace_locating then
- report_resolving("no suffix, forcing format filetype '%s'", s)
- end
- result, ok = { forcedname }, true
- break
- end
- end
- end
- end
- if not ok and suffix ~= "" then
- -- try to find in tree (no suffix manipulation), here we search for the
- -- matching last part of the name
- local basename = filebasename(filename)
- local pattern = lpegmatch(preparetreepattern,filename)
- -- messy .. to be sorted out
- local savedformat = askedformat
- local format = savedformat or ""
- if format == "" then
- askedformat = resolvers.formatofsuffix(suffix)
- end
- if not format then
- askedformat = "othertextfiles" -- kind of everything, maybe texinput is better
- end
- --
- if basename ~= filename then
- local resolved = collect_instance_files(basename,askedformat,allresults)
- if #result == 0 then -- shouldn't this be resolved ?
- local lowered = lower(basename)
- if filename ~= lowered then
- resolved = collect_instance_files(lowered,askedformat,allresults)
- end
- end
- resolvers.format = savedformat
- --
- for r=1,#resolved do
- local rr = resolved[r]
- if find(rr,pattern) then
- result[#result+1], ok = rr, true
- end
- end
- end
- -- a real wildcard:
- --
- -- if not ok then
- -- local filelist = collect_files({basename})
- -- for f=1,#filelist do
- -- local ff = filelist[f][3] or ""
- -- if find(ff,pattern) then
- -- result[#result+1], ok = ff, true
- -- end
- -- end
- -- end
- end
- if not ok and trace_locating then
- report_resolving("qualified name '%s'", filename)
- end
+ local method, result = resolvers.findwildcardfiles(filename)
+ if result then
+ return "wildcard", result
end
- else
- -- search spec
- local filetype, done, wantedfiles, ext = '', false, { }, fileextname(filename)
- -- -- tricky as filename can be bla.1.2.3
- -- if not suffixmap[ext] then --- probably needs to be done elsewhere too
- -- wantedfiles[#wantedfiles+1] = filename
- -- end
- wantedfiles[#wantedfiles+1] = filename
- if askedformat == "" then
- if ext == "" or not suffixmap[ext] then
- local defaultsuffixes = resolvers.defaultsuffixes
- for i=1,#defaultsuffixes do
- local forcedname = filename .. '.' .. defaultsuffixes[i]
- wantedfiles[#wantedfiles+1] = forcedname
- filetype = resolvers.formatofsuffix(forcedname)
+ end
+end
+
+local function find_qualified(filename,allresults) -- this one will be split too
+ if not file.is_qualified_path(filename) then
+ return
+ end
+ if trace_locating then
+ report_resolving("checking qualified name '%s'", filename)
+ end
+ if isreadable(filename) then
+ if trace_detail then
+ report_resolving("qualified file '%s' found", filename)
+ end
+ return "qualified", { filename }
+ end
+ if trace_detail then
+ report_resolving("locating qualified file '%s'", filename)
+ end
+ local forcedname, suffix = "", fileextname(filename)
+ if suffix == "" then -- why
+ local format_suffixes = askedformat == "" and resolvers.defaultsuffixes or suffixes[askedformat]
+ if format_suffixes then
+ for i=1,#format_suffixes do
+ local s = format_suffixes[i]
+ forcedname = filename .. "." .. s
+ if isreadable(forcedname) then
if trace_locating then
- report_resolving("forcing filetype '%s'",filetype)
+ report_resolving("no suffix, forcing format filetype '%s'", s)
end
+ return "qualified", { forcedname }
end
- else
- filetype = resolvers.formatofsuffix(filename)
- if trace_locating then
- report_resolving("using suffix based filetype '%s'",filetype)
+ end
+ end
+ end
+ if suffix and suffix ~= "" then
+ -- try to find in tree (no suffix manipulation), here we search for the
+ -- matching last part of the name
+ local basename = filebasename(filename)
+ local pattern = lpegmatch(preparetreepattern,filename)
+ -- messy .. to be sorted out
+ local savedformat = askedformat
+ local format = savedformat or ""
+ if format == "" then
+ askedformat = resolvers.formatofsuffix(suffix)
+ end
+ if not format then
+ askedformat = "othertextfiles" -- kind of everything, maybe all
+ end
+ --
+ if basename ~= filename then
+ local resolved = collect_instance_files(basename,askedformat,allresults)
+ if #resolved == 0 then
+ local lowered = lower(basename)
+ if filename ~= lowered then
+ resolved = collect_instance_files(lowered,askedformat,allresults)
end
end
- else
- if ext == "" or not suffixmap[ext] then
- local format_suffixes = suffixes[askedformat]
- if format_suffixes then
- for i=1,#format_suffixes do
- wantedfiles[#wantedfiles+1] = filename .. "." .. format_suffixes[i]
+ resolvers.format = savedformat
+ --
+ if #resolved > 0 then
+ local result = { }
+ for r=1,#resolved do
+ local rr = resolved[r]
+ if find(rr,pattern) then
+ result[#result+1] = rr
end
end
+ if #result > 0 then
+ return "qualified", result
+ end
end
- filetype = askedformat
- if trace_locating then
- report_resolving("using given filetype '%s'",filetype)
- end
end
- local typespec = resolvers.variableofformat(filetype)
- local pathlist = resolvers.expandedpathlist(typespec)
- if not pathlist or #pathlist == 0 then
- -- no pathlist, access check only / todo == wildcard
- if trace_detail then
- report_resolving("checking filename '%s', filetype '%s', wanted files '%s'",filename, filetype or '?',concat(wantedfiles," | "))
+ -- a real wildcard:
+ --
+ -- local filelist = collect_files({basename})
+ -- result = { }
+ -- for f=1,#filelist do
+ -- local ff = filelist[f][3] or ""
+ -- if find(ff,pattern) then
+ -- result[#result+1], ok = ff, true
+ -- end
+ -- end
+ -- if #result > 0 then
+ -- return "qualified", result
+ -- end
+ end
+end
+
+local function check_subpath(fname)
+ if isreadable(fname) then
+ if trace_detail then
+ report_resolving("found '%s' by deep scanning",fname)
+ end
+ return fname
+ end
+end
+
+local function find_intree(filename,filetype,wantedfiles,allresults)
+ local typespec = resolvers.variableofformat(filetype)
+ local pathlist = resolvers.expandedpathlist(typespec)
+ local method = "intree"
+ if pathlist and #pathlist > 0 then
+ -- list search
+ local filelist = collect_files(wantedfiles)
+ local dirlist = { }
+ if filelist then
+ for i=1,#filelist do
+ dirlist[i] = filedirname(filelist[i][3]) .. "/" -- was [2] .. gamble
end
- for k=1,#wantedfiles do
- local fname = wantedfiles[k]
- if fname and isreadable(fname) then
- filename, done = fname, true
- result[#result+1] = filejoin('.',fname)
- break
+ end
+ if trace_detail then
+ report_resolving("checking filename '%s'",filename)
+ end
+ local result = { }
+ for k=1,#pathlist do
+ local path = pathlist[k]
+ local pathname = lpegmatch(inhibitstripper,path)
+ local doscan = path == pathname -- no ^!!
+ if not find (pathname,'//$') then
+ doscan = false -- we check directly on the path
+ end
+ local done = false
+ -- using file list
+ if filelist then -- database
+ -- compare list entries with permitted pattern -- /xx /xx//
+ local expression = makepathexpression(pathname)
+ if trace_detail then
+ report_resolving("using pattern '%s' for path '%s'",expression,pathname)
end
- end
- -- this is actually 'other text files' or 'any' or 'whatever'
- local filelist = collect_files(wantedfiles)
- local fl = filelist and filelist[1]
- if fl then
- filename = fl[3] -- not local?
- result[#result+1] = resolvers.resolve(filename)
- done = true
- end
- else
- -- list search
- local filelist = collect_files(wantedfiles)
- local dirlist = { }
- if filelist then
- for i=1,#filelist do
- dirlist[i] = filedirname(filelist[i][3]) .. "/" -- was [2] .. gamble
- end
- end
- if trace_detail then
- report_resolving("checking filename '%s'",filename)
- end
- for k=1,#pathlist do
- local path = pathlist[k]
- local pathname = lpegmatch(inhibitstripper,path)
- local doscan = path == pathname -- no ^!!
- if not find (pathname, '//$') then doscan = false end
- done = false
- -- using file list
- if filelist then
- -- compare list entries with permitted pattern -- /xx /xx//
- local expression = makepathexpression(pathname)
- if trace_detail then
- report_resolving("using pattern '%s' for path '%s'",expression,pathname)
- end
- for k=1,#filelist do
- local fl = filelist[k]
- local f = fl[2]
- local d = dirlist[k]
- if find(d,expression) then
- -- todo, test for readable
- result[#result+1] = resolvers.resolve(fl[3]) -- no shortcut
- done = true
- if allresults then
- if trace_detail then
- report_resolving("match to '%s' in hash for file '%s' and path '%s', continue scanning",expression,f,d)
- end
- else
- if trace_detail then
- report_resolving("match to '%s' in hash for file '%s' and path '%s', quit scanning",expression,f,d)
- end
- break
+ for k=1,#filelist do
+ local fl = filelist[k]
+ local f = fl[2]
+ local d = dirlist[k]
+ if find(d,expression) then
+ -- todo, test for readable
+ result[#result+1] = resolvers.resolve(fl[3]) -- no shortcut
+ done = true
+ if allresults then
+ if trace_detail then
+ report_resolving("match to '%s' in hash for file '%s' and path '%s', continue scanning",expression,f,d)
end
- elseif trace_detail then
- report_resolving("no match to '%s' in hash for file '%s' and path '%s'",expression,f,d)
+ else
+ if trace_detail then
+ report_resolving("match to '%s' in hash for file '%s' and path '%s', quit scanning",expression,f,d)
+ end
+ break
end
+ elseif trace_detail then
+ report_resolving("no match to '%s' in hash for file '%s' and path '%s'",expression,f,d)
end
end
- if not done then
- -- check if on disk / unchecked / does not work at all / also zips
- -- TH perhaps it did not work because of missing resolvers.resolve()
- -- and resolvers.scanfiles() calls ...
- pathname = resolvers.resolve(pathname)
- local scheme = url.hasscheme(pathname)
- if not scheme or scheme == "file" then
- local pname = gsub(pathname,"%.%*$",'')
- if not find(pname,"%*") then
- local ppname = gsub(pname,"/+$","")
- if can_be_dir(ppname) then
- local files = {}
- if doscan then files = resolvers.scanfiles(ppname) end
+ end
+ if done then
+ method = "database"
+ else
+ method = "filesystem" -- bonus, even when !! is specified
+ pathname = gsub(pathname,"/+$","")
+ pathname = resolvers.resolve(pathname)
+ local scheme = url.hasscheme(pathname)
+ if not scheme or scheme == "file" then
+ local pname = gsub(pathname,"%.%*$",'')
+ if not find(pname,"%*") then
+ if can_be_dir(pname) then
+ -- quick root scan first
+ for k=1,#wantedfiles do
+ local w = wantedfiles[k]
+ local fname = check_subpath(filejoin(pname,w))
+ if fname then
+ result[#result+1] = fname
+ done = true
+ if not allresults then
+ break
+ end
+ end
+ end
+ if not done and doscan then
+ -- collect files in path (and cache the result)
+ local files = resolvers.simplescanfiles(pname,false,true)
for k=1,#wantedfiles do
local w = wantedfiles[k]
- local subpath = ''
- if files[w] then
- subpath = type(files[w]) == "table" and files[w][1] or files[w]
- end
- local fname = filejoin(ppname,subpath,w)
- if isreadable(fname) then
- if trace_detail then
- report_resolving("found '%s' by scanning",fname)
+ local subpath = files[w]
+ if not subpath or subpath == "" then
+ -- rootscan already done
+ elseif type(subpath) == "string" then
+ local fname = check_subpath(filejoin(pname,subpath,w))
+ if fname then
+ result[#result+1] = fname
+ done = true
+ if not allresults then
+ break
+ end
+ end
+ else
+ for i=1,#subpath do
+ local sp = subpath[i]
+ if sp == "" then
+ -- roottest already done
+ else
+ local fname = check_subpath(filejoin(pname,sp,w))
+ if fname then
+ result[#result+1] = fname
+ done = true
+ if not allresults then
+ break
+ end
+ end
+ end
+ end
+ if done and not allresults then
+ break
end
- result[#result+1] = fname
- done = true
- if not allresults then break end
end
end
- else
- -- no access needed for non existing path, speedup (esp in large tree with lots of fake)
end
end
+ else
+ -- no access needed for non existing path, speedup (esp in large tree with lots of fake)
end
end
- if not done and doscan then
- -- todo: slow path scanning ... although we now have tree:// supported in $TEXMF
- end
- if done and not allresults then break end
+ end
+ -- todo recursive scanning
+ if done and not allresults then
+ break
end
end
+ if #result > 0 then
+ return method, result
+ end
end
- for k=1,#result do
- local rk = collapsepath(result[k])
- result[k] = rk
- resolvers.registerintrees(rk) -- for tracing used files
+end
+
+local function find_onpath(filename,filetype,wantedfiles,allresults)
+ if trace_detail then
+ report_resolving("checking filename '%s', filetype '%s', wanted files '%s'",filename, filetype or '?',concat(wantedfiles," | "))
+ end
+ local result = { }
+ for k=1,#wantedfiles do
+ local fname = wantedfiles[k]
+ if fname and isreadable(fname) then
+ filename = fname
+ result[#result+1] = filejoin('.',fname)
+ if not allresults then
+ break
+ end
+ end
end
- if stamp then
- instance.found[stamp] = result
+ if #result > 0 then
+ return "onpath", result
end
- return result
end
--- -- -- begin of main file search routing -- -- --
-
-
-
-
-
-
-
+local function find_otherwise(filename,filetype,wantedfiles,allresults) -- other text files | any | whatever
+ local filelist = collect_files(wantedfiles)
+ local fl = filelist and filelist[1]
+ if fl then
+ return "otherwise", { resolvers.resolve(fl[3]) } -- filename
+ end
+end
+-- we could have a loop over the 6 functions but then we'd have to
+-- always analyze.
+collect_instance_files = function(filename,askedformat,allresults) -- uses nested
+ askedformat = askedformat or ""
+ filename = collapsepath(filename)
+ if allresults then
+ -- no need for caching, only used for tracing
+ local filetype, wantedfiles = find_analyze(filename,askedformat)
+ local results = {
+ { find_direct (filename,true) },
+ { find_wildcard (filename,true) },
+ { find_qualified(filename,true) },
+ { find_intree (filename,filetype,wantedfiles,true) },
+ { find_onpath (filename,filetype,wantedfiles,true) },
+ { find_otherwise(filename,filetype,wantedfiles,true) },
+ }
+ local result, status, done = { }, { }, { }
+ for k, r in next, results do
+ local method, list = r[1], r[2]
+ if method and list then
+ for i=1,#list do
+ local c = collapsepath(list[i])
+ if not done[c] then
+ result[#result+1] = c
+ done[c] = true
+ end
+ status[#status+1] = format("%-10s: %s",method,c)
+ end
+ end
+ end
+ if trace_detail then
+ report_resolving("lookup status: %s",table.serialize(status,filename))
+ end
+ return result, status
+ else
+ local method, result, stamp, filetype, wantedfiles
+ if instance.remember then
+ stamp = format("%s--%s", filename, askedformat)
+ result = stamp and instance.found[stamp]
+ if result then
+ if trace_locating then
+ report_resolving("remembered file '%s'",filename)
+ end
+ return result
+ end
+ end
+ method, result = find_direct(filename)
+ if not result then
+ method, result = find_wildcard(filename)
+ if not result then
+ method, result = find_qualified(filename)
+ if not result then
+ filetype, wantedfiles = find_analyze(filename,askedformat)
+ method, result = find_intree(filename,filetype,wantedfiles)
+ if not result then
+ method, result = find_onpath(filename,filetype,wantedfiles)
+ if not result then
+ method, result = find_otherwise(filename,filetype,wantedfiles)
+ end
+ end
+ end
+ end
+ end
+ if result and #result > 0 then
+ local foundname = collapsepath(result[1])
+ resolvers.registerintrees(filename,askedformat,filetype,method,foundname)
+ result = { foundname }
+ else
+ result = { } -- maybe false
+ end
+ if stamp then
+ if trace_locating then
+ report_resolving("remembering file '%s'",filename)
+ end
+ instance.found[stamp] = result
+ end
+ return result
+ end
+end
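+
+-- In short: a lookup now tries direct, wildcard, qualified, intree, onpath and
+-- otherwise, in that order, and remembers the first hit. A small illustration
+-- (the file name is just an example):
+--
+-- local found = resolvers.findfiles("context.mkiv")
+-- print(found[1]) -- the first match, located by one of the methods above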
-- -- -- end of main file search routing -- -- --
+
local function findfiles(filename,filetype,allresults)
- local result = collect_instance_files(filename,filetype or "",allresults)
- if #result == 0 then
+ local result, status = collect_instance_files(filename,filetype or "",allresults)
+ if not result or #result == 0 then
local lowered = lower(filename)
if filename ~= lowered then
- return collect_instance_files(lowered,filetype or "",allresults)
+ result, status = collect_instance_files(lowered,filetype or "",allresults)
end
end
- return result
+ return result or { }, status
end
function resolvers.findfiles(filename,filetype)
@@ -12370,6 +13646,10 @@ function resolvers.load(option)
return files and next(files) and true
end
+function resolvers.loadtime()
+ return statistics.elapsedtime(instance)
+end
+
local function report(str)
if trace_locating then
report_resolving(str) -- has already verbose
@@ -12383,6 +13663,9 @@ function resolvers.dowithfilesandreport(command, files, ...) -- will move
if trace_locating then
report('') -- ?
end
+ if type(files) == "string" then
+ files = { files }
+ end
for f=1,#files do
local file = files[f]
local result = command(file,...)
@@ -12540,6 +13823,8 @@ local cleanpath, findgivenfile, expansion = resolvers.cleanpath, resolvers.findg
local getenv = resolvers.getenv -- we can probably also use resolvers.expansion
local P, Cs, lpegmatch = lpeg.P, lpeg.Cs, lpeg.match
+-- getenv = function(...) return resolvers.getenv(...) end -- needs checking (definitions change later on)
+
prefixes.environment = function(str)
return cleanpath(expansion(str))
end
@@ -12578,7 +13863,7 @@ end
prefixes.filename = function(str)
local fullname = findgivenfile(str) or ""
- return cleanpath(file.basename((fullname ~= "" and fullname) or str))
+ return cleanpath(file.basename((fullname ~= "" and fullname) or str)) -- no cleanpath needed here
end
prefixes.pathname = function(str)
@@ -12621,8 +13906,9 @@ function resolvers.allprefixes(separator)
end
local function _resolve_(method,target)
- if prefixes[method] then
- return prefixes[method](target)
+ local action = prefixes[method]
+ if action then
+ return action(target)
else
return method .. ":" .. target
end
@@ -12637,7 +13923,7 @@ end
local function resolve(str) -- use schemes, this one is then for the commandline only
local res = resolved[str]
if not res then
- res = gsub(str,"([a-z][a-z]+):([^ \"\';]*)",_resolve_)
+ res = gsub(str,"([a-z][a-z]+):([^ \"\';]*)",_resolve_) -- home:xx;selfautoparent:xx; etc
resolved[str] = res
abstract[res] = str
end
@@ -12797,7 +14083,7 @@ end
function generators.file(specification)
local path = specification.filename
- local content = resolvers.scanfiles(path)
+ local content = resolvers.scanfiles(path,false,true) -- scan once
resolvers.registerfilehash(path,content,true)
end
@@ -13152,6 +14438,18 @@ local archives = zip.archives
zip.registeredfiles = zip.registeredfiles or { }
local registeredfiles = zip.registeredfiles
+local limited = false
+
+directives.register("system.inputmode", function(v)
+ if not limited then
+ local i_limiter = io.i_limiter(v)
+ if i_limiter then
+ zip.open = i_limiter.protect(zip.open)
+ limited = true
+ end
+ end
+end)
+
local function validzip(str) -- todo: use url splitter
if not find(str,"^zip://") then
return "zip:///" .. str
@@ -13419,11 +14717,11 @@ function resolvers.finders.tree(specification)
end
function resolvers.locators.tree(specification)
- -- TH: resolvers.resolve() call was missing here
- local name = resolvers.resolve(specification.filename)
- if name ~= '' and lfs.isdir(name) then
+ local name = specification.filename
+ local realname = resolvers.resolve(name) -- no shortcut
+ if realname and realname ~= '' and lfs.isdir(realname) then
if trace_locating then
- report_trees("locator '%s' found",name)
+ report_trees("locator '%s' found",realname)
end
resolvers.appendhash('tree',name,false) -- don't cache
elseif trace_locating then
@@ -13437,6 +14735,8 @@ function resolvers.hashers.tree(specification)
report_trees("analysing '%s'",name)
end
resolvers.methodhandler("hashers",name)
+
+ resolvers.generators.file(specification)
end
resolvers.concatinators.tree = resolvers.concatinators.file
@@ -13596,8 +14896,15 @@ local function loaded(libpaths,name,simple)
end
package.loaders[2] = function(name) -- was [#package.loaders+1]
- if trace_locating then -- mode detail
- report_libraries("! locating '%s'",name)
+ if file.suffix(name) == "" then
+ name = file.addsuffix(name,"lua") -- maybe a list
+ if trace_locating then -- mode detail
+ report_libraries("! locating '%s' with forced suffix",name)
+ end
+ else
+ if trace_locating then -- mode detail
+ report_libraries("! locating '%s'",name)
+ end
end
for i=1,#libformats do
local format = libformats[i]
@@ -13804,7 +15111,13 @@ function resolvers.load_tree(tree,resolve)
-- Beware, we need to obey the relocatable autoparent so we
-- set TEXMFCNF to its raw value. This is somewhat tricky when
-- we run a mkii job from within. Therefore, in mtxrun, there
- -- is a resolve applied when we're in mkii/kpse mode.
+ -- is a resolve applied when we're in mkii/kpse mode or when
+ -- --resolve is passed to mtxrun. Maybe we should also set the
+ -- local AUTOPARENT etc. although these are always set anew.
+
+ if resolve then
+ resolvers.luacnfspec = resolvers.resolve(resolvers.luacnfspec)
+ end
setenv('SELFAUTOPARENT', newroot)
setenv('SELFAUTODIR', newtree)
@@ -13812,11 +15125,12 @@ function resolvers.load_tree(tree,resolve)
setenv('TEXROOT', newroot)
setenv('TEXOS', texos)
setenv('TEXMFOS', texmfos)
- setenv('TEXMFCNF', resolvers.luacnfspec, not resolve)
- setenv("PATH", newpath .. io.pathseparator .. getenv("PATH"))
+ setenv('TEXMFCNF', resolvers.luacnfspec,true) -- already resolved
+ setenv('PATH', newpath .. io.pathseparator .. getenv('PATH'))
report_tds("changing from root '%s' to '%s'",oldroot,newroot)
- report_tds("prepending '%s' to binary path",newpath)
+ report_tds("prepending '%s' to PATH",newpath)
+ report_tds("setting TEXMFCNF to '%s'",resolvers.luacnfspec)
report_tds()
end
end
@@ -13837,6 +15151,7 @@ if not modules then modules = { } end modules ['data-lst'] = {
-- used in mtxrun, can be loaded later .. todo
local find, concat, upper, format = string.find, table.concat, string.upper, string.format
+local fastcopy, sortedpairs = table.fastcopy, table.sortedpairs
resolvers.listers = resolvers.listers or { }
@@ -13867,10 +15182,10 @@ function resolvers.listers.variables(pattern)
end
end
end
- local env = table.fastcopy(environment)
- local var = table.fastcopy(variables)
- local exp = table.fastcopy(expansions)
- for key, value in table.sortedpairs(configured) do
+ local env = fastcopy(environment)
+ local var = fastcopy(variables)
+ local exp = fastcopy(expansions)
+ for key, value in sortedpairs(configured) do
if key ~= "" and (pattern == "" or find(upper(key),pattern)) then
report_lists(key)
report_lists(" env: %s",tabstr(rawget(environment,key)) or "unset")
@@ -13879,9 +15194,9 @@ function resolvers.listers.variables(pattern)
report_lists(" res: %s",resolvers.resolve(expansions[key]) or "unset")
end
end
- instance.environment = table.fastcopy(env)
- instance.variables = table.fastcopy(var)
- instance.expansions = table.fastcopy(exp)
+ instance.environment = fastcopy(env)
+ instance.variables = fastcopy(var)
+ instance.expansions = fastcopy(exp)
end
function resolvers.listers.configurations(report)
@@ -14168,8 +15483,8 @@ own = { } -- not local, might change
own.libs = { -- order can be made better
'l-string.lua',
- 'l-lpeg.lua',
'l-table.lua',
+ 'l-lpeg.lua',
'l-io.lua',
'l-number.lua',
'l-set.lua',
@@ -14240,7 +15555,7 @@ end
-- End of hack.
-local format, gsub, gmatch, match = string.format, string.gsub, string.gmatch, string.match
+local format, gsub, gmatch, match, find = string.format, string.gsub, string.gmatch, string.match, string.find
local concat = table.concat
own.name = (environment and environment.ownname) or arg[0] or 'mtxrun.lua'
@@ -14362,7 +15677,7 @@ local helpinfo = [[
--resolve resolve prefixed arguments
--ctxlua run internally (using preloaded libs)
--internal run script using built in libraries (same as --ctxlua)
---locate locate given filename
+--locate locate given filename in database (default) or system (--first --all --detail)
--autotree use texmf tree cf. env 'texmfstart_tree' or 'texmfstarttree'
--tree=pathtotree use given texmf tree (default file: 'setuptex.tmf')
@@ -14410,7 +15725,7 @@ local helpinfo = [[
local application = logs.application {
name = "mtxrun",
- banner = "ConTeXt TDS Runner Tool 1.30",
+ banner = "ConTeXt TDS Runner Tool 1.31",
helpinfo = helpinfo,
}
@@ -14455,12 +15770,12 @@ runners.registered = {
xmltools = { 'xmltools.rb', true },
-- luatools = { 'luatools.lua', true },
mtxtools = { 'mtxtools.rb', true },
- pdftrimwhite = { 'pdftrimwhite.pl', false }
+ pdftrimwhite = { 'pdftrimwhite.pl', false },
}
runners.launchers = {
windows = { },
- unix = { }
+ unix = { },
}
-- like runners.libpath("framework"): looks on script's subpath
@@ -14487,7 +15802,7 @@ function runners.prepare()
end
local touchname = environment.argument("iftouched")
if type(touchname) == "string" and touchname ~= "" then
- local oldname, newname = string.split(touchname, ",")
+ local oldname, newname = string.splitup(touchname, ",")
if oldname and newname and oldname ~= "" and newname ~= "" then
if not file.needs_updating(oldname,newname) then
if e_verbose then
@@ -14521,11 +15836,11 @@ function runners.execute_script(fullname,internal,nosplit)
if path ~= "" then
result = fullname
elseif name then
- name = name:gsub("^int[%a]*:",function()
+ name = gsub(name,"^int[%a]*:",function()
internal = true
return ""
end )
- name = name:gsub("^script:","")
+ name = gsub(name,"^script:","")
if suffix == "" and runners.registered[name] and runners.registered[name][1] then
name = runners.registered[name][1]
suffix = file.extname(name)
@@ -14555,13 +15870,15 @@ function runners.execute_script(fullname,internal,nosplit)
environment.ownscript = result
dofile(result)
else
- local quotedresult = result
- if string.match(result,' ') and not string.match(result,"^\".*\"$") then quotedresult = '"' .. result .. '"' end
local binary = runners.applications[file.extname(result)]
+ result = string.quoted(string.unquoted(result))
+ -- if string.match(result,' ') and not string.match(result,"^\".*\"$") then
+ -- result = '"' .. result .. '"'
+ -- end
if binary and binary ~= "" then
- quotedresult = binary .. " " .. quotedresult
+ result = binary .. " " .. result
end
- local command = quotedresult .. " " .. environment.reconstructcommandline(environment.arguments_after,noquote)
+ local command = result .. " " .. environment.reconstructcommandline(environment.arguments_after,noquote)
if e_verbose then
report()
report("executing: %s",command)
@@ -14606,7 +15923,7 @@ function runners.execute_program(fullname)
local before, after = environment.splitarguments(fullname)
for k=1,#after do after[k] = resolvers.resolve(after[k]) end
environment.initializearguments(after)
- fullname = fullname:gsub("^bin:","")
+ fullname = gsub(fullname,"^bin:","")
local command = fullname .. " " .. (environment.reconstructcommandline(after or "",noquote) or "")
report()
report("executing: %s",command)
@@ -14667,10 +15984,24 @@ function runners.resolve_string(filename)
end
end
-function runners.locate_file(filename)
- -- differs from texmfstart where locate appends .com .exe .bat ... todo
+-- differs from texmfstart where locate appends .com .exe .bat ... todo
+
+function runners.locate_file(filename) -- was given file but only searches in tree
if filename and filename ~= "" then
- runners.report_location(resolvers.findgivenfile(filename))
+ if environment.argument("first") then
+ runners.report_location(resolvers.findfile(filename))
+ -- resolvers.dowithfilesandreport(resolvers.findfile,filename)
+ elseif environment.argument("all") then
+ local result, status = resolvers.findfiles(filename)
+ if status and environment.argument("detail") then
+ runners.report_location(status)
+ else
+ runners.report_location(result)
+ end
+ else
+ runners.report_location(resolvers.findgivenfile(filename))
+ -- resolvers.dowithfilesandreport(resolvers.findgivenfile,filename)
+ end
end
end
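+-- a rough usage sketch of the lookup modes above (flag names as listed in the helpinfo):
+--
+--   mtxrun --locate somefile.tex            database lookup via findgivenfile (the default)
+--   mtxrun --locate --first somefile.tex    first match via findfile
+--   mtxrun --locate --all somefile.tex      all matches via findfiles (--detail reports the status table)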
@@ -14679,12 +16010,12 @@ function runners.locate_platform()
end
function runners.report_location(result)
- if e_verbose then
- reportline()
- if result and result ~= "" then
- report(result)
- else
- report("not found")
+ if type(result) == "table" then
+ for i=1,#result do
+ if i > 1 then
+ io.write("\n")
+ end
+ io.write(result[i])
end
else
io.write(result)
@@ -14742,8 +16073,8 @@ function resolvers.launch(str)
end
function runners.launch_file(filename)
- instance.allresults = true
trackers.enable("resolvers.locating")
+ local allresults = environment.arguments["all"]
local pattern = environment.arguments["pattern"]
if not pattern or pattern == "" then
pattern = filename
@@ -14751,15 +16082,15 @@ function runners.launch_file(filename)
if not pattern or pattern == "" then
report("provide name or --pattern=")
else
- local t = resolvers.findfiles(pattern)
+ local t = resolvers.findfiles(pattern,nil,allresults)
if not t or #t == 0 then
- t = resolvers.findfiles("*/" .. pattern)
+ t = resolvers.findfiles("*/" .. pattern,nil,allresults)
end
if not t or #t == 0 then
- t = resolvers.findfiles("*/" .. pattern .. "*")
+ t = resolvers.findfiles("*/" .. pattern .. "*",nil,allresults)
end
if t and #t > 0 then
- if environment.arguments["all"] then
+ if allresults then
for _, v in pairs(t) do
report("launching %s", v)
resolvers.launch(v)
@@ -14774,6 +16105,11 @@ function runners.launch_file(filename)
end
end
+local mtxprefixes = {
+ { "^mtx%-", "mtx-" },
+ { "^mtx%-t%-", "mtx-t-" },
+}
+
function runners.find_mtx_script(filename)
local function found(name)
local path = file.dirname(name)
@@ -14799,24 +16135,27 @@ function runners.find_mtx_script(filename)
return fullname
end
-- mtx- prefix checking
- local mtxprefix = (filename:find("^mtx%-") and "") or "mtx-"
- -- context namespace, mtx-<filename>
- fullname = mtxprefix .. filename
- fullname = found(fullname) or resolvers.findfile(fullname)
- if fullname and fullname ~= "" then
- return fullname
- end
- -- context namespace, mtx-<filename>s
- fullname = mtxprefix .. basename .. "s" .. "." .. suffix
- fullname = found(fullname) or resolvers.findfile(fullname)
- if fullname and fullname ~= "" then
- return fullname
- end
- -- context namespace, mtx-<filename minus trailing s>
- fullname = mtxprefix .. basename:gsub("s$","") .. "." .. suffix
- fullname = found(fullname) or resolvers.findfile(fullname)
- if fullname and fullname ~= "" then
- return fullname
+ for i=1,#mtxprefixes do
+ local mtxprefix = mtxprefixes[i]
+ mtxprefix = find(filename,mtxprefix[1]) and "" or mtxprefix[2]
+ -- context namespace, mtx-<filename>
+ fullname = mtxprefix .. filename
+ fullname = found(fullname) or resolvers.findfile(fullname)
+ if fullname and fullname ~= "" then
+ return fullname
+ end
+ -- context namespace, mtx-<filename>s
+ fullname = mtxprefix .. basename .. "s" .. "." .. suffix
+ fullname = found(fullname) or resolvers.findfile(fullname)
+ if fullname and fullname ~= "" then
+ return fullname
+ end
+ -- context namespace, mtx-<filename minus trailing s>
+ fullname = mtxprefix .. gsub(basename,"s$","") .. "." .. suffix
+ fullname = found(fullname) or resolvers.findfile(fullname)
+ if fullname and fullname ~= "" then
+ return fullname
+ end
end
-- context namespace, just <filename>
fullname = resolvers.findfile(filename)
@@ -14963,9 +16302,11 @@ instance.lsrmode = environment.argument("lsr") or false
local is_mkii_stub = runners.registered[file.removesuffix(file.basename(filename))]
-if environment.argument("usekpse") or environment.argument("forcekpse") or is_mkii_stub then
+local e_argument = environment.argument
+
+if e_argument("usekpse") or e_argument("forcekpse") or is_mkii_stub then
- resolvers.load_tree(environment.argument('tree'),true) -- force resolve of TEXMFCNF
+ resolvers.load_tree(e_argument('tree'),true) -- force resolve of TEXMFCNF
os.setenv("engine","")
os.setenv("progname","")
@@ -14978,7 +16319,7 @@ if environment.argument("usekpse") or environment.argument("forcekpse") or is_mk
other = "other text files",
}
- local progname = environment.argument("progname") or 'context'
+ local progname = e_argument("progname") or 'context'
local function kpse_initialized()
texconfig.kpse_init = true
@@ -14993,7 +16334,7 @@ if environment.argument("usekpse") or environment.argument("forcekpse") or is_mk
local findfile = resolvers.findfile
local showpath = resolvers.showpath
- if environment.argument("forcekpse") then
+ if e_argument("forcekpse") then
function resolvers.findfile(name,kind)
return (kpse_initialized():find_file(resolvers.cleanpath(name),(kind ~= "" and (remapper[kind] or kind)) or "tex") or "") or ""
@@ -15002,7 +16343,7 @@ if environment.argument("usekpse") or environment.argument("forcekpse") or is_mk
return (kpse_initialized():show_path(name)) or ""
end
- elseif environment.argument("usekpse") or is_mkii_stub then
+ elseif e_argument("usekpse") or is_mkii_stub then
resolvers.load()
@@ -15041,12 +16382,12 @@ else
end
end
- resolvers.load_tree(environment.argument('tree'))
+ resolvers.load_tree(e_argument('tree'),e_argument("resolve"))
end
-if environment.argument("selfmerge") then
+if e_argument("selfmerge") then
-- embed used libraries
@@ -15056,27 +16397,27 @@ if environment.argument("selfmerge") then
utilities.merger.selfmerge(own.name,own.libs,{ found })
end
-elseif environment.argument("selfclean") then
+elseif e_argument("selfclean") then
-- remove embedded libraries
runners.loadbase()
utilities.merger.selfclean(own.name)
-elseif environment.argument("selfupdate") then
+elseif e_argument("selfupdate") then
runners.loadbase()
trackers.enable("resolvers.locating")
resolvers.updatescript(own.name,"mtxrun")
-elseif environment.argument("ctxlua") or environment.argument("internal") then
+elseif e_argument("ctxlua") or e_argument("internal") then
-- run a script by loading it (using libs)
runners.loadbase()
ok = runners.execute_script(filename,true)
-elseif environment.argument("script") or environment.argument("scripts") then
+elseif e_argument("script") or e_argument("scripts") then
-- run a script by loading it (using libs), pass args
@@ -15087,109 +16428,110 @@ elseif environment.argument("script") or environment.argument("scripts") then
ok = runners.execute_ctx_script(filename)
end
-elseif environment.argument("execute") then
+elseif e_argument("execute") then
-- execute script
runners.loadbase()
ok = runners.execute_script(filename)
-elseif environment.argument("direct") then
+elseif e_argument("direct") then
-- equals bin:
runners.loadbase()
ok = runners.execute_program(filename)
-elseif environment.argument("edit") then
+elseif e_argument("edit") then
-- edit file
runners.loadbase()
runners.edit_script(filename)
-elseif environment.argument("launch") then
+elseif e_argument("launch") then
runners.loadbase()
runners.launch_file(filename)
-elseif environment.argument("makestubs") then
+elseif e_argument("makestubs") then
-- make stubs (deprecated)
runners.handle_stubs(true)
-elseif environment.argument("removestubs") then
+elseif e_argument("removestubs") then
-- remove stubs (deprecated)
runners.loadbase()
runners.handle_stubs(false)
-elseif environment.argument("resolve") then
+elseif e_argument("resolve") then
-- resolve string
runners.loadbase()
runners.resolve_string(filename)
-elseif environment.argument("locate") then
+elseif e_argument("locate") then
- -- locate file
+ -- locate file (only database)
runners.loadbase()
runners.locate_file(filename)
-elseif environment.argument("platform") or environment.argument("show-platform") then
+elseif e_argument("platform") or e_argument("show-platform") then
-- locate platform
runners.loadbase()
runners.locate_platform()
-elseif environment.argument("prefixes") then
+elseif e_argument("prefixes") then
runners.loadbase()
runners.prefixes()
-elseif environment.argument("timedrun") then
+elseif e_argument("timedrun") then
-- timed run
runners.loadbase()
runners.timedrun(filename)
-elseif environment.argument("variables") or environment.argument("show-variables") or environment.argument("expansions") or environment.argument("show-expansions") then
+elseif e_argument("variables") or e_argument("show-variables") or e_argument("expansions") or e_argument("show-expansions") then
-- luatools: runners.execute_ctx_script("mtx-base","--expansions",filename)
resolvers.load("nofiles")
- resolvers.listers.variables(environment.argument("pattern"))
+ resolvers.listers.variables(e_argument("pattern"))
-elseif environment.argument("configurations") or environment.argument("show-configurations") then
+elseif e_argument("configurations") or e_argument("show-configurations") then
-- luatools: runners.execute_ctx_script("mtx-base","--configurations",filename)
resolvers.load("nofiles")
resolvers.listers.configurations()
-elseif environment.argument("find-file") then
+elseif e_argument("find-file") then
-- luatools: runners.execute_ctx_script("mtx-base","--find-file",filename)
resolvers.load()
- local e_pattern = environment.argument("pattern")
- local e_format = environment.argument("format")
+ local e_all = e_argument("all")
+ local e_pattern = e_argument("pattern")
+ local e_format = e_argument("format")
+ local finder = e_all and resolvers.findfiles or resolvers.findfile
if not e_pattern then
runners.register_arguments(filename)
environment.initializearguments(environment.arguments_after)
- resolvers.dowithfilesandreport(resolvers.findfiles,environment.files,e_format)
+ resolvers.dowithfilesandreport(finder,environment.files,e_format)
elseif type(e_pattern) == "string" then
- instance.allresults = true -- brrrr
- resolvers.dowithfilesandreport(resolvers.findfiles,{ e_pattern }, e_format)
+ resolvers.dowithfilesandreport(finder,{ e_pattern },e_format)
end
-elseif environment.argument("find-path") then
+elseif e_argument("find-path") then
-- luatools: runners.execute_ctx_script("mtx-base","--find-path",filename)
@@ -15201,7 +16543,7 @@ elseif environment.argument("find-path") then
print(path)
end
-elseif environment.argument("expand-braces") then
+elseif e_argument("expand-braces") then
-- luatools: runners.execute_ctx_script("mtx-base","--expand-braces",filename)
@@ -15210,7 +16552,7 @@ elseif environment.argument("expand-braces") then
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.expandbraces, environment.files)
-elseif environment.argument("expand-path") then
+elseif e_argument("expand-path") then
-- luatools: runners.execute_ctx_script("mtx-base","--expand-path",filename)
@@ -15219,7 +16561,7 @@ elseif environment.argument("expand-path") then
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.expandpath, environment.files)
-elseif environment.argument("expand-var") or environment.argument("expand-variable") then
+elseif e_argument("expand-var") or e_argument("expand-variable") then
-- luatools: runners.execute_ctx_script("mtx-base","--expand-var",filename)
@@ -15228,7 +16570,7 @@ elseif environment.argument("expand-var") or environment.argument("expand-variab
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.expansion, environment.files)
-elseif environment.argument("show-path") or environment.argument("path-value") then
+elseif e_argument("show-path") or e_argument("path-value") then
-- luatools: runners.execute_ctx_script("mtx-base","--show-path",filename)
@@ -15237,7 +16579,7 @@ elseif environment.argument("show-path") or environment.argument("path-value") t
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.showpath, environment.files)
-elseif environment.argument("var-value") or environment.argument("show-value") then
+elseif e_argument("var-value") or e_argument("show-value") then
-- luatools: runners.execute_ctx_script("mtx-base","--show-value",filename)
@@ -15246,28 +16588,36 @@ elseif environment.argument("var-value") or environment.argument("show-value") t
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.variable,environment.files)
-elseif environment.argument("format-path") then
+elseif e_argument("format-path") then
-- luatools: runners.execute_ctx_script("mtx-base","--format-path",filename)
resolvers.load()
report(caches.getwritablepath("format"))
-elseif environment.argument("pattern") then
+elseif e_argument("pattern") then
-- luatools
- runners.execute_ctx_script("mtx-base","--pattern='" .. environment.argument("pattern") .. "'",filename)
+ runners.execute_ctx_script("mtx-base","--pattern='" .. e_argument("pattern") .. "'",filename)
-elseif environment.argument("generate") then
+elseif e_argument("generate") then
-- luatools
- instance.renewcache = true
- trackers.enable("resolvers.locating")
- resolvers.load()
+ if filename and filename ~= "" then
+ resolvers.load("nofiles")
+ trackers.enable("resolvers.locating")
+ resolvers.renew(filename)
+ else
+ instance.renewcache = true
+ trackers.enable("resolvers.locating")
+ resolvers.load()
+ end
-elseif environment.argument("make") or environment.argument("ini") or environment.argument("compile") then
+ e_verbose = true
+
+elseif e_argument("make") or e_argument("ini") or e_argument("compile") then
-- luatools: runners.execute_ctx_script("mtx-base","--make",filename)
@@ -15275,29 +16625,33 @@ elseif environment.argument("make") or environment.argument("ini") or environmen
trackers.enable("resolvers.locating")
environment.make_format(filename)
-elseif environment.argument("run") then
+elseif e_argument("run") then
-- luatools
runners.execute_ctx_script("mtx-base","--run",filename)
-elseif environment.argument("fmt") then
+elseif e_argument("fmt") then
-- luatools
runners.execute_ctx_script("mtx-base","--fmt",filename)
-elseif environment.argument("help") and filename=='base' then
+elseif e_argument("help") and filename=='base' then
-- luatools
runners.execute_ctx_script("mtx-base","--help")
-elseif environment.argument("help") or filename=='help' or filename == "" then
+elseif e_argument("version") then
+
+ application.version()
+
+elseif e_argument("help") or filename=='help' or filename == "" then
application.help()
-elseif filename:find("^bin:") then
+elseif find(filename,"^bin:") then
runners.loadbase()
ok = runners.execute_program(filename)
diff --git a/Master/bin/sparc-solaris/mtxrun b/Master/bin/sparc-solaris/mtxrun
index 7adcd3023a8..335c4fcb9e0 100755
--- a/Master/bin/sparc-solaris/mtxrun
+++ b/Master/bin/sparc-solaris/mtxrun
@@ -160,509 +160,6 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['l-lpeg'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local lpeg = require("lpeg")
-
-local type = type
-
--- Beware, we predefine a bunch of patterns here and one reason for doing so
--- is that we get consistent behaviour in some of the visualizers.
-
-lpeg.patterns = lpeg.patterns or { } -- so that we can share
-local patterns = lpeg.patterns
-
-local P, R, S, V, match = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.match
-local Ct, C, Cs, Cc = lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc
-local lpegtype = lpeg.type
-
-local utfcharacters = string.utfcharacters
-local utfgmatch = unicode and unicode.utf8.gmatch
-
-local anything = P(1)
-local endofstring = P(-1)
-local alwaysmatched = P(true)
-
-patterns.anything = anything
-patterns.endofstring = endofstring
-patterns.beginofstring = alwaysmatched
-patterns.alwaysmatched = alwaysmatched
-
-local digit, sign = R('09'), S('+-')
-local cr, lf, crlf = P("\r"), P("\n"), P("\r\n")
-local newline = crlf + cr + lf
-local escaped = P("\\") * anything
-local squote = P("'")
-local dquote = P('"')
-local space = P(" ")
-
-local utfbom_32_be = P('\000\000\254\255')
-local utfbom_32_le = P('\255\254\000\000')
-local utfbom_16_be = P('\255\254')
-local utfbom_16_le = P('\254\255')
-local utfbom_8 = P('\239\187\191')
-local utfbom = utfbom_32_be + utfbom_32_le
- + utfbom_16_be + utfbom_16_le
- + utfbom_8
-local utftype = utfbom_32_be / "utf-32-be" + utfbom_32_le / "utf-32-le"
- + utfbom_16_be / "utf-16-be" + utfbom_16_le / "utf-16-le"
- + utfbom_8 / "utf-8" + alwaysmatched / "unknown"
-
-local utf8next = R("\128\191")
-
-patterns.utf8one = R("\000\127")
-patterns.utf8two = R("\194\223") * utf8next
-patterns.utf8three = R("\224\239") * utf8next * utf8next
-patterns.utf8four = R("\240\244") * utf8next * utf8next * utf8next
-patterns.utfbom = utfbom
-patterns.utftype = utftype
-
-local utf8char = patterns.utf8one + patterns.utf8two + patterns.utf8three + patterns.utf8four
-local validutf8char = utf8char^0 * endofstring * Cc(true) + Cc(false)
-
-patterns.utf8 = utf8char
-patterns.utf8char = utf8char
-patterns.validutf8 = validutf8char
-patterns.validutf8char = validutf8char
-
-patterns.digit = digit
-patterns.sign = sign
-patterns.cardinal = sign^0 * digit^1
-patterns.integer = sign^0 * digit^1
-patterns.float = sign^0 * digit^0 * P('.') * digit^1
-patterns.cfloat = sign^0 * digit^0 * P(',') * digit^1
-patterns.number = patterns.float + patterns.integer
-patterns.cnumber = patterns.cfloat + patterns.integer
-patterns.oct = P("0") * R("07")^1
-patterns.octal = patterns.oct
-patterns.HEX = P("0x") * R("09","AF")^1
-patterns.hex = P("0x") * R("09","af")^1
-patterns.hexadecimal = P("0x") * R("09","AF","af")^1
-patterns.lowercase = R("az")
-patterns.uppercase = R("AZ")
-patterns.letter = patterns.lowercase + patterns.uppercase
-patterns.space = space
-patterns.tab = P("\t")
-patterns.spaceortab = patterns.space + patterns.tab
-patterns.eol = S("\n\r")
-patterns.spacer = S(" \t\f\v") -- + string.char(0xc2, 0xa0) if we want utf (cf mail roberto)
-patterns.newline = newline
-patterns.emptyline = newline^1
-patterns.nonspacer = 1 - patterns.spacer
-patterns.whitespace = patterns.eol + patterns.spacer
-patterns.nonwhitespace = 1 - patterns.whitespace
-patterns.equal = P("=")
-patterns.comma = P(",")
-patterns.commaspacer = P(",") * patterns.spacer^0
-patterns.period = P(".")
-patterns.colon = P(":")
-patterns.semicolon = P(";")
-patterns.underscore = P("_")
-patterns.escaped = escaped
-patterns.squote = squote
-patterns.dquote = dquote
-patterns.nosquote = (escaped + (1-squote))^0
-patterns.nodquote = (escaped + (1-dquote))^0
-patterns.unsingle = (squote/"") * patterns.nosquote * (squote/"")
-patterns.undouble = (dquote/"") * patterns.nodquote * (dquote/"")
-patterns.unquoted = patterns.undouble + patterns.unsingle -- more often undouble
-patterns.unspacer = ((patterns.spacer^1)/"")^0
-
-patterns.somecontent = (anything - newline - space)^1 -- (utf8char - newline - space)^1
-patterns.beginline = #(1-newline)
-
-local unquoted = Cs(patterns.unquoted * endofstring) -- not C
-
-function string.unquoted(str)
- return match(unquoted,str) or str
-end
-
-
-function lpeg.anywhere(pattern) --slightly adapted from website
- return P { P(pattern) + 1 * V(1) } -- why so complex?
-end
-
-function lpeg.splitter(pattern, action)
- return (((1-P(pattern))^1)/action+1)^0
-end
-
-local splitters_s, splitters_m = { }, { }
-
-local function splitat(separator,single)
- local splitter = (single and splitters_s[separator]) or splitters_m[separator]
- if not splitter then
- separator = P(separator)
- local other = C((1 - separator)^0)
- if single then
- local any = anything
- splitter = other * (separator * C(any^0) + "") -- ?
- splitters_s[separator] = splitter
- else
- splitter = other * (separator * other)^0
- splitters_m[separator] = splitter
- end
- end
- return splitter
-end
-
-lpeg.splitat = splitat
-
-
-local cache = { }
-
-function lpeg.split(separator,str)
- local c = cache[separator]
- if not c then
- c = Ct(splitat(separator))
- cache[separator] = c
- end
- return match(c,str)
-end
-
-function string.split(str,separator)
- local c = cache[separator]
- if not c then
- c = Ct(splitat(separator))
- cache[separator] = c
- end
- return match(c,str)
-end
-
-local spacing = patterns.spacer^0 * newline -- sort of strip
-local empty = spacing * Cc("")
-local nonempty = Cs((1-spacing)^1) * spacing^-1
-local content = (empty + nonempty)^1
-
-patterns.textline = content
-
-
-local linesplitter = Ct(splitat(newline))
-
-patterns.linesplitter = linesplitter
-
-function string.splitlines(str)
- return match(linesplitter,str)
-end
-
-local utflinesplitter = utfbom^-1 * Ct(splitat(newline))
-
-patterns.utflinesplitter = utflinesplitter
-
-function string.utfsplitlines(str)
- return match(utflinesplitter,str)
-end
-
-
-local cache = { }
-
-function lpeg.checkedsplit(separator,str)
- local c = cache[separator]
- if not c then
- separator = P(separator)
- local other = C((1 - separator)^1)
- c = Ct(separator^0 * other * (separator^1 * other)^0)
- cache[separator] = c
- end
- return match(c,str)
-end
-
-function string.checkedsplit(str,separator)
- local c = cache[separator]
- if not c then
- separator = P(separator)
- local other = C((1 - separator)^1)
- c = Ct(separator^0 * other * (separator^1 * other)^0)
- cache[separator] = c
- end
- return match(c,str)
-end
-
-
-local f1 = string.byte
-
-local function f2(s) local c1, c2 = f1(s,1,2) return c1 * 64 + c2 - 12416 end
-local function f3(s) local c1, c2, c3 = f1(s,1,3) return (c1 * 64 + c2) * 64 + c3 - 925824 end
-local function f4(s) local c1, c2, c3, c4 = f1(s,1,4) return ((c1 * 64 + c2) * 64 + c3) * 64 + c4 - 63447168 end
-
-local utf8byte = patterns.utf8one/f1 + patterns.utf8two/f2 + patterns.utf8three/f3 + patterns.utf8four/f4
-
-patterns.utf8byte = utf8byte
-
-
-
-local cache = { }
-
-function lpeg.stripper(str)
- if type(str) == "string" then
- local s = cache[str]
- if not s then
- s = Cs(((S(str)^1)/"" + 1)^0)
- cache[str] = s
- end
- return s
- else
- return Cs(((str^1)/"" + 1)^0)
- end
-end
-
-local cache = { }
-
-function lpeg.keeper(str)
- if type(str) == "string" then
- local s = cache[str]
- if not s then
- s = Cs((((1-S(str))^1)/"" + 1)^0)
- cache[str] = s
- end
- return s
- else
- return Cs((((1-str)^1)/"" + 1)^0)
- end
-end
-
-function lpeg.frontstripper(str) -- or pattern (yet undocumented)
- return (P(str) + P(true)) * Cs(P(1)^0)
-end
-
-function lpeg.endstripper(str) -- or pattern (yet undocumented)
- return Cs((1 - P(str) * P(-1))^0)
-end
-
--- Just for fun I looked at the used bytecode and
--- p = (p and p + pp) or pp gets one more (testset).
-
-function lpeg.replacer(one,two)
- if type(one) == "table" then
- local no = #one
- if no > 0 then
- local p
- for i=1,no do
- local o = one[i]
- local pp = P(o[1]) / o[2]
- if p then
- p = p + pp
- else
- p = pp
- end
- end
- return Cs((p + 1)^0)
- end
- else
- two = two or ""
- return Cs((P(one)/two + 1)^0)
- end
-end
-
-local splitters_f, splitters_s = { }, { }
-
-function lpeg.firstofsplit(separator) -- always return value
- local splitter = splitters_f[separator]
- if not splitter then
- separator = P(separator)
- splitter = C((1 - separator)^0)
- splitters_f[separator] = splitter
- end
- return splitter
-end
-
-function lpeg.secondofsplit(separator) -- nil if not split
- local splitter = splitters_s[separator]
- if not splitter then
- separator = P(separator)
- splitter = (1 - separator)^0 * separator * C(anything^0)
- splitters_s[separator] = splitter
- end
- return splitter
-end
-
-function lpeg.balancer(left,right)
- left, right = P(left), P(right)
- return P { left * ((1 - left - right) + V(1))^0 * right }
-end
-
-
-
-local nany = utf8char/""
-
-function lpeg.counter(pattern)
- pattern = Cs((P(pattern)/" " + nany)^0)
- return function(str)
- return #match(pattern,str)
- end
-end
-
-if utfgmatch then
-
- function lpeg.count(str,what) -- replaces string.count
- if type(what) == "string" then
- local n = 0
- for _ in utfgmatch(str,what) do
- n = n + 1
- end
- return n
- else -- 4 times slower but still faster than / function
- return #match(Cs((P(what)/" " + nany)^0),str)
- end
- end
-
-else
-
- local cache = { }
-
- function lpeg.count(str,what) -- replaces string.count
- if type(what) == "string" then
- local p = cache[what]
- if not p then
- p = Cs((P(what)/" " + nany)^0)
- cache[p] = p
- end
- return #match(p,str)
- else -- 4 times slower but still faster than / function
- return #match(Cs((P(what)/" " + nany)^0),str)
- end
- end
-
-end
-
-local patterns_escapes = { -- also defines in l-string
- ["%"] = "%%",
- ["."] = "%.",
- ["+"] = "%+", ["-"] = "%-", ["*"] = "%*",
- ["["] = "%[", ["]"] = "%]",
- ["("] = "%)", [")"] = "%)",
- -- ["{"] = "%{", ["}"] = "%}"
- -- ["^"] = "%^", ["$"] = "%$",
-}
-
-local simple_escapes = { -- also defines in l-string
- ["-"] = "%-",
- ["."] = "%.",
- ["?"] = ".",
- ["*"] = ".*",
-}
-
-local p = Cs((S("-.+*%()[]") / patterns_escapes + anything)^0)
-local s = Cs((S("-.+*%()[]") / simple_escapes + anything)^0)
-
-function string.escapedpattern(str,simple)
- return match(simple and s or p,str)
-end
-
--- utf extensies
-
-lpeg.UP = lpeg.P
-
-if utfcharacters then
-
- function lpeg.US(str)
- local p
- for uc in utfcharacters(str) do
- if p then
- p = p + P(uc)
- else
- p = P(uc)
- end
- end
- return p
- end
-
-
-elseif utfgmatch then
-
- function lpeg.US(str)
- local p
- for uc in utfgmatch(str,".") do
- if p then
- p = p + P(uc)
- else
- p = P(uc)
- end
- end
- return p
- end
-
-else
-
- function lpeg.US(str)
- local p
- local f = function(uc)
- if p then
- p = p + P(uc)
- else
- p = P(uc)
- end
- end
- match((utf8char/f)^0,str)
- return p
- end
-
-end
-
-local range = Cs(utf8byte) * (Cs(utf8byte) + Cc(false))
-
-local utfchar = unicode and unicode.utf8 and unicode.utf8.char
-
-function lpeg.UR(str,more)
- local first, last
- if type(str) == "number" then
- first = str
- last = more or first
- else
- first, last = match(range,str)
- if not last then
- return P(str)
- end
- end
- if first == last then
- return P(str)
- elseif utfchar and last - first < 8 then -- a somewhat arbitrary criterium
- local p
- for i=first,last do
- if p then
- p = p + P(utfchar(i))
- else
- p = P(utfchar(i))
- end
- end
- return p -- nil when invalid range
- else
- local f = function(b)
- return b >= first and b <= last
- end
- return utf8byte / f -- nil when invalid range
- end
-end
-
-
-
-function lpeg.oneof(list,...) -- lpeg.oneof("elseif","else","if","then")
- if type(list) ~= "table" then
- list = { list, ... }
- end
- -- sort(list) -- longest match first
- local p = P(list[1])
- for l=2,#list do
- p = p + P(list[l])
- end
- return p
-end
-
-function lpeg.is_lpeg(p)
- return p and lpegtype(p) == "pattern"
-end
-
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
if not modules then modules = { } end modules ['l-table'] = {
version = 1.001,
comment = "companion to luat-lib.mkiv",
@@ -675,11 +172,12 @@ local type, next, tostring, tonumber, ipairs, table, string = type, next, tostri
local concat, sort, insert, remove = table.concat, table.sort, table.insert, table.remove
local format, find, gsub, lower, dump, match = string.format, string.find, string.gsub, string.lower, string.dump, string.match
local getmetatable, setmetatable = getmetatable, setmetatable
+local getinfo = debug.getinfo
-- Starting with version 5.2 Lua no longer provide ipairs, which makes
-- sense. As we already used the for loop and # in most places the
-- impact on ConTeXt was not that large; the remaining ipairs already
--- have been replaced. In a similar fashio we also hardly used pairs.
+-- have been replaced. In a similar fashion we also hardly used pairs.
--
-- Just in case, we provide the fallbacks as discussed in Programming
-- in Lua (http://www.lua.org/pil/7.3.html):
@@ -1022,6 +520,8 @@ end
-- problem: there is no good number_to_string converter with the best resolution
+local function dummy() end
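+-- string.dump cannot serialize C functions, so those are replaced by this dummy stub below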
+
local function do_serialize(root,name,depth,level,indexed)
if level > 0 then
depth = depth .. " "
@@ -1214,19 +714,20 @@ local function do_serialize(root,name,depth,level,indexed)
end
elseif t == "function" then
if functions then
+ local f = getinfo(v).what == "C" and dump(dummy) or dump(v)
+ -- local f = getinfo(v).what == "C" and dump(function(...) return v(...) end) or dump(v)
if tk == "number" then -- or find(k,"^%d+$") then
if hexify then
- handle(format("%s [0x%04X]=loadstring(%q),",depth,k,dump(v)))
+ handle(format("%s [0x%04X]=loadstring(%q),",depth,k,f))
else
- handle(format("%s [%s]=loadstring(%q),",depth,k,dump(v)))
+ handle(format("%s [%s]=loadstring(%q),",depth,k,f))
end
elseif tk == "boolean" then
- handle(format("%s [%s]=loadstring(%q),",depth,tostring(k),dump(v)))
+ handle(format("%s [%s]=loadstring(%q),",depth,tostring(k),f))
elseif noquotes and not reserved[k] and find(k,"^%a[%w%_]*$") then
- handle(format("%s %s=loadstring(%q),",depth,k,dump(v)))
+ handle(format("%s %s=loadstring(%q),",depth,k,f))
else
- -- handle(format("%s [%q]=loadstring(%q),",depth,k,dump(v)))
- handle(format("%s [%q]=loadstring(%q),",depth,k,debug.getinfo(v).what == "C" and "C code" or dump(v)))
+ handle(format("%s [%q]=loadstring(%q),",depth,k,f))
end
end
else
@@ -1571,6 +1072,730 @@ function table.has_one_entry(t)
return t and not next(t,next(t))
end
+-- new
+
+function table.loweredkeys(t) -- maybe utf
+ local l = { }
+ for k, v in next, t do
+ l[lower(k)] = v
+ end
+ return l
+end
+
+-- new, might move (maybe duplicate)
+
+function table.unique(old)
+ local hash = { }
+ local new = { }
+ local n = 0
+ for i=1,#old do
+ local oi = old[i]
+ if not hash[oi] then
+ n = n + 1
+ new[n] = oi
+ hash[oi] = true
+ end
+ end
+ return new
+end
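+-- a small sketch of the intended behaviour:
+--
+--   table.unique { "a", "b", "a", "c", "b" } -- { "a", "b", "c" } (first occurrences, order kept)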
+
+-- function table.sorted(t,...)
+-- table.sort(t,...)
+-- return t -- still sorts in-place
+-- end
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+if not modules then modules = { } end modules ['l-lpeg'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+
+-- a new lpeg fails on a #(1-P(":")) test and really needs a + P(-1)
+
+local lpeg = require("lpeg")
+
+-- tracing (only used when we encounter a problem in integration of lpeg in luatex)
+
+local report = texio and texio.write_nl or print
+
+
+
+
+local type = type
+local byte, char, gmatch = string.byte, string.char, string.gmatch
+
+-- Beware, we predefine a bunch of patterns here and one reason for doing so
+-- is that we get consistent behaviour in some of the visualizers.
+
+lpeg.patterns = lpeg.patterns or { } -- so that we can share
+local patterns = lpeg.patterns
+
+local P, R, S, V, match = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.match
+local Ct, C, Cs, Cc = lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc
+local lpegtype = lpeg.type
+
+local utfcharacters = string.utfcharacters
+local utfgmatch = unicode and unicode.utf8.gmatch
+
+local anything = P(1)
+local endofstring = P(-1)
+local alwaysmatched = P(true)
+
+patterns.anything = anything
+patterns.endofstring = endofstring
+patterns.beginofstring = alwaysmatched
+patterns.alwaysmatched = alwaysmatched
+
+local digit, sign = R('09'), S('+-')
+local cr, lf, crlf = P("\r"), P("\n"), P("\r\n")
+local newline = crlf + S("\r\n") -- cr + lf
+local escaped = P("\\") * anything
+local squote = P("'")
+local dquote = P('"')
+local space = P(" ")
+
+local utfbom_32_be = P('\000\000\254\255')
+local utfbom_32_le = P('\255\254\000\000')
+local utfbom_16_be = P('\255\254')
+local utfbom_16_le = P('\254\255')
+local utfbom_8 = P('\239\187\191')
+local utfbom = utfbom_32_be + utfbom_32_le
+ + utfbom_16_be + utfbom_16_le
+ + utfbom_8
+local utftype = utfbom_32_be / "utf-32-be" + utfbom_32_le / "utf-32-le"
+ + utfbom_16_be / "utf-16-be" + utfbom_16_le / "utf-16-le"
+ + utfbom_8 / "utf-8" + alwaysmatched / "unknown"
+
+local utf8next = R("\128\191")
+
+patterns.utf8one = R("\000\127")
+patterns.utf8two = R("\194\223") * utf8next
+patterns.utf8three = R("\224\239") * utf8next * utf8next
+patterns.utf8four = R("\240\244") * utf8next * utf8next * utf8next
+patterns.utfbom = utfbom
+patterns.utftype = utftype
+
+local utf8char = patterns.utf8one + patterns.utf8two + patterns.utf8three + patterns.utf8four
+local validutf8char = utf8char^0 * endofstring * Cc(true) + Cc(false)
+
+patterns.utf8 = utf8char
+patterns.utf8char = utf8char
+patterns.validutf8 = validutf8char
+patterns.validutf8char = validutf8char
+
+patterns.digit = digit
+patterns.sign = sign
+patterns.cardinal = sign^0 * digit^1
+patterns.integer = sign^0 * digit^1
+patterns.float = sign^0 * digit^0 * P('.') * digit^1
+patterns.cfloat = sign^0 * digit^0 * P(',') * digit^1
+patterns.number = patterns.float + patterns.integer
+patterns.cnumber = patterns.cfloat + patterns.integer
+patterns.oct = P("0") * R("07")^1
+patterns.octal = patterns.oct
+patterns.HEX = P("0x") * R("09","AF")^1
+patterns.hex = P("0x") * R("09","af")^1
+patterns.hexadecimal = P("0x") * R("09","AF","af")^1
+patterns.lowercase = R("az")
+patterns.uppercase = R("AZ")
+patterns.letter = patterns.lowercase + patterns.uppercase
+patterns.space = space
+patterns.tab = P("\t")
+patterns.spaceortab = patterns.space + patterns.tab
+patterns.eol = S("\n\r")
+patterns.spacer = S(" \t\f\v") -- + char(0xc2, 0xa0) if we want utf (cf mail roberto)
+patterns.newline = newline
+patterns.emptyline = newline^1
+patterns.nonspacer = 1 - patterns.spacer
+patterns.whitespace = patterns.eol + patterns.spacer
+patterns.nonwhitespace = 1 - patterns.whitespace
+patterns.equal = P("=")
+patterns.comma = P(",")
+patterns.commaspacer = P(",") * patterns.spacer^0
+patterns.period = P(".")
+patterns.colon = P(":")
+patterns.semicolon = P(";")
+patterns.underscore = P("_")
+patterns.escaped = escaped
+patterns.squote = squote
+patterns.dquote = dquote
+patterns.nosquote = (escaped + (1-squote))^0
+patterns.nodquote = (escaped + (1-dquote))^0
+patterns.unsingle = (squote/"") * patterns.nosquote * (squote/"")
+patterns.undouble = (dquote/"") * patterns.nodquote * (dquote/"")
+patterns.unquoted = patterns.undouble + patterns.unsingle -- more often undouble
+patterns.unspacer = ((patterns.spacer^1)/"")^0
+
+patterns.somecontent = (anything - newline - space)^1 -- (utf8char - newline - space)^1
+patterns.beginline = #(1-newline)
+
+-- print(string.unquoted("test"))
+-- print(string.unquoted([["t\"est"]]))
+-- print(string.unquoted([["t\"est"x]]))
+-- print(string.unquoted("\'test\'"))
+-- print(string.unquoted('"test"'))
+-- print(string.unquoted('"test"'))
+
+function lpeg.anywhere(pattern) --slightly adapted from website
+ return P { P(pattern) + 1 * V(1) } -- why so complex?
+end
+
+function lpeg.splitter(pattern, action)
+ return (((1-P(pattern))^1)/action+1)^0
+end
+
+function lpeg.tsplitter(pattern, action)
+ return Ct((((1-P(pattern))^1)/action+1)^0)
+end
+
+-- problem: the separator can be an lpeg pattern and that does not hash too well, but
+-- it's quite okay as the key is then not garbage collected
+
+local splitters_s, splitters_m, splitters_t = { }, { }, { }
+
+local function splitat(separator,single)
+ local splitter = (single and splitters_s[separator]) or splitters_m[separator]
+ if not splitter then
+ separator = P(separator)
+ local other = C((1 - separator)^0)
+ if single then
+ local any = anything
+ splitter = other * (separator * C(any^0) + "") -- ?
+ splitters_s[separator] = splitter
+ else
+ splitter = other * (separator * other)^0
+ splitters_m[separator] = splitter
+ end
+ end
+ return splitter
+end
+
+local function tsplitat(separator)
+ local splitter = splitters_t[separator]
+ if not splitter then
+ splitter = Ct(splitat(separator))
+ splitters_t[separator] = splitter
+ end
+ return splitter
+end
+
+lpeg.splitat = splitat
+lpeg.tsplitat = tsplitat
+
+function string.splitup(str,separator)
+ if not separator then
+ separator = ","
+ end
+ return match(splitters_m[separator] or splitat(separator),str)
+end
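+-- a small usage sketch (assuming the splitters above):
+--
+--   local old, new = string.splitup("oldfile,newfile")  -- "oldfile", "newfile"
+--   local words    = match(tsplitat(","),"a,b,c")       -- { "a", "b", "c" }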
+
+
+local cache = { }
+
+function lpeg.split(separator,str)
+ local c = cache[separator]
+ if not c then
+ c = tsplitat(separator)
+ cache[separator] = c
+ end
+ return match(c,str)
+end
+
+function string.split(str,separator)
+ local c = cache[separator]
+ if not c then
+ c = tsplitat(separator)
+ cache[separator] = c
+ end
+ return match(c,str)
+end
+
+local spacing = patterns.spacer^0 * newline -- sort of strip
+local empty = spacing * Cc("")
+local nonempty = Cs((1-spacing)^1) * spacing^-1
+local content = (empty + nonempty)^1
+
+patterns.textline = content
+
+
+local linesplitter = tsplitat(newline)
+
+patterns.linesplitter = linesplitter
+
+function string.splitlines(str)
+ return match(linesplitter,str)
+end
+
+local utflinesplitter = utfbom^-1 * tsplitat(newline)
+
+patterns.utflinesplitter = utflinesplitter
+
+function string.utfsplitlines(str)
+ return match(utflinesplitter,str or "")
+end
+
+
+local cache = { }
+
+function lpeg.checkedsplit(separator,str)
+ local c = cache[separator]
+ if not c then
+ separator = P(separator)
+ local other = C((1 - separator)^1)
+ c = Ct(separator^0 * other * (separator^1 * other)^0)
+ cache[separator] = c
+ end
+ return match(c,str)
+end
+
+function string.checkedsplit(str,separator)
+ local c = cache[separator]
+ if not c then
+ separator = P(separator)
+ local other = C((1 - separator)^1)
+ c = Ct(separator^0 * other * (separator^1 * other)^0)
+ cache[separator] = c
+ end
+ return match(c,str)
+end
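+-- compared to the plain splitters the checked variants drop empty fields, e.g.
+--
+--   lpeg.split       (",",",a,,b,")  -- { "", "a", "", "b", "" }
+--   lpeg.checkedsplit(",",",a,,b,")  -- { "a", "b" }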
+
+
+local function f2(s) local c1, c2 = byte(s,1,2) return c1 * 64 + c2 - 12416 end
+local function f3(s) local c1, c2, c3 = byte(s,1,3) return (c1 * 64 + c2) * 64 + c3 - 925824 end
+local function f4(s) local c1, c2, c3, c4 = byte(s,1,4) return ((c1 * 64 + c2) * 64 + c3) * 64 + c4 - 63447168 end
+
+local utf8byte = patterns.utf8one/byte + patterns.utf8two/f2 + patterns.utf8three/f3 + patterns.utf8four/f4
+
+patterns.utf8byte = utf8byte
+
+
+
+local cache = { }
+
+function lpeg.stripper(str)
+ if type(str) == "string" then
+ local s = cache[str]
+ if not s then
+ s = Cs(((S(str)^1)/"" + 1)^0)
+ cache[str] = s
+ end
+ return s
+ else
+ return Cs(((str^1)/"" + 1)^0)
+ end
+end
+
+local cache = { }
+
+function lpeg.keeper(str)
+ if type(str) == "string" then
+ local s = cache[str]
+ if not s then
+ s = Cs((((1-S(str))^1)/"" + 1)^0)
+ cache[str] = s
+ end
+ return s
+ else
+ return Cs((((1-str)^1)/"" + 1)^0)
+ end
+end
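+-- a small sketch: a stripper removes the given characters, a keeper keeps only them
+--
+--   match(lpeg.stripper("_"),"foo_bar_baz")    -- "foobarbaz"
+--   match(lpeg.keeper("0123456789"),"a1b22c")  -- "122"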
+
+function lpeg.frontstripper(str) -- or pattern (yet undocumented)
+ return (P(str) + P(true)) * Cs(P(1)^0)
+end
+
+function lpeg.endstripper(str) -- or pattern (yet undocumented)
+ return Cs((1 - P(str) * P(-1))^0)
+end
+
+-- Just for fun I looked at the used bytecode and
+-- p = (p and p + pp) or pp gets one more (testset).
+
+function lpeg.replacer(one,two)
+ if type(one) == "table" then
+ local no = #one
+ if no > 0 then
+ local p
+ for i=1,no do
+ local o = one[i]
+ local pp = P(o[1]) / o[2]
+ if p then
+ p = p + pp
+ else
+ p = pp
+ end
+ end
+ return Cs((p + 1)^0)
+ end
+ else
+ two = two or ""
+ return Cs((P(one)/two + 1)^0)
+ end
+end
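+-- a small sketch, one direct and one table driven replacement:
+--
+--   match(lpeg.replacer("l","L"),"hello")                     -- "heLLo"
+--   match(lpeg.replacer { { "a","A" }, { "b","B" } },"abc")   -- "ABc"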
+
+local splitters_f, splitters_s = { }, { }
+
+function lpeg.firstofsplit(separator) -- always return value
+ local splitter = splitters_f[separator]
+ if not splitter then
+ separator = P(separator)
+ splitter = C((1 - separator)^0)
+ splitters_f[separator] = splitter
+ end
+ return splitter
+end
+
+function lpeg.secondofsplit(separator) -- nil if not split
+ local splitter = splitters_s[separator]
+ if not splitter then
+ separator = P(separator)
+ splitter = (1 - separator)^0 * separator * C(anything^0)
+ splitters_s[separator] = splitter
+ end
+ return splitter
+end
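+-- a small sketch of the two split accessors:
+--
+--   match(lpeg.firstofsplit (":"),"foo:bar")  -- "foo"
+--   match(lpeg.secondofsplit(":"),"foo:bar")  -- "bar" (nil when there is nothing to split)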
+
+function lpeg.balancer(left,right)
+ left, right = P(left), P(right)
+ return P { left * ((1 - left - right) + V(1))^0 * right }
+end
+
+
+
+local nany = utf8char/""
+
+function lpeg.counter(pattern)
+ pattern = Cs((P(pattern)/" " + nany)^0)
+ return function(str)
+ return #match(pattern,str)
+ end
+end
+
+if utfgmatch then
+
+ function lpeg.count(str,what) -- replaces string.count
+ if type(what) == "string" then
+ local n = 0
+ for _ in utfgmatch(str,what) do
+ n = n + 1
+ end
+ return n
+ else -- 4 times slower but still faster than / function
+ return #match(Cs((P(what)/" " + nany)^0),str)
+ end
+ end
+
+else
+
+ local cache = { }
+
+ function lpeg.count(str,what) -- replaces string.count
+ if type(what) == "string" then
+ local p = cache[what]
+ if not p then
+ p = Cs((P(what)/" " + nany)^0)
+ cache[p] = p
+ end
+ return #match(p,str)
+ else -- 4 times slower but still faster than / function
+ return #match(Cs((P(what)/" " + nany)^0),str)
+ end
+ end
+
+end
+
+local patterns_escapes = { -- also defined in l-string
+ ["%"] = "%%",
+ ["."] = "%.",
+ ["+"] = "%+", ["-"] = "%-", ["*"] = "%*",
+ ["["] = "%[", ["]"] = "%]",
+ ["("] = "%)", [")"] = "%)",
+ -- ["{"] = "%{", ["}"] = "%}"
+ -- ["^"] = "%^", ["$"] = "%$",
+}
+
+local simple_escapes = { -- also defined in l-string
+ ["-"] = "%-",
+ ["."] = "%.",
+ ["?"] = ".",
+ ["*"] = ".*",
+}
+
+local p = Cs((S("-.+*%()[]") / patterns_escapes + anything)^0)
+local s = Cs((S("-.+*%()[]") / simple_escapes + anything)^0)
+
+function string.escapedpattern(str,simple)
+ return match(simple and s or p,str)
+end
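+-- a small sketch (the simple variant turns ? and * into wildcards):
+--
+--   string.escapedpattern("a.b*c")       -- "a%.b%*c"
+--   string.escapedpattern("a.b*c",true)  -- "a%.b.*c"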
+
+-- utf extensions
+
+lpeg.UP = lpeg.P
+
+if utfcharacters then
+
+ function lpeg.US(str)
+ local p
+ for uc in utfcharacters(str) do
+ if p then
+ p = p + P(uc)
+ else
+ p = P(uc)
+ end
+ end
+ return p
+ end
+
+
+elseif utfgmatch then
+
+ function lpeg.US(str)
+ local p
+ for uc in utfgmatch(str,".") do
+ if p then
+ p = p + P(uc)
+ else
+ p = P(uc)
+ end
+ end
+ return p
+ end
+
+else
+
+ function lpeg.US(str)
+ local p
+ local f = function(uc)
+ if p then
+ p = p + P(uc)
+ else
+ p = P(uc)
+ end
+ end
+ match((utf8char/f)^0,str)
+ return p
+ end
+
+end
+
+local range = Cs(utf8byte) * (Cs(utf8byte) + Cc(false))
+
+local utfchar = unicode and unicode.utf8 and unicode.utf8.char
+
+function lpeg.UR(str,more)
+ local first, last
+ if type(str) == "number" then
+ first = str
+ last = more or first
+ else
+ first, last = match(range,str)
+ if not last then
+ return P(str)
+ end
+ end
+ if first == last then
+ return P(str)
+ elseif utfchar and last - first < 8 then -- a somewhat arbitrary criterion
+ local p
+ for i=first,last do
+ if p then
+ p = p + P(utfchar(i))
+ else
+ p = P(utfchar(i))
+ end
+ end
+ return p -- nil when invalid range
+ else
+ local f = function(b)
+ return b >= first and b <= last
+ end
+ return utf8byte / f -- nil when invalid range
+ end
+end
+
+
+
+function lpeg.oneof(list,...) -- lpeg.oneof("elseif","else","if","then")
+ if type(list) ~= "table" then
+ list = { list, ... }
+ end
+ -- sort(list) -- longest match first
+ local p = P(list[1])
+ for l=2,#list do
+ p = p + P(list[l])
+ end
+ return p
+end
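+-- a small sketch (an ordered choice, so with overlapping keywords order matters):
+--
+--   local p = lpeg.oneof("one","two","three")
+--   match(p,"twofold") -- 4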
+
+function lpeg.is_lpeg(p)
+ return p and lpegtype(p) == "pattern"
+end
+
+-- For the moment here, but it might move to utilities. Beware, we need to
+-- have the longest keyword first, so 'aaa' comes before 'aa', which is why we
+-- loop back from the end, i.e. prepend.
+
+local sort, fastcopy, sortedkeys = table.sort, table.fastcopy, table.sortedkeys -- dependency!
+
+function lpeg.append(list,pp,delayed,checked)
+ local p = pp
+ if #list > 0 then
+ local keys = fastcopy(list)
+ sort(keys)
+ for i=#keys,1,-1 do
+ local k = keys[i]
+ if p then
+ p = P(k) + p
+ else
+ p = P(k)
+ end
+ end
+ elseif delayed then -- hm, it looks like the lpeg parser resolves anyway
+ local keys = sortedkeys(list)
+ if p then
+ for i=1,#keys,1 do
+ local k = keys[i]
+ local v = list[k]
+ p = P(k)/list + p
+ end
+ else
+ for i=1,#keys do
+ local k = keys[i]
+ local v = list[k]
+ if p then
+ p = P(k) + p
+ else
+ p = P(k)
+ end
+ end
+ if p then
+ p = p / list
+ end
+ end
+ elseif checked then
+ -- problem: substitution gives a capture
+ local keys = sortedkeys(list)
+ for i=1,#keys do
+ local k = keys[i]
+ local v = list[k]
+ if p then
+ if k == v then
+ p = P(k) + p
+ else
+ p = P(k)/v + p
+ end
+ else
+ if k == v then
+ p = P(k)
+ else
+ p = P(k)/v
+ end
+ end
+ end
+ else
+ local keys = sortedkeys(list)
+ for i=1,#keys do
+ local k = keys[i]
+ local v = list[k]
+ if p then
+ p = P(k)/v + p
+ else
+ p = P(k)/v
+ end
+ end
+ end
+ return p
+end
+
+-- inspect(lpeg.append({ a = "1", aa = "1", aaa = "1" } ,nil,true))
+-- inspect(lpeg.append({ ["degree celsius"] = "1", celsius = "1", degree = "1" } ,nil,true))
+
+-- function lpeg.exact_match(words,case_insensitive)
+-- local pattern = concat(words)
+-- if case_insensitive then
+-- local pattern = S(upper(characters)) + S(lower(characters))
+-- local list = { }
+-- for i=1,#words do
+-- list[lower(words[i])] = true
+-- end
+-- return Cmt(pattern^1, function(_,i,s)
+-- return list[lower(s)] and i
+-- end)
+-- else
+-- local pattern = S(concat(words))
+-- local list = { }
+-- for i=1,#words do
+-- list[words[i]] = true
+-- end
+-- return Cmt(pattern^1, function(_,i,s)
+-- return list[s] and i
+-- end)
+-- end
+-- end
+
+-- experiment:
+
+local function make(t)
+ local p
+-- for k, v in next, t do
+ for k, v in table.sortedhash(t) do
+ if not p then
+ if next(v) then
+ p = P(k) * make(v)
+ else
+ p = P(k)
+ end
+ else
+ if next(v) then
+ p = p + P(k) * make(v)
+ else
+ p = p + P(k)
+ end
+ end
+ end
+ return p
+end
+
+function lpeg.utfchartabletopattern(list)
+ local tree = { }
+ for i=1,#list do
+ local t = tree
+ for c in gmatch(list[i],".") do
+ if not t[c] then
+ t[c] = { }
+ end
+ t = t[c]
+ end
+ end
+ return make(tree)
+end
+
+-- inspect ( lpeg.utfchartabletopattern {
+-- utfchar(0x00A0), -- nbsp
+-- utfchar(0x2000), -- enquad
+-- utfchar(0x2001), -- emquad
+-- utfchar(0x2002), -- enspace
+-- utfchar(0x2003), -- emspace
+-- utfchar(0x2004), -- threeperemspace
+-- utfchar(0x2005), -- fourperemspace
+-- utfchar(0x2006), -- sixperemspace
+-- utfchar(0x2007), -- figurespace
+-- utfchar(0x2008), -- punctuationspace
+-- utfchar(0x2009), -- breakablethinspace
+-- utfchar(0x200A), -- hairspace
+-- utfchar(0x200B), -- zerowidthspace
+-- utfchar(0x202F), -- narrownobreakspace
+-- utfchar(0x205F), -- math thinspace
+-- } )
+
end -- of closure
@@ -1831,6 +2056,11 @@ function io.readstring(f,n,m)
return str
end
+--
+
+if not io.i_limiter then function io.i_limiter() end end -- dummy so we can test safely
+if not io.o_limiter then function io.o_limiter() end end -- dummy so we can test safely
+
end -- of closure
@@ -2061,7 +2291,7 @@ if not modules then modules = { } end modules ['l-os'] = {
-- maybe build io.flush in os.execute
local os = os
-local find, format, gsub, upper = string.find, string.format, string.gsub, string.upper
+local find, format, gsub, upper, gmatch = string.find, string.format, string.gsub, string.upper, string.gmatch
local concat = table.concat
local random, ceil = math.random, math.ceil
local rawget, rawset, type, getmetatable, setmetatable, tonumber = rawget, rawset, type, getmetatable, setmetatable, tonumber
@@ -2167,9 +2397,9 @@ os.type = os.type or (io.pathseparator == ";" and "windows") or "unix"
os.name = os.name or (os.type == "windows" and "mswin" ) or "linux"
if os.type == "windows" then
- os.libsuffix, os.binsuffix = 'dll', 'exe'
+ os.libsuffix, os.binsuffix, os.binsuffixes = 'dll', 'exe', { 'exe', 'cmd', 'bat' }
else
- os.libsuffix, os.binsuffix = 'so', ''
+ os.libsuffix, os.binsuffix, os.binsuffixes = 'so', '', { '' }
end
function os.launch(str)
@@ -2400,6 +2630,39 @@ function os.timezone(delta)
end
end
+local memory = { }
+
+local function which(filename)
+ local fullname = memory[filename]
+ if fullname == nil then
+ local suffix = file.suffix(filename)
+ local suffixes = suffix == "" and os.binsuffixes or { suffix }
+ for directory in gmatch(os.getenv("PATH"),"[^" .. io.pathseparator .."]+") do
+ local df = file.join(directory,filename)
+ for i=1,#suffixes do
+ local dfs = file.addsuffix(df,suffixes[i])
+ if io.exists(dfs) then
+ fullname = dfs
+ break
+ end
+ end
+ end
+ if not fullname then
+ fullname = false
+ end
+ memory[filename] = fullname
+ end
+ return fullname
+end
+
+os.which = which
+os.where = which
+
+-- print(os.which("inkscape.exe"))
+-- print(os.which("inkscape"))
+-- print(os.which("gs.exe"))
+-- print(os.which("ps2pdf"))
+
end -- of closure
@@ -2499,10 +2762,10 @@ end
local trick_1 = char(1)
local trick_2 = "^" .. trick_1 .. "/+"
-function file.join(...)
+function file.join(...) -- rather dirty
local lst = { ... }
local a, b = lst[1], lst[2]
- if a == "" then
+ if not a or a == "" then -- not a added
lst[1] = trick_1
elseif b and find(a,"^/+$") and find(b,"^/") then
lst[1] = ""
@@ -2523,6 +2786,15 @@ function file.join(...)
end
+-- We should be able to use:
+--
+-- function file.is_writable(name)
+-- local a = attributes(name) or attributes(dirname(name,"."))
+-- return a and sub(a.permissions,2,2) == "w"
+-- end
+--
+-- But after some testing Taco and I came up with:
+
function file.is_writable(name)
if lfs.isdir(name) then
name = name .. "/m_t_x_t_e_s_t.tmp"
@@ -2532,12 +2804,17 @@ function file.is_writable(name)
os.remove(name)
return true
end
- else
- local existing = lfs.isfile(name)
- f = io.open(name,"ab")
+ elseif lfs.isfile(name) then
+ local f = io.open(name,"ab")
if f then
f:close()
- if not existing then os.remove(name) end
+ return true
+ end
+ else
+ local f = io.open(name,"ab")
+ if f then
+ f:close()
+ os.remove(name)
return true
end
end
@@ -2594,7 +2871,7 @@ function file.collapsepath(str,anchor)
if element == '.' then
-- do nothing
elseif element == '..' then
- local n = i -1
+ local n = i - 1
while n > 0 do
local element = oldelements[n]
if element ~= '..' and element ~= '.' then
@@ -2690,10 +2967,42 @@ local path = C(((1-slash)^0 * slash)^0)
local suffix = period * C(P(1-period)^0 * P(-1))
local base = C((1-suffix)^0)
-local pattern = (drive + Cc("")) * (path + Cc("")) * (base + Cc("")) * (suffix + Cc(""))
+drive = drive + Cc("")
+path = path + Cc("")
+base = base + Cc("")
+suffix = suffix + Cc("")
-function file.splitname(str) -- returns drive, path, base, suffix
- return lpegmatch(pattern,str)
+local pattern_a = drive * path * base * suffix
+local pattern_b = path * base * suffix
+local pattern_c = C(drive * path) * C(base * suffix)
+
+function file.splitname(str,splitdrive)
+ if splitdrive then
+ return lpegmatch(pattern_a,str) -- returns drive, path, base, suffix
+ else
+ return lpegmatch(pattern_b,str) -- returns path, base, suffix
+ end
+end
+
+function file.nametotable(str,splitdrive) -- returns table
+ local path, drive, subpath, name, base, suffix = lpegmatch(pattern_c,str)
+ if splitdrive then
+ return {
+ path = path,
+ drive = drive,
+ subpath = subpath,
+ name = name,
+ base = base,
+ suffix = suffix,
+ }
+ else
+ return {
+ path = path,
+ name = name,
+ base = base,
+ suffix = suffix,
+ }
+ end
end
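+-- a small sketch (assuming a plain name and the drive pattern defined earlier in this file):
+--
+--   file.splitname("a/b/c.tex")   -- "a/b/", "c", "tex"
+--   file.nametotable("a/b/c.tex") -- { path = "a/b/", name = "c.tex", base = "c", suffix = "tex" }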
-- function test(t) for k, v in next, t do print(v, "=>", file.splitname(v)) end end
@@ -2828,12 +3137,19 @@ local nothing = Cc("")
local escaped = (plus / " ") + (percent * C(hexdigit * hexdigit) / tochar)
-- we assume schemes with more than 1 character (in order to avoid problems with windows disks)
+-- we also assume that when we have a scheme, we also have an authority
+
+local schemestr = Cs((escaped+(1-colon-slash-qmark-hash))^2)
+local authoritystr = Cs((escaped+(1- slash-qmark-hash))^0)
+local pathstr = Cs((escaped+(1- qmark-hash))^0)
+local querystr = Cs((escaped+(1- hash))^0)
+local fragmentstr = Cs((escaped+(1- endofstring))^0)
-local scheme = Cs((escaped+(1-colon-slash-qmark-hash))^2) * colon * slash * slash + nothing
-local authority = Cs((escaped+(1- slash-qmark-hash))^0) + nothing
-local path = slash * Cs((escaped+(1- qmark-hash))^0) + nothing
-local query = qmark * Cs((escaped+(1- hash))^0) + nothing
-local fragment = hash * Cs((escaped+(1- endofstring))^0) + nothing
+local scheme = schemestr * colon + nothing
+local authority = slash * slash * authoritystr + nothing
+local path = slash * pathstr + nothing
+local query = qmark * querystr + nothing
+local fragment = hash * fragmentstr + nothing
local validurl = scheme * authority * path * query * fragment
local parser = Ct(validurl)
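+-- a small sketch with a made up url (captures in scheme, authority, path, query, fragment order):
+--
+--   lpegmatch(parser,"http://www.pragma-ade.com/index.html?test=yes#top")
+--   -- { "http", "www.pragma-ade.com", "index.html", "test=yes", "top" }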
@@ -2854,11 +3170,14 @@ local function split(str)
return (type(str) == "string" and lpegmatch(parser,str)) or str
end
+local isscheme = schemestr * colon * slash * slash -- this test also assumes authority
+
local function hasscheme(str)
- local scheme = lpegmatch(scheme,str) -- at least one character
- return scheme and scheme ~= ""
+ local scheme = lpegmatch(isscheme,str) -- at least one character
+ return scheme ~= "" and scheme or false
end
+
-- todo: cache them
local rootletter = R("az","AZ")
@@ -3007,8 +3326,6 @@ local attributes = lfs.attributes
local walkdir = lfs.dir
local isdir = lfs.isdir
local isfile = lfs.isfile
-local mkdir = lfs.mkdir
-local chdir = lfs.chdir
local currentdir = lfs.currentdir
-- handy
@@ -3056,13 +3373,13 @@ local function collectpattern(path,patt,recurse,result)
local ok, scanner
result = result or { }
if path == "/" then
- ok, scanner = xpcall(function() return walkdir(path..".") end, function() end) -- kepler safe
+ ok, scanner, first = xpcall(function() return walkdir(path..".") end, function() end) -- kepler safe
else
- ok, scanner = xpcall(function() return walkdir(path) end, function() end) -- kepler safe
+ ok, scanner, first = xpcall(function() return walkdir(path) end, function() end) -- kepler safe
end
if ok and type(scanner) == "function" then
if not find(path,"/$") then path = path .. '/' end
- for name in scanner do
+ for name in scanner, first do
local full = path .. name
local attr = attributes(full)
local mode = attr.mode
@@ -3245,7 +3562,7 @@ if onwindows then
pth = pth .. "/" .. s
end
if make_indeed and not isdir(pth) then
- mkdir(pth)
+ lfs.mkdir(pth)
end
end
return pth, (isdir(pth) == true)
@@ -3277,7 +3594,7 @@ else
pth = pth .. "/" .. s
end
if make_indeed and not first and not isdir(pth) then
- mkdir(pth)
+ lfs.mkdir(pth)
end
end
else
@@ -3285,7 +3602,7 @@ else
for s in gmatch(str,"[^/]+") do
pth = pth .. "/" .. s
if make_indeed and not isdir(pth) then
- mkdir(pth)
+ lfs.mkdir(pth)
end
end
end
@@ -3313,10 +3630,10 @@ if onwindows then
first, last = match(str,"^([a-zA-Z]:)(.*)$")
if first and not find(last,"^/") then
local d = currentdir()
- if chdir(first) then
+ if lfs.chdir(first) then
first = dir.current()
end
- chdir(d)
+ lfs.chdir(d)
end
end
if not first then
@@ -3381,10 +3698,6 @@ local type, tonumber = type, tonumber
boolean = boolean or { }
local boolean = boolean
--- function boolean.tonumber(b)
--- return b and 1 or 0 -- test and test and return or return
--- end
-
function boolean.tonumber(b)
if b then return 1 else return 0 end -- test and return or return
end
@@ -3545,7 +3858,7 @@ local function utf16_to_utf8_be(t)
if right then
local now = 256*left + right
if more > 0 then
- now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000
+ now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000 -- the 0x10000 smells wrong
more = 0
r = r + 1
result[r] = utfchar(now)
@@ -3573,7 +3886,7 @@ local function utf16_to_utf8_le(t)
if right then
local now = 256*right + left
if more > 0 then
- now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000
+ now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000 -- the 0x10000 smells wrong
more = 0
r = r + 1
result[r] = utfchar(now)
@@ -3590,14 +3903,14 @@ local function utf16_to_utf8_le(t)
return t
end
-local function utf32_to_utf8_be(str)
+local function utf32_to_utf8_be(t)
if type(t) == "string" then
- t = utfsplitlines(str)
+ t = utfsplitlines(t)
end
local result = { } -- we reuse result
for i=1,#t do
local r, more = 0, -1
- for a,b in bytepairs(str) do
+ for a,b in bytepairs(t[i]) do
if a and b then
if more < 0 then
more = 256*256*256*a + 256*256*b
@@ -3612,17 +3925,17 @@ local function utf32_to_utf8_be(str)
end
t[i] = concat(result,"",1,r)
end
- return result
+ return t
end
-local function utf32_to_utf8_le(str)
+local function utf32_to_utf8_le(t)
if type(t) == "string" then
- t = utfsplitlines(str)
+ t = utfsplitlines(t)
end
local result = { } -- we reuse result
for i=1,#t do
local r, more = 0, -1
- for a,b in bytepairs(str) do
+ for a,b in bytepairs(t[i]) do
if a and b then
if more < 0 then
more = 256*b + a
@@ -3637,7 +3950,7 @@ local function utf32_to_utf8_le(str)
end
t[i] = concat(result,"",1,r)
end
- return result
+ return t
end
unicode.utf32_to_utf8_be = utf32_to_utf8_be
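+
+-- illustrative sketch (four big endian bytes per character, converted line by line;
+-- a made-up minimal example, not part of this change):
+--
+-- local lines = unicode.utf32_to_utf8_be("\000\000\000\065\000\000\000\066")
+-- -- lines[1] == "AB"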
@@ -3706,15 +4019,67 @@ end
local lpegmatch = lpeg.match
-local utftype = lpeg.patterns.utftype
+local patterns = lpeg.patterns
+local utftype = patterns.utftype
function unicode.filetype(data)
return data and lpegmatch(utftype,data) or "unknown"
end
+local toentities = lpeg.Cs (
+ (
+ patterns.utf8one
+ + (
+ patterns.utf8two
+ + patterns.utf8three
+ + patterns.utf8four
+ ) / function(s) local b = utfbyte(s) if b < 127 then return s else return format("&#%X;",b) end end
+ )^0
+)
+
+patterns.toentities = toentities
+
+function utf.toentities(str)
+ return lpegmatch(toentities,str)
+end
+
+local P, C, R, Cs = lpeg.P, lpeg.C, lpeg.R, lpeg.Cs
+
+local one = P(1)
+local two = C(1) * C(1)
+local four = C(R(utfchar(0xD8),utfchar(0xFF))) * C(1) * C(1) * C(1)
+
+local pattern = P("\254\255") * Cs( (
+ four / function(a,b,c,d)
+        local ab = 0x100 * byte(a) + byte(b) -- 0x100 (256), not 0xFF, to combine the bytes
+        local cd = 0x100 * byte(c) + byte(d)
+ return utfchar((ab-0xD800)*0x400 + (cd-0xDC00) + 0x10000)
+ end
+ + two / function(a,b)
+ return utfchar(byte(a)*256 + byte(b))
+ end
+ + one
+ )^1 )
+ + P("\255\254") * Cs( (
+ four / function(b,a,d,c)
+        local ab = 0x100 * byte(a) + byte(b) -- 0x100 (256), not 0xFF
+        local cd = 0x100 * byte(c) + byte(d)
+ return utfchar((ab-0xD800)*0x400 + (cd-0xDC00) + 0x10000)
+ end
+ + two / function(b,a)
+ return utfchar(byte(a)*256 + byte(b))
+ end
+ + one
+ )^1 )
+
+function string.toutf(s)
+ return lpegmatch(pattern,s) or s -- todo: utf32
+end
+
+
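+-- illustrative usage sketches (assuming utf is the unicode.utf8 table, as in the
+-- rest of this module):
+--
+-- utf.toentities("à la mode")       -- "&#E0; la mode" (non-ascii becomes a hex entity)
+-- string.toutf("\254\255\000\065")  -- "A" (utf-16 big endian, bom detected, to utf-8)
+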
end -- of closure
do -- create closure to overcome 200 locals limit
@@ -3750,8 +4115,8 @@ if not math.sind then
end
if not math.odd then
- function math.odd (n) return n % 2 == 0 end
- function math.even(n) return n % 2 ~= 0 end
+ function math.odd (n) return n % 2 ~= 0 end
+ function math.even(n) return n % 2 == 0 end
end
@@ -3771,9 +4136,10 @@ utilities = utilities or {}
utilities.tables = utilities.tables or { }
local tables = utilities.tables
-local format, gmatch = string.format, string.gmatch
+local format, gmatch, rep = string.format, string.gmatch, string.rep
local concat, insert, remove = table.concat, table.insert, table.remove
local setmetatable, getmetatable, tonumber, tostring = setmetatable, getmetatable, tonumber, tostring
+local type, next, rawset, tonumber = type, next, rawset, tonumber
function tables.definetable(target) -- defines undefined tables
local composed, t, n = nil, { }, 0
@@ -3789,14 +4155,31 @@ function tables.definetable(target) -- defines undefined tables
return concat(t,"\n")
end
-function tables.accesstable(target)
- local t = _G
+function tables.accesstable(target,root)
+ local t = root or _G
for name in gmatch(target,"([^%.]+)") do
t = t[name]
+ if not t then
+ return
+ end
end
return t
end
+function tables.migratetable(target,v,root)
+ local t = root or _G
+ local names = string.split(target,".")
+ for i=1,#names-1 do
+ local name = names[i]
+ t[name] = t[name] or { }
+ t = t[name]
+ if not t then
+ return
+ end
+ end
+ t[names[#names]] = v
+end
+
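+-- illustrative usage sketch:
+--
+-- tables.migratetable("a.b.c",123)  -- creates (global) a and a.b, then sets a.b.c = 123
+-- tables.accesstable("a.b.c")       -- 123
+-- tables.accesstable("a.b.x")       -- nil instead of an error on a missing part
+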
function tables.removevalue(t,value) -- todo: n
if value then
for i=1,#t do
@@ -3840,13 +4223,19 @@ end
-- experimental
-local function toxml(t,d,result)
+local function toxml(t,d,result,step)
for k, v in table.sortedpairs(t) do
if type(v) == "table" then
- result[#result+1] = format("%s<%s>",d,k)
- toxml(v,d.." ",result)
- result[#result+1] = format("%s</%s>",d,k)
- elseif tonumber(k) then
+ if type(k) == "number" then
+ result[#result+1] = format("%s<entry n='%s'>",d,k)
+ toxml(v,d..step,result,step)
+ result[#result+1] = format("%s</entry>",d,k)
+ else
+ result[#result+1] = format("%s<%s>",d,k)
+ toxml(v,d..step,result,step)
+ result[#result+1] = format("%s</%s>",d,k)
+ end
+ elseif type(k) == "number" then
result[#result+1] = format("%s<entry n='%s'>%s</entry>",d,k,v,k)
else
result[#result+1] = format("%s<%s>%s</%s>",d,k,tostring(v),k)
@@ -3854,17 +4243,56 @@ local function toxml(t,d,result)
end
end
-function table.toxml(t,name,nobanner)
+function table.toxml(t,name,nobanner,indent,spaces)
local noroot = name == false
local result = (nobanner or noroot) and { } or { "<?xml version='1.0' standalone='yes' ?>" }
+ local indent = rep(" ",indent or 0)
+ local spaces = rep(" ",spaces or 1)
if noroot then
- toxml( t, "", result)
+        toxml( t, indent, result, spaces)
else
- toxml( { [name or "root"] = t }, "", result)
+ toxml( { [name or "root"] = t }, indent, result, spaces)
end
return concat(result,"\n")
end
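+-- illustrative usage sketch:
+--
+-- print(table.toxml( { a = { "x", "y" }, b = "z" }, "root" ))
+--
+-- <?xml version='1.0' standalone='yes' ?>
+-- <root>
+--  <a>
+--   <entry n='1'>x</entry>
+--   <entry n='2'>y</entry>
+--  </a>
+--  <b>z</b>
+-- </root>
+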
+-- also experimental
+
+-- encapsulate(table,utilities.tables)
+-- encapsulate(table,utilities.tables,true)
+-- encapsulate(table,true)
+
+function tables.encapsulate(core,capsule,protect)
+ if type(capsule) ~= "table" then
+ protect = true
+ capsule = { }
+ end
+ for key, value in next, core do
+ if capsule[key] then
+            print(format("\ninvalid inheritance '%s' in '%s'",key,tostring(core)))
+ os.exit()
+ else
+ capsule[key] = value
+ end
+ end
+ if protect then
+ for key, value in next, core do
+ core[key] = nil
+ end
+ setmetatable(core, {
+ __index = capsule,
+ __newindex = function(t,key,value)
+ if capsule[key] then
+ print(format("\ninvalid overload '%s' in '%s'",key,tostring(core)))
+ os.exit()
+ else
+ rawset(t,key,value)
+ end
+ end
+ } )
+ end
+end
+
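+-- illustrative usage sketch of the protected variant:
+--
+-- local core, capsule = { x = 1 }, { }
+-- tables.encapsulate(core,capsule,true)
+-- print(core.x)  -- 1, resolved via the capsule's __index
+-- core.x = 2     -- reported as an invalid overload and the run is aborted
+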
end -- of closure
@@ -3886,8 +4314,8 @@ local storage = utilities.storage
function storage.mark(t)
if not t then
- texio.write_nl("fatal error: storage '%s' cannot be marked",t)
- os.exit()
+ texio.write_nl("fatal error: storage cannot be marked")
+ return -- os.exit()
end
local m = getmetatable(t)
if not m then
@@ -3916,8 +4344,8 @@ end
function storage.checked(t)
if not t then
- texio.write_nl("fatal error: storage '%s' has not been allocated",t)
- os.exit()
+ texio.write_nl("fatal error: storage has not been allocated")
+ return -- os.exit()
end
return t
end
@@ -3946,13 +4374,13 @@ end
-- table namespace ?
-local function f_empty () return "" end -- t,k
-local function f_self (t,k) t[k] = k return k end
-local function f_ignore() end -- t,k,v
+local function f_empty () return "" end -- t,k
+local function f_self (t,k) t[k] = k return k end
+local function f_ignore() end -- t,k,v
-local t_empty = { __index = empty }
-local t_self = { __index = self }
-local t_ignore = { __newindex = ignore }
+local t_empty = { __index = f_empty }
+local t_self = { __index = f_self }
+local t_ignore = { __newindex = f_ignore }
function table.setmetatableindex(t,f)
local m = getmetatable(t)
@@ -3973,6 +4401,7 @@ function table.setmetatableindex(t,f)
setmetatable(t,{ __index = f })
end
end
+ return t
end
function table.setmetatablenewindex(t,f)
@@ -3990,6 +4419,7 @@ function table.setmetatablenewindex(t,f)
setmetatable(t,{ __newindex = f })
end
end
+ return t
end
function table.setmetatablecall(t,f)
@@ -3999,6 +4429,7 @@ function table.setmetatablecall(t,f)
else
setmetatable(t,{ __call = f })
end
+ return t
end
function table.setmetatablekey(t,key,value)
@@ -4008,6 +4439,7 @@ function table.setmetatablekey(t,key,value)
setmetatable(t,m)
end
m[key] = value
+ return t
end
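+-- returning the table makes one liners possible, e.g. (illustrative sketch):
+--
+-- local cache = table.setmetatableindex({ }, function(t,k)
+--     local v = k .. k -- stands for some expensive computation
+--     t[k] = v
+--     return v
+-- end)
+--
+-- print(cache.oeps) -- "oepsoeps", computed once and then stored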
function table.getmetatablekey(t,key,value)
@@ -4176,14 +4608,29 @@ utilities = utilities or {}
utilities.lua = utilities.lua or { }
utilities.report = logs and logs.reporter("system") or print
-function utilities.lua.compile(luafile,lucfile,cleanup,strip) -- defaults: cleanup=false strip=true
+local function stupidcompile(luafile,lucfile)
+ local data = io.loaddata(luafile)
+ if data and data ~= "" then
+        local chunk = loadstring(data) -- string.dump wants a function, so compile the source first
+        data = chunk and string.dump(chunk)
+ if data and data ~= "" then
+ io.savedata(lucfile,data)
+ end
+ end
+end
+
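+-- illustrative usage sketch for the extended compiler below: with the new fifth
+-- argument a failing texluac run falls back on a plain (unstripped) bytecode dump
+--
+-- utilities.lua.compile("whatever.lua","whatever.luc",false,true,true)
+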
+function utilities.lua.compile(luafile,lucfile,cleanup,strip,fallback) -- defaults: cleanup=false strip=true
utilities.report("lua: compiling %s into %s",luafile,lucfile)
os.remove(lucfile)
local command = "-o " .. string.quoted(lucfile) .. " " .. string.quoted(luafile)
if strip ~= false then
command = "-s " .. command
end
- local done = os.spawn("texluac " .. command) == 0 or os.spawn("luac " .. command) == 0
+ local done = os.spawn("texluac " .. command) == 0 -- or os.spawn("luac " .. command) == 0
+ if not done and fallback then
+ utilities.report("lua: dumping %s into %s (unstripped)",luafile,lucfile)
+ stupidcompile(luafile,lucfile) -- maybe use the stripper we have elsewhere
+        cleanup = false -- better to see how much worse it is
+ end
if done and cleanup == true and lfs.isfile(lucfile) and lfs.isfile(luafile) then
utilities.report("lua: removing %s",luafile)
os.remove(luafile)
@@ -4210,9 +4657,9 @@ if not modules then modules = { } end modules ['util-prs'] = {
license = "see context related readme files"
}
-local P, R, V, C, Ct, Carg = lpeg.P, lpeg.R, lpeg.V, lpeg.C, lpeg.Ct, lpeg.Carg
+local P, R, V, C, Ct, Cs, Carg = lpeg.P, lpeg.R, lpeg.V, lpeg.C, lpeg.Ct, lpeg.Cs, lpeg.Carg
local lpegmatch = lpeg.match
-local concat, format, gmatch = table.concat, string.format, string.gmatch
+local concat, format, gmatch, find = table.concat, string.format, string.gmatch, string.find
local tostring, type, next = tostring, type, next
utilities = utilities or {}
@@ -4240,8 +4687,12 @@ local rbrace = P("}")
local nobrace = 1 - (lbrace+rbrace)
local nested = P { lbrace * (nobrace + V(1))^0 * rbrace }
local spaces = space^0
+local argument = Cs((lbrace/"") * ((nobrace + nested)^0) * (rbrace/""))
+local content = (1-P(-1))^0
-lpeg.patterns.nested = nested
+lpeg.patterns.nested = nested -- no capture
+lpeg.patterns.argument = argument -- argument after e.g. =
+lpeg.patterns.content  = content  -- rest after e.g. =
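+
+-- illustrative sketch:
+--
+-- lpeg.match(lpeg.patterns.argument,"{yes {a,b}}") -- "yes {a,b}" (outer braces stripped, nesting kept)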
local value = P(lbrace * C((nobrace + nested)^0) * rbrace) + C((nested + (1-comma))^0)
@@ -4322,9 +4773,15 @@ parsers.patterns.settings_to_array = pattern
-- we could use a weak table as cache
-function parsers.settings_to_array(str)
+function parsers.settings_to_array(str,strict)
if not str or str == "" then
return { }
+ elseif strict then
+ if find(str,"{") then
+ return lpegmatch(pattern,str)
+ else
+ return { str }
+ end
else
return lpegmatch(pattern,str)
end
@@ -4562,61 +5019,55 @@ local names = { }
-- one
local function hook()
- local f = getinfo(2,"f").func
- local n = getinfo(2,"Sn")
--- if n.what == "C" and n.name then print (n.namewhat .. ': ' .. n.name) end
+ local f = getinfo(2) -- "nS"
if f then
- local cf = counters[f]
- if cf == nil then
- counters[f] = 1
- names[f] = n
- else
- counters[f] = cf + 1
- end
- end
-end
-
-local function getname(func)
- local n = names[func]
- if n then
- if n.what == "C" then
- return n.name or '<anonymous>'
+ local n = "unknown"
+ if f.what == "C" then
+ n = f.name or '<anonymous>'
+ if not names[n] then
+ names[n] = format("%42s",n)
+ end
else
-- source short_src linedefined what name namewhat nups func
- local name = n.name or n.namewhat or n.what
- if not name or name == "" then name = "?" end
- return format("%s : %s : %s", n.short_src or "unknown source", n.linedefined or "--", name)
+ n = f.name or f.namewhat or f.what
+ if not n or n == "" then
+ n = "?"
+ end
+ if not names[n] then
+ names[n] = format("%42s : % 5i : %s",n,f.linedefined or 0,f.short_src or "unknown source")
+ end
end
- else
- return "unknown"
+ counters[n] = (counters[n] or 0) + 1
end
end
-function debugger.showstats(printer,threshold)
+function debugger.showstats(printer,threshold) -- hm, something has changed, rubbish now
printer = printer or texio.write or print
threshold = threshold or 0
local total, grandtotal, functions = 0, 0, 0
- printer("\n") -- ugly but ok
- -- table.sort(counters)
- for func, count in next, counters do
- if count > threshold then
- local name = getname(func)
- if not find(name,"for generator") then
- printer(format("%8i %s", count, name))
- total = total + count
- end
+ local dataset = { }
+ for name, count in next, counters do
+ dataset[#dataset+1] = { name, count }
+ end
+ table.sort(dataset,function(a,b) return a[2] == b[2] and b[1] > a[1] or a[2] > b[2] end)
+ for i=1,#dataset do
+ local d = dataset[i]
+ local name = d[1]
+ local count = d[2]
+ if count > threshold and not find(name,"for generator") then -- move up
+ printer(format("%8i %s\n", count, names[name]))
+ total = total + count
end
grandtotal = grandtotal + count
functions = functions + 1
end
- printer(format("functions: %s, total: %s, grand total: %s, threshold: %s\n", functions, total, grandtotal, threshold))
+ printer("\n")
+ printer(format("functions : % 10i\n", functions))
+ printer(format("total : % 10i\n", total))
+ printer(format("grand total: % 10i\n", grandtotal))
+ printer(format("threshold : % 10i\n", threshold))
end
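+
+-- illustrative usage sketch (enable and disable, defined elsewhere in this module,
+-- wrap debug.sethook around the hook above):
+--
+-- debugger.enable()
+-- ... run the code that is to be profiled ...
+-- debugger.disable()
+-- debugger.showstats(print,0)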
--- two
-
-
--- rest
-
function debugger.savestats(filename,threshold)
local f = io.open(filename,'w')
if f then
@@ -4638,6 +5089,7 @@ end
local is_node = node and node.is_node
+local is_lpeg = lpeg and lpeg.type
function inspect(i) -- global function
local ti = type(i)
@@ -4645,11 +5097,30 @@ function inspect(i) -- global function
table.print(i,"table")
elseif is_node and is_node(i) then
table.print(nodes.astable(i),tostring(i))
+ elseif is_lpeg and is_lpeg(i) then
+ lpeg.print(i)
else
print(tostring(i))
end
end
+-- from the lua book:
+
+function traceback()
+ local level = 1
+ while true do
+ local info = debug.getinfo(level, "Sl")
+ if not info then
+ break
+ elseif info.what == "C" then
+ print(format("%3i : C function",level))
+ else
+ print(format("%3i : [%s]:%d",level,info.short_src,info.currentline))
+ end
+ level = level + 1
+ end
+end
+
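+-- illustrative usage sketches:
+--
+-- inspect { a = 1, b = { 2 } }  -- serialized table dump
+-- inspect("just a string")      -- falls back on print
+-- traceback()                   -- one line per call stack level
+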
end -- of closure
@@ -4668,7 +5139,7 @@ if not modules then modules = { } end modules ['trac-inf'] = {
-- get warnings about assignments. This is more efficient than using rawset
-- and rawget.
-local format = string.format
+local format, lower = string.format, string.lower
local clock = os.gettimeofday or os.clock -- should go in environment
local write_nl = texio.write_nl
@@ -4770,10 +5241,10 @@ function statistics.show(reporter)
-- this code will move
local register = statistics.register
register("luatex banner", function()
- return string.lower(status.banner)
+ return lower(status.banner)
end)
register("control sequences", function()
- return format("%s of %s", status.cs_count, status.hash_size+status.hash_extra)
+ return format("%s of %s + %s", status.cs_count, status.hash_size,status.hash_extra)
end)
register("callbacks", function()
local total, indirect = status.callbacks or 0, status.indirect_callbacks or 0
@@ -4847,7 +5318,7 @@ end
function commands.elapsedtime(name)
stoptiming(name or "whatever")
- tex.sprint(elapsedtime(name or "whatever"))
+ context(elapsedtime(name or "whatever"))
end
@@ -4931,6 +5402,10 @@ local function set(t,what,newvalue)
if type(what) ~= "table" then
return
end
+ if not done then -- catch ... why not set?
+ done = { }
+ t.done = done
+ end
for w, value in next, what do
if value == "" then
value = newvalue
@@ -5167,6 +5642,8 @@ end
if texconfig then
+ -- this happens too late in ini mode but that is no problem
+
local function set(k,v)
v = tonumber(v)
if v then
@@ -5237,7 +5714,7 @@ local report, subreport, status, settarget, setformats, settranslations
local direct, subdirect, writer, pushtarget, poptarget
-if tex and tex.jobname or tex.formatname then
+if tex and (tex.jobname or tex.formatname) then
local valueiskey = { __index = function(t,k) t[k] = k return k end } -- will be helper
@@ -5252,6 +5729,10 @@ if tex and tex.jobname or tex.formatname then
write_nl(target,...)
end
+ newline = function()
+ write_nl(target,"\n")
+ end
+
report = function(a,b,c,...)
if c then
write_nl(target,format("%-15s > %s\n",translations[a],format(formats[b],c,...)))
@@ -5357,6 +5838,10 @@ else
writer = write_nl
+ newline = function()
+ write_nl("\n")
+ end
+
report = function(a,b,c,...)
if c then
write_nl(format("%-15s | %s",a,format(b,c,...)))
@@ -5416,6 +5901,7 @@ logs.settranslations = settranslations
logs.direct = direct
logs.subdirect = subdirect
logs.writer = writer
+logs.newline = newline
-- installer
@@ -5585,21 +6071,58 @@ local real, user, sub
function logs.start_page_number()
real, user, sub = texcount.realpageno, texcount.userpageno, texcount.subpageno
+-- real, user, sub = 0, 0, 0
end
-function logs.stop_page_number()
- if real > 0 then
- if user > 0 then
- if sub > 0 then
- report_pages("flushing realpage %s, userpage %s, subpage %s",real,user,sub)
+local timing = false
+local starttime = nil
+local lasttime = nil
+
+trackers.register("pages.timing", function(v) -- only for myself (diagnostics)
+ starttime = os.clock()
+ timing = true
+end)
+
+function logs.stop_page_number() -- the first page can include the initialization so we omit it from the average
+ if timing then
+ local elapsed, average
+ local stoptime = os.clock()
+ if not lasttime or real < 2 then
+ elapsed = stoptime
+ average = stoptime
+ starttime = stoptime
+ else
+ elapsed = stoptime - lasttime
+ average = (stoptime - starttime) / (real - 1)
+ end
+ lasttime = stoptime
+ if real > 0 then
+ if user > 0 then
+ if sub > 0 then
+ report_pages("flushing realpage %s, userpage %s, subpage %s, time %0.04f / %0.04f",real,user,sub,elapsed,average)
+ else
+ report_pages("flushing realpage %s, userpage %s, time %0.04f / %0.04f",real,user,elapsed,average)
+ end
else
- report_pages("flushing realpage %s, userpage %s",real,user)
+ report_pages("flushing realpage %s, time %0.04f / %0.04f",real,elapsed,average)
end
else
- report_pages("flushing realpage %s",real)
+ report_pages("flushing page, time %0.04f / %0.04f",elapsed,average)
end
else
- report_pages("flushing page")
+ if real > 0 then
+ if user > 0 then
+ if sub > 0 then
+ report_pages("flushing realpage %s, userpage %s, subpage %s",real,user,sub)
+ else
+ report_pages("flushing realpage %s, userpage %s",real,user)
+ end
+ else
+ report_pages("flushing realpage %s",real)
+ end
+ else
+ report_pages("flushing page")
+ end
end
logs.flush()
end
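+
+-- illustrative sketch: the timings only show up when the (diagnostic) tracker is on
+--
+-- trackers.enable("pages.timing")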
@@ -5681,6 +6204,13 @@ local function reportbanner(t)
end
end
+local function reportversion(t)
+ local banner = t.banner
+ if banner then
+ t.report(banner)
+ end
+end
+
local function reporthelp(t,...)
local helpinfo = t.helpinfo
if type(helpinfo) == "string" then
@@ -5707,6 +6237,7 @@ function logs.application(t)
t.report = logs.reporter(t.name)
t.help = function(...) reportbanner(t) ; reporthelp(t,...) ; reportinfo(t) end
t.identify = function() reportbanner(t) end
+ t.version = function() reportversion(t) end
return t
end
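+
+-- illustrative usage sketch (the name, banner and helpinfo are made-up values):
+--
+-- local application = logs.application {
+--     name     = "mtx-whatever",
+--     banner   = "whatever tool 1.00",
+--     helpinfo = "--run   do something",
+-- }
+--
+-- application.version()  -- reports just the banner
+-- application.help()     -- reports banner, help info and additional info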
@@ -5772,6 +6303,11 @@ else
end
end
+-- do we still need io.flush then?
+
+io.stdout:setvbuf('no')
+io.stderr:setvbuf('no')
+
end -- of closure
@@ -6277,6 +6813,10 @@ if not modules then modules = { } end modules ['lxml-tab'] = {
-- stripping spaces from e.g. cont-en.xml saves .2 sec runtime so it's not worth the
-- trouble
+-- todo: when serializing optionally remap named entities to hex (if known in char-ent.lua)
+-- maybe when letter -> utf, else name .. then we need an option to the serializer .. a bit
+-- of work so we delay this till we cleanup
+
local trace_entities = false trackers.register("xml.entities", function(v) trace_entities = v end)
local report_xml = logs and logs.reporter("xml","core") or function(...) print(format(...)) end
@@ -6301,10 +6841,11 @@ xml = xml or { }
local xml = xml
+local utf = unicode.utf8
local concat, remove, insert = table.concat, table.remove, table.insert
local type, next, setmetatable, getmetatable, tonumber = type, next, setmetatable, getmetatable, tonumber
local format, lower, find, match, gsub = string.format, string.lower, string.find, string.match, string.gsub
-local utfchar = unicode.utf8.char
+local utfchar, utffind, utfgsub = utf.char, utf.find, utf.gsub
local lpegmatch = lpeg.match
local P, S, R, C, V, C, Cs = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.V, lpeg.C, lpeg.Cs
@@ -6417,9 +6958,22 @@ element.</p>
local nsremap, resolvens = xml.xmlns, xml.resolvens
-local stack, top, dt, at, xmlns, errorstr, entities = { }, { }, { }, { }, { }, nil, { }
-local strip, cleanup, utfize, resolve, resolve_predefined, unify_predefined = false, false, false, false, false, false
-local dcache, hcache, acache = { }, { }, { }
+local stack = { }
+local top = { }
+local dt = { }
+local at = { }
+local xmlns = { }
+local errorstr = nil
+local entities = { }
+local strip = false
+local cleanup = false
+local utfize = false
+local resolve_predefined = false
+local unify_predefined = false
+
+local dcache = { }
+local hcache = { }
+local acache = { }
local mt = { }
@@ -6585,7 +7139,7 @@ local predefined_unified = {
[42] = "&quot;",
[47] = "&apos;",
[74] = "&lt;",
- [76] = "&gr;",
+ [76] = "&gt;",
}
local predefined_simplified = {
@@ -6596,6 +7150,58 @@ local predefined_simplified = {
[76] = ">", gt = ">",
}
+local nofprivates = 0xF0000 -- shared but seldom used
+
+local privates_u = { -- unescaped
+ [ [[&]] ] = "&amp;",
+ [ [["]] ] = "&quot;",
+ [ [[']] ] = "&apos;",
+ [ [[<]] ] = "&lt;",
+ [ [[>]] ] = "&gt;",
+}
+
+local privates_p = {
+}
+
+local privates_n = {
+ -- keeps track of defined ones
+}
+
+local function escaped(s)
+ if s == "" then
+ return ""
+ else -- if utffind(s,privates_u) then
+ return (utfgsub(s,".",privates_u))
+ -- else
+ -- return s
+ end
+end
+
+local function unescaped(s)
+ local p = privates_n[s]
+ if not p then
+ nofprivates = nofprivates + 1
+ p = utfchar(nofprivates)
+ privates_n[s] = p
+ s = "&" .. s .. ";" -- todo: use char-ent to map to hex
+ privates_u[p] = s
+ privates_p[p] = s
+ end
+ return p
+end
+
+local function unprivatized(s,resolve)
+ if s == "" then
+ return ""
+ else
+ return (utfgsub(s,".",privates_p))
+ end
+end
+
+xml.privatetoken = unescaped
+xml.unprivatized = unprivatized
+xml.privatecodes = privates_n
+
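+-- illustrative sketch of the round trip: an unknown entity becomes a character in
+-- the private use planes while loading and is mapped back when serializing
+--
+-- local p = xml.privatetoken("foo")  -- some private utf character
+-- xml.unprivatized(p)                -- "&foo;"
+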
local function handle_hex_entity(str)
local h = hcache[str]
if not h then
@@ -6637,7 +7243,7 @@ local function handle_dec_entity(str)
if not n then
report_xml("utfize, ignoring dec entity &#%s;",str)
elseif trace_entities then
- report_xml("utfize, converting dec entity &#%s; into %s",str,h)
+ report_xml("utfize, converting dec entity &#%s; into %s",str,d)
end
else
if trace_entities then
@@ -6658,34 +7264,44 @@ local function handle_any_entity(str)
if not a then
a = resolve_predefined and predefined_simplified[str]
if a then
- -- one of the predefined
- elseif type(resolve) == "function" then
- a = resolve(str) or entities[str]
- else
- a = entities[str]
- end
- if a then
if trace_entities then
- report_xml("resolved entity &%s; -> %s (internal)",str,a)
+ report_xml("resolved entity &%s; -> %s (predefined)",str,a)
end
- a = lpegmatch(parsedentity,a) or a
else
- local unknown_any_entity = placeholders.unknown_any_entity
- if unknown_any_entity then
- a = unknown_any_entity(str) or ""
+ if type(resolve) == "function" then
+ a = resolve(str) or entities[str]
+ else
+ a = entities[str]
end
if a then
+ if type(a) == "function" then
+ if trace_entities then
+ report_xml("expanding entity &%s; (function)",str)
+ end
+ a = a(str) or ""
+ end
+ a = lpegmatch(parsedentity,a) or a -- for nested
if trace_entities then
- report_xml("resolved entity &%s; -> %s (external)",str,a)
+ report_xml("resolved entity &%s; -> %s (internal)",str,a)
end
else
- if trace_entities then
- report_xml("keeping entity &%s;",str)
+ local unknown_any_entity = placeholders.unknown_any_entity
+ if unknown_any_entity then
+ a = unknown_any_entity(str) or ""
end
- if str == "" then
- a = "&error;"
+ if a then
+ if trace_entities then
+ report_xml("resolved entity &%s; -> %s (external)",str,a)
+ end
else
- a = "&" .. str .. ";"
+ if trace_entities then
+ report_xml("keeping entity &%s;",str)
+ end
+ if str == "" then
+ a = "&error;"
+ else
+ a = "&" .. str .. ";"
+ end
end
end
end
@@ -6700,18 +7316,25 @@ local function handle_any_entity(str)
else
local a = acache[str]
if not a then
- if trace_entities then
- report_xml("found entity &%s;",str)
- end
a = resolve_predefined and predefined_simplified[str]
if a then
-- one of the predefined
acache[str] = a
+ if trace_entities then
+ report_xml("entity &%s; becomes %s",str,tostring(a))
+ end
elseif str == "" then
+ if trace_entities then
+ report_xml("invalid entity &%s;",str)
+ end
a = "&error;"
acache[str] = a
else
- a = "&" .. str .. ";"
+ if trace_entities then
+ report_xml("entity &%s; is made private",str)
+ end
+ -- a = "&" .. str .. ";"
+ a = unescaped(str)
acache[str] = a
end
end
@@ -6761,7 +7384,7 @@ local value = (squote * Cs((entity + (1 - squote))^0) * squote) + (dq
local endofattributes = slash * close + close -- recovery of flaky html
local whatever = space * name * optionalspace * equal
-local wrongvalue = C(P(1-whatever-close)^1 + P(1-close)^1) / attribute_value_error
+----- wrongvalue = C(P(1-whatever-close)^1 + P(1-close)^1) / attribute_value_error
----- wrongvalue = C(P(1-whatever-endofattributes)^1 + P(1-endofattributes)^1) / attribute_value_error
----- wrongvalue = C(P(1-space-endofattributes)^1) / attribute_value_error
local wrongvalue = Cs(P(entity + (1-space-endofattributes))^1) / attribute_value_error
@@ -6796,6 +7419,8 @@ local function normalentity(k,v ) entities[k] = v end
local function systementity(k,v,n) entities[k] = v end
local function publicentity(k,v,n) entities[k] = v end
+-- todo: separate dtd parser
+
local begindoctype = open * P("!DOCTYPE")
local enddoctype = close
local beginset = P("[")
@@ -6803,17 +7428,22 @@ local endset = P("]")
local doctypename = C((1-somespace-close)^0)
local elementdoctype = optionalspace * P("<!ELEMENT") * (1-close)^0 * close
+local basiccomment = begincomment * ((1 - endcomment)^0) * endcomment
+
local normalentitytype = (doctypename * somespace * value)/normalentity
local publicentitytype = (doctypename * somespace * P("PUBLIC") * somespace * value)/publicentity
local systementitytype = (doctypename * somespace * P("SYSTEM") * somespace * value * somespace * P("NDATA") * somespace * doctypename)/systementity
local entitydoctype = optionalspace * P("<!ENTITY") * somespace * (systementitytype + publicentitytype + normalentitytype) * optionalspace * close
-local doctypeset = beginset * optionalspace * P(elementdoctype + entitydoctype + space)^0 * optionalspace * endset
+-- we accept comments in doctypes
+
+local doctypeset = beginset * optionalspace * P(elementdoctype + entitydoctype + basiccomment + space)^0 * optionalspace * endset
local definitiondoctype= doctypename * somespace * doctypeset
local publicdoctype = doctypename * somespace * P("PUBLIC") * somespace * value * somespace * value * somespace * doctypeset
local systemdoctype = doctypename * somespace * P("SYSTEM") * somespace * value * somespace * doctypeset
local simpledoctype = (1-close)^1 -- * balanced^0
local somedoctype = C((somespace * (publicdoctype + systemdoctype + definitiondoctype + simpledoctype) * optionalspace)^0)
+local somedoctype = C((somespace * (publicdoctype + systemdoctype + definitiondoctype + simpledoctype) * optionalspace)^0)
local instruction = (spacing * begininstruction * someinstruction * endinstruction) / function(...) add_special("@pi@",...) end
local comment = (spacing * begincomment * somecomment * endcomment ) / function(...) add_special("@cm@",...) end
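+
+-- illustrative sketch: a comment inside the internal subset no longer trips up the
+-- doctype parser
+--
+-- local x = xml.convert([[<!DOCTYPE r [ <!-- a note --> <!ENTITY e "x"> ]><r/>]])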
@@ -6845,17 +7475,30 @@ local grammar_unparsed_text = P { "preamble",
children = unparsedtext + V("parent") + emptyelement + comment + cdata + instruction,
}
--- maybe we will add settinsg to result as well
+-- maybe we will add settings to result as well
-local function xmlconvert(data, settings)
- settings = settings or { } -- no_root strip_cm_and_dt given_entities parent_root error_handler
- strip = settings.strip_cm_and_dt
- utfize = settings.utfize_entities
- resolve = settings.resolve_entities
+local function _xmlconvert_(data, settings)
+ settings = settings or { } -- no_root strip_cm_and_dt given_entities parent_root error_handler
+ --
+ strip = settings.strip_cm_and_dt
+ utfize = settings.utfize_entities
+ resolve = settings.resolve_entities
resolve_predefined = settings.resolve_predefined_entities -- in case we have escaped entities
- unify_predefined = settings.unify_predefined_entities -- &#038; -> &amp;
- cleanup = settings.text_cleanup
- stack, top, at, xmlns, errorstr, entities = { }, { }, { }, { }, nil, settings.entities or { }
+ unify_predefined = settings.unify_predefined_entities -- &#038; -> &amp;
+ cleanup = settings.text_cleanup
+ entities = settings.entities or { }
+ --
+ if utfize == nil then
+ settings.utfize_entities = true
+ utfize = true
+ end
+ if resolve_predefined == nil then
+ settings.resolve_predefined_entities = true
+ resolve_predefined = true
+ end
+ --
+ --
+ stack, top, at, xmlns, errorstr = { }, { }, { }, { }, nil
acache, hcache, dcache = { }, { }, { } -- not stored
reported_attribute_errors = { }
if settings.parent_root then
@@ -6893,7 +7536,7 @@ local function xmlconvert(data, settings)
else
errorhandler = errorhandler or xml.errorhandler
if errorhandler then
- xml.errorhandler("load",errorstr)
+ xml.errorhandler(format("load error: %s",errorstr))
end
end
else
@@ -6907,7 +7550,7 @@ local function xmlconvert(data, settings)
local v = rdt[k]
if type(v) == "table" and not v.special then -- always table -)
result.ri = k -- rootindex
-v.__p__ = result -- new, experiment, else we cannot go back to settings, we need to test this !
+ v.__p__ = result -- new, experiment, else we cannot go back to settings, we need to test this !
break
end
end
@@ -6915,16 +7558,42 @@ v.__p__ = result -- new, experiment, else we cannot go back to settings, we nee
if errorstr and errorstr ~= "" then
result.error = true
end
+ result.statistics = {
+ entities = {
+ decimals = dcache,
+ hexadecimals = hcache,
+ names = acache,
+ }
+ }
+ strip, utfize, resolve, resolve_predefined = nil, nil, nil, nil
+ unify_predefined, cleanup, entities = nil, nil, nil
+ stack, top, at, xmlns, errorstr = nil, nil, nil, nil, nil
+ acache, hcache, dcache = nil, nil, nil
+ reported_attribute_errors, mt, errorhandler = nil, nil, nil
return result
end
+-- Because we can have a crash (stack issues) with faulty xml, we wrap this one
+-- in a protector:
+
+function xmlconvert(data,settings)
+ local ok, result = pcall(function() return _xmlconvert_(data,settings) end)
+ if ok then
+ return result
+ else
+ return _xmlconvert_("")
+ end
+end
+
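+-- illustrative sketch: even input that makes the parser stumble now returns a
+-- (possibly empty) root instead of raising an error
+--
+-- local x = xml.convert(str) -- str being whatever source came in
+-- if x.error then
+--     -- deal with it
+-- end
+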
xml.convert = xmlconvert
-function xml.inheritedconvert(data,xmldata)
+function xml.inheritedconvert(data,xmldata) -- xmldata is parent
local settings = xmldata.settings
- settings.parent_root = xmldata -- to be tested
+ if settings then
+ settings.parent_root = xmldata -- to be tested
+ end
-- settings.no_root = true
- local xc = xmlconvert(data,settings)
+ local xc = xmlconvert(data,settings) -- hm, we might need to locate settings
-- xc.settings = nil
-- xc.entities = nil
-- xc.special = nil
@@ -6953,7 +7622,7 @@ function xml.is_valid(root)
return root and not root.error
end
-xml.errorhandler = report
+xml.errorhandler = report_xml
--[[ldx--
<p>We cannot load an <l n='lpeg'/> from a filehandle so we need to load
@@ -7032,18 +7701,15 @@ alternative.</p>
function xml.checkbom(root) -- can be made faster
if root.ri then
- local dt, found = root.dt, false
+ local dt = root.dt
for k=1,#dt do
local v = dt[k]
if type(v) == "table" and v.special and v.tg == "@pi@" and find(v.dt[1],"xml.*version=") then
- found = true
- break
+ return
end
end
- if not found then
- insert(dt, 1, { special=true, ns="", tg="@pi@", dt = { "xml version='1.0' standalone='yes'"} } )
- insert(dt, 2, "\n" )
- end
+ insert(dt, 1, { special=true, ns="", tg="@pi@", dt = { "xml version='1.0' standalone='yes'"} } )
+ insert(dt, 2, "\n" )
end
end
@@ -7054,14 +7720,14 @@ and then handle the lot.</p>
-- new experimental reorganized serialize
-local function verbose_element(e,handlers)
+local function verbose_element(e,handlers) -- options
local handle = handlers.handle
local serialize = handlers.serialize
local ens, etg, eat, edt, ern = e.ns, e.tg, e.at, e.dt, e.rn
local ats = eat and next(eat) and { }
if ats then
for k,v in next, eat do
- ats[#ats+1] = format('%s=%q',k,v)
+ ats[#ats+1] = format('%s=%q',k,escaped(v))
end
end
if ern and trace_entities and ern ~= ens then
@@ -7077,7 +7743,7 @@ local function verbose_element(e,handlers)
for i=1,#edt do
local e = edt[i]
if type(e) == "string" then
- handle(e)
+ handle(escaped(e))
else
serialize(e,handlers)
end
@@ -7098,11 +7764,11 @@ local function verbose_element(e,handlers)
handle("<",etg,">")
end
for i=1,#edt do
- local ei = edt[i]
- if type(ei) == "string" then
- handle(ei)
+ local e = edt[i]
+ if type(e) == "string" then
+ handle(escaped(e)) -- option: hexify escaped entities
else
- serialize(ei,handlers)
+ serialize(e,handlers)
end
end
handle("</",etg,">")
@@ -7137,7 +7803,7 @@ local function verbose_root(e,handlers)
end
local function verbose_text(e,handlers)
- handlers.handle(e)
+ handlers.handle(escaped(e))
end
local function verbose_document(e,handlers)
@@ -7265,20 +7931,33 @@ local result
local xmlfilehandler = newhandlers {
name = "file",
- initialize = function(name) result = io.open(name,"wb") return result end,
- finalize = function() result:close() return true end,
- handle = function(...) result:write(...) end,
+ initialize = function(name)
+ result = io.open(name,"wb")
+ return result
+ end,
+ finalize = function()
+ result:close()
+ return true
+ end,
+ handle = function(...)
+ result:write(...)
+ end,
}
-- no checking on writeability here but not faster either
--
-- local xmlfilehandler = newhandlers {
--- initialize = function(name) io.output(name,"wb") return true end,
--- finalize = function() io.close() return true end,
+-- initialize = function(name)
+-- io.output(name,"wb")
+-- return true
+-- end,
+-- finalize = function()
+-- io.close()
+-- return true
+-- end,
-- handle = io.write,
-- }
-
function xml.save(root,name)
serialize(root,xmlfilehandler,name)
end
@@ -7287,28 +7966,34 @@ local result
local xmlstringhandler = newhandlers {
name = "string",
- initialize = function() result = { } return result end,
- finalize = function() return concat(result) end,
- handle = function(...) result[#result+1] = concat { ... } end
+ initialize = function()
+ result = { }
+ return result
+ end,
+ finalize = function()
+ return concat(result)
+ end,
+ handle = function(...)
+ result[#result+1] = concat { ... }
+ end,
}
local function xmltostring(root) -- 25% overhead due to collecting
- if root then
- if type(root) == 'string' then
- return root
- else -- if next(root) then -- next is faster than type (and >0 test)
- return serialize(root,xmlstringhandler) or ""
- end
+ if not root then
+ return ""
+ elseif type(root) == 'string' then
+ return root
+ else -- if next(root) then -- next is faster than type (and >0 test)
+ return serialize(root,xmlstringhandler) or ""
end
- return ""
end
-local function xmltext(root) -- inline
+local function __tostring(root) -- inline
return (root and xmltostring(root)) or ""
end
initialize_mt = function(root) -- redefinition
- mt = { __tostring = xmltext, __index = root }
+ mt = { __tostring = __tostring, __index = root }
end
xml.defaulthandlers = handlers
@@ -7428,7 +8113,7 @@ xml.tocdata(e,"error")
--ldx]]--
function xml.tocdata(e,wrapper)
- local whatever = xmltostring(e.dt)
+ local whatever = type(e) == "table" and xmltostring(e.dt) or e or ""
if wrapper then
whatever = format("<%s>%s</%s>",wrapper,whatever,wrapper)
end
@@ -7620,8 +8305,8 @@ apply_axis['child'] = function(list)
c = c + 1
collected[c] = dk
dk.ni = k -- refresh
- en = en + 1
- dk.ei = en
+ en = en + 1
+ dk.ei = en
end
end
ll.en = en
@@ -7649,6 +8334,7 @@ local function collect(list,collected,c)
end
return c
end
+
apply_axis['descendant'] = function(list)
local collected, c = { }, 0
for l=1,#list do
@@ -7940,6 +8626,12 @@ local lp_or = P("|") / " or "
local lp_and = P("&") / " and "
local lp_builtin = P (
+ P("text") / "(ll.dt[1] or '')" + -- fragile
+ P("content") / "ll.dt" +
+ -- P("name") / "(ll.ns~='' and ll.ns..':'..ll.tg)" +
+ P("name") / "((ll.ns~='' and ll.ns..':'..ll.tg) or ll.tg)" +
+ P("tag") / "ll.tg" +
+ P("position") / "l" + -- is element in finalizer
P("firstindex") / "1" +
P("lastindex") / "(#ll.__p__.dt or 1)" +
P("firstelement") / "1" +
@@ -7947,15 +8639,11 @@ local lp_builtin = P (
P("first") / "1" +
P("last") / "#list" +
P("rootposition") / "order" +
- P("position") / "l" + -- is element in finalizer
P("order") / "order" +
P("element") / "(ll.ei or 1)" +
P("index") / "(ll.ni or 1)" +
P("match") / "(ll.mi or 1)" +
- P("text") / "(ll.dt[1] or '')" +
- -- P("name") / "(ll.ns~='' and ll.ns..':'..ll.tg)" +
- P("name") / "((ll.ns~='' and ll.ns..':'..ll.tg) or ll.tg)" +
- P("tag") / "ll.tg" +
+ -- P("namespace") / "ll.ns" +
P("ns") / "ll.ns"
) * ((spaces * P("(") * spaces * P(")"))/"")
@@ -7979,11 +8667,11 @@ local lp_function = C(R("az","AZ","__")^1) * P("(") / function(t) -- todo: bett
end
end
-local lparent = lpeg.P("(")
-local rparent = lpeg.P(")")
+local lparent = P("(")
+local rparent = P(")")
local noparent = 1 - (lparent+rparent)
-local nested = lpeg.P{lparent * (noparent + lpeg.V(1))^0 * rparent}
-local value = lpeg.P(lparent * lpeg.C((noparent + nested)^0) * rparent) -- lpeg.P{"("*C(((1-S("()"))+V(1))^0)*")"}
+local nested = P{lparent * (noparent + V(1))^0 * rparent}
+local value = P(lparent * C((noparent + nested)^0) * rparent) -- P{"("*C(((1-S("()"))+V(1))^0)*")"}
local lp_child = Cc("expr.child(ll,'") * R("az","AZ","--","__")^1 * Cc("')")
local lp_number = S("+-") * R("09")^1
@@ -8135,7 +8823,10 @@ local special_1 = P("*") * Cc(register_auto_descendant) * Cc(register_all_nodes
local special_2 = P("/") * Cc(register_auto_self)
local special_3 = P("") * Cc(register_auto_self)
-local pathparser = Ct { "patterns", -- can be made a bit faster by moving pattern outside
+local no_nextcolon = P(-1) + #(1-P(":")) -- newer lpeg needs the P(-1)
+local no_nextlparent = P(-1) + #(1-P("(")) -- newer lpeg needs the P(-1)
+
+local pathparser = Ct { "patterns", -- can be made a bit faster by moving some patterns outside
patterns = spaces * V("protocol") * spaces * (
( V("special") * spaces * P(-1) ) +
@@ -8164,10 +8855,8 @@ local pathparser = Ct { "patterns", -- can be made a bit faster by moving patter
shortcuts = V("shortcuts_a") * (spaces * "/" * spaces * V("shortcuts_a"))^0,
s_descendant_or_self = (P("***/") + P("/")) * Cc(register_descendant_or_self), --- *** is a bonus
- -- s_descendant_or_self = P("/") * Cc(register_descendant_or_self),
s_descendant = P("**") * Cc(register_descendant),
- s_child = P("*") * #(1-P(":")) * Cc(register_child ),
--- s_child = P("*") * #(P("/")+P(-1)) * Cc(register_child ),
+ s_child = P("*") * no_nextcolon * Cc(register_child ),
s_parent = P("..") * Cc(register_parent ),
s_self = P("." ) * Cc(register_self ),
s_root = P("^^") * Cc(register_root ),
@@ -8194,13 +8883,13 @@ local pathparser = Ct { "patterns", -- can be made a bit faster by moving patter
expressions = expression / register_expression,
letters = R("az")^1,
- name = (1-lpeg.S("/[]()|:*!"))^1,
+ name = (1-S("/[]()|:*!"))^1, -- make inline
negate = P("!") * Cc(false),
nodefunction = V("negate") + P("not") * Cc(false) + Cc(true),
nodetest = V("negate") + Cc(true),
nodename = (V("negate") + Cc(true)) * spaces * ((V("wildnodename") * P(":") * V("wildnodename")) + (Cc(false) * V("wildnodename"))),
- wildnodename = (C(V("name")) + P("*") * Cc(false)) * #(1-P("(")),
+ wildnodename = (C(V("name")) + P("*") * Cc(false)) * no_nextlparent,
nodeset = spaces * Ct(V("nodename") * (spaces * P("|") * spaces * V("nodename"))^0) * spaces,
finalizer = (Cb("protocol") * P("/")^-1 * C(V("name")) * arguments * P(-1)) / register_finalizer,
@@ -8364,7 +9053,7 @@ local function profiled_apply(list,parsed,nofparsed,order)
elseif kind == "expression" then
collected = apply_expression(collected,pi.evaluator,order)
elseif kind == "finalizer" then
- collected = pi.finalizer(collected)
+ collected = pi.finalizer(collected) -- no check on # here
p.matched = p.matched + 1
p.finalized = p.finalized + 1
return collected
@@ -8497,9 +9186,9 @@ end
expressions.child = function(e,pattern)
return applylpath(e,pattern) -- todo: cache
end
-expressions.count = function(e,pattern)
+expressions.count = function(e,pattern) -- what if pattern == empty or nil
local collected = applylpath(e,pattern) -- todo: cache
- return (collected and #collected) or 0
+ return pattern and (collected and #collected) or 0
end
-- external
@@ -8508,7 +9197,7 @@ expressions.oneof = function(s,...) -- slow
local t = {...} for i=1,#t do if s == t[i] then return true end end return false
end
expressions.error = function(str)
- xml.errorhandler("unknown function in lpath expression",tostring(str or "?"))
+ xml.errorhandler(format("unknown function in lpath expression: %s",tostring(str or "?")))
return false
end
expressions.undefined = function(s)
@@ -8534,6 +9223,23 @@ expressions.lower = lower
expressions.number = tonumber
expressions.boolean = toboolean
+function expressions.contains(str,pattern)
+ local t = type(str)
+ if t == "string" then
+ if find(str,pattern) then
+ return true
+ end
+ elseif t == "table" then
+ for i=1,#str do
+ local d = str[i]
+ if type(d) == "string" and find(d,pattern) then
+ return true
+ end
+ end
+ end
+ return false
+end
+
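+-- illustrative sketch of its use in an lpath expression (root being a loaded tree):
+--
+-- for e in xml.collected(root,"/a/b[contains(text(),'oeps')]") do
+--     -- elements whose text contains 'oeps'
+-- end
+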
-- user interface
local function traverse(root,pattern,handle)
@@ -8856,10 +9562,12 @@ local xml = xml
local xmlconvert, xmlcopy, xmlname = xml.convert, xml.copy, xml.name
local xmlinheritedconvert = xml.inheritedconvert
local xmlapplylpath = xml.applylpath
+local xmlfilter = xml.filter
local type, setmetatable, getmetatable = type, setmetatable, getmetatable
-local insert, remove, fastcopy = table.insert, table.remove, table.fastcopy
-local gmatch, gsub = string.gmatch, string.gsub
+local insert, remove, fastcopy, concat = table.insert, table.remove, table.fastcopy, table.concat
+local gmatch, gsub, format = string.gmatch, string.gsub, string.format
+local utfbyte = utf.byte
local function report(what,pattern,c,e)
report_xml("%s element '%s' (root: '%s', position: %s, index: %s, pattern: %s)",what,xmlname(e),xmlname(e.__p__),c,e.ni,pattern)
@@ -9004,7 +9712,7 @@ local function xmltoelement(whatever,root)
end
local element
if type(whatever) == "string" then
- element = xmlinheritedconvert(whatever,root)
+ element = xmlinheritedconvert(whatever,root) -- beware, not really a root
else
element = whatever -- we assume a table
end
@@ -9031,18 +9739,30 @@ local function copiedelement(element,newparent)
end
function xml.delete(root,pattern)
- local collected = xmlapplylpath(root,pattern)
- if collected then
- for c=1,#collected do
- local e = collected[c]
- local p = e.__p__
- if p then
- if trace_manipulations then
- report('deleting',pattern,c,e)
+ if not pattern or pattern == "" then
+ local p = root.__p__
+ if p then
+ if trace_manipulations then
+                report('deleting',"--",1,root)
+ end
+ local d = p.dt
+ remove(d,root.ni)
+ redo_ni(d) -- can be made faster and inlined
+ end
+ else
+ local collected = xmlapplylpath(root,pattern)
+ if collected then
+ for c=1,#collected do
+ local e = collected[c]
+ local p = e.__p__
+ if p then
+ if trace_manipulations then
+ report('deleting',pattern,c,e)
+ end
+ local d = p.dt
+ remove(d,e.ni)
+ redo_ni(d) -- can be made faster and inlined
end
- local d = p.dt
- remove(d,e.ni)
- redo_ni(d) -- can be made faster and inlined
end
end
end
@@ -9105,32 +9825,39 @@ end
local function inject_element(root,pattern,whatever,prepend)
local element = root and xmltoelement(whatever,root)
local collected = element and xmlapplylpath(root,pattern)
- if collected then
- for c=1,#collected do
- local e = collected[c]
- local r = e.__p__
- local d, k, rri = r.dt, e.ni, r.ri
- local edt = (rri and d[rri].dt) or (d and d[k] and d[k].dt)
- if edt then
- local be, af
- local cp = copiedelement(element,e)
- if prepend then
- be, af = cp, edt
- else
- be, af = edt, cp
- end
- local bn = #be
- for i=1,#af do
- bn = bn + 1
- be[bn] = af[i]
- end
- if rri then
- r.dt[rri].dt = be
- else
- d[k].dt = be
- end
- redo_ni(d)
+ local function inject_e(e)
+ local r = e.__p__
+ local d, k, rri = r.dt, e.ni, r.ri
+ local edt = (rri and d[rri].dt) or (d and d[k] and d[k].dt)
+ if edt then
+ local be, af
+ local cp = copiedelement(element,e)
+ if prepend then
+ be, af = cp, edt
+ else
+ be, af = edt, cp
end
+ local bn = #be
+ for i=1,#af do
+ bn = bn + 1
+ be[bn] = af[i]
+ end
+ if rri then
+ r.dt[rri].dt = be
+ else
+ d[k].dt = be
+ end
+ redo_ni(d)
+ end
+ end
+ if not collected then
+ -- nothing
+ elseif collected.tg then
+ -- first or so
+ inject_e(collected)
+ else
+ for c=1,#collected do
+ inject_e(collected[c])
end
end
end
@@ -9138,16 +9865,23 @@ end
local function insert_element(root,pattern,whatever,before) -- todo: element als functie
local element = root and xmltoelement(whatever,root)
local collected = element and xmlapplylpath(root,pattern)
- if collected then
+ local function insert_e(e)
+ local r = e.__p__
+ local d, k = r.dt, e.ni
+ if not before then
+ k = k + 1
+ end
+ insert(d,k,copiedelement(element,r))
+ redo_ni(d)
+ end
+ if not collected then
+ -- nothing
+ elseif collected.tg then
+ -- first or so
+ insert_e(collected)
+ else
for c=1,#collected do
- local e = collected[c]
- local r = e.__p__
- local d, k = r.dt, e.ni
- if not before then
- k = k + 1
- end
- insert(d,k,copiedelement(element,r))
- redo_ni(d)
+ insert_e(collected[c])
end
end
end
@@ -9346,6 +10080,75 @@ function xml.remapname(root, pattern, newtg, newns, newrn)
end
--[[ldx--
+<p>Helper (for q2p).</p>
+--ldx]]--
+
+function xml.cdatatotext(e)
+ local dt = e.dt
+ if #dt == 1 then
+ local first = dt[1]
+ if first.tg == "@cd@" then
+ e.dt = first.dt
+ end
+ else
+ -- maybe option
+ end
+end
+
+xml.builtinentities = table.tohash { "amp", "quot", "apos", "lt", "gt" } -- used often so share
+
+local entities = characters and characters.entities or nil
+local builtinentities = xml.builtinentities
+
+function xml.addentitiesdoctype(root,option) -- we could also have a 'resolve' i.e. inline hex
+ if not entities then
+ require("char-ent")
+ entities = characters.entities
+ end
+ if entities and root and root.tg == "@rt@" and root.statistics then
+ local list = { }
+ local hexify = option == "hexadecimal"
+ for k, v in table.sortedhash(root.statistics.entities.names) do
+ if not builtinentities[k] then
+ local e = entities[k]
+ if not e then
+ e = format("[%s]",k)
+ elseif hexify then
+ e = format("&#%05X;",utfbyte(k))
+ end
+ list[#list+1] = format(" <!ENTITY %s %q >",k,e)
+ end
+ end
+ local dt = root.dt
+ local n = dt[1].tg == "@pi@" and 2 or 1
+ if #list > 0 then
+ insert(dt, n, { "\n" })
+ insert(dt, n, {
+ tg = "@dt@", -- beware, doctype is unparsed
+ dt = { format("Something [\n%s\n] ",concat(list)) },
+ ns = "",
+ special = true,
+ })
+ insert(dt, n, { "\n\n" })
+ else
+ -- insert(dt, n, { table.serialize(root.statistics) })
+ end
+ end
+end
+
+-- local str = [==[
+-- <?xml version='1.0' standalone='yes' ?>
+-- <root>
+-- <a>test &nbsp; test &#123; test</a>
+-- <b><![CDATA[oeps]]></b>
+-- </root>
+-- ]==]
+--
+-- local x = xml.convert(str)
+-- xml.addentitiesdoctype(x,"hexadecimal")
+-- print(x)
+
+--[[ldx--
<p>Here are a few synonyms.</p>
--ldx]]--
@@ -9378,6 +10181,53 @@ xml.remap_tag = xml.remaptag obsolete.remap_tag
xml.remap_name = xml.remapname obsolete.remap_name = xml.remapname
xml.remap_namespace = xml.remapnamespace obsolete.remap_namespace = xml.remapnamespace
+-- new (probably ok)
+
+function xml.cdata(e)
+ if e then
+ local dt = e.dt
+ if dt and #dt == 1 then
+ local first = dt[1]
+ return first.tg == "@cd@" and first.dt[1] or ""
+ end
+ end
+ return ""
+end
+
+function xml.finalizers.xml.cdata(collected)
+ if collected then
+ local e = collected[1]
+ if e then
+ local dt = e.dt
+ if dt and #dt == 1 then
+ local first = dt[1]
+ return first.tg == "@cd@" and first.dt[1] or ""
+ end
+ end
+ end
+ return ""
+end
+
+function xml.insertcomment(e,str,n) -- also insertcdata
+ table.insert(e.dt,n or 1,{
+ tg = "@cm@",
+ ns = "",
+ special = true,
+ at = { },
+ dt = { str },
+ })
+end
+
+function xml.setcdata(e,str) -- also setcomment
+ e.dt = { {
+ tg = "@cd@",
+ ns = "",
+ special = true,
+ at = { },
+ dt = { str },
+ } }
+end
+
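+-- illustrative usage sketch:
+--
+-- local x = xml.convert("<r><![CDATA[oeps]]></r>")
+-- local e = x.dt[x.ri or 1]          -- the root element
+-- xml.cdata(e)                       -- "oeps"
+-- xml.setcdata(e,"something else")   -- replaces the content by new cdata
+-- xml.insertcomment(e,"a remark",1)  -- injects a comment node up front
+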
end -- of closure
@@ -9391,15 +10241,17 @@ if not modules then modules = { } end modules ['lxml-xml'] = {
license = "see context related readme files"
}
-local concat = string.concat
+local concat = table.concat
+local find = string.find
local xml = xml
-local finalizers = xml.finalizers.xml
-local xmlfilter = xml.filter -- we could inline this one for speed
-local xmltostring = xml.tostring
-local xmlserialize = xml.serialize
-local xmlcollected = xml.collected
+local finalizers = xml.finalizers.xml
+local xmlfilter = xml.filter -- we could inline this one for speed
+local xmltostring = xml.tostring
+local xmlserialize = xml.serialize
+local xmlcollected = xml.collected
+local xmlnewhandlers = xml.newhandlers
local function first(collected) -- wrong ?
return collected and collected[1]
@@ -9413,6 +10265,21 @@ local function all(collected)
return collected
end
+-- local function reverse(collected)
+-- if collected then
+-- local nc = #collected
+-- if nc > 0 then
+-- local reversed, r = { }, 0
+-- for c=nc,1,-1 do
+-- r = r + 1
+-- reversed[r] = collected[c]
+-- end
+-- return reversed
+-- else
+-- return collected
+-- end
+-- end
+-- end
local reverse = table.reversed
@@ -9429,34 +10296,37 @@ local function att(id,name)
end
local function count(collected)
- return (collected and #collected) or 0
+ return collected and #collected or 0
end
local function position(collected,n)
- if collected then
- n = tonumber(n) or 0
- if n < 0 then
- return collected[#collected + n + 1]
- elseif n > 0 then
- return collected[n]
- else
- return collected[1].mi or 0
- end
+ if not collected then
+ return 0
+ end
+ local nc = #collected
+ if nc == 0 then
+ return 0
+ end
+ n = tonumber(n) or 0
+ if n < 0 then
+ return collected[nc + n + 1]
+ elseif n > 0 then
+ return collected[n]
+ else
+ return collected[1].mi or 0
end
end
local function match(collected)
- return (collected and collected[1].mi) or 0 -- match
+ return collected and #collected > 0 and collected[1].mi or 0 -- match
end
local function index(collected)
- if collected then
- return collected[1].ni
- end
+ return collected and #collected > 0 and collected[1].ni or 0 -- 0 is new
end
local function attributes(collected,arguments)
- if collected then
+ if collected and #collected > 0 then
local at = collected[1].at
if arguments then
return at[arguments]
@@ -9467,7 +10337,7 @@ local function attributes(collected,arguments)
end
local function chainattribute(collected,arguments) -- todo: optional levels
- if collected then
+ if collected and #collected > 0 then
local e = collected[1]
while e do
local at = e.at
@@ -9485,108 +10355,169 @@ local function chainattribute(collected,arguments) -- todo: optional levels
return ""
end
-local function raw(collected) -- hybrid
- if collected then
+local function raw(collected) -- hybrid (not much different from text so it might go)
+ if collected and #collected > 0 then
local e = collected[1] or collected
- return (e and xmlserialize(e)) or "" -- only first as we cannot concat function
+ return e and xmltostring(e) or "" -- only first as we cannot concat function
else
return ""
end
end
+--
+
+local xmltexthandler = xmlnewhandlers {
+ name = "string",
+ initialize = function()
+ result = { }
+ return result
+ end,
+ finalize = function()
+ return concat(result)
+ end,
+ handle = function(...)
+ result[#result+1] = concat { ... }
+ end,
+ escape = false,
+}
+
+local function xmltotext(root)
+ local dt = root.dt
+ if not dt then
+ return ""
+ end
+ local nt = #dt -- string or table
+ if nt == 0 then
+ return ""
+ elseif nt == 1 and type(dt[1]) == "string" then
+ return dt[1] -- no escaping of " ' < > &
+ else
+ return xmlserialize(root,xmltexthandler) or ""
+ end
+end
+
+--
+
local function text(collected) -- hybrid
- if collected then
- local e = collected[1] or collected
- return (e and xmltostring(e.dt)) or ""
+ if collected then -- no # test here !
+ local e = collected[1] or collected -- why fallback to element, how about cdata
+ return e and xmltotext(e) or ""
else
return ""
end
end
local function texts(collected)
- if collected then
- local t, n = { }, 0
- for c=1,#collected do
- local e = collected[c]
- if e and e.dt then
- n = n + 1
- t[n] = e.dt
- end
+ if not collected then
+ return { } -- why no nil
+ end
+ local nc = #collected
+ if nc == 0 then
+ return { } -- why no nil
+ end
+ local t, n = { }, 0
+ for c=1,nc do
+ local e = collected[c]
+ if e and e.dt then
+ n = n + 1
+ t[n] = e.dt
end
- return t
end
+ return t
end
local function tag(collected,n)
- if collected then
- local c
- if n == 0 or not n then
- c = collected[1]
- elseif n > 1 then
- c = collected[n]
- else
- c = collected[#collected-n+1]
- end
- return c and c.tg
+ if not collected then
+ return
+ end
+ local nc = #collected
+ if nc == 0 then
+ return
end
+ local c
+ if n == 0 or not n then
+ c = collected[1]
+ elseif n > 1 then
+ c = collected[n]
+ else
+ c = collected[nc-n+1]
+ end
+ return c and c.tg
end
local function name(collected,n)
- if collected then
- local c
- if n == 0 or not n then
- c = collected[1]
- elseif n > 1 then
- c = collected[n]
- else
- c = collected[#collected-n+1]
- end
- if c then
- if c.ns == "" then
- return c.tg
- else
- return c.ns .. ":" .. c.tg
- end
- end
+ if not collected then
+ return
+ end
+ local nc = #collected
+ if nc == 0 then
+ return
+ end
+ local c
+ if n == 0 or not n then
+ c = collected[1]
+ elseif n > 1 then
+ c = collected[n]
+ else
+ c = collected[nc-n+1]
+ end
+ if not c then
+ -- sorry
+ elseif c.ns == "" then
+ return c.tg
+ else
+ return c.ns .. ":" .. c.tg
end
end
local function tags(collected,nonamespace)
- if collected then
- local t, n = { }, 0
- for c=1,#collected do
- local e = collected[c]
- local ns, tg = e.ns, e.tg
- n = n + 1
- if nonamespace or ns == "" then
- t[n] = tg
- else
- t[n] = ns .. ":" .. tg
- end
+ if not collected then
+ return
+ end
+ local nc = #collected
+ if nc == 0 then
+ return
+ end
+ local t, n = { }, 0
+ for c=1,nc do
+ local e = collected[c]
+ local ns, tg = e.ns, e.tg
+ n = n + 1
+ if nonamespace or ns == "" then
+ t[n] = tg
+ else
+ t[n] = ns .. ":" .. tg
end
- return t
end
+ return t
end
-local function empty(collected)
- if collected then
- for c=1,#collected do
- local e = collected[c]
- if e then
- local edt = e.dt
- if edt then
- local n = #edt
- if n == 1 then
- local edk = edt[1]
- local typ = type(edk)
- if typ == "table" then
- return false
- elseif edk ~= "" then -- maybe an extra tester for spacing only
- return false
- end
- elseif n > 1 then
+local function empty(collected,spacesonly)
+ if not collected then
+ return true
+ end
+ local nc = #collected
+ if nc == 0 then
+ return true
+ end
+ for c=1,nc do
+ local e = collected[c]
+ if e then
+ local edt = e.dt
+ if edt then
+ local n = #edt
+ if n == 1 then
+ local edk = edt[1]
+ local typ = type(edk)
+ if typ == "table" then
+ return false
+ elseif edk ~= "" then
+ return false
+ elseif spacesonly and not find(edk,"%S") then
return false
end
+ elseif n > 1 then
+ return false
end
end
end
@@ -9641,14 +10572,14 @@ function xml.raw(id,pattern)
end
end
-function xml.text(id,pattern)
+function xml.text(id,pattern) -- brrr either content or element (when cdata)
if pattern then
-- return text(xmlfilter(id,pattern))
local collected = xmlfilter(id,pattern)
- return (collected and xmltostring(collected[1].dt)) or ""
+ return collected and #collected > 0 and xmltotext(collected[1]) or ""
elseif id then
-- return text(id)
- return xmltostring(id.dt) or ""
+ return xmltotext(id) or ""
else
return ""
end
@@ -9656,6 +10587,8 @@ end
xml.content = text
+--
+
function xml.position(id,pattern,n) -- element
return position(xmlfilter(id,pattern),n)
end
@@ -9664,8 +10597,8 @@ function xml.match(id,pattern) -- number
return match(xmlfilter(id,pattern))
end
-function xml.empty(id,pattern)
- return empty(xmlfilter(id,pattern))
+function xml.empty(id,pattern,spacesonly)
+ return empty(xmlfilter(id,pattern),spacesonly)
end
xml.all = xml.filter
@@ -9719,7 +10652,7 @@ if not modules then modules = { } end modules ['data-ini'] = {
license = "see context related readme files",
}
-local gsub, find, gmatch = string.gsub, string.find, string.gmatch
+local gsub, find, gmatch, char = string.gsub, string.find, string.gmatch, string.char
local concat = table.concat
local next, type = next, type
@@ -9781,7 +10714,7 @@ do
local homedir = osgetenv(ostype == "windows" and 'USERPROFILE' or 'HOME') or ''
if not homedir or homedir == "" then
- homedir = string.char(127) -- we need a value, later we wil trigger on it
+		homedir = char(127) -- we need a value, later we will trigger on it
end
homedir = file.collapsepath(homedir)
@@ -9954,7 +10887,7 @@ if not modules then modules = { } end modules ['data-exp'] = {
license = "see context related readme files",
}
-local format, find, gmatch, lower = string.format, string.find, string.gmatch, string.lower
+local format, find, gmatch, lower, char, sub = string.format, string.find, string.gmatch, string.lower, string.char, string.sub
local concat, sort = table.concat, table.sort
local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
local Ct, Cs, Cc, P, C, S = lpeg.Ct, lpeg.Cs, lpeg.Cc, lpeg.P, lpeg.C, lpeg.S
@@ -9974,19 +10907,6 @@ local resolvers = resolvers
-- all, when working on the main resolver code, I don't want to scroll
-- past this every time. See data-obs.lua for the gsub variant.
--- {a,b,c,d}
--- a,b,c/{p,q,r},d
--- a,b,c/{p,q,r}/d/{x,y,z}//
--- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
--- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
--- a{b,c}{d,e}f
--- {a,b,c,d}
--- {a,b,c/{p,q,r},d}
--- {a,b,c/{p,q,r}/d/{x,y,z}//}
--- {a,b,c/{p,q/{x,y,z}},d/{p,q,r}}
--- {a,b,c/{p,q/{x,y,z},w}v,d/{p,q,r}}
--- {$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,.local,}/web2c}
-
local function f_first(a,b)
local t, n = { }, 0
for s in gmatch(b,"[^,]+") do
@@ -10043,12 +10963,9 @@ local function splitpathexpr(str, newlist, validate) -- I couldn't resist lpeggi
str = lpegmatch(stripper_1,str)
if validate then
for s in gmatch(str,"[^,]+") do
- local restoreslashes = false
- if find(s, "//$") then restoreslashes = true end
s = validate(s)
if s then
n = n + 1 ; t[n] = s
- if restoreslashes and not find(s, "//$") then t[n] = t[n] .. '//' end
end
end
else
@@ -10081,41 +10998,56 @@ function resolvers.expandedpathfromlist(pathlist)
return newlist
end
+-- {a,b,c,d}
+-- a,b,c/{p,q,r},d
+-- a,b,c/{p,q,r}/d/{x,y,z}//
+-- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
+-- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
+-- a{b,c}{d,e}f
+-- {a,b,c,d}
+-- {a,b,c/{p,q,r},d}
+-- {a,b,c/{p,q,r}/d/{x,y,z}//}
+-- {a,b,c/{p,q/{x,y,z}},d/{p,q,r}}
+-- {a,b,c/{p,q/{x,y,z},w}v,d/{p,q,r}}
+-- {$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,.local,}/web2c}
+
local cleanup = lpeg.replacer {
{ "!" , "" },
{ "\\" , "/" },
}
-local homedir
-
-function resolvers.cleanpath(str)
- if not homedir then
- homedir = lpegmatch(cleanup,environment.homedir or "")
- if homedir == string.char(127) or homedir == "" or not lfs.isdir(homedir) then
- if trace_expansions then
- report_expansions("no home dir set, ignoring dependent paths")
- end
- function resolvers.cleanpath(str)
- if find(str,"~") then
- return "" -- special case
- else
- return str and lpegmatch(cleanup,str)
- end
- end
- else
- cleanup = lpeg.replacer {
- { "!" , "" },
- { "\\" , "/" },
- { "~" , homedir },
- }
- function resolvers.cleanpath(str)
- return str and lpegmatch(cleanup,str)
+function resolvers.cleanpath(str) -- tricky, maybe only simple paths
+ local doslashes = (P("\\")/"/" + 1)^0
+ local donegation = (P("!") /"" )^0
+ local homedir = lpegmatch(Cs(donegation * doslashes),environment.homedir or "")
+ if homedir == "~" or homedir == "" or not lfs.isdir(homedir) then
+ if trace_expansions then
+ report_expansions("no home dir set, ignoring dependent paths")
+ end
+ function resolvers.cleanpath(str)
+ if not str or find(str,"~") then
+ return "" -- special case
+ else
+ return lpegmatch(cleanup,str)
end
end
+ else
+ local dohome = ((P("~")+P("$HOME"))/homedir)^0
+ local cleanup = Cs(donegation * dohome * doslashes)
+ function resolvers.cleanpath(str)
+ return str and lpegmatch(cleanup,str) or ""
+ end
end
return resolvers.cleanpath(str)
end
+-- print(resolvers.cleanpath(""))
+-- print(resolvers.cleanpath("!"))
+-- print(resolvers.cleanpath("~"))
+-- print(resolvers.cleanpath("~/test"))
+-- print(resolvers.cleanpath("!~/test"))
+-- print(resolvers.cleanpath("~/test~test"))
+
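-- for the record: with a usable home directory, say "/home/hh", the calls above
-- yield "", "", "/home/hh", "/home/hh/test", "/home/hh/test" and
-- "/home/hh/test~test"; without one, anything containing "~" collapses to ""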
-- This one strips quotes and funny tokens.
local expandhome = P("~") / "$HOME" -- environment.homedir
@@ -10140,8 +11072,8 @@ end
local cache = { }
----- splitter = Ct(lpeg.splitat(S(ostype == "windows" and ";" or ":;"))) -- maybe add ,
-local splitter = Ct(lpeg.splitat(";")) -- as we move towards urls, prefixes and use tables we no longer do :
+----- splitter = lpeg.tsplitat(S(ostype == "windows" and ";" or ":;")) -- maybe add ,
+local splitter = lpeg.tsplitat(";") -- as we move towards urls, prefixes and use tables we no longer do :
local backslashswapper = lpeg.replacer("\\","/")
@@ -10201,13 +11133,22 @@ end
-local weird = P(".")^1 + lpeg.anywhere(S("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t"))
+-- a lot of this caching can be stripped away when we have ssd's everywhere
+--
+-- we could cache all the (sub)paths here if needed
local attributes, directory = lfs.attributes, lfs.dir
+local weird = P(".")^1 + lpeg.anywhere(S("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t"))
+local timer = { }
+local scanned = { }
+local nofscans = 0
+local scancache = { }
+
local function scan(files,spec,path,n,m,r)
- local full = (path == "" and spec) or (spec .. path .. '/')
- local dirs, nofdirs = { }, 0
+ local full = (path == "" and spec) or (spec .. path .. '/')
+ local dirs = { }
+ local nofdirs = 0
for name in directory(full) do
if not lpegmatch(weird,name) then
local mode = attributes(full..name,'mode')
@@ -10245,25 +11186,122 @@ local function scan(files,spec,path,n,m,r)
files, n, m, r = scan(files,spec,dirs[i],n,m,r)
end
end
+ scancache[sub(full,1,-2)] = files
return files, n, m, r
end
-function resolvers.scanfiles(path,branch)
+local fullcache = { }
+
+function resolvers.scanfiles(path,branch,usecache)
+ statistics.starttiming(timer)
+ local realpath = resolvers.resolve(path) -- no shortcut
+ if usecache then
+ local files = fullcache[realpath]
+ if files then
+ if trace_locating then
+			report_expansions("using cached scan of path '%s', branch '%s'",path,branch or path)
+ end
+ return files
+ end
+ end
if trace_locating then
- report_expansions("scanning path '%s', branch '%s'",path, branch or path)
+ report_expansions("scanning path '%s', branch '%s'",path,branch or path)
end
- local realpath = resolvers.resolve(path) -- no shortcut
local files, n, m, r = scan({ },realpath .. '/',"",0,0,0)
- files.__path__ = path -- can be selfautoparent:texmf-whatever
- files.__files__ = n
- files.__directories__ = m
- files.__remappings__ = r
+ files.__path__ = path -- can be selfautoparent:texmf-whatever
+ files.__files__ = n
+ files.__directories__ = m
+ files.__remappings__ = r
if trace_locating then
report_expansions("%s files found on %s directories with %s uppercase remappings",n,m,r)
end
+ if usecache then
+ scanned[#scanned+1] = realpath
+ fullcache[realpath] = files
+ end
+ nofscans = nofscans + 1
+ statistics.stoptiming(timer)
+ return files
+end
+
+local function simplescan(files,spec,path) -- first match only, no map and such
+ local full = (path == "" and spec) or (spec .. path .. '/')
+ local dirs = { }
+ local nofdirs = 0
+ for name in directory(full) do
+ if not lpegmatch(weird,name) then
+ local mode = attributes(full..name,'mode')
+ if mode == 'file' then
+ if not files[name] then
+ -- only first match
+ files[name] = path
+ end
+ elseif mode == 'directory' then
+ nofdirs = nofdirs + 1
+ if path ~= "" then
+ dirs[nofdirs] = path..'/'..name
+ else
+ dirs[nofdirs] = name
+ end
+ end
+ end
+ end
+ if nofdirs > 0 then
+ sort(dirs)
+ for i=1,nofdirs do
+ files = simplescan(files,spec,dirs[i])
+ end
+ end
+ return files
+end
+
+local simplecache = { }
+local nofsharedscans = 0
+
+function resolvers.simplescanfiles(path,branch,usecache)
+ statistics.starttiming(timer)
+ local realpath = resolvers.resolve(path) -- no shortcut
+ if usecache then
+ local files = simplecache[realpath]
+ if not files then
+ files = scancache[realpath]
+ if files then
+ nofsharedscans = nofsharedscans + 1
+ end
+ end
+ if files then
+ if trace_locating then
+			report_expansions("using cached scan of path '%s', branch '%s'",path,branch or path)
+ end
+ return files
+ end
+ end
+ if trace_locating then
+ report_expansions("scanning path '%s', branch '%s'",path,branch or path)
+ end
+ local files = simplescan({ },realpath .. '/',"")
+ if trace_locating then
+ report_expansions("%s files found",table.count(files))
+ end
+ if usecache then
+ scanned[#scanned+1] = realpath
+ simplecache[realpath] = files
+ end
+ nofscans = nofscans + 1
+ statistics.stoptiming(timer)
return files
end
+function resolvers.scandata()
+ table.sort(scanned)
+ return {
+ n = nofscans,
+ shared = nofsharedscans,
+ time = statistics.elapsedtime(timer),
+ paths = scanned,
+ }
+end
+
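-- a usage sketch (the path is only an example):
--
-- local files = resolvers.scanfiles("/opt/tex/texmf",nil,true) -- third argument: use the cache
-- print(files.__files__,files.__directories__,files.__remappings__)
-- local data = resolvers.scandata()
-- print(data.n,data.shared,data.time) -- scans, shared scans, time spent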
end -- of closure
@@ -10361,12 +11399,12 @@ local relations = allocate { -- todo: handlers also here
mp = {
names = { "mp" },
variable = 'MPINPUTS',
- suffixes = { 'mp' },
+ suffixes = { 'mp', 'mpvi', 'mpiv', 'mpii' },
},
tex = {
names = { "tex" },
variable = 'TEXINPUTS',
- suffixes = { 'tex', "mkiv", "mkiv", "mkii" },
+ suffixes = { 'tex', "mkvi", "mkiv", "mkii" },
},
icc = {
names = { "icc", "icc profile", "icc profiles" },
@@ -10503,7 +11541,7 @@ end
resolvers.updaterelations() -- push this in the metatable -> newindex
local function simplified(t,k)
- return rawget(t,lower(gsub(k," ","")))
+ return k and rawget(t,lower(gsub(k," ",""))) or nil
end
setmetatableindex(formats, simplified)
@@ -10589,6 +11627,7 @@ luatools with a recache feature.</p>
--ldx]]--
local format, lower, gsub, concat = string.format, string.lower, string.gsub, table.concat
+local serialize, serializetofile = table.serialize, table.tofile
local mkdirs, isdir = dir.mkdirs, lfs.isdir
local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
@@ -10651,11 +11690,7 @@ local function identify()
end
elseif not writable and caches.force then
local cacheparent = file.dirname(cachepath)
- -- TH: added 'or true' for deeply buried non-existent caches.
- -- file.is_writable() is not really important here, since this
- -- branch is only checked for as-yet non-existent paths, and
- -- it guards against mkdirs() failing, anyway.
- if file.is_writable(cacheparent) or true then
+ if file.is_writable(cacheparent) and true then -- we go on anyway (needed for mojca's kind of paths)
if not caches.ask or io.ask(format("\nShould I create the cache path %s?",cachepath), "no", { "yes", "no" }) == "yes" then
mkdirs(cachepath)
if isdir(cachepath) and file.is_writable(cachepath) then
@@ -10746,7 +11781,7 @@ function caches.usedpaths()
end
function caches.configfiles()
- return table.concat(resolvers.instance.specification,";")
+ return concat(resolvers.instance.specification,";")
end
function caches.hashed(tree)
@@ -10870,9 +11905,9 @@ function caches.savedata(filepath,filename,data,raw)
end
data.cache_uuid = os.uuid()
if caches.direct then
- file.savedata(tmaname,table.serialize(data,true,saveoptions))
+ file.savedata(tmaname,serialize(data,true,saveoptions))
else
- table.tofile(tmaname,data,true,saveoptions)
+ serializetofile(tmaname,data,true,saveoptions)
end
utilities.lua.compile(tmaname,tmcname)
end
@@ -10939,7 +11974,7 @@ function caches.savecontent(cachename,dataname,content)
content = content,
uuid = os.uuid(),
}
- local ok = io.savedata(luaname,table.serialize(data,true))
+ local ok = io.savedata(luaname,serialize(data,true))
if ok then
if trace_locating then
report_resolvers("category '%s', cachename '%s' saved in '%s'",dataname,cachename,luaname)
@@ -11112,7 +12147,7 @@ if not modules then modules = { } end modules ['data-res'] = {
-- instance but for practical purposes we now avoid this and use a
-- instance variable. We always have one instance active (sort of global).
--- todo: cache:/// home:///
+-- todo: cache:/// home:/// selfautoparent:/// (sometime end 2012)
local format, gsub, find, lower, upper, match, gmatch = string.format, string.gsub, string.find, string.lower, string.upper, string.match, string.gmatch
local concat, insert, sortedkeys = table.concat, table.insert, table.sortedkeys
@@ -11155,7 +12190,58 @@ resolvers.criticalvars = allocate { "SELFAUTOLOC", "SELFAUTODIR", "SELFAUTOPARE
resolvers.luacnfname = 'texmfcnf.lua'
resolvers.luacnfstate = "unknown"
-resolvers.luacnfspec = '{selfautodir:,selfautoparent:};{selfautodir:,selfautoparent:}{/share,}/texmf{-local,};{selfautodir:,selfautoparent:}{/share,}/texmf{-local,}/web2c'
+-- The web2c tex binaries as well as kpse have built in paths for the configuration
+-- files and there can be a depressing truckload of them. This is actually the weak
+-- spot of a distribution. So we don't want:
+--
+-- resolvers.luacnfspec = '{$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,}/web2c}'
+--
+-- but instead use:
+--
+-- resolvers.luacnfspec = 'selfautoparent:{/texmf{-local,}{,/web2c}}'
+--
+-- which does not make texlive happy as there is a texmf-local tree one level up
+-- (sigh), so we need this. (We can assume web2c as mkiv does not run on older
+-- texlives anyway.)
+--
+-- texlive:
+--
+-- selfautodir:
+-- selfautoparent:
+-- selfautodir:share/texmf-local/web2c
+-- selfautodir:share/texmf/web2c
+-- selfautodir:texmf-local/web2c
+-- selfautodir:texmf/web2c
+-- selfautoparent:share/texmf-local/web2c
+-- selfautoparent:share/texmf/web2c
+-- selfautoparent:texmf-local/web2c
+-- selfautoparent:texmf/web2c
+--
+-- minimals:
+--
+-- home:texmf/web2c
+-- selfautoparent:texmf-local/web2c
+-- selfautoparent:texmf-context/web2c
+-- selfautoparent:texmf/web2c
+
+if this_is_texlive then
+ -- resolvers.luacnfspec = '{selfautodir:,selfautoparent:}{,{/share,}/texmf{-local,}/web2c}'
+ -- resolvers.luacnfspec = '{selfautodir:{/share,}/texmf-local/web2c,selfautoparent:{/share,}/texmf{-local,}/web2c}'
+ -- resolvers.luacnfspec = 'selfautodir:/texmf-local/web2c;selfautoparent:/texmf{-local,}/web2c'
+ resolvers.luacnfspec = 'selfautodir:;selfautoparent:;{selfautodir:,selfautoparent:}{/share,}/texmf{-local,}/web2c'
+else
+ resolvers.luacnfspec = 'home:texmf/web2c;selfautoparent:texmf{-local,-context,}/web2c'
+end
+
+-- which (as we want users to use the web2c path) can be simplified to this:
+--
+-- if environment and environment.ownpath and string.find(environment.ownpath,"[\\/]texlive[\\/]") then
+-- resolvers.luacnfspec = 'selfautodir:/texmf-local/web2c,selfautoparent:/texmf-local/web2c,selfautoparent:/texmf/web2c'
+-- else
+-- resolvers.luacnfspec = 'selfautoparent:/texmf-local/web2c,selfautoparent:/texmf/web2c'
+-- end
+
+
local unset_variable = "unset"
@@ -11177,7 +12263,7 @@ local instance = resolvers.instance or nil -- the current one (fast access)
function resolvers.setenv(key,value,raw)
if instance then
-- this one will be consulted first when we stay inside
- -- the current environment
+ -- the current environment (prefixes are not resolved here)
instance.environment[key] = value
-- we feed back into the environment, and as this is used
-- by other applications (via os.execute) we need to make
@@ -11257,6 +12343,7 @@ function resolvers.newinstance() -- todo: all vars will become lowercase and alp
remember = true,
diskcache = true,
renewcache = false,
+ renewtree = false,
loaderror = false,
savelists = true,
pattern = nil, -- lists
@@ -11348,7 +12435,7 @@ local function makepathexpression(str)
end
end
-local function reportcriticalvariables()
+local function reportcriticalvariables(cnfspec)
if trace_locating then
for i=1,#resolvers.criticalvars do
local k = resolvers.criticalvars[i]
@@ -11356,6 +12443,14 @@ local function reportcriticalvariables()
report_resolving("variable '%s' set to '%s'",k,v)
end
report_resolving()
+ if cnfspec then
+ if type(cnfspec) == "table" then
+ report_resolving("using configuration specification '%s'",concat(cnfspec,","))
+ else
+ report_resolving("using configuration specification '%s'",cnfspec)
+ end
+ end
+ report_resolving()
end
reportcriticalvariables = function() end
end
@@ -11370,7 +12465,7 @@ local function identify_configuration_files()
else
resolvers.luacnfstate = "environment"
end
- reportcriticalvariables()
+ reportcriticalvariables(cnfspec)
local cnfpaths = expandedpathfromlist(resolvers.splitpath(cnfspec))
local luacnfname = resolvers.luacnfname
for i=1,#cnfpaths do
@@ -11406,6 +12501,19 @@ local function load_configuration_files()
if blob then
local setups = instance.setups
local data = blob()
+ local parent = data and data.parent
+ if parent then
+ local filename = filejoin(pathname,parent)
+ local realname = resolvers.resolve(filename) -- no shortcut
+ local blob = loadfile(realname)
+ if blob then
+ local parentdata = blob()
+ if parentdata then
+ report_resolving("loading configuration file '%s'",filename)
+ data = table.merged(parentdata,data)
+ end
+ end
+ end
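                    -- so a configuration file can delegate to a parent file, e.g.
                    -- (names and values below are only an illustration):
                    --
                    -- return {
                    --     parent  = "texmfcnf-common.lua",
                    --     content = { TEXMFCACHE = "$SELFAUTOPARENT/texmf-cache" },
                    -- }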
data = data and data.content
if data then
if trace_locating then
@@ -11495,11 +12603,6 @@ local function locate_file_databases()
local runtime = stripped == path
path = resolvers.cleanpath(path)
local spec = resolvers.splitmethod(stripped)
- -- TH Original did this the other way around (elseif and if tests
- -- reversed) but then the else branch was never reached, as 'runtime'
- -- would effectively be ignored. In turn, that meant that the wrong
- -- locator method was used, such that tree: and file: were treated
- -- identically (cached and no runtime search).
if runtime and (spec.noscheme or spec.scheme == "file") then
stripped = "tree:///" .. stripped
elseif spec.scheme == "cache" or spec.scheme == "file" then
@@ -11507,7 +12610,7 @@ local function locate_file_databases()
end
if trace_locating then
if runtime then
- report_resolving("locating list of '%s' (runtime)",path)
+ report_resolving("locating list of '%s' (runtime) (%s)",path,stripped)
else
report_resolving("locating list of '%s' (cached)",path)
end
@@ -11551,6 +12654,39 @@ local function save_file_databases() -- will become cachers
end
end
+function resolvers.renew(hashname)
+ if hashname and hashname ~= "" then
+ local expanded = resolvers.expansion(hashname) or ""
+ if expanded ~= "" then
+ if trace_locating then
+ report_resolving("identifying tree '%s' from '%s'",expanded,hashname)
+ end
+ hashname = expanded
+ else
+ if trace_locating then
+ report_resolving("identifying tree '%s'",hashname)
+ end
+ end
+ local realpath = resolvers.resolve(hashname)
+ if lfs.isdir(realpath) then
+ if trace_locating then
+ report_resolving("using path '%s'",realpath)
+ end
+ methodhandler('generators',hashname)
+ -- could be shared
+ local content = instance.files[hashname]
+ caches.collapsecontent(content)
+ if trace_locating then
+ report_resolving("saving tree '%s'",hashname)
+ end
+ caches.savecontent(hashname,"files",content)
+ -- till here
+ else
+ report_resolving("invalid path '%s'",realpath)
+ end
+ end
+end
+
local function load_databases()
locate_file_databases()
if instance.diskcache and not instance.renewcache then
@@ -11838,6 +12974,7 @@ local function collect_files(names)
if dname == "" or find(dname,"^%.") then
dname = false
else
+			dname = gsub(dname,"*","%.*")
dname = "/" .. dname .. "$"
end
local hashes = instance.hashes
@@ -11897,9 +13034,22 @@ local function collect_files(names)
return noffiles > 0 and filelist or nil
end
-function resolvers.registerintrees(name)
- if not find(name,"^%.") then
- instance.foundintrees[name] = (instance.foundintrees[name] or 0) + 1 -- maybe only one
+local fit = { }
+
+function resolvers.registerintrees(filename,format,filetype,usedmethod,foundname)
+ local foundintrees = instance.foundintrees
+ if usedmethod == "direct" and filename == foundname and fit[foundname] then
+ -- just an extra lookup after a test on presence
+ else
+ local t = {
+ filename = filename,
+ format = format ~= "" and format or nil,
+ filetype = filetype ~= "" and filetype or nil,
+ usedmethod = usedmethod,
+ foundname = foundname,
+ }
+ fit[foundname] = t
+ foundintrees[#foundintrees+1] = t
end
end
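-- a registered lookup ends up as a record in instance.foundintrees, along the
-- lines of (values are only an example):
--
-- {
--     filename   = "context.mkiv",
--     format     = "tex",
--     filetype   = "tex",
--     usedmethod = "database",
--     foundname  = "/opt/texmf/tex/context/base/context.mkiv",
-- }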
@@ -11919,297 +13069,423 @@ end
local preparetreepattern = Cs((P(".")/"%%." + P("-")/"%%-" + P(1))^0 * Cc("$"))
--- this one is split in smaller functions but it needs testing
+-- -- -- begin of main file search routing -- -- -- needs checking as previous has been patched
-local function collect_instance_files(filename,askedformat,allresults) -- todo : plugin (scanners, checkers etc)
- local result = { }
- local stamp = nil
- askedformat = askedformat or ""
- filename = collapsepath(filename)
- -- speed up / beware: format problem
- if instance.remember and not allresults then
- stamp = filename .. "--" .. askedformat
- if instance.found[stamp] then
+local collect_instance_files
+
+local function find_analyze(filename,askedformat,allresults)
+ local filetype, wantedfiles, ext = '', { }, fileextname(filename)
+ -- too tricky as filename can be bla.1.2.3:
+ --
+ -- if not suffixmap[ext] then
+ -- wantedfiles[#wantedfiles+1] = filename
+ -- end
+ wantedfiles[#wantedfiles+1] = filename
+ if askedformat == "" then
+ if ext == "" or not suffixmap[ext] then
+ local defaultsuffixes = resolvers.defaultsuffixes
+ for i=1,#defaultsuffixes do
+ local forcedname = filename .. '.' .. defaultsuffixes[i]
+ wantedfiles[#wantedfiles+1] = forcedname
+ filetype = resolvers.formatofsuffix(forcedname)
+ if trace_locating then
+ report_resolving("forcing filetype '%s'",filetype)
+ end
+ end
+ else
+ filetype = resolvers.formatofsuffix(filename)
if trace_locating then
- report_resolving("remembered file '%s'",filename)
+ report_resolving("using suffix based filetype '%s'",filetype)
end
- resolvers.registerintrees(filename) -- for tracing used files
- return instance.found[stamp]
end
- end
- if not dangerous[askedformat] then
- if isreadable(filename) then
- if trace_detail then
- report_resolving("file '%s' found directly",filename)
- end
- if stamp then
- instance.found[stamp] = { filename }
+ else
+ if ext == "" or not suffixmap[ext] then
+ local format_suffixes = suffixes[askedformat]
+ if format_suffixes then
+ for i=1,#format_suffixes do
+ wantedfiles[#wantedfiles+1] = filename .. "." .. format_suffixes[i]
+ end
end
- return { filename }
+ end
+ filetype = askedformat
+ if trace_locating then
+ report_resolving("using given filetype '%s'",filetype)
end
end
+ return filetype, wantedfiles
+end
+
+local function find_direct(filename,allresults)
+ if not dangerous[askedformat] and isreadable(filename) then
+ if trace_detail then
+ report_resolving("file '%s' found directly",filename)
+ end
+ return "direct", { filename }
+ end
+end
+
+local function find_wildcard(filename,allresults)
if find(filename,'%*') then
if trace_locating then
report_resolving("checking wildcard '%s'", filename)
end
- result = resolvers.findwildcardfiles(filename) -- we can use th elocal
- elseif file.is_qualified_path(filename) then
- if isreadable(filename) then
- if trace_locating then
- report_resolving("qualified name '%s'", filename)
- end
- result = { filename }
- else
- local forcedname, ok, suffix = "", false, fileextname(filename)
- if suffix == "" then -- why
- local format_suffixes = askedformat == "" and resolvers.defaultsuffixes or suffixes[askedformat]
- if format_suffixes then
- for i=1,#format_suffixes do
- local s = format_suffixes[i]
- forcedname = filename .. "." .. s
- if isreadable(forcedname) then
- if trace_locating then
- report_resolving("no suffix, forcing format filetype '%s'", s)
- end
- result, ok = { forcedname }, true
- break
- end
- end
- end
- end
- if not ok and suffix ~= "" then
- -- try to find in tree (no suffix manipulation), here we search for the
- -- matching last part of the name
- local basename = filebasename(filename)
- local pattern = lpegmatch(preparetreepattern,filename)
- -- messy .. to be sorted out
- local savedformat = askedformat
- local format = savedformat or ""
- if format == "" then
- askedformat = resolvers.formatofsuffix(suffix)
- end
- if not format then
- askedformat = "othertextfiles" -- kind of everything, maybe texinput is better
- end
- --
- if basename ~= filename then
- local resolved = collect_instance_files(basename,askedformat,allresults)
- if #result == 0 then -- shouldn't this be resolved ?
- local lowered = lower(basename)
- if filename ~= lowered then
- resolved = collect_instance_files(lowered,askedformat,allresults)
- end
- end
- resolvers.format = savedformat
- --
- for r=1,#resolved do
- local rr = resolved[r]
- if find(rr,pattern) then
- result[#result+1], ok = rr, true
- end
- end
- end
- -- a real wildcard:
- --
- -- if not ok then
- -- local filelist = collect_files({basename})
- -- for f=1,#filelist do
- -- local ff = filelist[f][3] or ""
- -- if find(ff,pattern) then
- -- result[#result+1], ok = ff, true
- -- end
- -- end
- -- end
- end
- if not ok and trace_locating then
- report_resolving("qualified name '%s'", filename)
- end
+ local method, result = resolvers.findwildcardfiles(filename)
+ if result then
+ return "wildcard", result
end
- else
- -- search spec
- local filetype, done, wantedfiles, ext = '', false, { }, fileextname(filename)
- -- -- tricky as filename can be bla.1.2.3
- -- if not suffixmap[ext] then --- probably needs to be done elsewhere too
- -- wantedfiles[#wantedfiles+1] = filename
- -- end
- wantedfiles[#wantedfiles+1] = filename
- if askedformat == "" then
- if ext == "" or not suffixmap[ext] then
- local defaultsuffixes = resolvers.defaultsuffixes
- for i=1,#defaultsuffixes do
- local forcedname = filename .. '.' .. defaultsuffixes[i]
- wantedfiles[#wantedfiles+1] = forcedname
- filetype = resolvers.formatofsuffix(forcedname)
+ end
+end
+
+local function find_qualified(filename,allresults) -- this one will be split too
+ if not file.is_qualified_path(filename) then
+ return
+ end
+ if trace_locating then
+ report_resolving("checking qualified name '%s'", filename)
+ end
+ if isreadable(filename) then
+ if trace_detail then
+ report_resolving("qualified file '%s' found", filename)
+ end
+ return "qualified", { filename }
+ end
+ if trace_detail then
+ report_resolving("locating qualified file '%s'", filename)
+ end
+ local forcedname, suffix = "", fileextname(filename)
+ if suffix == "" then -- why
+ local format_suffixes = askedformat == "" and resolvers.defaultsuffixes or suffixes[askedformat]
+ if format_suffixes then
+ for i=1,#format_suffixes do
+ local s = format_suffixes[i]
+ forcedname = filename .. "." .. s
+ if isreadable(forcedname) then
if trace_locating then
- report_resolving("forcing filetype '%s'",filetype)
+ report_resolving("no suffix, forcing format filetype '%s'", s)
end
+ return "qualified", { forcedname }
end
- else
- filetype = resolvers.formatofsuffix(filename)
- if trace_locating then
- report_resolving("using suffix based filetype '%s'",filetype)
+ end
+ end
+ end
+ if suffix and suffix ~= "" then
+ -- try to find in tree (no suffix manipulation), here we search for the
+ -- matching last part of the name
+ local basename = filebasename(filename)
+ local pattern = lpegmatch(preparetreepattern,filename)
+ -- messy .. to be sorted out
+ local savedformat = askedformat
+ local format = savedformat or ""
+ if format == "" then
+ askedformat = resolvers.formatofsuffix(suffix)
+ end
+ if not format then
+ askedformat = "othertextfiles" -- kind of everything, maybe all
+ end
+ --
+ if basename ~= filename then
+ local resolved = collect_instance_files(basename,askedformat,allresults)
+ if #resolved == 0 then
+ local lowered = lower(basename)
+ if filename ~= lowered then
+ resolved = collect_instance_files(lowered,askedformat,allresults)
end
end
- else
- if ext == "" or not suffixmap[ext] then
- local format_suffixes = suffixes[askedformat]
- if format_suffixes then
- for i=1,#format_suffixes do
- wantedfiles[#wantedfiles+1] = filename .. "." .. format_suffixes[i]
+ resolvers.format = savedformat
+ --
+ if #resolved > 0 then
+ local result = { }
+ for r=1,#resolved do
+ local rr = resolved[r]
+ if find(rr,pattern) then
+ result[#result+1] = rr
end
end
+ if #result > 0 then
+ return "qualified", result
+ end
end
- filetype = askedformat
- if trace_locating then
- report_resolving("using given filetype '%s'",filetype)
- end
end
- local typespec = resolvers.variableofformat(filetype)
- local pathlist = resolvers.expandedpathlist(typespec)
- if not pathlist or #pathlist == 0 then
- -- no pathlist, access check only / todo == wildcard
- if trace_detail then
- report_resolving("checking filename '%s', filetype '%s', wanted files '%s'",filename, filetype or '?',concat(wantedfiles," | "))
+ -- a real wildcard:
+ --
+ -- local filelist = collect_files({basename})
+ -- result = { }
+ -- for f=1,#filelist do
+ -- local ff = filelist[f][3] or ""
+ -- if find(ff,pattern) then
+ -- result[#result+1], ok = ff, true
+ -- end
+ -- end
+ -- if #result > 0 then
+ -- return "qualified", result
+ -- end
+ end
+end
+
+local function check_subpath(fname)
+ if isreadable(fname) then
+ if trace_detail then
+ report_resolving("found '%s' by deep scanning",fname)
+ end
+ return fname
+ end
+end
+
+local function find_intree(filename,filetype,wantedfiles,allresults)
+ local typespec = resolvers.variableofformat(filetype)
+ local pathlist = resolvers.expandedpathlist(typespec)
+ local method = "intree"
+ if pathlist and #pathlist > 0 then
+ -- list search
+ local filelist = collect_files(wantedfiles)
+ local dirlist = { }
+ if filelist then
+ for i=1,#filelist do
+ dirlist[i] = filedirname(filelist[i][3]) .. "/" -- was [2] .. gamble
end
- for k=1,#wantedfiles do
- local fname = wantedfiles[k]
- if fname and isreadable(fname) then
- filename, done = fname, true
- result[#result+1] = filejoin('.',fname)
- break
+ end
+ if trace_detail then
+ report_resolving("checking filename '%s'",filename)
+ end
+ local result = { }
+ for k=1,#pathlist do
+ local path = pathlist[k]
+ local pathname = lpegmatch(inhibitstripper,path)
+ local doscan = path == pathname -- no ^!!
+ if not find (pathname,'//$') then
+ doscan = false -- we check directly on the path
+ end
+ local done = false
+ -- using file list
+ if filelist then -- database
+ -- compare list entries with permitted pattern -- /xx /xx//
+ local expression = makepathexpression(pathname)
+ if trace_detail then
+ report_resolving("using pattern '%s' for path '%s'",expression,pathname)
end
- end
- -- this is actually 'other text files' or 'any' or 'whatever'
- local filelist = collect_files(wantedfiles)
- local fl = filelist and filelist[1]
- if fl then
- filename = fl[3] -- not local?
- result[#result+1] = resolvers.resolve(filename)
- done = true
- end
- else
- -- list search
- local filelist = collect_files(wantedfiles)
- local dirlist = { }
- if filelist then
- for i=1,#filelist do
- dirlist[i] = filedirname(filelist[i][3]) .. "/" -- was [2] .. gamble
- end
- end
- if trace_detail then
- report_resolving("checking filename '%s'",filename)
- end
- for k=1,#pathlist do
- local path = pathlist[k]
- local pathname = lpegmatch(inhibitstripper,path)
- local doscan = path == pathname -- no ^!!
- if not find (pathname, '//$') then doscan = false end
- done = false
- -- using file list
- if filelist then
- -- compare list entries with permitted pattern -- /xx /xx//
- local expression = makepathexpression(pathname)
- if trace_detail then
- report_resolving("using pattern '%s' for path '%s'",expression,pathname)
- end
- for k=1,#filelist do
- local fl = filelist[k]
- local f = fl[2]
- local d = dirlist[k]
- if find(d,expression) then
- -- todo, test for readable
- result[#result+1] = resolvers.resolve(fl[3]) -- no shortcut
- done = true
- if allresults then
- if trace_detail then
- report_resolving("match to '%s' in hash for file '%s' and path '%s', continue scanning",expression,f,d)
- end
- else
- if trace_detail then
- report_resolving("match to '%s' in hash for file '%s' and path '%s', quit scanning",expression,f,d)
- end
- break
+ for k=1,#filelist do
+ local fl = filelist[k]
+ local f = fl[2]
+ local d = dirlist[k]
+ if find(d,expression) then
+ -- todo, test for readable
+ result[#result+1] = resolvers.resolve(fl[3]) -- no shortcut
+ done = true
+ if allresults then
+ if trace_detail then
+ report_resolving("match to '%s' in hash for file '%s' and path '%s', continue scanning",expression,f,d)
end
- elseif trace_detail then
- report_resolving("no match to '%s' in hash for file '%s' and path '%s'",expression,f,d)
+ else
+ if trace_detail then
+ report_resolving("match to '%s' in hash for file '%s' and path '%s', quit scanning",expression,f,d)
+ end
+ break
end
+ elseif trace_detail then
+ report_resolving("no match to '%s' in hash for file '%s' and path '%s'",expression,f,d)
end
end
- if not done then
- -- check if on disk / unchecked / does not work at all / also zips
- -- TH perhaps it did not work because of missing resolvers.resolve()
- -- and resolvers.scanfiles() calls ...
- pathname = resolvers.resolve(pathname)
- local scheme = url.hasscheme(pathname)
- if not scheme or scheme == "file" then
- local pname = gsub(pathname,"%.%*$",'')
- if not find(pname,"%*") then
- local ppname = gsub(pname,"/+$","")
- if can_be_dir(ppname) then
- local files = {}
- if doscan then files = resolvers.scanfiles(ppname) end
+ end
+ if done then
+ method = "database"
+ else
+ method = "filesystem" -- bonus, even when !! is specified
+ pathname = gsub(pathname,"/+$","")
+ pathname = resolvers.resolve(pathname)
+ local scheme = url.hasscheme(pathname)
+ if not scheme or scheme == "file" then
+ local pname = gsub(pathname,"%.%*$",'')
+ if not find(pname,"%*") then
+ if can_be_dir(pname) then
+ -- quick root scan first
+ for k=1,#wantedfiles do
+ local w = wantedfiles[k]
+ local fname = check_subpath(filejoin(pname,w))
+ if fname then
+ result[#result+1] = fname
+ done = true
+ if not allresults then
+ break
+ end
+ end
+ end
+ if not done and doscan then
+ -- collect files in path (and cache the result)
+ local files = resolvers.simplescanfiles(pname,false,true)
for k=1,#wantedfiles do
local w = wantedfiles[k]
- local subpath = ''
- if files[w] then
- subpath = type(files[w]) == "table" and files[w][1] or files[w]
- end
- local fname = filejoin(ppname,subpath,w)
- if isreadable(fname) then
- if trace_detail then
- report_resolving("found '%s' by scanning",fname)
+ local subpath = files[w]
+ if not subpath or subpath == "" then
+ -- rootscan already done
+ elseif type(subpath) == "string" then
+ local fname = check_subpath(filejoin(pname,subpath,w))
+ if fname then
+ result[#result+1] = fname
+ done = true
+ if not allresults then
+ break
+ end
+ end
+ else
+ for i=1,#subpath do
+ local sp = subpath[i]
+ if sp == "" then
+ -- roottest already done
+ else
+ local fname = check_subpath(filejoin(pname,sp,w))
+ if fname then
+ result[#result+1] = fname
+ done = true
+ if not allresults then
+ break
+ end
+ end
+ end
+ end
+ if done and not allresults then
+ break
end
- result[#result+1] = fname
- done = true
- if not allresults then break end
end
end
- else
- -- no access needed for non existing path, speedup (esp in large tree with lots of fake)
end
end
+ else
+ -- no access needed for non existing path, speedup (esp in large tree with lots of fake)
end
end
- if not done and doscan then
- -- todo: slow path scanning ... although we now have tree:// supported in $TEXMF
- end
- if done and not allresults then break end
+ end
+ -- todo recursive scanning
+ if done and not allresults then
+ break
end
end
+ if #result > 0 then
+ return method, result
+ end
end
- for k=1,#result do
- local rk = collapsepath(result[k])
- result[k] = rk
- resolvers.registerintrees(rk) -- for tracing used files
+end
+
+local function find_onpath(filename,filetype,wantedfiles,allresults)
+ if trace_detail then
+ report_resolving("checking filename '%s', filetype '%s', wanted files '%s'",filename, filetype or '?',concat(wantedfiles," | "))
+ end
+ local result = { }
+ for k=1,#wantedfiles do
+ local fname = wantedfiles[k]
+ if fname and isreadable(fname) then
+ filename = fname
+ result[#result+1] = filejoin('.',fname)
+ if not allresults then
+ break
+ end
+ end
end
- if stamp then
- instance.found[stamp] = result
+ if #result > 0 then
+ return "onpath", result
end
- return result
end
--- -- -- begin of main file search routing -- -- --
-
-
-
-
-
-
-
+local function find_otherwise(filename,filetype,wantedfiles,allresults) -- other text files | any | whatever
+ local filelist = collect_files(wantedfiles)
+ local fl = filelist and filelist[1]
+ if fl then
+ return "otherwise", { resolvers.resolve(fl[3]) } -- filename
+ end
+end
+-- we could have a loop over the 6 functions but then we'd have to
+-- always analyze
+collect_instance_files = function(filename,askedformat,allresults) -- uses nested
+ askedformat = askedformat or ""
+ filename = collapsepath(filename)
+ if allresults then
+ -- no need for caching, only used for tracing
+ local filetype, wantedfiles = find_analyze(filename,askedformat)
+ local results = {
+ { find_direct (filename,true) },
+ { find_wildcard (filename,true) },
+ { find_qualified(filename,true) },
+ { find_intree (filename,filetype,wantedfiles,true) },
+ { find_onpath (filename,filetype,wantedfiles,true) },
+ { find_otherwise(filename,filetype,wantedfiles,true) },
+ }
+ local result, status, done = { }, { }, { }
+ for k, r in next, results do
+ local method, list = r[1], r[2]
+ if method and list then
+ for i=1,#list do
+ local c = collapsepath(list[i])
+ if not done[c] then
+ result[#result+1] = c
+ done[c] = true
+ end
+ status[#status+1] = format("%-10s: %s",method,c)
+ end
+ end
+ end
+ if trace_detail then
+ report_resolving("lookup status: %s",table.serialize(status,filename))
+ end
+ return result, status
+ else
+ local method, result, stamp, filetype, wantedfiles
+ if instance.remember then
+ stamp = format("%s--%s", filename, askedformat)
+ result = stamp and instance.found[stamp]
+ if result then
+ if trace_locating then
+ report_resolving("remembered file '%s'",filename)
+ end
+ return result
+ end
+ end
+ method, result = find_direct(filename)
+ if not result then
+ method, result = find_wildcard(filename)
+ if not result then
+ method, result = find_qualified(filename)
+ if not result then
+ filetype, wantedfiles = find_analyze(filename,askedformat)
+ method, result = find_intree(filename,filetype,wantedfiles)
+ if not result then
+ method, result = find_onpath(filename,filetype,wantedfiles)
+ if not result then
+ method, result = find_otherwise(filename,filetype,wantedfiles)
+ end
+ end
+ end
+ end
+ end
+ if result and #result > 0 then
+ local foundname = collapsepath(result[1])
+ resolvers.registerintrees(filename,askedformat,filetype,method,foundname)
+ result = { foundname }
+ else
+ result = { } -- maybe false
+ end
+ if stamp then
+ if trace_locating then
+ report_resolving("remembering file '%s'",filename)
+ end
+ instance.found[stamp] = result
+ end
+ return result
+ end
+end
-- -- -- end of main file search routing -- -- --
+
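-- in single-shot mode the finders above are tried in this order, first hit wins:
--
--   find_direct | find_wildcard | find_qualified | find_intree | find_onpath | find_otherwise
--
-- with allresults set, all six run and their (deduplicated) results are merged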
local function findfiles(filename,filetype,allresults)
- local result = collect_instance_files(filename,filetype or "",allresults)
- if #result == 0 then
+ local result, status = collect_instance_files(filename,filetype or "",allresults)
+ if not result or #result == 0 then
local lowered = lower(filename)
if filename ~= lowered then
- return collect_instance_files(lowered,filetype or "",allresults)
+ result, status = collect_instance_files(lowered,filetype or "",allresults)
end
end
- return result
+ return result or { }, status
end
function resolvers.findfiles(filename,filetype)
@@ -12370,6 +13646,10 @@ function resolvers.load(option)
return files and next(files) and true
end
+function resolvers.loadtime()
+ return statistics.elapsedtime(instance)
+end
+
local function report(str)
if trace_locating then
report_resolving(str) -- has already verbose
@@ -12383,6 +13663,9 @@ function resolvers.dowithfilesandreport(command, files, ...) -- will move
if trace_locating then
report('') -- ?
end
+ if type(files) == "string" then
+ files = { files }
+ end
for f=1,#files do
local file = files[f]
local result = command(file,...)
@@ -12540,6 +13823,8 @@ local cleanpath, findgivenfile, expansion = resolvers.cleanpath, resolvers.findg
local getenv = resolvers.getenv -- we can probably also use resolvers.expansion
local P, Cs, lpegmatch = lpeg.P, lpeg.Cs, lpeg.match
+-- getenv = function(...) return resolvers.getenv(...) end -- needs checking (definitions changes later on)
+
prefixes.environment = function(str)
return cleanpath(expansion(str))
end
@@ -12578,7 +13863,7 @@ end
prefixes.filename = function(str)
local fullname = findgivenfile(str) or ""
- return cleanpath(file.basename((fullname ~= "" and fullname) or str))
+ return cleanpath(file.basename((fullname ~= "" and fullname) or str)) -- no cleanpath needed here
end
prefixes.pathname = function(str)
@@ -12621,8 +13906,9 @@ function resolvers.allprefixes(separator)
end
local function _resolve_(method,target)
- if prefixes[method] then
- return prefixes[method](target)
+ local action = prefixes[method]
+ if action then
+ return action(target)
else
return method .. ":" .. target
end
@@ -12637,7 +13923,7 @@ end
local function resolve(str) -- use schemes, this one is then for the commandline only
local res = resolved[str]
if not res then
- res = gsub(str,"([a-z][a-z]+):([^ \"\';]*)",_resolve_)
+ res = gsub(str,"([a-z][a-z]+):([^ \"\';]*)",_resolve_) -- home:xx;selfautoparent:xx; etc
resolved[str] = res
abstract[res] = str
end
@@ -12797,7 +14083,7 @@ end
function generators.file(specification)
local path = specification.filename
- local content = resolvers.scanfiles(path)
+ local content = resolvers.scanfiles(path,false,true) -- scan once
resolvers.registerfilehash(path,content,true)
end
@@ -13152,6 +14438,18 @@ local archives = zip.archives
zip.registeredfiles = zip.registeredfiles or { }
local registeredfiles = zip.registeredfiles
+local limited = false
+
+directives.register("system.inputmode", function(v)
+ if not limited then
+ local i_limiter = io.i_limiter(v)
+ if i_limiter then
+ zip.open = i_limiter.protect(zip.open)
+ limited = true
+ end
+ end
+end)
+
local function validzip(str) -- todo: use url splitter
if not find(str,"^zip://") then
return "zip:///" .. str
@@ -13419,11 +14717,11 @@ function resolvers.finders.tree(specification)
end
function resolvers.locators.tree(specification)
- -- TH: resolvers.resolve() call was missing here
- local name = resolvers.resolve(specification.filename)
- if name ~= '' and lfs.isdir(name) then
+ local name = specification.filename
+ local realname = resolvers.resolve(name) -- no shortcut
+ if realname and realname ~= '' and lfs.isdir(realname) then
if trace_locating then
- report_trees("locator '%s' found",name)
+ report_trees("locator '%s' found",realname)
end
resolvers.appendhash('tree',name,false) -- don't cache
elseif trace_locating then
@@ -13437,6 +14735,8 @@ function resolvers.hashers.tree(specification)
report_trees("analysing '%s'",name)
end
resolvers.methodhandler("hashers",name)
+
+ resolvers.generators.file(specification)
end
resolvers.concatinators.tree = resolvers.concatinators.file
@@ -13596,8 +14896,15 @@ local function loaded(libpaths,name,simple)
end
package.loaders[2] = function(name) -- was [#package.loaders+1]
- if trace_locating then -- mode detail
- report_libraries("! locating '%s'",name)
+ if file.suffix(name) == "" then
+ name = file.addsuffix(name,"lua") -- maybe a list
+ if trace_locating then -- mode detail
+ report_libraries("! locating '%s' with forced suffix",name)
+ end
+ else
+ if trace_locating then -- mode detail
+ report_libraries("! locating '%s'",name)
+ end
end
for i=1,#libformats do
local format = libformats[i]
@@ -13804,7 +15111,13 @@ function resolvers.load_tree(tree,resolve)
-- Beware, we need to obey the relocatable autoparent so we
-- set TEXMFCNF to its raw value. This is somewhat tricky when
-- we run a mkii job from within. Therefore, in mtxrun, there
- -- is a resolve applied when we're in mkii/kpse mode.
+ -- is a resolve applied when we're in mkii/kpse mode or when
+ -- --resolve is passed to mtxrun. Maybe we should also set the
+	-- local AUTOPARENT etc. although these are always set anew.
+
+ if resolve then
+ resolvers.luacnfspec = resolvers.resolve(resolvers.luacnfspec)
+ end
setenv('SELFAUTOPARENT', newroot)
setenv('SELFAUTODIR', newtree)
@@ -13812,11 +15125,12 @@ function resolvers.load_tree(tree,resolve)
setenv('TEXROOT', newroot)
setenv('TEXOS', texos)
setenv('TEXMFOS', texmfos)
- setenv('TEXMFCNF', resolvers.luacnfspec, not resolve)
- setenv("PATH", newpath .. io.pathseparator .. getenv("PATH"))
+ setenv('TEXMFCNF', resolvers.luacnfspec,true) -- already resolved
+ setenv('PATH', newpath .. io.pathseparator .. getenv('PATH'))
report_tds("changing from root '%s' to '%s'",oldroot,newroot)
- report_tds("prepending '%s' to binary path",newpath)
+ report_tds("prepending '%s' to PATH",newpath)
+ report_tds("setting TEXMFCNF to '%s'",resolvers.luacnfspec)
report_tds()
end
end
@@ -13837,6 +15151,7 @@ if not modules then modules = { } end modules ['data-lst'] = {
-- used in mtxrun, can be loaded later .. todo
local find, concat, upper, format = string.find, table.concat, string.upper, string.format
+local fastcopy, sortedpairs = table.fastcopy, table.sortedpairs
resolvers.listers = resolvers.listers or { }
@@ -13867,10 +15182,10 @@ function resolvers.listers.variables(pattern)
end
end
end
- local env = table.fastcopy(environment)
- local var = table.fastcopy(variables)
- local exp = table.fastcopy(expansions)
- for key, value in table.sortedpairs(configured) do
+ local env = fastcopy(environment)
+ local var = fastcopy(variables)
+ local exp = fastcopy(expansions)
+ for key, value in sortedpairs(configured) do
if key ~= "" and (pattern == "" or find(upper(key),pattern)) then
report_lists(key)
report_lists(" env: %s",tabstr(rawget(environment,key)) or "unset")
@@ -13879,9 +15194,9 @@ function resolvers.listers.variables(pattern)
report_lists(" res: %s",resolvers.resolve(expansions[key]) or "unset")
end
end
- instance.environment = table.fastcopy(env)
- instance.variables = table.fastcopy(var)
- instance.expansions = table.fastcopy(exp)
+ instance.environment = fastcopy(env)
+ instance.variables = fastcopy(var)
+ instance.expansions = fastcopy(exp)
end
function resolvers.listers.configurations(report)
@@ -14168,8 +15483,8 @@ own = { } -- not local, might change
own.libs = { -- order can be made better
'l-string.lua',
- 'l-lpeg.lua',
'l-table.lua',
+ 'l-lpeg.lua',
'l-io.lua',
'l-number.lua',
'l-set.lua',
@@ -14240,7 +15555,7 @@ end
-- End of hack.
-local format, gsub, gmatch, match = string.format, string.gsub, string.gmatch, string.match
+local format, gsub, gmatch, match, find = string.format, string.gsub, string.gmatch, string.match, string.find
local concat = table.concat
own.name = (environment and environment.ownname) or arg[0] or 'mtxrun.lua'
@@ -14362,7 +15677,7 @@ local helpinfo = [[
--resolve resolve prefixed arguments
--ctxlua run internally (using preloaded libs)
--internal run script using built in libraries (same as --ctxlua)
---locate locate given filename
+--locate locate given filename in database (default) or system (--first --all --detail)
--autotree use texmf tree cf. env 'texmfstart_tree' or 'texmfstarttree'
--tree=pathtotree use given texmf tree (default file: 'setuptex.tmf')
@@ -14410,7 +15725,7 @@ local helpinfo = [[
local application = logs.application {
name = "mtxrun",
- banner = "ConTeXt TDS Runner Tool 1.30",
+ banner = "ConTeXt TDS Runner Tool 1.31",
helpinfo = helpinfo,
}
@@ -14455,12 +15770,12 @@ runners.registered = {
xmltools = { 'xmltools.rb', true },
-- luatools = { 'luatools.lua', true },
mtxtools = { 'mtxtools.rb', true },
- pdftrimwhite = { 'pdftrimwhite.pl', false }
+ pdftrimwhite = { 'pdftrimwhite.pl', false },
}
runners.launchers = {
windows = { },
- unix = { }
+ unix = { },
}
-- like runners.libpath("framework"): looks on script's subpath
@@ -14487,7 +15802,7 @@ function runners.prepare()
end
local touchname = environment.argument("iftouched")
if type(touchname) == "string" and touchname ~= "" then
- local oldname, newname = string.split(touchname, ",")
+ local oldname, newname = string.splitup(touchname, ",")
if oldname and newname and oldname ~= "" and newname ~= "" then
if not file.needs_updating(oldname,newname) then
if e_verbose then
@@ -14521,11 +15836,11 @@ function runners.execute_script(fullname,internal,nosplit)
if path ~= "" then
result = fullname
elseif name then
- name = name:gsub("^int[%a]*:",function()
+ name = gsub(name,"^int[%a]*:",function()
internal = true
return ""
end )
- name = name:gsub("^script:","")
+ name = gsub(name,"^script:","")
if suffix == "" and runners.registered[name] and runners.registered[name][1] then
name = runners.registered[name][1]
suffix = file.extname(name)
@@ -14555,13 +15870,15 @@ function runners.execute_script(fullname,internal,nosplit)
environment.ownscript = result
dofile(result)
else
- local quotedresult = result
- if string.match(result,' ') and not string.match(result,"^\".*\"$") then quotedresult = '"' .. result .. '"' end
local binary = runners.applications[file.extname(result)]
+ result = string.quoted(string.unquoted(result))
+ -- if string.match(result,' ') and not string.match(result,"^\".*\"$") then
+ -- result = '"' .. result .. '"'
+ -- end
if binary and binary ~= "" then
- quotedresult = binary .. " " .. quotedresult
+ result = binary .. " " .. result
end
- local command = quotedresult .. " " .. environment.reconstructcommandline(environment.arguments_after,noquote)
+ local command = result .. " " .. environment.reconstructcommandline(environment.arguments_after,noquote)
if e_verbose then
report()
report("executing: %s",command)
@@ -14606,7 +15923,7 @@ function runners.execute_program(fullname)
local before, after = environment.splitarguments(fullname)
for k=1,#after do after[k] = resolvers.resolve(after[k]) end
environment.initializearguments(after)
- fullname = fullname:gsub("^bin:","")
+ fullname = gsub(fullname,"^bin:","")
local command = fullname .. " " .. (environment.reconstructcommandline(after or "",noquote) or "")
report()
report("executing: %s",command)
@@ -14667,10 +15984,24 @@ function runners.resolve_string(filename)
end
end
-function runners.locate_file(filename)
- -- differs from texmfstart where locate appends .com .exe .bat ... todo
+-- differs from texmfstart where locate appends .com .exe .bat ... todo
+
+function runners.locate_file(filename) -- was given file but only searches in tree
if filename and filename ~= "" then
- runners.report_location(resolvers.findgivenfile(filename))
+ if environment.argument("first") then
+ runners.report_location(resolvers.findfile(filename))
+ -- resolvers.dowithfilesandreport(resolvers.findfile,filename)
+ elseif environment.argument("all") then
+ local result, status = resolvers.findfiles(filename)
+ if status and environment.argument("detail") then
+ runners.report_location(status)
+ else
+ runners.report_location(result)
+ end
+ else
+ runners.report_location(resolvers.findgivenfile(filename))
+ -- resolvers.dowithfilesandreport(resolvers.findgivenfile,filename)
+ end
end
end
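-- command line sketch (the filename is only an example):
--
--   mtxrun --locate foo.tex                 : database lookup (findgivenfile)
--   mtxrun --locate --first foo.tex         : first match via findfile
--   mtxrun --locate --all foo.tex           : all matches via findfiles
--   mtxrun --locate --all --detail foo.tex  : matches plus the method that found them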
@@ -14679,12 +16010,12 @@ function runners.locate_platform()
end
function runners.report_location(result)
- if e_verbose then
- reportline()
- if result and result ~= "" then
- report(result)
- else
- report("not found")
+ if type(result) == "table" then
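-- usage sketch (the tree name is only an example):
--
-- resolvers.renew("texmf-local") -- rescan that tree and save its file database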
+ for i=1,#result do
+ if i > 1 then
+ io.write("\n")
+ end
+ io.write(result[i])
end
else
io.write(result)
@@ -14742,8 +16073,8 @@ function resolvers.launch(str)
end
function runners.launch_file(filename)
- instance.allresults = true
trackers.enable("resolvers.locating")
+ local allresults = environment.arguments["all"]
local pattern = environment.arguments["pattern"]
if not pattern or pattern == "" then
pattern = filename
@@ -14751,15 +16082,15 @@ function runners.launch_file(filename)
if not pattern or pattern == "" then
report("provide name or --pattern=")
else
- local t = resolvers.findfiles(pattern)
+ local t = resolvers.findfiles(pattern,nil,allresults)
if not t or #t == 0 then
- t = resolvers.findfiles("*/" .. pattern)
+ t = resolvers.findfiles("*/" .. pattern,nil,allresults)
end
if not t or #t == 0 then
- t = resolvers.findfiles("*/" .. pattern .. "*")
+ t = resolvers.findfiles("*/" .. pattern .. "*",nil,allresults)
end
if t and #t > 0 then
- if environment.arguments["all"] then
+ if allresults then
for _, v in pairs(t) do
report("launching %s", v)
resolvers.launch(v)
@@ -14774,6 +16105,11 @@ function runners.launch_file(filename)
end
end
+local mtxprefixes = {
+ { "^mtx%-", "mtx-" },
+ { "^mtx%-t%-", "mtx-t-" },
+}
+
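-- so a bare name, say "babel", is first tried as "mtx-babel" and then as
-- "mtx-t-babel" (plus the "s" variants handled below)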
function runners.find_mtx_script(filename)
local function found(name)
local path = file.dirname(name)
@@ -14799,24 +16135,27 @@ function runners.find_mtx_script(filename)
return fullname
end
-- mtx- prefix checking
- local mtxprefix = (filename:find("^mtx%-") and "") or "mtx-"
- -- context namespace, mtx-<filename>
- fullname = mtxprefix .. filename
- fullname = found(fullname) or resolvers.findfile(fullname)
- if fullname and fullname ~= "" then
- return fullname
- end
- -- context namespace, mtx-<filename>s
- fullname = mtxprefix .. basename .. "s" .. "." .. suffix
- fullname = found(fullname) or resolvers.findfile(fullname)
- if fullname and fullname ~= "" then
- return fullname
- end
- -- context namespace, mtx-<filename minus trailing s>
- fullname = mtxprefix .. basename:gsub("s$","") .. "." .. suffix
- fullname = found(fullname) or resolvers.findfile(fullname)
- if fullname and fullname ~= "" then
- return fullname
+ for i=1,#mtxprefixes do
+ local mtxprefix = mtxprefixes[i]
+ mtxprefix = find(filename,mtxprefix[1]) and "" or mtxprefix[2]
+ -- context namespace, mtx-<filename>
+ fullname = mtxprefix .. filename
+ fullname = found(fullname) or resolvers.findfile(fullname)
+ if fullname and fullname ~= "" then
+ return fullname
+ end
+ -- context namespace, mtx-<filename>s
+ fullname = mtxprefix .. basename .. "s" .. "." .. suffix
+ fullname = found(fullname) or resolvers.findfile(fullname)
+ if fullname and fullname ~= "" then
+ return fullname
+ end
+ -- context namespace, mtx-<filename minus trailing s>
+ fullname = mtxprefix .. gsub(basename,"s$","") .. "." .. suffix
+ fullname = found(fullname) or resolvers.findfile(fullname)
+ if fullname and fullname ~= "" then
+ return fullname
+ end
end
-- context namespace, just <filename>
fullname = resolvers.findfile(filename)
@@ -14963,9 +16302,11 @@ instance.lsrmode = environment.argument("lsr") or false
local is_mkii_stub = runners.registered[file.removesuffix(file.basename(filename))]
-if environment.argument("usekpse") or environment.argument("forcekpse") or is_mkii_stub then
+local e_argument = environment.argument
+
+if e_argument("usekpse") or e_argument("forcekpse") or is_mkii_stub then
- resolvers.load_tree(environment.argument('tree'),true) -- force resolve of TEXMFCNF
+ resolvers.load_tree(e_argument('tree'),true) -- force resolve of TEXMFCNF
os.setenv("engine","")
os.setenv("progname","")
@@ -14978,7 +16319,7 @@ if environment.argument("usekpse") or environment.argument("forcekpse") or is_mk
other = "other text files",
}
- local progname = environment.argument("progname") or 'context'
+ local progname = e_argument("progname") or 'context'
local function kpse_initialized()
texconfig.kpse_init = true
@@ -14993,7 +16334,7 @@ if environment.argument("usekpse") or environment.argument("forcekpse") or is_mk
local findfile = resolvers.findfile
local showpath = resolvers.showpath
- if environment.argument("forcekpse") then
+ if e_argument("forcekpse") then
function resolvers.findfile(name,kind)
return (kpse_initialized():find_file(resolvers.cleanpath(name),(kind ~= "" and (remapper[kind] or kind)) or "tex") or "") or ""
@@ -15002,7 +16343,7 @@ if environment.argument("usekpse") or environment.argument("forcekpse") or is_mk
return (kpse_initialized():show_path(name)) or ""
end
- elseif environment.argument("usekpse") or is_mkii_stub then
+ elseif e_argument("usekpse") or is_mkii_stub then
resolvers.load()
@@ -15041,12 +16382,12 @@ else
end
end
- resolvers.load_tree(environment.argument('tree'))
+ resolvers.load_tree(e_argument('tree'),e_argument("resolve"))
end
-if environment.argument("selfmerge") then
+if e_argument("selfmerge") then
-- embed used libraries
@@ -15056,27 +16397,27 @@ if environment.argument("selfmerge") then
utilities.merger.selfmerge(own.name,own.libs,{ found })
end
-elseif environment.argument("selfclean") then
+elseif e_argument("selfclean") then
-- remove embedded libraries
runners.loadbase()
utilities.merger.selfclean(own.name)
-elseif environment.argument("selfupdate") then
+elseif e_argument("selfupdate") then
runners.loadbase()
trackers.enable("resolvers.locating")
resolvers.updatescript(own.name,"mtxrun")
-elseif environment.argument("ctxlua") or environment.argument("internal") then
+elseif e_argument("ctxlua") or e_argument("internal") then
-- run a script by loading it (using libs)
runners.loadbase()
ok = runners.execute_script(filename,true)
-elseif environment.argument("script") or environment.argument("scripts") then
+elseif e_argument("script") or e_argument("scripts") then
-- run a script by loading it (using libs), pass args
@@ -15087,109 +16428,110 @@ elseif environment.argument("script") or environment.argument("scripts") then
ok = runners.execute_ctx_script(filename)
end
-elseif environment.argument("execute") then
+elseif e_argument("execute") then
-- execute script
runners.loadbase()
ok = runners.execute_script(filename)
-elseif environment.argument("direct") then
+elseif e_argument("direct") then
-- equals bin:
runners.loadbase()
ok = runners.execute_program(filename)
-elseif environment.argument("edit") then
+elseif e_argument("edit") then
-- edit file
runners.loadbase()
runners.edit_script(filename)
-elseif environment.argument("launch") then
+elseif e_argument("launch") then
runners.loadbase()
runners.launch_file(filename)
-elseif environment.argument("makestubs") then
+elseif e_argument("makestubs") then
 -- make stubs (deprecated)
runners.handle_stubs(true)
-elseif environment.argument("removestubs") then
+elseif e_argument("removestubs") then
 -- remove stubs (deprecated)
runners.loadbase()
runners.handle_stubs(false)
-elseif environment.argument("resolve") then
+elseif e_argument("resolve") then
-- resolve string
runners.loadbase()
runners.resolve_string(filename)
-elseif environment.argument("locate") then
+elseif e_argument("locate") then
- -- locate file
+ -- locate file (only database)
runners.loadbase()
runners.locate_file(filename)
-elseif environment.argument("platform") or environment.argument("show-platform") then
+elseif e_argument("platform") or e_argument("show-platform") then
-- locate platform
runners.loadbase()
runners.locate_platform()
-elseif environment.argument("prefixes") then
+elseif e_argument("prefixes") then
runners.loadbase()
runners.prefixes()
-elseif environment.argument("timedrun") then
+elseif e_argument("timedrun") then
 -- timed run of the given command
runners.loadbase()
runners.timedrun(filename)
-elseif environment.argument("variables") or environment.argument("show-variables") or environment.argument("expansions") or environment.argument("show-expansions") then
+elseif e_argument("variables") or e_argument("show-variables") or e_argument("expansions") or e_argument("show-expansions") then
-- luatools: runners.execute_ctx_script("mtx-base","--expansions",filename)
resolvers.load("nofiles")
- resolvers.listers.variables(environment.argument("pattern"))
+ resolvers.listers.variables(e_argument("pattern"))
-elseif environment.argument("configurations") or environment.argument("show-configurations") then
+elseif e_argument("configurations") or e_argument("show-configurations") then
-- luatools: runners.execute_ctx_script("mtx-base","--configurations",filename)
resolvers.load("nofiles")
resolvers.listers.configurations()
-elseif environment.argument("find-file") then
+elseif e_argument("find-file") then
-- luatools: runners.execute_ctx_script("mtx-base","--find-file",filename)
resolvers.load()
- local e_pattern = environment.argument("pattern")
- local e_format = environment.argument("format")
+ local e_all = e_argument("all")
+ local e_pattern = e_argument("pattern")
+ local e_format = e_argument("format")
+ local finder = e_all and resolvers.findfiles or resolvers.findfile
if not e_pattern then
runners.register_arguments(filename)
environment.initializearguments(environment.arguments_after)
- resolvers.dowithfilesandreport(resolvers.findfiles,environment.files,e_format)
+ resolvers.dowithfilesandreport(finder,environment.files,e_format)
elseif type(e_pattern) == "string" then
- instance.allresults = true -- brrrr
- resolvers.dowithfilesandreport(resolvers.findfiles,{ e_pattern }, e_format)
+ resolvers.dowithfilesandreport(finder,{ e_pattern },e_format)
end
-elseif environment.argument("find-path") then
+elseif e_argument("find-path") then
-- luatools: runners.execute_ctx_script("mtx-base","--find-path",filename)
@@ -15201,7 +16543,7 @@ elseif environment.argument("find-path") then
print(path)
end
-elseif environment.argument("expand-braces") then
+elseif e_argument("expand-braces") then
-- luatools: runners.execute_ctx_script("mtx-base","--expand-braces",filename)
@@ -15210,7 +16552,7 @@ elseif environment.argument("expand-braces") then
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.expandbraces, environment.files)
-elseif environment.argument("expand-path") then
+elseif e_argument("expand-path") then
-- luatools: runners.execute_ctx_script("mtx-base","--expand-path",filename)
@@ -15219,7 +16561,7 @@ elseif environment.argument("expand-path") then
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.expandpath, environment.files)
-elseif environment.argument("expand-var") or environment.argument("expand-variable") then
+elseif e_argument("expand-var") or e_argument("expand-variable") then
-- luatools: runners.execute_ctx_script("mtx-base","--expand-var",filename)
@@ -15228,7 +16570,7 @@ elseif environment.argument("expand-var") or environment.argument("expand-variab
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.expansion, environment.files)
-elseif environment.argument("show-path") or environment.argument("path-value") then
+elseif e_argument("show-path") or e_argument("path-value") then
-- luatools: runners.execute_ctx_script("mtx-base","--show-path",filename)
@@ -15237,7 +16579,7 @@ elseif environment.argument("show-path") or environment.argument("path-value") t
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.showpath, environment.files)
-elseif environment.argument("var-value") or environment.argument("show-value") then
+elseif e_argument("var-value") or e_argument("show-value") then
-- luatools: runners.execute_ctx_script("mtx-base","--show-value",filename)
@@ -15246,28 +16588,36 @@ elseif environment.argument("var-value") or environment.argument("show-value") t
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.variable,environment.files)
-elseif environment.argument("format-path") then
+elseif e_argument("format-path") then
-- luatools: runners.execute_ctx_script("mtx-base","--format-path",filename)
resolvers.load()
report(caches.getwritablepath("format"))
-elseif environment.argument("pattern") then
+elseif e_argument("pattern") then
-- luatools
- runners.execute_ctx_script("mtx-base","--pattern='" .. environment.argument("pattern") .. "'",filename)
+ runners.execute_ctx_script("mtx-base","--pattern='" .. e_argument("pattern") .. "'",filename)
-elseif environment.argument("generate") then
+elseif e_argument("generate") then
-- luatools
- instance.renewcache = true
- trackers.enable("resolvers.locating")
- resolvers.load()
+ if filename and filename ~= "" then
+ resolvers.load("nofiles")
+ trackers.enable("resolvers.locating")
+ resolvers.renew(filename)
+ else
+ instance.renewcache = true
+ trackers.enable("resolvers.locating")
+ resolvers.load()
+ end
-elseif environment.argument("make") or environment.argument("ini") or environment.argument("compile") then
+ e_verbose = true
+
+elseif e_argument("make") or e_argument("ini") or e_argument("compile") then
-- luatools: runners.execute_ctx_script("mtx-base","--make",filename)
@@ -15275,29 +16625,33 @@ elseif environment.argument("make") or environment.argument("ini") or environmen
trackers.enable("resolvers.locating")
environment.make_format(filename)
-elseif environment.argument("run") then
+elseif e_argument("run") then
-- luatools
runners.execute_ctx_script("mtx-base","--run",filename)
-elseif environment.argument("fmt") then
+elseif e_argument("fmt") then
-- luatools
runners.execute_ctx_script("mtx-base","--fmt",filename)
-elseif environment.argument("help") and filename=='base' then
+elseif e_argument("help") and filename=='base' then
-- luatools
runners.execute_ctx_script("mtx-base","--help")
-elseif environment.argument("help") or filename=='help' or filename == "" then
+elseif e_argument("version") then
+
+ application.version()
+
+elseif e_argument("help") or filename=='help' or filename == "" then
application.help()
-elseif filename:find("^bin:") then
+elseif find(filename,"^bin:") then
runners.loadbase()
ok = runners.execute_program(filename)
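
The dispatcher above now calls the local alias e_argument instead of looking up
environment.argument again on every branch. A minimal sketch of the idea, with a
stand-in environment table (illustration only, not code from the script):

local environment = { argument = function(name) return name == "verbose" end } -- stand-in

local e_argument = environment.argument -- cache the function in a local once

if e_argument("verbose") or e_argument("silent") then
    print("at least one flag is set")
end
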
diff --git a/Master/bin/universal-darwin/mtxrun b/Master/bin/universal-darwin/mtxrun
index 7adcd3023a8..335c4fcb9e0 100755
--- a/Master/bin/universal-darwin/mtxrun
+++ b/Master/bin/universal-darwin/mtxrun
@@ -160,509 +160,6 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['l-lpeg'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local lpeg = require("lpeg")
-
-local type = type
-
--- Beware, we predefine a bunch of patterns here and one reason for doing so
--- is that we get consistent behaviour in some of the visualizers.
-
-lpeg.patterns = lpeg.patterns or { } -- so that we can share
-local patterns = lpeg.patterns
-
-local P, R, S, V, match = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.match
-local Ct, C, Cs, Cc = lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc
-local lpegtype = lpeg.type
-
-local utfcharacters = string.utfcharacters
-local utfgmatch = unicode and unicode.utf8.gmatch
-
-local anything = P(1)
-local endofstring = P(-1)
-local alwaysmatched = P(true)
-
-patterns.anything = anything
-patterns.endofstring = endofstring
-patterns.beginofstring = alwaysmatched
-patterns.alwaysmatched = alwaysmatched
-
-local digit, sign = R('09'), S('+-')
-local cr, lf, crlf = P("\r"), P("\n"), P("\r\n")
-local newline = crlf + cr + lf
-local escaped = P("\\") * anything
-local squote = P("'")
-local dquote = P('"')
-local space = P(" ")
-
-local utfbom_32_be = P('\000\000\254\255')
-local utfbom_32_le = P('\255\254\000\000')
-local utfbom_16_be = P('\255\254')
-local utfbom_16_le = P('\254\255')
-local utfbom_8 = P('\239\187\191')
-local utfbom = utfbom_32_be + utfbom_32_le
- + utfbom_16_be + utfbom_16_le
- + utfbom_8
-local utftype = utfbom_32_be / "utf-32-be" + utfbom_32_le / "utf-32-le"
- + utfbom_16_be / "utf-16-be" + utfbom_16_le / "utf-16-le"
- + utfbom_8 / "utf-8" + alwaysmatched / "unknown"
-
-local utf8next = R("\128\191")
-
-patterns.utf8one = R("\000\127")
-patterns.utf8two = R("\194\223") * utf8next
-patterns.utf8three = R("\224\239") * utf8next * utf8next
-patterns.utf8four = R("\240\244") * utf8next * utf8next * utf8next
-patterns.utfbom = utfbom
-patterns.utftype = utftype
-
-local utf8char = patterns.utf8one + patterns.utf8two + patterns.utf8three + patterns.utf8four
-local validutf8char = utf8char^0 * endofstring * Cc(true) + Cc(false)
-
-patterns.utf8 = utf8char
-patterns.utf8char = utf8char
-patterns.validutf8 = validutf8char
-patterns.validutf8char = validutf8char
-
-patterns.digit = digit
-patterns.sign = sign
-patterns.cardinal = sign^0 * digit^1
-patterns.integer = sign^0 * digit^1
-patterns.float = sign^0 * digit^0 * P('.') * digit^1
-patterns.cfloat = sign^0 * digit^0 * P(',') * digit^1
-patterns.number = patterns.float + patterns.integer
-patterns.cnumber = patterns.cfloat + patterns.integer
-patterns.oct = P("0") * R("07")^1
-patterns.octal = patterns.oct
-patterns.HEX = P("0x") * R("09","AF")^1
-patterns.hex = P("0x") * R("09","af")^1
-patterns.hexadecimal = P("0x") * R("09","AF","af")^1
-patterns.lowercase = R("az")
-patterns.uppercase = R("AZ")
-patterns.letter = patterns.lowercase + patterns.uppercase
-patterns.space = space
-patterns.tab = P("\t")
-patterns.spaceortab = patterns.space + patterns.tab
-patterns.eol = S("\n\r")
-patterns.spacer = S(" \t\f\v") -- + string.char(0xc2, 0xa0) if we want utf (cf mail roberto)
-patterns.newline = newline
-patterns.emptyline = newline^1
-patterns.nonspacer = 1 - patterns.spacer
-patterns.whitespace = patterns.eol + patterns.spacer
-patterns.nonwhitespace = 1 - patterns.whitespace
-patterns.equal = P("=")
-patterns.comma = P(",")
-patterns.commaspacer = P(",") * patterns.spacer^0
-patterns.period = P(".")
-patterns.colon = P(":")
-patterns.semicolon = P(";")
-patterns.underscore = P("_")
-patterns.escaped = escaped
-patterns.squote = squote
-patterns.dquote = dquote
-patterns.nosquote = (escaped + (1-squote))^0
-patterns.nodquote = (escaped + (1-dquote))^0
-patterns.unsingle = (squote/"") * patterns.nosquote * (squote/"")
-patterns.undouble = (dquote/"") * patterns.nodquote * (dquote/"")
-patterns.unquoted = patterns.undouble + patterns.unsingle -- more often undouble
-patterns.unspacer = ((patterns.spacer^1)/"")^0
-
-patterns.somecontent = (anything - newline - space)^1 -- (utf8char - newline - space)^1
-patterns.beginline = #(1-newline)
-
-local unquoted = Cs(patterns.unquoted * endofstring) -- not C
-
-function string.unquoted(str)
- return match(unquoted,str) or str
-end
-
-
-function lpeg.anywhere(pattern) --slightly adapted from website
- return P { P(pattern) + 1 * V(1) } -- why so complex?
-end
-
-function lpeg.splitter(pattern, action)
- return (((1-P(pattern))^1)/action+1)^0
-end
-
-local splitters_s, splitters_m = { }, { }
-
-local function splitat(separator,single)
- local splitter = (single and splitters_s[separator]) or splitters_m[separator]
- if not splitter then
- separator = P(separator)
- local other = C((1 - separator)^0)
- if single then
- local any = anything
- splitter = other * (separator * C(any^0) + "") -- ?
- splitters_s[separator] = splitter
- else
- splitter = other * (separator * other)^0
- splitters_m[separator] = splitter
- end
- end
- return splitter
-end
-
-lpeg.splitat = splitat
-
-
-local cache = { }
-
-function lpeg.split(separator,str)
- local c = cache[separator]
- if not c then
- c = Ct(splitat(separator))
- cache[separator] = c
- end
- return match(c,str)
-end
-
-function string.split(str,separator)
- local c = cache[separator]
- if not c then
- c = Ct(splitat(separator))
- cache[separator] = c
- end
- return match(c,str)
-end
-
-local spacing = patterns.spacer^0 * newline -- sort of strip
-local empty = spacing * Cc("")
-local nonempty = Cs((1-spacing)^1) * spacing^-1
-local content = (empty + nonempty)^1
-
-patterns.textline = content
-
-
-local linesplitter = Ct(splitat(newline))
-
-patterns.linesplitter = linesplitter
-
-function string.splitlines(str)
- return match(linesplitter,str)
-end
-
-local utflinesplitter = utfbom^-1 * Ct(splitat(newline))
-
-patterns.utflinesplitter = utflinesplitter
-
-function string.utfsplitlines(str)
- return match(utflinesplitter,str)
-end
-
-
-local cache = { }
-
-function lpeg.checkedsplit(separator,str)
- local c = cache[separator]
- if not c then
- separator = P(separator)
- local other = C((1 - separator)^1)
- c = Ct(separator^0 * other * (separator^1 * other)^0)
- cache[separator] = c
- end
- return match(c,str)
-end
-
-function string.checkedsplit(str,separator)
- local c = cache[separator]
- if not c then
- separator = P(separator)
- local other = C((1 - separator)^1)
- c = Ct(separator^0 * other * (separator^1 * other)^0)
- cache[separator] = c
- end
- return match(c,str)
-end
-
-
-local f1 = string.byte
-
-local function f2(s) local c1, c2 = f1(s,1,2) return c1 * 64 + c2 - 12416 end
-local function f3(s) local c1, c2, c3 = f1(s,1,3) return (c1 * 64 + c2) * 64 + c3 - 925824 end
-local function f4(s) local c1, c2, c3, c4 = f1(s,1,4) return ((c1 * 64 + c2) * 64 + c3) * 64 + c4 - 63447168 end
-
-local utf8byte = patterns.utf8one/f1 + patterns.utf8two/f2 + patterns.utf8three/f3 + patterns.utf8four/f4
-
-patterns.utf8byte = utf8byte
-
-
-
-local cache = { }
-
-function lpeg.stripper(str)
- if type(str) == "string" then
- local s = cache[str]
- if not s then
- s = Cs(((S(str)^1)/"" + 1)^0)
- cache[str] = s
- end
- return s
- else
- return Cs(((str^1)/"" + 1)^0)
- end
-end
-
-local cache = { }
-
-function lpeg.keeper(str)
- if type(str) == "string" then
- local s = cache[str]
- if not s then
- s = Cs((((1-S(str))^1)/"" + 1)^0)
- cache[str] = s
- end
- return s
- else
- return Cs((((1-str)^1)/"" + 1)^0)
- end
-end
-
-function lpeg.frontstripper(str) -- or pattern (yet undocumented)
- return (P(str) + P(true)) * Cs(P(1)^0)
-end
-
-function lpeg.endstripper(str) -- or pattern (yet undocumented)
- return Cs((1 - P(str) * P(-1))^0)
-end
-
--- Just for fun I looked at the used bytecode and
--- p = (p and p + pp) or pp gets one more (testset).
-
-function lpeg.replacer(one,two)
- if type(one) == "table" then
- local no = #one
- if no > 0 then
- local p
- for i=1,no do
- local o = one[i]
- local pp = P(o[1]) / o[2]
- if p then
- p = p + pp
- else
- p = pp
- end
- end
- return Cs((p + 1)^0)
- end
- else
- two = two or ""
- return Cs((P(one)/two + 1)^0)
- end
-end
-
-local splitters_f, splitters_s = { }, { }
-
-function lpeg.firstofsplit(separator) -- always return value
- local splitter = splitters_f[separator]
- if not splitter then
- separator = P(separator)
- splitter = C((1 - separator)^0)
- splitters_f[separator] = splitter
- end
- return splitter
-end
-
-function lpeg.secondofsplit(separator) -- nil if not split
- local splitter = splitters_s[separator]
- if not splitter then
- separator = P(separator)
- splitter = (1 - separator)^0 * separator * C(anything^0)
- splitters_s[separator] = splitter
- end
- return splitter
-end
-
-function lpeg.balancer(left,right)
- left, right = P(left), P(right)
- return P { left * ((1 - left - right) + V(1))^0 * right }
-end
-
-
-
-local nany = utf8char/""
-
-function lpeg.counter(pattern)
- pattern = Cs((P(pattern)/" " + nany)^0)
- return function(str)
- return #match(pattern,str)
- end
-end
-
-if utfgmatch then
-
- function lpeg.count(str,what) -- replaces string.count
- if type(what) == "string" then
- local n = 0
- for _ in utfgmatch(str,what) do
- n = n + 1
- end
- return n
- else -- 4 times slower but still faster than / function
- return #match(Cs((P(what)/" " + nany)^0),str)
- end
- end
-
-else
-
- local cache = { }
-
- function lpeg.count(str,what) -- replaces string.count
- if type(what) == "string" then
- local p = cache[what]
- if not p then
- p = Cs((P(what)/" " + nany)^0)
- cache[p] = p
- end
- return #match(p,str)
- else -- 4 times slower but still faster than / function
- return #match(Cs((P(what)/" " + nany)^0),str)
- end
- end
-
-end
-
-local patterns_escapes = { -- also defines in l-string
- ["%"] = "%%",
- ["."] = "%.",
- ["+"] = "%+", ["-"] = "%-", ["*"] = "%*",
- ["["] = "%[", ["]"] = "%]",
- ["("] = "%)", [")"] = "%)",
- -- ["{"] = "%{", ["}"] = "%}"
- -- ["^"] = "%^", ["$"] = "%$",
-}
-
-local simple_escapes = { -- also defines in l-string
- ["-"] = "%-",
- ["."] = "%.",
- ["?"] = ".",
- ["*"] = ".*",
-}
-
-local p = Cs((S("-.+*%()[]") / patterns_escapes + anything)^0)
-local s = Cs((S("-.+*%()[]") / simple_escapes + anything)^0)
-
-function string.escapedpattern(str,simple)
- return match(simple and s or p,str)
-end
-
--- utf extensies
-
-lpeg.UP = lpeg.P
-
-if utfcharacters then
-
- function lpeg.US(str)
- local p
- for uc in utfcharacters(str) do
- if p then
- p = p + P(uc)
- else
- p = P(uc)
- end
- end
- return p
- end
-
-
-elseif utfgmatch then
-
- function lpeg.US(str)
- local p
- for uc in utfgmatch(str,".") do
- if p then
- p = p + P(uc)
- else
- p = P(uc)
- end
- end
- return p
- end
-
-else
-
- function lpeg.US(str)
- local p
- local f = function(uc)
- if p then
- p = p + P(uc)
- else
- p = P(uc)
- end
- end
- match((utf8char/f)^0,str)
- return p
- end
-
-end
-
-local range = Cs(utf8byte) * (Cs(utf8byte) + Cc(false))
-
-local utfchar = unicode and unicode.utf8 and unicode.utf8.char
-
-function lpeg.UR(str,more)
- local first, last
- if type(str) == "number" then
- first = str
- last = more or first
- else
- first, last = match(range,str)
- if not last then
- return P(str)
- end
- end
- if first == last then
- return P(str)
- elseif utfchar and last - first < 8 then -- a somewhat arbitrary criterium
- local p
- for i=first,last do
- if p then
- p = p + P(utfchar(i))
- else
- p = P(utfchar(i))
- end
- end
- return p -- nil when invalid range
- else
- local f = function(b)
- return b >= first and b <= last
- end
- return utf8byte / f -- nil when invalid range
- end
-end
-
-
-
-function lpeg.oneof(list,...) -- lpeg.oneof("elseif","else","if","then")
- if type(list) ~= "table" then
- list = { list, ... }
- end
- -- sort(list) -- longest match first
- local p = P(list[1])
- for l=2,#list do
- p = p + P(list[l])
- end
- return p
-end
-
-function lpeg.is_lpeg(p)
- return p and lpegtype(p) == "pattern"
-end
-
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
if not modules then modules = { } end modules ['l-table'] = {
version = 1.001,
comment = "companion to luat-lib.mkiv",
@@ -675,11 +172,12 @@ local type, next, tostring, tonumber, ipairs, table, string = type, next, tostri
local concat, sort, insert, remove = table.concat, table.sort, table.insert, table.remove
local format, find, gsub, lower, dump, match = string.format, string.find, string.gsub, string.lower, string.dump, string.match
local getmetatable, setmetatable = getmetatable, setmetatable
+local getinfo = debug.getinfo
-- Starting with version 5.2 Lua no longer provide ipairs, which makes
-- sense. As we already used the for loop and # in most places the
-- impact on ConTeXt was not that large; the remaining ipairs already
--- have been replaced. In a similar fashio we also hardly used pairs.
+-- have been replaced. In a similar fashion we also hardly used pairs.
--
-- Just in case, we provide the fallbacks as discussed in Programming
-- in Lua (http://www.lua.org/pil/7.3.html):
@@ -1022,6 +520,8 @@ end
 -- problem: there is no good number_to_string converter with the best resolution
+local function dummy() end
+
local function do_serialize(root,name,depth,level,indexed)
if level > 0 then
depth = depth .. " "
@@ -1214,19 +714,20 @@ local function do_serialize(root,name,depth,level,indexed)
end
elseif t == "function" then
if functions then
+ local f = getinfo(v).what == "C" and dump(dummy) or dump(v)
+ -- local f = getinfo(v).what == "C" and dump(function(...) return v(...) end) or dump(v)
if tk == "number" then -- or find(k,"^%d+$") then
if hexify then
- handle(format("%s [0x%04X]=loadstring(%q),",depth,k,dump(v)))
+ handle(format("%s [0x%04X]=loadstring(%q),",depth,k,f))
else
- handle(format("%s [%s]=loadstring(%q),",depth,k,dump(v)))
+ handle(format("%s [%s]=loadstring(%q),",depth,k,f))
end
elseif tk == "boolean" then
- handle(format("%s [%s]=loadstring(%q),",depth,tostring(k),dump(v)))
+ handle(format("%s [%s]=loadstring(%q),",depth,tostring(k),f))
elseif noquotes and not reserved[k] and find(k,"^%a[%w%_]*$") then
- handle(format("%s %s=loadstring(%q),",depth,k,dump(v)))
+ handle(format("%s %s=loadstring(%q),",depth,k,f))
else
- -- handle(format("%s [%q]=loadstring(%q),",depth,k,dump(v)))
- handle(format("%s [%q]=loadstring(%q),",depth,k,debug.getinfo(v).what == "C" and "C code" or dump(v)))
+ handle(format("%s [%q]=loadstring(%q),",depth,k,f))
end
end
else
@@ -1571,6 +1072,730 @@ function table.has_one_entry(t)
return t and not next(t,next(t))
end
+-- new
+
+function table.loweredkeys(t) -- maybe utf
+ local l = { }
+ for k, v in next, t do
+ l[lower(k)] = v
+ end
+ return l
+end
+
+-- new, might move (maybe duplicate)
+
+function table.unique(old)
+ local hash = { }
+ local new = { }
+ local n = 0
+ for i=1,#old do
+ local oi = old[i]
+ if not hash[oi] then
+ n = n + 1
+ new[n] = oi
+ hash[oi] = true
+ end
+ end
+ return new
+end
+
+-- function table.sorted(t,...)
+-- table.sort(t,...)
+-- return t -- still sorts in-place
+-- end
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+if not modules then modules = { } end modules ['l-lpeg'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+
+-- a new lpeg fails on a #(1-P(":")) test and really needs a + P(-1)
+
+local lpeg = require("lpeg")
+
+-- tracing (only used when we encounter a problem in integration of lpeg in luatex)
+
+local report = texio and texio.write_nl or print
+
+
+
+
+local type = type
+local byte, char, gmatch = string.byte, string.char, string.gmatch
+
+-- Beware, we predefine a bunch of patterns here and one reason for doing so
+-- is that we get consistent behaviour in some of the visualizers.
+
+lpeg.patterns = lpeg.patterns or { } -- so that we can share
+local patterns = lpeg.patterns
+
+local P, R, S, V, match = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.match
+local Ct, C, Cs, Cc = lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc
+local lpegtype = lpeg.type
+
+local utfcharacters = string.utfcharacters
+local utfgmatch = unicode and unicode.utf8.gmatch
+
+local anything = P(1)
+local endofstring = P(-1)
+local alwaysmatched = P(true)
+
+patterns.anything = anything
+patterns.endofstring = endofstring
+patterns.beginofstring = alwaysmatched
+patterns.alwaysmatched = alwaysmatched
+
+local digit, sign = R('09'), S('+-')
+local cr, lf, crlf = P("\r"), P("\n"), P("\r\n")
+local newline = crlf + S("\r\n") -- cr + lf
+local escaped = P("\\") * anything
+local squote = P("'")
+local dquote = P('"')
+local space = P(" ")
+
+local utfbom_32_be = P('\000\000\254\255')
+local utfbom_32_le = P('\255\254\000\000')
+local utfbom_16_be = P('\255\254')
+local utfbom_16_le = P('\254\255')
+local utfbom_8 = P('\239\187\191')
+local utfbom = utfbom_32_be + utfbom_32_le
+ + utfbom_16_be + utfbom_16_le
+ + utfbom_8
+local utftype = utfbom_32_be / "utf-32-be" + utfbom_32_le / "utf-32-le"
+ + utfbom_16_be / "utf-16-be" + utfbom_16_le / "utf-16-le"
+ + utfbom_8 / "utf-8" + alwaysmatched / "unknown"
+
+local utf8next = R("\128\191")
+
+patterns.utf8one = R("\000\127")
+patterns.utf8two = R("\194\223") * utf8next
+patterns.utf8three = R("\224\239") * utf8next * utf8next
+patterns.utf8four = R("\240\244") * utf8next * utf8next * utf8next
+patterns.utfbom = utfbom
+patterns.utftype = utftype
+
+local utf8char = patterns.utf8one + patterns.utf8two + patterns.utf8three + patterns.utf8four
+local validutf8char = utf8char^0 * endofstring * Cc(true) + Cc(false)
+
+patterns.utf8 = utf8char
+patterns.utf8char = utf8char
+patterns.validutf8 = validutf8char
+patterns.validutf8char = validutf8char
+
+patterns.digit = digit
+patterns.sign = sign
+patterns.cardinal = sign^0 * digit^1
+patterns.integer = sign^0 * digit^1
+patterns.float = sign^0 * digit^0 * P('.') * digit^1
+patterns.cfloat = sign^0 * digit^0 * P(',') * digit^1
+patterns.number = patterns.float + patterns.integer
+patterns.cnumber = patterns.cfloat + patterns.integer
+patterns.oct = P("0") * R("07")^1
+patterns.octal = patterns.oct
+patterns.HEX = P("0x") * R("09","AF")^1
+patterns.hex = P("0x") * R("09","af")^1
+patterns.hexadecimal = P("0x") * R("09","AF","af")^1
+patterns.lowercase = R("az")
+patterns.uppercase = R("AZ")
+patterns.letter = patterns.lowercase + patterns.uppercase
+patterns.space = space
+patterns.tab = P("\t")
+patterns.spaceortab = patterns.space + patterns.tab
+patterns.eol = S("\n\r")
+patterns.spacer = S(" \t\f\v") -- + char(0xc2, 0xa0) if we want utf (cf mail roberto)
+patterns.newline = newline
+patterns.emptyline = newline^1
+patterns.nonspacer = 1 - patterns.spacer
+patterns.whitespace = patterns.eol + patterns.spacer
+patterns.nonwhitespace = 1 - patterns.whitespace
+patterns.equal = P("=")
+patterns.comma = P(",")
+patterns.commaspacer = P(",") * patterns.spacer^0
+patterns.period = P(".")
+patterns.colon = P(":")
+patterns.semicolon = P(";")
+patterns.underscore = P("_")
+patterns.escaped = escaped
+patterns.squote = squote
+patterns.dquote = dquote
+patterns.nosquote = (escaped + (1-squote))^0
+patterns.nodquote = (escaped + (1-dquote))^0
+patterns.unsingle = (squote/"") * patterns.nosquote * (squote/"")
+patterns.undouble = (dquote/"") * patterns.nodquote * (dquote/"")
+patterns.unquoted = patterns.undouble + patterns.unsingle -- more often undouble
+patterns.unspacer = ((patterns.spacer^1)/"")^0
+
+patterns.somecontent = (anything - newline - space)^1 -- (utf8char - newline - space)^1
+patterns.beginline = #(1-newline)
+
+-- print(string.unquoted("test"))
+-- print(string.unquoted([["t\"est"]]))
+-- print(string.unquoted([["t\"est"x]]))
+-- print(string.unquoted("\'test\'"))
+-- print(string.unquoted('"test"'))
+-- print(string.unquoted('"test"'))
+
+function lpeg.anywhere(pattern) --slightly adapted from website
+ return P { P(pattern) + 1 * V(1) } -- why so complex?
+end
+
+function lpeg.splitter(pattern, action)
+ return (((1-P(pattern))^1)/action+1)^0
+end
+
+function lpeg.tsplitter(pattern, action)
+ return Ct((((1-P(pattern))^1)/action+1)^0)
+end
+
+-- problem: the separator can be an lpeg pattern, and such a pattern does not hash
+-- too well; that is quite okay though, as the key is then not garbage collected
+
+local splitters_s, splitters_m, splitters_t = { }, { }, { }
+
+local function splitat(separator,single)
+ local splitter = (single and splitters_s[separator]) or splitters_m[separator]
+ if not splitter then
+ separator = P(separator)
+ local other = C((1 - separator)^0)
+ if single then
+ local any = anything
+ splitter = other * (separator * C(any^0) + "") -- ?
+ splitters_s[separator] = splitter
+ else
+ splitter = other * (separator * other)^0
+ splitters_m[separator] = splitter
+ end
+ end
+ return splitter
+end
+
+local function tsplitat(separator)
+ local splitter = splitters_t[separator]
+ if not splitter then
+ splitter = Ct(splitat(separator))
+ splitters_t[separator] = splitter
+ end
+ return splitter
+end
+
+lpeg.splitat = splitat
+lpeg.tsplitat = tsplitat
+
+function string.splitup(str,separator)
+ if not separator then
+ separator = ","
+ end
+ return match(splitters_m[separator] or splitat(separator),str)
+end
+
+
+local cache = { }
+
+function lpeg.split(separator,str)
+ local c = cache[separator]
+ if not c then
+ c = tsplitat(separator)
+ cache[separator] = c
+ end
+ return match(c,str)
+end
+
+function string.split(str,separator)
+ local c = cache[separator]
+ if not c then
+ c = tsplitat(separator)
+ cache[separator] = c
+ end
+ return match(c,str)
+end
+
+local spacing = patterns.spacer^0 * newline -- sort of strip
+local empty = spacing * Cc("")
+local nonempty = Cs((1-spacing)^1) * spacing^-1
+local content = (empty + nonempty)^1
+
+patterns.textline = content
+
+
+local linesplitter = tsplitat(newline)
+
+patterns.linesplitter = linesplitter
+
+function string.splitlines(str)
+ return match(linesplitter,str)
+end
+
+local utflinesplitter = utfbom^-1 * tsplitat(newline)
+
+patterns.utflinesplitter = utflinesplitter
+
+function string.utfsplitlines(str)
+ return match(utflinesplitter,str or "")
+end
+
+
+local cache = { }
+
+function lpeg.checkedsplit(separator,str)
+ local c = cache[separator]
+ if not c then
+ separator = P(separator)
+ local other = C((1 - separator)^1)
+ c = Ct(separator^0 * other * (separator^1 * other)^0)
+ cache[separator] = c
+ end
+ return match(c,str)
+end
+
+function string.checkedsplit(str,separator)
+ local c = cache[separator]
+ if not c then
+ separator = P(separator)
+ local other = C((1 - separator)^1)
+ c = Ct(separator^0 * other * (separator^1 * other)^0)
+ cache[separator] = c
+ end
+ return match(c,str)
+end
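
For reference, a small usage sketch of the splitters defined above, assuming this
l-lpeg module has been loaded; checkedsplit differs from split in that empty fields
are dropped:

local t1 = lpeg.split(",","a,,b")                 -- { "a", "", "b" }
local t2 = lpeg.checkedsplit(",","a,,b")          -- { "a", "b" }
local d, m, y = string.splitup("2012-05-18","-")  -- three separate return values
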
+
+
+local function f2(s) local c1, c2 = byte(s,1,2) return c1 * 64 + c2 - 12416 end
+local function f3(s) local c1, c2, c3 = byte(s,1,3) return (c1 * 64 + c2) * 64 + c3 - 925824 end
+local function f4(s) local c1, c2, c3, c4 = byte(s,1,4) return ((c1 * 64 + c2) * 64 + c3) * 64 + c4 - 63447168 end
+
+local utf8byte = patterns.utf8one/byte + patterns.utf8two/f2 + patterns.utf8three/f3 + patterns.utf8four/f4
+
+patterns.utf8byte = utf8byte
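
A quick worked example of the decoders above (illustration only): é is U+00E9 and is
encoded in UTF-8 as the two bytes 0xC3 0xA9, so f2 computes 195*64 + 169 - 12416 = 233.

print(lpeg.match(lpeg.patterns.utf8byte,"\195\169")) -- 233
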
+
+
+
+local cache = { }
+
+function lpeg.stripper(str)
+ if type(str) == "string" then
+ local s = cache[str]
+ if not s then
+ s = Cs(((S(str)^1)/"" + 1)^0)
+ cache[str] = s
+ end
+ return s
+ else
+ return Cs(((str^1)/"" + 1)^0)
+ end
+end
+
+local cache = { }
+
+function lpeg.keeper(str)
+ if type(str) == "string" then
+ local s = cache[str]
+ if not s then
+ s = Cs((((1-S(str))^1)/"" + 1)^0)
+ cache[str] = s
+ end
+ return s
+ else
+ return Cs((((1-str)^1)/"" + 1)^0)
+ end
+end
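
A short usage sketch of the two cached factories above (illustration only, module
assumed loaded):

local strip = lpeg.stripper("0123456789") -- deletes the listed characters
local keep  = lpeg.keeper("0123456789")   -- keeps only the listed characters
print(lpeg.match(strip,"a1b2c3")) -- abc
print(lpeg.match(keep ,"a1b2c3")) -- 123
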
+
+function lpeg.frontstripper(str) -- or pattern (yet undocumented)
+ return (P(str) + P(true)) * Cs(P(1)^0)
+end
+
+function lpeg.endstripper(str) -- or pattern (yet undocumented)
+ return Cs((1 - P(str) * P(-1))^0)
+end
+
+-- Just for fun I looked at the used bytecode and
+-- p = (p and p + pp) or pp gets one more (testset).
+
+function lpeg.replacer(one,two)
+ if type(one) == "table" then
+ local no = #one
+ if no > 0 then
+ local p
+ for i=1,no do
+ local o = one[i]
+ local pp = P(o[1]) / o[2]
+ if p then
+ p = p + pp
+ else
+ p = pp
+ end
+ end
+ return Cs((p + 1)^0)
+ end
+ else
+ two = two or ""
+ return Cs((P(one)/two + 1)^0)
+ end
+end
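
Likewise for lpeg.replacer, which accepts either a single from/to pair or a table of
pairs (illustration only):

print(lpeg.match(lpeg.replacer("-","+"),"a-b-c")) -- a+b+c
print(lpeg.match(lpeg.replacer { { "foo", "bar" }, { "baz", "qux" } },"foo and baz")) -- bar and qux
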
+
+local splitters_f, splitters_s = { }, { }
+
+function lpeg.firstofsplit(separator) -- always return value
+ local splitter = splitters_f[separator]
+ if not splitter then
+ separator = P(separator)
+ splitter = C((1 - separator)^0)
+ splitters_f[separator] = splitter
+ end
+ return splitter
+end
+
+function lpeg.secondofsplit(separator) -- nil if not split
+ local splitter = splitters_s[separator]
+ if not splitter then
+ separator = P(separator)
+ splitter = (1 - separator)^0 * separator * C(anything^0)
+ splitters_s[separator] = splitter
+ end
+ return splitter
+end
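
The two split accessors behave as follows (illustration only):

print(lpeg.match(lpeg.firstofsplit(":") ,"foo:bar:baz")) -- foo
print(lpeg.match(lpeg.secondofsplit(":"),"foo:bar:baz")) -- bar:baz
print(lpeg.match(lpeg.secondofsplit(":"),"nocolonhere")) -- nil, nothing to split on
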
+
+function lpeg.balancer(left,right)
+ left, right = P(left), P(right)
+ return P { left * ((1 - left - right) + V(1))^0 * right }
+end
+
+
+
+local nany = utf8char/""
+
+function lpeg.counter(pattern)
+ pattern = Cs((P(pattern)/" " + nany)^0)
+ return function(str)
+ return #match(pattern,str)
+ end
+end
+
+if utfgmatch then
+
+ function lpeg.count(str,what) -- replaces string.count
+ if type(what) == "string" then
+ local n = 0
+ for _ in utfgmatch(str,what) do
+ n = n + 1
+ end
+ return n
+ else -- 4 times slower but still faster than / function
+ return #match(Cs((P(what)/" " + nany)^0),str)
+ end
+ end
+
+else
+
+ local cache = { }
+
+ function lpeg.count(str,what) -- replaces string.count
+ if type(what) == "string" then
+ local p = cache[what]
+ if not p then
+ p = Cs((P(what)/" " + nany)^0)
+ cache[p] = p
+ end
+ return #match(p,str)
+ else -- 4 times slower but still faster than / function
+ return #match(Cs((P(what)/" " + nany)^0),str)
+ end
+ end
+
+end
+
+local patterns_escapes = { -- also defined in l-string
+ ["%"] = "%%",
+ ["."] = "%.",
+ ["+"] = "%+", ["-"] = "%-", ["*"] = "%*",
+ ["["] = "%[", ["]"] = "%]",
+ ["("] = "%(", [")"] = "%)",
+ -- ["{"] = "%{", ["}"] = "%}"
+ -- ["^"] = "%^", ["$"] = "%$",
+}
+
+local simple_escapes = { -- also defined in l-string
+ ["-"] = "%-",
+ ["."] = "%.",
+ ["?"] = ".",
+ ["*"] = ".*",
+}
+
+local p = Cs((S("-.+*%()[]") / patterns_escapes + anything)^0)
+local s = Cs((S("-.+*%()[]") / simple_escapes + anything)^0)
+
+function string.escapedpattern(str,simple)
+ return match(simple and s or p,str)
+end
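
A small illustration of the two escaping modes (illustration only):

print(string.escapedpattern("1.2-3"))        -- 1%.2%-3  (safe for string.find)
print(string.escapedpattern("*.mkiv",true))  -- .*%.mkiv (simple glob-like escaping)
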
+
+-- utf extensions
+
+lpeg.UP = lpeg.P
+
+if utfcharacters then
+
+ function lpeg.US(str)
+ local p
+ for uc in utfcharacters(str) do
+ if p then
+ p = p + P(uc)
+ else
+ p = P(uc)
+ end
+ end
+ return p
+ end
+
+
+elseif utfgmatch then
+
+ function lpeg.US(str)
+ local p
+ for uc in utfgmatch(str,".") do
+ if p then
+ p = p + P(uc)
+ else
+ p = P(uc)
+ end
+ end
+ return p
+ end
+
+else
+
+ function lpeg.US(str)
+ local p
+ local f = function(uc)
+ if p then
+ p = p + P(uc)
+ else
+ p = P(uc)
+ end
+ end
+ match((utf8char/f)^0,str)
+ return p
+ end
+
+end
+
+local range = Cs(utf8byte) * (Cs(utf8byte) + Cc(false))
+
+local utfchar = unicode and unicode.utf8 and unicode.utf8.char
+
+function lpeg.UR(str,more)
+ local first, last
+ if type(str) == "number" then
+ first = str
+ last = more or first
+ else
+ first, last = match(range,str)
+ if not last then
+ return P(str)
+ end
+ end
+ if first == last then
+ return P(str)
+ elseif utfchar and last - first < 8 then -- a somewhat arbitrary criterion
+ local p
+ for i=first,last do
+ if p then
+ p = p + P(utfchar(i))
+ else
+ p = P(utfchar(i))
+ end
+ end
+ return p -- nil when invalid range
+ else
+ local f = function(b)
+ return b >= first and b <= last
+ end
+ return utf8byte / f -- nil when invalid range
+ end
+end
+
+
+
+function lpeg.oneof(list,...) -- lpeg.oneof("elseif","else","if","then")
+ if type(list) ~= "table" then
+ list = { list, ... }
+ end
+ -- sort(list) -- longest match first
+ local p = P(list[1])
+ for l=2,#list do
+ p = p + P(list[l])
+ end
+ return p
+end
+
+function lpeg.is_lpeg(p)
+ return p and lpegtype(p) == "pattern"
+end
+
+-- For the moment here, but it might move to utilities. Beware, we need to
+-- have the longest keyword first, so 'aaa' comes before 'aa', which is why we
+-- loop back from the end, i.e. prepend.
+
+local sort, fastcopy, sortedkeys = table.sort, table.fastcopy, table.sortedkeys -- dependency!
+
+function lpeg.append(list,pp,delayed,checked)
+ local p = pp
+ if #list > 0 then
+ local keys = fastcopy(list)
+ sort(keys)
+ for i=#keys,1,-1 do
+ local k = keys[i]
+ if p then
+ p = P(k) + p
+ else
+ p = P(k)
+ end
+ end
+ elseif delayed then -- hm, it looks like the lpeg parser resolves anyway
+ local keys = sortedkeys(list)
+ if p then
+ for i=1,#keys,1 do
+ local k = keys[i]
+ local v = list[k]
+ p = P(k)/list + p
+ end
+ else
+ for i=1,#keys do
+ local k = keys[i]
+ local v = list[k]
+ if p then
+ p = P(k) + p
+ else
+ p = P(k)
+ end
+ end
+ if p then
+ p = p / list
+ end
+ end
+ elseif checked then
+ -- problem: substitution gives a capture
+ local keys = sortedkeys(list)
+ for i=1,#keys do
+ local k = keys[i]
+ local v = list[k]
+ if p then
+ if k == v then
+ p = P(k) + p
+ else
+ p = P(k)/v + p
+ end
+ else
+ if k == v then
+ p = P(k)
+ else
+ p = P(k)/v
+ end
+ end
+ end
+ else
+ local keys = sortedkeys(list)
+ for i=1,#keys do
+ local k = keys[i]
+ local v = list[k]
+ if p then
+ p = P(k)/v + p
+ else
+ p = P(k)/v
+ end
+ end
+ end
+ return p
+end
+
+-- inspect(lpeg.append({ a = "1", aa = "1", aaa = "1" } ,nil,true))
+-- inspect(lpeg.append({ ["degree celsius"] = "1", celsius = "1", degree = "1" } ,nil,true))
+
+-- function lpeg.exact_match(words,case_insensitive)
+-- local pattern = concat(words)
+-- if case_insensitive then
+-- local pattern = S(upper(characters)) + S(lower(characters))
+-- local list = { }
+-- for i=1,#words do
+-- list[lower(words[i])] = true
+-- end
+-- return Cmt(pattern^1, function(_,i,s)
+-- return list[lower(s)] and i
+-- end)
+-- else
+-- local pattern = S(concat(words))
+-- local list = { }
+-- for i=1,#words do
+-- list[words[i]] = true
+-- end
+-- return Cmt(pattern^1, function(_,i,s)
+-- return list[s] and i
+-- end)
+-- end
+-- end
+
+-- experiment:
+
+local function make(t)
+ local p
+-- for k, v in next, t do
+ for k, v in table.sortedhash(t) do
+ if not p then
+ if next(v) then
+ p = P(k) * make(v)
+ else
+ p = P(k)
+ end
+ else
+ if next(v) then
+ p = p + P(k) * make(v)
+ else
+ p = p + P(k)
+ end
+ end
+ end
+ return p
+end
+
+function lpeg.utfchartabletopattern(list)
+ local tree = { }
+ for i=1,#list do
+ local t = tree
+ for c in gmatch(list[i],".") do
+ if not t[c] then
+ t[c] = { }
+ end
+ t = t[c]
+ end
+ end
+ return make(tree)
+end
+
+-- inspect ( lpeg.utfchartabletopattern {
+-- utfchar(0x00A0), -- nbsp
+-- utfchar(0x2000), -- enquad
+-- utfchar(0x2001), -- emquad
+-- utfchar(0x2002), -- enspace
+-- utfchar(0x2003), -- emspace
+-- utfchar(0x2004), -- threeperemspace
+-- utfchar(0x2005), -- fourperemspace
+-- utfchar(0x2006), -- sixperemspace
+-- utfchar(0x2007), -- figurespace
+-- utfchar(0x2008), -- punctuationspace
+-- utfchar(0x2009), -- breakablethinspace
+-- utfchar(0x200A), -- hairspace
+-- utfchar(0x200B), -- zerowidthspace
+-- utfchar(0x202F), -- narrownobreakspace
+-- utfchar(0x205F), -- math thinspace
+-- } )
+
end -- of closure
@@ -1831,6 +2056,11 @@ function io.readstring(f,n,m)
return str
end
+--
+
+if not io.i_limiter then function io.i_limiter() end end -- dummy so we can test safely
+if not io.o_limiter then function io.o_limiter() end end -- dummy so we can test safely
+
end -- of closure
@@ -2061,7 +2291,7 @@ if not modules then modules = { } end modules ['l-os'] = {
-- maybe build io.flush in os.execute
local os = os
-local find, format, gsub, upper = string.find, string.format, string.gsub, string.upper
+local find, format, gsub, upper, gmatch = string.find, string.format, string.gsub, string.upper, string.gmatch
local concat = table.concat
local random, ceil = math.random, math.ceil
local rawget, rawset, type, getmetatable, setmetatable, tonumber = rawget, rawset, type, getmetatable, setmetatable, tonumber
@@ -2167,9 +2397,9 @@ os.type = os.type or (io.pathseparator == ";" and "windows") or "unix"
os.name = os.name or (os.type == "windows" and "mswin" ) or "linux"
if os.type == "windows" then
- os.libsuffix, os.binsuffix = 'dll', 'exe'
+ os.libsuffix, os.binsuffix, os.binsuffixes = 'dll', 'exe', { 'exe', 'cmd', 'bat' }
else
- os.libsuffix, os.binsuffix = 'so', ''
+ os.libsuffix, os.binsuffix, os.binsuffixes = 'so', '', { '' }
end
function os.launch(str)
@@ -2400,6 +2630,39 @@ function os.timezone(delta)
end
end
+local memory = { }
+
+local function which(filename)
+ local fullname = memory[filename]
+ if fullname == nil then
+ local suffix = file.suffix(filename)
+ local suffixes = suffix == "" and os.binsuffixes or { suffix }
+ for directory in gmatch(os.getenv("PATH"),"[^" .. io.pathseparator .."]+") do
+ local df = file.join(directory,filename)
+ for i=1,#suffixes do
+ local dfs = file.addsuffix(df,suffixes[i])
+ if io.exists(dfs) then
+ fullname = dfs
+ break
+ end
+ end
+ end
+ if not fullname then
+ fullname = false
+ end
+ memory[filename] = fullname
+ end
+ return fullname
+end
+
+os.which = which
+os.where = which
+
+-- print(os.which("inkscape.exe"))
+-- print(os.which("inkscape"))
+-- print(os.which("gs.exe"))
+-- print(os.which("ps2pdf"))
+
end -- of closure
@@ -2499,10 +2762,10 @@ end
local trick_1 = char(1)
local trick_2 = "^" .. trick_1 .. "/+"
-function file.join(...)
+function file.join(...) -- rather dirty
local lst = { ... }
local a, b = lst[1], lst[2]
- if a == "" then
+ if not a or a == "" then -- the "not a" test also catches a nil first argument
lst[1] = trick_1
elseif b and find(a,"^/+$") and find(b,"^/") then
lst[1] = ""
@@ -2523,6 +2786,15 @@ function file.join(...)
end
+-- We should be able to use:
+--
+-- function file.is_writable(name)
+-- local a = attributes(name) or attributes(dirname(name,"."))
+-- return a and sub(a.permissions,2,2) == "w"
+-- end
+--
+-- But after some testing Taco and I came up with:
+
function file.is_writable(name)
if lfs.isdir(name) then
name = name .. "/m_t_x_t_e_s_t.tmp"
@@ -2532,12 +2804,17 @@ function file.is_writable(name)
os.remove(name)
return true
end
- else
- local existing = lfs.isfile(name)
- f = io.open(name,"ab")
+ elseif lfs.isfile(name) then
+ local f = io.open(name,"ab")
if f then
f:close()
- if not existing then os.remove(name) end
+ return true
+ end
+ else
+ local f = io.open(name,"ab")
+ if f then
+ f:close()
+ os.remove(name)
return true
end
end
@@ -2594,7 +2871,7 @@ function file.collapsepath(str,anchor)
if element == '.' then
-- do nothing
elseif element == '..' then
- local n = i -1
+ local n = i - 1
while n > 0 do
local element = oldelements[n]
if element ~= '..' and element ~= '.' then
@@ -2690,10 +2967,42 @@ local path = C(((1-slash)^0 * slash)^0)
local suffix = period * C(P(1-period)^0 * P(-1))
local base = C((1-suffix)^0)
-local pattern = (drive + Cc("")) * (path + Cc("")) * (base + Cc("")) * (suffix + Cc(""))
+drive = drive + Cc("")
+path = path + Cc("")
+base = base + Cc("")
+suffix = suffix + Cc("")
-function file.splitname(str) -- returns drive, path, base, suffix
- return lpegmatch(pattern,str)
+local pattern_a = drive * path * base * suffix
+local pattern_b = path * base * suffix
+local pattern_c = C(drive * path) * C(base * suffix)
+
+function file.splitname(str,splitdrive)
+ if splitdrive then
+ return lpegmatch(pattern_a,str) -- returns drive, path, base, suffix
+ else
+ return lpegmatch(pattern_b,str) -- returns path, base, suffix
+ end
+end
+
+function file.nametotable(str,splitdrive) -- returns table
+ local path, drive, subpath, name, base, suffix = lpegmatch(pattern_c,str)
+ if splitdrive then
+ return {
+ path = path,
+ drive = drive,
+ subpath = subpath,
+ name = name,
+ base = base,
+ suffix = suffix,
+ }
+ else
+ return {
+ path = path,
+ name = name,
+ base = base,
+ suffix = suffix,
+ }
+ end
end
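
A usage sketch of the two splitters, assuming the drive/path/base/suffix patterns
defined just above this hunk (illustration only, the exact values depend on those
patterns):

local path, base, suffix = file.splitname("tex/context/base/mtxrun.lua")
-- path = "tex/context/base/", base = "mtxrun", suffix = "lua"
local t = file.nametotable("tex/context/base/mtxrun.lua")
-- t = { path = "tex/context/base/", name = "mtxrun.lua", base = "mtxrun", suffix = "lua" }
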
-- function test(t) for k, v in next, t do print(v, "=>", file.splitname(v)) end end
@@ -2828,12 +3137,19 @@ local nothing = Cc("")
local escaped = (plus / " ") + (percent * C(hexdigit * hexdigit) / tochar)
-- we assume schemes with more than 1 character (in order to avoid problems with windows disks)
+-- we also assume that when we have a scheme, we also have an authority
+
+local schemestr = Cs((escaped+(1-colon-slash-qmark-hash))^2)
+local authoritystr = Cs((escaped+(1- slash-qmark-hash))^0)
+local pathstr = Cs((escaped+(1- qmark-hash))^0)
+local querystr = Cs((escaped+(1- hash))^0)
+local fragmentstr = Cs((escaped+(1- endofstring))^0)
-local scheme = Cs((escaped+(1-colon-slash-qmark-hash))^2) * colon * slash * slash + nothing
-local authority = Cs((escaped+(1- slash-qmark-hash))^0) + nothing
-local path = slash * Cs((escaped+(1- qmark-hash))^0) + nothing
-local query = qmark * Cs((escaped+(1- hash))^0) + nothing
-local fragment = hash * Cs((escaped+(1- endofstring))^0) + nothing
+local scheme = schemestr * colon + nothing
+local authority = slash * slash * authoritystr + nothing
+local path = slash * pathstr + nothing
+local query = qmark * querystr + nothing
+local fragment = hash * fragmentstr + nothing
local validurl = scheme * authority * path * query * fragment
local parser = Ct(validurl)
@@ -2854,11 +3170,14 @@ local function split(str)
return (type(str) == "string" and lpegmatch(parser,str)) or str
end
+local isscheme = schemestr * colon * slash * slash -- this test also assumes authority
+
local function hasscheme(str)
- local scheme = lpegmatch(scheme,str) -- at least one character
- return scheme and scheme ~= ""
+ local scheme = lpegmatch(isscheme,str) -- at least one character
+ return scheme ~= "" and scheme or false
end
+
-- todo: cache them
local rootletter = R("az","AZ")
@@ -3007,8 +3326,6 @@ local attributes = lfs.attributes
local walkdir = lfs.dir
local isdir = lfs.isdir
local isfile = lfs.isfile
-local mkdir = lfs.mkdir
-local chdir = lfs.chdir
local currentdir = lfs.currentdir
-- handy
@@ -3056,13 +3373,13 @@ local function collectpattern(path,patt,recurse,result)
 local ok, scanner, first
result = result or { }
if path == "/" then
- ok, scanner = xpcall(function() return walkdir(path..".") end, function() end) -- kepler safe
+ ok, scanner, first = xpcall(function() return walkdir(path..".") end, function() end) -- kepler safe
else
- ok, scanner = xpcall(function() return walkdir(path) end, function() end) -- kepler safe
+ ok, scanner, first = xpcall(function() return walkdir(path) end, function() end) -- kepler safe
end
if ok and type(scanner) == "function" then
if not find(path,"/$") then path = path .. '/' end
- for name in scanner do
+ for name in scanner, first do
local full = path .. name
local attr = attributes(full)
local mode = attr.mode
@@ -3245,7 +3562,7 @@ if onwindows then
pth = pth .. "/" .. s
end
if make_indeed and not isdir(pth) then
- mkdir(pth)
+ lfs.mkdir(pth)
end
end
return pth, (isdir(pth) == true)
@@ -3277,7 +3594,7 @@ else
pth = pth .. "/" .. s
end
if make_indeed and not first and not isdir(pth) then
- mkdir(pth)
+ lfs.mkdir(pth)
end
end
else
@@ -3285,7 +3602,7 @@ else
for s in gmatch(str,"[^/]+") do
pth = pth .. "/" .. s
if make_indeed and not isdir(pth) then
- mkdir(pth)
+ lfs.mkdir(pth)
end
end
end
@@ -3313,10 +3630,10 @@ if onwindows then
first, last = match(str,"^([a-zA-Z]:)(.*)$")
if first and not find(last,"^/") then
local d = currentdir()
- if chdir(first) then
+ if lfs.chdir(first) then
first = dir.current()
end
- chdir(d)
+ lfs.chdir(d)
end
end
if not first then
@@ -3381,10 +3698,6 @@ local type, tonumber = type, tonumber
boolean = boolean or { }
local boolean = boolean
--- function boolean.tonumber(b)
--- return b and 1 or 0 -- test and test and return or return
--- end
-
function boolean.tonumber(b)
if b then return 1 else return 0 end -- test and return or return
end
@@ -3545,7 +3858,7 @@ local function utf16_to_utf8_be(t)
if right then
local now = 256*left + right
if more > 0 then
- now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000
+ now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000 -- the 0x10000 smells wrong
more = 0
r = r + 1
result[r] = utfchar(now)
@@ -3573,7 +3886,7 @@ local function utf16_to_utf8_le(t)
if right then
local now = 256*right + left
if more > 0 then
- now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000
+ now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000 -- the 0x10000 smells wrong
more = 0
r = r + 1
result[r] = utfchar(now)
@@ -3590,14 +3903,14 @@ local function utf16_to_utf8_le(t)
return t
end
-local function utf32_to_utf8_be(str)
+local function utf32_to_utf8_be(t)
if type(t) == "string" then
- t = utfsplitlines(str)
+ t = utfsplitlines(t)
end
local result = { } -- we reuse result
for i=1,#t do
local r, more = 0, -1
- for a,b in bytepairs(str) do
+ for a,b in bytepairs(t[i]) do
if a and b then
if more < 0 then
more = 256*256*256*a + 256*256*b
@@ -3612,17 +3925,17 @@ local function utf32_to_utf8_be(str)
end
t[i] = concat(result,"",1,r)
end
- return result
+ return t
end
-local function utf32_to_utf8_le(str)
+local function utf32_to_utf8_le(t)
if type(t) == "string" then
- t = utfsplitlines(str)
+ t = utfsplitlines(t)
end
local result = { } -- we reuse result
for i=1,#t do
local r, more = 0, -1
- for a,b in bytepairs(str) do
+ for a,b in bytepairs(t[i]) do
if a and b then
if more < 0 then
more = 256*b + a
@@ -3637,7 +3950,7 @@ local function utf32_to_utf8_le(str)
end
t[i] = concat(result,"",1,r)
end
- return result
+ return t
end
unicode.utf32_to_utf8_be = utf32_to_utf8_be
@@ -3706,15 +4019,67 @@ end
local lpegmatch = lpeg.match
-local utftype = lpeg.patterns.utftype
+local patterns = lpeg.patterns
+local utftype = patterns.utftype
function unicode.filetype(data)
return data and lpegmatch(utftype,data) or "unknown"
end
+local toentities = lpeg.Cs (
+ (
+ patterns.utf8one
+ + (
+ patterns.utf8two
+ + patterns.utf8three
+ + patterns.utf8four
+ ) / function(s) local b = utfbyte(s) if b < 127 then return s else return format("&#%X;",b) end end
+ )^0
+)
+
+patterns.toentities = toentities
+
+function utf.toentities(str)
+ return lpegmatch(toentities,str)
+end
+
+local P, C, R, Cs = lpeg.P, lpeg.C, lpeg.R, lpeg.Cs
+
+local one = P(1)
+local two = C(1) * C(1)
+local four = C(R(utfchar(0xD8),utfchar(0xFF))) * C(1) * C(1) * C(1)
+
+local pattern = P("\254\255") * Cs( (
+ four / function(a,b,c,d)
+ local ab = 0xFF * byte(a) + byte(b)
+ local cd = 0xFF * byte(c) + byte(d)
+ return utfchar((ab-0xD800)*0x400 + (cd-0xDC00) + 0x10000)
+ end
+ + two / function(a,b)
+ return utfchar(byte(a)*256 + byte(b))
+ end
+ + one
+ )^1 )
+ + P("\255\254") * Cs( (
+ four / function(b,a,d,c)
+ local ab = 0xFF * byte(a) + byte(b)
+ local cd = 0xFF * byte(c) + byte(d)
+ return utfchar((ab-0xD800)*0x400 + (cd-0xDC00) + 0x10000)
+ end
+ + two / function(b,a)
+ return utfchar(byte(a)*256 + byte(b))
+ end
+ + one
+ )^1 )
+
+function string.toutf(s)
+ return lpegmatch(pattern,s) or s -- todo: utf32
+end
+
+
end -- of closure
do -- create closure to overcome 200 locals limit
@@ -3750,8 +4115,8 @@ if not math.sind then
end
if not math.odd then
- function math.odd (n) return n % 2 == 0 end
- function math.even(n) return n % 2 ~= 0 end
+ function math.odd (n) return n % 2 ~= 0 end
+ function math.even(n) return n % 2 == 0 end
end
@@ -3771,9 +4136,10 @@ utilities = utilities or {}
utilities.tables = utilities.tables or { }
local tables = utilities.tables
-local format, gmatch = string.format, string.gmatch
+local format, gmatch, rep = string.format, string.gmatch, string.rep
local concat, insert, remove = table.concat, table.insert, table.remove
local setmetatable, getmetatable, tonumber, tostring = setmetatable, getmetatable, tonumber, tostring
+local type, next, rawset, tonumber = type, next, rawset, tonumber
function tables.definetable(target) -- defines undefined tables
local composed, t, n = nil, { }, 0
@@ -3789,14 +4155,31 @@ function tables.definetable(target) -- defines undefined tables
return concat(t,"\n")
end
-function tables.accesstable(target)
- local t = _G
+function tables.accesstable(target,root)
+ local t = root or _G
for name in gmatch(target,"([^%.]+)") do
t = t[name]
+ if not t then
+ return
+ end
end
return t
end
+function tables.migratetable(target,v,root)
+ local t = root or _G
+ local names = string.split(target,".")
+ for i=1,#names-1 do
+ local name = names[i]
+ t[name] = t[name] or { }
+ t = t[name]
+ if not t then
+ return
+ end
+ end
+ t[names[#names]] = v
+end
+
function tables.removevalue(t,value) -- todo: n
if value then
for i=1,#t do
@@ -3840,13 +4223,19 @@ end
-- experimental
-local function toxml(t,d,result)
+local function toxml(t,d,result,step)
for k, v in table.sortedpairs(t) do
if type(v) == "table" then
- result[#result+1] = format("%s<%s>",d,k)
- toxml(v,d.." ",result)
- result[#result+1] = format("%s</%s>",d,k)
- elseif tonumber(k) then
+ if type(k) == "number" then
+ result[#result+1] = format("%s<entry n='%s'>",d,k)
+ toxml(v,d..step,result,step)
+ result[#result+1] = format("%s</entry>",d,k)
+ else
+ result[#result+1] = format("%s<%s>",d,k)
+ toxml(v,d..step,result,step)
+ result[#result+1] = format("%s</%s>",d,k)
+ end
+ elseif type(k) == "number" then
result[#result+1] = format("%s<entry n='%s'>%s</entry>",d,k,v,k)
else
result[#result+1] = format("%s<%s>%s</%s>",d,k,tostring(v),k)
@@ -3854,17 +4243,56 @@ local function toxml(t,d,result)
end
end
-function table.toxml(t,name,nobanner)
+function table.toxml(t,name,nobanner,indent,spaces)
local noroot = name == false
local result = (nobanner or noroot) and { } or { "<?xml version='1.0' standalone='yes' ?>" }
+ local indent = rep(" ",indent or 0)
+ local spaces = rep(" ",spaces or 1)
if noroot then
- toxml( t, "", result)
+ toxml( t, indent, result, spaces)
else
- toxml( { [name or "root"] = t }, "", result)
+ toxml( { [name or "root"] = t }, indent, result, spaces)
end
return concat(result,"\n")
end
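
A small illustration of the extended call: the extra indent and spaces arguments
control the leading whitespace per nesting level, and numerically indexed values now
come out as <entry n='...'> elements (illustration only, exact output depends on
table.sortedpairs):

print(table.toxml({ title = "test", list = { "a", "b" } },"data",true,0,2))
-- yields a fragment rooted at <data>, with <entry n='1'>a</entry> etc. inside <list>
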
+-- also experimental
+
+-- encapsulate(table,utilities.tables)
+-- encapsulate(table,utilities.tables,true)
+-- encapsulate(table,true)
+
+function tables.encapsulate(core,capsule,protect)
+ if type(capsule) ~= "table" then
+ protect = true
+ capsule = { }
+ end
+ for key, value in next, core do
+ if capsule[key] then
+ print(format("\ninvalid inheritance '%s' in '%s'",key,tostring(core)))
+ os.exit()
+ else
+ capsule[key] = value
+ end
+ end
+ if protect then
+ for key, value in next, core do
+ core[key] = nil
+ end
+ setmetatable(core, {
+ __index = capsule,
+ __newindex = function(t,key,value)
+ if capsule[key] then
+ print(format("\ninvalid overload '%s' in '%s'",key,tostring(core)))
+ os.exit()
+ else
+ rawset(t,key,value)
+ end
+ end
+ } )
+ end
+end
+
end -- of closure
@@ -3886,8 +4314,8 @@ local storage = utilities.storage
function storage.mark(t)
if not t then
- texio.write_nl("fatal error: storage '%s' cannot be marked",t)
- os.exit()
+ texio.write_nl("fatal error: storage cannot be marked")
+ return -- os.exit()
end
local m = getmetatable(t)
if not m then
@@ -3916,8 +4344,8 @@ end
function storage.checked(t)
if not t then
- texio.write_nl("fatal error: storage '%s' has not been allocated",t)
- os.exit()
+ texio.write_nl("fatal error: storage has not been allocated")
+ return -- os.exit()
end
return t
end
@@ -3946,13 +4374,13 @@ end
-- table namespace ?
-local function f_empty () return "" end -- t,k
-local function f_self (t,k) t[k] = k return k end
-local function f_ignore() end -- t,k,v
+local function f_empty () return "" end -- t,k
+local function f_self (t,k) t[k] = k return k end
+local function f_ignore() end -- t,k,v
-local t_empty = { __index = empty }
-local t_self = { __index = self }
-local t_ignore = { __newindex = ignore }
+local t_empty = { __index = f_empty }
+local t_self = { __index = f_self }
+local t_ignore = { __newindex = f_ignore }
function table.setmetatableindex(t,f)
local m = getmetatable(t)
@@ -3973,6 +4401,7 @@ function table.setmetatableindex(t,f)
setmetatable(t,{ __index = f })
end
end
+ return t
end
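
The added return value makes these setters usable inside an expression; a minimal
sketch (illustration only):

local cache = table.setmetatableindex({ }, function(t,k)
    local v = k .. "!" -- compute and memoize the value for a missing key
    t[k] = v
    return v
end)
print(cache.hi) -- hi!
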
function table.setmetatablenewindex(t,f)
@@ -3990,6 +4419,7 @@ function table.setmetatablenewindex(t,f)
setmetatable(t,{ __newindex = f })
end
end
+ return t
end
function table.setmetatablecall(t,f)
@@ -3999,6 +4429,7 @@ function table.setmetatablecall(t,f)
else
setmetatable(t,{ __call = f })
end
+ return t
end
function table.setmetatablekey(t,key,value)
@@ -4008,6 +4439,7 @@ function table.setmetatablekey(t,key,value)
setmetatable(t,m)
end
m[key] = value
+ return t
end
function table.getmetatablekey(t,key,value)
@@ -4176,14 +4608,29 @@ utilities = utilities or {}
utilities.lua = utilities.lua or { }
utilities.report = logs and logs.reporter("system") or print
-function utilities.lua.compile(luafile,lucfile,cleanup,strip) -- defaults: cleanup=false strip=true
+local function stupidcompile(luafile,lucfile)
+ local data = io.loaddata(luafile)
+ if data and data ~= "" then
+ local chunk = loadstring(data) -- string.dump needs a function, so compile the source first
+ data = chunk and string.dump(chunk)
+ if data and data ~= "" then
+ io.savedata(lucfile,data)
+ end
+ end
+end
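+
+-- this fallback just compiles the chunk in-process and dumps the bytecode; it
+-- is only used when spawning (tex)luac fails and it does not strip debug info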
+
+function utilities.lua.compile(luafile,lucfile,cleanup,strip,fallback) -- defaults: cleanup=false strip=true
utilities.report("lua: compiling %s into %s",luafile,lucfile)
os.remove(lucfile)
local command = "-o " .. string.quoted(lucfile) .. " " .. string.quoted(luafile)
if strip ~= false then
command = "-s " .. command
end
- local done = os.spawn("texluac " .. command) == 0 or os.spawn("luac " .. command) == 0
+ local done = os.spawn("texluac " .. command) == 0 -- or os.spawn("luac " .. command) == 0
+ if not done and fallback then
+ utilities.report("lua: dumping %s into %s (unstripped)",luafile,lucfile)
+ stupidcompile(luafile,lucfile) -- maybe use the stripper we have elsewhere
+ cleanup = false -- better to first see how bad it is
+ end
if done and cleanup == true and lfs.isfile(lucfile) and lfs.isfile(luafile) then
utilities.report("lua: removing %s",luafile)
os.remove(luafile)
@@ -4210,9 +4657,9 @@ if not modules then modules = { } end modules ['util-prs'] = {
license = "see context related readme files"
}
-local P, R, V, C, Ct, Carg = lpeg.P, lpeg.R, lpeg.V, lpeg.C, lpeg.Ct, lpeg.Carg
+local P, R, V, C, Ct, Cs, Carg = lpeg.P, lpeg.R, lpeg.V, lpeg.C, lpeg.Ct, lpeg.Cs, lpeg.Carg
local lpegmatch = lpeg.match
-local concat, format, gmatch = table.concat, string.format, string.gmatch
+local concat, format, gmatch, find = table.concat, string.format, string.gmatch, string.find
local tostring, type, next = tostring, type, next
utilities = utilities or {}
@@ -4240,8 +4687,12 @@ local rbrace = P("}")
local nobrace = 1 - (lbrace+rbrace)
local nested = P { lbrace * (nobrace + V(1))^0 * rbrace }
local spaces = space^0
+local argument = Cs((lbrace/"") * ((nobrace + nested)^0) * (rbrace/""))
+local content = (1-P(-1))^0
-lpeg.patterns.nested = nested
+lpeg.patterns.nested = nested -- no capture
+lpeg.patterns.argument = argument -- argument after e.g. =
+lpeg.patterns.content = content -- rest after e.g =
local value = P(lbrace * C((nobrace + nested)^0) * rbrace) + C((nested + (1-comma))^0)
@@ -4322,9 +4773,15 @@ parsers.patterns.settings_to_array = pattern
-- we could use a weak table as cache
-function parsers.settings_to_array(str)
+function parsers.settings_to_array(str,strict)
if not str or str == "" then
return { }
+ elseif strict then
+ if find(str,"{") then
+ return lpegmatch(pattern,str)
+ else
+ return { str }
+ end
else
return lpegmatch(pattern,str)
end
@@ -4562,61 +5019,55 @@ local names = { }
-- one
local function hook()
- local f = getinfo(2,"f").func
- local n = getinfo(2,"Sn")
--- if n.what == "C" and n.name then print (n.namewhat .. ': ' .. n.name) end
+ local f = getinfo(2) -- "nS"
if f then
- local cf = counters[f]
- if cf == nil then
- counters[f] = 1
- names[f] = n
- else
- counters[f] = cf + 1
- end
- end
-end
-
-local function getname(func)
- local n = names[func]
- if n then
- if n.what == "C" then
- return n.name or '<anonymous>'
+ local n = "unknown"
+ if f.what == "C" then
+ n = f.name or '<anonymous>'
+ if not names[n] then
+ names[n] = format("%42s",n)
+ end
else
-- source short_src linedefined what name namewhat nups func
- local name = n.name or n.namewhat or n.what
- if not name or name == "" then name = "?" end
- return format("%s : %s : %s", n.short_src or "unknown source", n.linedefined or "--", name)
+ n = f.name or f.namewhat or f.what
+ if not n or n == "" then
+ n = "?"
+ end
+ if not names[n] then
+ names[n] = format("%42s : % 5i : %s",n,f.linedefined or 0,f.short_src or "unknown source")
+ end
end
- else
- return "unknown"
+ counters[n] = (counters[n] or 0) + 1
end
end
-function debugger.showstats(printer,threshold)
+function debugger.showstats(printer,threshold) -- hm, something has changed, rubbish now
printer = printer or texio.write or print
threshold = threshold or 0
local total, grandtotal, functions = 0, 0, 0
- printer("\n") -- ugly but ok
- -- table.sort(counters)
- for func, count in next, counters do
- if count > threshold then
- local name = getname(func)
- if not find(name,"for generator") then
- printer(format("%8i %s", count, name))
- total = total + count
- end
+ local dataset = { }
+ for name, count in next, counters do
+ dataset[#dataset+1] = { name, count }
+ end
+ table.sort(dataset,function(a,b) return a[2] == b[2] and b[1] > a[1] or a[2] > b[2] end)
+ for i=1,#dataset do
+ local d = dataset[i]
+ local name = d[1]
+ local count = d[2]
+ if count > threshold and not find(name,"for generator") then -- move up
+ printer(format("%8i %s\n", count, names[name]))
+ total = total + count
end
grandtotal = grandtotal + count
functions = functions + 1
end
- printer(format("functions: %s, total: %s, grand total: %s, threshold: %s\n", functions, total, grandtotal, threshold))
+ printer("\n")
+ printer(format("functions : % 10i\n", functions))
+ printer(format("total : % 10i\n", total))
+ printer(format("grand total: % 10i\n", grandtotal))
+ printer(format("threshold : % 10i\n", threshold))
end
--- two
-
-
--- rest
-
function debugger.savestats(filename,threshold)
local f = io.open(filename,'w')
if f then
@@ -4638,6 +5089,7 @@ end
local is_node = node and node.is_node
+local is_lpeg = lpeg and lpeg.type
function inspect(i) -- global function
local ti = type(i)
@@ -4645,11 +5097,30 @@ function inspect(i) -- global function
table.print(i,"table")
elseif is_node and is_node(i) then
table.print(nodes.astable(i),tostring(i))
+ elseif is_lpeg and is_lpeg(i) then
+ lpeg.print(i)
else
print(tostring(i))
end
end
+-- from the lua book:
+
+function traceback()
+ local level = 1
+ while true do
+ local info = debug.getinfo(level, "Sl")
+ if not info then
+ break
+ elseif info.what == "C" then
+ print(format("%3i : C function",level))
+ else
+ print(format("%3i : [%s]:%d",level,info.short_src,info.currentline))
+ end
+ level = level + 1
+ end
+end
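+
+-- calling traceback() writes one "level : [source]:line" entry per stack frame
+-- (or "level : C function" for C frames); purely a diagnostic helper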
+
end -- of closure
@@ -4668,7 +5139,7 @@ if not modules then modules = { } end modules ['trac-inf'] = {
-- get warnings about assignments. This is more efficient than using rawset
-- and rawget.
-local format = string.format
+local format, lower = string.format, string.lower
local clock = os.gettimeofday or os.clock -- should go in environment
local write_nl = texio.write_nl
@@ -4770,10 +5241,10 @@ function statistics.show(reporter)
-- this code will move
local register = statistics.register
register("luatex banner", function()
- return string.lower(status.banner)
+ return lower(status.banner)
end)
register("control sequences", function()
- return format("%s of %s", status.cs_count, status.hash_size+status.hash_extra)
+ return format("%s of %s + %s", status.cs_count, status.hash_size,status.hash_extra)
end)
register("callbacks", function()
local total, indirect = status.callbacks or 0, status.indirect_callbacks or 0
@@ -4847,7 +5318,7 @@ end
function commands.elapsedtime(name)
stoptiming(name or "whatever")
- tex.sprint(elapsedtime(name or "whatever"))
+ context(elapsedtime(name or "whatever"))
end
@@ -4931,6 +5402,10 @@ local function set(t,what,newvalue)
if type(what) ~= "table" then
return
end
+ if not done then -- catch ... why not set?
+ done = { }
+ t.done = done
+ end
for w, value in next, what do
if value == "" then
value = newvalue
@@ -5167,6 +5642,8 @@ end
if texconfig then
+ -- this happens too late in ini mode but that is no problem
+
local function set(k,v)
v = tonumber(v)
if v then
@@ -5237,7 +5714,7 @@ local report, subreport, status, settarget, setformats, settranslations
local direct, subdirect, writer, pushtarget, poptarget
-if tex and tex.jobname or tex.formatname then
+if tex and (tex.jobname or tex.formatname) then
local valueiskey = { __index = function(t,k) t[k] = k return k end } -- will be helper
@@ -5252,6 +5729,10 @@ if tex and tex.jobname or tex.formatname then
write_nl(target,...)
end
+ newline = function()
+ write_nl(target,"\n")
+ end
+
report = function(a,b,c,...)
if c then
write_nl(target,format("%-15s > %s\n",translations[a],format(formats[b],c,...)))
@@ -5357,6 +5838,10 @@ else
writer = write_nl
+ newline = function()
+ write_nl("\n")
+ end
+
report = function(a,b,c,...)
if c then
write_nl(format("%-15s | %s",a,format(b,c,...)))
@@ -5416,6 +5901,7 @@ logs.settranslations = settranslations
logs.direct = direct
logs.subdirect = subdirect
logs.writer = writer
+logs.newline = newline
-- installer
@@ -5585,21 +6071,58 @@ local real, user, sub
function logs.start_page_number()
real, user, sub = texcount.realpageno, texcount.userpageno, texcount.subpageno
+-- real, user, sub = 0, 0, 0
end
-function logs.stop_page_number()
- if real > 0 then
- if user > 0 then
- if sub > 0 then
- report_pages("flushing realpage %s, userpage %s, subpage %s",real,user,sub)
+local timing = false
+local starttime = nil
+local lasttime = nil
+
+trackers.register("pages.timing", function(v) -- only for myself (diagnostics)
+ starttime = os.clock()
+ timing = true
+end)
+
+function logs.stop_page_number() -- the first page can include the initialization so we omit it from the average
+ if timing then
+ local elapsed, average
+ local stoptime = os.clock()
+ if not lasttime or real < 2 then
+ elapsed = stoptime
+ average = stoptime
+ starttime = stoptime
+ else
+ elapsed = stoptime - lasttime
+ average = (stoptime - starttime) / (real - 1)
+ end
+ lasttime = stoptime
+ if real > 0 then
+ if user > 0 then
+ if sub > 0 then
+ report_pages("flushing realpage %s, userpage %s, subpage %s, time %0.04f / %0.04f",real,user,sub,elapsed,average)
+ else
+ report_pages("flushing realpage %s, userpage %s, time %0.04f / %0.04f",real,user,elapsed,average)
+ end
else
- report_pages("flushing realpage %s, userpage %s",real,user)
+ report_pages("flushing realpage %s, time %0.04f / %0.04f",real,elapsed,average)
end
else
- report_pages("flushing realpage %s",real)
+ report_pages("flushing page, time %0.04f / %0.04f",elapsed,average)
end
else
- report_pages("flushing page")
+ if real > 0 then
+ if user > 0 then
+ if sub > 0 then
+ report_pages("flushing realpage %s, userpage %s, subpage %s",real,user,sub)
+ else
+ report_pages("flushing realpage %s, userpage %s",real,user)
+ end
+ else
+ report_pages("flushing realpage %s",real)
+ end
+ else
+ report_pages("flushing page")
+ end
end
logs.flush()
end
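+
+-- the timing branch above is only active when the diagnostic tracker is on,
+-- typically enabled with something like \enabletrackers[pages.timing]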
@@ -5681,6 +6204,13 @@ local function reportbanner(t)
end
end
+local function reportversion(t)
+ local banner = t.banner
+ if banner then
+ t.report(banner)
+ end
+end
+
local function reporthelp(t,...)
local helpinfo = t.helpinfo
if type(helpinfo) == "string" then
@@ -5707,6 +6237,7 @@ function logs.application(t)
t.report = logs.reporter(t.name)
t.help = function(...) reportbanner(t) ; reporthelp(t,...) ; reportinfo(t) end
t.identify = function() reportbanner(t) end
+ t.version = function() reportversion(t) end
return t
end
@@ -5772,6 +6303,11 @@ else
end
end
+-- do we still need io.flush then?
+
+io.stdout:setvbuf('no')
+io.stderr:setvbuf('no')
+
end -- of closure
@@ -6277,6 +6813,10 @@ if not modules then modules = { } end modules ['lxml-tab'] = {
-- stripping spaces from e.g. cont-en.xml saves .2 sec runtime so it's not worth the
-- trouble
+-- todo: when serializing optionally remap named entities to hex (if known in char-ent.lua)
+-- maybe when letter -> utf, else name .. then we need an option to the serializer .. a bit
+-- of work so we delay this till we cleanup
+
local trace_entities = false trackers.register("xml.entities", function(v) trace_entities = v end)
local report_xml = logs and logs.reporter("xml","core") or function(...) print(format(...)) end
@@ -6301,10 +6841,11 @@ xml = xml or { }
local xml = xml
+local utf = unicode.utf8
local concat, remove, insert = table.concat, table.remove, table.insert
local type, next, setmetatable, getmetatable, tonumber = type, next, setmetatable, getmetatable, tonumber
local format, lower, find, match, gsub = string.format, string.lower, string.find, string.match, string.gsub
-local utfchar = unicode.utf8.char
+local utfchar, utffind, utfgsub = utf.char, utf.find, utf.gsub
local lpegmatch = lpeg.match
local P, S, R, C, V, C, Cs = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.V, lpeg.C, lpeg.Cs
@@ -6417,9 +6958,22 @@ element.</p>
local nsremap, resolvens = xml.xmlns, xml.resolvens
-local stack, top, dt, at, xmlns, errorstr, entities = { }, { }, { }, { }, { }, nil, { }
-local strip, cleanup, utfize, resolve, resolve_predefined, unify_predefined = false, false, false, false, false, false
-local dcache, hcache, acache = { }, { }, { }
+local stack = { }
+local top = { }
+local dt = { }
+local at = { }
+local xmlns = { }
+local errorstr = nil
+local entities = { }
+local strip = false
+local cleanup = false
+local utfize = false
+local resolve_predefined = false
+local unify_predefined = false
+
+local dcache = { }
+local hcache = { }
+local acache = { }
local mt = { }
@@ -6585,7 +7139,7 @@ local predefined_unified = {
[42] = "&quot;",
[47] = "&apos;",
[74] = "&lt;",
- [76] = "&gr;",
+ [76] = "&gt;",
}
local predefined_simplified = {
@@ -6596,6 +7150,58 @@ local predefined_simplified = {
[76] = ">", gt = ">",
}
+local nofprivates = 0xF0000 -- shared but seldom used
+
+local privates_u = { -- unescaped
+ [ [[&]] ] = "&amp;",
+ [ [["]] ] = "&quot;",
+ [ [[']] ] = "&apos;",
+ [ [[<]] ] = "&lt;",
+ [ [[>]] ] = "&gt;",
+}
+
+local privates_p = {
+}
+
+local privates_n = {
+ -- keeps track of defined ones
+}
+
+local function escaped(s)
+ if s == "" then
+ return ""
+ else -- if utffind(s,privates_u) then
+ return (utfgsub(s,".",privates_u))
+ -- else
+ -- return s
+ end
+end
+
+local function unescaped(s)
+ local p = privates_n[s]
+ if not p then
+ nofprivates = nofprivates + 1
+ p = utfchar(nofprivates)
+ privates_n[s] = p
+ s = "&" .. s .. ";" -- todo: use char-ent to map to hex
+ privates_u[p] = s
+ privates_p[p] = s
+ end
+ return p
+end
+
+local function unprivatized(s,resolve)
+ if s == "" then
+ return ""
+ else
+ return (utfgsub(s,".",privates_p))
+ end
+end
+
+xml.privatetoken = unescaped
+xml.unprivatized = unprivatized
+xml.privatecodes = privates_n
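+
+-- for illustration (not in the source): escaped("a < b") gives "a &lt; b",
+-- while unescaped("nbsp") hands back a private-use character that a later
+-- unprivatized call turns into "&nbsp;" again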
+
local function handle_hex_entity(str)
local h = hcache[str]
if not h then
@@ -6637,7 +7243,7 @@ local function handle_dec_entity(str)
if not n then
report_xml("utfize, ignoring dec entity &#%s;",str)
elseif trace_entities then
- report_xml("utfize, converting dec entity &#%s; into %s",str,h)
+ report_xml("utfize, converting dec entity &#%s; into %s",str,d)
end
else
if trace_entities then
@@ -6658,34 +7264,44 @@ local function handle_any_entity(str)
if not a then
a = resolve_predefined and predefined_simplified[str]
if a then
- -- one of the predefined
- elseif type(resolve) == "function" then
- a = resolve(str) or entities[str]
- else
- a = entities[str]
- end
- if a then
if trace_entities then
- report_xml("resolved entity &%s; -> %s (internal)",str,a)
+ report_xml("resolved entity &%s; -> %s (predefined)",str,a)
end
- a = lpegmatch(parsedentity,a) or a
else
- local unknown_any_entity = placeholders.unknown_any_entity
- if unknown_any_entity then
- a = unknown_any_entity(str) or ""
+ if type(resolve) == "function" then
+ a = resolve(str) or entities[str]
+ else
+ a = entities[str]
end
if a then
+ if type(a) == "function" then
+ if trace_entities then
+ report_xml("expanding entity &%s; (function)",str)
+ end
+ a = a(str) or ""
+ end
+ a = lpegmatch(parsedentity,a) or a -- for nested
if trace_entities then
- report_xml("resolved entity &%s; -> %s (external)",str,a)
+ report_xml("resolved entity &%s; -> %s (internal)",str,a)
end
else
- if trace_entities then
- report_xml("keeping entity &%s;",str)
+ local unknown_any_entity = placeholders.unknown_any_entity
+ if unknown_any_entity then
+ a = unknown_any_entity(str) or ""
end
- if str == "" then
- a = "&error;"
+ if a then
+ if trace_entities then
+ report_xml("resolved entity &%s; -> %s (external)",str,a)
+ end
else
- a = "&" .. str .. ";"
+ if trace_entities then
+ report_xml("keeping entity &%s;",str)
+ end
+ if str == "" then
+ a = "&error;"
+ else
+ a = "&" .. str .. ";"
+ end
end
end
end
@@ -6700,18 +7316,25 @@ local function handle_any_entity(str)
else
local a = acache[str]
if not a then
- if trace_entities then
- report_xml("found entity &%s;",str)
- end
a = resolve_predefined and predefined_simplified[str]
if a then
-- one of the predefined
acache[str] = a
+ if trace_entities then
+ report_xml("entity &%s; becomes %s",str,tostring(a))
+ end
elseif str == "" then
+ if trace_entities then
+ report_xml("invalid entity &%s;",str)
+ end
a = "&error;"
acache[str] = a
else
- a = "&" .. str .. ";"
+ if trace_entities then
+ report_xml("entity &%s; is made private",str)
+ end
+ -- a = "&" .. str .. ";"
+ a = unescaped(str)
acache[str] = a
end
end
@@ -6761,7 +7384,7 @@ local value = (squote * Cs((entity + (1 - squote))^0) * squote) + (dq
local endofattributes = slash * close + close -- recovery of flacky html
local whatever = space * name * optionalspace * equal
-local wrongvalue = C(P(1-whatever-close)^1 + P(1-close)^1) / attribute_value_error
+----- wrongvalue = C(P(1-whatever-close)^1 + P(1-close)^1) / attribute_value_error
----- wrongvalue = C(P(1-whatever-endofattributes)^1 + P(1-endofattributes)^1) / attribute_value_error
----- wrongvalue = C(P(1-space-endofattributes)^1) / attribute_value_error
local wrongvalue = Cs(P(entity + (1-space-endofattributes))^1) / attribute_value_error
@@ -6796,6 +7419,8 @@ local function normalentity(k,v ) entities[k] = v end
local function systementity(k,v,n) entities[k] = v end
local function publicentity(k,v,n) entities[k] = v end
+-- todo: separate dtd parser
+
local begindoctype = open * P("!DOCTYPE")
local enddoctype = close
local beginset = P("[")
@@ -6803,17 +7428,22 @@ local endset = P("]")
local doctypename = C((1-somespace-close)^0)
local elementdoctype = optionalspace * P("<!ELEMENT") * (1-close)^0 * close
+local basiccomment = begincomment * ((1 - endcomment)^0) * endcomment
+
local normalentitytype = (doctypename * somespace * value)/normalentity
local publicentitytype = (doctypename * somespace * P("PUBLIC") * somespace * value)/publicentity
local systementitytype = (doctypename * somespace * P("SYSTEM") * somespace * value * somespace * P("NDATA") * somespace * doctypename)/systementity
local entitydoctype = optionalspace * P("<!ENTITY") * somespace * (systementitytype + publicentitytype + normalentitytype) * optionalspace * close
-local doctypeset = beginset * optionalspace * P(elementdoctype + entitydoctype + space)^0 * optionalspace * endset
+-- we accept comments in doctypes
+
+local doctypeset = beginset * optionalspace * P(elementdoctype + entitydoctype + basiccomment + space)^0 * optionalspace * endset
local definitiondoctype= doctypename * somespace * doctypeset
local publicdoctype = doctypename * somespace * P("PUBLIC") * somespace * value * somespace * value * somespace * doctypeset
local systemdoctype = doctypename * somespace * P("SYSTEM") * somespace * value * somespace * doctypeset
local simpledoctype = (1-close)^1 -- * balanced^0
local somedoctype = C((somespace * (publicdoctype + systemdoctype + definitiondoctype + simpledoctype) * optionalspace)^0)
local instruction = (spacing * begininstruction * someinstruction * endinstruction) / function(...) add_special("@pi@",...) end
local comment = (spacing * begincomment * somecomment * endcomment ) / function(...) add_special("@cm@",...) end
@@ -6845,17 +7475,30 @@ local grammar_unparsed_text = P { "preamble",
children = unparsedtext + V("parent") + emptyelement + comment + cdata + instruction,
}
--- maybe we will add settinsg to result as well
+-- maybe we will add settings to result as well
-local function xmlconvert(data, settings)
- settings = settings or { } -- no_root strip_cm_and_dt given_entities parent_root error_handler
- strip = settings.strip_cm_and_dt
- utfize = settings.utfize_entities
- resolve = settings.resolve_entities
+local function _xmlconvert_(data, settings)
+ settings = settings or { } -- no_root strip_cm_and_dt given_entities parent_root error_handler
+ --
+ strip = settings.strip_cm_and_dt
+ utfize = settings.utfize_entities
+ resolve = settings.resolve_entities
resolve_predefined = settings.resolve_predefined_entities -- in case we have escaped entities
- unify_predefined = settings.unify_predefined_entities -- &#038; -> &amp;
- cleanup = settings.text_cleanup
- stack, top, at, xmlns, errorstr, entities = { }, { }, { }, { }, nil, settings.entities or { }
+ unify_predefined = settings.unify_predefined_entities -- &#038; -> &amp;
+ cleanup = settings.text_cleanup
+ entities = settings.entities or { }
+ --
+ if utfize == nil then
+ settings.utfize_entities = true
+ utfize = true
+ end
+ if resolve_predefined == nil then
+ settings.resolve_predefined_entities = true
+ resolve_predefined = true
+ end
+ --
+ --
+ stack, top, at, xmlns, errorstr = { }, { }, { }, { }, nil
acache, hcache, dcache = { }, { }, { } -- not stored
reported_attribute_errors = { }
if settings.parent_root then
@@ -6893,7 +7536,7 @@ local function xmlconvert(data, settings)
else
errorhandler = errorhandler or xml.errorhandler
if errorhandler then
- xml.errorhandler("load",errorstr)
+ xml.errorhandler(format("load error: %s",errorstr))
end
end
else
@@ -6907,7 +7550,7 @@ local function xmlconvert(data, settings)
local v = rdt[k]
if type(v) == "table" and not v.special then -- always table -)
result.ri = k -- rootindex
-v.__p__ = result -- new, experiment, else we cannot go back to settings, we need to test this !
+ v.__p__ = result -- new, experiment, else we cannot go back to settings, we need to test this !
break
end
end
@@ -6915,16 +7558,42 @@ v.__p__ = result -- new, experiment, else we cannot go back to settings, we nee
if errorstr and errorstr ~= "" then
result.error = true
end
+ result.statistics = {
+ entities = {
+ decimals = dcache,
+ hexadecimals = hcache,
+ names = acache,
+ }
+ }
+ strip, utfize, resolve, resolve_predefined = nil, nil, nil, nil
+ unify_predefined, cleanup, entities = nil, nil, nil
+ stack, top, at, xmlns, errorstr = nil, nil, nil, nil, nil
+ acache, hcache, dcache = nil, nil, nil
+ reported_attribute_errors, mt, errorhandler = nil, nil, nil
return result
end
+-- Because we can have a crash (stack issues) with faulty xml, we wrap this one
+-- in a protector:
+
+function xmlconvert(data,settings)
+ local ok, result = pcall(function() return _xmlconvert_(data,settings) end)
+ if ok then
+ return result
+ else
+ return _xmlconvert_("")
+ end
+end
+
xml.convert = xmlconvert
-function xml.inheritedconvert(data,xmldata)
+function xml.inheritedconvert(data,xmldata) -- xmldata is parent
local settings = xmldata.settings
- settings.parent_root = xmldata -- to be tested
+ if settings then
+ settings.parent_root = xmldata -- to be tested
+ end
-- settings.no_root = true
- local xc = xmlconvert(data,settings)
+ local xc = xmlconvert(data,settings) -- hm, we might need to locate settings
-- xc.settings = nil
-- xc.entities = nil
-- xc.special = nil
@@ -6953,7 +7622,7 @@ function xml.is_valid(root)
return root and not root.error
end
-xml.errorhandler = report
+xml.errorhandler = report_xml
--[[ldx--
<p>We cannot load an <l n='lpeg'/> from a filehandle so we need to load
@@ -7032,18 +7701,15 @@ alternative.</p>
function xml.checkbom(root) -- can be made faster
if root.ri then
- local dt, found = root.dt, false
+ local dt = root.dt
for k=1,#dt do
local v = dt[k]
if type(v) == "table" and v.special and v.tg == "@pi@" and find(v.dt[1],"xml.*version=") then
- found = true
- break
+ return
end
end
- if not found then
- insert(dt, 1, { special=true, ns="", tg="@pi@", dt = { "xml version='1.0' standalone='yes'"} } )
- insert(dt, 2, "\n" )
- end
+ insert(dt, 1, { special=true, ns="", tg="@pi@", dt = { "xml version='1.0' standalone='yes'"} } )
+ insert(dt, 2, "\n" )
end
end
@@ -7054,14 +7720,14 @@ and then handle the lot.</p>
-- new experimental reorganized serialize
-local function verbose_element(e,handlers)
+local function verbose_element(e,handlers) -- options
local handle = handlers.handle
local serialize = handlers.serialize
local ens, etg, eat, edt, ern = e.ns, e.tg, e.at, e.dt, e.rn
local ats = eat and next(eat) and { }
if ats then
for k,v in next, eat do
- ats[#ats+1] = format('%s=%q',k,v)
+ ats[#ats+1] = format('%s=%q',k,escaped(v))
end
end
if ern and trace_entities and ern ~= ens then
@@ -7077,7 +7743,7 @@ local function verbose_element(e,handlers)
for i=1,#edt do
local e = edt[i]
if type(e) == "string" then
- handle(e)
+ handle(escaped(e))
else
serialize(e,handlers)
end
@@ -7098,11 +7764,11 @@ local function verbose_element(e,handlers)
handle("<",etg,">")
end
for i=1,#edt do
- local ei = edt[i]
- if type(ei) == "string" then
- handle(ei)
+ local e = edt[i]
+ if type(e) == "string" then
+ handle(escaped(e)) -- option: hexify escaped entities
else
- serialize(ei,handlers)
+ serialize(e,handlers)
end
end
handle("</",etg,">")
@@ -7137,7 +7803,7 @@ local function verbose_root(e,handlers)
end
local function verbose_text(e,handlers)
- handlers.handle(e)
+ handlers.handle(escaped(e))
end
local function verbose_document(e,handlers)
@@ -7265,20 +7931,33 @@ local result
local xmlfilehandler = newhandlers {
name = "file",
- initialize = function(name) result = io.open(name,"wb") return result end,
- finalize = function() result:close() return true end,
- handle = function(...) result:write(...) end,
+ initialize = function(name)
+ result = io.open(name,"wb")
+ return result
+ end,
+ finalize = function()
+ result:close()
+ return true
+ end,
+ handle = function(...)
+ result:write(...)
+ end,
}
-- no checking on writeability here but not faster either
--
-- local xmlfilehandler = newhandlers {
--- initialize = function(name) io.output(name,"wb") return true end,
--- finalize = function() io.close() return true end,
+-- initialize = function(name)
+-- io.output(name,"wb")
+-- return true
+-- end,
+-- finalize = function()
+-- io.close()
+-- return true
+-- end,
-- handle = io.write,
-- }
-
function xml.save(root,name)
serialize(root,xmlfilehandler,name)
end
@@ -7287,28 +7966,34 @@ local result
local xmlstringhandler = newhandlers {
name = "string",
- initialize = function() result = { } return result end,
- finalize = function() return concat(result) end,
- handle = function(...) result[#result+1] = concat { ... } end
+ initialize = function()
+ result = { }
+ return result
+ end,
+ finalize = function()
+ return concat(result)
+ end,
+ handle = function(...)
+ result[#result+1] = concat { ... }
+ end,
}
local function xmltostring(root) -- 25% overhead due to collecting
- if root then
- if type(root) == 'string' then
- return root
- else -- if next(root) then -- next is faster than type (and >0 test)
- return serialize(root,xmlstringhandler) or ""
- end
+ if not root then
+ return ""
+ elseif type(root) == 'string' then
+ return root
+ else -- if next(root) then -- next is faster than type (and >0 test)
+ return serialize(root,xmlstringhandler) or ""
end
- return ""
end
-local function xmltext(root) -- inline
+local function __tostring(root) -- inline
return (root and xmltostring(root)) or ""
end
initialize_mt = function(root) -- redefinition
- mt = { __tostring = xmltext, __index = root }
+ mt = { __tostring = __tostring, __index = root }
end
xml.defaulthandlers = handlers
@@ -7428,7 +8113,7 @@ xml.tocdata(e,"error")
--ldx]]--
function xml.tocdata(e,wrapper)
- local whatever = xmltostring(e.dt)
+ local whatever = type(e) == "table" and xmltostring(e.dt) or e or ""
if wrapper then
whatever = format("<%s>%s</%s>",wrapper,whatever,wrapper)
end
@@ -7620,8 +8305,8 @@ apply_axis['child'] = function(list)
c = c + 1
collected[c] = dk
dk.ni = k -- refresh
- en = en + 1
- dk.ei = en
+ en = en + 1
+ dk.ei = en
end
end
ll.en = en
@@ -7649,6 +8334,7 @@ local function collect(list,collected,c)
end
return c
end
+
apply_axis['descendant'] = function(list)
local collected, c = { }, 0
for l=1,#list do
@@ -7940,6 +8626,12 @@ local lp_or = P("|") / " or "
local lp_and = P("&") / " and "
local lp_builtin = P (
+ P("text") / "(ll.dt[1] or '')" + -- fragile
+ P("content") / "ll.dt" +
+ -- P("name") / "(ll.ns~='' and ll.ns..':'..ll.tg)" +
+ P("name") / "((ll.ns~='' and ll.ns..':'..ll.tg) or ll.tg)" +
+ P("tag") / "ll.tg" +
+ P("position") / "l" + -- is element in finalizer
P("firstindex") / "1" +
P("lastindex") / "(#ll.__p__.dt or 1)" +
P("firstelement") / "1" +
@@ -7947,15 +8639,11 @@ local lp_builtin = P (
P("first") / "1" +
P("last") / "#list" +
P("rootposition") / "order" +
- P("position") / "l" + -- is element in finalizer
P("order") / "order" +
P("element") / "(ll.ei or 1)" +
P("index") / "(ll.ni or 1)" +
P("match") / "(ll.mi or 1)" +
- P("text") / "(ll.dt[1] or '')" +
- -- P("name") / "(ll.ns~='' and ll.ns..':'..ll.tg)" +
- P("name") / "((ll.ns~='' and ll.ns..':'..ll.tg) or ll.tg)" +
- P("tag") / "ll.tg" +
+ -- P("namespace") / "ll.ns" +
P("ns") / "ll.ns"
) * ((spaces * P("(") * spaces * P(")"))/"")
@@ -7979,11 +8667,11 @@ local lp_function = C(R("az","AZ","__")^1) * P("(") / function(t) -- todo: bett
end
end
-local lparent = lpeg.P("(")
-local rparent = lpeg.P(")")
+local lparent = P("(")
+local rparent = P(")")
local noparent = 1 - (lparent+rparent)
-local nested = lpeg.P{lparent * (noparent + lpeg.V(1))^0 * rparent}
-local value = lpeg.P(lparent * lpeg.C((noparent + nested)^0) * rparent) -- lpeg.P{"("*C(((1-S("()"))+V(1))^0)*")"}
+local nested = P{lparent * (noparent + V(1))^0 * rparent}
+local value = P(lparent * C((noparent + nested)^0) * rparent) -- P{"("*C(((1-S("()"))+V(1))^0)*")"}
local lp_child = Cc("expr.child(ll,'") * R("az","AZ","--","__")^1 * Cc("')")
local lp_number = S("+-") * R("09")^1
@@ -8135,7 +8823,10 @@ local special_1 = P("*") * Cc(register_auto_descendant) * Cc(register_all_nodes
local special_2 = P("/") * Cc(register_auto_self)
local special_3 = P("") * Cc(register_auto_self)
-local pathparser = Ct { "patterns", -- can be made a bit faster by moving pattern outside
+local no_nextcolon = P(-1) + #(1-P(":")) -- newer lpeg needs the P(-1)
+local no_nextlparent = P(-1) + #(1-P("(")) -- newer lpeg needs the P(-1)
+
+local pathparser = Ct { "patterns", -- can be made a bit faster by moving some patterns outside
patterns = spaces * V("protocol") * spaces * (
( V("special") * spaces * P(-1) ) +
@@ -8164,10 +8855,8 @@ local pathparser = Ct { "patterns", -- can be made a bit faster by moving patter
shortcuts = V("shortcuts_a") * (spaces * "/" * spaces * V("shortcuts_a"))^0,
s_descendant_or_self = (P("***/") + P("/")) * Cc(register_descendant_or_self), --- *** is a bonus
- -- s_descendant_or_self = P("/") * Cc(register_descendant_or_self),
s_descendant = P("**") * Cc(register_descendant),
- s_child = P("*") * #(1-P(":")) * Cc(register_child ),
--- s_child = P("*") * #(P("/")+P(-1)) * Cc(register_child ),
+ s_child = P("*") * no_nextcolon * Cc(register_child ),
s_parent = P("..") * Cc(register_parent ),
s_self = P("." ) * Cc(register_self ),
s_root = P("^^") * Cc(register_root ),
@@ -8194,13 +8883,13 @@ local pathparser = Ct { "patterns", -- can be made a bit faster by moving patter
expressions = expression / register_expression,
letters = R("az")^1,
- name = (1-lpeg.S("/[]()|:*!"))^1,
+ name = (1-S("/[]()|:*!"))^1, -- make inline
negate = P("!") * Cc(false),
nodefunction = V("negate") + P("not") * Cc(false) + Cc(true),
nodetest = V("negate") + Cc(true),
nodename = (V("negate") + Cc(true)) * spaces * ((V("wildnodename") * P(":") * V("wildnodename")) + (Cc(false) * V("wildnodename"))),
- wildnodename = (C(V("name")) + P("*") * Cc(false)) * #(1-P("(")),
+ wildnodename = (C(V("name")) + P("*") * Cc(false)) * no_nextlparent,
nodeset = spaces * Ct(V("nodename") * (spaces * P("|") * spaces * V("nodename"))^0) * spaces,
finalizer = (Cb("protocol") * P("/")^-1 * C(V("name")) * arguments * P(-1)) / register_finalizer,
@@ -8364,7 +9053,7 @@ local function profiled_apply(list,parsed,nofparsed,order)
elseif kind == "expression" then
collected = apply_expression(collected,pi.evaluator,order)
elseif kind == "finalizer" then
- collected = pi.finalizer(collected)
+ collected = pi.finalizer(collected) -- no check on # here
p.matched = p.matched + 1
p.finalized = p.finalized + 1
return collected
@@ -8497,9 +9186,9 @@ end
expressions.child = function(e,pattern)
return applylpath(e,pattern) -- todo: cache
end
-expressions.count = function(e,pattern)
+expressions.count = function(e,pattern) -- what if pattern == empty or nil
local collected = applylpath(e,pattern) -- todo: cache
- return (collected and #collected) or 0
+ return pattern and (collected and #collected) or 0
end
-- external
@@ -8508,7 +9197,7 @@ expressions.oneof = function(s,...) -- slow
local t = {...} for i=1,#t do if s == t[i] then return true end end return false
end
expressions.error = function(str)
- xml.errorhandler("unknown function in lpath expression",tostring(str or "?"))
+ xml.errorhandler(format("unknown function in lpath expression: %s",tostring(str or "?")))
return false
end
expressions.undefined = function(s)
@@ -8534,6 +9223,23 @@ expressions.lower = lower
expressions.number = tonumber
expressions.boolean = toboolean
+function expressions.contains(str,pattern)
+ local t = type(str)
+ if t == "string" then
+ if find(str,pattern) then
+ return true
+ end
+ elseif t == "table" then
+ for i=1,#str do
+ local d = str[i]
+ if type(d) == "string" and find(d,pattern) then
+ return true
+ end
+ end
+ end
+ return false
+end
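+
+-- presumably usable inside an lpath predicate, something along the lines of
+--
+-- xml.filter(root,"/a/b[contains(text(),'word')]")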
+
-- user interface
local function traverse(root,pattern,handle)
@@ -8856,10 +9562,12 @@ local xml = xml
local xmlconvert, xmlcopy, xmlname = xml.convert, xml.copy, xml.name
local xmlinheritedconvert = xml.inheritedconvert
local xmlapplylpath = xml.applylpath
+local xmlfilter = xml.filter
local type, setmetatable, getmetatable = type, setmetatable, getmetatable
-local insert, remove, fastcopy = table.insert, table.remove, table.fastcopy
-local gmatch, gsub = string.gmatch, string.gsub
+local insert, remove, fastcopy, concat = table.insert, table.remove, table.fastcopy, table.concat
+local gmatch, gsub, format = string.gmatch, string.gsub, string.format
+local utfbyte = utf.byte
local function report(what,pattern,c,e)
report_xml("%s element '%s' (root: '%s', position: %s, index: %s, pattern: %s)",what,xmlname(e),xmlname(e.__p__),c,e.ni,pattern)
@@ -9004,7 +9712,7 @@ local function xmltoelement(whatever,root)
end
local element
if type(whatever) == "string" then
- element = xmlinheritedconvert(whatever,root)
+ element = xmlinheritedconvert(whatever,root) -- beware, not really a root
else
element = whatever -- we assume a table
end
@@ -9031,18 +9739,30 @@ local function copiedelement(element,newparent)
end
function xml.delete(root,pattern)
- local collected = xmlapplylpath(root,pattern)
- if collected then
- for c=1,#collected do
- local e = collected[c]
- local p = e.__p__
- if p then
- if trace_manipulations then
- report('deleting',pattern,c,e)
+ if not pattern or pattern == "" then
+ local p = root.__p__
+ if p then
+ if trace_manipulations then
+ report('deleting',"--",1,root)
+ end
+ local d = p.dt
+ remove(d,root.ni)
+ redo_ni(d) -- can be made faster and inlined
+ end
+ else
+ local collected = xmlapplylpath(root,pattern)
+ if collected then
+ for c=1,#collected do
+ local e = collected[c]
+ local p = e.__p__
+ if p then
+ if trace_manipulations then
+ report('deleting',pattern,c,e)
+ end
+ local d = p.dt
+ remove(d,e.ni)
+ redo_ni(d) -- can be made faster and inlined
end
- local d = p.dt
- remove(d,e.ni)
- redo_ni(d) -- can be made faster and inlined
end
end
end
@@ -9105,32 +9825,39 @@ end
local function inject_element(root,pattern,whatever,prepend)
local element = root and xmltoelement(whatever,root)
local collected = element and xmlapplylpath(root,pattern)
- if collected then
- for c=1,#collected do
- local e = collected[c]
- local r = e.__p__
- local d, k, rri = r.dt, e.ni, r.ri
- local edt = (rri and d[rri].dt) or (d and d[k] and d[k].dt)
- if edt then
- local be, af
- local cp = copiedelement(element,e)
- if prepend then
- be, af = cp, edt
- else
- be, af = edt, cp
- end
- local bn = #be
- for i=1,#af do
- bn = bn + 1
- be[bn] = af[i]
- end
- if rri then
- r.dt[rri].dt = be
- else
- d[k].dt = be
- end
- redo_ni(d)
+ local function inject_e(e)
+ local r = e.__p__
+ local d, k, rri = r.dt, e.ni, r.ri
+ local edt = (rri and d[rri].dt) or (d and d[k] and d[k].dt)
+ if edt then
+ local be, af
+ local cp = copiedelement(element,e)
+ if prepend then
+ be, af = cp, edt
+ else
+ be, af = edt, cp
end
+ local bn = #be
+ for i=1,#af do
+ bn = bn + 1
+ be[bn] = af[i]
+ end
+ if rri then
+ r.dt[rri].dt = be
+ else
+ d[k].dt = be
+ end
+ redo_ni(d)
+ end
+ end
+ if not collected then
+ -- nothing
+ elseif collected.tg then
+ -- first or so
+ inject_e(collected)
+ else
+ for c=1,#collected do
+ inject_e(collected[c])
end
end
end
@@ -9138,16 +9865,23 @@ end
local function insert_element(root,pattern,whatever,before) -- todo: element als functie
local element = root and xmltoelement(whatever,root)
local collected = element and xmlapplylpath(root,pattern)
- if collected then
+ local function insert_e(e)
+ local r = e.__p__
+ local d, k = r.dt, e.ni
+ if not before then
+ k = k + 1
+ end
+ insert(d,k,copiedelement(element,r))
+ redo_ni(d)
+ end
+ if not collected then
+ -- nothing
+ elseif collected.tg then
+ -- first or so
+ insert_e(collected)
+ else
for c=1,#collected do
- local e = collected[c]
- local r = e.__p__
- local d, k = r.dt, e.ni
- if not before then
- k = k + 1
- end
- insert(d,k,copiedelement(element,r))
- redo_ni(d)
+ insert_e(collected[c])
end
end
end
@@ -9346,6 +10080,75 @@ function xml.remapname(root, pattern, newtg, newns, newrn)
end
--[[ldx--
+<p>Helper (for q2p).</p>
+--ldx]]--
+
+function xml.cdatatotext(e)
+ local dt = e.dt
+ if #dt == 1 then
+ local first = dt[1]
+ if first.tg == "@cd@" then
+ e.dt = first.dt
+ end
+ else
+ -- maybe option
+ end
+end
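+
+-- in other words: a lone <![CDATA[...]]> child is unwrapped into plain text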
+
+xml.builtinentities = table.tohash { "amp", "quot", "apos", "lt", "gt" } -- used often so share
+
+local entities = characters and characters.entities or nil
+local builtinentities = xml.builtinentities
+
+function xml.addentitiesdoctype(root,option) -- we could also have a 'resolve' i.e. inline hex
+ if not entities then
+ require("char-ent")
+ entities = characters.entities
+ end
+ if entities and root and root.tg == "@rt@" and root.statistics then
+ local list = { }
+ local hexify = option == "hexadecimal"
+ for k, v in table.sortedhash(root.statistics.entities.names) do
+ if not builtinentities[k] then
+ local e = entities[k]
+ if not e then
+ e = format("[%s]",k)
+ elseif hexify then
+ e = format("&#%05X;",utfbyte(k))
+ end
+ list[#list+1] = format(" <!ENTITY %s %q >",k,e)
+ end
+ end
+ local dt = root.dt
+ local n = dt[1].tg == "@pi@" and 2 or 1
+ if #list > 0 then
+ insert(dt, n, { "\n" })
+ insert(dt, n, {
+ tg = "@dt@", -- beware, doctype is unparsed
+ dt = { format("Something [\n%s\n] ",concat(list)) },
+ ns = "",
+ special = true,
+ })
+ insert(dt, n, { "\n\n" })
+ else
+ -- insert(dt, n, { table.serialize(root.statistics) })
+ end
+ end
+end
+
+-- local str = [==[
+-- <?xml version='1.0' standalone='yes' ?>
+-- <root>
+-- <a>test &nbsp; test &#123; test</a>
+-- <b><![CDATA[oeps]]></b>
+-- </root>
+-- ]==]
+--
+-- local x = xml.convert(str)
+-- xml.addentitiesdoctype(x,"hexadecimal")
+-- print(x)
+
+--[[ldx--
<p>Here are a few synonyms.</p>
--ldx]]--
@@ -9378,6 +10181,53 @@ xml.remap_tag = xml.remaptag obsolete.remap_tag
xml.remap_name = xml.remapname obsolete.remap_name = xml.remapname
xml.remap_namespace = xml.remapnamespace obsolete.remap_namespace = xml.remapnamespace
+-- new (probably ok)
+
+function xml.cdata(e)
+ if e then
+ local dt = e.dt
+ if dt and #dt == 1 then
+ local first = dt[1]
+ return first.tg == "@cd@" and first.dt[1] or ""
+ end
+ end
+ return ""
+end
+
+function xml.finalizers.xml.cdata(collected)
+ if collected then
+ local e = collected[1]
+ if e then
+ local dt = e.dt
+ if dt and #dt == 1 then
+ local first = dt[1]
+ return first.tg == "@cd@" and first.dt[1] or ""
+ end
+ end
+ end
+ return ""
+end
+
+function xml.insertcomment(e,str,n) -- also insertcdata
+ table.insert(e.dt,n or 1,{
+ tg = "@cm@",
+ ns = "",
+ special = true,
+ at = { },
+ dt = { str },
+ })
+end
+
+function xml.setcdata(e,str) -- also setcomment
+ e.dt = { {
+ tg = "@cd@",
+ ns = "",
+ special = true,
+ at = { },
+ dt = { str },
+ } }
+end
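+
+-- e.g. xml.insertcomment(e,"generated") inserts a comment node at position n
+-- (default 1) of e.dt, and xml.setcdata(e,"raw & unescaped") replaces the
+-- content by a single cdata node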
+
end -- of closure
@@ -9391,15 +10241,17 @@ if not modules then modules = { } end modules ['lxml-xml'] = {
license = "see context related readme files"
}
-local concat = string.concat
+local concat = table.concat
+local find = string.find
local xml = xml
-local finalizers = xml.finalizers.xml
-local xmlfilter = xml.filter -- we could inline this one for speed
-local xmltostring = xml.tostring
-local xmlserialize = xml.serialize
-local xmlcollected = xml.collected
+local finalizers = xml.finalizers.xml
+local xmlfilter = xml.filter -- we could inline this one for speed
+local xmltostring = xml.tostring
+local xmlserialize = xml.serialize
+local xmlcollected = xml.collected
+local xmlnewhandlers = xml.newhandlers
local function first(collected) -- wrong ?
return collected and collected[1]
@@ -9413,6 +10265,21 @@ local function all(collected)
return collected
end
+-- local function reverse(collected)
+-- if collected then
+-- local nc = #collected
+-- if nc > 0 then
+-- local reversed, r = { }, 0
+-- for c=nc,1,-1 do
+-- r = r + 1
+-- reversed[r] = collected[c]
+-- end
+-- return reversed
+-- else
+-- return collected
+-- end
+-- end
+-- end
local reverse = table.reversed
@@ -9429,34 +10296,37 @@ local function att(id,name)
end
local function count(collected)
- return (collected and #collected) or 0
+ return collected and #collected or 0
end
local function position(collected,n)
- if collected then
- n = tonumber(n) or 0
- if n < 0 then
- return collected[#collected + n + 1]
- elseif n > 0 then
- return collected[n]
- else
- return collected[1].mi or 0
- end
+ if not collected then
+ return 0
+ end
+ local nc = #collected
+ if nc == 0 then
+ return 0
+ end
+ n = tonumber(n) or 0
+ if n < 0 then
+ return collected[nc + n + 1]
+ elseif n > 0 then
+ return collected[n]
+ else
+ return collected[1].mi or 0
end
end
local function match(collected)
- return (collected and collected[1].mi) or 0 -- match
+ return collected and #collected > 0 and collected[1].mi or 0 -- match
end
local function index(collected)
- if collected then
- return collected[1].ni
- end
+ return collected and #collected > 0 and collected[1].ni or 0 -- 0 is new
end
local function attributes(collected,arguments)
- if collected then
+ if collected and #collected > 0 then
local at = collected[1].at
if arguments then
return at[arguments]
@@ -9467,7 +10337,7 @@ local function attributes(collected,arguments)
end
local function chainattribute(collected,arguments) -- todo: optional levels
- if collected then
+ if collected and #collected > 0 then
local e = collected[1]
while e do
local at = e.at
@@ -9485,108 +10355,169 @@ local function chainattribute(collected,arguments) -- todo: optional levels
return ""
end
-local function raw(collected) -- hybrid
- if collected then
+local function raw(collected) -- hybrid (not much different from text so it might go)
+ if collected and #collected > 0 then
local e = collected[1] or collected
- return (e and xmlserialize(e)) or "" -- only first as we cannot concat function
+ return e and xmltostring(e) or "" -- only first as we cannot concat function
else
return ""
end
end
+--
+
+local result
+
+local xmltexthandler = xmlnewhandlers {
+ name = "string",
+ initialize = function()
+ result = { }
+ return result
+ end,
+ finalize = function()
+ return concat(result)
+ end,
+ handle = function(...)
+ result[#result+1] = concat { ... }
+ end,
+ escape = false,
+}
+
+local function xmltotext(root)
+ local dt = root.dt
+ if not dt then
+ return ""
+ end
+ local nt = #dt -- string or table
+ if nt == 0 then
+ return ""
+ elseif nt == 1 and type(dt[1]) == "string" then
+ return dt[1] -- no escaping of " ' < > &
+ else
+ return xmlserialize(root,xmltexthandler) or ""
+ end
+end
+
+--
+
local function text(collected) -- hybrid
- if collected then
- local e = collected[1] or collected
- return (e and xmltostring(e.dt)) or ""
+ if collected then -- no # test here !
+ local e = collected[1] or collected -- why fallback to element, how about cdata
+ return e and xmltotext(e) or ""
else
return ""
end
end
local function texts(collected)
- if collected then
- local t, n = { }, 0
- for c=1,#collected do
- local e = collected[c]
- if e and e.dt then
- n = n + 1
- t[n] = e.dt
- end
+ if not collected then
+ return { } -- why no nil
+ end
+ local nc = #collected
+ if nc == 0 then
+ return { } -- why no nil
+ end
+ local t, n = { }, 0
+ for c=1,nc do
+ local e = collected[c]
+ if e and e.dt then
+ n = n + 1
+ t[n] = e.dt
end
- return t
end
+ return t
end
local function tag(collected,n)
- if collected then
- local c
- if n == 0 or not n then
- c = collected[1]
- elseif n > 1 then
- c = collected[n]
- else
- c = collected[#collected-n+1]
- end
- return c and c.tg
+ if not collected then
+ return
+ end
+ local nc = #collected
+ if nc == 0 then
+ return
end
+ local c
+ if n == 0 or not n then
+ c = collected[1]
+ elseif n > 1 then
+ c = collected[n]
+ else
+ c = collected[nc-n+1]
+ end
+ return c and c.tg
end
local function name(collected,n)
- if collected then
- local c
- if n == 0 or not n then
- c = collected[1]
- elseif n > 1 then
- c = collected[n]
- else
- c = collected[#collected-n+1]
- end
- if c then
- if c.ns == "" then
- return c.tg
- else
- return c.ns .. ":" .. c.tg
- end
- end
+ if not collected then
+ return
+ end
+ local nc = #collected
+ if nc == 0 then
+ return
+ end
+ local c
+ if n == 0 or not n then
+ c = collected[1]
+ elseif n > 1 then
+ c = collected[n]
+ else
+ c = collected[nc-n+1]
+ end
+ if not c then
+ -- sorry
+ elseif c.ns == "" then
+ return c.tg
+ else
+ return c.ns .. ":" .. c.tg
end
end
local function tags(collected,nonamespace)
- if collected then
- local t, n = { }, 0
- for c=1,#collected do
- local e = collected[c]
- local ns, tg = e.ns, e.tg
- n = n + 1
- if nonamespace or ns == "" then
- t[n] = tg
- else
- t[n] = ns .. ":" .. tg
- end
+ if not collected then
+ return
+ end
+ local nc = #collected
+ if nc == 0 then
+ return
+ end
+ local t, n = { }, 0
+ for c=1,nc do
+ local e = collected[c]
+ local ns, tg = e.ns, e.tg
+ n = n + 1
+ if nonamespace or ns == "" then
+ t[n] = tg
+ else
+ t[n] = ns .. ":" .. tg
end
- return t
end
+ return t
end
-local function empty(collected)
- if collected then
- for c=1,#collected do
- local e = collected[c]
- if e then
- local edt = e.dt
- if edt then
- local n = #edt
- if n == 1 then
- local edk = edt[1]
- local typ = type(edk)
- if typ == "table" then
- return false
- elseif edk ~= "" then -- maybe an extra tester for spacing only
- return false
- end
- elseif n > 1 then
+local function empty(collected,spacesonly)
+ if not collected then
+ return true
+ end
+ local nc = #collected
+ if nc == 0 then
+ return true
+ end
+ for c=1,nc do
+ local e = collected[c]
+ if e then
+ local edt = e.dt
+ if edt then
+ local n = #edt
+ if n == 1 then
+ local edk = edt[1]
+ local typ = type(edk)
+ if typ == "table" then
+ return false
+ elseif edk ~= "" then
+ return false
+ elseif spacesonly and not find(edk,"%S") then
return false
end
+ elseif n > 1 then
+ return false
end
end
end
@@ -9641,14 +10572,14 @@ function xml.raw(id,pattern)
end
end
-function xml.text(id,pattern)
+function xml.text(id,pattern) -- brrr either content or element (when cdata)
if pattern then
-- return text(xmlfilter(id,pattern))
local collected = xmlfilter(id,pattern)
- return (collected and xmltostring(collected[1].dt)) or ""
+ return collected and #collected > 0 and xmltotext(collected[1]) or ""
elseif id then
-- return text(id)
- return xmltostring(id.dt) or ""
+ return xmltotext(id) or ""
else
return ""
end
@@ -9656,6 +10587,8 @@ end
xml.content = text
+--
+
function xml.position(id,pattern,n) -- element
return position(xmlfilter(id,pattern),n)
end
@@ -9664,8 +10597,8 @@ function xml.match(id,pattern) -- number
return match(xmlfilter(id,pattern))
end
-function xml.empty(id,pattern)
- return empty(xmlfilter(id,pattern))
+function xml.empty(id,pattern,spacesonly)
+ return empty(xmlfilter(id,pattern),spacesonly)
end
xml.all = xml.filter
@@ -9719,7 +10652,7 @@ if not modules then modules = { } end modules ['data-ini'] = {
license = "see context related readme files",
}
-local gsub, find, gmatch = string.gsub, string.find, string.gmatch
+local gsub, find, gmatch, char = string.gsub, string.find, string.gmatch, string.char
local concat = table.concat
local next, type = next, type
@@ -9781,7 +10714,7 @@ do
local homedir = osgetenv(ostype == "windows" and 'USERPROFILE' or 'HOME') or ''
if not homedir or homedir == "" then
- homedir = string.char(127) -- we need a value, later we wil trigger on it
+ homedir = char(127) -- we need a value, later we will trigger on it
end
homedir = file.collapsepath(homedir)
@@ -9954,7 +10887,7 @@ if not modules then modules = { } end modules ['data-exp'] = {
license = "see context related readme files",
}
-local format, find, gmatch, lower = string.format, string.find, string.gmatch, string.lower
+local format, find, gmatch, lower, char, sub = string.format, string.find, string.gmatch, string.lower, string.char, string.sub
local concat, sort = table.concat, table.sort
local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
local Ct, Cs, Cc, P, C, S = lpeg.Ct, lpeg.Cs, lpeg.Cc, lpeg.P, lpeg.C, lpeg.S
@@ -9974,19 +10907,6 @@ local resolvers = resolvers
-- all, when working on the main resolver code, I don't want to scroll
-- past this every time. See data-obs.lua for the gsub variant.
--- {a,b,c,d}
--- a,b,c/{p,q,r},d
--- a,b,c/{p,q,r}/d/{x,y,z}//
--- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
--- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
--- a{b,c}{d,e}f
--- {a,b,c,d}
--- {a,b,c/{p,q,r},d}
--- {a,b,c/{p,q,r}/d/{x,y,z}//}
--- {a,b,c/{p,q/{x,y,z}},d/{p,q,r}}
--- {a,b,c/{p,q/{x,y,z},w}v,d/{p,q,r}}
--- {$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,.local,}/web2c}
-
local function f_first(a,b)
local t, n = { }, 0
for s in gmatch(b,"[^,]+") do
@@ -10043,12 +10963,9 @@ local function splitpathexpr(str, newlist, validate) -- I couldn't resist lpeggi
str = lpegmatch(stripper_1,str)
if validate then
for s in gmatch(str,"[^,]+") do
- local restoreslashes = false
- if find(s, "//$") then restoreslashes = true end
s = validate(s)
if s then
n = n + 1 ; t[n] = s
- if restoreslashes and not find(s, "//$") then t[n] = t[n] .. '//' end
end
end
else
@@ -10081,41 +10998,56 @@ function resolvers.expandedpathfromlist(pathlist)
return newlist
end
+-- {a,b,c,d}
+-- a,b,c/{p,q,r},d
+-- a,b,c/{p,q,r}/d/{x,y,z}//
+-- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
+-- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
+-- a{b,c}{d,e}f
+-- {a,b,c,d}
+-- {a,b,c/{p,q,r},d}
+-- {a,b,c/{p,q,r}/d/{x,y,z}//}
+-- {a,b,c/{p,q/{x,y,z}},d/{p,q,r}}
+-- {a,b,c/{p,q/{x,y,z},w}v,d/{p,q,r}}
+-- {$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,.local,}/web2c}
+
local cleanup = lpeg.replacer {
{ "!" , "" },
{ "\\" , "/" },
}
-local homedir
-
-function resolvers.cleanpath(str)
- if not homedir then
- homedir = lpegmatch(cleanup,environment.homedir or "")
- if homedir == string.char(127) or homedir == "" or not lfs.isdir(homedir) then
- if trace_expansions then
- report_expansions("no home dir set, ignoring dependent paths")
- end
- function resolvers.cleanpath(str)
- if find(str,"~") then
- return "" -- special case
- else
- return str and lpegmatch(cleanup,str)
- end
- end
- else
- cleanup = lpeg.replacer {
- { "!" , "" },
- { "\\" , "/" },
- { "~" , homedir },
- }
- function resolvers.cleanpath(str)
- return str and lpegmatch(cleanup,str)
+function resolvers.cleanpath(str) -- tricky, maybe only simple paths
+ local doslashes = (P("\\")/"/" + 1)^0
+ local donegation = (P("!") /"" )^0
+ local homedir = lpegmatch(Cs(donegation * doslashes),environment.homedir or "")
+ if homedir == "~" or homedir == "" or not lfs.isdir(homedir) then
+ if trace_expansions then
+ report_expansions("no home dir set, ignoring dependent paths")
+ end
+ function resolvers.cleanpath(str)
+ if not str or find(str,"~") then
+ return "" -- special case
+ else
+ return lpegmatch(cleanup,str)
end
end
+ else
+ local dohome = ((P("~")+P("$HOME"))/homedir)^0
+ local cleanup = Cs(donegation * dohome * doslashes)
+ function resolvers.cleanpath(str)
+ return str and lpegmatch(cleanup,str) or ""
+ end
end
return resolvers.cleanpath(str)
end
+-- print(resolvers.cleanpath(""))
+-- print(resolvers.cleanpath("!"))
+-- print(resolvers.cleanpath("~"))
+-- print(resolvers.cleanpath("~/test"))
+-- print(resolvers.cleanpath("!~/test"))
+-- print(resolvers.cleanpath("~/test~test"))
+
-- This one strips quotes and funny tokens.
local expandhome = P("~") / "$HOME" -- environment.homedir
@@ -10140,8 +11072,8 @@ end
local cache = { }
----- splitter = Ct(lpeg.splitat(S(ostype == "windows" and ";" or ":;"))) -- maybe add ,
-local splitter = Ct(lpeg.splitat(";")) -- as we move towards urls, prefixes and use tables we no longer do :
+----- splitter = lpeg.tsplitat(S(ostype == "windows" and ";" or ":;")) -- maybe add ,
+local splitter = lpeg.tsplitat(";") -- as we move towards urls, prefixes and use tables we no longer do :
local backslashswapper = lpeg.replacer("\\","/")
@@ -10201,13 +11133,22 @@ end
-local weird = P(".")^1 + lpeg.anywhere(S("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t"))
+-- a lot of this caching can be stripped away when we have ssd's everywhere
+--
+-- we could cache all the (sub)paths here if needed
local attributes, directory = lfs.attributes, lfs.dir
+local weird = P(".")^1 + lpeg.anywhere(S("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t"))
+local timer = { }
+local scanned = { }
+local nofscans = 0
+local scancache = { }
+
local function scan(files,spec,path,n,m,r)
- local full = (path == "" and spec) or (spec .. path .. '/')
- local dirs, nofdirs = { }, 0
+ local full = (path == "" and spec) or (spec .. path .. '/')
+ local dirs = { }
+ local nofdirs = 0
for name in directory(full) do
if not lpegmatch(weird,name) then
local mode = attributes(full..name,'mode')
@@ -10245,25 +11186,122 @@ local function scan(files,spec,path,n,m,r)
files, n, m, r = scan(files,spec,dirs[i],n,m,r)
end
end
+ scancache[sub(full,1,-2)] = files
return files, n, m, r
end
-function resolvers.scanfiles(path,branch)
+local fullcache = { }
+
+function resolvers.scanfiles(path,branch,usecache)
+ statistics.starttiming(timer)
+ local realpath = resolvers.resolve(path) -- no shortcut
+ if usecache then
+ local files = fullcache[realpath]
+ if files then
+ if trace_locating then
+ report_expansions("using cached scan of path '%s', branch '%s'",path,branch or path)
+ end
+ return files
+ end
+ end
if trace_locating then
- report_expansions("scanning path '%s', branch '%s'",path, branch or path)
+ report_expansions("scanning path '%s', branch '%s'",path,branch or path)
end
- local realpath = resolvers.resolve(path) -- no shortcut
local files, n, m, r = scan({ },realpath .. '/',"",0,0,0)
- files.__path__ = path -- can be selfautoparent:texmf-whatever
- files.__files__ = n
- files.__directories__ = m
- files.__remappings__ = r
+ files.__path__ = path -- can be selfautoparent:texmf-whatever
+ files.__files__ = n
+ files.__directories__ = m
+ files.__remappings__ = r
if trace_locating then
report_expansions("%s files found on %s directories with %s uppercase remappings",n,m,r)
end
+ if usecache then
+ scanned[#scanned+1] = realpath
+ fullcache[realpath] = files
+ end
+ nofscans = nofscans + 1
+ statistics.stoptiming(timer)
+ return files
+end
+
+local function simplescan(files,spec,path) -- first match only, no map and such
+ local full = (path == "" and spec) or (spec .. path .. '/')
+ local dirs = { }
+ local nofdirs = 0
+ for name in directory(full) do
+ if not lpegmatch(weird,name) then
+ local mode = attributes(full..name,'mode')
+ if mode == 'file' then
+ if not files[name] then
+ -- only first match
+ files[name] = path
+ end
+ elseif mode == 'directory' then
+ nofdirs = nofdirs + 1
+ if path ~= "" then
+ dirs[nofdirs] = path..'/'..name
+ else
+ dirs[nofdirs] = name
+ end
+ end
+ end
+ end
+ if nofdirs > 0 then
+ sort(dirs)
+ for i=1,nofdirs do
+ files = simplescan(files,spec,dirs[i])
+ end
+ end
+ return files
+end
+
+local simplecache = { }
+local nofsharedscans = 0
+
+function resolvers.simplescanfiles(path,branch,usecache)
+ statistics.starttiming(timer)
+ local realpath = resolvers.resolve(path) -- no shortcut
+ if usecache then
+ local files = simplecache[realpath]
+ if not files then
+ files = scancache[realpath]
+ if files then
+ nofsharedscans = nofsharedscans + 1
+ end
+ end
+ if files then
+ if trace_locating then
+ report_expansions("using caches scan of path '%s', branch '%s'",path,branch or path)
+ end
+ return files
+ end
+ end
+ if trace_locating then
+ report_expansions("scanning path '%s', branch '%s'",path,branch or path)
+ end
+ local files = simplescan({ },realpath .. '/',"")
+ if trace_locating then
+ report_expansions("%s files found",table.count(files))
+ end
+ if usecache then
+ scanned[#scanned+1] = realpath
+ simplecache[realpath] = files
+ end
+ nofscans = nofscans + 1
+ statistics.stoptiming(timer)
return files
end
+function resolvers.scandata()
+ table.sort(scanned)
+ return {
+ n = nofscans,
+ shared = nofsharedscans,
+ time = statistics.elapsedtime(timer),
+ paths = scanned,
+ }
+end
+
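+-- An illustrative sketch (not in the original patch): inspecting the scan
+-- statistics that resolvers.scandata() collects above.
+--
+-- local d = resolvers.scandata()
+-- print(d.n,d.shared,d.time,#d.paths)
+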
end -- of closure
@@ -10361,12 +11399,12 @@ local relations = allocate { -- todo: handlers also here
mp = {
names = { "mp" },
variable = 'MPINPUTS',
- suffixes = { 'mp' },
+ suffixes = { 'mp', 'mpvi', 'mpiv', 'mpii' },
},
tex = {
names = { "tex" },
variable = 'TEXINPUTS',
- suffixes = { 'tex', "mkiv", "mkiv", "mkii" },
+ suffixes = { 'tex', "mkvi", "mkiv", "mkii" },
},
icc = {
names = { "icc", "icc profile", "icc profiles" },
@@ -10503,7 +11541,7 @@ end
resolvers.updaterelations() -- push this in the metatable -> newindex
local function simplified(t,k)
- return rawget(t,lower(gsub(k," ","")))
+ return k and rawget(t,lower(gsub(k," ",""))) or nil
end
setmetatableindex(formats, simplified)
@@ -10589,6 +11627,7 @@ luatools with a recache feature.</p>
--ldx]]--
local format, lower, gsub, concat = string.format, string.lower, string.gsub, table.concat
+local serialize, serializetofile = table.serialize, table.tofile
local mkdirs, isdir = dir.mkdirs, lfs.isdir
local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
@@ -10651,11 +11690,7 @@ local function identify()
end
elseif not writable and caches.force then
local cacheparent = file.dirname(cachepath)
- -- TH: added 'or true' for deeply buried non-existent caches.
- -- file.is_writable() is not really important here, since this
- -- branch is only checked for as-yet non-existent paths, and
- -- it guards against mkdirs() failing, anyway.
- if file.is_writable(cacheparent) or true then
+ if file.is_writable(cacheparent) and true then -- we go on anyway (needed for mojca's kind of paths)
if not caches.ask or io.ask(format("\nShould I create the cache path %s?",cachepath), "no", { "yes", "no" }) == "yes" then
mkdirs(cachepath)
if isdir(cachepath) and file.is_writable(cachepath) then
@@ -10746,7 +11781,7 @@ function caches.usedpaths()
end
function caches.configfiles()
- return table.concat(resolvers.instance.specification,";")
+ return concat(resolvers.instance.specification,";")
end
function caches.hashed(tree)
@@ -10870,9 +11905,9 @@ function caches.savedata(filepath,filename,data,raw)
end
data.cache_uuid = os.uuid()
if caches.direct then
- file.savedata(tmaname,table.serialize(data,true,saveoptions))
+ file.savedata(tmaname,serialize(data,true,saveoptions))
else
- table.tofile(tmaname,data,true,saveoptions)
+ serializetofile(tmaname,data,true,saveoptions)
end
utilities.lua.compile(tmaname,tmcname)
end
@@ -10939,7 +11974,7 @@ function caches.savecontent(cachename,dataname,content)
content = content,
uuid = os.uuid(),
}
- local ok = io.savedata(luaname,table.serialize(data,true))
+ local ok = io.savedata(luaname,serialize(data,true))
if ok then
if trace_locating then
report_resolvers("category '%s', cachename '%s' saved in '%s'",dataname,cachename,luaname)
@@ -11112,7 +12147,7 @@ if not modules then modules = { } end modules ['data-res'] = {
-- instance but for practical purposes we now avoid this and use a
-- instance variable. We always have one instance active (sort of global).
--- todo: cache:/// home:///
+-- todo: cache:/// home:/// selfautoparent:/// (sometime end 2012)
local format, gsub, find, lower, upper, match, gmatch = string.format, string.gsub, string.find, string.lower, string.upper, string.match, string.gmatch
local concat, insert, sortedkeys = table.concat, table.insert, table.sortedkeys
@@ -11155,7 +12190,58 @@ resolvers.criticalvars = allocate { "SELFAUTOLOC", "SELFAUTODIR", "SELFAUTOPARE
resolvers.luacnfname = 'texmfcnf.lua'
resolvers.luacnfstate = "unknown"
-resolvers.luacnfspec = '{selfautodir:,selfautoparent:};{selfautodir:,selfautoparent:}{/share,}/texmf{-local,};{selfautodir:,selfautoparent:}{/share,}/texmf{-local,}/web2c'
+-- The web2c tex binaries as well as kpse have built in paths for the configuration
+-- files and there can be a depressing truckload of them. This is actually the weak
+-- spot of a distribution. So we don't want:
+--
+-- resolvers.luacnfspec = '{$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,}/web2c}'
+--
+-- but instead use:
+--
+-- resolvers.luacnfspec = 'selfautoparent:{/texmf{-local,}{,/web2c}}'
+--
+-- which does not make texlive happy as there is a texmf-local tree one level up
+-- (sigh), so we need this. (We can assume web2c as mkiv does not run on older
+-- texlives anyway.
+--
+-- texlive:
+--
+-- selfautodir:
+-- selfautoparent:
+-- selfautodir:share/texmf-local/web2c
+-- selfautodir:share/texmf/web2c
+-- selfautodir:texmf-local/web2c
+-- selfautodir:texmf/web2c
+-- selfautoparent:share/texmf-local/web2c
+-- selfautoparent:share/texmf/web2c
+-- selfautoparent:texmf-local/web2c
+-- selfautoparent:texmf/web2c
+--
+-- minimals:
+--
+-- home:texmf/web2c
+-- selfautoparent:texmf-local/web2c
+-- selfautoparent:texmf-context/web2c
+-- selfautoparent:texmf/web2c
+
+if this_is_texlive then
+ -- resolvers.luacnfspec = '{selfautodir:,selfautoparent:}{,{/share,}/texmf{-local,}/web2c}'
+ -- resolvers.luacnfspec = '{selfautodir:{/share,}/texmf-local/web2c,selfautoparent:{/share,}/texmf{-local,}/web2c}'
+ -- resolvers.luacnfspec = 'selfautodir:/texmf-local/web2c;selfautoparent:/texmf{-local,}/web2c'
+ resolvers.luacnfspec = 'selfautodir:;selfautoparent:;{selfautodir:,selfautoparent:}{/share,}/texmf{-local,}/web2c'
+else
+ resolvers.luacnfspec = 'home:texmf/web2c;selfautoparent:texmf{-local,-context,}/web2c'
+end
+
+-- which (as we want users to use the web2c path) can be simplified to this:
+--
+-- if environment and environment.ownpath and string.find(environment.ownpath,"[\\/]texlive[\\/]") then
+-- resolvers.luacnfspec = 'selfautodir:/texmf-local/web2c,selfautoparent:/texmf-local/web2c,selfautoparent:/texmf/web2c'
+-- else
+-- resolvers.luacnfspec = 'selfautoparent:/texmf-local/web2c,selfautoparent:/texmf/web2c'
+-- end
+
+
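+-- An illustrative sketch (not in the original patch): unfolding such a spec the
+-- same way identify_configuration_files() does further on; the printed paths
+-- are indicative only.
+--
+-- local spec  = resolvers.luacnfspec
+-- local paths = resolvers.expandedpathfromlist(resolvers.splitpath(spec))
+-- for i=1,#paths do
+--     print(paths[i]) -- e.g. home:texmf/web2c, selfautoparent:texmf-local/web2c, ...
+-- end
+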
local unset_variable = "unset"
@@ -11177,7 +12263,7 @@ local instance = resolvers.instance or nil -- the current one (fast access)
function resolvers.setenv(key,value,raw)
if instance then
-- this one will be consulted first when we stay inside
- -- the current environment
+ -- the current environment (prefixes are not resolved here)
instance.environment[key] = value
-- we feed back into the environment, and as this is used
-- by other applications (via os.execute) we need to make
@@ -11257,6 +12343,7 @@ function resolvers.newinstance() -- todo: all vars will become lowercase and alp
remember = true,
diskcache = true,
renewcache = false,
+ renewtree = false,
loaderror = false,
savelists = true,
pattern = nil, -- lists
@@ -11348,7 +12435,7 @@ local function makepathexpression(str)
end
end
-local function reportcriticalvariables()
+local function reportcriticalvariables(cnfspec)
if trace_locating then
for i=1,#resolvers.criticalvars do
local k = resolvers.criticalvars[i]
@@ -11356,6 +12443,14 @@ local function reportcriticalvariables()
report_resolving("variable '%s' set to '%s'",k,v)
end
report_resolving()
+ if cnfspec then
+ if type(cnfspec) == "table" then
+ report_resolving("using configuration specification '%s'",concat(cnfspec,","))
+ else
+ report_resolving("using configuration specification '%s'",cnfspec)
+ end
+ end
+ report_resolving()
end
reportcriticalvariables = function() end
end
@@ -11370,7 +12465,7 @@ local function identify_configuration_files()
else
resolvers.luacnfstate = "environment"
end
- reportcriticalvariables()
+ reportcriticalvariables(cnfspec)
local cnfpaths = expandedpathfromlist(resolvers.splitpath(cnfspec))
local luacnfname = resolvers.luacnfname
for i=1,#cnfpaths do
@@ -11406,6 +12501,19 @@ local function load_configuration_files()
if blob then
local setups = instance.setups
local data = blob()
+ local parent = data and data.parent
+ if parent then
+ local filename = filejoin(pathname,parent)
+ local realname = resolvers.resolve(filename) -- no shortcut
+ local blob = loadfile(realname)
+ if blob then
+ local parentdata = blob()
+ if parentdata then
+ report_resolving("loading configuration file '%s'",filename)
+ data = table.merged(parentdata,data)
+ end
+ end
+ end
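+ -- An illustrative sketch (not in the original patch) of a child texmfcnf.lua
+ -- that chains to a parent one; 'parent' is resolved relative to the child's
+ -- directory (see the filejoin above) and the child's values win in table.merged.
+ -- The names are made up.
+ --
+ -- return {
+ --     parent  = "../../texmf/web2c/texmfcnf.lua",
+ --     content = { --[[ values overriding the parent ]] },
+ -- }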
data = data and data.content
if data then
if trace_locating then
@@ -11495,11 +12603,6 @@ local function locate_file_databases()
local runtime = stripped == path
path = resolvers.cleanpath(path)
local spec = resolvers.splitmethod(stripped)
- -- TH Original did this the other way around (elseif and if tests
- -- reversed) but then the else branch was never reached, as 'runtime'
- -- would effectively be ignored. In turn, that meant that the wrong
- -- locator method was used, such that tree: and file: were treated
- -- identically (cached and no runtime search).
if runtime and (spec.noscheme or spec.scheme == "file") then
stripped = "tree:///" .. stripped
elseif spec.scheme == "cache" or spec.scheme == "file" then
@@ -11507,7 +12610,7 @@ local function locate_file_databases()
end
if trace_locating then
if runtime then
- report_resolving("locating list of '%s' (runtime)",path)
+ report_resolving("locating list of '%s' (runtime) (%s)",path,stripped)
else
report_resolving("locating list of '%s' (cached)",path)
end
@@ -11551,6 +12654,39 @@ local function save_file_databases() -- will become cachers
end
end
+function resolvers.renew(hashname)
+ if hashname and hashname ~= "" then
+ local expanded = resolvers.expansion(hashname) or ""
+ if expanded ~= "" then
+ if trace_locating then
+ report_resolving("identifying tree '%s' from '%s'",expanded,hashname)
+ end
+ hashname = expanded
+ else
+ if trace_locating then
+ report_resolving("identifying tree '%s'",hashname)
+ end
+ end
+ local realpath = resolvers.resolve(hashname)
+ if lfs.isdir(realpath) then
+ if trace_locating then
+ report_resolving("using path '%s'",realpath)
+ end
+ methodhandler('generators',hashname)
+ -- could be shared
+ local content = instance.files[hashname]
+ caches.collapsecontent(content)
+ if trace_locating then
+ report_resolving("saving tree '%s'",hashname)
+ end
+ caches.savecontent(hashname,"files",content)
+ -- till here
+ else
+ report_resolving("invalid path '%s'",realpath)
+ end
+ end
+end
+
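+-- An illustrative sketch (not in the original patch): renewing one tree instead
+-- of the whole cache, which is what the new '--generate <name>' branch of mtxrun
+-- does near the end of this patch.
+--
+-- resolvers.load("nofiles")
+-- resolvers.renew("texmf-local") -- rescan and resave the file database of this tree
+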
local function load_databases()
locate_file_databases()
if instance.diskcache and not instance.renewcache then
@@ -11838,6 +12974,7 @@ local function collect_files(names)
if dname == "" or find(dname,"^%.") then
dname = false
else
+dname = gsub(dname,"*","%.*")
dname = "/" .. dname .. "$"
end
local hashes = instance.hashes
@@ -11897,9 +13034,22 @@ local function collect_files(names)
return noffiles > 0 and filelist or nil
end
-function resolvers.registerintrees(name)
- if not find(name,"^%.") then
- instance.foundintrees[name] = (instance.foundintrees[name] or 0) + 1 -- maybe only one
+local fit = { }
+
+function resolvers.registerintrees(filename,format,filetype,usedmethod,foundname)
+ local foundintrees = instance.foundintrees
+ if usedmethod == "direct" and filename == foundname and fit[foundname] then
+ -- just an extra lookup after a test on presence
+ else
+ local t = {
+ filename = filename,
+ format = format ~= "" and format or nil,
+ filetype = filetype ~= "" and filetype or nil,
+ usedmethod = usedmethod,
+ foundname = foundname,
+ }
+ fit[foundname] = t
+ foundintrees[#foundintrees+1] = t
end
end
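+-- An illustrative sketch (not in the original patch) of one entry that the new
+-- registerintrees() appends to instance.foundintrees; the values are indicative.
+--
+-- { filename = "context.mkiv", format = "tex", filetype = "tex",
+--   usedmethod = "database", foundname = "some/texmf/tex/context/base/context.mkiv" }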
@@ -11919,297 +13069,423 @@ end
local preparetreepattern = Cs((P(".")/"%%." + P("-")/"%%-" + P(1))^0 * Cc("$"))
--- this one is split in smaller functions but it needs testing
+-- -- -- begin of main file search routing -- -- -- needs checking as previous has been patched
-local function collect_instance_files(filename,askedformat,allresults) -- todo : plugin (scanners, checkers etc)
- local result = { }
- local stamp = nil
- askedformat = askedformat or ""
- filename = collapsepath(filename)
- -- speed up / beware: format problem
- if instance.remember and not allresults then
- stamp = filename .. "--" .. askedformat
- if instance.found[stamp] then
+local collect_instance_files
+
+local function find_analyze(filename,askedformat,allresults)
+ local filetype, wantedfiles, ext = '', { }, fileextname(filename)
+ -- too tricky as filename can be bla.1.2.3:
+ --
+ -- if not suffixmap[ext] then
+ -- wantedfiles[#wantedfiles+1] = filename
+ -- end
+ wantedfiles[#wantedfiles+1] = filename
+ if askedformat == "" then
+ if ext == "" or not suffixmap[ext] then
+ local defaultsuffixes = resolvers.defaultsuffixes
+ for i=1,#defaultsuffixes do
+ local forcedname = filename .. '.' .. defaultsuffixes[i]
+ wantedfiles[#wantedfiles+1] = forcedname
+ filetype = resolvers.formatofsuffix(forcedname)
+ if trace_locating then
+ report_resolving("forcing filetype '%s'",filetype)
+ end
+ end
+ else
+ filetype = resolvers.formatofsuffix(filename)
if trace_locating then
- report_resolving("remembered file '%s'",filename)
+ report_resolving("using suffix based filetype '%s'",filetype)
end
- resolvers.registerintrees(filename) -- for tracing used files
- return instance.found[stamp]
end
- end
- if not dangerous[askedformat] then
- if isreadable(filename) then
- if trace_detail then
- report_resolving("file '%s' found directly",filename)
- end
- if stamp then
- instance.found[stamp] = { filename }
+ else
+ if ext == "" or not suffixmap[ext] then
+ local format_suffixes = suffixes[askedformat]
+ if format_suffixes then
+ for i=1,#format_suffixes do
+ wantedfiles[#wantedfiles+1] = filename .. "." .. format_suffixes[i]
+ end
end
- return { filename }
+ end
+ filetype = askedformat
+ if trace_locating then
+ report_resolving("using given filetype '%s'",filetype)
end
end
+ return filetype, wantedfiles
+end
+
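+-- An illustrative sketch (not in the original patch) of what find_analyze gives
+-- back; the concrete values depend on the configured suffix maps.
+--
+-- local filetype, wanted = find_analyze("context","")      -- wanted: { "context", "context.tex", ... }
+-- local filetype, wanted = find_analyze("context.mkiv","") -- filetype from the mkiv suffix, wanted: { "context.mkiv" }
+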
+local function find_direct(filename,allresults)
+ if not dangerous[askedformat] and isreadable(filename) then
+ if trace_detail then
+ report_resolving("file '%s' found directly",filename)
+ end
+ return "direct", { filename }
+ end
+end
+
+local function find_wildcard(filename,allresults)
if find(filename,'%*') then
if trace_locating then
report_resolving("checking wildcard '%s'", filename)
end
- result = resolvers.findwildcardfiles(filename) -- we can use the local
- elseif file.is_qualified_path(filename) then
- if isreadable(filename) then
- if trace_locating then
- report_resolving("qualified name '%s'", filename)
- end
- result = { filename }
- else
- local forcedname, ok, suffix = "", false, fileextname(filename)
- if suffix == "" then -- why
- local format_suffixes = askedformat == "" and resolvers.defaultsuffixes or suffixes[askedformat]
- if format_suffixes then
- for i=1,#format_suffixes do
- local s = format_suffixes[i]
- forcedname = filename .. "." .. s
- if isreadable(forcedname) then
- if trace_locating then
- report_resolving("no suffix, forcing format filetype '%s'", s)
- end
- result, ok = { forcedname }, true
- break
- end
- end
- end
- end
- if not ok and suffix ~= "" then
- -- try to find in tree (no suffix manipulation), here we search for the
- -- matching last part of the name
- local basename = filebasename(filename)
- local pattern = lpegmatch(preparetreepattern,filename)
- -- messy .. to be sorted out
- local savedformat = askedformat
- local format = savedformat or ""
- if format == "" then
- askedformat = resolvers.formatofsuffix(suffix)
- end
- if not format then
- askedformat = "othertextfiles" -- kind of everything, maybe texinput is better
- end
- --
- if basename ~= filename then
- local resolved = collect_instance_files(basename,askedformat,allresults)
- if #result == 0 then -- shouldn't this be resolved ?
- local lowered = lower(basename)
- if filename ~= lowered then
- resolved = collect_instance_files(lowered,askedformat,allresults)
- end
- end
- resolvers.format = savedformat
- --
- for r=1,#resolved do
- local rr = resolved[r]
- if find(rr,pattern) then
- result[#result+1], ok = rr, true
- end
- end
- end
- -- a real wildcard:
- --
- -- if not ok then
- -- local filelist = collect_files({basename})
- -- for f=1,#filelist do
- -- local ff = filelist[f][3] or ""
- -- if find(ff,pattern) then
- -- result[#result+1], ok = ff, true
- -- end
- -- end
- -- end
- end
- if not ok and trace_locating then
- report_resolving("qualified name '%s'", filename)
- end
+ local method, result = resolvers.findwildcardfiles(filename)
+ if result then
+ return "wildcard", result
end
- else
- -- search spec
- local filetype, done, wantedfiles, ext = '', false, { }, fileextname(filename)
- -- -- tricky as filename can be bla.1.2.3
- -- if not suffixmap[ext] then --- probably needs to be done elsewhere too
- -- wantedfiles[#wantedfiles+1] = filename
- -- end
- wantedfiles[#wantedfiles+1] = filename
- if askedformat == "" then
- if ext == "" or not suffixmap[ext] then
- local defaultsuffixes = resolvers.defaultsuffixes
- for i=1,#defaultsuffixes do
- local forcedname = filename .. '.' .. defaultsuffixes[i]
- wantedfiles[#wantedfiles+1] = forcedname
- filetype = resolvers.formatofsuffix(forcedname)
+ end
+end
+
+local function find_qualified(filename,allresults) -- this one will be split too
+ if not file.is_qualified_path(filename) then
+ return
+ end
+ if trace_locating then
+ report_resolving("checking qualified name '%s'", filename)
+ end
+ if isreadable(filename) then
+ if trace_detail then
+ report_resolving("qualified file '%s' found", filename)
+ end
+ return "qualified", { filename }
+ end
+ if trace_detail then
+ report_resolving("locating qualified file '%s'", filename)
+ end
+ local forcedname, suffix = "", fileextname(filename)
+ if suffix == "" then -- why
+ local format_suffixes = askedformat == "" and resolvers.defaultsuffixes or suffixes[askedformat]
+ if format_suffixes then
+ for i=1,#format_suffixes do
+ local s = format_suffixes[i]
+ forcedname = filename .. "." .. s
+ if isreadable(forcedname) then
if trace_locating then
- report_resolving("forcing filetype '%s'",filetype)
+ report_resolving("no suffix, forcing format filetype '%s'", s)
end
+ return "qualified", { forcedname }
end
- else
- filetype = resolvers.formatofsuffix(filename)
- if trace_locating then
- report_resolving("using suffix based filetype '%s'",filetype)
+ end
+ end
+ end
+ if suffix and suffix ~= "" then
+ -- try to find in tree (no suffix manipulation), here we search for the
+ -- matching last part of the name
+ local basename = filebasename(filename)
+ local pattern = lpegmatch(preparetreepattern,filename)
+ -- messy .. to be sorted out
+ local savedformat = askedformat
+ local format = savedformat or ""
+ if format == "" then
+ askedformat = resolvers.formatofsuffix(suffix)
+ end
+ if not format then
+ askedformat = "othertextfiles" -- kind of everything, maybe all
+ end
+ --
+ if basename ~= filename then
+ local resolved = collect_instance_files(basename,askedformat,allresults)
+ if #resolved == 0 then
+ local lowered = lower(basename)
+ if filename ~= lowered then
+ resolved = collect_instance_files(lowered,askedformat,allresults)
end
end
- else
- if ext == "" or not suffixmap[ext] then
- local format_suffixes = suffixes[askedformat]
- if format_suffixes then
- for i=1,#format_suffixes do
- wantedfiles[#wantedfiles+1] = filename .. "." .. format_suffixes[i]
+ resolvers.format = savedformat
+ --
+ if #resolved > 0 then
+ local result = { }
+ for r=1,#resolved do
+ local rr = resolved[r]
+ if find(rr,pattern) then
+ result[#result+1] = rr
end
end
+ if #result > 0 then
+ return "qualified", result
+ end
end
- filetype = askedformat
- if trace_locating then
- report_resolving("using given filetype '%s'",filetype)
- end
end
- local typespec = resolvers.variableofformat(filetype)
- local pathlist = resolvers.expandedpathlist(typespec)
- if not pathlist or #pathlist == 0 then
- -- no pathlist, access check only / todo == wildcard
- if trace_detail then
- report_resolving("checking filename '%s', filetype '%s', wanted files '%s'",filename, filetype or '?',concat(wantedfiles," | "))
+ -- a real wildcard:
+ --
+ -- local filelist = collect_files({basename})
+ -- result = { }
+ -- for f=1,#filelist do
+ -- local ff = filelist[f][3] or ""
+ -- if find(ff,pattern) then
+ -- result[#result+1], ok = ff, true
+ -- end
+ -- end
+ -- if #result > 0 then
+ -- return "qualified", result
+ -- end
+ end
+end
+
+local function check_subpath(fname)
+ if isreadable(fname) then
+ if trace_detail then
+ report_resolving("found '%s' by deep scanning",fname)
+ end
+ return fname
+ end
+end
+
+local function find_intree(filename,filetype,wantedfiles,allresults)
+ local typespec = resolvers.variableofformat(filetype)
+ local pathlist = resolvers.expandedpathlist(typespec)
+ local method = "intree"
+ if pathlist and #pathlist > 0 then
+ -- list search
+ local filelist = collect_files(wantedfiles)
+ local dirlist = { }
+ if filelist then
+ for i=1,#filelist do
+ dirlist[i] = filedirname(filelist[i][3]) .. "/" -- was [2] .. gamble
end
- for k=1,#wantedfiles do
- local fname = wantedfiles[k]
- if fname and isreadable(fname) then
- filename, done = fname, true
- result[#result+1] = filejoin('.',fname)
- break
+ end
+ if trace_detail then
+ report_resolving("checking filename '%s'",filename)
+ end
+ local result = { }
+ for k=1,#pathlist do
+ local path = pathlist[k]
+ local pathname = lpegmatch(inhibitstripper,path)
+ local doscan = path == pathname -- no ^!!
+ if not find (pathname,'//$') then
+ doscan = false -- we check directly on the path
+ end
+ local done = false
+ -- using file list
+ if filelist then -- database
+ -- compare list entries with permitted pattern -- /xx /xx//
+ local expression = makepathexpression(pathname)
+ if trace_detail then
+ report_resolving("using pattern '%s' for path '%s'",expression,pathname)
end
- end
- -- this is actually 'other text files' or 'any' or 'whatever'
- local filelist = collect_files(wantedfiles)
- local fl = filelist and filelist[1]
- if fl then
- filename = fl[3] -- not local?
- result[#result+1] = resolvers.resolve(filename)
- done = true
- end
- else
- -- list search
- local filelist = collect_files(wantedfiles)
- local dirlist = { }
- if filelist then
- for i=1,#filelist do
- dirlist[i] = filedirname(filelist[i][3]) .. "/" -- was [2] .. gamble
- end
- end
- if trace_detail then
- report_resolving("checking filename '%s'",filename)
- end
- for k=1,#pathlist do
- local path = pathlist[k]
- local pathname = lpegmatch(inhibitstripper,path)
- local doscan = path == pathname -- no ^!!
- if not find (pathname, '//$') then doscan = false end
- done = false
- -- using file list
- if filelist then
- -- compare list entries with permitted pattern -- /xx /xx//
- local expression = makepathexpression(pathname)
- if trace_detail then
- report_resolving("using pattern '%s' for path '%s'",expression,pathname)
- end
- for k=1,#filelist do
- local fl = filelist[k]
- local f = fl[2]
- local d = dirlist[k]
- if find(d,expression) then
- -- todo, test for readable
- result[#result+1] = resolvers.resolve(fl[3]) -- no shortcut
- done = true
- if allresults then
- if trace_detail then
- report_resolving("match to '%s' in hash for file '%s' and path '%s', continue scanning",expression,f,d)
- end
- else
- if trace_detail then
- report_resolving("match to '%s' in hash for file '%s' and path '%s', quit scanning",expression,f,d)
- end
- break
+ for k=1,#filelist do
+ local fl = filelist[k]
+ local f = fl[2]
+ local d = dirlist[k]
+ if find(d,expression) then
+ -- todo, test for readable
+ result[#result+1] = resolvers.resolve(fl[3]) -- no shortcut
+ done = true
+ if allresults then
+ if trace_detail then
+ report_resolving("match to '%s' in hash for file '%s' and path '%s', continue scanning",expression,f,d)
end
- elseif trace_detail then
- report_resolving("no match to '%s' in hash for file '%s' and path '%s'",expression,f,d)
+ else
+ if trace_detail then
+ report_resolving("match to '%s' in hash for file '%s' and path '%s', quit scanning",expression,f,d)
+ end
+ break
end
+ elseif trace_detail then
+ report_resolving("no match to '%s' in hash for file '%s' and path '%s'",expression,f,d)
end
end
- if not done then
- -- check if on disk / unchecked / does not work at all / also zips
- -- TH perhaps it did not work because of missing resolvers.resolve()
- -- and resolvers.scanfiles() calls ...
- pathname = resolvers.resolve(pathname)
- local scheme = url.hasscheme(pathname)
- if not scheme or scheme == "file" then
- local pname = gsub(pathname,"%.%*$",'')
- if not find(pname,"%*") then
- local ppname = gsub(pname,"/+$","")
- if can_be_dir(ppname) then
- local files = {}
- if doscan then files = resolvers.scanfiles(ppname) end
+ end
+ if done then
+ method = "database"
+ else
+ method = "filesystem" -- bonus, even when !! is specified
+ pathname = gsub(pathname,"/+$","")
+ pathname = resolvers.resolve(pathname)
+ local scheme = url.hasscheme(pathname)
+ if not scheme or scheme == "file" then
+ local pname = gsub(pathname,"%.%*$",'')
+ if not find(pname,"%*") then
+ if can_be_dir(pname) then
+ -- quick root scan first
+ for k=1,#wantedfiles do
+ local w = wantedfiles[k]
+ local fname = check_subpath(filejoin(pname,w))
+ if fname then
+ result[#result+1] = fname
+ done = true
+ if not allresults then
+ break
+ end
+ end
+ end
+ if not done and doscan then
+ -- collect files in path (and cache the result)
+ local files = resolvers.simplescanfiles(pname,false,true)
for k=1,#wantedfiles do
local w = wantedfiles[k]
- local subpath = ''
- if files[w] then
- subpath = type(files[w]) == "table" and files[w][1] or files[w]
- end
- local fname = filejoin(ppname,subpath,w)
- if isreadable(fname) then
- if trace_detail then
- report_resolving("found '%s' by scanning",fname)
+ local subpath = files[w]
+ if not subpath or subpath == "" then
+ -- rootscan already done
+ elseif type(subpath) == "string" then
+ local fname = check_subpath(filejoin(pname,subpath,w))
+ if fname then
+ result[#result+1] = fname
+ done = true
+ if not allresults then
+ break
+ end
+ end
+ else
+ for i=1,#subpath do
+ local sp = subpath[i]
+ if sp == "" then
+ -- roottest already done
+ else
+ local fname = check_subpath(filejoin(pname,sp,w))
+ if fname then
+ result[#result+1] = fname
+ done = true
+ if not allresults then
+ break
+ end
+ end
+ end
+ end
+ if done and not allresults then
+ break
end
- result[#result+1] = fname
- done = true
- if not allresults then break end
end
end
- else
- -- no access needed for non existing path, speedup (esp in large tree with lots of fake)
end
end
+ else
+ -- no access needed for non existing path, speedup (esp in large tree with lots of fake)
end
end
- if not done and doscan then
- -- todo: slow path scanning ... although we now have tree:// supported in $TEXMF
- end
- if done and not allresults then break end
+ end
+ -- todo recursive scanning
+ if done and not allresults then
+ break
end
end
+ if #result > 0 then
+ return method, result
+ end
end
- for k=1,#result do
- local rk = collapsepath(result[k])
- result[k] = rk
- resolvers.registerintrees(rk) -- for tracing used files
+end
+
+local function find_onpath(filename,filetype,wantedfiles,allresults)
+ if trace_detail then
+ report_resolving("checking filename '%s', filetype '%s', wanted files '%s'",filename, filetype or '?',concat(wantedfiles," | "))
+ end
+ local result = { }
+ for k=1,#wantedfiles do
+ local fname = wantedfiles[k]
+ if fname and isreadable(fname) then
+ filename = fname
+ result[#result+1] = filejoin('.',fname)
+ if not allresults then
+ break
+ end
+ end
end
- if stamp then
- instance.found[stamp] = result
+ if #result > 0 then
+ return "onpath", result
end
- return result
end
--- -- -- begin of main file search routing -- -- --
-
-
-
-
-
-
-
+local function find_otherwise(filename,filetype,wantedfiles,allresults) -- other text files | any | whatever
+ local filelist = collect_files(wantedfiles)
+ local fl = filelist and filelist[1]
+ if fl then
+ return "otherwise", { resolvers.resolve(fl[3]) } -- filename
+ end
+end
+-- we could have a loop over the 6 functions but then we'd have to
+-- always analyze
+collect_instance_files = function(filename,askedformat,allresults) -- uses nested
+ askedformat = askedformat or ""
+ filename = collapsepath(filename)
+ if allresults then
+ -- no need for caching, only used for tracing
+ local filetype, wantedfiles = find_analyze(filename,askedformat)
+ local results = {
+ { find_direct (filename,true) },
+ { find_wildcard (filename,true) },
+ { find_qualified(filename,true) },
+ { find_intree (filename,filetype,wantedfiles,true) },
+ { find_onpath (filename,filetype,wantedfiles,true) },
+ { find_otherwise(filename,filetype,wantedfiles,true) },
+ }
+ local result, status, done = { }, { }, { }
+ for k, r in next, results do
+ local method, list = r[1], r[2]
+ if method and list then
+ for i=1,#list do
+ local c = collapsepath(list[i])
+ if not done[c] then
+ result[#result+1] = c
+ done[c] = true
+ end
+ status[#status+1] = format("%-10s: %s",method,c)
+ end
+ end
+ end
+ if trace_detail then
+ report_resolving("lookup status: %s",table.serialize(status,filename))
+ end
+ return result, status
+ else
+ local method, result, stamp, filetype, wantedfiles
+ if instance.remember then
+ stamp = format("%s--%s", filename, askedformat)
+ result = stamp and instance.found[stamp]
+ if result then
+ if trace_locating then
+ report_resolving("remembered file '%s'",filename)
+ end
+ return result
+ end
+ end
+ method, result = find_direct(filename)
+ if not result then
+ method, result = find_wildcard(filename)
+ if not result then
+ method, result = find_qualified(filename)
+ if not result then
+ filetype, wantedfiles = find_analyze(filename,askedformat)
+ method, result = find_intree(filename,filetype,wantedfiles)
+ if not result then
+ method, result = find_onpath(filename,filetype,wantedfiles)
+ if not result then
+ method, result = find_otherwise(filename,filetype,wantedfiles)
+ end
+ end
+ end
+ end
+ end
+ if result and #result > 0 then
+ local foundname = collapsepath(result[1])
+ resolvers.registerintrees(filename,askedformat,filetype,method,foundname)
+ result = { foundname }
+ else
+ result = { } -- maybe false
+ end
+ if stamp then
+ if trace_locating then
+ report_resolving("remembering file '%s'",filename)
+ end
+ instance.found[stamp] = result
+ end
+ return result
+ end
+end
-- -- -- end of main file search routing -- -- --
+
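+-- An illustrative sketch (not in the original patch): the search order that the
+-- split above implements, tried until one finder returns a result:
+--
+-- direct -> wildcard -> qualified -> intree -> onpath -> otherwise
+--
+-- resolvers.findfile("context.mkiv")  -- first hit, method recorded via registerintrees
+-- resolvers.findfiles("context.mkiv") -- collect all matches for tracing
+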
local function findfiles(filename,filetype,allresults)
- local result = collect_instance_files(filename,filetype or "",allresults)
- if #result == 0 then
+ local result, status = collect_instance_files(filename,filetype or "",allresults)
+ if not result or #result == 0 then
local lowered = lower(filename)
if filename ~= lowered then
- return collect_instance_files(lowered,filetype or "",allresults)
+ result, status = collect_instance_files(lowered,filetype or "",allresults)
end
end
- return result
+ return result or { }, status
end
function resolvers.findfiles(filename,filetype)
@@ -12370,6 +13646,10 @@ function resolvers.load(option)
return files and next(files) and true
end
+function resolvers.loadtime()
+ return statistics.elapsedtime(instance)
+end
+
local function report(str)
if trace_locating then
report_resolving(str) -- has already verbose
@@ -12383,6 +13663,9 @@ function resolvers.dowithfilesandreport(command, files, ...) -- will move
if trace_locating then
report('') -- ?
end
+ if type(files) == "string" then
+ files = { files }
+ end
for f=1,#files do
local file = files[f]
local result = command(file,...)
@@ -12540,6 +13823,8 @@ local cleanpath, findgivenfile, expansion = resolvers.cleanpath, resolvers.findg
local getenv = resolvers.getenv -- we can probably also use resolvers.expansion
local P, Cs, lpegmatch = lpeg.P, lpeg.Cs, lpeg.match
+-- getenv = function(...) return resolvers.getenv(...) end -- needs checking (definitions change later on)
+
prefixes.environment = function(str)
return cleanpath(expansion(str))
end
@@ -12578,7 +13863,7 @@ end
prefixes.filename = function(str)
local fullname = findgivenfile(str) or ""
- return cleanpath(file.basename((fullname ~= "" and fullname) or str))
+ return cleanpath(file.basename((fullname ~= "" and fullname) or str)) -- no cleanpath needed here
end
prefixes.pathname = function(str)
@@ -12621,8 +13906,9 @@ function resolvers.allprefixes(separator)
end
local function _resolve_(method,target)
- if prefixes[method] then
- return prefixes[method](target)
+ local action = prefixes[method]
+ if action then
+ return action(target)
else
return method .. ":" .. target
end
@@ -12637,7 +13923,7 @@ end
local function resolve(str) -- use schemes, this one is then for the commandline only
local res = resolved[str]
if not res then
- res = gsub(str,"([a-z][a-z]+):([^ \"\';]*)",_resolve_)
+ res = gsub(str,"([a-z][a-z]+):([^ \"\';]*)",_resolve_) -- home:xx;selfautoparent:xx; etc
resolved[str] = res
abstract[res] = str
end
@@ -12797,7 +14083,7 @@ end
function generators.file(specification)
local path = specification.filename
- local content = resolvers.scanfiles(path)
+ local content = resolvers.scanfiles(path,false,true) -- scan once
resolvers.registerfilehash(path,content,true)
end
@@ -13152,6 +14438,18 @@ local archives = zip.archives
zip.registeredfiles = zip.registeredfiles or { }
local registeredfiles = zip.registeredfiles
+local limited = false
+
+directives.register("system.inputmode", function(v)
+ if not limited then
+ local i_limiter = io.i_limiter(v)
+ if i_limiter then
+ zip.open = i_limiter.protect(zip.open)
+ limited = true
+ end
+ end
+end)
+
local function validzip(str) -- todo: use url splitter
if not find(str,"^zip://") then
return "zip:///" .. str
@@ -13419,11 +14717,11 @@ function resolvers.finders.tree(specification)
end
function resolvers.locators.tree(specification)
- -- TH: resolvers.resolve() call was missing here
- local name = resolvers.resolve(specification.filename)
- if name ~= '' and lfs.isdir(name) then
+ local name = specification.filename
+ local realname = resolvers.resolve(name) -- no shortcut
+ if realname and realname ~= '' and lfs.isdir(realname) then
if trace_locating then
- report_trees("locator '%s' found",name)
+ report_trees("locator '%s' found",realname)
end
resolvers.appendhash('tree',name,false) -- don't cache
elseif trace_locating then
@@ -13437,6 +14735,8 @@ function resolvers.hashers.tree(specification)
report_trees("analysing '%s'",name)
end
resolvers.methodhandler("hashers",name)
+
+ resolvers.generators.file(specification)
end
resolvers.concatinators.tree = resolvers.concatinators.file
@@ -13596,8 +14896,15 @@ local function loaded(libpaths,name,simple)
end
package.loaders[2] = function(name) -- was [#package.loaders+1]
- if trace_locating then -- mode detail
- report_libraries("! locating '%s'",name)
+ if file.suffix(name) == "" then
+ name = file.addsuffix(name,"lua") -- maybe a list
+ if trace_locating then -- mode detail
+ report_libraries("! locating '%s' with forced suffix",name)
+ end
+ else
+ if trace_locating then -- mode detail
+ report_libraries("! locating '%s'",name)
+ end
end
for i=1,#libformats do
local format = libformats[i]
@@ -13804,7 +15111,13 @@ function resolvers.load_tree(tree,resolve)
-- Beware, we need to obey the relocatable autoparent so we
-- set TEXMFCNF to its raw value. This is somewhat tricky when
-- we run a mkii job from within. Therefore, in mtxrun, there
- -- is a resolve applied when we're in mkii/kpse mode.
+ -- is a resolve applied when we're in mkii/kpse mode or when
+ -- --resolve is passed to mtxrun. Maybe we should also set the
+ -- local AUTOPARENT etc. although these are always set anew.
+
+ if resolve then
+ resolvers.luacnfspec = resolvers.resolve(resolvers.luacnfspec)
+ end
setenv('SELFAUTOPARENT', newroot)
setenv('SELFAUTODIR', newtree)
@@ -13812,11 +15125,12 @@ function resolvers.load_tree(tree,resolve)
setenv('TEXROOT', newroot)
setenv('TEXOS', texos)
setenv('TEXMFOS', texmfos)
- setenv('TEXMFCNF', resolvers.luacnfspec, not resolve)
- setenv("PATH", newpath .. io.pathseparator .. getenv("PATH"))
+ setenv('TEXMFCNF', resolvers.luacnfspec,true) -- already resolved
+ setenv('PATH', newpath .. io.pathseparator .. getenv('PATH'))
report_tds("changing from root '%s' to '%s'",oldroot,newroot)
- report_tds("prepending '%s' to binary path",newpath)
+ report_tds("prepending '%s' to PATH",newpath)
+ report_tds("setting TEXMFCNF to '%s'",resolvers.luacnfspec)
report_tds()
end
end
@@ -13837,6 +15151,7 @@ if not modules then modules = { } end modules ['data-lst'] = {
-- used in mtxrun, can be loaded later .. todo
local find, concat, upper, format = string.find, table.concat, string.upper, string.format
+local fastcopy, sortedpairs = table.fastcopy, table.sortedpairs
resolvers.listers = resolvers.listers or { }
@@ -13867,10 +15182,10 @@ function resolvers.listers.variables(pattern)
end
end
end
- local env = table.fastcopy(environment)
- local var = table.fastcopy(variables)
- local exp = table.fastcopy(expansions)
- for key, value in table.sortedpairs(configured) do
+ local env = fastcopy(environment)
+ local var = fastcopy(variables)
+ local exp = fastcopy(expansions)
+ for key, value in sortedpairs(configured) do
if key ~= "" and (pattern == "" or find(upper(key),pattern)) then
report_lists(key)
report_lists(" env: %s",tabstr(rawget(environment,key)) or "unset")
@@ -13879,9 +15194,9 @@ function resolvers.listers.variables(pattern)
report_lists(" res: %s",resolvers.resolve(expansions[key]) or "unset")
end
end
- instance.environment = table.fastcopy(env)
- instance.variables = table.fastcopy(var)
- instance.expansions = table.fastcopy(exp)
+ instance.environment = fastcopy(env)
+ instance.variables = fastcopy(var)
+ instance.expansions = fastcopy(exp)
end
function resolvers.listers.configurations(report)
@@ -14168,8 +15483,8 @@ own = { } -- not local, might change
own.libs = { -- order can be made better
'l-string.lua',
- 'l-lpeg.lua',
'l-table.lua',
+ 'l-lpeg.lua',
'l-io.lua',
'l-number.lua',
'l-set.lua',
@@ -14240,7 +15555,7 @@ end
-- End of hack.
-local format, gsub, gmatch, match = string.format, string.gsub, string.gmatch, string.match
+local format, gsub, gmatch, match, find = string.format, string.gsub, string.gmatch, string.match, string.find
local concat = table.concat
own.name = (environment and environment.ownname) or arg[0] or 'mtxrun.lua'
@@ -14362,7 +15677,7 @@ local helpinfo = [[
--resolve resolve prefixed arguments
--ctxlua run internally (using preloaded libs)
--internal run script using built in libraries (same as --ctxlua)
---locate locate given filename
+--locate locate given filename in database (default) or system (--first --all --detail)
--autotree use texmf tree cf. env 'texmfstart_tree' or 'texmfstarttree'
--tree=pathtotree use given texmf tree (default file: 'setuptex.tmf')
@@ -14410,7 +15725,7 @@ local helpinfo = [[
local application = logs.application {
name = "mtxrun",
- banner = "ConTeXt TDS Runner Tool 1.30",
+ banner = "ConTeXt TDS Runner Tool 1.31",
helpinfo = helpinfo,
}
@@ -14455,12 +15770,12 @@ runners.registered = {
xmltools = { 'xmltools.rb', true },
-- luatools = { 'luatools.lua', true },
mtxtools = { 'mtxtools.rb', true },
- pdftrimwhite = { 'pdftrimwhite.pl', false }
+ pdftrimwhite = { 'pdftrimwhite.pl', false },
}
runners.launchers = {
windows = { },
- unix = { }
+ unix = { },
}
-- like runners.libpath("framework"): looks on script's subpath
@@ -14487,7 +15802,7 @@ function runners.prepare()
end
local touchname = environment.argument("iftouched")
if type(touchname) == "string" and touchname ~= "" then
- local oldname, newname = string.split(touchname, ",")
+ local oldname, newname = string.splitup(touchname, ",")
if oldname and newname and oldname ~= "" and newname ~= "" then
if not file.needs_updating(oldname,newname) then
if e_verbose then
@@ -14521,11 +15836,11 @@ function runners.execute_script(fullname,internal,nosplit)
if path ~= "" then
result = fullname
elseif name then
- name = name:gsub("^int[%a]*:",function()
+ name = gsub(name,"^int[%a]*:",function()
internal = true
return ""
end )
- name = name:gsub("^script:","")
+ name = gsub(name,"^script:","")
if suffix == "" and runners.registered[name] and runners.registered[name][1] then
name = runners.registered[name][1]
suffix = file.extname(name)
@@ -14555,13 +15870,15 @@ function runners.execute_script(fullname,internal,nosplit)
environment.ownscript = result
dofile(result)
else
- local quotedresult = result
- if string.match(result,' ') and not string.match(result,"^\".*\"$") then quotedresult = '"' .. result .. '"' end
local binary = runners.applications[file.extname(result)]
+ result = string.quoted(string.unquoted(result))
+ -- if string.match(result,' ') and not string.match(result,"^\".*\"$") then
+ -- result = '"' .. result .. '"'
+ -- end
if binary and binary ~= "" then
- quotedresult = binary .. " " .. quotedresult
+ result = binary .. " " .. result
end
- local command = quotedresult .. " " .. environment.reconstructcommandline(environment.arguments_after,noquote)
+ local command = result .. " " .. environment.reconstructcommandline(environment.arguments_after,noquote)
if e_verbose then
report()
report("executing: %s",command)
@@ -14606,7 +15923,7 @@ function runners.execute_program(fullname)
local before, after = environment.splitarguments(fullname)
for k=1,#after do after[k] = resolvers.resolve(after[k]) end
environment.initializearguments(after)
- fullname = fullname:gsub("^bin:","")
+ fullname = gsub(fullname,"^bin:","")
local command = fullname .. " " .. (environment.reconstructcommandline(after or "",noquote) or "")
report()
report("executing: %s",command)
@@ -14667,10 +15984,24 @@ function runners.resolve_string(filename)
end
end
-function runners.locate_file(filename)
- -- differs from texmfstart where locate appends .com .exe .bat ... todo
+-- differs from texmfstart where locate appends .com .exe .bat ... todo
+
+function runners.locate_file(filename) -- was given file but only searches in tree
if filename and filename ~= "" then
- runners.report_location(resolvers.findgivenfile(filename))
+ if environment.argument("first") then
+ runners.report_location(resolvers.findfile(filename))
+ -- resolvers.dowithfilesandreport(resolvers.findfile,filename)
+ elseif environment.argument("all") then
+ local result, status = resolvers.findfiles(filename)
+ if status and environment.argument("detail") then
+ runners.report_location(status)
+ else
+ runners.report_location(result)
+ end
+ else
+ runners.report_location(resolvers.findgivenfile(filename))
+ -- resolvers.dowithfilesandreport(resolvers.findgivenfile,filename)
+ end
end
end
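+-- Illustrative command lines (not in the original patch) matching the new
+-- --first/--all/--detail handling above; the file name is made up.
+--
+--   mtxrun --locate context.mkiv                 -- database lookup (findgivenfile)
+--   mtxrun --locate --first context.mkiv         -- first match via findfile
+--   mtxrun --locate --all --detail context.mkiv  -- all matches plus per-method status
+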
@@ -14679,12 +16010,12 @@ function runners.locate_platform()
end
function runners.report_location(result)
- if e_verbose then
- reportline()
- if result and result ~= "" then
- report(result)
- else
- report("not found")
+ if type(result) == "table" then
+ for i=1,#result do
+ if i > 1 then
+ io.write("\n")
+ end
+ io.write(result[i])
end
else
io.write(result)
@@ -14742,8 +16073,8 @@ function resolvers.launch(str)
end
function runners.launch_file(filename)
- instance.allresults = true
trackers.enable("resolvers.locating")
+ local allresults = environment.arguments["all"]
local pattern = environment.arguments["pattern"]
if not pattern or pattern == "" then
pattern = filename
@@ -14751,15 +16082,15 @@ function runners.launch_file(filename)
if not pattern or pattern == "" then
report("provide name or --pattern=")
else
- local t = resolvers.findfiles(pattern)
+ local t = resolvers.findfiles(pattern,nil,allresults)
if not t or #t == 0 then
- t = resolvers.findfiles("*/" .. pattern)
+ t = resolvers.findfiles("*/" .. pattern,nil,allresults)
end
if not t or #t == 0 then
- t = resolvers.findfiles("*/" .. pattern .. "*")
+ t = resolvers.findfiles("*/" .. pattern .. "*",nil,allresults)
end
if t and #t > 0 then
- if environment.arguments["all"] then
+ if allresults then
for _, v in pairs(t) do
report("launching %s", v)
resolvers.launch(v)
@@ -14774,6 +16105,11 @@ function runners.launch_file(filename)
end
end
+local mtxprefixes = {
+ { "^mtx%-", "mtx-" },
+ { "^mtx%-t%-", "mtx-t-" },
+}
+
function runners.find_mtx_script(filename)
local function found(name)
local path = file.dirname(name)
@@ -14799,24 +16135,27 @@ function runners.find_mtx_script(filename)
return fullname
end
-- mtx- prefix checking
- local mtxprefix = (filename:find("^mtx%-") and "") or "mtx-"
- -- context namespace, mtx-<filename>
- fullname = mtxprefix .. filename
- fullname = found(fullname) or resolvers.findfile(fullname)
- if fullname and fullname ~= "" then
- return fullname
- end
- -- context namespace, mtx-<filename>s
- fullname = mtxprefix .. basename .. "s" .. "." .. suffix
- fullname = found(fullname) or resolvers.findfile(fullname)
- if fullname and fullname ~= "" then
- return fullname
- end
- -- context namespace, mtx-<filename minus trailing s>
- fullname = mtxprefix .. basename:gsub("s$","") .. "." .. suffix
- fullname = found(fullname) or resolvers.findfile(fullname)
- if fullname and fullname ~= "" then
- return fullname
+ for i=1,#mtxprefixes do
+ local mtxprefix = mtxprefixes[i]
+ mtxprefix = find(filename,mtxprefix[1]) and "" or mtxprefix[2]
+ -- context namespace, mtx-<filename>
+ fullname = mtxprefix .. filename
+ fullname = found(fullname) or resolvers.findfile(fullname)
+ if fullname and fullname ~= "" then
+ return fullname
+ end
+ -- context namespace, mtx-<filename>s
+ fullname = mtxprefix .. basename .. "s" .. "." .. suffix
+ fullname = found(fullname) or resolvers.findfile(fullname)
+ if fullname and fullname ~= "" then
+ return fullname
+ end
+ -- context namespace, mtx-<filename minus trailing s>
+ fullname = mtxprefix .. gsub(basename,"s$","") .. "." .. suffix
+ fullname = found(fullname) or resolvers.findfile(fullname)
+ if fullname and fullname ~= "" then
+ return fullname
+ end
end
-- context namespace, just <filename>
fullname = resolvers.findfile(filename)
@@ -14963,9 +16302,11 @@ instance.lsrmode = environment.argument("lsr") or false
local is_mkii_stub = runners.registered[file.removesuffix(file.basename(filename))]
-if environment.argument("usekpse") or environment.argument("forcekpse") or is_mkii_stub then
+local e_argument = environment.argument
+
+if e_argument("usekpse") or e_argument("forcekpse") or is_mkii_stub then
- resolvers.load_tree(environment.argument('tree'),true) -- force resolve of TEXMFCNF
+ resolvers.load_tree(e_argument('tree'),true) -- force resolve of TEXMFCNF
os.setenv("engine","")
os.setenv("progname","")
@@ -14978,7 +16319,7 @@ if environment.argument("usekpse") or environment.argument("forcekpse") or is_mk
other = "other text files",
}
- local progname = environment.argument("progname") or 'context'
+ local progname = e_argument("progname") or 'context'
local function kpse_initialized()
texconfig.kpse_init = true
@@ -14993,7 +16334,7 @@ if environment.argument("usekpse") or environment.argument("forcekpse") or is_mk
local findfile = resolvers.findfile
local showpath = resolvers.showpath
- if environment.argument("forcekpse") then
+ if e_argument("forcekpse") then
function resolvers.findfile(name,kind)
return (kpse_initialized():find_file(resolvers.cleanpath(name),(kind ~= "" and (remapper[kind] or kind)) or "tex") or "") or ""
@@ -15002,7 +16343,7 @@ if environment.argument("usekpse") or environment.argument("forcekpse") or is_mk
return (kpse_initialized():show_path(name)) or ""
end
- elseif environment.argument("usekpse") or is_mkii_stub then
+ elseif e_argument("usekpse") or is_mkii_stub then
resolvers.load()
@@ -15041,12 +16382,12 @@ else
end
end
- resolvers.load_tree(environment.argument('tree'))
+ resolvers.load_tree(e_argument('tree'),e_argument("resolve"))
end
-if environment.argument("selfmerge") then
+if e_argument("selfmerge") then
-- embed used libraries
@@ -15056,27 +16397,27 @@ if environment.argument("selfmerge") then
utilities.merger.selfmerge(own.name,own.libs,{ found })
end
-elseif environment.argument("selfclean") then
+elseif e_argument("selfclean") then
-- remove embedded libraries
runners.loadbase()
utilities.merger.selfclean(own.name)
-elseif environment.argument("selfupdate") then
+elseif e_argument("selfupdate") then
runners.loadbase()
trackers.enable("resolvers.locating")
resolvers.updatescript(own.name,"mtxrun")
-elseif environment.argument("ctxlua") or environment.argument("internal") then
+elseif e_argument("ctxlua") or e_argument("internal") then
-- run a script by loading it (using libs)
runners.loadbase()
ok = runners.execute_script(filename,true)
-elseif environment.argument("script") or environment.argument("scripts") then
+elseif e_argument("script") or e_argument("scripts") then
-- run a script by loading it (using libs), pass args
@@ -15087,109 +16428,110 @@ elseif environment.argument("script") or environment.argument("scripts") then
ok = runners.execute_ctx_script(filename)
end
-elseif environment.argument("execute") then
+elseif e_argument("execute") then
-- execute script
runners.loadbase()
ok = runners.execute_script(filename)
-elseif environment.argument("direct") then
+elseif e_argument("direct") then
-- equals bin:
runners.loadbase()
ok = runners.execute_program(filename)
-elseif environment.argument("edit") then
+elseif e_argument("edit") then
-- edit file
runners.loadbase()
runners.edit_script(filename)
-elseif environment.argument("launch") then
+elseif e_argument("launch") then
runners.loadbase()
runners.launch_file(filename)
-elseif environment.argument("makestubs") then
+elseif e_argument("makestubs") then
 -- make stubs (deprecated)
runners.handle_stubs(true)
-elseif environment.argument("removestubs") then
+elseif e_argument("removestubs") then
 -- remove stubs (deprecated)
runners.loadbase()
runners.handle_stubs(false)
-elseif environment.argument("resolve") then
+elseif e_argument("resolve") then
-- resolve string
runners.loadbase()
runners.resolve_string(filename)
-elseif environment.argument("locate") then
+elseif e_argument("locate") then
- -- locate file
+ -- locate file (only database)
runners.loadbase()
runners.locate_file(filename)
-elseif environment.argument("platform") or environment.argument("show-platform") then
+elseif e_argument("platform") or e_argument("show-platform") then
-- locate platform
runners.loadbase()
runners.locate_platform()
-elseif environment.argument("prefixes") then
+elseif e_argument("prefixes") then
runners.loadbase()
runners.prefixes()
-elseif environment.argument("timedrun") then
+elseif e_argument("timedrun") then
 -- timed run
runners.loadbase()
runners.timedrun(filename)
-elseif environment.argument("variables") or environment.argument("show-variables") or environment.argument("expansions") or environment.argument("show-expansions") then
+elseif e_argument("variables") or e_argument("show-variables") or e_argument("expansions") or e_argument("show-expansions") then
-- luatools: runners.execute_ctx_script("mtx-base","--expansions",filename)
resolvers.load("nofiles")
- resolvers.listers.variables(environment.argument("pattern"))
+ resolvers.listers.variables(e_argument("pattern"))
-elseif environment.argument("configurations") or environment.argument("show-configurations") then
+elseif e_argument("configurations") or e_argument("show-configurations") then
-- luatools: runners.execute_ctx_script("mtx-base","--configurations",filename)
resolvers.load("nofiles")
resolvers.listers.configurations()
-elseif environment.argument("find-file") then
+elseif e_argument("find-file") then
-- luatools: runners.execute_ctx_script("mtx-base","--find-file",filename)
resolvers.load()
- local e_pattern = environment.argument("pattern")
- local e_format = environment.argument("format")
+ local e_all = e_argument("all")
+ local e_pattern = e_argument("pattern")
+ local e_format = e_argument("format")
+ local finder = e_all and resolvers.findfiles or resolvers.findfile
if not e_pattern then
runners.register_arguments(filename)
environment.initializearguments(environment.arguments_after)
- resolvers.dowithfilesandreport(resolvers.findfiles,environment.files,e_format)
+ resolvers.dowithfilesandreport(finder,environment.files,e_format)
elseif type(e_pattern) == "string" then
- instance.allresults = true -- brrrr
- resolvers.dowithfilesandreport(resolvers.findfiles,{ e_pattern }, e_format)
+ resolvers.dowithfilesandreport(finder,{ e_pattern },e_format)
end
-elseif environment.argument("find-path") then
+elseif e_argument("find-path") then
-- luatools: runners.execute_ctx_script("mtx-base","--find-path",filename)
@@ -15201,7 +16543,7 @@ elseif environment.argument("find-path") then
print(path)
end
-elseif environment.argument("expand-braces") then
+elseif e_argument("expand-braces") then
-- luatools: runners.execute_ctx_script("mtx-base","--expand-braces",filename)
@@ -15210,7 +16552,7 @@ elseif environment.argument("expand-braces") then
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.expandbraces, environment.files)
-elseif environment.argument("expand-path") then
+elseif e_argument("expand-path") then
-- luatools: runners.execute_ctx_script("mtx-base","--expand-path",filename)
@@ -15219,7 +16561,7 @@ elseif environment.argument("expand-path") then
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.expandpath, environment.files)
-elseif environment.argument("expand-var") or environment.argument("expand-variable") then
+elseif e_argument("expand-var") or e_argument("expand-variable") then
-- luatools: runners.execute_ctx_script("mtx-base","--expand-var",filename)
@@ -15228,7 +16570,7 @@ elseif environment.argument("expand-var") or environment.argument("expand-variab
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.expansion, environment.files)
-elseif environment.argument("show-path") or environment.argument("path-value") then
+elseif e_argument("show-path") or e_argument("path-value") then
-- luatools: runners.execute_ctx_script("mtx-base","--show-path",filename)
@@ -15237,7 +16579,7 @@ elseif environment.argument("show-path") or environment.argument("path-value") t
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.showpath, environment.files)
-elseif environment.argument("var-value") or environment.argument("show-value") then
+elseif e_argument("var-value") or e_argument("show-value") then
-- luatools: runners.execute_ctx_script("mtx-base","--show-value",filename)
@@ -15246,28 +16588,36 @@ elseif environment.argument("var-value") or environment.argument("show-value") t
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.variable,environment.files)
-elseif environment.argument("format-path") then
+elseif e_argument("format-path") then
-- luatools: runners.execute_ctx_script("mtx-base","--format-path",filename)
resolvers.load()
report(caches.getwritablepath("format"))
-elseif environment.argument("pattern") then
+elseif e_argument("pattern") then
-- luatools
- runners.execute_ctx_script("mtx-base","--pattern='" .. environment.argument("pattern") .. "'",filename)
+ runners.execute_ctx_script("mtx-base","--pattern='" .. e_argument("pattern") .. "'",filename)
-elseif environment.argument("generate") then
+elseif e_argument("generate") then
-- luatools
- instance.renewcache = true
- trackers.enable("resolvers.locating")
- resolvers.load()
+ if filename and filename ~= "" then
+ resolvers.load("nofiles")
+ trackers.enable("resolvers.locating")
+ resolvers.renew(filename)
+ else
+ instance.renewcache = true
+ trackers.enable("resolvers.locating")
+ resolvers.load()
+ end
-elseif environment.argument("make") or environment.argument("ini") or environment.argument("compile") then
+ e_verbose = true
+
+elseif e_argument("make") or e_argument("ini") or e_argument("compile") then
-- luatools: runners.execute_ctx_script("mtx-base","--make",filename)
@@ -15275,29 +16625,33 @@ elseif environment.argument("make") or environment.argument("ini") or environmen
trackers.enable("resolvers.locating")
environment.make_format(filename)
-elseif environment.argument("run") then
+elseif e_argument("run") then
-- luatools
runners.execute_ctx_script("mtx-base","--run",filename)
-elseif environment.argument("fmt") then
+elseif e_argument("fmt") then
-- luatools
runners.execute_ctx_script("mtx-base","--fmt",filename)
-elseif environment.argument("help") and filename=='base' then
+elseif e_argument("help") and filename=='base' then
-- luatools
runners.execute_ctx_script("mtx-base","--help")
-elseif environment.argument("help") or filename=='help' or filename == "" then
+elseif e_argument("version") then
+
+ application.version()
+
+elseif e_argument("help") or filename=='help' or filename == "" then
application.help()
-elseif filename:find("^bin:") then
+elseif find(filename,"^bin:") then
runners.loadbase()
ok = runners.execute_program(filename)
diff --git a/Master/bin/win32/mtxrun.dll b/Master/bin/win32/mtxrun.dll
index 4116c5a24a6..4116c5a24a6 100755..100644
--- a/Master/bin/win32/mtxrun.dll
+++ b/Master/bin/win32/mtxrun.dll
Binary files differ
diff --git a/Master/bin/win32/mtxrun.lua b/Master/bin/win32/mtxrun.lua
index 7adcd3023a8..335c4fcb9e0 100755..100644
--- a/Master/bin/win32/mtxrun.lua
+++ b/Master/bin/win32/mtxrun.lua
@@ -160,509 +160,6 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['l-lpeg'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local lpeg = require("lpeg")
-
-local type = type
-
--- Beware, we predefine a bunch of patterns here and one reason for doing so
--- is that we get consistent behaviour in some of the visualizers.
-
-lpeg.patterns = lpeg.patterns or { } -- so that we can share
-local patterns = lpeg.patterns
-
-local P, R, S, V, match = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.match
-local Ct, C, Cs, Cc = lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc
-local lpegtype = lpeg.type
-
-local utfcharacters = string.utfcharacters
-local utfgmatch = unicode and unicode.utf8.gmatch
-
-local anything = P(1)
-local endofstring = P(-1)
-local alwaysmatched = P(true)
-
-patterns.anything = anything
-patterns.endofstring = endofstring
-patterns.beginofstring = alwaysmatched
-patterns.alwaysmatched = alwaysmatched
-
-local digit, sign = R('09'), S('+-')
-local cr, lf, crlf = P("\r"), P("\n"), P("\r\n")
-local newline = crlf + cr + lf
-local escaped = P("\\") * anything
-local squote = P("'")
-local dquote = P('"')
-local space = P(" ")
-
-local utfbom_32_be = P('\000\000\254\255')
-local utfbom_32_le = P('\255\254\000\000')
-local utfbom_16_be = P('\255\254')
-local utfbom_16_le = P('\254\255')
-local utfbom_8 = P('\239\187\191')
-local utfbom = utfbom_32_be + utfbom_32_le
- + utfbom_16_be + utfbom_16_le
- + utfbom_8
-local utftype = utfbom_32_be / "utf-32-be" + utfbom_32_le / "utf-32-le"
- + utfbom_16_be / "utf-16-be" + utfbom_16_le / "utf-16-le"
- + utfbom_8 / "utf-8" + alwaysmatched / "unknown"
-
-local utf8next = R("\128\191")
-
-patterns.utf8one = R("\000\127")
-patterns.utf8two = R("\194\223") * utf8next
-patterns.utf8three = R("\224\239") * utf8next * utf8next
-patterns.utf8four = R("\240\244") * utf8next * utf8next * utf8next
-patterns.utfbom = utfbom
-patterns.utftype = utftype
-
-local utf8char = patterns.utf8one + patterns.utf8two + patterns.utf8three + patterns.utf8four
-local validutf8char = utf8char^0 * endofstring * Cc(true) + Cc(false)
-
-patterns.utf8 = utf8char
-patterns.utf8char = utf8char
-patterns.validutf8 = validutf8char
-patterns.validutf8char = validutf8char
-
-patterns.digit = digit
-patterns.sign = sign
-patterns.cardinal = sign^0 * digit^1
-patterns.integer = sign^0 * digit^1
-patterns.float = sign^0 * digit^0 * P('.') * digit^1
-patterns.cfloat = sign^0 * digit^0 * P(',') * digit^1
-patterns.number = patterns.float + patterns.integer
-patterns.cnumber = patterns.cfloat + patterns.integer
-patterns.oct = P("0") * R("07")^1
-patterns.octal = patterns.oct
-patterns.HEX = P("0x") * R("09","AF")^1
-patterns.hex = P("0x") * R("09","af")^1
-patterns.hexadecimal = P("0x") * R("09","AF","af")^1
-patterns.lowercase = R("az")
-patterns.uppercase = R("AZ")
-patterns.letter = patterns.lowercase + patterns.uppercase
-patterns.space = space
-patterns.tab = P("\t")
-patterns.spaceortab = patterns.space + patterns.tab
-patterns.eol = S("\n\r")
-patterns.spacer = S(" \t\f\v") -- + string.char(0xc2, 0xa0) if we want utf (cf mail roberto)
-patterns.newline = newline
-patterns.emptyline = newline^1
-patterns.nonspacer = 1 - patterns.spacer
-patterns.whitespace = patterns.eol + patterns.spacer
-patterns.nonwhitespace = 1 - patterns.whitespace
-patterns.equal = P("=")
-patterns.comma = P(",")
-patterns.commaspacer = P(",") * patterns.spacer^0
-patterns.period = P(".")
-patterns.colon = P(":")
-patterns.semicolon = P(";")
-patterns.underscore = P("_")
-patterns.escaped = escaped
-patterns.squote = squote
-patterns.dquote = dquote
-patterns.nosquote = (escaped + (1-squote))^0
-patterns.nodquote = (escaped + (1-dquote))^0
-patterns.unsingle = (squote/"") * patterns.nosquote * (squote/"")
-patterns.undouble = (dquote/"") * patterns.nodquote * (dquote/"")
-patterns.unquoted = patterns.undouble + patterns.unsingle -- more often undouble
-patterns.unspacer = ((patterns.spacer^1)/"")^0
-
-patterns.somecontent = (anything - newline - space)^1 -- (utf8char - newline - space)^1
-patterns.beginline = #(1-newline)
-
-local unquoted = Cs(patterns.unquoted * endofstring) -- not C
-
-function string.unquoted(str)
- return match(unquoted,str) or str
-end
-
-
-function lpeg.anywhere(pattern) --slightly adapted from website
- return P { P(pattern) + 1 * V(1) } -- why so complex?
-end
-
-function lpeg.splitter(pattern, action)
- return (((1-P(pattern))^1)/action+1)^0
-end
-
-local splitters_s, splitters_m = { }, { }
-
-local function splitat(separator,single)
- local splitter = (single and splitters_s[separator]) or splitters_m[separator]
- if not splitter then
- separator = P(separator)
- local other = C((1 - separator)^0)
- if single then
- local any = anything
- splitter = other * (separator * C(any^0) + "") -- ?
- splitters_s[separator] = splitter
- else
- splitter = other * (separator * other)^0
- splitters_m[separator] = splitter
- end
- end
- return splitter
-end
-
-lpeg.splitat = splitat
-
-
-local cache = { }
-
-function lpeg.split(separator,str)
- local c = cache[separator]
- if not c then
- c = Ct(splitat(separator))
- cache[separator] = c
- end
- return match(c,str)
-end
-
-function string.split(str,separator)
- local c = cache[separator]
- if not c then
- c = Ct(splitat(separator))
- cache[separator] = c
- end
- return match(c,str)
-end
-
-local spacing = patterns.spacer^0 * newline -- sort of strip
-local empty = spacing * Cc("")
-local nonempty = Cs((1-spacing)^1) * spacing^-1
-local content = (empty + nonempty)^1
-
-patterns.textline = content
-
-
-local linesplitter = Ct(splitat(newline))
-
-patterns.linesplitter = linesplitter
-
-function string.splitlines(str)
- return match(linesplitter,str)
-end
-
-local utflinesplitter = utfbom^-1 * Ct(splitat(newline))
-
-patterns.utflinesplitter = utflinesplitter
-
-function string.utfsplitlines(str)
- return match(utflinesplitter,str)
-end
-
-
-local cache = { }
-
-function lpeg.checkedsplit(separator,str)
- local c = cache[separator]
- if not c then
- separator = P(separator)
- local other = C((1 - separator)^1)
- c = Ct(separator^0 * other * (separator^1 * other)^0)
- cache[separator] = c
- end
- return match(c,str)
-end
-
-function string.checkedsplit(str,separator)
- local c = cache[separator]
- if not c then
- separator = P(separator)
- local other = C((1 - separator)^1)
- c = Ct(separator^0 * other * (separator^1 * other)^0)
- cache[separator] = c
- end
- return match(c,str)
-end
-
-
-local f1 = string.byte
-
-local function f2(s) local c1, c2 = f1(s,1,2) return c1 * 64 + c2 - 12416 end
-local function f3(s) local c1, c2, c3 = f1(s,1,3) return (c1 * 64 + c2) * 64 + c3 - 925824 end
-local function f4(s) local c1, c2, c3, c4 = f1(s,1,4) return ((c1 * 64 + c2) * 64 + c3) * 64 + c4 - 63447168 end
-
-local utf8byte = patterns.utf8one/f1 + patterns.utf8two/f2 + patterns.utf8three/f3 + patterns.utf8four/f4
-
-patterns.utf8byte = utf8byte
-
-
-
-local cache = { }
-
-function lpeg.stripper(str)
- if type(str) == "string" then
- local s = cache[str]
- if not s then
- s = Cs(((S(str)^1)/"" + 1)^0)
- cache[str] = s
- end
- return s
- else
- return Cs(((str^1)/"" + 1)^0)
- end
-end
-
-local cache = { }
-
-function lpeg.keeper(str)
- if type(str) == "string" then
- local s = cache[str]
- if not s then
- s = Cs((((1-S(str))^1)/"" + 1)^0)
- cache[str] = s
- end
- return s
- else
- return Cs((((1-str)^1)/"" + 1)^0)
- end
-end
-
-function lpeg.frontstripper(str) -- or pattern (yet undocumented)
- return (P(str) + P(true)) * Cs(P(1)^0)
-end
-
-function lpeg.endstripper(str) -- or pattern (yet undocumented)
- return Cs((1 - P(str) * P(-1))^0)
-end
-
--- Just for fun I looked at the used bytecode and
--- p = (p and p + pp) or pp gets one more (testset).
-
-function lpeg.replacer(one,two)
- if type(one) == "table" then
- local no = #one
- if no > 0 then
- local p
- for i=1,no do
- local o = one[i]
- local pp = P(o[1]) / o[2]
- if p then
- p = p + pp
- else
- p = pp
- end
- end
- return Cs((p + 1)^0)
- end
- else
- two = two or ""
- return Cs((P(one)/two + 1)^0)
- end
-end
-
-local splitters_f, splitters_s = { }, { }
-
-function lpeg.firstofsplit(separator) -- always return value
- local splitter = splitters_f[separator]
- if not splitter then
- separator = P(separator)
- splitter = C((1 - separator)^0)
- splitters_f[separator] = splitter
- end
- return splitter
-end
-
-function lpeg.secondofsplit(separator) -- nil if not split
- local splitter = splitters_s[separator]
- if not splitter then
- separator = P(separator)
- splitter = (1 - separator)^0 * separator * C(anything^0)
- splitters_s[separator] = splitter
- end
- return splitter
-end
-
-function lpeg.balancer(left,right)
- left, right = P(left), P(right)
- return P { left * ((1 - left - right) + V(1))^0 * right }
-end
-
-
-
-local nany = utf8char/""
-
-function lpeg.counter(pattern)
- pattern = Cs((P(pattern)/" " + nany)^0)
- return function(str)
- return #match(pattern,str)
- end
-end
-
-if utfgmatch then
-
- function lpeg.count(str,what) -- replaces string.count
- if type(what) == "string" then
- local n = 0
- for _ in utfgmatch(str,what) do
- n = n + 1
- end
- return n
- else -- 4 times slower but still faster than / function
- return #match(Cs((P(what)/" " + nany)^0),str)
- end
- end
-
-else
-
- local cache = { }
-
- function lpeg.count(str,what) -- replaces string.count
- if type(what) == "string" then
- local p = cache[what]
- if not p then
- p = Cs((P(what)/" " + nany)^0)
- cache[p] = p
- end
- return #match(p,str)
- else -- 4 times slower but still faster than / function
- return #match(Cs((P(what)/" " + nany)^0),str)
- end
- end
-
-end
-
-local patterns_escapes = { -- also defines in l-string
- ["%"] = "%%",
- ["."] = "%.",
- ["+"] = "%+", ["-"] = "%-", ["*"] = "%*",
- ["["] = "%[", ["]"] = "%]",
- ["("] = "%)", [")"] = "%)",
- -- ["{"] = "%{", ["}"] = "%}"
- -- ["^"] = "%^", ["$"] = "%$",
-}
-
-local simple_escapes = { -- also defines in l-string
- ["-"] = "%-",
- ["."] = "%.",
- ["?"] = ".",
- ["*"] = ".*",
-}
-
-local p = Cs((S("-.+*%()[]") / patterns_escapes + anything)^0)
-local s = Cs((S("-.+*%()[]") / simple_escapes + anything)^0)
-
-function string.escapedpattern(str,simple)
- return match(simple and s or p,str)
-end
-
--- utf extensies
-
-lpeg.UP = lpeg.P
-
-if utfcharacters then
-
- function lpeg.US(str)
- local p
- for uc in utfcharacters(str) do
- if p then
- p = p + P(uc)
- else
- p = P(uc)
- end
- end
- return p
- end
-
-
-elseif utfgmatch then
-
- function lpeg.US(str)
- local p
- for uc in utfgmatch(str,".") do
- if p then
- p = p + P(uc)
- else
- p = P(uc)
- end
- end
- return p
- end
-
-else
-
- function lpeg.US(str)
- local p
- local f = function(uc)
- if p then
- p = p + P(uc)
- else
- p = P(uc)
- end
- end
- match((utf8char/f)^0,str)
- return p
- end
-
-end
-
-local range = Cs(utf8byte) * (Cs(utf8byte) + Cc(false))
-
-local utfchar = unicode and unicode.utf8 and unicode.utf8.char
-
-function lpeg.UR(str,more)
- local first, last
- if type(str) == "number" then
- first = str
- last = more or first
- else
- first, last = match(range,str)
- if not last then
- return P(str)
- end
- end
- if first == last then
- return P(str)
- elseif utfchar and last - first < 8 then -- a somewhat arbitrary criterium
- local p
- for i=first,last do
- if p then
- p = p + P(utfchar(i))
- else
- p = P(utfchar(i))
- end
- end
- return p -- nil when invalid range
- else
- local f = function(b)
- return b >= first and b <= last
- end
- return utf8byte / f -- nil when invalid range
- end
-end
-
-
-
-function lpeg.oneof(list,...) -- lpeg.oneof("elseif","else","if","then")
- if type(list) ~= "table" then
- list = { list, ... }
- end
- -- sort(list) -- longest match first
- local p = P(list[1])
- for l=2,#list do
- p = p + P(list[l])
- end
- return p
-end
-
-function lpeg.is_lpeg(p)
- return p and lpegtype(p) == "pattern"
-end
-
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
if not modules then modules = { } end modules ['l-table'] = {
version = 1.001,
comment = "companion to luat-lib.mkiv",
@@ -675,11 +172,12 @@ local type, next, tostring, tonumber, ipairs, table, string = type, next, tostri
local concat, sort, insert, remove = table.concat, table.sort, table.insert, table.remove
local format, find, gsub, lower, dump, match = string.format, string.find, string.gsub, string.lower, string.dump, string.match
local getmetatable, setmetatable = getmetatable, setmetatable
+local getinfo = debug.getinfo
-- Starting with version 5.2 Lua no longer provide ipairs, which makes
-- sense. As we already used the for loop and # in most places the
-- impact on ConTeXt was not that large; the remaining ipairs already
--- have been replaced. In a similar fashio we also hardly used pairs.
+-- have been replaced. In a similar fashion we also hardly used pairs.
--
-- Just in case, we provide the fallbacks as discussed in Programming
-- in Lua (http://www.lua.org/pil/7.3.html):
@@ -1022,6 +520,8 @@ end
-- problem: there no good number_to_string converter with the best resolution
+local function dummy() end
+
local function do_serialize(root,name,depth,level,indexed)
if level > 0 then
depth = depth .. " "
@@ -1214,19 +714,20 @@ local function do_serialize(root,name,depth,level,indexed)
end
elseif t == "function" then
if functions then
+ local f = getinfo(v).what == "C" and dump(dummy) or dump(v)
+ -- local f = getinfo(v).what == "C" and dump(function(...) return v(...) end) or dump(v)
if tk == "number" then -- or find(k,"^%d+$") then
if hexify then
- handle(format("%s [0x%04X]=loadstring(%q),",depth,k,dump(v)))
+ handle(format("%s [0x%04X]=loadstring(%q),",depth,k,f))
else
- handle(format("%s [%s]=loadstring(%q),",depth,k,dump(v)))
+ handle(format("%s [%s]=loadstring(%q),",depth,k,f))
end
elseif tk == "boolean" then
- handle(format("%s [%s]=loadstring(%q),",depth,tostring(k),dump(v)))
+ handle(format("%s [%s]=loadstring(%q),",depth,tostring(k),f))
elseif noquotes and not reserved[k] and find(k,"^%a[%w%_]*$") then
- handle(format("%s %s=loadstring(%q),",depth,k,dump(v)))
+ handle(format("%s %s=loadstring(%q),",depth,k,f))
else
- -- handle(format("%s [%q]=loadstring(%q),",depth,k,dump(v)))
- handle(format("%s [%q]=loadstring(%q),",depth,k,debug.getinfo(v).what == "C" and "C code" or dump(v)))
+ handle(format("%s [%q]=loadstring(%q),",depth,k,f))
end
end
else
@@ -1571,6 +1072,730 @@ function table.has_one_entry(t)
return t and not next(t,next(t))
end
+-- new
+
+function table.loweredkeys(t) -- maybe utf
+ local l = { }
+ for k, v in next, t do
+ l[lower(k)] = v
+ end
+ return l
+end
+
+-- new, might move (maybe duplicate)
+
+function table.unique(old)
+ local hash = { }
+ local new = { }
+ local n = 0
+ for i=1,#old do
+ local oi = old[i]
+ if not hash[oi] then
+ n = n + 1
+ new[n] = oi
+ hash[oi] = true
+ end
+ end
+ return new
+end
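A minimal sketch of the two new table helpers (plain Lua, nothing ConTeXt-specific is needed):

local u = table.unique { "a", "b", "a", "c", "b" }
-- u is { "a", "b", "c" }: duplicates dropped, first-occurrence order kept
local l = table.loweredkeys { Foo = 1, BAR = 2 }
-- l is { foo = 1, bar = 2 }: keys lowercased, values untouched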
+
+-- function table.sorted(t,...)
+-- table.sort(t,...)
+-- return t -- still sorts in-place
+-- end
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+if not modules then modules = { } end modules ['l-lpeg'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+
+-- a new lpeg fails on a #(1-P(":")) test and really needs a + P(-1)
+
+local lpeg = require("lpeg")
+
+-- tracing (only used when we encounter a problem in integration of lpeg in luatex)
+
+local report = texio and texio.write_nl or print
+
+
+
+
+local type = type
+local byte, char, gmatch = string.byte, string.char, string.gmatch
+
+-- Beware, we predefine a bunch of patterns here and one reason for doing so
+-- is that we get consistent behaviour in some of the visualizers.
+
+lpeg.patterns = lpeg.patterns or { } -- so that we can share
+local patterns = lpeg.patterns
+
+local P, R, S, V, match = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.match
+local Ct, C, Cs, Cc = lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc
+local lpegtype = lpeg.type
+
+local utfcharacters = string.utfcharacters
+local utfgmatch = unicode and unicode.utf8.gmatch
+
+local anything = P(1)
+local endofstring = P(-1)
+local alwaysmatched = P(true)
+
+patterns.anything = anything
+patterns.endofstring = endofstring
+patterns.beginofstring = alwaysmatched
+patterns.alwaysmatched = alwaysmatched
+
+local digit, sign = R('09'), S('+-')
+local cr, lf, crlf = P("\r"), P("\n"), P("\r\n")
+local newline = crlf + S("\r\n") -- cr + lf
+local escaped = P("\\") * anything
+local squote = P("'")
+local dquote = P('"')
+local space = P(" ")
+
+local utfbom_32_be = P('\000\000\254\255')
+local utfbom_32_le = P('\255\254\000\000')
+local utfbom_16_be = P('\255\254')
+local utfbom_16_le = P('\254\255')
+local utfbom_8 = P('\239\187\191')
+local utfbom = utfbom_32_be + utfbom_32_le
+ + utfbom_16_be + utfbom_16_le
+ + utfbom_8
+local utftype = utfbom_32_be / "utf-32-be" + utfbom_32_le / "utf-32-le"
+ + utfbom_16_be / "utf-16-be" + utfbom_16_le / "utf-16-le"
+ + utfbom_8 / "utf-8" + alwaysmatched / "unknown"
+
+local utf8next = R("\128\191")
+
+patterns.utf8one = R("\000\127")
+patterns.utf8two = R("\194\223") * utf8next
+patterns.utf8three = R("\224\239") * utf8next * utf8next
+patterns.utf8four = R("\240\244") * utf8next * utf8next * utf8next
+patterns.utfbom = utfbom
+patterns.utftype = utftype
+
+local utf8char = patterns.utf8one + patterns.utf8two + patterns.utf8three + patterns.utf8four
+local validutf8char = utf8char^0 * endofstring * Cc(true) + Cc(false)
+
+patterns.utf8 = utf8char
+patterns.utf8char = utf8char
+patterns.validutf8 = validutf8char
+patterns.validutf8char = validutf8char
+
+patterns.digit = digit
+patterns.sign = sign
+patterns.cardinal = sign^0 * digit^1
+patterns.integer = sign^0 * digit^1
+patterns.float = sign^0 * digit^0 * P('.') * digit^1
+patterns.cfloat = sign^0 * digit^0 * P(',') * digit^1
+patterns.number = patterns.float + patterns.integer
+patterns.cnumber = patterns.cfloat + patterns.integer
+patterns.oct = P("0") * R("07")^1
+patterns.octal = patterns.oct
+patterns.HEX = P("0x") * R("09","AF")^1
+patterns.hex = P("0x") * R("09","af")^1
+patterns.hexadecimal = P("0x") * R("09","AF","af")^1
+patterns.lowercase = R("az")
+patterns.uppercase = R("AZ")
+patterns.letter = patterns.lowercase + patterns.uppercase
+patterns.space = space
+patterns.tab = P("\t")
+patterns.spaceortab = patterns.space + patterns.tab
+patterns.eol = S("\n\r")
+patterns.spacer = S(" \t\f\v") -- + char(0xc2, 0xa0) if we want utf (cf mail roberto)
+patterns.newline = newline
+patterns.emptyline = newline^1
+patterns.nonspacer = 1 - patterns.spacer
+patterns.whitespace = patterns.eol + patterns.spacer
+patterns.nonwhitespace = 1 - patterns.whitespace
+patterns.equal = P("=")
+patterns.comma = P(",")
+patterns.commaspacer = P(",") * patterns.spacer^0
+patterns.period = P(".")
+patterns.colon = P(":")
+patterns.semicolon = P(";")
+patterns.underscore = P("_")
+patterns.escaped = escaped
+patterns.squote = squote
+patterns.dquote = dquote
+patterns.nosquote = (escaped + (1-squote))^0
+patterns.nodquote = (escaped + (1-dquote))^0
+patterns.unsingle = (squote/"") * patterns.nosquote * (squote/"")
+patterns.undouble = (dquote/"") * patterns.nodquote * (dquote/"")
+patterns.unquoted = patterns.undouble + patterns.unsingle -- more often undouble
+patterns.unspacer = ((patterns.spacer^1)/"")^0
+
+patterns.somecontent = (anything - newline - space)^1 -- (utf8char - newline - space)^1
+patterns.beginline = #(1-newline)
+
+-- print(string.unquoted("test"))
+-- print(string.unquoted([["t\"est"]]))
+-- print(string.unquoted([["t\"est"x]]))
+-- print(string.unquoted("\'test\'"))
+-- print(string.unquoted('"test"'))
+-- print(string.unquoted('"test"'))
+
+function lpeg.anywhere(pattern) --slightly adapted from website
+ return P { P(pattern) + 1 * V(1) } -- why so complex?
+end
+
+function lpeg.splitter(pattern, action)
+ return (((1-P(pattern))^1)/action+1)^0
+end
+
+function lpeg.tsplitter(pattern, action)
+ return Ct((((1-P(pattern))^1)/action+1)^0)
+end
+
+-- problem: the separator can be an lpeg pattern and that does not hash too well, but
+-- it's quite okay as the key is then not garbage collected
+
+local splitters_s, splitters_m, splitters_t = { }, { }, { }
+
+local function splitat(separator,single)
+ local splitter = (single and splitters_s[separator]) or splitters_m[separator]
+ if not splitter then
+ separator = P(separator)
+ local other = C((1 - separator)^0)
+ if single then
+ local any = anything
+ splitter = other * (separator * C(any^0) + "") -- ?
+ splitters_s[separator] = splitter
+ else
+ splitter = other * (separator * other)^0
+ splitters_m[separator] = splitter
+ end
+ end
+ return splitter
+end
+
+local function tsplitat(separator)
+ local splitter = splitters_t[separator]
+ if not splitter then
+ splitter = Ct(splitat(separator))
+ splitters_t[separator] = splitter
+ end
+ return splitter
+end
+
+lpeg.splitat = splitat
+lpeg.tsplitat = tsplitat
+
+function string.splitup(str,separator)
+ if not separator then
+ separator = ","
+ end
+ return match(splitters_m[separator] or splitat(separator),str)
+end
+
+
+local cache = { }
+
+function lpeg.split(separator,str)
+ local c = cache[separator]
+ if not c then
+ c = tsplitat(separator)
+ cache[separator] = c
+ end
+ return match(c,str)
+end
+
+function string.split(str,separator)
+ local c = cache[separator]
+ if not c then
+ c = tsplitat(separator)
+ cache[separator] = c
+ end
+ return match(c,str)
+end
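A short usage sketch of the splitters defined above; tsplitat is splitat wrapped in a table capture, and string.split reuses the same cache:

local t = string.split("a,b,c", ",")                              -- { "a", "b", "c" }
local all = lpeg.match(lpeg.tsplitat(","), "a,b,c")               -- the same, via the pattern directly
local first, rest = lpeg.match(lpeg.splitat(",", true), "a,b,c")  -- "a", "b,c": the single variant splits once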
+
+local spacing = patterns.spacer^0 * newline -- sort of strip
+local empty = spacing * Cc("")
+local nonempty = Cs((1-spacing)^1) * spacing^-1
+local content = (empty + nonempty)^1
+
+patterns.textline = content
+
+
+local linesplitter = tsplitat(newline)
+
+patterns.linesplitter = linesplitter
+
+function string.splitlines(str)
+ return match(linesplitter,str)
+end
+
+local utflinesplitter = utfbom^-1 * tsplitat(newline)
+
+patterns.utflinesplitter = utflinesplitter
+
+function string.utfsplitlines(str)
+ return match(utflinesplitter,str or "")
+end
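For illustration (the utf variant additionally skips a leading BOM and now tolerates a nil argument):

local lines = string.splitlines("one\r\ntwo\nthree")
-- lines is { "one", "two", "three" }: crlf, cr and lf all count as line endings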
+
+
+local cache = { }
+
+function lpeg.checkedsplit(separator,str)
+ local c = cache[separator]
+ if not c then
+ separator = P(separator)
+ local other = C((1 - separator)^1)
+ c = Ct(separator^0 * other * (separator^1 * other)^0)
+ cache[separator] = c
+ end
+ return match(c,str)
+end
+
+function string.checkedsplit(str,separator)
+ local c = cache[separator]
+ if not c then
+ separator = P(separator)
+ local other = C((1 - separator)^1)
+ c = Ct(separator^0 * other * (separator^1 * other)^0)
+ cache[separator] = c
+ end
+ return match(c,str)
+end
+
+
+local function f2(s) local c1, c2 = byte(s,1,2) return c1 * 64 + c2 - 12416 end
+local function f3(s) local c1, c2, c3 = byte(s,1,3) return (c1 * 64 + c2) * 64 + c3 - 925824 end
+local function f4(s) local c1, c2, c3, c4 = byte(s,1,4) return ((c1 * 64 + c2) * 64 + c3) * 64 + c4 - 63447168 end
+
+local utf8byte = patterns.utf8one/byte + patterns.utf8two/f2 + patterns.utf8three/f3 + patterns.utf8four/f4
+
+patterns.utf8byte = utf8byte
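The decoders above map a UTF-8 sequence to its code point; a quick check, assuming the snippet itself is stored as UTF-8:

print(lpeg.match(lpeg.patterns.utf8byte, "A"))   -- 65
print(lpeg.match(lpeg.patterns.utf8byte, "é"))   -- 233 (0xE9, handled by the two-byte decoder f2)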
+
+
+
+local cache = { }
+
+function lpeg.stripper(str)
+ if type(str) == "string" then
+ local s = cache[str]
+ if not s then
+ s = Cs(((S(str)^1)/"" + 1)^0)
+ cache[str] = s
+ end
+ return s
+ else
+ return Cs(((str^1)/"" + 1)^0)
+ end
+end
+
+local cache = { }
+
+function lpeg.keeper(str)
+ if type(str) == "string" then
+ local s = cache[str]
+ if not s then
+ s = Cs((((1-S(str))^1)/"" + 1)^0)
+ cache[str] = s
+ end
+ return s
+ else
+ return Cs((((1-str)^1)/"" + 1)^0)
+ end
+end
+
+function lpeg.frontstripper(str) -- or pattern (yet undocumented)
+ return (P(str) + P(true)) * Cs(P(1)^0)
+end
+
+function lpeg.endstripper(str) -- or pattern (yet undocumented)
+ return Cs((1 - P(str) * P(-1))^0)
+end
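A sketch of the strippers and keepers; string arguments are compiled once and cached:

print(lpeg.match(lpeg.stripper("-"), "2011-05-28"))         -- "20110528"
print(lpeg.match(lpeg.keeper("0123456789"), "rev r22538"))  -- "22538"
print(lpeg.match(lpeg.frontstripper("v"), "v1.29"))         -- "1.29"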
+
+-- Just for fun I looked at the used bytecode and
+-- p = (p and p + pp) or pp gets one more (testset).
+
+function lpeg.replacer(one,two)
+ if type(one) == "table" then
+ local no = #one
+ if no > 0 then
+ local p
+ for i=1,no do
+ local o = one[i]
+ local pp = P(o[1]) / o[2]
+ if p then
+ p = p + pp
+ else
+ p = pp
+ end
+ end
+ return Cs((p + 1)^0)
+ end
+ else
+ two = two or ""
+ return Cs((P(one)/two + 1)^0)
+ end
+end
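lpeg.replacer builds a single-pass substitution; the table form folds all pairs into one ordered choice, as sketched here:

local escape = lpeg.replacer { { "&", "&amp;" }, { "<", "&lt;" } }
print(lpeg.match(escape, "a<b & c"))                 -- "a&lt;b &amp; c"
print(lpeg.match(lpeg.replacer("\t", " "), "a\tb"))  -- "a b"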
+
+local splitters_f, splitters_s = { }, { }
+
+function lpeg.firstofsplit(separator) -- always return value
+ local splitter = splitters_f[separator]
+ if not splitter then
+ separator = P(separator)
+ splitter = C((1 - separator)^0)
+ splitters_f[separator] = splitter
+ end
+ return splitter
+end
+
+function lpeg.secondofsplit(separator) -- nil if not split
+ local splitter = splitters_s[separator]
+ if not splitter then
+ separator = P(separator)
+ splitter = (1 - separator)^0 * separator * C(anything^0)
+ splitters_s[separator] = splitter
+ end
+ return splitter
+end
+
+function lpeg.balancer(left,right)
+ left, right = P(left), P(right)
+ return P { left * ((1 - left - right) + V(1))^0 * right }
+end
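The two split helpers return the part before respectively after the first separator, and balancer matches one balanced group (a sketch):

print(lpeg.match(lpeg.firstofsplit(":"),  "foo:bar:baz"))      -- "foo"
print(lpeg.match(lpeg.secondofsplit(":"), "foo:bar:baz"))      -- "bar:baz"
print(lpeg.match(lpeg.secondofsplit(":"), "foo"))              -- nil: no separator, no second part
print(lpeg.match(lpeg.C(lpeg.balancer("{","}")), "{a{b}c}d"))  -- "{a{b}c}"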
+
+
+
+local nany = utf8char/""
+
+function lpeg.counter(pattern)
+ pattern = Cs((P(pattern)/" " + nany)^0)
+ return function(str)
+ return #match(pattern,str)
+ end
+end
+
+if utfgmatch then
+
+ function lpeg.count(str,what) -- replaces string.count
+ if type(what) == "string" then
+ local n = 0
+ for _ in utfgmatch(str,what) do
+ n = n + 1
+ end
+ return n
+ else -- 4 times slower but still faster than / function
+ return #match(Cs((P(what)/" " + nany)^0),str)
+ end
+ end
+
+else
+
+ local cache = { }
+
+ function lpeg.count(str,what) -- replaces string.count
+ if type(what) == "string" then
+ local p = cache[what]
+ if not p then
+ p = Cs((P(what)/" " + nany)^0)
+ cache[what] = p -- key on the string, not on the pattern
+ end
+ return #match(p,str)
+ else -- 4 times slower but still faster than / function
+ return #match(Cs((P(what)/" " + nany)^0),str)
+ end
+ end
+
+end
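Both branches count non-overlapping occurrences; which one runs depends on whether the unicode library is present. A sketch:

print(lpeg.count("abcabca", "ab"))    -- 2
print(lpeg.counter("ab")("abcabca"))  -- 2, same result via a reusable closure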
+
+local patterns_escapes = { -- also defined in l-string
+ ["%"] = "%%",
+ ["."] = "%.",
+ ["+"] = "%+", ["-"] = "%-", ["*"] = "%*",
+ ["["] = "%[", ["]"] = "%]",
+ ["("] = "%(", [")"] = "%)",
+ -- ["{"] = "%{", ["}"] = "%}"
+ -- ["^"] = "%^", ["$"] = "%$",
+}
+
+local simple_escapes = { -- also defined in l-string
+ ["-"] = "%-",
+ ["."] = "%.",
+ ["?"] = ".",
+ ["*"] = ".*",
+}
+
+local p = Cs((S("-.+*%()[]") / patterns_escapes + anything)^0)
+local s = Cs((S("-.+*%()[]") / simple_escapes + anything)^0)
+
+function string.escapedpattern(str,simple)
+ return match(simple and s or p,str)
+end
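string.escapedpattern makes a literal string safe for use as a Lua string pattern; the simple variant also turns shell-style wildcards into their pattern equivalents:

print(string.escapedpattern("mtx-base.lua"))     -- "mtx%-base%.lua"
print(string.escapedpattern("mtx-*.lua", true))  -- "mtx%-.*%.lua"
print(string.find("mtx-base.lua", string.escapedpattern("mtx-base.lua")))  -- 1   12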
+
+-- utf extensions
+
+lpeg.UP = lpeg.P
+
+if utfcharacters then
+
+ function lpeg.US(str)
+ local p
+ for uc in utfcharacters(str) do
+ if p then
+ p = p + P(uc)
+ else
+ p = P(uc)
+ end
+ end
+ return p
+ end
+
+
+elseif utfgmatch then
+
+ function lpeg.US(str)
+ local p
+ for uc in utfgmatch(str,".") do
+ if p then
+ p = p + P(uc)
+ else
+ p = P(uc)
+ end
+ end
+ return p
+ end
+
+else
+
+ function lpeg.US(str)
+ local p
+ local f = function(uc)
+ if p then
+ p = p + P(uc)
+ else
+ p = P(uc)
+ end
+ end
+ match((utf8char/f)^0,str)
+ return p
+ end
+
+end
+
+local range = Cs(utf8byte) * (Cs(utf8byte) + Cc(false))
+
+local utfchar = unicode and unicode.utf8 and unicode.utf8.char
+
+function lpeg.UR(str,more)
+ local first, last
+ if type(str) == "number" then
+ first = str
+ last = more or first
+ else
+ first, last = match(range,str)
+ if not last then
+ return P(str)
+ end
+ end
+ if first == last then
+ return P(str)
+ elseif utfchar and last - first < 8 then -- a somewhat arbitrary criterium
+ local p
+ for i=first,last do
+ if p then
+ p = p + P(utfchar(i))
+ else
+ p = P(utfchar(i))
+ end
+ end
+ return p -- nil when invalid range
+ else
+ local f = function(b)
+ return b >= first and b <= last
+ end
+ return utf8byte / f -- nil when invalid range
+ end
+end
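lpeg.UR builds a pattern for a range of code points: small ranges expand to an explicit alternation, larger ones fall back to a byte-decoding test. A sketch using the numeric form (the character form relies on unicode.utf8 being available, as under LuaTeX):

local octdigit = lpeg.UR(0x30, 0x37)             -- the characters "0" .. "7"
print(lpeg.match(lpeg.C(octdigit^1), "017abc"))  -- "017"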
+
+
+
+function lpeg.oneof(list,...) -- lpeg.oneof("elseif","else","if","then")
+ if type(list) ~= "table" then
+ list = { list, ... }
+ end
+ -- sort(list) -- longest match first
+ local p = P(list[1])
+ for l=2,#list do
+ p = p + P(list[l])
+ end
+ return p
+end
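lpeg.oneof turns its arguments into an ordered choice, so longer alternatives should come first, as the comment in the code suggests:

local keyword = lpeg.oneof("elseif", "else", "if", "then")
print(lpeg.match(lpeg.C(keyword), "elseif true then"))  -- "elseif", not just "else"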
+
+function lpeg.is_lpeg(p)
+ return p and lpegtype(p) == "pattern"
+end
+
+-- For the moment here, but it might move to utilities. Beware, we need to
+-- have the longest keyword first, so 'aaa' comes before 'aa', which is why we
+-- loop back from the end, i.e. prepend.
+
+local sort, fastcopy, sortedkeys = table.sort, table.fastcopy, table.sortedkeys -- dependency!
+
+function lpeg.append(list,pp,delayed,checked)
+ local p = pp
+ if #list > 0 then
+ local keys = fastcopy(list)
+ sort(keys)
+ for i=#keys,1,-1 do
+ local k = keys[i]
+ if p then
+ p = P(k) + p
+ else
+ p = P(k)
+ end
+ end
+ elseif delayed then -- hm, it looks like the lpeg parser resolves anyway
+ local keys = sortedkeys(list)
+ if p then
+ for i=1,#keys,1 do
+ local k = keys[i]
+ local v = list[k]
+ p = P(k)/list + p
+ end
+ else
+ for i=1,#keys do
+ local k = keys[i]
+ local v = list[k]
+ if p then
+ p = P(k) + p
+ else
+ p = P(k)
+ end
+ end
+ if p then
+ p = p / list
+ end
+ end
+ elseif checked then
+ -- problem: substitution gives a capture
+ local keys = sortedkeys(list)
+ for i=1,#keys do
+ local k = keys[i]
+ local v = list[k]
+ if p then
+ if k == v then
+ p = P(k) + p
+ else
+ p = P(k)/v + p
+ end
+ else
+ if k == v then
+ p = P(k)
+ else
+ p = P(k)/v
+ end
+ end
+ end
+ else
+ local keys = sortedkeys(list)
+ for i=1,#keys do
+ local k = keys[i]
+ local v = list[k]
+ if p then
+ p = P(k)/v + p
+ else
+ p = P(k)/v
+ end
+ end
+ end
+ return p
+end
+
+-- inspect(lpeg.append({ a = "1", aa = "1", aaa = "1" } ,nil,true))
+-- inspect(lpeg.append({ ["degree celsius"] = "1", celsius = "1", degree = "1" } ,nil,true))
+
+-- function lpeg.exact_match(words,case_insensitive)
+-- local pattern = concat(words)
+-- if case_insensitive then
+-- local pattern = S(upper(characters)) + S(lower(characters))
+-- local list = { }
+-- for i=1,#words do
+-- list[lower(words[i])] = true
+-- end
+-- return Cmt(pattern^1, function(_,i,s)
+-- return list[lower(s)] and i
+-- end)
+-- else
+-- local pattern = S(concat(words))
+-- local list = { }
+-- for i=1,#words do
+-- list[words[i]] = true
+-- end
+-- return Cmt(pattern^1, function(_,i,s)
+-- return list[s] and i
+-- end)
+-- end
+-- end
+
+-- experiment:
+
+local function make(t)
+ local p
+-- for k, v in next, t do
+ for k, v in table.sortedhash(t) do
+ if not p then
+ if next(v) then
+ p = P(k) * make(v)
+ else
+ p = P(k)
+ end
+ else
+ if next(v) then
+ p = p + P(k) * make(v)
+ else
+ p = p + P(k)
+ end
+ end
+ end
+ return p
+end
+
+function lpeg.utfchartabletopattern(list)
+ local tree = { }
+ for i=1,#list do
+ local t = tree
+ for c in gmatch(list[i],".") do
+ if not t[c] then
+ t[c] = { }
+ end
+ t = t[c]
+ end
+ end
+ return make(tree)
+end
+
+-- inspect ( lpeg.utfchartabletopattern {
+-- utfchar(0x00A0), -- nbsp
+-- utfchar(0x2000), -- enquad
+-- utfchar(0x2001), -- emquad
+-- utfchar(0x2002), -- enspace
+-- utfchar(0x2003), -- emspace
+-- utfchar(0x2004), -- threeperemspace
+-- utfchar(0x2005), -- fourperemspace
+-- utfchar(0x2006), -- sixperemspace
+-- utfchar(0x2007), -- figurespace
+-- utfchar(0x2008), -- punctuationspace
+-- utfchar(0x2009), -- breakablethinspace
+-- utfchar(0x200A), -- hairspace
+-- utfchar(0x200B), -- zerowidthspace
+-- utfchar(0x202F), -- narrownobreakspace
+-- utfchar(0x205F), -- math thinspace
+-- } )
+
end -- of closure
@@ -1831,6 +2056,11 @@ function io.readstring(f,n,m)
return str
end
+--
+
+if not io.i_limiter then function io.i_limiter() end end -- dummy so we can test safely
+if not io.o_limiter then function io.o_limiter() end end -- dummy so we can test safely
+
end -- of closure
@@ -2061,7 +2291,7 @@ if not modules then modules = { } end modules ['l-os'] = {
-- maybe build io.flush in os.execute
local os = os
-local find, format, gsub, upper = string.find, string.format, string.gsub, string.upper
+local find, format, gsub, upper, gmatch = string.find, string.format, string.gsub, string.upper, string.gmatch
local concat = table.concat
local random, ceil = math.random, math.ceil
local rawget, rawset, type, getmetatable, setmetatable, tonumber = rawget, rawset, type, getmetatable, setmetatable, tonumber
@@ -2167,9 +2397,9 @@ os.type = os.type or (io.pathseparator == ";" and "windows") or "unix"
os.name = os.name or (os.type == "windows" and "mswin" ) or "linux"
if os.type == "windows" then
- os.libsuffix, os.binsuffix = 'dll', 'exe'
+ os.libsuffix, os.binsuffix, os.binsuffixes = 'dll', 'exe', { 'exe', 'cmd', 'bat' }
else
- os.libsuffix, os.binsuffix = 'so', ''
+ os.libsuffix, os.binsuffix, os.binsuffixes = 'so', '', { '' }
end
function os.launch(str)
@@ -2400,6 +2630,39 @@ function os.timezone(delta)
end
end
+local memory = { }
+
+local function which(filename)
+ local fullname = memory[filename]
+ if fullname == nil then
+ local suffix = file.suffix(filename)
+ local suffixes = suffix == "" and os.binsuffixes or { suffix }
+ for directory in gmatch(os.getenv("PATH"),"[^" .. io.pathseparator .."]+") do
+ local df = file.join(directory,filename)
+ for i=1,#suffixes do
+ local dfs = file.addsuffix(df,suffixes[i])
+ if io.exists(dfs) then
+ fullname = dfs
+ break
+ end
+ end
+ end
+ if not fullname then
+ fullname = false
+ end
+ memory[filename] = fullname
+ end
+ return fullname
+end
+
+os.which = which
+os.where = which
+
+-- print(os.which("inkscape.exe"))
+-- print(os.which("inkscape"))
+-- print(os.which("gs.exe"))
+-- print(os.which("ps2pdf"))
+
end -- of closure
@@ -2499,10 +2762,10 @@ end
local trick_1 = char(1)
local trick_2 = "^" .. trick_1 .. "/+"
-function file.join(...)
+function file.join(...) -- rather dirty
local lst = { ... }
local a, b = lst[1], lst[2]
- if a == "" then
+ if not a or a == "" then -- the 'not a' guard is new
lst[1] = trick_1
elseif b and find(a,"^/+$") and find(b,"^/") then
lst[1] = ""
@@ -2523,6 +2786,15 @@ function file.join(...)
end
+-- We should be able to use:
+--
+-- function file.is_writable(name)
+-- local a = attributes(name) or attributes(dirname(name,"."))
+-- return a and sub(a.permissions,2,2) == "w"
+-- end
+--
+-- But after some testing Taco and I came up with:
+
function file.is_writable(name)
if lfs.isdir(name) then
name = name .. "/m_t_x_t_e_s_t.tmp"
@@ -2532,12 +2804,17 @@ function file.is_writable(name)
os.remove(name)
return true
end
- else
- local existing = lfs.isfile(name)
- f = io.open(name,"ab")
+ elseif lfs.isfile(name) then
+ local f = io.open(name,"ab")
if f then
f:close()
- if not existing then os.remove(name) end
+ return true
+ end
+ else
+ local f = io.open(name,"ab")
+ if f then
+ f:close()
+ os.remove(name)
return true
end
end
@@ -2594,7 +2871,7 @@ function file.collapsepath(str,anchor)
if element == '.' then
-- do nothing
elseif element == '..' then
- local n = i -1
+ local n = i - 1
while n > 0 do
local element = oldelements[n]
if element ~= '..' and element ~= '.' then
@@ -2690,10 +2967,42 @@ local path = C(((1-slash)^0 * slash)^0)
local suffix = period * C(P(1-period)^0 * P(-1))
local base = C((1-suffix)^0)
-local pattern = (drive + Cc("")) * (path + Cc("")) * (base + Cc("")) * (suffix + Cc(""))
+drive = drive + Cc("")
+path = path + Cc("")
+base = base + Cc("")
+suffix = suffix + Cc("")
-function file.splitname(str) -- returns drive, path, base, suffix
- return lpegmatch(pattern,str)
+local pattern_a = drive * path * base * suffix
+local pattern_b = path * base * suffix
+local pattern_c = C(drive * path) * C(base * suffix)
+
+function file.splitname(str,splitdrive)
+ if splitdrive then
+ return lpegmatch(pattern_a,str) -- returns drive, path, base, suffix
+ else
+ return lpegmatch(pattern_b,str) -- returns path, base, suffix
+ end
+end
+
+function file.nametotable(str,splitdrive) -- returns table
+ local path, drive, subpath, name, base, suffix = lpegmatch(pattern_c,str)
+ if splitdrive then
+ return {
+ path = path,
+ drive = drive,
+ subpath = subpath,
+ name = name,
+ base = base,
+ suffix = suffix,
+ }
+ else
+ return {
+ path = path,
+ name = name,
+ base = base,
+ suffix = suffix,
+ }
+ end
end
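With the reworked patterns the drive part is only reported when asked for, and nametotable returns the same pieces keyed by name. A sketch:

local path, base, suffix = file.splitname("tex/context/base/mtxrun.lua")
-- path is "tex/context/base/", base is "mtxrun", suffix is "lua"
local t = file.nametotable("a/b/c.dvi")
-- t.path is "a/b/", t.name is "c.dvi", t.base is "c", t.suffix is "dvi"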
-- function test(t) for k, v in next, t do print(v, "=>", file.splitname(v)) end end
@@ -2828,12 +3137,19 @@ local nothing = Cc("")
local escaped = (plus / " ") + (percent * C(hexdigit * hexdigit) / tochar)
-- we assume schemes with more than 1 character (in order to avoid problems with windows disks)
+-- we also assume that when we have a scheme, we also have an authority
+
+local schemestr = Cs((escaped+(1-colon-slash-qmark-hash))^2)
+local authoritystr = Cs((escaped+(1- slash-qmark-hash))^0)
+local pathstr = Cs((escaped+(1- qmark-hash))^0)
+local querystr = Cs((escaped+(1- hash))^0)
+local fragmentstr = Cs((escaped+(1- endofstring))^0)
-local scheme = Cs((escaped+(1-colon-slash-qmark-hash))^2) * colon * slash * slash + nothing
-local authority = Cs((escaped+(1- slash-qmark-hash))^0) + nothing
-local path = slash * Cs((escaped+(1- qmark-hash))^0) + nothing
-local query = qmark * Cs((escaped+(1- hash))^0) + nothing
-local fragment = hash * Cs((escaped+(1- endofstring))^0) + nothing
+local scheme = schemestr * colon + nothing
+local authority = slash * slash * authoritystr + nothing
+local path = slash * pathstr + nothing
+local query = qmark * querystr + nothing
+local fragment = hash * fragmentstr + nothing
local validurl = scheme * authority * path * query * fragment
local parser = Ct(validurl)
@@ -2854,11 +3170,14 @@ local function split(str)
return (type(str) == "string" and lpegmatch(parser,str)) or str
end
+local isscheme = schemestr * colon * slash * slash -- this test also assumes authority
+
local function hasscheme(str)
- local scheme = lpegmatch(scheme,str) -- at least one character
- return scheme and scheme ~= ""
+ local scheme = lpegmatch(isscheme,str) -- at least one character
+ return scheme ~= "" and scheme or false
end
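The stricter test now returns the scheme itself, or false, instead of a boolean; a sketch, assuming the local hasscheme is exported on the url table as elsewhere in l-url:

print(url.hasscheme("http://example.com/foo.pdf"))  -- "http"
print(url.hasscheme("foo.pdf"))                     -- false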
+
-- todo: cache them
local rootletter = R("az","AZ")
@@ -3007,8 +3326,6 @@ local attributes = lfs.attributes
local walkdir = lfs.dir
local isdir = lfs.isdir
local isfile = lfs.isfile
-local mkdir = lfs.mkdir
-local chdir = lfs.chdir
local currentdir = lfs.currentdir
-- handy
@@ -3056,13 +3373,13 @@ local function collectpattern(path,patt,recurse,result)
local ok, scanner
result = result or { }
if path == "/" then
- ok, scanner = xpcall(function() return walkdir(path..".") end, function() end) -- kepler safe
+ ok, scanner, first = xpcall(function() return walkdir(path..".") end, function() end) -- kepler safe
else
- ok, scanner = xpcall(function() return walkdir(path) end, function() end) -- kepler safe
+ ok, scanner, first = xpcall(function() return walkdir(path) end, function() end) -- kepler safe
end
if ok and type(scanner) == "function" then
if not find(path,"/$") then path = path .. '/' end
- for name in scanner do
+ for name in scanner, first do
local full = path .. name
local attr = attributes(full)
local mode = attr.mode
@@ -3245,7 +3562,7 @@ if onwindows then
pth = pth .. "/" .. s
end
if make_indeed and not isdir(pth) then
- mkdir(pth)
+ lfs.mkdir(pth)
end
end
return pth, (isdir(pth) == true)
@@ -3277,7 +3594,7 @@ else
pth = pth .. "/" .. s
end
if make_indeed and not first and not isdir(pth) then
- mkdir(pth)
+ lfs.mkdir(pth)
end
end
else
@@ -3285,7 +3602,7 @@ else
for s in gmatch(str,"[^/]+") do
pth = pth .. "/" .. s
if make_indeed and not isdir(pth) then
- mkdir(pth)
+ lfs.mkdir(pth)
end
end
end
@@ -3313,10 +3630,10 @@ if onwindows then
first, last = match(str,"^([a-zA-Z]:)(.*)$")
if first and not find(last,"^/") then
local d = currentdir()
- if chdir(first) then
+ if lfs.chdir(first) then
first = dir.current()
end
- chdir(d)
+ lfs.chdir(d)
end
end
if not first then
@@ -3381,10 +3698,6 @@ local type, tonumber = type, tonumber
boolean = boolean or { }
local boolean = boolean
--- function boolean.tonumber(b)
--- return b and 1 or 0 -- test and test and return or return
--- end
-
function boolean.tonumber(b)
if b then return 1 else return 0 end -- test and return or return
end
@@ -3545,7 +3858,7 @@ local function utf16_to_utf8_be(t)
if right then
local now = 256*left + right
if more > 0 then
- now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000
+ now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000 -- the 0x10000 smells wrong
more = 0
r = r + 1
result[r] = utfchar(now)
@@ -3573,7 +3886,7 @@ local function utf16_to_utf8_le(t)
if right then
local now = 256*right + left
if more > 0 then
- now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000
+ now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000 -- the 0x10000 smells wrong
more = 0
r = r + 1
result[r] = utfchar(now)
@@ -3590,14 +3903,14 @@ local function utf16_to_utf8_le(t)
return t
end
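Despite the "smells wrong" remarks above, the surrogate-pair arithmetic is the standard one; a quick check with U+1F600, whose UTF-16 code units are D83D (high) and DE00 (low):

assert((0xD83D - 0xD800) * 0x400 + (0xDE00 - 0xDC00) + 0x10000 == 0x1F600)  -- holds, so the + 0x10000 is correct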
-local function utf32_to_utf8_be(str)
+local function utf32_to_utf8_be(t)
if type(t) == "string" then
- t = utfsplitlines(str)
+ t = utfsplitlines(t)
end
local result = { } -- we reuse result
for i=1,#t do
local r, more = 0, -1
- for a,b in bytepairs(str) do
+ for a,b in bytepairs(t[i]) do
if a and b then
if more < 0 then
more = 256*256*256*a + 256*256*b
@@ -3612,17 +3925,17 @@ local function utf32_to_utf8_be(str)
end
t[i] = concat(result,"",1,r)
end
- return result
+ return t
end
-local function utf32_to_utf8_le(str)
+local function utf32_to_utf8_le(t)
if type(t) == "string" then
- t = utfsplitlines(str)
+ t = utfsplitlines(t)
end
local result = { } -- we reuse result
for i=1,#t do
local r, more = 0, -1
- for a,b in bytepairs(str) do
+ for a,b in bytepairs(t[i]) do
if a and b then
if more < 0 then
more = 256*b + a
@@ -3637,7 +3950,7 @@ local function utf32_to_utf8_le(str)
end
t[i] = concat(result,"",1,r)
end
- return result
+ return t
end
unicode.utf32_to_utf8_be = utf32_to_utf8_be
@@ -3706,15 +4019,67 @@ end
local lpegmatch = lpeg.match
-local utftype = lpeg.patterns.utftype
+local patterns = lpeg.patterns
+local utftype = patterns.utftype
function unicode.filetype(data)
return data and lpegmatch(utftype,data) or "unknown"
end
+local toentities = lpeg.Cs (
+ (
+ patterns.utf8one
+ + (
+ patterns.utf8two
+ + patterns.utf8three
+ + patterns.utf8four
+ ) / function(s) local b = utfbyte(s) if b < 127 then return s else return format("&#%X;",b) end end
+ )^0
+)
+
+patterns.toentities = toentities
+
+function utf.toentities(str)
+ return lpegmatch(toentities,str)
+end
+
+local P, C, R, Cs = lpeg.P, lpeg.C, lpeg.R, lpeg.Cs
+
+local one = P(1)
+local two = C(1) * C(1)
+local four = C(R(utfchar(0xD8),utfchar(0xFF))) * C(1) * C(1) * C(1)
+
+local pattern = P("\254\255") * Cs( (
+ four / function(a,b,c,d)
+ local ab = 256 * byte(a) + byte(b) -- 256 (not 0xFF) recombines the two bytes of a UTF-16 unit
+ local cd = 256 * byte(c) + byte(d)
+ return utfchar((ab-0xD800)*0x400 + (cd-0xDC00) + 0x10000)
+ end
+ + two / function(a,b)
+ return utfchar(byte(a)*256 + byte(b))
+ end
+ + one
+ )^1 )
+ + P("\255\254") * Cs( (
+ four / function(b,a,d,c)
+ local ab = 256 * byte(a) + byte(b) -- 256 (not 0xFF), as above
+ local cd = 256 * byte(c) + byte(d)
+ return utfchar((ab-0xD800)*0x400 + (cd-0xDC00) + 0x10000)
+ end
+ + two / function(b,a)
+ return utfchar(byte(a)*256 + byte(b))
+ end
+ + one
+ )^1 )
+
+function string.toutf(s)
+ return lpegmatch(pattern,s) or s -- todo: utf32
+end
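string.toutf converts UTF-16 input (detected by its BOM) to UTF-8 and returns anything else unchanged; a sketch that only exercises the two-byte, BMP branch:

print(string.toutf("\254\255\000\065\000\066"))  -- "AB": FE FF BOM followed by UTF-16BE 0041 0042
print(string.toutf("plain ascii"))               -- "plain ascii": no BOM, returned as-is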
+
+
end -- of closure
do -- create closure to overcome 200 locals limit
@@ -3750,8 +4115,8 @@ if not math.sind then
end
if not math.odd then
- function math.odd (n) return n % 2 == 0 end
- function math.even(n) return n % 2 ~= 0 end
+ function math.odd (n) return n % 2 ~= 0 end
+ function math.even(n) return n % 2 == 0 end
end
@@ -3771,9 +4136,10 @@ utilities = utilities or {}
utilities.tables = utilities.tables or { }
local tables = utilities.tables
-local format, gmatch = string.format, string.gmatch
+local format, gmatch, rep = string.format, string.gmatch, string.rep
local concat, insert, remove = table.concat, table.insert, table.remove
local setmetatable, getmetatable, tonumber, tostring = setmetatable, getmetatable, tonumber, tostring
+local type, next, rawset, tonumber = type, next, rawset, tonumber
function tables.definetable(target) -- defines undefined tables
local composed, t, n = nil, { }, 0
@@ -3789,14 +4155,31 @@ function tables.definetable(target) -- defines undefined tables
return concat(t,"\n")
end
-function tables.accesstable(target)
- local t = _G
+function tables.accesstable(target,root)
+ local t = root or _G
for name in gmatch(target,"([^%.]+)") do
t = t[name]
+ if not t then
+ return
+ end
end
return t
end
+function tables.migratetable(target,v,root)
+ local t = root or _G
+ local names = string.split(target,".")
+ for i=1,#names-1 do
+ local name = names[i]
+ t[name] = t[name] or { }
+ t = t[name]
+ if not t then
+ return
+ end
+ end
+ t[names[#names]] = v
+end
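The new root argument and the nil guard make these usable on arbitrary tables instead of only _G; a sketch (tables is the local alias for utilities.tables):

local root = { }
tables.migratetable("x.y.z", 123, root)      -- creates root.x.y and sets root.x.y.z = 123
print(tables.accesstable("x.y.z", root))     -- 123
print(tables.accesstable("x.nope.z", root))  -- nil instead of an indexing error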
+
function tables.removevalue(t,value) -- todo: n
if value then
for i=1,#t do
@@ -3840,13 +4223,19 @@ end
-- experimental
-local function toxml(t,d,result)
+local function toxml(t,d,result,step)
for k, v in table.sortedpairs(t) do
if type(v) == "table" then
- result[#result+1] = format("%s<%s>",d,k)
- toxml(v,d.." ",result)
- result[#result+1] = format("%s</%s>",d,k)
- elseif tonumber(k) then
+ if type(k) == "number" then
+ result[#result+1] = format("%s<entry n='%s'>",d,k)
+ toxml(v,d..step,result,step)
+ result[#result+1] = format("%s</entry>",d,k)
+ else
+ result[#result+1] = format("%s<%s>",d,k)
+ toxml(v,d..step,result,step)
+ result[#result+1] = format("%s</%s>",d,k)
+ end
+ elseif type(k) == "number" then
result[#result+1] = format("%s<entry n='%s'>%s</entry>",d,k,v,k)
else
result[#result+1] = format("%s<%s>%s</%s>",d,k,tostring(v),k)
@@ -3854,17 +4243,56 @@ local function toxml(t,d,result)
end
end
-function table.toxml(t,name,nobanner)
+function table.toxml(t,name,nobanner,indent,spaces)
local noroot = name == false
local result = (nobanner or noroot) and { } or { "<?xml version='1.0' standalone='yes' ?>" }
+ local indent = rep(" ",indent or 0)
+ local spaces = rep(" ",spaces or 1)
if noroot then
- toxml( t, "", result)
+ toxml( t, indent, result, spaces)
else
- toxml( { [name or "root"] = t }, "", result)
+ toxml( { [name or "root"] = t }, indent, result, spaces)
end
return concat(result,"\n")
end
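With the new indent and spaces arguments the serializer produces readable nested XML, and numeric keys become entry elements; for example:

print(table.toxml({ colors = { "red", "green" } }, "data", true))
-- <data>
--  <colors>
--   <entry n='1'>red</entry>
--   <entry n='2'>green</entry>
--  </colors>
-- </data>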
+-- also experimental
+
+-- encapsulate(table,utilities.tables)
+-- encapsulate(table,utilities.tables,true)
+-- encapsulate(table,true)
+
+function tables.encapsulate(core,capsule,protect)
+ if type(capsule) ~= "table" then
+ protect = true
+ capsule = { }
+ end
+ for key, value in next, core do
+ if capsule[key] then
+ print(format("\ninvalid inheritance '%s' in '%s'",key,tostring(core)))
+ os.exit()
+ else
+ capsule[key] = value
+ end
+ end
+ if protect then
+ for key, value in next, core do
+ core[key] = nil
+ end
+ setmetatable(core, {
+ __index = capsule,
+ __newindex = function(t,key,value)
+ if capsule[key] then
+ print(format("\ninvalid overload '%s' in '%s'",key,tostring(core)))
+ os.exit()
+ else
+ rawset(t,key,value)
+ end
+ end
+ } )
+ end
+end
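tables.encapsulate moves the keys of core into a capsule and, when protect is set, refuses later overloads of those keys while still allowing new ones; a sketch:

local core = { setup = function() end }
utilities.tables.encapsulate(core, nil, true)
print(core.setup)      -- still reachable, now through the capsule's __index
core.extra = true      -- fine, a new key is simply stored
-- core.setup = false  -- would report an invalid overload and exit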
+
end -- of closure
@@ -3886,8 +4314,8 @@ local storage = utilities.storage
function storage.mark(t)
if not t then
- texio.write_nl("fatal error: storage '%s' cannot be marked",t)
- os.exit()
+ texio.write_nl("fatal error: storage cannot be marked")
+ return -- os.exit()
end
local m = getmetatable(t)
if not m then
@@ -3916,8 +4344,8 @@ end
function storage.checked(t)
if not t then
- texio.write_nl("fatal error: storage '%s' has not been allocated",t)
- os.exit()
+ texio.write_nl("fatal error: storage has not been allocated")
+ return -- os.exit()
end
return t
end
@@ -3946,13 +4374,13 @@ end
-- table namespace ?
-local function f_empty () return "" end -- t,k
-local function f_self (t,k) t[k] = k return k end
-local function f_ignore() end -- t,k,v
+local function f_empty () return "" end -- t,k
+local function f_self (t,k) t[k] = k return k end
+local function f_ignore() end -- t,k,v
-local t_empty = { __index = empty }
-local t_self = { __index = self }
-local t_ignore = { __newindex = ignore }
+local t_empty = { __index = f_empty }
+local t_self = { __index = f_self }
+local t_ignore = { __newindex = f_ignore }
function table.setmetatableindex(t,f)
local m = getmetatable(t)
@@ -3973,6 +4401,7 @@ function table.setmetatableindex(t,f)
setmetatable(t,{ __index = f })
end
end
+ return t
end
function table.setmetatablenewindex(t,f)
@@ -3990,6 +4419,7 @@ function table.setmetatablenewindex(t,f)
setmetatable(t,{ __newindex = f })
end
end
+ return t
end
function table.setmetatablecall(t,f)
@@ -3999,6 +4429,7 @@ function table.setmetatablecall(t,f)
else
setmetatable(t,{ __call = f })
end
+ return t
end
function table.setmetatablekey(t,key,value)
@@ -4008,6 +4439,7 @@ function table.setmetatablekey(t,key,value)
setmetatable(t,m)
end
m[key] = value
+ return t
end
function table.getmetatablekey(t,key,value)
@@ -4176,14 +4608,29 @@ utilities = utilities or {}
utilities.lua = utilities.lua or { }
utilities.report = logs and logs.reporter("system") or print
-function utilities.lua.compile(luafile,lucfile,cleanup,strip) -- defaults: cleanup=false strip=true
+local function stupidcompile(luafile,lucfile)
+ local data = io.loaddata(luafile)
+ if data and data ~= "" then
+ local chunk = loadstring(data) -- string.dump needs a function, so compile the loaded source first
+ data = chunk and string.dump(chunk)
+ if data and data ~= "" then
+ io.savedata(lucfile,data)
+ end
+ end
+end
+
+function utilities.lua.compile(luafile,lucfile,cleanup,strip,fallback) -- defaults: cleanup=false strip=true
utilities.report("lua: compiling %s into %s",luafile,lucfile)
os.remove(lucfile)
local command = "-o " .. string.quoted(lucfile) .. " " .. string.quoted(luafile)
if strip ~= false then
command = "-s " .. command
end
- local done = os.spawn("texluac " .. command) == 0 or os.spawn("luac " .. command) == 0
+ local done = os.spawn("texluac " .. command) == 0 -- or os.spawn("luac " .. command) == 0
+ if not done and fallback then
+ utilities.report("lua: dumping %s into %s (unstripped)",luafile,lucfile)
+ stupidcompile(luafile,lucfile) -- maybe use the stripper we have elsewhere
+ cleanup = false -- better to keep the source so we can see how much worse it is
+ end
if done and cleanup == true and lfs.isfile(lucfile) and lfs.isfile(luafile) then
utilities.report("lua: removing %s",luafile)
os.remove(luafile)
@@ -4210,9 +4657,9 @@ if not modules then modules = { } end modules ['util-prs'] = {
license = "see context related readme files"
}
-local P, R, V, C, Ct, Carg = lpeg.P, lpeg.R, lpeg.V, lpeg.C, lpeg.Ct, lpeg.Carg
+local P, R, V, C, Ct, Cs, Carg = lpeg.P, lpeg.R, lpeg.V, lpeg.C, lpeg.Ct, lpeg.Cs, lpeg.Carg
local lpegmatch = lpeg.match
-local concat, format, gmatch = table.concat, string.format, string.gmatch
+local concat, format, gmatch, find = table.concat, string.format, string.gmatch, string.find
local tostring, type, next = tostring, type, next
utilities = utilities or {}
@@ -4240,8 +4687,12 @@ local rbrace = P("}")
local nobrace = 1 - (lbrace+rbrace)
local nested = P { lbrace * (nobrace + V(1))^0 * rbrace }
local spaces = space^0
+local argument = Cs((lbrace/"") * ((nobrace + nested)^0) * (rbrace/""))
+local content = (1-P(-1))^0
-lpeg.patterns.nested = nested
+lpeg.patterns.nested = nested -- no capture
+lpeg.patterns.argument = argument -- argument after e.g. =
+lpeg.patterns.content = content -- rest after e.g =
local value = P(lbrace * C((nobrace + nested)^0) * rbrace) + C((nested + (1-comma))^0)
@@ -4322,9 +4773,15 @@ parsers.patterns.settings_to_array = pattern
-- we could use a weak table as cache
-function parsers.settings_to_array(str)
+function parsers.settings_to_array(str,strict)
if not str or str == "" then
return { }
+ elseif strict then
+ if find(str,"{") then
+ return lpegmatch(pattern,str)
+ else
+ return { str }
+ end
else
return lpegmatch(pattern,str)
end
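The strict flag changes what happens to a value without braces: instead of being split at commas it is kept as one entry. A sketch:

local loose  = utilities.parsers.settings_to_array("red,green,blue")        -- { "red", "green", "blue" }
local strict = utilities.parsers.settings_to_array("red,green,blue", true)  -- { "red,green,blue" }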
@@ -4562,61 +5019,55 @@ local names = { }
-- one
local function hook()
- local f = getinfo(2,"f").func
- local n = getinfo(2,"Sn")
--- if n.what == "C" and n.name then print (n.namewhat .. ': ' .. n.name) end
+ local f = getinfo(2) -- "nS"
if f then
- local cf = counters[f]
- if cf == nil then
- counters[f] = 1
- names[f] = n
- else
- counters[f] = cf + 1
- end
- end
-end
-
-local function getname(func)
- local n = names[func]
- if n then
- if n.what == "C" then
- return n.name or '<anonymous>'
+ local n = "unknown"
+ if f.what == "C" then
+ n = f.name or '<anonymous>'
+ if not names[n] then
+ names[n] = format("%42s",n)
+ end
else
-- source short_src linedefined what name namewhat nups func
- local name = n.name or n.namewhat or n.what
- if not name or name == "" then name = "?" end
- return format("%s : %s : %s", n.short_src or "unknown source", n.linedefined or "--", name)
+ n = f.name or f.namewhat or f.what
+ if not n or n == "" then
+ n = "?"
+ end
+ if not names[n] then
+ names[n] = format("%42s : % 5i : %s",n,f.linedefined or 0,f.short_src or "unknown source")
+ end
end
- else
- return "unknown"
+ counters[n] = (counters[n] or 0) + 1
end
end
-function debugger.showstats(printer,threshold)
+function debugger.showstats(printer,threshold) -- hm, something has changed and the output is rubbish now
printer = printer or texio.write or print
threshold = threshold or 0
local total, grandtotal, functions = 0, 0, 0
- printer("\n") -- ugly but ok
- -- table.sort(counters)
- for func, count in next, counters do
- if count > threshold then
- local name = getname(func)
- if not find(name,"for generator") then
- printer(format("%8i %s", count, name))
- total = total + count
- end
+ local dataset = { }
+ for name, count in next, counters do
+ dataset[#dataset+1] = { name, count }
+ end
+ table.sort(dataset,function(a,b) return a[2] == b[2] and b[1] > a[1] or a[2] > b[2] end)
+ for i=1,#dataset do
+ local d = dataset[i]
+ local name = d[1]
+ local count = d[2]
+ if count > threshold and not find(name,"for generator") then -- move up
+ printer(format("%8i %s\n", count, names[name]))
+ total = total + count
end
grandtotal = grandtotal + count
functions = functions + 1
end
- printer(format("functions: %s, total: %s, grand total: %s, threshold: %s\n", functions, total, grandtotal, threshold))
+ printer("\n")
+ printer(format("functions : % 10i\n", functions))
+ printer(format("total : % 10i\n", total))
+ printer(format("grand total: % 10i\n", grandtotal))
+ printer(format("threshold : % 10i\n", threshold))
end
--- two
-
-
--- rest
-
function debugger.savestats(filename,threshold)
local f = io.open(filename,'w')
if f then
@@ -4638,6 +5089,7 @@ end
local is_node = node and node.is_node
+local is_lpeg = lpeg and lpeg.type
function inspect(i) -- global function
local ti = type(i)
@@ -4645,11 +5097,30 @@ function inspect(i) -- global function
table.print(i,"table")
elseif is_node and is_node(i) then
table.print(nodes.astable(i),tostring(i))
+ elseif is_lpeg and is_lpeg(i) then
+ lpeg.print(i)
else
print(tostring(i))
end
end
+-- from the lua book:
+
+function traceback()
+ local level = 1
+ while true do
+ local info = debug.getinfo(level, "Sl")
+ if not info then
+ break
+ elseif info.what == "C" then
+ print(format("%3i : C function",level))
+ else
+ print(format("%3i : [%s]:%d",level,info.short_src,info.currentline))
+ end
+ level = level + 1
+ end
+end
+
end -- of closure
@@ -4668,7 +5139,7 @@ if not modules then modules = { } end modules ['trac-inf'] = {
-- get warnings about assignments. This is more efficient than using rawset
-- and rawget.
-local format = string.format
+local format, lower = string.format, string.lower
local clock = os.gettimeofday or os.clock -- should go in environment
local write_nl = texio.write_nl
@@ -4770,10 +5241,10 @@ function statistics.show(reporter)
-- this code will move
local register = statistics.register
register("luatex banner", function()
- return string.lower(status.banner)
+ return lower(status.banner)
end)
register("control sequences", function()
- return format("%s of %s", status.cs_count, status.hash_size+status.hash_extra)
+ return format("%s of %s + %s", status.cs_count, status.hash_size,status.hash_extra)
end)
register("callbacks", function()
local total, indirect = status.callbacks or 0, status.indirect_callbacks or 0
@@ -4847,7 +5318,7 @@ end
function commands.elapsedtime(name)
stoptiming(name or "whatever")
- tex.sprint(elapsedtime(name or "whatever"))
+ context(elapsedtime(name or "whatever"))
end
@@ -4931,6 +5402,10 @@ local function set(t,what,newvalue)
if type(what) ~= "table" then
return
end
+ if not done then -- catch ... why not set?
+ done = { }
+ t.done = done
+ end
for w, value in next, what do
if value == "" then
value = newvalue
@@ -5167,6 +5642,8 @@ end
if texconfig then
+ -- this happens too late in ini mode but that is no problem
+
local function set(k,v)
v = tonumber(v)
if v then
@@ -5237,7 +5714,7 @@ local report, subreport, status, settarget, setformats, settranslations
local direct, subdirect, writer, pushtarget, poptarget
-if tex and tex.jobname or tex.formatname then
+if tex and (tex.jobname or tex.formatname) then
local valueiskey = { __index = function(t,k) t[k] = k return k end } -- will be helper
@@ -5252,6 +5729,10 @@ if tex and tex.jobname or tex.formatname then
write_nl(target,...)
end
+ newline = function()
+ write_nl(target,"\n")
+ end
+
report = function(a,b,c,...)
if c then
write_nl(target,format("%-15s > %s\n",translations[a],format(formats[b],c,...)))
@@ -5357,6 +5838,10 @@ else
writer = write_nl
+ newline = function()
+ write_nl("\n")
+ end
+
report = function(a,b,c,...)
if c then
write_nl(format("%-15s | %s",a,format(b,c,...)))
@@ -5416,6 +5901,7 @@ logs.settranslations = settranslations
logs.direct = direct
logs.subdirect = subdirect
logs.writer = writer
+logs.newline = newline
-- installer
@@ -5585,21 +6071,58 @@ local real, user, sub
function logs.start_page_number()
real, user, sub = texcount.realpageno, texcount.userpageno, texcount.subpageno
+-- real, user, sub = 0, 0, 0
end
-function logs.stop_page_number()
- if real > 0 then
- if user > 0 then
- if sub > 0 then
- report_pages("flushing realpage %s, userpage %s, subpage %s",real,user,sub)
+local timing = false
+local starttime = nil
+local lasttime = nil
+
+trackers.register("pages.timing", function(v) -- only for myself (diagnostics)
+ starttime = os.clock()
+ timing = true
+end)
+
+function logs.stop_page_number() -- the first page can include the initialization so we omit it from the average
+ if timing then
+ local elapsed, average
+ local stoptime = os.clock()
+ if not lasttime or real < 2 then
+ elapsed = stoptime
+ average = stoptime
+ starttime = stoptime
+ else
+ elapsed = stoptime - lasttime
+ average = (stoptime - starttime) / (real - 1)
+ end
+ lasttime = stoptime
+ if real > 0 then
+ if user > 0 then
+ if sub > 0 then
+ report_pages("flushing realpage %s, userpage %s, subpage %s, time %0.04f / %0.04f",real,user,sub,elapsed,average)
+ else
+ report_pages("flushing realpage %s, userpage %s, time %0.04f / %0.04f",real,user,elapsed,average)
+ end
else
- report_pages("flushing realpage %s, userpage %s",real,user)
+ report_pages("flushing realpage %s, time %0.04f / %0.04f",real,elapsed,average)
end
else
- report_pages("flushing realpage %s",real)
+ report_pages("flushing page, time %0.04f / %0.04f",elapsed,average)
end
else
- report_pages("flushing page")
+ if real > 0 then
+ if user > 0 then
+ if sub > 0 then
+ report_pages("flushing realpage %s, userpage %s, subpage %s",real,user,sub)
+ else
+ report_pages("flushing realpage %s, userpage %s",real,user)
+ end
+ else
+ report_pages("flushing realpage %s",real)
+ end
+ else
+ report_pages("flushing page")
+ end
end
logs.flush()
end
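-- a minimal sketch: the per-page timing above is only reported when the
-- (diagnostic) tracker is enabled, for instance with
--
-- trackers.enable("pages.timing")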
@@ -5681,6 +6204,13 @@ local function reportbanner(t)
end
end
+local function reportversion(t)
+ local banner = t.banner
+ if banner then
+ t.report(banner)
+ end
+end
+
local function reporthelp(t,...)
local helpinfo = t.helpinfo
if type(helpinfo) == "string" then
@@ -5707,6 +6237,7 @@ function logs.application(t)
t.report = logs.reporter(t.name)
t.help = function(...) reportbanner(t) ; reporthelp(t,...) ; reportinfo(t) end
t.identify = function() reportbanner(t) end
+ t.version = function() reportversion(t) end
return t
end
@@ -5772,6 +6303,11 @@ else
end
end
+-- do we still need io.flush then?
+
+io.stdout:setvbuf('no')
+io.stderr:setvbuf('no')
+
end -- of closure
@@ -6277,6 +6813,10 @@ if not modules then modules = { } end modules ['lxml-tab'] = {
-- stripping spaces from e.g. cont-en.xml saves .2 sec runtime so it's not worth the
-- trouble
+-- todo: when serializing optionally remap named entities to hex (if known in char-ent.lua)
+-- maybe when letter -> utf, else name .. then we need an option to the serializer .. a bit
+-- of work so we delay this till we clean up
+
local trace_entities = false trackers.register("xml.entities", function(v) trace_entities = v end)
local report_xml = logs and logs.reporter("xml","core") or function(...) print(format(...)) end
@@ -6301,10 +6841,11 @@ xml = xml or { }
local xml = xml
+local utf = unicode.utf8
local concat, remove, insert = table.concat, table.remove, table.insert
local type, next, setmetatable, getmetatable, tonumber = type, next, setmetatable, getmetatable, tonumber
local format, lower, find, match, gsub = string.format, string.lower, string.find, string.match, string.gsub
-local utfchar = unicode.utf8.char
+local utfchar, utffind, utfgsub = utf.char, utf.find, utf.gsub
local lpegmatch = lpeg.match
local P, S, R, C, V, C, Cs = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.V, lpeg.C, lpeg.Cs
@@ -6417,9 +6958,22 @@ element.</p>
local nsremap, resolvens = xml.xmlns, xml.resolvens
-local stack, top, dt, at, xmlns, errorstr, entities = { }, { }, { }, { }, { }, nil, { }
-local strip, cleanup, utfize, resolve, resolve_predefined, unify_predefined = false, false, false, false, false, false
-local dcache, hcache, acache = { }, { }, { }
+local stack = { }
+local top = { }
+local dt = { }
+local at = { }
+local xmlns = { }
+local errorstr = nil
+local entities = { }
+local strip = false
+local cleanup = false
+local utfize = false
+local resolve_predefined = false
+local unify_predefined = false
+
+local dcache = { }
+local hcache = { }
+local acache = { }
local mt = { }
@@ -6585,7 +7139,7 @@ local predefined_unified = {
[42] = "&quot;",
[47] = "&apos;",
[74] = "&lt;",
- [76] = "&gr;",
+ [76] = "&gt;",
}
local predefined_simplified = {
@@ -6596,6 +7150,58 @@ local predefined_simplified = {
[76] = ">", gt = ">",
}
+local nofprivates = 0xF0000 -- shared but seldom used
+
+local privates_u = { -- unescaped
+ [ [[&]] ] = "&amp;",
+ [ [["]] ] = "&quot;",
+ [ [[']] ] = "&apos;",
+ [ [[<]] ] = "&lt;",
+ [ [[>]] ] = "&gt;",
+}
+
+local privates_p = {
+}
+
+local privates_n = {
+ -- keeps track of defined ones
+}
+
+local function escaped(s)
+ if s == "" then
+ return ""
+ else -- if utffind(s,privates_u) then
+ return (utfgsub(s,".",privates_u))
+ -- else
+ -- return s
+ end
+end
+
+local function unescaped(s)
+ local p = privates_n[s]
+ if not p then
+ nofprivates = nofprivates + 1
+ p = utfchar(nofprivates)
+ privates_n[s] = p
+ s = "&" .. s .. ";" -- todo: use char-ent to map to hex
+ privates_u[p] = s
+ privates_p[p] = s
+ end
+ return p
+end
+
+local function unprivatized(s,resolve)
+ if s == "" then
+ return ""
+ else
+ return (utfgsub(s,".",privates_p))
+ end
+end
+
+xml.privatetoken = unescaped
+xml.unprivatized = unprivatized
+xml.privatecodes = privates_n
+
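-- a minimal sketch of the round trip (illustrative, within this module's scope):
--
-- local p = unescaped("foo")     -- "foo" is mapped to a private plane character
-- print(unprivatized(p))         -- &foo;
-- print(escaped("a < b"))        -- a &lt; b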
local function handle_hex_entity(str)
local h = hcache[str]
if not h then
@@ -6637,7 +7243,7 @@ local function handle_dec_entity(str)
if not n then
report_xml("utfize, ignoring dec entity &#%s;",str)
elseif trace_entities then
- report_xml("utfize, converting dec entity &#%s; into %s",str,h)
+ report_xml("utfize, converting dec entity &#%s; into %s",str,d)
end
else
if trace_entities then
@@ -6658,34 +7264,44 @@ local function handle_any_entity(str)
if not a then
a = resolve_predefined and predefined_simplified[str]
if a then
- -- one of the predefined
- elseif type(resolve) == "function" then
- a = resolve(str) or entities[str]
- else
- a = entities[str]
- end
- if a then
if trace_entities then
- report_xml("resolved entity &%s; -> %s (internal)",str,a)
+ report_xml("resolved entity &%s; -> %s (predefined)",str,a)
end
- a = lpegmatch(parsedentity,a) or a
else
- local unknown_any_entity = placeholders.unknown_any_entity
- if unknown_any_entity then
- a = unknown_any_entity(str) or ""
+ if type(resolve) == "function" then
+ a = resolve(str) or entities[str]
+ else
+ a = entities[str]
end
if a then
+ if type(a) == "function" then
+ if trace_entities then
+ report_xml("expanding entity &%s; (function)",str)
+ end
+ a = a(str) or ""
+ end
+ a = lpegmatch(parsedentity,a) or a -- for nested
if trace_entities then
- report_xml("resolved entity &%s; -> %s (external)",str,a)
+ report_xml("resolved entity &%s; -> %s (internal)",str,a)
end
else
- if trace_entities then
- report_xml("keeping entity &%s;",str)
+ local unknown_any_entity = placeholders.unknown_any_entity
+ if unknown_any_entity then
+ a = unknown_any_entity(str) or ""
end
- if str == "" then
- a = "&error;"
+ if a then
+ if trace_entities then
+ report_xml("resolved entity &%s; -> %s (external)",str,a)
+ end
else
- a = "&" .. str .. ";"
+ if trace_entities then
+ report_xml("keeping entity &%s;",str)
+ end
+ if str == "" then
+ a = "&error;"
+ else
+ a = "&" .. str .. ";"
+ end
end
end
end
@@ -6700,18 +7316,25 @@ local function handle_any_entity(str)
else
local a = acache[str]
if not a then
- if trace_entities then
- report_xml("found entity &%s;",str)
- end
a = resolve_predefined and predefined_simplified[str]
if a then
-- one of the predefined
acache[str] = a
+ if trace_entities then
+ report_xml("entity &%s; becomes %s",str,tostring(a))
+ end
elseif str == "" then
+ if trace_entities then
+ report_xml("invalid entity &%s;",str)
+ end
a = "&error;"
acache[str] = a
else
- a = "&" .. str .. ";"
+ if trace_entities then
+ report_xml("entity &%s; is made private",str)
+ end
+ -- a = "&" .. str .. ";"
+ a = unescaped(str)
acache[str] = a
end
end
@@ -6761,7 +7384,7 @@ local value = (squote * Cs((entity + (1 - squote))^0) * squote) + (dq
local endofattributes = slash * close + close -- recovery of flaky html
local whatever = space * name * optionalspace * equal
-local wrongvalue = C(P(1-whatever-close)^1 + P(1-close)^1) / attribute_value_error
+----- wrongvalue = C(P(1-whatever-close)^1 + P(1-close)^1) / attribute_value_error
----- wrongvalue = C(P(1-whatever-endofattributes)^1 + P(1-endofattributes)^1) / attribute_value_error
----- wrongvalue = C(P(1-space-endofattributes)^1) / attribute_value_error
local wrongvalue = Cs(P(entity + (1-space-endofattributes))^1) / attribute_value_error
@@ -6796,6 +7419,8 @@ local function normalentity(k,v ) entities[k] = v end
local function systementity(k,v,n) entities[k] = v end
local function publicentity(k,v,n) entities[k] = v end
+-- todo: separate dtd parser
+
local begindoctype = open * P("!DOCTYPE")
local enddoctype = close
local beginset = P("[")
@@ -6803,17 +7428,22 @@ local endset = P("]")
local doctypename = C((1-somespace-close)^0)
local elementdoctype = optionalspace * P("<!ELEMENT") * (1-close)^0 * close
+local basiccomment = begincomment * ((1 - endcomment)^0) * endcomment
+
local normalentitytype = (doctypename * somespace * value)/normalentity
local publicentitytype = (doctypename * somespace * P("PUBLIC") * somespace * value)/publicentity
local systementitytype = (doctypename * somespace * P("SYSTEM") * somespace * value * somespace * P("NDATA") * somespace * doctypename)/systementity
local entitydoctype = optionalspace * P("<!ENTITY") * somespace * (systementitytype + publicentitytype + normalentitytype) * optionalspace * close
-local doctypeset = beginset * optionalspace * P(elementdoctype + entitydoctype + space)^0 * optionalspace * endset
+-- we accept comments in doctypes
+
+local doctypeset = beginset * optionalspace * P(elementdoctype + entitydoctype + basiccomment + space)^0 * optionalspace * endset
local definitiondoctype= doctypename * somespace * doctypeset
local publicdoctype = doctypename * somespace * P("PUBLIC") * somespace * value * somespace * value * somespace * doctypeset
local systemdoctype = doctypename * somespace * P("SYSTEM") * somespace * value * somespace * doctypeset
local simpledoctype = (1-close)^1 -- * balanced^0
local somedoctype = C((somespace * (publicdoctype + systemdoctype + definitiondoctype + simpledoctype) * optionalspace)^0)
+local somedoctype = C((somespace * (publicdoctype + systemdoctype + definitiondoctype + simpledoctype) * optionalspace)^0)
local instruction = (spacing * begininstruction * someinstruction * endinstruction) / function(...) add_special("@pi@",...) end
local comment = (spacing * begincomment * somecomment * endcomment ) / function(...) add_special("@cm@",...) end
@@ -6845,17 +7475,30 @@ local grammar_unparsed_text = P { "preamble",
children = unparsedtext + V("parent") + emptyelement + comment + cdata + instruction,
}
--- maybe we will add settinsg to result as well
+-- maybe we will add settings to result as well
-local function xmlconvert(data, settings)
- settings = settings or { } -- no_root strip_cm_and_dt given_entities parent_root error_handler
- strip = settings.strip_cm_and_dt
- utfize = settings.utfize_entities
- resolve = settings.resolve_entities
+local function _xmlconvert_(data, settings)
+ settings = settings or { } -- no_root strip_cm_and_dt given_entities parent_root error_handler
+ --
+ strip = settings.strip_cm_and_dt
+ utfize = settings.utfize_entities
+ resolve = settings.resolve_entities
resolve_predefined = settings.resolve_predefined_entities -- in case we have escaped entities
- unify_predefined = settings.unify_predefined_entities -- &#038; -> &amp;
- cleanup = settings.text_cleanup
- stack, top, at, xmlns, errorstr, entities = { }, { }, { }, { }, nil, settings.entities or { }
+ unify_predefined = settings.unify_predefined_entities -- &#038; -> &amp;
+ cleanup = settings.text_cleanup
+ entities = settings.entities or { }
+ --
+ if utfize == nil then
+ settings.utfize_entities = true
+ utfize = true
+ end
+ if resolve_predefined == nil then
+ settings.resolve_predefined_entities = true
+ resolve_predefined = true
+ end
+ --
+ --
+ stack, top, at, xmlns, errorstr = { }, { }, { }, { }, nil
acache, hcache, dcache = { }, { }, { } -- not stored
reported_attribute_errors = { }
if settings.parent_root then
@@ -6893,7 +7536,7 @@ local function xmlconvert(data, settings)
else
errorhandler = errorhandler or xml.errorhandler
if errorhandler then
- xml.errorhandler("load",errorstr)
+ xml.errorhandler(format("load error: %s",errorstr))
end
end
else
@@ -6907,7 +7550,7 @@ local function xmlconvert(data, settings)
local v = rdt[k]
if type(v) == "table" and not v.special then -- always table -)
result.ri = k -- rootindex
-v.__p__ = result -- new, experiment, else we cannot go back to settings, we need to test this !
+ v.__p__ = result -- new, experiment, else we cannot go back to settings, we need to test this !
break
end
end
@@ -6915,16 +7558,42 @@ v.__p__ = result -- new, experiment, else we cannot go back to settings, we nee
if errorstr and errorstr ~= "" then
result.error = true
end
+ result.statistics = {
+ entities = {
+ decimals = dcache,
+ hexadecimals = hcache,
+ names = acache,
+ }
+ }
+ strip, utfize, resolve, resolve_predefined = nil, nil, nil, nil
+ unify_predefined, cleanup, entities = nil, nil, nil
+ stack, top, at, xmlns, errorstr = nil, nil, nil, nil, nil
+ acache, hcache, dcache = nil, nil, nil
+ reported_attribute_errors, mt, errorhandler = nil, nil, nil
return result
end
+-- Because we can have a crash (stack issues) with faulty xml, we wrap this one
+-- in a protector:
+
+function xmlconvert(data,settings)
+ local ok, result = pcall(function() return _xmlconvert_(data,settings) end)
+ if ok then
+ return result
+ else
+ return _xmlconvert_("")
+ end
+end
+
xml.convert = xmlconvert
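-- a minimal sketch (illustrative): malformed input no longer raises an error
-- but yields a tree flagged as erroneous,
--
-- local x = xml.convert("<root><a>ok</root>")
-- print(xml.is_valid(x))   -- false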
-function xml.inheritedconvert(data,xmldata)
+function xml.inheritedconvert(data,xmldata) -- xmldata is parent
local settings = xmldata.settings
- settings.parent_root = xmldata -- to be tested
+ if settings then
+ settings.parent_root = xmldata -- to be tested
+ end
-- settings.no_root = true
- local xc = xmlconvert(data,settings)
+ local xc = xmlconvert(data,settings) -- hm, we might need to locate settings
-- xc.settings = nil
-- xc.entities = nil
-- xc.special = nil
@@ -6953,7 +7622,7 @@ function xml.is_valid(root)
return root and not root.error
end
-xml.errorhandler = report
+xml.errorhandler = report_xml
--[[ldx--
<p>We cannot load an <l n='lpeg'/> from a filehandle so we need to load
@@ -7032,18 +7701,15 @@ alternative.</p>
function xml.checkbom(root) -- can be made faster
if root.ri then
- local dt, found = root.dt, false
+ local dt = root.dt
for k=1,#dt do
local v = dt[k]
if type(v) == "table" and v.special and v.tg == "@pi@" and find(v.dt[1],"xml.*version=") then
- found = true
- break
+ return
end
end
- if not found then
- insert(dt, 1, { special=true, ns="", tg="@pi@", dt = { "xml version='1.0' standalone='yes'"} } )
- insert(dt, 2, "\n" )
- end
+ insert(dt, 1, { special=true, ns="", tg="@pi@", dt = { "xml version='1.0' standalone='yes'"} } )
+ insert(dt, 2, "\n" )
end
end
@@ -7054,14 +7720,14 @@ and then handle the lot.</p>
-- new experimental reorganized serialize
-local function verbose_element(e,handlers)
+local function verbose_element(e,handlers) -- options
local handle = handlers.handle
local serialize = handlers.serialize
local ens, etg, eat, edt, ern = e.ns, e.tg, e.at, e.dt, e.rn
local ats = eat and next(eat) and { }
if ats then
for k,v in next, eat do
- ats[#ats+1] = format('%s=%q',k,v)
+ ats[#ats+1] = format('%s=%q',k,escaped(v))
end
end
if ern and trace_entities and ern ~= ens then
@@ -7077,7 +7743,7 @@ local function verbose_element(e,handlers)
for i=1,#edt do
local e = edt[i]
if type(e) == "string" then
- handle(e)
+ handle(escaped(e))
else
serialize(e,handlers)
end
@@ -7098,11 +7764,11 @@ local function verbose_element(e,handlers)
handle("<",etg,">")
end
for i=1,#edt do
- local ei = edt[i]
- if type(ei) == "string" then
- handle(ei)
+ local e = edt[i]
+ if type(e) == "string" then
+ handle(escaped(e)) -- option: hexify escaped entities
else
- serialize(ei,handlers)
+ serialize(e,handlers)
end
end
handle("</",etg,">")
@@ -7137,7 +7803,7 @@ local function verbose_root(e,handlers)
end
local function verbose_text(e,handlers)
- handlers.handle(e)
+ handlers.handle(escaped(e))
end
local function verbose_document(e,handlers)
@@ -7265,20 +7931,33 @@ local result
local xmlfilehandler = newhandlers {
name = "file",
- initialize = function(name) result = io.open(name,"wb") return result end,
- finalize = function() result:close() return true end,
- handle = function(...) result:write(...) end,
+ initialize = function(name)
+ result = io.open(name,"wb")
+ return result
+ end,
+ finalize = function()
+ result:close()
+ return true
+ end,
+ handle = function(...)
+ result:write(...)
+ end,
}
-- no checking on writeability here but not faster either
--
-- local xmlfilehandler = newhandlers {
--- initialize = function(name) io.output(name,"wb") return true end,
--- finalize = function() io.close() return true end,
+-- initialize = function(name)
+-- io.output(name,"wb")
+-- return true
+-- end,
+-- finalize = function()
+-- io.close()
+-- return true
+-- end,
-- handle = io.write,
-- }
-
function xml.save(root,name)
serialize(root,xmlfilehandler,name)
end
@@ -7287,28 +7966,34 @@ local result
local xmlstringhandler = newhandlers {
name = "string",
- initialize = function() result = { } return result end,
- finalize = function() return concat(result) end,
- handle = function(...) result[#result+1] = concat { ... } end
+ initialize = function()
+ result = { }
+ return result
+ end,
+ finalize = function()
+ return concat(result)
+ end,
+ handle = function(...)
+ result[#result+1] = concat { ... }
+ end,
}
local function xmltostring(root) -- 25% overhead due to collecting
- if root then
- if type(root) == 'string' then
- return root
- else -- if next(root) then -- next is faster than type (and >0 test)
- return serialize(root,xmlstringhandler) or ""
- end
+ if not root then
+ return ""
+ elseif type(root) == 'string' then
+ return root
+ else -- if next(root) then -- next is faster than type (and >0 test)
+ return serialize(root,xmlstringhandler) or ""
end
- return ""
end
-local function xmltext(root) -- inline
+local function __tostring(root) -- inline
return (root and xmltostring(root)) or ""
end
initialize_mt = function(root) -- redefinition
- mt = { __tostring = xmltext, __index = root }
+ mt = { __tostring = __tostring, __index = root }
end
xml.defaulthandlers = handlers
@@ -7428,7 +8113,7 @@ xml.tocdata(e,"error")
--ldx]]--
function xml.tocdata(e,wrapper)
- local whatever = xmltostring(e.dt)
+ local whatever = type(e) == "table" and xmltostring(e.dt) or e or ""
if wrapper then
whatever = format("<%s>%s</%s>",wrapper,whatever,wrapper)
end
@@ -7620,8 +8305,8 @@ apply_axis['child'] = function(list)
c = c + 1
collected[c] = dk
dk.ni = k -- refresh
- en = en + 1
- dk.ei = en
+ en = en + 1
+ dk.ei = en
end
end
ll.en = en
@@ -7649,6 +8334,7 @@ local function collect(list,collected,c)
end
return c
end
+
apply_axis['descendant'] = function(list)
local collected, c = { }, 0
for l=1,#list do
@@ -7940,6 +8626,12 @@ local lp_or = P("|") / " or "
local lp_and = P("&") / " and "
local lp_builtin = P (
+ P("text") / "(ll.dt[1] or '')" + -- fragile
+ P("content") / "ll.dt" +
+ -- P("name") / "(ll.ns~='' and ll.ns..':'..ll.tg)" +
+ P("name") / "((ll.ns~='' and ll.ns..':'..ll.tg) or ll.tg)" +
+ P("tag") / "ll.tg" +
+ P("position") / "l" + -- is element in finalizer
P("firstindex") / "1" +
P("lastindex") / "(#ll.__p__.dt or 1)" +
P("firstelement") / "1" +
@@ -7947,15 +8639,11 @@ local lp_builtin = P (
P("first") / "1" +
P("last") / "#list" +
P("rootposition") / "order" +
- P("position") / "l" + -- is element in finalizer
P("order") / "order" +
P("element") / "(ll.ei or 1)" +
P("index") / "(ll.ni or 1)" +
P("match") / "(ll.mi or 1)" +
- P("text") / "(ll.dt[1] or '')" +
- -- P("name") / "(ll.ns~='' and ll.ns..':'..ll.tg)" +
- P("name") / "((ll.ns~='' and ll.ns..':'..ll.tg) or ll.tg)" +
- P("tag") / "ll.tg" +
+ -- P("namespace") / "ll.ns" +
P("ns") / "ll.ns"
) * ((spaces * P("(") * spaces * P(")"))/"")
@@ -7979,11 +8667,11 @@ local lp_function = C(R("az","AZ","__")^1) * P("(") / function(t) -- todo: bett
end
end
-local lparent = lpeg.P("(")
-local rparent = lpeg.P(")")
+local lparent = P("(")
+local rparent = P(")")
local noparent = 1 - (lparent+rparent)
-local nested = lpeg.P{lparent * (noparent + lpeg.V(1))^0 * rparent}
-local value = lpeg.P(lparent * lpeg.C((noparent + nested)^0) * rparent) -- lpeg.P{"("*C(((1-S("()"))+V(1))^0)*")"}
+local nested = P{lparent * (noparent + V(1))^0 * rparent}
+local value = P(lparent * C((noparent + nested)^0) * rparent) -- P{"("*C(((1-S("()"))+V(1))^0)*")"}
local lp_child = Cc("expr.child(ll,'") * R("az","AZ","--","__")^1 * Cc("')")
local lp_number = S("+-") * R("09")^1
@@ -8135,7 +8823,10 @@ local special_1 = P("*") * Cc(register_auto_descendant) * Cc(register_all_nodes
local special_2 = P("/") * Cc(register_auto_self)
local special_3 = P("") * Cc(register_auto_self)
-local pathparser = Ct { "patterns", -- can be made a bit faster by moving pattern outside
+local no_nextcolon = P(-1) + #(1-P(":")) -- newer lpeg needs the P(-1)
+local no_nextlparent = P(-1) + #(1-P("(")) -- newer lpeg needs the P(-1)
+
+local pathparser = Ct { "patterns", -- can be made a bit faster by moving some patterns outside
patterns = spaces * V("protocol") * spaces * (
( V("special") * spaces * P(-1) ) +
@@ -8164,10 +8855,8 @@ local pathparser = Ct { "patterns", -- can be made a bit faster by moving patter
shortcuts = V("shortcuts_a") * (spaces * "/" * spaces * V("shortcuts_a"))^0,
s_descendant_or_self = (P("***/") + P("/")) * Cc(register_descendant_or_self), --- *** is a bonus
- -- s_descendant_or_self = P("/") * Cc(register_descendant_or_self),
s_descendant = P("**") * Cc(register_descendant),
- s_child = P("*") * #(1-P(":")) * Cc(register_child ),
--- s_child = P("*") * #(P("/")+P(-1)) * Cc(register_child ),
+ s_child = P("*") * no_nextcolon * Cc(register_child ),
s_parent = P("..") * Cc(register_parent ),
s_self = P("." ) * Cc(register_self ),
s_root = P("^^") * Cc(register_root ),
@@ -8194,13 +8883,13 @@ local pathparser = Ct { "patterns", -- can be made a bit faster by moving patter
expressions = expression / register_expression,
letters = R("az")^1,
- name = (1-lpeg.S("/[]()|:*!"))^1,
+ name = (1-S("/[]()|:*!"))^1, -- make inline
negate = P("!") * Cc(false),
nodefunction = V("negate") + P("not") * Cc(false) + Cc(true),
nodetest = V("negate") + Cc(true),
nodename = (V("negate") + Cc(true)) * spaces * ((V("wildnodename") * P(":") * V("wildnodename")) + (Cc(false) * V("wildnodename"))),
- wildnodename = (C(V("name")) + P("*") * Cc(false)) * #(1-P("(")),
+ wildnodename = (C(V("name")) + P("*") * Cc(false)) * no_nextlparent,
nodeset = spaces * Ct(V("nodename") * (spaces * P("|") * spaces * V("nodename"))^0) * spaces,
finalizer = (Cb("protocol") * P("/")^-1 * C(V("name")) * arguments * P(-1)) / register_finalizer,
@@ -8364,7 +9053,7 @@ local function profiled_apply(list,parsed,nofparsed,order)
elseif kind == "expression" then
collected = apply_expression(collected,pi.evaluator,order)
elseif kind == "finalizer" then
- collected = pi.finalizer(collected)
+ collected = pi.finalizer(collected) -- no check on # here
p.matched = p.matched + 1
p.finalized = p.finalized + 1
return collected
@@ -8497,9 +9186,9 @@ end
expressions.child = function(e,pattern)
return applylpath(e,pattern) -- todo: cache
end
-expressions.count = function(e,pattern)
+expressions.count = function(e,pattern) -- what if pattern == empty or nil
local collected = applylpath(e,pattern) -- todo: cache
- return (collected and #collected) or 0
+ return pattern and (collected and #collected) or 0
end
-- external
@@ -8508,7 +9197,7 @@ expressions.oneof = function(s,...) -- slow
local t = {...} for i=1,#t do if s == t[i] then return true end end return false
end
expressions.error = function(str)
- xml.errorhandler("unknown function in lpath expression",tostring(str or "?"))
+ xml.errorhandler(format("unknown function in lpath expression: %s",tostring(str or "?")))
return false
end
expressions.undefined = function(s)
@@ -8534,6 +9223,23 @@ expressions.lower = lower
expressions.number = tonumber
expressions.boolean = toboolean
+function expressions.contains(str,pattern)
+ local t = type(str)
+ if t == "string" then
+ if find(str,pattern) then
+ return true
+ end
+ elseif t == "table" then
+ for i=1,#str do
+ local d = str[i]
+ if type(d) == "string" and find(d,pattern) then
+ return true
+ end
+ end
+ end
+ return false
+end
+
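-- a minimal sketch of the new helper in an lpath expression (illustrative,
-- x being a previously converted tree):
--
-- for e in xml.collected(x,"/list/entry[contains(text(),'beta')]") do
--     print(xml.text(e))
-- end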
-- user interface
local function traverse(root,pattern,handle)
@@ -8856,10 +9562,12 @@ local xml = xml
local xmlconvert, xmlcopy, xmlname = xml.convert, xml.copy, xml.name
local xmlinheritedconvert = xml.inheritedconvert
local xmlapplylpath = xml.applylpath
+local xmlfilter = xml.filter
local type, setmetatable, getmetatable = type, setmetatable, getmetatable
-local insert, remove, fastcopy = table.insert, table.remove, table.fastcopy
-local gmatch, gsub = string.gmatch, string.gsub
+local insert, remove, fastcopy, concat = table.insert, table.remove, table.fastcopy, table.concat
+local gmatch, gsub, format = string.gmatch, string.gsub, string.format
+local utfbyte = utf.byte
local function report(what,pattern,c,e)
report_xml("%s element '%s' (root: '%s', position: %s, index: %s, pattern: %s)",what,xmlname(e),xmlname(e.__p__),c,e.ni,pattern)
@@ -9004,7 +9712,7 @@ local function xmltoelement(whatever,root)
end
local element
if type(whatever) == "string" then
- element = xmlinheritedconvert(whatever,root)
+ element = xmlinheritedconvert(whatever,root) -- beware, not really a root
else
element = whatever -- we assume a table
end
@@ -9031,18 +9739,30 @@ local function copiedelement(element,newparent)
end
function xml.delete(root,pattern)
- local collected = xmlapplylpath(root,pattern)
- if collected then
- for c=1,#collected do
- local e = collected[c]
- local p = e.__p__
- if p then
- if trace_manipulations then
- report('deleting',pattern,c,e)
+ if not pattern or pattern == "" then
+ local p = root.__p__
+ if p then
+ if trace_manipulations then
+ report('deleting',"--",c,root)
+ end
+ local d = p.dt
+ remove(d,root.ni)
+ redo_ni(d) -- can be made faster and inlined
+ end
+ else
+ local collected = xmlapplylpath(root,pattern)
+ if collected then
+ for c=1,#collected do
+ local e = collected[c]
+ local p = e.__p__
+ if p then
+ if trace_manipulations then
+ report('deleting',pattern,c,e)
+ end
+ local d = p.dt
+ remove(d,e.ni)
+ redo_ni(d) -- can be made faster and inlined
end
- local d = p.dt
- remove(d,e.ni)
- redo_ni(d) -- can be made faster and inlined
end
end
end
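-- a minimal sketch of both branches (illustrative, x a converted tree and
-- xml.first the usual lxml-xml helper):
--
-- xml.delete(x,"b")            -- delete all elements matching the lpath
-- xml.delete(xml.first(x,"a")) -- no pattern: detach that element itself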
@@ -9105,32 +9825,39 @@ end
local function inject_element(root,pattern,whatever,prepend)
local element = root and xmltoelement(whatever,root)
local collected = element and xmlapplylpath(root,pattern)
- if collected then
- for c=1,#collected do
- local e = collected[c]
- local r = e.__p__
- local d, k, rri = r.dt, e.ni, r.ri
- local edt = (rri and d[rri].dt) or (d and d[k] and d[k].dt)
- if edt then
- local be, af
- local cp = copiedelement(element,e)
- if prepend then
- be, af = cp, edt
- else
- be, af = edt, cp
- end
- local bn = #be
- for i=1,#af do
- bn = bn + 1
- be[bn] = af[i]
- end
- if rri then
- r.dt[rri].dt = be
- else
- d[k].dt = be
- end
- redo_ni(d)
+ local function inject_e(e)
+ local r = e.__p__
+ local d, k, rri = r.dt, e.ni, r.ri
+ local edt = (rri and d[rri].dt) or (d and d[k] and d[k].dt)
+ if edt then
+ local be, af
+ local cp = copiedelement(element,e)
+ if prepend then
+ be, af = cp, edt
+ else
+ be, af = edt, cp
end
+ local bn = #be
+ for i=1,#af do
+ bn = bn + 1
+ be[bn] = af[i]
+ end
+ if rri then
+ r.dt[rri].dt = be
+ else
+ d[k].dt = be
+ end
+ redo_ni(d)
+ end
+ end
+ if not collected then
+ -- nothing
+ elseif collected.tg then
+ -- first or so
+ inject_e(collected)
+ else
+ for c=1,#collected do
+ inject_e(collected[c])
end
end
end
@@ -9138,16 +9865,23 @@ end
local function insert_element(root,pattern,whatever,before) -- todo: element als functie
local element = root and xmltoelement(whatever,root)
local collected = element and xmlapplylpath(root,pattern)
- if collected then
+ local function insert_e(e)
+ local r = e.__p__
+ local d, k = r.dt, e.ni
+ if not before then
+ k = k + 1
+ end
+ insert(d,k,copiedelement(element,r))
+ redo_ni(d)
+ end
+ if not collected then
+ -- nothing
+ elseif collected.tg then
+ -- first or so
+ insert_e(collected)
+ else
for c=1,#collected do
- local e = collected[c]
- local r = e.__p__
- local d, k = r.dt, e.ni
- if not before then
- k = k + 1
- end
- insert(d,k,copiedelement(element,r))
- redo_ni(d)
+ insert_e(collected[c])
end
end
end
@@ -9346,6 +10080,75 @@ function xml.remapname(root, pattern, newtg, newns, newrn)
end
--[[ldx--
+<p>Helper (for q2p).</p>
+--ldx]]--
+
+function xml.cdatatotext(e)
+ local dt = e.dt
+ if #dt == 1 then
+ local first = dt[1]
+ if first.tg == "@cd@" then
+ e.dt = first.dt
+ end
+ else
+ -- maybe option
+ end
+end
+
+xml.builtinentities = table.tohash { "amp", "quot", "apos", "lt", "gt" } -- used often so share
+
+local entities = characters and characters.entities or nil
+local builtinentities = xml.builtinentities
+
+function xml.addentitiesdoctype(root,option) -- we could also have a 'resolve' i.e. inline hex
+ if not entities then
+ require("char-ent")
+ entities = characters.entities
+ end
+ if entities and root and root.tg == "@rt@" and root.statistics then
+ local list = { }
+ local hexify = option == "hexadecimal"
+ for k, v in table.sortedhash(root.statistics.entities.names) do
+ if not builtinentities[k] then
+ local e = entities[k]
+ if not e then
+ e = format("[%s]",k)
+ elseif hexify then
+ e = format("&#%05X;",utfbyte(k))
+ end
+ list[#list+1] = format(" <!ENTITY %s %q >",k,e)
+ end
+ end
+ local dt = root.dt
+ local n = dt[1].tg == "@pi@" and 2 or 1
+ if #list > 0 then
+ insert(dt, n, { "\n" })
+ insert(dt, n, {
+ tg = "@dt@", -- beware, doctype is unparsed
+ dt = { format("Something [\n%s\n] ",concat(list)) },
+ ns = "",
+ special = true,
+ })
+ insert(dt, n, { "\n\n" })
+ else
+ -- insert(dt, n, { table.serialize(root.statistics) })
+ end
+ end
+end
+
+-- local str = [==[
+-- <?xml version='1.0' standalone='yes' ?>
+-- <root>
+-- <a>test &nbsp; test &#123; test</a>
+-- <b><![CDATA[oeps]]></b>
+-- </root>
+-- ]==]
+--
+-- local x = xml.convert(str)
+-- xml.addentitiesdoctype(x,"hexadecimal")
+-- print(x)
+
+--[[ldx--
<p>Here are a few synonyms.</p>
--ldx]]--
@@ -9378,6 +10181,53 @@ xml.remap_tag = xml.remaptag obsolete.remap_tag
xml.remap_name = xml.remapname obsolete.remap_name = xml.remapname
xml.remap_namespace = xml.remapnamespace obsolete.remap_namespace = xml.remapnamespace
+-- new (probably ok)
+
+function xml.cdata(e)
+ if e then
+ local dt = e.dt
+ if dt and #dt == 1 then
+ local first = dt[1]
+ return first.tg == "@cd@" and first.dt[1] or ""
+ end
+ end
+ return ""
+end
+
+function xml.finalizers.xml.cdata(collected)
+ if collected then
+ local e = collected[1]
+ if e then
+ local dt = e.dt
+ if dt and #dt == 1 then
+ local first = dt[1]
+ return first.tg == "@cd@" and first.dt[1] or ""
+ end
+ end
+ end
+ return ""
+end
+
+function xml.insertcomment(e,str,n) -- also insertcdata
+ table.insert(e.dt,n or 1,{
+ tg = "@cm@",
+ ns = "",
+ special = true,
+ at = { },
+ dt = { str },
+ })
+end
+
+function xml.setcdata(e,str) -- also setcomment
+ e.dt = { {
+ tg = "@cd@",
+ ns = "",
+ special = true,
+ at = { },
+ dt = { str },
+ } }
+end
+
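-- a minimal sketch, e being some element from a converted tree (illustrative):
--
-- xml.setcdata(e,"a < b & c")   -- e now holds a single @cd@ child
-- print(xml.cdata(e))           -- a < b & c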
end -- of closure
@@ -9391,15 +10241,17 @@ if not modules then modules = { } end modules ['lxml-xml'] = {
license = "see context related readme files"
}
-local concat = string.concat
+local concat = table.concat
+local find = string.find
local xml = xml
-local finalizers = xml.finalizers.xml
-local xmlfilter = xml.filter -- we could inline this one for speed
-local xmltostring = xml.tostring
-local xmlserialize = xml.serialize
-local xmlcollected = xml.collected
+local finalizers = xml.finalizers.xml
+local xmlfilter = xml.filter -- we could inline this one for speed
+local xmltostring = xml.tostring
+local xmlserialize = xml.serialize
+local xmlcollected = xml.collected
+local xmlnewhandlers = xml.newhandlers
local function first(collected) -- wrong ?
return collected and collected[1]
@@ -9413,6 +10265,21 @@ local function all(collected)
return collected
end
+-- local function reverse(collected)
+-- if collected then
+-- local nc = #collected
+-- if nc > 0 then
+-- local reversed, r = { }, 0
+-- for c=nc,1,-1 do
+-- r = r + 1
+-- reversed[r] = collected[c]
+-- end
+-- return reversed
+-- else
+-- return collected
+-- end
+-- end
+-- end
local reverse = table.reversed
@@ -9429,34 +10296,37 @@ local function att(id,name)
end
local function count(collected)
- return (collected and #collected) or 0
+ return collected and #collected or 0
end
local function position(collected,n)
- if collected then
- n = tonumber(n) or 0
- if n < 0 then
- return collected[#collected + n + 1]
- elseif n > 0 then
- return collected[n]
- else
- return collected[1].mi or 0
- end
+ if not collected then
+ return 0
+ end
+ local nc = #collected
+ if nc == 0 then
+ return 0
+ end
+ n = tonumber(n) or 0
+ if n < 0 then
+ return collected[nc + n + 1]
+ elseif n > 0 then
+ return collected[n]
+ else
+ return collected[1].mi or 0
end
end
local function match(collected)
- return (collected and collected[1].mi) or 0 -- match
+ return collected and #collected > 0 and collected[1].mi or 0 -- match
end
local function index(collected)
- if collected then
- return collected[1].ni
- end
+ return collected and #collected > 0 and collected[1].ni or 0 -- 0 is new
end
local function attributes(collected,arguments)
- if collected then
+ if collected and #collected > 0 then
local at = collected[1].at
if arguments then
return at[arguments]
@@ -9467,7 +10337,7 @@ local function attributes(collected,arguments)
end
local function chainattribute(collected,arguments) -- todo: optional levels
- if collected then
+ if collected and #collected > 0 then
local e = collected[1]
while e do
local at = e.at
@@ -9485,108 +10355,169 @@ local function chainattribute(collected,arguments) -- todo: optional levels
return ""
end
-local function raw(collected) -- hybrid
- if collected then
+local function raw(collected) -- hybrid (not much different from text so it might go)
+ if collected and #collected > 0 then
local e = collected[1] or collected
- return (e and xmlserialize(e)) or "" -- only first as we cannot concat function
+ return e and xmltostring(e) or "" -- only first as we cannot concat function
else
return ""
end
end
+--
+
+local xmltexthandler = xmlnewhandlers {
+ name = "string",
+ initialize = function()
+ result = { }
+ return result
+ end,
+ finalize = function()
+ return concat(result)
+ end,
+ handle = function(...)
+ result[#result+1] = concat { ... }
+ end,
+ escape = false,
+}
+
+local function xmltotext(root)
+ local dt = root.dt
+ if not dt then
+ return ""
+ end
+ local nt = #dt -- string or table
+ if nt == 0 then
+ return ""
+ elseif nt == 1 and type(dt[1]) == "string" then
+ return dt[1] -- no escaping of " ' < > &
+ else
+ return xmlserialize(root,xmltexthandler) or ""
+ end
+end
+
+--
+
local function text(collected) -- hybrid
- if collected then
- local e = collected[1] or collected
- return (e and xmltostring(e.dt)) or ""
+ if collected then -- no # test here !
+ local e = collected[1] or collected -- why fallback to element, how about cdata
+ return e and xmltotext(e) or ""
else
return ""
end
end
local function texts(collected)
- if collected then
- local t, n = { }, 0
- for c=1,#collected do
- local e = collected[c]
- if e and e.dt then
- n = n + 1
- t[n] = e.dt
- end
+ if not collected then
+ return { } -- why no nil
+ end
+ local nc = #collected
+ if nc == 0 then
+ return { } -- why no nil
+ end
+ local t, n = { }, 0
+ for c=1,nc do
+ local e = collected[c]
+ if e and e.dt then
+ n = n + 1
+ t[n] = e.dt
end
- return t
end
+ return t
end
local function tag(collected,n)
- if collected then
- local c
- if n == 0 or not n then
- c = collected[1]
- elseif n > 1 then
- c = collected[n]
- else
- c = collected[#collected-n+1]
- end
- return c and c.tg
+ if not collected then
+ return
+ end
+ local nc = #collected
+ if nc == 0 then
+ return
end
+ local c
+ if n == 0 or not n then
+ c = collected[1]
+ elseif n > 1 then
+ c = collected[n]
+ else
+ c = collected[nc-n+1]
+ end
+ return c and c.tg
end
local function name(collected,n)
- if collected then
- local c
- if n == 0 or not n then
- c = collected[1]
- elseif n > 1 then
- c = collected[n]
- else
- c = collected[#collected-n+1]
- end
- if c then
- if c.ns == "" then
- return c.tg
- else
- return c.ns .. ":" .. c.tg
- end
- end
+ if not collected then
+ return
+ end
+ local nc = #collected
+ if nc == 0 then
+ return
+ end
+ local c
+ if n == 0 or not n then
+ c = collected[1]
+ elseif n > 1 then
+ c = collected[n]
+ else
+ c = collected[nc-n+1]
+ end
+ if not c then
+ -- sorry
+ elseif c.ns == "" then
+ return c.tg
+ else
+ return c.ns .. ":" .. c.tg
end
end
local function tags(collected,nonamespace)
- if collected then
- local t, n = { }, 0
- for c=1,#collected do
- local e = collected[c]
- local ns, tg = e.ns, e.tg
- n = n + 1
- if nonamespace or ns == "" then
- t[n] = tg
- else
- t[n] = ns .. ":" .. tg
- end
+ if not collected then
+ return
+ end
+ local nc = #collected
+ if nc == 0 then
+ return
+ end
+ local t, n = { }, 0
+ for c=1,nc do
+ local e = collected[c]
+ local ns, tg = e.ns, e.tg
+ n = n + 1
+ if nonamespace or ns == "" then
+ t[n] = tg
+ else
+ t[n] = ns .. ":" .. tg
end
- return t
end
+ return t
end
-local function empty(collected)
- if collected then
- for c=1,#collected do
- local e = collected[c]
- if e then
- local edt = e.dt
- if edt then
- local n = #edt
- if n == 1 then
- local edk = edt[1]
- local typ = type(edk)
- if typ == "table" then
- return false
- elseif edk ~= "" then -- maybe an extra tester for spacing only
- return false
- end
- elseif n > 1 then
+local function empty(collected,spacesonly)
+ if not collected then
+ return true
+ end
+ local nc = #collected
+ if nc == 0 then
+ return true
+ end
+ for c=1,nc do
+ local e = collected[c]
+ if e then
+ local edt = e.dt
+ if edt then
+ local n = #edt
+ if n == 1 then
+ local edk = edt[1]
+ local typ = type(edk)
+ if typ == "table" then
+ return false
+ elseif edk ~= "" then
+ return false
+ elseif spacesonly and not find(edk,"%S") then
return false
end
+ elseif n > 1 then
+ return false
end
end
end
@@ -9641,14 +10572,14 @@ function xml.raw(id,pattern)
end
end
-function xml.text(id,pattern)
+function xml.text(id,pattern) -- brrr either content or element (when cdata)
if pattern then
-- return text(xmlfilter(id,pattern))
local collected = xmlfilter(id,pattern)
- return (collected and xmltostring(collected[1].dt)) or ""
+ return collected and #collected > 0 and xmltotext(collected[1]) or ""
elseif id then
-- return text(id)
- return xmltostring(id.dt) or ""
+ return xmltotext(id) or ""
else
return ""
end
@@ -9656,6 +10587,8 @@ end
xml.content = text
+--
+
function xml.position(id,pattern,n) -- element
return position(xmlfilter(id,pattern),n)
end
@@ -9664,8 +10597,8 @@ function xml.match(id,pattern) -- number
return match(xmlfilter(id,pattern))
end
-function xml.empty(id,pattern)
- return empty(xmlfilter(id,pattern))
+function xml.empty(id,pattern,spacesonly)
+ return empty(xmlfilter(id,pattern),spacesonly)
end
xml.all = xml.filter
@@ -9719,7 +10652,7 @@ if not modules then modules = { } end modules ['data-ini'] = {
license = "see context related readme files",
}
-local gsub, find, gmatch = string.gsub, string.find, string.gmatch
+local gsub, find, gmatch, char = string.gsub, string.find, string.gmatch, string.char
local concat = table.concat
local next, type = next, type
@@ -9781,7 +10714,7 @@ do
local homedir = osgetenv(ostype == "windows" and 'USERPROFILE' or 'HOME') or ''
if not homedir or homedir == "" then
- homedir = string.char(127) -- we need a value, later we wil trigger on it
+ homedir = char(127) -- we need a value, later we will trigger on it
end
homedir = file.collapsepath(homedir)
@@ -9954,7 +10887,7 @@ if not modules then modules = { } end modules ['data-exp'] = {
license = "see context related readme files",
}
-local format, find, gmatch, lower = string.format, string.find, string.gmatch, string.lower
+local format, find, gmatch, lower, char, sub = string.format, string.find, string.gmatch, string.lower, string.char, string.sub
local concat, sort = table.concat, table.sort
local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
local Ct, Cs, Cc, P, C, S = lpeg.Ct, lpeg.Cs, lpeg.Cc, lpeg.P, lpeg.C, lpeg.S
@@ -9974,19 +10907,6 @@ local resolvers = resolvers
-- all, when working on the main resolver code, I don't want to scroll
-- past this every time. See data-obs.lua for the gsub variant.
--- {a,b,c,d}
--- a,b,c/{p,q,r},d
--- a,b,c/{p,q,r}/d/{x,y,z}//
--- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
--- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
--- a{b,c}{d,e}f
--- {a,b,c,d}
--- {a,b,c/{p,q,r},d}
--- {a,b,c/{p,q,r}/d/{x,y,z}//}
--- {a,b,c/{p,q/{x,y,z}},d/{p,q,r}}
--- {a,b,c/{p,q/{x,y,z},w}v,d/{p,q,r}}
--- {$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,.local,}/web2c}
-
local function f_first(a,b)
local t, n = { }, 0
for s in gmatch(b,"[^,]+") do
@@ -10043,12 +10963,9 @@ local function splitpathexpr(str, newlist, validate) -- I couldn't resist lpeggi
str = lpegmatch(stripper_1,str)
if validate then
for s in gmatch(str,"[^,]+") do
- local restoreslashes = false
- if find(s, "//$") then restoreslashes = true end
s = validate(s)
if s then
n = n + 1 ; t[n] = s
- if restoreslashes and not find(s, "//$") then t[n] = t[n] .. '//' end
end
end
else
@@ -10081,41 +10998,56 @@ function resolvers.expandedpathfromlist(pathlist)
return newlist
end
+-- {a,b,c,d}
+-- a,b,c/{p,q,r},d
+-- a,b,c/{p,q,r}/d/{x,y,z}//
+-- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
+-- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
+-- a{b,c}{d,e}f
+-- {a,b,c,d}
+-- {a,b,c/{p,q,r},d}
+-- {a,b,c/{p,q,r}/d/{x,y,z}//}
+-- {a,b,c/{p,q/{x,y,z}},d/{p,q,r}}
+-- {a,b,c/{p,q/{x,y,z},w}v,d/{p,q,r}}
+-- {$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,.local,}/web2c}
+
local cleanup = lpeg.replacer {
{ "!" , "" },
{ "\\" , "/" },
}
-local homedir
-
-function resolvers.cleanpath(str)
- if not homedir then
- homedir = lpegmatch(cleanup,environment.homedir or "")
- if homedir == string.char(127) or homedir == "" or not lfs.isdir(homedir) then
- if trace_expansions then
- report_expansions("no home dir set, ignoring dependent paths")
- end
- function resolvers.cleanpath(str)
- if find(str,"~") then
- return "" -- special case
- else
- return str and lpegmatch(cleanup,str)
- end
- end
- else
- cleanup = lpeg.replacer {
- { "!" , "" },
- { "\\" , "/" },
- { "~" , homedir },
- }
- function resolvers.cleanpath(str)
- return str and lpegmatch(cleanup,str)
+function resolvers.cleanpath(str) -- tricky, maybe only simple paths
+ local doslashes = (P("\\")/"/" + 1)^0
+ local donegation = (P("!") /"" )^0
+ local homedir = lpegmatch(Cs(donegation * doslashes),environment.homedir or "")
+ if homedir == "~" or homedir == "" or not lfs.isdir(homedir) then
+ if trace_expansions then
+ report_expansions("no home dir set, ignoring dependent paths")
+ end
+ function resolvers.cleanpath(str)
+ if not str or find(str,"~") then
+ return "" -- special case
+ else
+ return lpegmatch(cleanup,str)
end
end
+ else
+ local dohome = ((P("~")+P("$HOME"))/homedir)^0
+ local cleanup = Cs(donegation * dohome * doslashes)
+ function resolvers.cleanpath(str)
+ return str and lpegmatch(cleanup,str) or ""
+ end
end
return resolvers.cleanpath(str)
end
+-- print(resolvers.cleanpath(""))
+-- print(resolvers.cleanpath("!"))
+-- print(resolvers.cleanpath("~"))
+-- print(resolvers.cleanpath("~/test"))
+-- print(resolvers.cleanpath("!~/test"))
+-- print(resolvers.cleanpath("~/test~test"))
+
-- This one strips quotes and funny tokens.
local expandhome = P("~") / "$HOME" -- environment.homedir
@@ -10140,8 +11072,8 @@ end
local cache = { }
----- splitter = Ct(lpeg.splitat(S(ostype == "windows" and ";" or ":;"))) -- maybe add ,
-local splitter = Ct(lpeg.splitat(";")) -- as we move towards urls, prefixes and use tables we no longer do :
+----- splitter = lpeg.tsplitat(S(ostype == "windows" and ";" or ":;")) -- maybe add ,
+local splitter = lpeg.tsplitat(";") -- as we move towards urls, prefixes and use tables we no longer do :
local backslashswapper = lpeg.replacer("\\","/")
@@ -10201,13 +11133,22 @@ end
-local weird = P(".")^1 + lpeg.anywhere(S("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t"))
+-- a lot of this caching can be stripped away when we have ssd's everywhere
+--
+-- we could cache all the (sub)paths here if needed
local attributes, directory = lfs.attributes, lfs.dir
+local weird = P(".")^1 + lpeg.anywhere(S("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t"))
+local timer = { }
+local scanned = { }
+local nofscans = 0
+local scancache = { }
+
local function scan(files,spec,path,n,m,r)
- local full = (path == "" and spec) or (spec .. path .. '/')
- local dirs, nofdirs = { }, 0
+ local full = (path == "" and spec) or (spec .. path .. '/')
+ local dirs = { }
+ local nofdirs = 0
for name in directory(full) do
if not lpegmatch(weird,name) then
local mode = attributes(full..name,'mode')
@@ -10245,25 +11186,122 @@ local function scan(files,spec,path,n,m,r)
files, n, m, r = scan(files,spec,dirs[i],n,m,r)
end
end
+ scancache[sub(full,1,-2)] = files
return files, n, m, r
end
-function resolvers.scanfiles(path,branch)
+local fullcache = { }
+
+function resolvers.scanfiles(path,branch,usecache)
+ statistics.starttiming(timer)
+ local realpath = resolvers.resolve(path) -- no shortcut
+ if usecache then
+ local files = fullcache[realpath]
+ if files then
+ if trace_locating then
+ report_expansions("using cached scan of path '%s', branch '%s'",path,branch or path)
+ end
+ return files
+ end
+ end
if trace_locating then
- report_expansions("scanning path '%s', branch '%s'",path, branch or path)
+ report_expansions("scanning path '%s', branch '%s'",path,branch or path)
end
- local realpath = resolvers.resolve(path) -- no shortcut
local files, n, m, r = scan({ },realpath .. '/',"",0,0,0)
- files.__path__ = path -- can be selfautoparent:texmf-whatever
- files.__files__ = n
- files.__directories__ = m
- files.__remappings__ = r
+ files.__path__ = path -- can be selfautoparent:texmf-whatever
+ files.__files__ = n
+ files.__directories__ = m
+ files.__remappings__ = r
if trace_locating then
report_expansions("%s files found on %s directories with %s uppercase remappings",n,m,r)
end
+ if usecache then
+ scanned[#scanned+1] = realpath
+ fullcache[realpath] = files
+ end
+ nofscans = nofscans + 1
+ statistics.stoptiming(timer)
+ return files
+end
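-- a minimal sketch (illustrative, the path is just an example): with usecache
-- set, a second scan of the same resolved path is served from fullcache,
--
-- local files = resolvers.scanfiles("/opt/tex/texmf-local",nil,true)
-- local again = resolvers.scanfiles("/opt/tex/texmf-local",nil,true) -- cache hit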
+
+local function simplescan(files,spec,path) -- first match only, no map and such
+ local full = (path == "" and spec) or (spec .. path .. '/')
+ local dirs = { }
+ local nofdirs = 0
+ for name in directory(full) do
+ if not lpegmatch(weird,name) then
+ local mode = attributes(full..name,'mode')
+ if mode == 'file' then
+ if not files[name] then
+ -- only first match
+ files[name] = path
+ end
+ elseif mode == 'directory' then
+ nofdirs = nofdirs + 1
+ if path ~= "" then
+ dirs[nofdirs] = path..'/'..name
+ else
+ dirs[nofdirs] = name
+ end
+ end
+ end
+ end
+ if nofdirs > 0 then
+ sort(dirs)
+ for i=1,nofdirs do
+ files = simplescan(files,spec,dirs[i])
+ end
+ end
+ return files
+end
+
+local simplecache = { }
+local nofsharedscans = 0
+
+function resolvers.simplescanfiles(path,branch,usecache)
+ statistics.starttiming(timer)
+ local realpath = resolvers.resolve(path) -- no shortcut
+ if usecache then
+ local files = simplecache[realpath]
+ if not files then
+ files = scancache[realpath]
+ if files then
+ nofsharedscans = nofsharedscans + 1
+ end
+ end
+ if files then
+ if trace_locating then
+ report_expansions("using cached scan of path '%s', branch '%s'",path,branch or path)
+ end
+ return files
+ end
+ end
+ if trace_locating then
+ report_expansions("scanning path '%s', branch '%s'",path,branch or path)
+ end
+ local files = simplescan({ },realpath .. '/',"")
+ if trace_locating then
+ report_expansions("%s files found",table.count(files))
+ end
+ if usecache then
+ scanned[#scanned+1] = realpath
+ simplecache[realpath] = files
+ end
+ nofscans = nofscans + 1
+ statistics.stoptiming(timer)
return files
end
+function resolvers.scandata()
+ table.sort(scanned)
+ return {
+ n = nofscans,
+ shared = nofsharedscans,
+ time = statistics.elapsedtime(timer),
+ paths = scanned,
+ }
+end
+
end -- of closure
@@ -10361,12 +11399,12 @@ local relations = allocate { -- todo: handlers also here
mp = {
names = { "mp" },
variable = 'MPINPUTS',
- suffixes = { 'mp' },
+ suffixes = { 'mp', 'mpvi', 'mpiv', 'mpii' },
},
tex = {
names = { "tex" },
variable = 'TEXINPUTS',
- suffixes = { 'tex', "mkiv", "mkiv", "mkii" },
+ suffixes = { 'tex', "mkvi", "mkiv", "mkii" },
},
icc = {
names = { "icc", "icc profile", "icc profiles" },
@@ -10503,7 +11541,7 @@ end
resolvers.updaterelations() -- push this in the metatable -> newindex
local function simplified(t,k)
- return rawget(t,lower(gsub(k," ","")))
+ return k and rawget(t,lower(gsub(k," ",""))) or nil
end
setmetatableindex(formats, simplified)
@@ -10589,6 +11627,7 @@ luatools with a recache feature.</p>
--ldx]]--
local format, lower, gsub, concat = string.format, string.lower, string.gsub, table.concat
+local serialize, serializetofile = table.serialize, table.tofile
local mkdirs, isdir = dir.mkdirs, lfs.isdir
local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
@@ -10651,11 +11690,7 @@ local function identify()
end
elseif not writable and caches.force then
local cacheparent = file.dirname(cachepath)
- -- TH: added 'or true' for deeply buried non-existent caches.
- -- file.is_writable() is not really important here, since this
- -- branch is only checked for as-yet non-existent paths, and
- -- it guards against mkdirs() failing, anyway.
- if file.is_writable(cacheparent) or true then
+ if file.is_writable(cacheparent) and true then -- we go on anyway (needed for mojca's kind of paths)
if not caches.ask or io.ask(format("\nShould I create the cache path %s?",cachepath), "no", { "yes", "no" }) == "yes" then
mkdirs(cachepath)
if isdir(cachepath) and file.is_writable(cachepath) then
@@ -10746,7 +11781,7 @@ function caches.usedpaths()
end
function caches.configfiles()
- return table.concat(resolvers.instance.specification,";")
+ return concat(resolvers.instance.specification,";")
end
function caches.hashed(tree)
@@ -10870,9 +11905,9 @@ function caches.savedata(filepath,filename,data,raw)
end
data.cache_uuid = os.uuid()
if caches.direct then
- file.savedata(tmaname,table.serialize(data,true,saveoptions))
+ file.savedata(tmaname,serialize(data,true,saveoptions))
else
- table.tofile(tmaname,data,true,saveoptions)
+ serializetofile(tmaname,data,true,saveoptions)
end
utilities.lua.compile(tmaname,tmcname)
end
@@ -10939,7 +11974,7 @@ function caches.savecontent(cachename,dataname,content)
content = content,
uuid = os.uuid(),
}
- local ok = io.savedata(luaname,table.serialize(data,true))
+ local ok = io.savedata(luaname,serialize(data,true))
if ok then
if trace_locating then
report_resolvers("category '%s', cachename '%s' saved in '%s'",dataname,cachename,luaname)
@@ -11112,7 +12147,7 @@ if not modules then modules = { } end modules ['data-res'] = {
-- instance but for practical purposes we now avoid this and use a
-- instance variable. We always have one instance active (sort of global).
--- todo: cache:/// home:///
+-- todo: cache:/// home:/// selfautoparent:/// (sometime end 2012)
local format, gsub, find, lower, upper, match, gmatch = string.format, string.gsub, string.find, string.lower, string.upper, string.match, string.gmatch
local concat, insert, sortedkeys = table.concat, table.insert, table.sortedkeys
@@ -11155,7 +12190,58 @@ resolvers.criticalvars = allocate { "SELFAUTOLOC", "SELFAUTODIR", "SELFAUTOPARE
resolvers.luacnfname = 'texmfcnf.lua'
resolvers.luacnfstate = "unknown"
-resolvers.luacnfspec = '{selfautodir:,selfautoparent:};{selfautodir:,selfautoparent:}{/share,}/texmf{-local,};{selfautodir:,selfautoparent:}{/share,}/texmf{-local,}/web2c'
+-- The web2c tex binaries as well as kpse have built in paths for the configuration
+-- files and there can be a depressing truckload of them. This is actually the weak
+-- spot of a distribution. So we don't want:
+--
+-- resolvers.luacnfspec = '{$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,}/web2c}'
+--
+-- but instead use:
+--
+-- resolvers.luacnfspec = 'selfautoparent:{/texmf{-local,}{,/web2c}}'
+--
+-- which does not make texlive happy as there is a texmf-local tree one level up
+-- (sigh), so we need this. (We can assume web2c as mkiv does not run on older
+-- texlives anyway.)
+--
+-- texlive:
+--
+-- selfautodir:
+-- selfautoparent:
+-- selfautodir:share/texmf-local/web2c
+-- selfautodir:share/texmf/web2c
+-- selfautodir:texmf-local/web2c
+-- selfautodir:texmf/web2c
+-- selfautoparent:share/texmf-local/web2c
+-- selfautoparent:share/texmf/web2c
+-- selfautoparent:texmf-local/web2c
+-- selfautoparent:texmf/web2c
+--
+-- minimals:
+--
+-- home:texmf/web2c
+-- selfautoparent:texmf-local/web2c
+-- selfautoparent:texmf-context/web2c
+-- selfautoparent:texmf/web2c
+
+if this_is_texlive then
+ -- resolvers.luacnfspec = '{selfautodir:,selfautoparent:}{,{/share,}/texmf{-local,}/web2c}'
+ -- resolvers.luacnfspec = '{selfautodir:{/share,}/texmf-local/web2c,selfautoparent:{/share,}/texmf{-local,}/web2c}'
+ -- resolvers.luacnfspec = 'selfautodir:/texmf-local/web2c;selfautoparent:/texmf{-local,}/web2c'
+ resolvers.luacnfspec = 'selfautodir:;selfautoparent:;{selfautodir:,selfautoparent:}{/share,}/texmf{-local,}/web2c'
+else
+ resolvers.luacnfspec = 'home:texmf/web2c;selfautoparent:texmf{-local,-context,}/web2c'
+end
+
+-- which (as we want users to use the web2c path) can be simplified to this:
+--
+-- if environment and environment.ownpath and string.find(environment.ownpath,"[\\/]texlive[\\/]") then
+-- resolvers.luacnfspec = 'selfautodir:/texmf-local/web2c,selfautoparent:/texmf-local/web2c,selfautoparent:/texmf/web2c'
+-- else
+-- resolvers.luacnfspec = 'selfautoparent:/texmf-local/web2c,selfautoparent:/texmf/web2c'
+-- end
+
+
local unset_variable = "unset"
@@ -11177,7 +12263,7 @@ local instance = resolvers.instance or nil -- the current one (fast access)
function resolvers.setenv(key,value,raw)
if instance then
-- this one will be consulted first when we stay inside
- -- the current environment
+ -- the current environment (prefixes are not resolved here)
instance.environment[key] = value
-- we feed back into the environment, and as this is used
-- by other applications (via os.execute) we need to make
@@ -11257,6 +12343,7 @@ function resolvers.newinstance() -- todo: all vars will become lowercase and alp
remember = true,
diskcache = true,
renewcache = false,
+ renewtree = false,
loaderror = false,
savelists = true,
pattern = nil, -- lists
@@ -11348,7 +12435,7 @@ local function makepathexpression(str)
end
end
-local function reportcriticalvariables()
+local function reportcriticalvariables(cnfspec)
if trace_locating then
for i=1,#resolvers.criticalvars do
local k = resolvers.criticalvars[i]
@@ -11356,6 +12443,14 @@ local function reportcriticalvariables()
report_resolving("variable '%s' set to '%s'",k,v)
end
report_resolving()
+ if cnfspec then
+ if type(cnfspec) == "table" then
+ report_resolving("using configuration specification '%s'",concat(cnfspec,","))
+ else
+ report_resolving("using configuration specification '%s'",cnfspec)
+ end
+ end
+ report_resolving()
end
reportcriticalvariables = function() end
end
@@ -11370,7 +12465,7 @@ local function identify_configuration_files()
else
resolvers.luacnfstate = "environment"
end
- reportcriticalvariables()
+ reportcriticalvariables(cnfspec)
local cnfpaths = expandedpathfromlist(resolvers.splitpath(cnfspec))
local luacnfname = resolvers.luacnfname
for i=1,#cnfpaths do
@@ -11406,6 +12501,19 @@ local function load_configuration_files()
if blob then
local setups = instance.setups
local data = blob()
+ local parent = data and data.parent
+ if parent then
+ local filename = filejoin(pathname,parent)
+ local realname = resolvers.resolve(filename) -- no shortcut
+ local blob = loadfile(realname)
+ if blob then
+ local parentdata = blob()
+ if parentdata then
+ report_resolving("loading configuration file '%s'",filename)
+ data = table.merged(parentdata,data)
+ end
+ end
+ end
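      -- example only (not part of the patch): with this parent chaining a child
      -- texmfcnf.lua can point at a parent file that gets loaded and merged in
      -- first; the file name and content shown here are hypothetical:
      --
      --   return {
      --       parent  = "../../texmf/web2c/texmfcnf.lua", -- merged in first
      --       content = { ... },                          -- this file's own settings
      --   }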
data = data and data.content
if data then
if trace_locating then
@@ -11495,11 +12603,6 @@ local function locate_file_databases()
local runtime = stripped == path
path = resolvers.cleanpath(path)
local spec = resolvers.splitmethod(stripped)
- -- TH Original did this the other way around (elseif and if tests
- -- reversed) but then the else branch was never reached, as 'runtime'
- -- would effectively be ignored. In turn, that meant that the wrong
- -- locator method was used, such that tree: and file: were treated
- -- identically (cached and no runtime search).
if runtime and (spec.noscheme or spec.scheme == "file") then
stripped = "tree:///" .. stripped
elseif spec.scheme == "cache" or spec.scheme == "file" then
@@ -11507,7 +12610,7 @@ local function locate_file_databases()
end
if trace_locating then
if runtime then
- report_resolving("locating list of '%s' (runtime)",path)
+ report_resolving("locating list of '%s' (runtime) (%s)",path,stripped)
else
report_resolving("locating list of '%s' (cached)",path)
end
@@ -11551,6 +12654,39 @@ local function save_file_databases() -- will become cachers
end
end
+function resolvers.renew(hashname)
+ if hashname and hashname ~= "" then
+ local expanded = resolvers.expansion(hashname) or ""
+ if expanded ~= "" then
+ if trace_locating then
+ report_resolving("identifying tree '%s' from '%s'",expanded,hashname)
+ end
+ hashname = expanded
+ else
+ if trace_locating then
+ report_resolving("identifying tree '%s'",hashname)
+ end
+ end
+ local realpath = resolvers.resolve(hashname)
+ if lfs.isdir(realpath) then
+ if trace_locating then
+ report_resolving("using path '%s'",realpath)
+ end
+ methodhandler('generators',hashname)
+ -- could be shared
+ local content = instance.files[hashname]
+ caches.collapsecontent(content)
+ if trace_locating then
+ report_resolving("saving tree '%s'",hashname)
+ end
+ caches.savecontent(hashname,"files",content)
+ -- till here
+ else
+ report_resolving("invalid path '%s'",realpath)
+ end
+ end
+end
+
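-- illustration only (not part of the patch): this helper is what a partial
-- "mtxrun --generate <name>" ends up calling further down; instead of wiping
-- the whole cache it refreshes a single tree, for instance (the prefix and
-- tree name are just examples):
--
--   resolvers.load("nofiles")
--   trackers.enable("resolvers.locating")
--   resolvers.renew("selfautoparent:texmf-local")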
local function load_databases()
locate_file_databases()
if instance.diskcache and not instance.renewcache then
@@ -11838,6 +12974,7 @@ local function collect_files(names)
if dname == "" or find(dname,"^%.") then
dname = false
else
+dname = gsub(dname,"*","%.*")
dname = "/" .. dname .. "$"
end
local hashes = instance.hashes
@@ -11897,9 +13034,22 @@ local function collect_files(names)
return noffiles > 0 and filelist or nil
end
-function resolvers.registerintrees(name)
- if not find(name,"^%.") then
- instance.foundintrees[name] = (instance.foundintrees[name] or 0) + 1 -- maybe only one
+local fit = { }
+
+function resolvers.registerintrees(filename,format,filetype,usedmethod,foundname)
+ local foundintrees = instance.foundintrees
+ if usedmethod == "direct" and filename == foundname and fit[foundname] then
+ -- just an extra lookup after a test on presence
+ else
+ local t = {
+ filename = filename,
+ format = format ~= "" and format or nil,
+ filetype = filetype ~= "" and filetype or nil,
+ usedmethod = usedmethod,
+ foundname = foundname,
+ }
+ fit[foundname] = t
+ foundintrees[#foundintrees+1] = t
end
end
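-- example only (not part of the patch): every successful lookup now leaves a
-- small record in instance.foundintrees, along the lines of (values are
-- examples):
--
--   { filename   = "context.mkiv",
--     format     = "tex",
--     usedmethod = "intree",
--     foundname  = "/path/to/texmf-context/tex/context/base/context.mkiv" }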
@@ -11919,297 +13069,423 @@ end
local preparetreepattern = Cs((P(".")/"%%." + P("-")/"%%-" + P(1))^0 * Cc("$"))
--- this one is split in smaller functions but it needs testing
+-- -- -- begin of main file search routing -- -- -- needs checking as the previous version has been patched
-local function collect_instance_files(filename,askedformat,allresults) -- todo : plugin (scanners, checkers etc)
- local result = { }
- local stamp = nil
- askedformat = askedformat or ""
- filename = collapsepath(filename)
- -- speed up / beware: format problem
- if instance.remember and not allresults then
- stamp = filename .. "--" .. askedformat
- if instance.found[stamp] then
+local collect_instance_files
+
+local function find_analyze(filename,askedformat,allresults)
+ local filetype, wantedfiles, ext = '', { }, fileextname(filename)
+ -- too tricky as filename can be bla.1.2.3:
+ --
+ -- if not suffixmap[ext] then
+ -- wantedfiles[#wantedfiles+1] = filename
+ -- end
+ wantedfiles[#wantedfiles+1] = filename
+ if askedformat == "" then
+ if ext == "" or not suffixmap[ext] then
+ local defaultsuffixes = resolvers.defaultsuffixes
+ for i=1,#defaultsuffixes do
+ local forcedname = filename .. '.' .. defaultsuffixes[i]
+ wantedfiles[#wantedfiles+1] = forcedname
+ filetype = resolvers.formatofsuffix(forcedname)
+ if trace_locating then
+ report_resolving("forcing filetype '%s'",filetype)
+ end
+ end
+ else
+ filetype = resolvers.formatofsuffix(filename)
if trace_locating then
- report_resolving("remembered file '%s'",filename)
+ report_resolving("using suffix based filetype '%s'",filetype)
end
- resolvers.registerintrees(filename) -- for tracing used files
- return instance.found[stamp]
end
- end
- if not dangerous[askedformat] then
- if isreadable(filename) then
- if trace_detail then
- report_resolving("file '%s' found directly",filename)
- end
- if stamp then
- instance.found[stamp] = { filename }
+ else
+ if ext == "" or not suffixmap[ext] then
+ local format_suffixes = suffixes[askedformat]
+ if format_suffixes then
+ for i=1,#format_suffixes do
+ wantedfiles[#wantedfiles+1] = filename .. "." .. format_suffixes[i]
+ end
end
- return { filename }
+ end
+ filetype = askedformat
+ if trace_locating then
+ report_resolving("using given filetype '%s'",filetype)
end
end
+ return filetype, wantedfiles
+end
+
+local function find_direct(filename,allresults)
+ if not dangerous[askedformat] and isreadable(filename) then
+ if trace_detail then
+ report_resolving("file '%s' found directly",filename)
+ end
+ return "direct", { filename }
+ end
+end
+
+local function find_wildcard(filename,allresults)
if find(filename,'%*') then
if trace_locating then
report_resolving("checking wildcard '%s'", filename)
end
- result = resolvers.findwildcardfiles(filename) -- we can use th elocal
- elseif file.is_qualified_path(filename) then
- if isreadable(filename) then
- if trace_locating then
- report_resolving("qualified name '%s'", filename)
- end
- result = { filename }
- else
- local forcedname, ok, suffix = "", false, fileextname(filename)
- if suffix == "" then -- why
- local format_suffixes = askedformat == "" and resolvers.defaultsuffixes or suffixes[askedformat]
- if format_suffixes then
- for i=1,#format_suffixes do
- local s = format_suffixes[i]
- forcedname = filename .. "." .. s
- if isreadable(forcedname) then
- if trace_locating then
- report_resolving("no suffix, forcing format filetype '%s'", s)
- end
- result, ok = { forcedname }, true
- break
- end
- end
- end
- end
- if not ok and suffix ~= "" then
- -- try to find in tree (no suffix manipulation), here we search for the
- -- matching last part of the name
- local basename = filebasename(filename)
- local pattern = lpegmatch(preparetreepattern,filename)
- -- messy .. to be sorted out
- local savedformat = askedformat
- local format = savedformat or ""
- if format == "" then
- askedformat = resolvers.formatofsuffix(suffix)
- end
- if not format then
- askedformat = "othertextfiles" -- kind of everything, maybe texinput is better
- end
- --
- if basename ~= filename then
- local resolved = collect_instance_files(basename,askedformat,allresults)
- if #result == 0 then -- shouldn't this be resolved ?
- local lowered = lower(basename)
- if filename ~= lowered then
- resolved = collect_instance_files(lowered,askedformat,allresults)
- end
- end
- resolvers.format = savedformat
- --
- for r=1,#resolved do
- local rr = resolved[r]
- if find(rr,pattern) then
- result[#result+1], ok = rr, true
- end
- end
- end
- -- a real wildcard:
- --
- -- if not ok then
- -- local filelist = collect_files({basename})
- -- for f=1,#filelist do
- -- local ff = filelist[f][3] or ""
- -- if find(ff,pattern) then
- -- result[#result+1], ok = ff, true
- -- end
- -- end
- -- end
- end
- if not ok and trace_locating then
- report_resolving("qualified name '%s'", filename)
- end
+ local method, result = resolvers.findwildcardfiles(filename)
+ if result then
+ return "wildcard", result
end
- else
- -- search spec
- local filetype, done, wantedfiles, ext = '', false, { }, fileextname(filename)
- -- -- tricky as filename can be bla.1.2.3
- -- if not suffixmap[ext] then --- probably needs to be done elsewhere too
- -- wantedfiles[#wantedfiles+1] = filename
- -- end
- wantedfiles[#wantedfiles+1] = filename
- if askedformat == "" then
- if ext == "" or not suffixmap[ext] then
- local defaultsuffixes = resolvers.defaultsuffixes
- for i=1,#defaultsuffixes do
- local forcedname = filename .. '.' .. defaultsuffixes[i]
- wantedfiles[#wantedfiles+1] = forcedname
- filetype = resolvers.formatofsuffix(forcedname)
+ end
+end
+
+local function find_qualified(filename,allresults) -- this one will be split too
+ if not file.is_qualified_path(filename) then
+ return
+ end
+ if trace_locating then
+ report_resolving("checking qualified name '%s'", filename)
+ end
+ if isreadable(filename) then
+ if trace_detail then
+ report_resolving("qualified file '%s' found", filename)
+ end
+ return "qualified", { filename }
+ end
+ if trace_detail then
+ report_resolving("locating qualified file '%s'", filename)
+ end
+ local forcedname, suffix = "", fileextname(filename)
+ if suffix == "" then -- why
+ local format_suffixes = askedformat == "" and resolvers.defaultsuffixes or suffixes[askedformat]
+ if format_suffixes then
+ for i=1,#format_suffixes do
+ local s = format_suffixes[i]
+ forcedname = filename .. "." .. s
+ if isreadable(forcedname) then
if trace_locating then
- report_resolving("forcing filetype '%s'",filetype)
+ report_resolving("no suffix, forcing format filetype '%s'", s)
end
+ return "qualified", { forcedname }
end
- else
- filetype = resolvers.formatofsuffix(filename)
- if trace_locating then
- report_resolving("using suffix based filetype '%s'",filetype)
+ end
+ end
+ end
+ if suffix and suffix ~= "" then
+ -- try to find in tree (no suffix manipulation), here we search for the
+ -- matching last part of the name
+ local basename = filebasename(filename)
+ local pattern = lpegmatch(preparetreepattern,filename)
+ -- messy .. to be sorted out
+ local savedformat = askedformat
+ local format = savedformat or ""
+ if format == "" then
+ askedformat = resolvers.formatofsuffix(suffix)
+ end
+ if not format then
+ askedformat = "othertextfiles" -- kind of everything, maybe all
+ end
+ --
+ if basename ~= filename then
+ local resolved = collect_instance_files(basename,askedformat,allresults)
+ if #resolved == 0 then
+ local lowered = lower(basename)
+ if filename ~= lowered then
+ resolved = collect_instance_files(lowered,askedformat,allresults)
end
end
- else
- if ext == "" or not suffixmap[ext] then
- local format_suffixes = suffixes[askedformat]
- if format_suffixes then
- for i=1,#format_suffixes do
- wantedfiles[#wantedfiles+1] = filename .. "." .. format_suffixes[i]
+ resolvers.format = savedformat
+ --
+ if #resolved > 0 then
+ local result = { }
+ for r=1,#resolved do
+ local rr = resolved[r]
+ if find(rr,pattern) then
+ result[#result+1] = rr
end
end
+ if #result > 0 then
+ return "qualified", result
+ end
end
- filetype = askedformat
- if trace_locating then
- report_resolving("using given filetype '%s'",filetype)
- end
end
- local typespec = resolvers.variableofformat(filetype)
- local pathlist = resolvers.expandedpathlist(typespec)
- if not pathlist or #pathlist == 0 then
- -- no pathlist, access check only / todo == wildcard
- if trace_detail then
- report_resolving("checking filename '%s', filetype '%s', wanted files '%s'",filename, filetype or '?',concat(wantedfiles," | "))
+ -- a real wildcard:
+ --
+ -- local filelist = collect_files({basename})
+ -- result = { }
+ -- for f=1,#filelist do
+ -- local ff = filelist[f][3] or ""
+ -- if find(ff,pattern) then
+ -- result[#result+1], ok = ff, true
+ -- end
+ -- end
+ -- if #result > 0 then
+ -- return "qualified", result
+ -- end
+ end
+end
+
+local function check_subpath(fname)
+ if isreadable(fname) then
+ if trace_detail then
+ report_resolving("found '%s' by deep scanning",fname)
+ end
+ return fname
+ end
+end
+
+local function find_intree(filename,filetype,wantedfiles,allresults)
+ local typespec = resolvers.variableofformat(filetype)
+ local pathlist = resolvers.expandedpathlist(typespec)
+ local method = "intree"
+ if pathlist and #pathlist > 0 then
+ -- list search
+ local filelist = collect_files(wantedfiles)
+ local dirlist = { }
+ if filelist then
+ for i=1,#filelist do
+ dirlist[i] = filedirname(filelist[i][3]) .. "/" -- was [2] .. gamble
end
- for k=1,#wantedfiles do
- local fname = wantedfiles[k]
- if fname and isreadable(fname) then
- filename, done = fname, true
- result[#result+1] = filejoin('.',fname)
- break
+ end
+ if trace_detail then
+ report_resolving("checking filename '%s'",filename)
+ end
+ local result = { }
+ for k=1,#pathlist do
+ local path = pathlist[k]
+ local pathname = lpegmatch(inhibitstripper,path)
+ local doscan = path == pathname -- no ^!!
+ if not find (pathname,'//$') then
+ doscan = false -- we check directly on the path
+ end
+ local done = false
+ -- using file list
+ if filelist then -- database
+ -- compare list entries with permitted pattern -- /xx /xx//
+ local expression = makepathexpression(pathname)
+ if trace_detail then
+ report_resolving("using pattern '%s' for path '%s'",expression,pathname)
end
- end
- -- this is actually 'other text files' or 'any' or 'whatever'
- local filelist = collect_files(wantedfiles)
- local fl = filelist and filelist[1]
- if fl then
- filename = fl[3] -- not local?
- result[#result+1] = resolvers.resolve(filename)
- done = true
- end
- else
- -- list search
- local filelist = collect_files(wantedfiles)
- local dirlist = { }
- if filelist then
- for i=1,#filelist do
- dirlist[i] = filedirname(filelist[i][3]) .. "/" -- was [2] .. gamble
- end
- end
- if trace_detail then
- report_resolving("checking filename '%s'",filename)
- end
- for k=1,#pathlist do
- local path = pathlist[k]
- local pathname = lpegmatch(inhibitstripper,path)
- local doscan = path == pathname -- no ^!!
- if not find (pathname, '//$') then doscan = false end
- done = false
- -- using file list
- if filelist then
- -- compare list entries with permitted pattern -- /xx /xx//
- local expression = makepathexpression(pathname)
- if trace_detail then
- report_resolving("using pattern '%s' for path '%s'",expression,pathname)
- end
- for k=1,#filelist do
- local fl = filelist[k]
- local f = fl[2]
- local d = dirlist[k]
- if find(d,expression) then
- -- todo, test for readable
- result[#result+1] = resolvers.resolve(fl[3]) -- no shortcut
- done = true
- if allresults then
- if trace_detail then
- report_resolving("match to '%s' in hash for file '%s' and path '%s', continue scanning",expression,f,d)
- end
- else
- if trace_detail then
- report_resolving("match to '%s' in hash for file '%s' and path '%s', quit scanning",expression,f,d)
- end
- break
+ for k=1,#filelist do
+ local fl = filelist[k]
+ local f = fl[2]
+ local d = dirlist[k]
+ if find(d,expression) then
+ -- todo, test for readable
+ result[#result+1] = resolvers.resolve(fl[3]) -- no shortcut
+ done = true
+ if allresults then
+ if trace_detail then
+ report_resolving("match to '%s' in hash for file '%s' and path '%s', continue scanning",expression,f,d)
end
- elseif trace_detail then
- report_resolving("no match to '%s' in hash for file '%s' and path '%s'",expression,f,d)
+ else
+ if trace_detail then
+ report_resolving("match to '%s' in hash for file '%s' and path '%s', quit scanning",expression,f,d)
+ end
+ break
end
+ elseif trace_detail then
+ report_resolving("no match to '%s' in hash for file '%s' and path '%s'",expression,f,d)
end
end
- if not done then
- -- check if on disk / unchecked / does not work at all / also zips
- -- TH perhaps it did not work because of missing resolvers.resolve()
- -- and resolvers.scanfiles() calls ...
- pathname = resolvers.resolve(pathname)
- local scheme = url.hasscheme(pathname)
- if not scheme or scheme == "file" then
- local pname = gsub(pathname,"%.%*$",'')
- if not find(pname,"%*") then
- local ppname = gsub(pname,"/+$","")
- if can_be_dir(ppname) then
- local files = {}
- if doscan then files = resolvers.scanfiles(ppname) end
+ end
+ if done then
+ method = "database"
+ else
+ method = "filesystem" -- bonus, even when !! is specified
+ pathname = gsub(pathname,"/+$","")
+ pathname = resolvers.resolve(pathname)
+ local scheme = url.hasscheme(pathname)
+ if not scheme or scheme == "file" then
+ local pname = gsub(pathname,"%.%*$",'')
+ if not find(pname,"%*") then
+ if can_be_dir(pname) then
+ -- quick root scan first
+ for k=1,#wantedfiles do
+ local w = wantedfiles[k]
+ local fname = check_subpath(filejoin(pname,w))
+ if fname then
+ result[#result+1] = fname
+ done = true
+ if not allresults then
+ break
+ end
+ end
+ end
+ if not done and doscan then
+ -- collect files in path (and cache the result)
+ local files = resolvers.simplescanfiles(pname,false,true)
for k=1,#wantedfiles do
local w = wantedfiles[k]
- local subpath = ''
- if files[w] then
- subpath = type(files[w]) == "table" and files[w][1] or files[w]
- end
- local fname = filejoin(ppname,subpath,w)
- if isreadable(fname) then
- if trace_detail then
- report_resolving("found '%s' by scanning",fname)
+ local subpath = files[w]
+ if not subpath or subpath == "" then
+ -- rootscan already done
+ elseif type(subpath) == "string" then
+ local fname = check_subpath(filejoin(pname,subpath,w))
+ if fname then
+ result[#result+1] = fname
+ done = true
+ if not allresults then
+ break
+ end
+ end
+ else
+ for i=1,#subpath do
+ local sp = subpath[i]
+ if sp == "" then
+ -- roottest already done
+ else
+ local fname = check_subpath(filejoin(pname,sp,w))
+ if fname then
+ result[#result+1] = fname
+ done = true
+ if not allresults then
+ break
+ end
+ end
+ end
+ end
+ if done and not allresults then
+ break
end
- result[#result+1] = fname
- done = true
- if not allresults then break end
end
end
- else
- -- no access needed for non existing path, speedup (esp in large tree with lots of fake)
end
end
+ else
+ -- no access needed for non existing path, speedup (esp in large tree with lots of fake)
end
end
- if not done and doscan then
- -- todo: slow path scanning ... although we now have tree:// supported in $TEXMF
- end
- if done and not allresults then break end
+ end
+ -- todo recursive scanning
+ if done and not allresults then
+ break
end
end
+ if #result > 0 then
+ return method, result
+ end
end
- for k=1,#result do
- local rk = collapsepath(result[k])
- result[k] = rk
- resolvers.registerintrees(rk) -- for tracing used files
+end
+
+local function find_onpath(filename,filetype,wantedfiles,allresults)
+ if trace_detail then
+ report_resolving("checking filename '%s', filetype '%s', wanted files '%s'",filename, filetype or '?',concat(wantedfiles," | "))
+ end
+ local result = { }
+ for k=1,#wantedfiles do
+ local fname = wantedfiles[k]
+ if fname and isreadable(fname) then
+ filename = fname
+ result[#result+1] = filejoin('.',fname)
+ if not allresults then
+ break
+ end
+ end
end
- if stamp then
- instance.found[stamp] = result
+ if #result > 0 then
+ return "onpath", result
end
- return result
end
--- -- -- begin of main file search routing -- -- --
-
-
-
-
-
-
-
+local function find_otherwise(filename,filetype,wantedfiles,allresults) -- other text files | any | whatever
+ local filelist = collect_files(wantedfiles)
+ local fl = filelist and filelist[1]
+ if fl then
+ return "otherwise", { resolvers.resolve(fl[3]) } -- filename
+ end
+end
+-- we could loop over the six finder functions below but then we'd always
+-- have to run the analyze step first
+collect_instance_files = function(filename,askedformat,allresults) -- uses nested
+ askedformat = askedformat or ""
+ filename = collapsepath(filename)
+ if allresults then
+ -- no need for caching, only used for tracing
+ local filetype, wantedfiles = find_analyze(filename,askedformat)
+ local results = {
+ { find_direct (filename,true) },
+ { find_wildcard (filename,true) },
+ { find_qualified(filename,true) },
+ { find_intree (filename,filetype,wantedfiles,true) },
+ { find_onpath (filename,filetype,wantedfiles,true) },
+ { find_otherwise(filename,filetype,wantedfiles,true) },
+ }
+ local result, status, done = { }, { }, { }
+ for k, r in next, results do
+ local method, list = r[1], r[2]
+ if method and list then
+ for i=1,#list do
+ local c = collapsepath(list[i])
+ if not done[c] then
+ result[#result+1] = c
+ done[c] = true
+ end
+ status[#status+1] = format("%-10s: %s",method,c)
+ end
+ end
+ end
+ if trace_detail then
+ report_resolving("lookup status: %s",table.serialize(status,filename))
+ end
+ return result, status
+ else
+ local method, result, stamp, filetype, wantedfiles
+ if instance.remember then
+ stamp = format("%s--%s", filename, askedformat)
+ result = stamp and instance.found[stamp]
+ if result then
+ if trace_locating then
+ report_resolving("remembered file '%s'",filename)
+ end
+ return result
+ end
+ end
+ method, result = find_direct(filename)
+ if not result then
+ method, result = find_wildcard(filename)
+ if not result then
+ method, result = find_qualified(filename)
+ if not result then
+ filetype, wantedfiles = find_analyze(filename,askedformat)
+ method, result = find_intree(filename,filetype,wantedfiles)
+ if not result then
+ method, result = find_onpath(filename,filetype,wantedfiles)
+ if not result then
+ method, result = find_otherwise(filename,filetype,wantedfiles)
+ end
+ end
+ end
+ end
+ end
+ if result and #result > 0 then
+ local foundname = collapsepath(result[1])
+ resolvers.registerintrees(filename,askedformat,filetype,method,foundname)
+ result = { foundname }
+ else
+ result = { } -- maybe false
+ end
+ if stamp then
+ if trace_locating then
+ report_resolving("remembering file '%s'",filename)
+ end
+ instance.found[stamp] = result
+ end
+ return result
+ end
+end
-- -- -- end of main file search routing -- -- --
+
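-- illustration only (not part of the patch): the entry point now tries the
-- finders in a fixed order (direct, wildcard, qualified, intree, onpath,
-- otherwise); with allresults set it also returns a status list recording
-- which method produced each hit, e.g.
--
--   local result, status = collect_instance_files("context.mkiv","",true)
--   -- status[1] could be "intree    : /path/to/tex/context/base/context.mkiv"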
local function findfiles(filename,filetype,allresults)
- local result = collect_instance_files(filename,filetype or "",allresults)
- if #result == 0 then
+ local result, status = collect_instance_files(filename,filetype or "",allresults)
+ if not result or #result == 0 then
local lowered = lower(filename)
if filename ~= lowered then
- return collect_instance_files(lowered,filetype or "",allresults)
+ result, status = collect_instance_files(lowered,filetype or "",allresults)
end
end
- return result
+ return result or { }, status
end
function resolvers.findfiles(filename,filetype)
@@ -12370,6 +13646,10 @@ function resolvers.load(option)
return files and next(files) and true
end
+function resolvers.loadtime()
+ return statistics.elapsedtime(instance)
+end
+
local function report(str)
if trace_locating then
report_resolving(str) -- has already verbose
@@ -12383,6 +13663,9 @@ function resolvers.dowithfilesandreport(command, files, ...) -- will move
if trace_locating then
report('') -- ?
end
+ if type(files) == "string" then
+ files = { files }
+ end
for f=1,#files do
local file = files[f]
local result = command(file,...)
@@ -12540,6 +13823,8 @@ local cleanpath, findgivenfile, expansion = resolvers.cleanpath, resolvers.findg
local getenv = resolvers.getenv -- we can probably also use resolvers.expansion
local P, Cs, lpegmatch = lpeg.P, lpeg.Cs, lpeg.match
+-- getenv = function(...) return resolvers.getenv(...) end -- needs checking (definitions changes later on)
+
prefixes.environment = function(str)
return cleanpath(expansion(str))
end
@@ -12578,7 +13863,7 @@ end
prefixes.filename = function(str)
local fullname = findgivenfile(str) or ""
- return cleanpath(file.basename((fullname ~= "" and fullname) or str))
+ return cleanpath(file.basename((fullname ~= "" and fullname) or str)) -- no cleanpath needed here
end
prefixes.pathname = function(str)
@@ -12621,8 +13906,9 @@ function resolvers.allprefixes(separator)
end
local function _resolve_(method,target)
- if prefixes[method] then
- return prefixes[method](target)
+ local action = prefixes[method]
+ if action then
+ return action(target)
else
return method .. ":" .. target
end
@@ -12637,7 +13923,7 @@ end
local function resolve(str) -- use schemes, this one is then for the commandline only
local res = resolved[str]
if not res then
- res = gsub(str,"([a-z][a-z]+):([^ \"\';]*)",_resolve_)
+ res = gsub(str,"([a-z][a-z]+):([^ \"\';]*)",_resolve_) -- home:xx;selfautoparent:xx; etc
resolved[str] = res
abstract[res] = str
end
@@ -12797,7 +14083,7 @@ end
function generators.file(specification)
local path = specification.filename
- local content = resolvers.scanfiles(path)
+ local content = resolvers.scanfiles(path,false,true) -- scan once
resolvers.registerfilehash(path,content,true)
end
@@ -13152,6 +14438,18 @@ local archives = zip.archives
zip.registeredfiles = zip.registeredfiles or { }
local registeredfiles = zip.registeredfiles
+local limited = false
+
+directives.register("system.inputmode", function(v)
+ if not limited then
+ local i_limiter = io.i_limiter(v)
+ if i_limiter then
+ zip.open = i_limiter.protect(zip.open)
+ limited = true
+ end
+ end
+end)
+
local function validzip(str) -- todo: use url splitter
if not find(str,"^zip://") then
return "zip:///" .. str
@@ -13419,11 +14717,11 @@ function resolvers.finders.tree(specification)
end
function resolvers.locators.tree(specification)
- -- TH: resolvers.resolve() call was missing here
- local name = resolvers.resolve(specification.filename)
- if name ~= '' and lfs.isdir(name) then
+ local name = specification.filename
+ local realname = resolvers.resolve(name) -- no shortcut
+ if realname and realname ~= '' and lfs.isdir(realname) then
if trace_locating then
- report_trees("locator '%s' found",name)
+ report_trees("locator '%s' found",realname)
end
resolvers.appendhash('tree',name,false) -- don't cache
elseif trace_locating then
@@ -13437,6 +14735,8 @@ function resolvers.hashers.tree(specification)
report_trees("analysing '%s'",name)
end
resolvers.methodhandler("hashers",name)
+
+ resolvers.generators.file(specification)
end
resolvers.concatinators.tree = resolvers.concatinators.file
@@ -13596,8 +14896,15 @@ local function loaded(libpaths,name,simple)
end
package.loaders[2] = function(name) -- was [#package.loaders+1]
- if trace_locating then -- mode detail
- report_libraries("! locating '%s'",name)
+ if file.suffix(name) == "" then
+ name = file.addsuffix(name,"lua") -- maybe a list
+ if trace_locating then -- mode detail
+ report_libraries("! locating '%s' with forced suffix",name)
+ end
+ else
+ if trace_locating then -- mode detail
+ report_libraries("! locating '%s'",name)
+ end
end
for i=1,#libformats do
local format = libformats[i]
@@ -13804,7 +15111,13 @@ function resolvers.load_tree(tree,resolve)
-- Beware, we need to obey the relocatable autoparent so we
-- set TEXMFCNF to its raw value. This is somewhat tricky when
-- we run a mkii job from within. Therefore, in mtxrun, there
- -- is a resolve applied when we're in mkii/kpse mode.
+ -- is a resolve applied when we're in mkii/kpse mode or when
+ -- --resolve is passed to mtxrun. Maybe we should also set the
+    -- local AUTOPARENT etc. although these are always set anew.
+
+ if resolve then
+ resolvers.luacnfspec = resolvers.resolve(resolvers.luacnfspec)
+ end
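    -- rough sketch (not part of the patch): resolving the spec turns the
    -- prefixed form into plain paths before TEXMFCNF is exported, roughly
    --
    --   "selfautoparent:texmf{-local,}/web2c"  becomes  "/opt/tex/texmf{-local,}/web2c"
    --
    -- where "/opt/tex" stands for whatever SELFAUTOPARENT resolves to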
setenv('SELFAUTOPARENT', newroot)
setenv('SELFAUTODIR', newtree)
@@ -13812,11 +15125,12 @@ function resolvers.load_tree(tree,resolve)
setenv('TEXROOT', newroot)
setenv('TEXOS', texos)
setenv('TEXMFOS', texmfos)
- setenv('TEXMFCNF', resolvers.luacnfspec, not resolve)
- setenv("PATH", newpath .. io.pathseparator .. getenv("PATH"))
+ setenv('TEXMFCNF', resolvers.luacnfspec,true) -- already resolved
+ setenv('PATH', newpath .. io.pathseparator .. getenv('PATH'))
report_tds("changing from root '%s' to '%s'",oldroot,newroot)
- report_tds("prepending '%s' to binary path",newpath)
+ report_tds("prepending '%s' to PATH",newpath)
+ report_tds("setting TEXMFCNF to '%s'",resolvers.luacnfspec)
report_tds()
end
end
@@ -13837,6 +15151,7 @@ if not modules then modules = { } end modules ['data-lst'] = {
-- used in mtxrun, can be loaded later .. todo
local find, concat, upper, format = string.find, table.concat, string.upper, string.format
+local fastcopy, sortedpairs = table.fastcopy, table.sortedpairs
resolvers.listers = resolvers.listers or { }
@@ -13867,10 +15182,10 @@ function resolvers.listers.variables(pattern)
end
end
end
- local env = table.fastcopy(environment)
- local var = table.fastcopy(variables)
- local exp = table.fastcopy(expansions)
- for key, value in table.sortedpairs(configured) do
+ local env = fastcopy(environment)
+ local var = fastcopy(variables)
+ local exp = fastcopy(expansions)
+ for key, value in sortedpairs(configured) do
if key ~= "" and (pattern == "" or find(upper(key),pattern)) then
report_lists(key)
report_lists(" env: %s",tabstr(rawget(environment,key)) or "unset")
@@ -13879,9 +15194,9 @@ function resolvers.listers.variables(pattern)
report_lists(" res: %s",resolvers.resolve(expansions[key]) or "unset")
end
end
- instance.environment = table.fastcopy(env)
- instance.variables = table.fastcopy(var)
- instance.expansions = table.fastcopy(exp)
+ instance.environment = fastcopy(env)
+ instance.variables = fastcopy(var)
+ instance.expansions = fastcopy(exp)
end
function resolvers.listers.configurations(report)
@@ -14168,8 +15483,8 @@ own = { } -- not local, might change
own.libs = { -- order can be made better
'l-string.lua',
- 'l-lpeg.lua',
'l-table.lua',
+ 'l-lpeg.lua',
'l-io.lua',
'l-number.lua',
'l-set.lua',
@@ -14240,7 +15555,7 @@ end
-- End of hack.
-local format, gsub, gmatch, match = string.format, string.gsub, string.gmatch, string.match
+local format, gsub, gmatch, match, find = string.format, string.gsub, string.gmatch, string.match, string.find
local concat = table.concat
own.name = (environment and environment.ownname) or arg[0] or 'mtxrun.lua'
@@ -14362,7 +15677,7 @@ local helpinfo = [[
--resolve resolve prefixed arguments
--ctxlua run internally (using preloaded libs)
--internal run script using built in libraries (same as --ctxlua)
---locate locate given filename
+--locate locate given filename in database (default) or system (--first --all --detail)
--autotree use texmf tree cf. env 'texmfstart_tree' or 'texmfstarttree'
--tree=pathtotree use given texmf tree (default file: 'setuptex.tmf')
@@ -14410,7 +15725,7 @@ local helpinfo = [[
local application = logs.application {
name = "mtxrun",
- banner = "ConTeXt TDS Runner Tool 1.30",
+ banner = "ConTeXt TDS Runner Tool 1.31",
helpinfo = helpinfo,
}
@@ -14455,12 +15770,12 @@ runners.registered = {
xmltools = { 'xmltools.rb', true },
-- luatools = { 'luatools.lua', true },
mtxtools = { 'mtxtools.rb', true },
- pdftrimwhite = { 'pdftrimwhite.pl', false }
+ pdftrimwhite = { 'pdftrimwhite.pl', false },
}
runners.launchers = {
windows = { },
- unix = { }
+ unix = { },
}
-- like runners.libpath("framework"): looks on script's subpath
@@ -14487,7 +15802,7 @@ function runners.prepare()
end
local touchname = environment.argument("iftouched")
if type(touchname) == "string" and touchname ~= "" then
- local oldname, newname = string.split(touchname, ",")
+ local oldname, newname = string.splitup(touchname, ",")
if oldname and newname and oldname ~= "" and newname ~= "" then
if not file.needs_updating(oldname,newname) then
if e_verbose then
@@ -14521,11 +15836,11 @@ function runners.execute_script(fullname,internal,nosplit)
if path ~= "" then
result = fullname
elseif name then
- name = name:gsub("^int[%a]*:",function()
+ name = gsub(name,"^int[%a]*:",function()
internal = true
return ""
end )
- name = name:gsub("^script:","")
+ name = gsub(name,"^script:","")
if suffix == "" and runners.registered[name] and runners.registered[name][1] then
name = runners.registered[name][1]
suffix = file.extname(name)
@@ -14555,13 +15870,15 @@ function runners.execute_script(fullname,internal,nosplit)
environment.ownscript = result
dofile(result)
else
- local quotedresult = result
- if string.match(result,' ') and not string.match(result,"^\".*\"$") then quotedresult = '"' .. result .. '"' end
local binary = runners.applications[file.extname(result)]
+ result = string.quoted(string.unquoted(result))
+ -- if string.match(result,' ') and not string.match(result,"^\".*\"$") then
+ -- result = '"' .. result .. '"'
+ -- end
if binary and binary ~= "" then
- quotedresult = binary .. " " .. quotedresult
+ result = binary .. " " .. result
end
- local command = quotedresult .. " " .. environment.reconstructcommandline(environment.arguments_after,noquote)
+ local command = result .. " " .. environment.reconstructcommandline(environment.arguments_after,noquote)
if e_verbose then
report()
report("executing: %s",command)
@@ -14606,7 +15923,7 @@ function runners.execute_program(fullname)
local before, after = environment.splitarguments(fullname)
for k=1,#after do after[k] = resolvers.resolve(after[k]) end
environment.initializearguments(after)
- fullname = fullname:gsub("^bin:","")
+ fullname = gsub(fullname,"^bin:","")
local command = fullname .. " " .. (environment.reconstructcommandline(after or "",noquote) or "")
report()
report("executing: %s",command)
@@ -14667,10 +15984,24 @@ function runners.resolve_string(filename)
end
end
-function runners.locate_file(filename)
- -- differs from texmfstart where locate appends .com .exe .bat ... todo
+-- differs from texmfstart where locate appends .com .exe .bat ... todo
+
+function runners.locate_file(filename) -- was given file but only searches in tree
if filename and filename ~= "" then
- runners.report_location(resolvers.findgivenfile(filename))
+ if environment.argument("first") then
+ runners.report_location(resolvers.findfile(filename))
+ -- resolvers.dowithfilesandreport(resolvers.findfile,filename)
+ elseif environment.argument("all") then
+ local result, status = resolvers.findfiles(filename)
+ if status and environment.argument("detail") then
+ runners.report_location(status)
+ else
+ runners.report_location(result)
+ end
+ else
+ runners.report_location(resolvers.findgivenfile(filename))
+ -- resolvers.dowithfilesandreport(resolvers.findgivenfile,filename)
+ end
end
end
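-- example invocations (not part of the patch) of the extended lookup, matching
-- the --first/--all/--detail handling above:
--
--   mtxrun --locate context.mkiv                   (database lookup, findgivenfile)
--   mtxrun --locate --first context.mkiv           (first match, findfile)
--   mtxrun --locate --all --detail context.mkiv    (all matches plus the method used)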
@@ -14679,12 +16010,12 @@ function runners.locate_platform()
end
function runners.report_location(result)
- if e_verbose then
- reportline()
- if result and result ~= "" then
- report(result)
- else
- report("not found")
+ if type(result) == "table" then
+ for i=1,#result do
+ if i > 1 then
+ io.write("\n")
+ end
+ io.write(result[i])
end
else
io.write(result)
@@ -14742,8 +16073,8 @@ function resolvers.launch(str)
end
function runners.launch_file(filename)
- instance.allresults = true
trackers.enable("resolvers.locating")
+ local allresults = environment.arguments["all"]
local pattern = environment.arguments["pattern"]
if not pattern or pattern == "" then
pattern = filename
@@ -14751,15 +16082,15 @@ function runners.launch_file(filename)
if not pattern or pattern == "" then
report("provide name or --pattern=")
else
- local t = resolvers.findfiles(pattern)
+ local t = resolvers.findfiles(pattern,nil,allresults)
if not t or #t == 0 then
- t = resolvers.findfiles("*/" .. pattern)
+ t = resolvers.findfiles("*/" .. pattern,nil,allresults)
end
if not t or #t == 0 then
- t = resolvers.findfiles("*/" .. pattern .. "*")
+ t = resolvers.findfiles("*/" .. pattern .. "*",nil,allresults)
end
if t and #t > 0 then
- if environment.arguments["all"] then
+ if allresults then
for _, v in pairs(t) do
report("launching %s", v)
resolvers.launch(v)
@@ -14774,6 +16105,11 @@ function runners.launch_file(filename)
end
end
+local mtxprefixes = {
+ { "^mtx%-", "mtx-" },
+ { "^mtx%-t%-", "mtx-t-" },
+}
+
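-- illustration only (not part of the patch): the prefix loop below tries a
-- bare name in both script namespaces, for example
--
--   "babel"      is tried as "mtx-babel" first and then as "mtx-t-babel"
--   "mtx-fonts"  already matches the first prefix and is used as is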
function runners.find_mtx_script(filename)
local function found(name)
local path = file.dirname(name)
@@ -14799,24 +16135,27 @@ function runners.find_mtx_script(filename)
return fullname
end
-- mtx- prefix checking
- local mtxprefix = (filename:find("^mtx%-") and "") or "mtx-"
- -- context namespace, mtx-<filename>
- fullname = mtxprefix .. filename
- fullname = found(fullname) or resolvers.findfile(fullname)
- if fullname and fullname ~= "" then
- return fullname
- end
- -- context namespace, mtx-<filename>s
- fullname = mtxprefix .. basename .. "s" .. "." .. suffix
- fullname = found(fullname) or resolvers.findfile(fullname)
- if fullname and fullname ~= "" then
- return fullname
- end
- -- context namespace, mtx-<filename minus trailing s>
- fullname = mtxprefix .. basename:gsub("s$","") .. "." .. suffix
- fullname = found(fullname) or resolvers.findfile(fullname)
- if fullname and fullname ~= "" then
- return fullname
+ for i=1,#mtxprefixes do
+ local mtxprefix = mtxprefixes[i]
+ mtxprefix = find(filename,mtxprefix[1]) and "" or mtxprefix[2]
+ -- context namespace, mtx-<filename>
+ fullname = mtxprefix .. filename
+ fullname = found(fullname) or resolvers.findfile(fullname)
+ if fullname and fullname ~= "" then
+ return fullname
+ end
+ -- context namespace, mtx-<filename>s
+ fullname = mtxprefix .. basename .. "s" .. "." .. suffix
+ fullname = found(fullname) or resolvers.findfile(fullname)
+ if fullname and fullname ~= "" then
+ return fullname
+ end
+ -- context namespace, mtx-<filename minus trailing s>
+ fullname = mtxprefix .. gsub(basename,"s$","") .. "." .. suffix
+ fullname = found(fullname) or resolvers.findfile(fullname)
+ if fullname and fullname ~= "" then
+ return fullname
+ end
end
-- context namespace, just <filename>
fullname = resolvers.findfile(filename)
@@ -14963,9 +16302,11 @@ instance.lsrmode = environment.argument("lsr") or false
local is_mkii_stub = runners.registered[file.removesuffix(file.basename(filename))]
-if environment.argument("usekpse") or environment.argument("forcekpse") or is_mkii_stub then
+local e_argument = environment.argument
+
+if e_argument("usekpse") or e_argument("forcekpse") or is_mkii_stub then
- resolvers.load_tree(environment.argument('tree'),true) -- force resolve of TEXMFCNF
+ resolvers.load_tree(e_argument('tree'),true) -- force resolve of TEXMFCNF
os.setenv("engine","")
os.setenv("progname","")
@@ -14978,7 +16319,7 @@ if environment.argument("usekpse") or environment.argument("forcekpse") or is_mk
other = "other text files",
}
- local progname = environment.argument("progname") or 'context'
+ local progname = e_argument("progname") or 'context'
local function kpse_initialized()
texconfig.kpse_init = true
@@ -14993,7 +16334,7 @@ if environment.argument("usekpse") or environment.argument("forcekpse") or is_mk
local findfile = resolvers.findfile
local showpath = resolvers.showpath
- if environment.argument("forcekpse") then
+ if e_argument("forcekpse") then
function resolvers.findfile(name,kind)
return (kpse_initialized():find_file(resolvers.cleanpath(name),(kind ~= "" and (remapper[kind] or kind)) or "tex") or "") or ""
@@ -15002,7 +16343,7 @@ if environment.argument("usekpse") or environment.argument("forcekpse") or is_mk
return (kpse_initialized():show_path(name)) or ""
end
- elseif environment.argument("usekpse") or is_mkii_stub then
+ elseif e_argument("usekpse") or is_mkii_stub then
resolvers.load()
@@ -15041,12 +16382,12 @@ else
end
end
- resolvers.load_tree(environment.argument('tree'))
+ resolvers.load_tree(e_argument('tree'),e_argument("resolve"))
end
-if environment.argument("selfmerge") then
+if e_argument("selfmerge") then
-- embed used libraries
@@ -15056,27 +16397,27 @@ if environment.argument("selfmerge") then
utilities.merger.selfmerge(own.name,own.libs,{ found })
end
-elseif environment.argument("selfclean") then
+elseif e_argument("selfclean") then
-- remove embedded libraries
runners.loadbase()
utilities.merger.selfclean(own.name)
-elseif environment.argument("selfupdate") then
+elseif e_argument("selfupdate") then
runners.loadbase()
trackers.enable("resolvers.locating")
resolvers.updatescript(own.name,"mtxrun")
-elseif environment.argument("ctxlua") or environment.argument("internal") then
+elseif e_argument("ctxlua") or e_argument("internal") then
-- run a script by loading it (using libs)
runners.loadbase()
ok = runners.execute_script(filename,true)
-elseif environment.argument("script") or environment.argument("scripts") then
+elseif e_argument("script") or e_argument("scripts") then
-- run a script by loading it (using libs), pass args
@@ -15087,109 +16428,110 @@ elseif environment.argument("script") or environment.argument("scripts") then
ok = runners.execute_ctx_script(filename)
end
-elseif environment.argument("execute") then
+elseif e_argument("execute") then
-- execute script
runners.loadbase()
ok = runners.execute_script(filename)
-elseif environment.argument("direct") then
+elseif e_argument("direct") then
-- equals bin:
runners.loadbase()
ok = runners.execute_program(filename)
-elseif environment.argument("edit") then
+elseif e_argument("edit") then
-- edit file
runners.loadbase()
runners.edit_script(filename)
-elseif environment.argument("launch") then
+elseif e_argument("launch") then
runners.loadbase()
runners.launch_file(filename)
-elseif environment.argument("makestubs") then
+elseif e_argument("makestubs") then
    -- make stubs (deprecated)
runners.handle_stubs(true)
-elseif environment.argument("removestubs") then
+elseif e_argument("removestubs") then
    -- remove stubs (deprecated)
runners.loadbase()
runners.handle_stubs(false)
-elseif environment.argument("resolve") then
+elseif e_argument("resolve") then
-- resolve string
runners.loadbase()
runners.resolve_string(filename)
-elseif environment.argument("locate") then
+elseif e_argument("locate") then
- -- locate file
+ -- locate file (only database)
runners.loadbase()
runners.locate_file(filename)
-elseif environment.argument("platform") or environment.argument("show-platform") then
+elseif e_argument("platform") or e_argument("show-platform") then
-- locate platform
runners.loadbase()
runners.locate_platform()
-elseif environment.argument("prefixes") then
+elseif e_argument("prefixes") then
runners.loadbase()
runners.prefixes()
-elseif environment.argument("timedrun") then
+elseif e_argument("timedrun") then
    -- timed run
runners.loadbase()
runners.timedrun(filename)
-elseif environment.argument("variables") or environment.argument("show-variables") or environment.argument("expansions") or environment.argument("show-expansions") then
+elseif e_argument("variables") or e_argument("show-variables") or e_argument("expansions") or e_argument("show-expansions") then
-- luatools: runners.execute_ctx_script("mtx-base","--expansions",filename)
resolvers.load("nofiles")
- resolvers.listers.variables(environment.argument("pattern"))
+ resolvers.listers.variables(e_argument("pattern"))
-elseif environment.argument("configurations") or environment.argument("show-configurations") then
+elseif e_argument("configurations") or e_argument("show-configurations") then
-- luatools: runners.execute_ctx_script("mtx-base","--configurations",filename)
resolvers.load("nofiles")
resolvers.listers.configurations()
-elseif environment.argument("find-file") then
+elseif e_argument("find-file") then
-- luatools: runners.execute_ctx_script("mtx-base","--find-file",filename)
resolvers.load()
- local e_pattern = environment.argument("pattern")
- local e_format = environment.argument("format")
+ local e_all = e_argument("all")
+ local e_pattern = e_argument("pattern")
+ local e_format = e_argument("format")
+ local finder = e_all and resolvers.findfiles or resolvers.findfile
if not e_pattern then
runners.register_arguments(filename)
environment.initializearguments(environment.arguments_after)
- resolvers.dowithfilesandreport(resolvers.findfiles,environment.files,e_format)
+ resolvers.dowithfilesandreport(finder,environment.files,e_format)
elseif type(e_pattern) == "string" then
- instance.allresults = true -- brrrr
- resolvers.dowithfilesandreport(resolvers.findfiles,{ e_pattern }, e_format)
+ resolvers.dowithfilesandreport(finder,{ e_pattern },e_format)
end
-elseif environment.argument("find-path") then
+elseif e_argument("find-path") then
-- luatools: runners.execute_ctx_script("mtx-base","--find-path",filename)
@@ -15201,7 +16543,7 @@ elseif environment.argument("find-path") then
print(path)
end
-elseif environment.argument("expand-braces") then
+elseif e_argument("expand-braces") then
-- luatools: runners.execute_ctx_script("mtx-base","--expand-braces",filename)
@@ -15210,7 +16552,7 @@ elseif environment.argument("expand-braces") then
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.expandbraces, environment.files)
-elseif environment.argument("expand-path") then
+elseif e_argument("expand-path") then
-- luatools: runners.execute_ctx_script("mtx-base","--expand-path",filename)
@@ -15219,7 +16561,7 @@ elseif environment.argument("expand-path") then
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.expandpath, environment.files)
-elseif environment.argument("expand-var") or environment.argument("expand-variable") then
+elseif e_argument("expand-var") or e_argument("expand-variable") then
-- luatools: runners.execute_ctx_script("mtx-base","--expand-var",filename)
@@ -15228,7 +16570,7 @@ elseif environment.argument("expand-var") or environment.argument("expand-variab
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.expansion, environment.files)
-elseif environment.argument("show-path") or environment.argument("path-value") then
+elseif e_argument("show-path") or e_argument("path-value") then
-- luatools: runners.execute_ctx_script("mtx-base","--show-path",filename)
@@ -15237,7 +16579,7 @@ elseif environment.argument("show-path") or environment.argument("path-value") t
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.showpath, environment.files)
-elseif environment.argument("var-value") or environment.argument("show-value") then
+elseif e_argument("var-value") or e_argument("show-value") then
-- luatools: runners.execute_ctx_script("mtx-base","--show-value",filename)
@@ -15246,28 +16588,36 @@ elseif environment.argument("var-value") or environment.argument("show-value") t
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.variable,environment.files)
-elseif environment.argument("format-path") then
+elseif e_argument("format-path") then
-- luatools: runners.execute_ctx_script("mtx-base","--format-path",filename)
resolvers.load()
report(caches.getwritablepath("format"))
-elseif environment.argument("pattern") then
+elseif e_argument("pattern") then
-- luatools
- runners.execute_ctx_script("mtx-base","--pattern='" .. environment.argument("pattern") .. "'",filename)
+ runners.execute_ctx_script("mtx-base","--pattern='" .. e_argument("pattern") .. "'",filename)
-elseif environment.argument("generate") then
+elseif e_argument("generate") then
-- luatools
- instance.renewcache = true
- trackers.enable("resolvers.locating")
- resolvers.load()
+ if filename and filename ~= "" then
+ resolvers.load("nofiles")
+ trackers.enable("resolvers.locating")
+ resolvers.renew(filename)
+ else
+ instance.renewcache = true
+ trackers.enable("resolvers.locating")
+ resolvers.load()
+ end
-elseif environment.argument("make") or environment.argument("ini") or environment.argument("compile") then
+ e_verbose = true
+
+elseif e_argument("make") or e_argument("ini") or e_argument("compile") then
-- luatools: runners.execute_ctx_script("mtx-base","--make",filename)
@@ -15275,29 +16625,33 @@ elseif environment.argument("make") or environment.argument("ini") or environmen
trackers.enable("resolvers.locating")
environment.make_format(filename)
-elseif environment.argument("run") then
+elseif e_argument("run") then
-- luatools
runners.execute_ctx_script("mtx-base","--run",filename)
-elseif environment.argument("fmt") then
+elseif e_argument("fmt") then
-- luatools
runners.execute_ctx_script("mtx-base","--fmt",filename)
-elseif environment.argument("help") and filename=='base' then
+elseif e_argument("help") and filename=='base' then
-- luatools
runners.execute_ctx_script("mtx-base","--help")
-elseif environment.argument("help") or filename=='help' or filename == "" then
+elseif e_argument("version") then
+
+ application.version()
+
+elseif e_argument("help") or filename=='help' or filename == "" then
application.help()
-elseif filename:find("^bin:") then
+elseif find(filename,"^bin:") then
runners.loadbase()
ok = runners.execute_program(filename)
diff --git a/Master/bin/x86_64-darwin/mtxrun b/Master/bin/x86_64-darwin/mtxrun
index 7adcd3023a8..335c4fcb9e0 100755
--- a/Master/bin/x86_64-darwin/mtxrun
+++ b/Master/bin/x86_64-darwin/mtxrun
@@ -160,509 +160,6 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['l-lpeg'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local lpeg = require("lpeg")
-
-local type = type
-
--- Beware, we predefine a bunch of patterns here and one reason for doing so
--- is that we get consistent behaviour in some of the visualizers.
-
-lpeg.patterns = lpeg.patterns or { } -- so that we can share
-local patterns = lpeg.patterns
-
-local P, R, S, V, match = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.match
-local Ct, C, Cs, Cc = lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc
-local lpegtype = lpeg.type
-
-local utfcharacters = string.utfcharacters
-local utfgmatch = unicode and unicode.utf8.gmatch
-
-local anything = P(1)
-local endofstring = P(-1)
-local alwaysmatched = P(true)
-
-patterns.anything = anything
-patterns.endofstring = endofstring
-patterns.beginofstring = alwaysmatched
-patterns.alwaysmatched = alwaysmatched
-
-local digit, sign = R('09'), S('+-')
-local cr, lf, crlf = P("\r"), P("\n"), P("\r\n")
-local newline = crlf + cr + lf
-local escaped = P("\\") * anything
-local squote = P("'")
-local dquote = P('"')
-local space = P(" ")
-
-local utfbom_32_be = P('\000\000\254\255')
-local utfbom_32_le = P('\255\254\000\000')
-local utfbom_16_be = P('\255\254')
-local utfbom_16_le = P('\254\255')
-local utfbom_8 = P('\239\187\191')
-local utfbom = utfbom_32_be + utfbom_32_le
- + utfbom_16_be + utfbom_16_le
- + utfbom_8
-local utftype = utfbom_32_be / "utf-32-be" + utfbom_32_le / "utf-32-le"
- + utfbom_16_be / "utf-16-be" + utfbom_16_le / "utf-16-le"
- + utfbom_8 / "utf-8" + alwaysmatched / "unknown"
-
-local utf8next = R("\128\191")
-
-patterns.utf8one = R("\000\127")
-patterns.utf8two = R("\194\223") * utf8next
-patterns.utf8three = R("\224\239") * utf8next * utf8next
-patterns.utf8four = R("\240\244") * utf8next * utf8next * utf8next
-patterns.utfbom = utfbom
-patterns.utftype = utftype
-
-local utf8char = patterns.utf8one + patterns.utf8two + patterns.utf8three + patterns.utf8four
-local validutf8char = utf8char^0 * endofstring * Cc(true) + Cc(false)
-
-patterns.utf8 = utf8char
-patterns.utf8char = utf8char
-patterns.validutf8 = validutf8char
-patterns.validutf8char = validutf8char
-
-patterns.digit = digit
-patterns.sign = sign
-patterns.cardinal = sign^0 * digit^1
-patterns.integer = sign^0 * digit^1
-patterns.float = sign^0 * digit^0 * P('.') * digit^1
-patterns.cfloat = sign^0 * digit^0 * P(',') * digit^1
-patterns.number = patterns.float + patterns.integer
-patterns.cnumber = patterns.cfloat + patterns.integer
-patterns.oct = P("0") * R("07")^1
-patterns.octal = patterns.oct
-patterns.HEX = P("0x") * R("09","AF")^1
-patterns.hex = P("0x") * R("09","af")^1
-patterns.hexadecimal = P("0x") * R("09","AF","af")^1
-patterns.lowercase = R("az")
-patterns.uppercase = R("AZ")
-patterns.letter = patterns.lowercase + patterns.uppercase
-patterns.space = space
-patterns.tab = P("\t")
-patterns.spaceortab = patterns.space + patterns.tab
-patterns.eol = S("\n\r")
-patterns.spacer = S(" \t\f\v") -- + string.char(0xc2, 0xa0) if we want utf (cf mail roberto)
-patterns.newline = newline
-patterns.emptyline = newline^1
-patterns.nonspacer = 1 - patterns.spacer
-patterns.whitespace = patterns.eol + patterns.spacer
-patterns.nonwhitespace = 1 - patterns.whitespace
-patterns.equal = P("=")
-patterns.comma = P(",")
-patterns.commaspacer = P(",") * patterns.spacer^0
-patterns.period = P(".")
-patterns.colon = P(":")
-patterns.semicolon = P(";")
-patterns.underscore = P("_")
-patterns.escaped = escaped
-patterns.squote = squote
-patterns.dquote = dquote
-patterns.nosquote = (escaped + (1-squote))^0
-patterns.nodquote = (escaped + (1-dquote))^0
-patterns.unsingle = (squote/"") * patterns.nosquote * (squote/"")
-patterns.undouble = (dquote/"") * patterns.nodquote * (dquote/"")
-patterns.unquoted = patterns.undouble + patterns.unsingle -- more often undouble
-patterns.unspacer = ((patterns.spacer^1)/"")^0
-
-patterns.somecontent = (anything - newline - space)^1 -- (utf8char - newline - space)^1
-patterns.beginline = #(1-newline)
-
-local unquoted = Cs(patterns.unquoted * endofstring) -- not C
-
-function string.unquoted(str)
- return match(unquoted,str) or str
-end
-
-
-function lpeg.anywhere(pattern) --slightly adapted from website
- return P { P(pattern) + 1 * V(1) } -- why so complex?
-end
-
-function lpeg.splitter(pattern, action)
- return (((1-P(pattern))^1)/action+1)^0
-end
-
-local splitters_s, splitters_m = { }, { }
-
-local function splitat(separator,single)
- local splitter = (single and splitters_s[separator]) or splitters_m[separator]
- if not splitter then
- separator = P(separator)
- local other = C((1 - separator)^0)
- if single then
- local any = anything
- splitter = other * (separator * C(any^0) + "") -- ?
- splitters_s[separator] = splitter
- else
- splitter = other * (separator * other)^0
- splitters_m[separator] = splitter
- end
- end
- return splitter
-end
-
-lpeg.splitat = splitat
-
-
-local cache = { }
-
-function lpeg.split(separator,str)
- local c = cache[separator]
- if not c then
- c = Ct(splitat(separator))
- cache[separator] = c
- end
- return match(c,str)
-end
-
-function string.split(str,separator)
- local c = cache[separator]
- if not c then
- c = Ct(splitat(separator))
- cache[separator] = c
- end
- return match(c,str)
-end
-
-local spacing = patterns.spacer^0 * newline -- sort of strip
-local empty = spacing * Cc("")
-local nonempty = Cs((1-spacing)^1) * spacing^-1
-local content = (empty + nonempty)^1
-
-patterns.textline = content
-
-
-local linesplitter = Ct(splitat(newline))
-
-patterns.linesplitter = linesplitter
-
-function string.splitlines(str)
- return match(linesplitter,str)
-end
-
-local utflinesplitter = utfbom^-1 * Ct(splitat(newline))
-
-patterns.utflinesplitter = utflinesplitter
-
-function string.utfsplitlines(str)
- return match(utflinesplitter,str)
-end
-
-
-local cache = { }
-
-function lpeg.checkedsplit(separator,str)
- local c = cache[separator]
- if not c then
- separator = P(separator)
- local other = C((1 - separator)^1)
- c = Ct(separator^0 * other * (separator^1 * other)^0)
- cache[separator] = c
- end
- return match(c,str)
-end
-
-function string.checkedsplit(str,separator)
- local c = cache[separator]
- if not c then
- separator = P(separator)
- local other = C((1 - separator)^1)
- c = Ct(separator^0 * other * (separator^1 * other)^0)
- cache[separator] = c
- end
- return match(c,str)
-end
-
-
-local f1 = string.byte
-
-local function f2(s) local c1, c2 = f1(s,1,2) return c1 * 64 + c2 - 12416 end
-local function f3(s) local c1, c2, c3 = f1(s,1,3) return (c1 * 64 + c2) * 64 + c3 - 925824 end
-local function f4(s) local c1, c2, c3, c4 = f1(s,1,4) return ((c1 * 64 + c2) * 64 + c3) * 64 + c4 - 63447168 end
-
-local utf8byte = patterns.utf8one/f1 + patterns.utf8two/f2 + patterns.utf8three/f3 + patterns.utf8four/f4
-
-patterns.utf8byte = utf8byte
-
-
-
-local cache = { }
-
-function lpeg.stripper(str)
- if type(str) == "string" then
- local s = cache[str]
- if not s then
- s = Cs(((S(str)^1)/"" + 1)^0)
- cache[str] = s
- end
- return s
- else
- return Cs(((str^1)/"" + 1)^0)
- end
-end
-
-local cache = { }
-
-function lpeg.keeper(str)
- if type(str) == "string" then
- local s = cache[str]
- if not s then
- s = Cs((((1-S(str))^1)/"" + 1)^0)
- cache[str] = s
- end
- return s
- else
- return Cs((((1-str)^1)/"" + 1)^0)
- end
-end
-
-function lpeg.frontstripper(str) -- or pattern (yet undocumented)
- return (P(str) + P(true)) * Cs(P(1)^0)
-end
-
-function lpeg.endstripper(str) -- or pattern (yet undocumented)
- return Cs((1 - P(str) * P(-1))^0)
-end
-
--- Just for fun I looked at the used bytecode and
--- p = (p and p + pp) or pp gets one more (testset).
-
-function lpeg.replacer(one,two)
- if type(one) == "table" then
- local no = #one
- if no > 0 then
- local p
- for i=1,no do
- local o = one[i]
- local pp = P(o[1]) / o[2]
- if p then
- p = p + pp
- else
- p = pp
- end
- end
- return Cs((p + 1)^0)
- end
- else
- two = two or ""
- return Cs((P(one)/two + 1)^0)
- end
-end
-
-local splitters_f, splitters_s = { }, { }
-
-function lpeg.firstofsplit(separator) -- always return value
- local splitter = splitters_f[separator]
- if not splitter then
- separator = P(separator)
- splitter = C((1 - separator)^0)
- splitters_f[separator] = splitter
- end
- return splitter
-end
-
-function lpeg.secondofsplit(separator) -- nil if not split
- local splitter = splitters_s[separator]
- if not splitter then
- separator = P(separator)
- splitter = (1 - separator)^0 * separator * C(anything^0)
- splitters_s[separator] = splitter
- end
- return splitter
-end
-
-function lpeg.balancer(left,right)
- left, right = P(left), P(right)
- return P { left * ((1 - left - right) + V(1))^0 * right }
-end
-
-
-
-local nany = utf8char/""
-
-function lpeg.counter(pattern)
- pattern = Cs((P(pattern)/" " + nany)^0)
- return function(str)
- return #match(pattern,str)
- end
-end
-
-if utfgmatch then
-
- function lpeg.count(str,what) -- replaces string.count
- if type(what) == "string" then
- local n = 0
- for _ in utfgmatch(str,what) do
- n = n + 1
- end
- return n
- else -- 4 times slower but still faster than / function
- return #match(Cs((P(what)/" " + nany)^0),str)
- end
- end
-
-else
-
- local cache = { }
-
- function lpeg.count(str,what) -- replaces string.count
- if type(what) == "string" then
- local p = cache[what]
- if not p then
- p = Cs((P(what)/" " + nany)^0)
- cache[p] = p
- end
- return #match(p,str)
- else -- 4 times slower but still faster than / function
- return #match(Cs((P(what)/" " + nany)^0),str)
- end
- end
-
-end
-
-local patterns_escapes = { -- also defines in l-string
- ["%"] = "%%",
- ["."] = "%.",
- ["+"] = "%+", ["-"] = "%-", ["*"] = "%*",
- ["["] = "%[", ["]"] = "%]",
- ["("] = "%)", [")"] = "%)",
- -- ["{"] = "%{", ["}"] = "%}"
- -- ["^"] = "%^", ["$"] = "%$",
-}
-
-local simple_escapes = { -- also defines in l-string
- ["-"] = "%-",
- ["."] = "%.",
- ["?"] = ".",
- ["*"] = ".*",
-}
-
-local p = Cs((S("-.+*%()[]") / patterns_escapes + anything)^0)
-local s = Cs((S("-.+*%()[]") / simple_escapes + anything)^0)
-
-function string.escapedpattern(str,simple)
- return match(simple and s or p,str)
-end
-
--- utf extensies
-
-lpeg.UP = lpeg.P
-
-if utfcharacters then
-
- function lpeg.US(str)
- local p
- for uc in utfcharacters(str) do
- if p then
- p = p + P(uc)
- else
- p = P(uc)
- end
- end
- return p
- end
-
-
-elseif utfgmatch then
-
- function lpeg.US(str)
- local p
- for uc in utfgmatch(str,".") do
- if p then
- p = p + P(uc)
- else
- p = P(uc)
- end
- end
- return p
- end
-
-else
-
- function lpeg.US(str)
- local p
- local f = function(uc)
- if p then
- p = p + P(uc)
- else
- p = P(uc)
- end
- end
- match((utf8char/f)^0,str)
- return p
- end
-
-end
-
-local range = Cs(utf8byte) * (Cs(utf8byte) + Cc(false))
-
-local utfchar = unicode and unicode.utf8 and unicode.utf8.char
-
-function lpeg.UR(str,more)
- local first, last
- if type(str) == "number" then
- first = str
- last = more or first
- else
- first, last = match(range,str)
- if not last then
- return P(str)
- end
- end
- if first == last then
- return P(str)
- elseif utfchar and last - first < 8 then -- a somewhat arbitrary criterium
- local p
- for i=first,last do
- if p then
- p = p + P(utfchar(i))
- else
- p = P(utfchar(i))
- end
- end
- return p -- nil when invalid range
- else
- local f = function(b)
- return b >= first and b <= last
- end
- return utf8byte / f -- nil when invalid range
- end
-end
-
-
-
-function lpeg.oneof(list,...) -- lpeg.oneof("elseif","else","if","then")
- if type(list) ~= "table" then
- list = { list, ... }
- end
- -- sort(list) -- longest match first
- local p = P(list[1])
- for l=2,#list do
- p = p + P(list[l])
- end
- return p
-end
-
-function lpeg.is_lpeg(p)
- return p and lpegtype(p) == "pattern"
-end
-
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
if not modules then modules = { } end modules ['l-table'] = {
version = 1.001,
comment = "companion to luat-lib.mkiv",
@@ -675,11 +172,12 @@ local type, next, tostring, tonumber, ipairs, table, string = type, next, tostri
local concat, sort, insert, remove = table.concat, table.sort, table.insert, table.remove
local format, find, gsub, lower, dump, match = string.format, string.find, string.gsub, string.lower, string.dump, string.match
local getmetatable, setmetatable = getmetatable, setmetatable
+local getinfo = debug.getinfo
-- Starting with version 5.2 Lua no longer provide ipairs, which makes
-- sense. As we already used the for loop and # in most places the
-- impact on ConTeXt was not that large; the remaining ipairs already
--- have been replaced. In a similar fashio we also hardly used pairs.
+-- have been replaced. In a similar fashion we also hardly used pairs.
--
-- Just in case, we provide the fallbacks as discussed in Programming
-- in Lua (http://www.lua.org/pil/7.3.html):
@@ -1022,6 +520,8 @@ end
-- problem: there is no good number_to_string converter with the best resolution
+local function dummy() end
+
local function do_serialize(root,name,depth,level,indexed)
if level > 0 then
depth = depth .. " "
@@ -1214,19 +714,20 @@ local function do_serialize(root,name,depth,level,indexed)
end
elseif t == "function" then
if functions then
+ local f = getinfo(v).what == "C" and dump(dummy) or dump(v)
+ -- local f = getinfo(v).what == "C" and dump(function(...) return v(...) end) or dump(v)
if tk == "number" then -- or find(k,"^%d+$") then
if hexify then
- handle(format("%s [0x%04X]=loadstring(%q),",depth,k,dump(v)))
+ handle(format("%s [0x%04X]=loadstring(%q),",depth,k,f))
else
- handle(format("%s [%s]=loadstring(%q),",depth,k,dump(v)))
+ handle(format("%s [%s]=loadstring(%q),",depth,k,f))
end
elseif tk == "boolean" then
- handle(format("%s [%s]=loadstring(%q),",depth,tostring(k),dump(v)))
+ handle(format("%s [%s]=loadstring(%q),",depth,tostring(k),f))
elseif noquotes and not reserved[k] and find(k,"^%a[%w%_]*$") then
- handle(format("%s %s=loadstring(%q),",depth,k,dump(v)))
+ handle(format("%s %s=loadstring(%q),",depth,k,f))
else
- -- handle(format("%s [%q]=loadstring(%q),",depth,k,dump(v)))
- handle(format("%s [%q]=loadstring(%q),",depth,k,debug.getinfo(v).what == "C" and "C code" or dump(v)))
+ handle(format("%s [%q]=loadstring(%q),",depth,k,f))
end
end
else
@@ -1571,6 +1072,730 @@ function table.has_one_entry(t)
return t and not next(t,next(t))
end
+-- new
+
+function table.loweredkeys(t) -- maybe utf
+ local l = { }
+ for k, v in next, t do
+ l[lower(k)] = v
+ end
+ return l
+end
+
+-- new, might move (maybe duplicate)
+
+function table.unique(old)
+ local hash = { }
+ local new = { }
+ local n = 0
+ for i=1,#old do
+ local oi = old[i]
+ if not hash[oi] then
+ n = n + 1
+ new[n] = oi
+ hash[oi] = true
+ end
+ end
+ return new
+end
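+-- an illustrative call (a sketch, not exercised in this file):
+--
+-- table.unique { "a", "b", "a", "c" } -- => { "a", "b", "c" }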
+
+-- function table.sorted(t,...)
+-- table.sort(t,...)
+-- return t -- still sorts in-place
+-- end
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+if not modules then modules = { } end modules ['l-lpeg'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+
+-- a new lpeg fails on a #(1-P(":")) test and really needs a + P(-1)
+
+local lpeg = require("lpeg")
+
+-- tracing (only used when we encounter a problem in integration of lpeg in luatex)
+
+local report = texio and texio.write_nl or print
+
+
+
+
+local type = type
+local byte, char, gmatch = string.byte, string.char, string.gmatch
+
+-- Beware, we predefine a bunch of patterns here and one reason for doing so
+-- is that we get consistent behaviour in some of the visualizers.
+
+lpeg.patterns = lpeg.patterns or { } -- so that we can share
+local patterns = lpeg.patterns
+
+local P, R, S, V, match = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.match
+local Ct, C, Cs, Cc = lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc
+local lpegtype = lpeg.type
+
+local utfcharacters = string.utfcharacters
+local utfgmatch = unicode and unicode.utf8.gmatch
+
+local anything = P(1)
+local endofstring = P(-1)
+local alwaysmatched = P(true)
+
+patterns.anything = anything
+patterns.endofstring = endofstring
+patterns.beginofstring = alwaysmatched
+patterns.alwaysmatched = alwaysmatched
+
+local digit, sign = R('09'), S('+-')
+local cr, lf, crlf = P("\r"), P("\n"), P("\r\n")
+local newline = crlf + S("\r\n") -- cr + lf
+local escaped = P("\\") * anything
+local squote = P("'")
+local dquote = P('"')
+local space = P(" ")
+
+local utfbom_32_be = P('\000\000\254\255')
+local utfbom_32_le = P('\255\254\000\000')
+local utfbom_16_be = P('\255\254')
+local utfbom_16_le = P('\254\255')
+local utfbom_8 = P('\239\187\191')
+local utfbom = utfbom_32_be + utfbom_32_le
+ + utfbom_16_be + utfbom_16_le
+ + utfbom_8
+local utftype = utfbom_32_be / "utf-32-be" + utfbom_32_le / "utf-32-le"
+ + utfbom_16_be / "utf-16-be" + utfbom_16_le / "utf-16-le"
+ + utfbom_8 / "utf-8" + alwaysmatched / "unknown"
+
+local utf8next = R("\128\191")
+
+patterns.utf8one = R("\000\127")
+patterns.utf8two = R("\194\223") * utf8next
+patterns.utf8three = R("\224\239") * utf8next * utf8next
+patterns.utf8four = R("\240\244") * utf8next * utf8next * utf8next
+patterns.utfbom = utfbom
+patterns.utftype = utftype
+
+local utf8char = patterns.utf8one + patterns.utf8two + patterns.utf8three + patterns.utf8four
+local validutf8char = utf8char^0 * endofstring * Cc(true) + Cc(false)
+
+patterns.utf8 = utf8char
+patterns.utf8char = utf8char
+patterns.validutf8 = validutf8char
+patterns.validutf8char = validutf8char
+
+patterns.digit = digit
+patterns.sign = sign
+patterns.cardinal = sign^0 * digit^1
+patterns.integer = sign^0 * digit^1
+patterns.float = sign^0 * digit^0 * P('.') * digit^1
+patterns.cfloat = sign^0 * digit^0 * P(',') * digit^1
+patterns.number = patterns.float + patterns.integer
+patterns.cnumber = patterns.cfloat + patterns.integer
+patterns.oct = P("0") * R("07")^1
+patterns.octal = patterns.oct
+patterns.HEX = P("0x") * R("09","AF")^1
+patterns.hex = P("0x") * R("09","af")^1
+patterns.hexadecimal = P("0x") * R("09","AF","af")^1
+patterns.lowercase = R("az")
+patterns.uppercase = R("AZ")
+patterns.letter = patterns.lowercase + patterns.uppercase
+patterns.space = space
+patterns.tab = P("\t")
+patterns.spaceortab = patterns.space + patterns.tab
+patterns.eol = S("\n\r")
+patterns.spacer = S(" \t\f\v") -- + char(0xc2, 0xa0) if we want utf (cf mail roberto)
+patterns.newline = newline
+patterns.emptyline = newline^1
+patterns.nonspacer = 1 - patterns.spacer
+patterns.whitespace = patterns.eol + patterns.spacer
+patterns.nonwhitespace = 1 - patterns.whitespace
+patterns.equal = P("=")
+patterns.comma = P(",")
+patterns.commaspacer = P(",") * patterns.spacer^0
+patterns.period = P(".")
+patterns.colon = P(":")
+patterns.semicolon = P(";")
+patterns.underscore = P("_")
+patterns.escaped = escaped
+patterns.squote = squote
+patterns.dquote = dquote
+patterns.nosquote = (escaped + (1-squote))^0
+patterns.nodquote = (escaped + (1-dquote))^0
+patterns.unsingle = (squote/"") * patterns.nosquote * (squote/"")
+patterns.undouble = (dquote/"") * patterns.nodquote * (dquote/"")
+patterns.unquoted = patterns.undouble + patterns.unsingle -- more often undouble
+patterns.unspacer = ((patterns.spacer^1)/"")^0
+
+patterns.somecontent = (anything - newline - space)^1 -- (utf8char - newline - space)^1
+patterns.beginline = #(1-newline)
+
+-- print(string.unquoted("test"))
+-- print(string.unquoted([["t\"est"]]))
+-- print(string.unquoted([["t\"est"x]]))
+-- print(string.unquoted("\'test\'"))
+-- print(string.unquoted('"test"'))
+-- print(string.unquoted('"test"'))
+
+function lpeg.anywhere(pattern) --slightly adapted from website
+ return P { P(pattern) + 1 * V(1) } -- why so complex?
+end
+
+function lpeg.splitter(pattern, action)
+ return (((1-P(pattern))^1)/action+1)^0
+end
+
+function lpeg.tsplitter(pattern, action)
+ return Ct((((1-P(pattern))^1)/action+1)^0)
+end
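+-- a small usage sketch (assuming string.upper as the action):
+--
+-- lpeg.match(lpeg.tsplitter(",",string.upper),"a,b") -- => { "A", "B" }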
+
+-- problem: the separator can be an lpeg pattern and that does not hash too well, but
+-- it's quite okay as the key is then not garbage collected
+
+local splitters_s, splitters_m, splitters_t = { }, { }, { }
+
+local function splitat(separator,single)
+ local splitter = (single and splitters_s[separator]) or splitters_m[separator]
+ if not splitter then
+ separator = P(separator)
+ local other = C((1 - separator)^0)
+ if single then
+ local any = anything
+ splitter = other * (separator * C(any^0) + "") -- ?
+ splitters_s[separator] = splitter
+ else
+ splitter = other * (separator * other)^0
+ splitters_m[separator] = splitter
+ end
+ end
+ return splitter
+end
+
+local function tsplitat(separator)
+ local splitter = splitters_t[separator]
+ if not splitter then
+ splitter = Ct(splitat(separator))
+ splitters_t[separator] = splitter
+ end
+ return splitter
+end
+
+lpeg.splitat = splitat
+lpeg.tsplitat = tsplitat
+
+function string.splitup(str,separator)
+ if not separator then
+ separator = ","
+ end
+ return match(splitters_m[separator] or splitat(separator),str)
+end
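+-- for instance (sketch): string.splitup("2011-06-01","-") returns "2011", "06", "01"
+-- as multiple values rather than a table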
+
+
+local cache = { }
+
+function lpeg.split(separator,str)
+ local c = cache[separator]
+ if not c then
+ c = tsplitat(separator)
+ cache[separator] = c
+ end
+ return match(c,str)
+end
+
+function string.split(str,separator)
+ local c = cache[separator]
+ if not c then
+ c = tsplitat(separator)
+ cache[separator] = c
+ end
+ return match(c,str)
+end
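+-- both variants give a table (sketch): string.split("a,b,c",",") -- => { "a", "b", "c" }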
+
+local spacing = patterns.spacer^0 * newline -- sort of strip
+local empty = spacing * Cc("")
+local nonempty = Cs((1-spacing)^1) * spacing^-1
+local content = (empty + nonempty)^1
+
+patterns.textline = content
+
+
+local linesplitter = tsplitat(newline)
+
+patterns.linesplitter = linesplitter
+
+function string.splitlines(str)
+ return match(linesplitter,str)
+end
+
+local utflinesplitter = utfbom^-1 * tsplitat(newline)
+
+patterns.utflinesplitter = utflinesplitter
+
+function string.utfsplitlines(str)
+ return match(utflinesplitter,str or "")
+end
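+-- for example (sketch): string.splitlines("one\ntwo\nthree") -- => { "one", "two", "three" }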
+
+
+local cache = { }
+
+function lpeg.checkedsplit(separator,str)
+ local c = cache[separator]
+ if not c then
+ separator = P(separator)
+ local other = C((1 - separator)^1)
+ c = Ct(separator^0 * other * (separator^1 * other)^0)
+ cache[separator] = c
+ end
+ return match(c,str)
+end
+
+function string.checkedsplit(str,separator)
+ local c = cache[separator]
+ if not c then
+ separator = P(separator)
+ local other = C((1 - separator)^1)
+ c = Ct(separator^0 * other * (separator^1 * other)^0)
+ cache[separator] = c
+ end
+ return match(c,str)
+end
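+-- unlike plain split, empty strings are skipped here (sketch):
+--
+-- string.checkedsplit("a,,c",",") -- => { "a", "c" }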
+
+
+local function f2(s) local c1, c2 = byte(s,1,2) return c1 * 64 + c2 - 12416 end
+local function f3(s) local c1, c2, c3 = byte(s,1,3) return (c1 * 64 + c2) * 64 + c3 - 925824 end
+local function f4(s) local c1, c2, c3, c4 = byte(s,1,4) return ((c1 * 64 + c2) * 64 + c3) * 64 + c4 - 63447168 end
+
+local utf8byte = patterns.utf8one/byte + patterns.utf8two/f2 + patterns.utf8three/f3 + patterns.utf8four/f4
+
+patterns.utf8byte = utf8byte
+
+
+
+local cache = { }
+
+function lpeg.stripper(str)
+ if type(str) == "string" then
+ local s = cache[str]
+ if not s then
+ s = Cs(((S(str)^1)/"" + 1)^0)
+ cache[str] = s
+ end
+ return s
+ else
+ return Cs(((str^1)/"" + 1)^0)
+ end
+end
+
+local cache = { }
+
+function lpeg.keeper(str)
+ if type(str) == "string" then
+ local s = cache[str]
+ if not s then
+ s = Cs((((1-S(str))^1)/"" + 1)^0)
+ cache[str] = s
+ end
+ return s
+ else
+ return Cs((((1-str)^1)/"" + 1)^0)
+ end
+end
+
+function lpeg.frontstripper(str) -- or pattern (yet undocumented)
+ return (P(str) + P(true)) * Cs(P(1)^0)
+end
+
+function lpeg.endstripper(str) -- or pattern (yet undocumented)
+ return Cs((1 - P(str) * P(-1))^0)
+end
+
+-- Just for fun I looked at the used bytecode and
+-- p = (p and p + pp) or pp gets one more (testset).
+
+function lpeg.replacer(one,two)
+ if type(one) == "table" then
+ local no = #one
+ if no > 0 then
+ local p
+ for i=1,no do
+ local o = one[i]
+ local pp = P(o[1]) / o[2]
+ if p then
+ p = p + pp
+ else
+ p = pp
+ end
+ end
+ return Cs((p + 1)^0)
+ end
+ else
+ two = two or ""
+ return Cs((P(one)/two + 1)^0)
+ end
+end
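+-- a quick sketch of both call forms:
+--
+-- lpeg.match(lpeg.replacer("-","+"),"a-b-c")           -- => "a+b+c"
+-- lpeg.match(lpeg.replacer{{"foo","bar"}},"some foo")  -- => "some bar"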
+
+local splitters_f, splitters_s = { }, { }
+
+function lpeg.firstofsplit(separator) -- always return value
+ local splitter = splitters_f[separator]
+ if not splitter then
+ separator = P(separator)
+ splitter = C((1 - separator)^0)
+ splitters_f[separator] = splitter
+ end
+ return splitter
+end
+
+function lpeg.secondofsplit(separator) -- nil if not split
+ local splitter = splitters_s[separator]
+ if not splitter then
+ separator = P(separator)
+ splitter = (1 - separator)^0 * separator * C(anything^0)
+ splitters_s[separator] = splitter
+ end
+ return splitter
+end
+
+function lpeg.balancer(left,right)
+ left, right = P(left), P(right)
+ return P { left * ((1 - left - right) + V(1))^0 * right }
+end
+
+
+
+local nany = utf8char/""
+
+function lpeg.counter(pattern)
+ pattern = Cs((P(pattern)/" " + nany)^0)
+ return function(str)
+ return #match(pattern,str)
+ end
+end
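+-- for instance (sketch): lpeg.counter("a")("banana") -- => 3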
+
+if utfgmatch then
+
+ function lpeg.count(str,what) -- replaces string.count
+ if type(what) == "string" then
+ local n = 0
+ for _ in utfgmatch(str,what) do
+ n = n + 1
+ end
+ return n
+ else -- 4 times slower but still faster than / function
+ return #match(Cs((P(what)/" " + nany)^0),str)
+ end
+ end
+
+else
+
+ local cache = { }
+
+ function lpeg.count(str,what) -- replaces string.count
+ if type(what) == "string" then
+ local p = cache[what]
+ if not p then
+ p = Cs((P(what)/" " + nany)^0)
+            cache[what] = p
+ end
+ return #match(p,str)
+ else -- 4 times slower but still faster than / function
+ return #match(Cs((P(what)/" " + nany)^0),str)
+ end
+ end
+
+end
+
+local patterns_escapes = { -- also defined in l-string
+ ["%"] = "%%",
+ ["."] = "%.",
+ ["+"] = "%+", ["-"] = "%-", ["*"] = "%*",
+ ["["] = "%[", ["]"] = "%]",
+    ["("] = "%(", [")"] = "%)",
+ -- ["{"] = "%{", ["}"] = "%}"
+ -- ["^"] = "%^", ["$"] = "%$",
+}
+
+local simple_escapes = { -- also defined in l-string
+ ["-"] = "%-",
+ ["."] = "%.",
+ ["?"] = ".",
+ ["*"] = ".*",
+}
+
+local p = Cs((S("-.+*%()[]") / patterns_escapes + anything)^0)
+local s = Cs((S("-.+*%()[]") / simple_escapes + anything)^0)
+
+function string.escapedpattern(str,simple)
+ return match(simple and s or p,str)
+end
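+-- a hedged illustration of both modes:
+--
+-- string.escapedpattern("a.b")        -- => "a%.b"
+-- string.escapedpattern("*.lua",true) -- => ".*%.lua"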
+
+-- utf extensions
+
+lpeg.UP = lpeg.P
+
+if utfcharacters then
+
+ function lpeg.US(str)
+ local p
+ for uc in utfcharacters(str) do
+ if p then
+ p = p + P(uc)
+ else
+ p = P(uc)
+ end
+ end
+ return p
+ end
+
+
+elseif utfgmatch then
+
+ function lpeg.US(str)
+ local p
+ for uc in utfgmatch(str,".") do
+ if p then
+ p = p + P(uc)
+ else
+ p = P(uc)
+ end
+ end
+ return p
+ end
+
+else
+
+ function lpeg.US(str)
+ local p
+ local f = function(uc)
+ if p then
+ p = p + P(uc)
+ else
+ p = P(uc)
+ end
+ end
+ match((utf8char/f)^0,str)
+ return p
+ end
+
+end
+
+local range = Cs(utf8byte) * (Cs(utf8byte) + Cc(false))
+
+local utfchar = unicode and unicode.utf8 and unicode.utf8.char
+
+function lpeg.UR(str,more)
+ local first, last
+ if type(str) == "number" then
+ first = str
+ last = more or first
+ else
+ first, last = match(range,str)
+ if not last then
+ return P(str)
+ end
+ end
+ if first == last then
+ return P(str)
+    elseif utfchar and last - first < 8 then -- a somewhat arbitrary criterion
+ local p
+ for i=first,last do
+ if p then
+ p = p + P(utfchar(i))
+ else
+ p = P(utfchar(i))
+ end
+ end
+ return p -- nil when invalid range
+ else
+ local f = function(b)
+ return b >= first and b <= last
+ end
+ return utf8byte / f -- nil when invalid range
+ end
+end
+
+
+
+function lpeg.oneof(list,...) -- lpeg.oneof("elseif","else","if","then")
+ if type(list) ~= "table" then
+ list = { list, ... }
+ end
+ -- sort(list) -- longest match first
+ local p = P(list[1])
+ for l=2,#list do
+ p = p + P(list[l])
+ end
+ return p
+end
+
+function lpeg.is_lpeg(p)
+ return p and lpegtype(p) == "pattern"
+end
+
+-- For the moment here, but it might move to utilities. Beware, we need to
+-- have the longest keyword first, so 'aaa' comes before 'aa', which is why we
+-- loop back from the end (or prepend).
+
+local sort, fastcopy, sortedkeys = table.sort, table.fastcopy, table.sortedkeys -- dependency!
+
+function lpeg.append(list,pp,delayed,checked)
+ local p = pp
+ if #list > 0 then
+ local keys = fastcopy(list)
+ sort(keys)
+ for i=#keys,1,-1 do
+ local k = keys[i]
+ if p then
+ p = P(k) + p
+ else
+ p = P(k)
+ end
+ end
+ elseif delayed then -- hm, it looks like the lpeg parser resolves anyway
+ local keys = sortedkeys(list)
+ if p then
+ for i=1,#keys,1 do
+ local k = keys[i]
+ local v = list[k]
+ p = P(k)/list + p
+ end
+ else
+ for i=1,#keys do
+ local k = keys[i]
+ local v = list[k]
+ if p then
+ p = P(k) + p
+ else
+ p = P(k)
+ end
+ end
+ if p then
+ p = p / list
+ end
+ end
+ elseif checked then
+ -- problem: substitution gives a capture
+ local keys = sortedkeys(list)
+ for i=1,#keys do
+ local k = keys[i]
+ local v = list[k]
+ if p then
+ if k == v then
+ p = P(k) + p
+ else
+ p = P(k)/v + p
+ end
+ else
+ if k == v then
+ p = P(k)
+ else
+ p = P(k)/v
+ end
+ end
+ end
+ else
+ local keys = sortedkeys(list)
+ for i=1,#keys do
+ local k = keys[i]
+ local v = list[k]
+ if p then
+ p = P(k)/v + p
+ else
+ p = P(k)/v
+ end
+ end
+ end
+ return p
+end
+
+-- inspect(lpeg.append({ a = "1", aa = "1", aaa = "1" } ,nil,true))
+-- inspect(lpeg.append({ ["degree celsius"] = "1", celsius = "1", degree = "1" } ,nil,true))
+
+-- function lpeg.exact_match(words,case_insensitive)
+-- local pattern = concat(words)
+-- if case_insensitive then
+-- local pattern = S(upper(characters)) + S(lower(characters))
+-- local list = { }
+-- for i=1,#words do
+-- list[lower(words[i])] = true
+-- end
+-- return Cmt(pattern^1, function(_,i,s)
+-- return list[lower(s)] and i
+-- end)
+-- else
+-- local pattern = S(concat(words))
+-- local list = { }
+-- for i=1,#words do
+-- list[words[i]] = true
+-- end
+-- return Cmt(pattern^1, function(_,i,s)
+-- return list[s] and i
+-- end)
+-- end
+-- end
+
+-- experiment:
+
+local function make(t)
+ local p
+-- for k, v in next, t do
+ for k, v in table.sortedhash(t) do
+ if not p then
+ if next(v) then
+ p = P(k) * make(v)
+ else
+ p = P(k)
+ end
+ else
+ if next(v) then
+ p = p + P(k) * make(v)
+ else
+ p = p + P(k)
+ end
+ end
+ end
+ return p
+end
+
+function lpeg.utfchartabletopattern(list)
+ local tree = { }
+ for i=1,#list do
+ local t = tree
+ for c in gmatch(list[i],".") do
+ if not t[c] then
+ t[c] = { }
+ end
+ t = t[c]
+ end
+ end
+ return make(tree)
+end
+
+-- inspect ( lpeg.utfchartabletopattern {
+-- utfchar(0x00A0), -- nbsp
+-- utfchar(0x2000), -- enquad
+-- utfchar(0x2001), -- emquad
+-- utfchar(0x2002), -- enspace
+-- utfchar(0x2003), -- emspace
+-- utfchar(0x2004), -- threeperemspace
+-- utfchar(0x2005), -- fourperemspace
+-- utfchar(0x2006), -- sixperemspace
+-- utfchar(0x2007), -- figurespace
+-- utfchar(0x2008), -- punctuationspace
+-- utfchar(0x2009), -- breakablethinspace
+-- utfchar(0x200A), -- hairspace
+-- utfchar(0x200B), -- zerowidthspace
+-- utfchar(0x202F), -- narrownobreakspace
+-- utfchar(0x205F), -- math thinspace
+-- } )
+
end -- of closure
@@ -1831,6 +2056,11 @@ function io.readstring(f,n,m)
return str
end
+--
+
+if not io.i_limiter then function io.i_limiter() end end -- dummy so we can test safely
+if not io.o_limiter then function io.o_limiter() end end -- dummy so we can test safely
+
end -- of closure
@@ -2061,7 +2291,7 @@ if not modules then modules = { } end modules ['l-os'] = {
-- maybe build io.flush in os.execute
local os = os
-local find, format, gsub, upper = string.find, string.format, string.gsub, string.upper
+local find, format, gsub, upper, gmatch = string.find, string.format, string.gsub, string.upper, string.gmatch
local concat = table.concat
local random, ceil = math.random, math.ceil
local rawget, rawset, type, getmetatable, setmetatable, tonumber = rawget, rawset, type, getmetatable, setmetatable, tonumber
@@ -2167,9 +2397,9 @@ os.type = os.type or (io.pathseparator == ";" and "windows") or "unix"
os.name = os.name or (os.type == "windows" and "mswin" ) or "linux"
if os.type == "windows" then
- os.libsuffix, os.binsuffix = 'dll', 'exe'
+ os.libsuffix, os.binsuffix, os.binsuffixes = 'dll', 'exe', { 'exe', 'cmd', 'bat' }
else
- os.libsuffix, os.binsuffix = 'so', ''
+ os.libsuffix, os.binsuffix, os.binsuffixes = 'so', '', { '' }
end
function os.launch(str)
@@ -2400,6 +2630,39 @@ function os.timezone(delta)
end
end
+local memory = { }
+
+local function which(filename)
+ local fullname = memory[filename]
+ if fullname == nil then
+ local suffix = file.suffix(filename)
+ local suffixes = suffix == "" and os.binsuffixes or { suffix }
+ for directory in gmatch(os.getenv("PATH"),"[^" .. io.pathseparator .."]+") do
+ local df = file.join(directory,filename)
+ for i=1,#suffixes do
+ local dfs = file.addsuffix(df,suffixes[i])
+ if io.exists(dfs) then
+ fullname = dfs
+ break
+ end
+            end
+            if fullname then
+                break -- stop at the first match so that PATH order is respected
+            end
+        end
+ if not fullname then
+ fullname = false
+ end
+ memory[filename] = fullname
+ end
+ return fullname
+end
+
+os.which = which
+os.where = which
+
+-- print(os.which("inkscape.exe"))
+-- print(os.which("inkscape"))
+-- print(os.which("gs.exe"))
+-- print(os.which("ps2pdf"))
+
end -- of closure
@@ -2499,10 +2762,10 @@ end
local trick_1 = char(1)
local trick_2 = "^" .. trick_1 .. "/+"
-function file.join(...)
+function file.join(...) -- rather dirty
local lst = { ... }
local a, b = lst[1], lst[2]
- if a == "" then
+    if not a or a == "" then -- the 'not a' test is new
lst[1] = trick_1
elseif b and find(a,"^/+$") and find(b,"^/") then
lst[1] = ""
@@ -2523,6 +2786,15 @@ function file.join(...)
end
+-- We should be able to use:
+--
+-- function file.is_writable(name)
+-- local a = attributes(name) or attributes(dirname(name,"."))
+-- return a and sub(a.permissions,2,2) == "w"
+-- end
+--
+-- But after some testing Taco and I came up with:
+
function file.is_writable(name)
if lfs.isdir(name) then
name = name .. "/m_t_x_t_e_s_t.tmp"
@@ -2532,12 +2804,17 @@ function file.is_writable(name)
os.remove(name)
return true
end
- else
- local existing = lfs.isfile(name)
- f = io.open(name,"ab")
+ elseif lfs.isfile(name) then
+ local f = io.open(name,"ab")
if f then
f:close()
- if not existing then os.remove(name) end
+ return true
+ end
+ else
+ local f = io.open(name,"ab")
+ if f then
+ f:close()
+ os.remove(name)
return true
end
end
@@ -2594,7 +2871,7 @@ function file.collapsepath(str,anchor)
if element == '.' then
-- do nothing
elseif element == '..' then
- local n = i -1
+ local n = i - 1
while n > 0 do
local element = oldelements[n]
if element ~= '..' and element ~= '.' then
@@ -2690,10 +2967,42 @@ local path = C(((1-slash)^0 * slash)^0)
local suffix = period * C(P(1-period)^0 * P(-1))
local base = C((1-suffix)^0)
-local pattern = (drive + Cc("")) * (path + Cc("")) * (base + Cc("")) * (suffix + Cc(""))
+drive = drive + Cc("")
+path = path + Cc("")
+base = base + Cc("")
+suffix = suffix + Cc("")
-function file.splitname(str) -- returns drive, path, base, suffix
- return lpegmatch(pattern,str)
+local pattern_a = drive * path * base * suffix
+local pattern_b = path * base * suffix
+local pattern_c = C(drive * path) * C(base * suffix)
+
+function file.splitname(str,splitdrive)
+ if splitdrive then
+ return lpegmatch(pattern_a,str) -- returns drive, path, base, suffix
+ else
+ return lpegmatch(pattern_b,str) -- returns path, base, suffix
+ end
+end
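+-- a sketch of the default (no drive) call:
+--
+-- file.splitname("a/b/c.txt") -- => "a/b/", "c", "txt"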
+
+function file.nametotable(str,splitdrive) -- returns table
+ local path, drive, subpath, name, base, suffix = lpegmatch(pattern_c,str)
+ if splitdrive then
+ return {
+ path = path,
+ drive = drive,
+ subpath = subpath,
+ name = name,
+ base = base,
+ suffix = suffix,
+ }
+ else
+ return {
+ path = path,
+ name = name,
+ base = base,
+ suffix = suffix,
+ }
+ end
end
-- function test(t) for k, v in next, t do print(v, "=>", file.splitname(v)) end end
@@ -2828,12 +3137,19 @@ local nothing = Cc("")
local escaped = (plus / " ") + (percent * C(hexdigit * hexdigit) / tochar)
-- we assume schemes with more than 1 character (in order to avoid problems with windows disks)
+-- we also assume that when we have a scheme, we also have an authority
+
+local schemestr = Cs((escaped+(1-colon-slash-qmark-hash))^2)
+local authoritystr = Cs((escaped+(1- slash-qmark-hash))^0)
+local pathstr = Cs((escaped+(1- qmark-hash))^0)
+local querystr = Cs((escaped+(1- hash))^0)
+local fragmentstr = Cs((escaped+(1- endofstring))^0)
-local scheme = Cs((escaped+(1-colon-slash-qmark-hash))^2) * colon * slash * slash + nothing
-local authority = Cs((escaped+(1- slash-qmark-hash))^0) + nothing
-local path = slash * Cs((escaped+(1- qmark-hash))^0) + nothing
-local query = qmark * Cs((escaped+(1- hash))^0) + nothing
-local fragment = hash * Cs((escaped+(1- endofstring))^0) + nothing
+local scheme = schemestr * colon + nothing
+local authority = slash * slash * authoritystr + nothing
+local path = slash * pathstr + nothing
+local query = qmark * querystr + nothing
+local fragment = hash * fragmentstr + nothing
local validurl = scheme * authority * path * query * fragment
local parser = Ct(validurl)
@@ -2854,11 +3170,14 @@ local function split(str)
return (type(str) == "string" and lpegmatch(parser,str)) or str
end
+local isscheme = schemestr * colon * slash * slash -- this test also assumes authority
+
local function hasscheme(str)
- local scheme = lpegmatch(scheme,str) -- at least one character
- return scheme and scheme ~= ""
+ local scheme = lpegmatch(isscheme,str) -- at least one character
+ return scheme ~= "" and scheme or false
end
+
-- todo: cache them
local rootletter = R("az","AZ")
@@ -3007,8 +3326,6 @@ local attributes = lfs.attributes
local walkdir = lfs.dir
local isdir = lfs.isdir
local isfile = lfs.isfile
-local mkdir = lfs.mkdir
-local chdir = lfs.chdir
local currentdir = lfs.currentdir
-- handy
@@ -3056,13 +3373,13 @@ local function collectpattern(path,patt,recurse,result)
    local ok, scanner, first
result = result or { }
if path == "/" then
- ok, scanner = xpcall(function() return walkdir(path..".") end, function() end) -- kepler safe
+ ok, scanner, first = xpcall(function() return walkdir(path..".") end, function() end) -- kepler safe
else
- ok, scanner = xpcall(function() return walkdir(path) end, function() end) -- kepler safe
+ ok, scanner, first = xpcall(function() return walkdir(path) end, function() end) -- kepler safe
end
if ok and type(scanner) == "function" then
if not find(path,"/$") then path = path .. '/' end
- for name in scanner do
+ for name in scanner, first do
local full = path .. name
local attr = attributes(full)
local mode = attr.mode
@@ -3245,7 +3562,7 @@ if onwindows then
pth = pth .. "/" .. s
end
if make_indeed and not isdir(pth) then
- mkdir(pth)
+ lfs.mkdir(pth)
end
end
return pth, (isdir(pth) == true)
@@ -3277,7 +3594,7 @@ else
pth = pth .. "/" .. s
end
if make_indeed and not first and not isdir(pth) then
- mkdir(pth)
+ lfs.mkdir(pth)
end
end
else
@@ -3285,7 +3602,7 @@ else
for s in gmatch(str,"[^/]+") do
pth = pth .. "/" .. s
if make_indeed and not isdir(pth) then
- mkdir(pth)
+ lfs.mkdir(pth)
end
end
end
@@ -3313,10 +3630,10 @@ if onwindows then
first, last = match(str,"^([a-zA-Z]:)(.*)$")
if first and not find(last,"^/") then
local d = currentdir()
- if chdir(first) then
+ if lfs.chdir(first) then
first = dir.current()
end
- chdir(d)
+ lfs.chdir(d)
end
end
if not first then
@@ -3381,10 +3698,6 @@ local type, tonumber = type, tonumber
boolean = boolean or { }
local boolean = boolean
--- function boolean.tonumber(b)
--- return b and 1 or 0 -- test and test and return or return
--- end
-
function boolean.tonumber(b)
if b then return 1 else return 0 end -- test and return or return
end
@@ -3545,7 +3858,7 @@ local function utf16_to_utf8_be(t)
if right then
local now = 256*left + right
if more > 0 then
- now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000
+ now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000 -- the 0x10000 smells wrong
more = 0
r = r + 1
result[r] = utfchar(now)
@@ -3573,7 +3886,7 @@ local function utf16_to_utf8_le(t)
if right then
local now = 256*right + left
if more > 0 then
- now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000
+ now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000 -- the 0x10000 smells wrong
more = 0
r = r + 1
result[r] = utfchar(now)
@@ -3590,14 +3903,14 @@ local function utf16_to_utf8_le(t)
return t
end
-local function utf32_to_utf8_be(str)
+local function utf32_to_utf8_be(t)
if type(t) == "string" then
- t = utfsplitlines(str)
+ t = utfsplitlines(t)
end
local result = { } -- we reuse result
for i=1,#t do
local r, more = 0, -1
- for a,b in bytepairs(str) do
+ for a,b in bytepairs(t[i]) do
if a and b then
if more < 0 then
more = 256*256*256*a + 256*256*b
@@ -3612,17 +3925,17 @@ local function utf32_to_utf8_be(str)
end
t[i] = concat(result,"",1,r)
end
- return result
+ return t
end
-local function utf32_to_utf8_le(str)
+local function utf32_to_utf8_le(t)
if type(t) == "string" then
- t = utfsplitlines(str)
+ t = utfsplitlines(t)
end
local result = { } -- we reuse result
for i=1,#t do
local r, more = 0, -1
- for a,b in bytepairs(str) do
+ for a,b in bytepairs(t[i]) do
if a and b then
if more < 0 then
more = 256*b + a
@@ -3637,7 +3950,7 @@ local function utf32_to_utf8_le(str)
end
t[i] = concat(result,"",1,r)
end
- return result
+ return t
end
unicode.utf32_to_utf8_be = utf32_to_utf8_be
@@ -3706,15 +4019,67 @@ end
local lpegmatch = lpeg.match
-local utftype = lpeg.patterns.utftype
+local patterns = lpeg.patterns
+local utftype = patterns.utftype
function unicode.filetype(data)
return data and lpegmatch(utftype,data) or "unknown"
end
+local toentities = lpeg.Cs (
+ (
+ patterns.utf8one
+ + (
+ patterns.utf8two
+ + patterns.utf8three
+ + patterns.utf8four
+ ) / function(s) local b = utfbyte(s) if b < 127 then return s else return format("&#%X;",b) end end
+ )^0
+)
+
+patterns.toentities = toentities
+
+function utf.toentities(str)
+ return lpegmatch(toentities,str)
+end
+
+local P, C, R, Cs = lpeg.P, lpeg.C, lpeg.R, lpeg.Cs
+
+local one = P(1)
+local two = C(1) * C(1)
+local four = C(R(utfchar(0xD8),utfchar(0xFF))) * C(1) * C(1) * C(1)
+
+local pattern = P("\254\255") * Cs( (
+ four / function(a,b,c,d)
+        local ab = 0x100 * byte(a) + byte(b) -- 256*high + low
+        local cd = 0x100 * byte(c) + byte(d)
+ return utfchar((ab-0xD800)*0x400 + (cd-0xDC00) + 0x10000)
+ end
+ + two / function(a,b)
+ return utfchar(byte(a)*256 + byte(b))
+ end
+ + one
+ )^1 )
+ + P("\255\254") * Cs( (
+ four / function(b,a,d,c)
+        local ab = 0x100 * byte(a) + byte(b) -- 256*high + low
+        local cd = 0x100 * byte(c) + byte(d)
+ return utfchar((ab-0xD800)*0x400 + (cd-0xDC00) + 0x10000)
+ end
+ + two / function(b,a)
+ return utfchar(byte(a)*256 + byte(b))
+ end
+ + one
+ )^1 )
+
+function string.toutf(s)
+ return lpegmatch(pattern,s) or s -- todo: utf32
+end
+
+
end -- of closure
do -- create closure to overcome 200 locals limit
@@ -3750,8 +4115,8 @@ if not math.sind then
end
if not math.odd then
- function math.odd (n) return n % 2 == 0 end
- function math.even(n) return n % 2 ~= 0 end
+ function math.odd (n) return n % 2 ~= 0 end
+ function math.even(n) return n % 2 == 0 end
end
@@ -3771,9 +4136,10 @@ utilities = utilities or {}
utilities.tables = utilities.tables or { }
local tables = utilities.tables
-local format, gmatch = string.format, string.gmatch
+local format, gmatch, rep = string.format, string.gmatch, string.rep
local concat, insert, remove = table.concat, table.insert, table.remove
local setmetatable, getmetatable, tonumber, tostring = setmetatable, getmetatable, tonumber, tostring
+local type, next, rawset, tonumber = type, next, rawset, tonumber
function tables.definetable(target) -- defines undefined tables
local composed, t, n = nil, { }, 0
@@ -3789,14 +4155,31 @@ function tables.definetable(target) -- defines undefined tables
return concat(t,"\n")
end
-function tables.accesstable(target)
- local t = _G
+function tables.accesstable(target,root)
+ local t = root or _G
for name in gmatch(target,"([^%.]+)") do
t = t[name]
+ if not t then
+ return
+ end
end
return t
end
+function tables.migratetable(target,v,root)
+ local t = root or _G
+ local names = string.split(target,".")
+ for i=1,#names-1 do
+ local name = names[i]
+ t[name] = t[name] or { }
+ t = t[name]
+ if not t then
+ return
+ end
+ end
+ t[names[#names]] = v
+end
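+-- for example (sketch, with t some table passed as root):
+--
+-- tables.migratetable("a.b.c",123,t) -- creates t.a.b if needed and sets t.a.b.c = 123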
+
function tables.removevalue(t,value) -- todo: n
if value then
for i=1,#t do
@@ -3840,13 +4223,19 @@ end
-- experimental
-local function toxml(t,d,result)
+local function toxml(t,d,result,step)
for k, v in table.sortedpairs(t) do
if type(v) == "table" then
- result[#result+1] = format("%s<%s>",d,k)
- toxml(v,d.." ",result)
- result[#result+1] = format("%s</%s>",d,k)
- elseif tonumber(k) then
+ if type(k) == "number" then
+ result[#result+1] = format("%s<entry n='%s'>",d,k)
+ toxml(v,d..step,result,step)
+ result[#result+1] = format("%s</entry>",d,k)
+ else
+ result[#result+1] = format("%s<%s>",d,k)
+ toxml(v,d..step,result,step)
+ result[#result+1] = format("%s</%s>",d,k)
+ end
+ elseif type(k) == "number" then
result[#result+1] = format("%s<entry n='%s'>%s</entry>",d,k,v,k)
else
result[#result+1] = format("%s<%s>%s</%s>",d,k,tostring(v),k)
@@ -3854,17 +4243,56 @@ local function toxml(t,d,result)
end
end
-function table.toxml(t,name,nobanner)
+function table.toxml(t,name,nobanner,indent,spaces)
local noroot = name == false
local result = (nobanner or noroot) and { } or { "<?xml version='1.0' standalone='yes' ?>" }
+ local indent = rep(" ",indent or 0)
+ local spaces = rep(" ",spaces or 1)
if noroot then
- toxml( t, "", result)
+        toxml( t, indent, result, spaces)
else
- toxml( { [name or "root"] = t }, "", result)
+ toxml( { [name or "root"] = t }, indent, result, spaces)
end
return concat(result,"\n")
end
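+-- an illustrative call (sketch, banner suppressed):
+--
+-- table.toxml({ b = "x" },"a",true) -- => "<a>\n <b>x</b>\n</a>"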
+-- also experimental
+
+-- encapsulate(table,utilities.tables)
+-- encapsulate(table,utilities.tables,true)
+-- encapsulate(table,true)
+
+function tables.encapsulate(core,capsule,protect)
+ if type(capsule) ~= "table" then
+ protect = true
+ capsule = { }
+ end
+ for key, value in next, core do
+ if capsule[key] then
+            print(format("\ninvalid inheritance '%s' in '%s'",key,tostring(core)))
+ os.exit()
+ else
+ capsule[key] = value
+ end
+ end
+ if protect then
+ for key, value in next, core do
+ core[key] = nil
+ end
+ setmetatable(core, {
+ __index = capsule,
+ __newindex = function(t,key,value)
+ if capsule[key] then
+ print(format("\ninvalid overload '%s' in '%s'",key,tostring(core)))
+ os.exit()
+ else
+ rawset(t,key,value)
+ end
+ end
+ } )
+ end
+end
+
end -- of closure
@@ -3886,8 +4314,8 @@ local storage = utilities.storage
function storage.mark(t)
if not t then
- texio.write_nl("fatal error: storage '%s' cannot be marked",t)
- os.exit()
+ texio.write_nl("fatal error: storage cannot be marked")
+ return -- os.exit()
end
local m = getmetatable(t)
if not m then
@@ -3916,8 +4344,8 @@ end
function storage.checked(t)
if not t then
- texio.write_nl("fatal error: storage '%s' has not been allocated",t)
- os.exit()
+ texio.write_nl("fatal error: storage has not been allocated")
+ return -- os.exit()
end
return t
end
@@ -3946,13 +4374,13 @@ end
-- table namespace ?
-local function f_empty () return "" end -- t,k
-local function f_self (t,k) t[k] = k return k end
-local function f_ignore() end -- t,k,v
+local function f_empty () return "" end -- t,k
+local function f_self (t,k) t[k] = k return k end
+local function f_ignore() end -- t,k,v
-local t_empty = { __index = empty }
-local t_self = { __index = self }
-local t_ignore = { __newindex = ignore }
+local t_empty = { __index = f_empty }
+local t_self = { __index = f_self }
+local t_ignore = { __newindex = f_ignore }
function table.setmetatableindex(t,f)
local m = getmetatable(t)
@@ -3973,6 +4401,7 @@ function table.setmetatableindex(t,f)
setmetatable(t,{ __index = f })
end
end
+ return t
end
function table.setmetatablenewindex(t,f)
@@ -3990,6 +4419,7 @@ function table.setmetatablenewindex(t,f)
setmetatable(t,{ __newindex = f })
end
end
+ return t
end
function table.setmetatablecall(t,f)
@@ -3999,6 +4429,7 @@ function table.setmetatablecall(t,f)
else
setmetatable(t,{ __call = f })
end
+ return t
end
function table.setmetatablekey(t,key,value)
@@ -4008,6 +4439,7 @@ function table.setmetatablekey(t,key,value)
setmetatable(t,m)
end
m[key] = value
+ return t
end
function table.getmetatablekey(t,key,value)
@@ -4176,14 +4608,29 @@ utilities = utilities or {}
utilities.lua = utilities.lua or { }
utilities.report = logs and logs.reporter("system") or print
-function utilities.lua.compile(luafile,lucfile,cleanup,strip) -- defaults: cleanup=false strip=true
+local function stupidcompile(luafile,lucfile)
+    local data = io.loaddata(luafile)
+    if data and data ~= "" then
+        local chunk = loadstring(data) -- string.dump needs a function, not the source text
+        data = chunk and string.dump(chunk)
+        if data and data ~= "" then
+            io.savedata(lucfile,data)
+        end
+    end
+end
+
+function utilities.lua.compile(luafile,lucfile,cleanup,strip,fallback) -- defaults: cleanup=false strip=true
utilities.report("lua: compiling %s into %s",luafile,lucfile)
os.remove(lucfile)
local command = "-o " .. string.quoted(lucfile) .. " " .. string.quoted(luafile)
if strip ~= false then
command = "-s " .. command
end
- local done = os.spawn("texluac " .. command) == 0 or os.spawn("luac " .. command) == 0
+ local done = os.spawn("texluac " .. command) == 0 -- or os.spawn("luac " .. command) == 0
+ if not done and fallback then
+ utilities.report("lua: dumping %s into %s (unstripped)",luafile,lucfile)
+ stupidcompile(luafile,lucfile) -- maybe use the stripper we have elsewhere
+ cleanup = false -- better see how worse it is
+ end
if done and cleanup == true and lfs.isfile(lucfile) and lfs.isfile(luafile) then
utilities.report("lua: removing %s",luafile)
os.remove(luafile)
@@ -4210,9 +4657,9 @@ if not modules then modules = { } end modules ['util-prs'] = {
license = "see context related readme files"
}
-local P, R, V, C, Ct, Carg = lpeg.P, lpeg.R, lpeg.V, lpeg.C, lpeg.Ct, lpeg.Carg
+local P, R, V, C, Ct, Cs, Carg = lpeg.P, lpeg.R, lpeg.V, lpeg.C, lpeg.Ct, lpeg.Cs, lpeg.Carg
local lpegmatch = lpeg.match
-local concat, format, gmatch = table.concat, string.format, string.gmatch
+local concat, format, gmatch, find = table.concat, string.format, string.gmatch, string.find
local tostring, type, next = tostring, type, next
utilities = utilities or {}
@@ -4240,8 +4687,12 @@ local rbrace = P("}")
local nobrace = 1 - (lbrace+rbrace)
local nested = P { lbrace * (nobrace + V(1))^0 * rbrace }
local spaces = space^0
+local argument = Cs((lbrace/"") * ((nobrace + nested)^0) * (rbrace/""))
+local content = (1-P(-1))^0
-lpeg.patterns.nested = nested
+lpeg.patterns.nested = nested -- no capture
+lpeg.patterns.argument = argument -- argument after e.g. =
+lpeg.patterns.content = content -- rest after e.g =
local value = P(lbrace * C((nobrace + nested)^0) * rbrace) + C((nested + (1-comma))^0)
@@ -4322,9 +4773,15 @@ parsers.patterns.settings_to_array = pattern
-- we could use a weak table as cache
-function parsers.settings_to_array(str)
+function parsers.settings_to_array(str,strict)
if not str or str == "" then
return { }
+ elseif strict then
+ if find(str,"{") then
+ return lpegmatch(pattern,str)
+ else
+ return { str }
+ end
else
return lpegmatch(pattern,str)
end
@@ -4562,61 +5019,55 @@ local names = { }
-- one
local function hook()
- local f = getinfo(2,"f").func
- local n = getinfo(2,"Sn")
--- if n.what == "C" and n.name then print (n.namewhat .. ': ' .. n.name) end
+ local f = getinfo(2) -- "nS"
if f then
- local cf = counters[f]
- if cf == nil then
- counters[f] = 1
- names[f] = n
- else
- counters[f] = cf + 1
- end
- end
-end
-
-local function getname(func)
- local n = names[func]
- if n then
- if n.what == "C" then
- return n.name or '<anonymous>'
+ local n = "unknown"
+ if f.what == "C" then
+ n = f.name or '<anonymous>'
+ if not names[n] then
+ names[n] = format("%42s",n)
+ end
else
-- source short_src linedefined what name namewhat nups func
- local name = n.name or n.namewhat or n.what
- if not name or name == "" then name = "?" end
- return format("%s : %s : %s", n.short_src or "unknown source", n.linedefined or "--", name)
+ n = f.name or f.namewhat or f.what
+ if not n or n == "" then
+ n = "?"
+ end
+ if not names[n] then
+ names[n] = format("%42s : % 5i : %s",n,f.linedefined or 0,f.short_src or "unknown source")
+ end
end
- else
- return "unknown"
+ counters[n] = (counters[n] or 0) + 1
end
end
-function debugger.showstats(printer,threshold)
+function debugger.showstats(printer,threshold) -- hm, something has changed, rubbish now
printer = printer or texio.write or print
threshold = threshold or 0
local total, grandtotal, functions = 0, 0, 0
- printer("\n") -- ugly but ok
- -- table.sort(counters)
- for func, count in next, counters do
- if count > threshold then
- local name = getname(func)
- if not find(name,"for generator") then
- printer(format("%8i %s", count, name))
- total = total + count
- end
+ local dataset = { }
+ for name, count in next, counters do
+ dataset[#dataset+1] = { name, count }
+ end
+ table.sort(dataset,function(a,b) return a[2] == b[2] and b[1] > a[1] or a[2] > b[2] end)
+ for i=1,#dataset do
+ local d = dataset[i]
+ local name = d[1]
+ local count = d[2]
+ if count > threshold and not find(name,"for generator") then -- move up
+ printer(format("%8i %s\n", count, names[name]))
+ total = total + count
end
grandtotal = grandtotal + count
functions = functions + 1
end
- printer(format("functions: %s, total: %s, grand total: %s, threshold: %s\n", functions, total, grandtotal, threshold))
+ printer("\n")
+ printer(format("functions : % 10i\n", functions))
+ printer(format("total : % 10i\n", total))
+ printer(format("grand total: % 10i\n", grandtotal))
+ printer(format("threshold : % 10i\n", threshold))
end
--- two
-
-
--- rest
-
function debugger.savestats(filename,threshold)
local f = io.open(filename,'w')
if f then
@@ -4638,6 +5089,7 @@ end
local is_node = node and node.is_node
+local is_lpeg = lpeg and lpeg.type
function inspect(i) -- global function
local ti = type(i)
@@ -4645,11 +5097,30 @@ function inspect(i) -- global function
table.print(i,"table")
elseif is_node and is_node(i) then
table.print(nodes.astable(i),tostring(i))
+ elseif is_lpeg and is_lpeg(i) then
+ lpeg.print(i)
else
print(tostring(i))
end
end
+-- from the lua book:
+
+function traceback()
+ local level = 1
+ while true do
+ local info = debug.getinfo(level, "Sl")
+ if not info then
+ break
+ elseif info.what == "C" then
+ print(format("%3i : C function",level))
+ else
+ print(format("%3i : [%s]:%d",level,info.short_src,info.currentline))
+ end
+ level = level + 1
+ end
+end
+
end -- of closure
@@ -4668,7 +5139,7 @@ if not modules then modules = { } end modules ['trac-inf'] = {
-- get warnings about assignments. This is more efficient than using rawset
-- and rawget.
-local format = string.format
+local format, lower = string.format, string.lower
local clock = os.gettimeofday or os.clock -- should go in environment
local write_nl = texio.write_nl
@@ -4770,10 +5241,10 @@ function statistics.show(reporter)
-- this code will move
local register = statistics.register
register("luatex banner", function()
- return string.lower(status.banner)
+ return lower(status.banner)
end)
register("control sequences", function()
- return format("%s of %s", status.cs_count, status.hash_size+status.hash_extra)
+ return format("%s of %s + %s", status.cs_count, status.hash_size,status.hash_extra)
end)
register("callbacks", function()
local total, indirect = status.callbacks or 0, status.indirect_callbacks or 0
@@ -4847,7 +5318,7 @@ end
function commands.elapsedtime(name)
stoptiming(name or "whatever")
- tex.sprint(elapsedtime(name or "whatever"))
+ context(elapsedtime(name or "whatever"))
end
@@ -4931,6 +5402,10 @@ local function set(t,what,newvalue)
if type(what) ~= "table" then
return
end
+ if not done then -- catch ... why not set?
+ done = { }
+ t.done = done
+ end
for w, value in next, what do
if value == "" then
value = newvalue
@@ -5167,6 +5642,8 @@ end
if texconfig then
+ -- this happens too late in ini mode but that is no problem
+
local function set(k,v)
v = tonumber(v)
if v then
@@ -5237,7 +5714,7 @@ local report, subreport, status, settarget, setformats, settranslations
local direct, subdirect, writer, pushtarget, poptarget, newline
-if tex and tex.jobname or tex.formatname then
+if tex and (tex.jobname or tex.formatname) then
local valueiskey = { __index = function(t,k) t[k] = k return k end } -- will be helper
@@ -5252,6 +5729,10 @@ if tex and tex.jobname or tex.formatname then
write_nl(target,...)
end
+ newline = function()
+ write_nl(target,"\n")
+ end
+
report = function(a,b,c,...)
if c then
write_nl(target,format("%-15s > %s\n",translations[a],format(formats[b],c,...)))
@@ -5357,6 +5838,10 @@ else
writer = write_nl
+ newline = function()
+ write_nl("\n")
+ end
+
report = function(a,b,c,...)
if c then
write_nl(format("%-15s | %s",a,format(b,c,...)))
@@ -5416,6 +5901,7 @@ logs.settranslations = settranslations
logs.direct = direct
logs.subdirect = subdirect
logs.writer = writer
+logs.newline = newline
-- installer
@@ -5585,21 +6071,58 @@ local real, user, sub
function logs.start_page_number()
real, user, sub = texcount.realpageno, texcount.userpageno, texcount.subpageno
+-- real, user, sub = 0, 0, 0
end
-function logs.stop_page_number()
- if real > 0 then
- if user > 0 then
- if sub > 0 then
- report_pages("flushing realpage %s, userpage %s, subpage %s",real,user,sub)
+local timing = false
+local starttime = nil
+local lasttime = nil
+
+trackers.register("pages.timing", function(v) -- only for myself (diagnostics)
+ starttime = os.clock()
+ timing = true
+end)
+
+function logs.stop_page_number() -- the first page can include the initialization so we omit it from the average
+ if timing then
+ local elapsed, average
+ local stoptime = os.clock()
+ if not lasttime or real < 2 then
+ elapsed = stoptime
+ average = stoptime
+ starttime = stoptime
+ else
+ elapsed = stoptime - lasttime
+ average = (stoptime - starttime) / (real - 1)
+ end
+ lasttime = stoptime
+ if real > 0 then
+ if user > 0 then
+ if sub > 0 then
+ report_pages("flushing realpage %s, userpage %s, subpage %s, time %0.04f / %0.04f",real,user,sub,elapsed,average)
+ else
+ report_pages("flushing realpage %s, userpage %s, time %0.04f / %0.04f",real,user,elapsed,average)
+ end
else
- report_pages("flushing realpage %s, userpage %s",real,user)
+ report_pages("flushing realpage %s, time %0.04f / %0.04f",real,elapsed,average)
end
else
- report_pages("flushing realpage %s",real)
+ report_pages("flushing page, time %0.04f / %0.04f",elapsed,average)
end
else
- report_pages("flushing page")
+ if real > 0 then
+ if user > 0 then
+ if sub > 0 then
+ report_pages("flushing realpage %s, userpage %s, subpage %s",real,user,sub)
+ else
+ report_pages("flushing realpage %s, userpage %s",real,user)
+ end
+ else
+ report_pages("flushing realpage %s",real)
+ end
+ else
+ report_pages("flushing page")
+ end
end
logs.flush()
end
@@ -5681,6 +6204,13 @@ local function reportbanner(t)
end
end
+local function reportversion(t)
+ local banner = t.banner
+ if banner then
+ t.report(banner)
+ end
+end
+
local function reporthelp(t,...)
local helpinfo = t.helpinfo
if type(helpinfo) == "string" then
@@ -5707,6 +6237,7 @@ function logs.application(t)
t.report = logs.reporter(t.name)
t.help = function(...) reportbanner(t) ; reporthelp(t,...) ; reportinfo(t) end
t.identify = function() reportbanner(t) end
+ t.version = function() reportversion(t) end
return t
end
@@ -5772,6 +6303,11 @@ else
end
end
+-- do we still need io.flush then?
+
+io.stdout:setvbuf('no')
+io.stderr:setvbuf('no')
+
end -- of closure
@@ -6277,6 +6813,10 @@ if not modules then modules = { } end modules ['lxml-tab'] = {
-- stripping spaces from e.g. cont-en.xml saves .2 sec runtime so it's not worth the
-- trouble
+-- todo: when serializing optionally remap named entities to hex (if known in char-ent.lua)
+-- maybe when letter -> utf, else name .. then we need an option to the serializer .. a bit
+-- of work so we delay this till we clean up
+
local trace_entities = false trackers.register("xml.entities", function(v) trace_entities = v end)
local report_xml = logs and logs.reporter("xml","core") or function(...) print(format(...)) end
@@ -6301,10 +6841,11 @@ xml = xml or { }
local xml = xml
+local utf = unicode.utf8
local concat, remove, insert = table.concat, table.remove, table.insert
local type, next, setmetatable, getmetatable, tonumber = type, next, setmetatable, getmetatable, tonumber
local format, lower, find, match, gsub = string.format, string.lower, string.find, string.match, string.gsub
-local utfchar = unicode.utf8.char
+local utfchar, utffind, utfgsub = utf.char, utf.find, utf.gsub
local lpegmatch = lpeg.match
local P, S, R, C, V, C, Cs = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.V, lpeg.C, lpeg.Cs
@@ -6417,9 +6958,22 @@ element.</p>
local nsremap, resolvens = xml.xmlns, xml.resolvens
-local stack, top, dt, at, xmlns, errorstr, entities = { }, { }, { }, { }, { }, nil, { }
-local strip, cleanup, utfize, resolve, resolve_predefined, unify_predefined = false, false, false, false, false, false
-local dcache, hcache, acache = { }, { }, { }
+local stack = { }
+local top = { }
+local dt = { }
+local at = { }
+local xmlns = { }
+local errorstr = nil
+local entities = { }
+local strip = false
+local cleanup = false
+local utfize = false
+local resolve = false
+local resolve_predefined = false
+local unify_predefined = false
+
+local dcache = { }
+local hcache = { }
+local acache = { }
local mt = { }
@@ -6585,7 +7139,7 @@ local predefined_unified = {
[42] = "&quot;",
[47] = "&apos;",
[74] = "&lt;",
- [76] = "&gr;",
+ [76] = "&gt;",
}
local predefined_simplified = {
@@ -6596,6 +7150,58 @@ local predefined_simplified = {
[76] = ">", gt = ">",
}
+local nofprivates = 0xF0000 -- shared but seldom used
+
+local privates_u = { -- unescaped
+ [ [[&]] ] = "&amp;",
+ [ [["]] ] = "&quot;",
+ [ [[']] ] = "&apos;",
+ [ [[<]] ] = "&lt;",
+ [ [[>]] ] = "&gt;",
+}
+
+local privates_p = {
+}
+
+local privates_n = {
+ -- keeps track of defined ones
+}
+
+local function escaped(s)
+ if s == "" then
+ return ""
+ else -- if utffind(s,privates_u) then
+ return (utfgsub(s,".",privates_u))
+ -- else
+ -- return s
+ end
+end
+
+local function unescaped(s)
+ local p = privates_n[s]
+ if not p then
+ nofprivates = nofprivates + 1
+ p = utfchar(nofprivates)
+ privates_n[s] = p
+ s = "&" .. s .. ";" -- todo: use char-ent to map to hex
+ privates_u[p] = s
+ privates_p[p] = s
+ end
+ return p
+end
+
+local function unprivatized(s,resolve)
+ if s == "" then
+ return ""
+ else
+ return (utfgsub(s,".",privates_p))
+ end
+end
+
+xml.privatetoken = unescaped
+xml.unprivatized = unprivatized
+xml.privatecodes = privates_n
+
local function handle_hex_entity(str)
local h = hcache[str]
if not h then
@@ -6637,7 +7243,7 @@ local function handle_dec_entity(str)
if not n then
report_xml("utfize, ignoring dec entity &#%s;",str)
elseif trace_entities then
- report_xml("utfize, converting dec entity &#%s; into %s",str,h)
+ report_xml("utfize, converting dec entity &#%s; into %s",str,d)
end
else
if trace_entities then
@@ -6658,34 +7264,44 @@ local function handle_any_entity(str)
if not a then
a = resolve_predefined and predefined_simplified[str]
if a then
- -- one of the predefined
- elseif type(resolve) == "function" then
- a = resolve(str) or entities[str]
- else
- a = entities[str]
- end
- if a then
if trace_entities then
- report_xml("resolved entity &%s; -> %s (internal)",str,a)
+ report_xml("resolved entity &%s; -> %s (predefined)",str,a)
end
- a = lpegmatch(parsedentity,a) or a
else
- local unknown_any_entity = placeholders.unknown_any_entity
- if unknown_any_entity then
- a = unknown_any_entity(str) or ""
+ if type(resolve) == "function" then
+ a = resolve(str) or entities[str]
+ else
+ a = entities[str]
end
if a then
+ if type(a) == "function" then
+ if trace_entities then
+ report_xml("expanding entity &%s; (function)",str)
+ end
+ a = a(str) or ""
+ end
+ a = lpegmatch(parsedentity,a) or a -- for nested
if trace_entities then
- report_xml("resolved entity &%s; -> %s (external)",str,a)
+ report_xml("resolved entity &%s; -> %s (internal)",str,a)
end
else
- if trace_entities then
- report_xml("keeping entity &%s;",str)
+ local unknown_any_entity = placeholders.unknown_any_entity
+ if unknown_any_entity then
+ a = unknown_any_entity(str) or ""
end
- if str == "" then
- a = "&error;"
+ if a then
+ if trace_entities then
+ report_xml("resolved entity &%s; -> %s (external)",str,a)
+ end
else
- a = "&" .. str .. ";"
+ if trace_entities then
+ report_xml("keeping entity &%s;",str)
+ end
+ if str == "" then
+ a = "&error;"
+ else
+ a = "&" .. str .. ";"
+ end
end
end
end
@@ -6700,18 +7316,25 @@ local function handle_any_entity(str)
else
local a = acache[str]
if not a then
- if trace_entities then
- report_xml("found entity &%s;",str)
- end
a = resolve_predefined and predefined_simplified[str]
if a then
-- one of the predefined
acache[str] = a
+ if trace_entities then
+ report_xml("entity &%s; becomes %s",str,tostring(a))
+ end
elseif str == "" then
+ if trace_entities then
+ report_xml("invalid entity &%s;",str)
+ end
a = "&error;"
acache[str] = a
else
- a = "&" .. str .. ";"
+ if trace_entities then
+ report_xml("entity &%s; is made private",str)
+ end
+ -- a = "&" .. str .. ";"
+ a = unescaped(str)
acache[str] = a
end
end
@@ -6761,7 +7384,7 @@ local value = (squote * Cs((entity + (1 - squote))^0) * squote) + (dq
local endofattributes = slash * close + close -- recovery of flaky html
local whatever = space * name * optionalspace * equal
-local wrongvalue = C(P(1-whatever-close)^1 + P(1-close)^1) / attribute_value_error
+----- wrongvalue = C(P(1-whatever-close)^1 + P(1-close)^1) / attribute_value_error
----- wrongvalue = C(P(1-whatever-endofattributes)^1 + P(1-endofattributes)^1) / attribute_value_error
----- wrongvalue = C(P(1-space-endofattributes)^1) / attribute_value_error
local wrongvalue = Cs(P(entity + (1-space-endofattributes))^1) / attribute_value_error
@@ -6796,6 +7419,8 @@ local function normalentity(k,v ) entities[k] = v end
local function systementity(k,v,n) entities[k] = v end
local function publicentity(k,v,n) entities[k] = v end
+-- todo: separate dtd parser
+
local begindoctype = open * P("!DOCTYPE")
local enddoctype = close
local beginset = P("[")
@@ -6803,17 +7428,22 @@ local endset = P("]")
local doctypename = C((1-somespace-close)^0)
local elementdoctype = optionalspace * P("<!ELEMENT") * (1-close)^0 * close
+local basiccomment = begincomment * ((1 - endcomment)^0) * endcomment
+
local normalentitytype = (doctypename * somespace * value)/normalentity
local publicentitytype = (doctypename * somespace * P("PUBLIC") * somespace * value)/publicentity
local systementitytype = (doctypename * somespace * P("SYSTEM") * somespace * value * somespace * P("NDATA") * somespace * doctypename)/systementity
local entitydoctype = optionalspace * P("<!ENTITY") * somespace * (systementitytype + publicentitytype + normalentitytype) * optionalspace * close
-local doctypeset = beginset * optionalspace * P(elementdoctype + entitydoctype + space)^0 * optionalspace * endset
+-- we accept comments in doctypes
+
+local doctypeset = beginset * optionalspace * P(elementdoctype + entitydoctype + basiccomment + space)^0 * optionalspace * endset
local definitiondoctype= doctypename * somespace * doctypeset
local publicdoctype = doctypename * somespace * P("PUBLIC") * somespace * value * somespace * value * somespace * doctypeset
local systemdoctype = doctypename * somespace * P("SYSTEM") * somespace * value * somespace * doctypeset
local simpledoctype = (1-close)^1 -- * balanced^0
local somedoctype = C((somespace * (publicdoctype + systemdoctype + definitiondoctype + simpledoctype) * optionalspace)^0)
local instruction = (spacing * begininstruction * someinstruction * endinstruction) / function(...) add_special("@pi@",...) end
local comment = (spacing * begincomment * somecomment * endcomment ) / function(...) add_special("@cm@",...) end
@@ -6845,17 +7475,30 @@ local grammar_unparsed_text = P { "preamble",
children = unparsedtext + V("parent") + emptyelement + comment + cdata + instruction,
}
--- maybe we will add settinsg to result as well
+-- maybe we will add settings to result as well
-local function xmlconvert(data, settings)
- settings = settings or { } -- no_root strip_cm_and_dt given_entities parent_root error_handler
- strip = settings.strip_cm_and_dt
- utfize = settings.utfize_entities
- resolve = settings.resolve_entities
+local function _xmlconvert_(data, settings)
+ settings = settings or { } -- no_root strip_cm_and_dt given_entities parent_root error_handler
+ --
+ strip = settings.strip_cm_and_dt
+ utfize = settings.utfize_entities
+ resolve = settings.resolve_entities
resolve_predefined = settings.resolve_predefined_entities -- in case we have escaped entities
- unify_predefined = settings.unify_predefined_entities -- &#038; -> &amp;
- cleanup = settings.text_cleanup
- stack, top, at, xmlns, errorstr, entities = { }, { }, { }, { }, nil, settings.entities or { }
+ unify_predefined = settings.unify_predefined_entities -- &#038; -> &amp;
+ cleanup = settings.text_cleanup
+ entities = settings.entities or { }
+ --
+ if utfize == nil then
+ settings.utfize_entities = true
+ utfize = true
+ end
+ if resolve_predefined == nil then
+ settings.resolve_predefined_entities = true
+ resolve_predefined = true
+ end
+ --
+ --
+ stack, top, at, xmlns, errorstr = { }, { }, { }, { }, nil
acache, hcache, dcache = { }, { }, { } -- not stored
reported_attribute_errors = { }
if settings.parent_root then
@@ -6893,7 +7536,7 @@ local function xmlconvert(data, settings)
else
errorhandler = errorhandler or xml.errorhandler
if errorhandler then
- xml.errorhandler("load",errorstr)
+ xml.errorhandler(format("load error: %s",errorstr))
end
end
else
@@ -6907,7 +7550,7 @@ local function xmlconvert(data, settings)
local v = rdt[k]
if type(v) == "table" and not v.special then -- always table -)
result.ri = k -- rootindex
-v.__p__ = result -- new, experiment, else we cannot go back to settings, we need to test this !
+ v.__p__ = result -- new, experiment, else we cannot go back to settings, we need to test this !
break
end
end
@@ -6915,16 +7558,42 @@ v.__p__ = result -- new, experiment, else we cannot go back to settings, we nee
if errorstr and errorstr ~= "" then
result.error = true
end
+ result.statistics = {
+ entities = {
+ decimals = dcache,
+ hexadecimals = hcache,
+ names = acache,
+ }
+ }
+ strip, utfize, resolve, resolve_predefined = nil, nil, nil, nil
+ unify_predefined, cleanup, entities = nil, nil, nil
+ stack, top, at, xmlns, errorstr = nil, nil, nil, nil, nil
+ acache, hcache, dcache = nil, nil, nil
+ reported_attribute_errors, mt, errorhandler = nil, nil, nil
return result
end
+-- Because we can have a crash (stack issues) with faulty xml, we wrap this one
+-- in a protector:
+
+function xmlconvert(data,settings)
+ local ok, result = pcall(function() return _xmlconvert_(data,settings) end)
+ if ok then
+ return result
+ else
+ return _xmlconvert_("")
+ end
+end
+
xml.convert = xmlconvert
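-- A small sketch of what the pcall protector buys us: a malformed document no
-- longer raises from deep inside lpeg, it yields a dummy root (converted from
-- the empty string) or one with the error flag set, so the usual validity test
-- keeps working:
--
-- local root = xml.convert("<a><b></a>") -- deliberately unbalanced
-- if not xml.is_valid(root) then
--     print("conversion failed, carrying on with an empty root")
-- end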
-function xml.inheritedconvert(data,xmldata)
+function xml.inheritedconvert(data,xmldata) -- xmldata is parent
local settings = xmldata.settings
- settings.parent_root = xmldata -- to be tested
+ if settings then
+ settings.parent_root = xmldata -- to be tested
+ end
-- settings.no_root = true
- local xc = xmlconvert(data,settings)
+ local xc = xmlconvert(data,settings) -- hm, we might need to locate settings
-- xc.settings = nil
-- xc.entities = nil
-- xc.special = nil
@@ -6953,7 +7622,7 @@ function xml.is_valid(root)
return root and not root.error
end
-xml.errorhandler = report
+xml.errorhandler = report_xml
--[[ldx--
<p>We cannot load an <l n='lpeg'/> from a filehandle so we need to load
@@ -7032,18 +7701,15 @@ alternative.</p>
function xml.checkbom(root) -- can be made faster
if root.ri then
- local dt, found = root.dt, false
+ local dt = root.dt
for k=1,#dt do
local v = dt[k]
if type(v) == "table" and v.special and v.tg == "@pi@" and find(v.dt[1],"xml.*version=") then
- found = true
- break
+ return
end
end
- if not found then
- insert(dt, 1, { special=true, ns="", tg="@pi@", dt = { "xml version='1.0' standalone='yes'"} } )
- insert(dt, 2, "\n" )
- end
+ insert(dt, 1, { special=true, ns="", tg="@pi@", dt = { "xml version='1.0' standalone='yes'"} } )
+ insert(dt, 2, "\n" )
end
end
@@ -7054,14 +7720,14 @@ and then handle the lot.</p>
-- new experimental reorganized serialize
-local function verbose_element(e,handlers)
+local function verbose_element(e,handlers) -- options
local handle = handlers.handle
local serialize = handlers.serialize
local ens, etg, eat, edt, ern = e.ns, e.tg, e.at, e.dt, e.rn
local ats = eat and next(eat) and { }
if ats then
for k,v in next, eat do
- ats[#ats+1] = format('%s=%q',k,v)
+ ats[#ats+1] = format('%s=%q',k,escaped(v))
end
end
if ern and trace_entities and ern ~= ens then
@@ -7077,7 +7743,7 @@ local function verbose_element(e,handlers)
for i=1,#edt do
local e = edt[i]
if type(e) == "string" then
- handle(e)
+ handle(escaped(e))
else
serialize(e,handlers)
end
@@ -7098,11 +7764,11 @@ local function verbose_element(e,handlers)
handle("<",etg,">")
end
for i=1,#edt do
- local ei = edt[i]
- if type(ei) == "string" then
- handle(ei)
+ local e = edt[i]
+ if type(e) == "string" then
+ handle(escaped(e)) -- option: hexify escaped entities
else
- serialize(ei,handlers)
+ serialize(e,handlers)
end
end
handle("</",etg,">")
@@ -7137,7 +7803,7 @@ local function verbose_root(e,handlers)
end
local function verbose_text(e,handlers)
- handlers.handle(e)
+ handlers.handle(escaped(e))
end
local function verbose_document(e,handlers)
@@ -7265,20 +7931,33 @@ local result
local xmlfilehandler = newhandlers {
name = "file",
- initialize = function(name) result = io.open(name,"wb") return result end,
- finalize = function() result:close() return true end,
- handle = function(...) result:write(...) end,
+ initialize = function(name)
+ result = io.open(name,"wb")
+ return result
+ end,
+ finalize = function()
+ result:close()
+ return true
+ end,
+ handle = function(...)
+ result:write(...)
+ end,
}
-- no checking on writeability here but not faster either
--
-- local xmlfilehandler = newhandlers {
--- initialize = function(name) io.output(name,"wb") return true end,
--- finalize = function() io.close() return true end,
+-- initialize = function(name)
+-- io.output(name,"wb")
+-- return true
+-- end,
+-- finalize = function()
+-- io.close()
+-- return true
+-- end,
-- handle = io.write,
-- }
-
function xml.save(root,name)
serialize(root,xmlfilehandler,name)
end
@@ -7287,28 +7966,34 @@ local result
local xmlstringhandler = newhandlers {
name = "string",
- initialize = function() result = { } return result end,
- finalize = function() return concat(result) end,
- handle = function(...) result[#result+1] = concat { ... } end
+ initialize = function()
+ result = { }
+ return result
+ end,
+ finalize = function()
+ return concat(result)
+ end,
+ handle = function(...)
+ result[#result+1] = concat { ... }
+ end,
}
local function xmltostring(root) -- 25% overhead due to collecting
- if root then
- if type(root) == 'string' then
- return root
- else -- if next(root) then -- next is faster than type (and >0 test)
- return serialize(root,xmlstringhandler) or ""
- end
+ if not root then
+ return ""
+ elseif type(root) == 'string' then
+ return root
+ else -- if next(root) then -- next is faster than type (and >0 test)
+ return serialize(root,xmlstringhandler) or ""
end
- return ""
end
-local function xmltext(root) -- inline
+local function __tostring(root) -- inline
return (root and xmltostring(root)) or ""
end
initialize_mt = function(root) -- redefinition
- mt = { __tostring = xmltext, __index = root }
+ mt = { __tostring = __tostring, __index = root }
end
xml.defaulthandlers = handlers
@@ -7428,7 +8113,7 @@ xml.tocdata(e,"error")
--ldx]]--
function xml.tocdata(e,wrapper)
- local whatever = xmltostring(e.dt)
+ local whatever = type(e) == "table" and xmltostring(e.dt) or e or ""
if wrapper then
whatever = format("<%s>%s</%s>",wrapper,whatever,wrapper)
end
@@ -7620,8 +8305,8 @@ apply_axis['child'] = function(list)
c = c + 1
collected[c] = dk
dk.ni = k -- refresh
- en = en + 1
- dk.ei = en
+ en = en + 1
+ dk.ei = en
end
end
ll.en = en
@@ -7649,6 +8334,7 @@ local function collect(list,collected,c)
end
return c
end
+
apply_axis['descendant'] = function(list)
local collected, c = { }, 0
for l=1,#list do
@@ -7940,6 +8626,12 @@ local lp_or = P("|") / " or "
local lp_and = P("&") / " and "
local lp_builtin = P (
+ P("text") / "(ll.dt[1] or '')" + -- fragile
+ P("content") / "ll.dt" +
+ -- P("name") / "(ll.ns~='' and ll.ns..':'..ll.tg)" +
+ P("name") / "((ll.ns~='' and ll.ns..':'..ll.tg) or ll.tg)" +
+ P("tag") / "ll.tg" +
+ P("position") / "l" + -- is element in finalizer
P("firstindex") / "1" +
P("lastindex") / "(#ll.__p__.dt or 1)" +
P("firstelement") / "1" +
@@ -7947,15 +8639,11 @@ local lp_builtin = P (
P("first") / "1" +
P("last") / "#list" +
P("rootposition") / "order" +
- P("position") / "l" + -- is element in finalizer
P("order") / "order" +
P("element") / "(ll.ei or 1)" +
P("index") / "(ll.ni or 1)" +
P("match") / "(ll.mi or 1)" +
- P("text") / "(ll.dt[1] or '')" +
- -- P("name") / "(ll.ns~='' and ll.ns..':'..ll.tg)" +
- P("name") / "((ll.ns~='' and ll.ns..':'..ll.tg) or ll.tg)" +
- P("tag") / "ll.tg" +
+ -- P("namespace") / "ll.ns" +
P("ns") / "ll.ns"
) * ((spaces * P("(") * spaces * P(")"))/"")
@@ -7979,11 +8667,11 @@ local lp_function = C(R("az","AZ","__")^1) * P("(") / function(t) -- todo: bett
end
end
-local lparent = lpeg.P("(")
-local rparent = lpeg.P(")")
+local lparent = P("(")
+local rparent = P(")")
local noparent = 1 - (lparent+rparent)
-local nested = lpeg.P{lparent * (noparent + lpeg.V(1))^0 * rparent}
-local value = lpeg.P(lparent * lpeg.C((noparent + nested)^0) * rparent) -- lpeg.P{"("*C(((1-S("()"))+V(1))^0)*")"}
+local nested = P{lparent * (noparent + V(1))^0 * rparent}
+local value = P(lparent * C((noparent + nested)^0) * rparent) -- P{"("*C(((1-S("()"))+V(1))^0)*")"}
local lp_child = Cc("expr.child(ll,'") * R("az","AZ","--","__")^1 * Cc("')")
local lp_number = S("+-") * R("09")^1
@@ -8135,7 +8823,10 @@ local special_1 = P("*") * Cc(register_auto_descendant) * Cc(register_all_nodes
local special_2 = P("/") * Cc(register_auto_self)
local special_3 = P("") * Cc(register_auto_self)
-local pathparser = Ct { "patterns", -- can be made a bit faster by moving pattern outside
+local no_nextcolon = P(-1) + #(1-P(":")) -- newer lpeg needs the P(-1)
+local no_nextlparent = P(-1) + #(1-P("(")) -- newer lpeg needs the P(-1)
+
+local pathparser = Ct { "patterns", -- can be made a bit faster by moving some patterns outside
patterns = spaces * V("protocol") * spaces * (
( V("special") * spaces * P(-1) ) +
@@ -8164,10 +8855,8 @@ local pathparser = Ct { "patterns", -- can be made a bit faster by moving patter
shortcuts = V("shortcuts_a") * (spaces * "/" * spaces * V("shortcuts_a"))^0,
s_descendant_or_self = (P("***/") + P("/")) * Cc(register_descendant_or_self), --- *** is a bonus
- -- s_descendant_or_self = P("/") * Cc(register_descendant_or_self),
s_descendant = P("**") * Cc(register_descendant),
- s_child = P("*") * #(1-P(":")) * Cc(register_child ),
--- s_child = P("*") * #(P("/")+P(-1)) * Cc(register_child ),
+ s_child = P("*") * no_nextcolon * Cc(register_child ),
s_parent = P("..") * Cc(register_parent ),
s_self = P("." ) * Cc(register_self ),
s_root = P("^^") * Cc(register_root ),
@@ -8194,13 +8883,13 @@ local pathparser = Ct { "patterns", -- can be made a bit faster by moving patter
expressions = expression / register_expression,
letters = R("az")^1,
- name = (1-lpeg.S("/[]()|:*!"))^1,
+ name = (1-S("/[]()|:*!"))^1, -- make inline
negate = P("!") * Cc(false),
nodefunction = V("negate") + P("not") * Cc(false) + Cc(true),
nodetest = V("negate") + Cc(true),
nodename = (V("negate") + Cc(true)) * spaces * ((V("wildnodename") * P(":") * V("wildnodename")) + (Cc(false) * V("wildnodename"))),
- wildnodename = (C(V("name")) + P("*") * Cc(false)) * #(1-P("(")),
+ wildnodename = (C(V("name")) + P("*") * Cc(false)) * no_nextlparent,
nodeset = spaces * Ct(V("nodename") * (spaces * P("|") * spaces * V("nodename"))^0) * spaces,
finalizer = (Cb("protocol") * P("/")^-1 * C(V("name")) * arguments * P(-1)) / register_finalizer,
@@ -8364,7 +9053,7 @@ local function profiled_apply(list,parsed,nofparsed,order)
elseif kind == "expression" then
collected = apply_expression(collected,pi.evaluator,order)
elseif kind == "finalizer" then
- collected = pi.finalizer(collected)
+ collected = pi.finalizer(collected) -- no check on # here
p.matched = p.matched + 1
p.finalized = p.finalized + 1
return collected
@@ -8497,9 +9186,9 @@ end
expressions.child = function(e,pattern)
return applylpath(e,pattern) -- todo: cache
end
-expressions.count = function(e,pattern)
+expressions.count = function(e,pattern) -- what if pattern == empty or nil
local collected = applylpath(e,pattern) -- todo: cache
- return (collected and #collected) or 0
+ return pattern and (collected and #collected) or 0
end
-- external
@@ -8508,7 +9197,7 @@ expressions.oneof = function(s,...) -- slow
local t = {...} for i=1,#t do if s == t[i] then return true end end return false
end
expressions.error = function(str)
- xml.errorhandler("unknown function in lpath expression",tostring(str or "?"))
+ xml.errorhandler(format("unknown function in lpath expression: %s",tostring(str or "?")))
return false
end
expressions.undefined = function(s)
@@ -8534,6 +9223,23 @@ expressions.lower = lower
expressions.number = tonumber
expressions.boolean = toboolean
+function expressions.contains(str,pattern)
+ local t = type(str)
+ if t == "string" then
+ if find(str,pattern) then
+ return true
+ end
+ elseif t == "table" then
+ for i=1,#str do
+ local d = str[i]
+ if type(d) == "string" and find(d,pattern) then
+ return true
+ end
+ end
+ end
+ return false
+end
+
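-- A hedged example of the new contains() helper in an lpath expression; the
-- document root and the element names are hypothetical:
--
-- for e in xml.collected(root,"entry[contains(text(),'context')]") do
--     print(xml.text(e))
-- end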
-- user interface
local function traverse(root,pattern,handle)
@@ -8856,10 +9562,12 @@ local xml = xml
local xmlconvert, xmlcopy, xmlname = xml.convert, xml.copy, xml.name
local xmlinheritedconvert = xml.inheritedconvert
local xmlapplylpath = xml.applylpath
+local xmlfilter = xml.filter
local type, setmetatable, getmetatable = type, setmetatable, getmetatable
-local insert, remove, fastcopy = table.insert, table.remove, table.fastcopy
-local gmatch, gsub = string.gmatch, string.gsub
+local insert, remove, fastcopy, concat = table.insert, table.remove, table.fastcopy, table.concat
+local gmatch, gsub, format = string.gmatch, string.gsub, string.format
+local utfbyte = utf.byte
local function report(what,pattern,c,e)
report_xml("%s element '%s' (root: '%s', position: %s, index: %s, pattern: %s)",what,xmlname(e),xmlname(e.__p__),c,e.ni,pattern)
@@ -9004,7 +9712,7 @@ local function xmltoelement(whatever,root)
end
local element
if type(whatever) == "string" then
- element = xmlinheritedconvert(whatever,root)
+ element = xmlinheritedconvert(whatever,root) -- beware, not really a root
else
element = whatever -- we assume a table
end
@@ -9031,18 +9739,30 @@ local function copiedelement(element,newparent)
end
function xml.delete(root,pattern)
- local collected = xmlapplylpath(root,pattern)
- if collected then
- for c=1,#collected do
- local e = collected[c]
- local p = e.__p__
- if p then
- if trace_manipulations then
- report('deleting',pattern,c,e)
+ if not pattern or pattern == "" then
+ local p = root.__p__
+ if p then
+ if trace_manipulations then
+ report('deleting',"--",1,root)
+ end
+ local d = p.dt
+ remove(d,root.ni)
+ redo_ni(d) -- can be made faster and inlined
+ end
+ else
+ local collected = xmlapplylpath(root,pattern)
+ if collected then
+ for c=1,#collected do
+ local e = collected[c]
+ local p = e.__p__
+ if p then
+ if trace_manipulations then
+ report('deleting',pattern,c,e)
+ end
+ local d = p.dt
+ remove(d,e.ni)
+ redo_ni(d) -- can be made faster and inlined
end
- local d = p.dt
- remove(d,e.ni)
- redo_ni(d) -- can be made faster and inlined
end
end
end
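-- Sketch of the two delete modes above (root, the pattern and e are hypothetical):
--
-- xml.delete(root,"metadata/comment") -- delete every match of the pattern
-- xml.delete(e)                       -- no pattern: detach element e from its parent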
@@ -9105,32 +9825,39 @@ end
local function inject_element(root,pattern,whatever,prepend)
local element = root and xmltoelement(whatever,root)
local collected = element and xmlapplylpath(root,pattern)
- if collected then
- for c=1,#collected do
- local e = collected[c]
- local r = e.__p__
- local d, k, rri = r.dt, e.ni, r.ri
- local edt = (rri and d[rri].dt) or (d and d[k] and d[k].dt)
- if edt then
- local be, af
- local cp = copiedelement(element,e)
- if prepend then
- be, af = cp, edt
- else
- be, af = edt, cp
- end
- local bn = #be
- for i=1,#af do
- bn = bn + 1
- be[bn] = af[i]
- end
- if rri then
- r.dt[rri].dt = be
- else
- d[k].dt = be
- end
- redo_ni(d)
+ local function inject_e(e)
+ local r = e.__p__
+ local d, k, rri = r.dt, e.ni, r.ri
+ local edt = (rri and d[rri].dt) or (d and d[k] and d[k].dt)
+ if edt then
+ local be, af
+ local cp = copiedelement(element,e)
+ if prepend then
+ be, af = cp, edt
+ else
+ be, af = edt, cp
end
+ local bn = #be
+ for i=1,#af do
+ bn = bn + 1
+ be[bn] = af[i]
+ end
+ if rri then
+ r.dt[rri].dt = be
+ else
+ d[k].dt = be
+ end
+ redo_ni(d)
+ end
+ end
+ if not collected then
+ -- nothing
+ elseif collected.tg then
+ -- first or so
+ inject_e(collected)
+ else
+ for c=1,#collected do
+ inject_e(collected[c])
end
end
end
@@ -9138,16 +9865,23 @@ end
local function insert_element(root,pattern,whatever,before) -- todo: element as function
local element = root and xmltoelement(whatever,root)
local collected = element and xmlapplylpath(root,pattern)
- if collected then
+ local function insert_e(e)
+ local r = e.__p__
+ local d, k = r.dt, e.ni
+ if not before then
+ k = k + 1
+ end
+ insert(d,k,copiedelement(element,r))
+ redo_ni(d)
+ end
+ if not collected then
+ -- nothing
+ elseif collected.tg then
+ -- first or so
+ insert_e(collected)
+ else
for c=1,#collected do
- local e = collected[c]
- local r = e.__p__
- local d, k = r.dt, e.ni
- if not before then
- k = k + 1
- end
- insert(d,k,copiedelement(element,r))
- redo_ni(d)
+ insert_e(collected[c])
end
end
end
@@ -9346,6 +10080,75 @@ function xml.remapname(root, pattern, newtg, newns, newrn)
end
--[[ldx--
+<p>Helper (for q2p).</p>
+--ldx]]--
+
+function xml.cdatatotext(e)
+ local dt = e.dt
+ if #dt == 1 then
+ local first = dt[1]
+ if first.tg == "@cd@" then
+ e.dt = first.dt
+ end
+ else
+ -- maybe option
+ end
+end
+
+xml.builtinentities = table.tohash { "amp", "quot", "apos", "lt", "gt" } -- used often so share
+
+local entities = characters and characters.entities or nil
+local builtinentities = xml.builtinentities
+
+function xml.addentitiesdoctype(root,option) -- we could also have a 'resolve' i.e. inline hex
+ if not entities then
+ require("char-ent")
+ entities = characters.entities
+ end
+ if entities and root and root.tg == "@rt@" and root.statistics then
+ local list = { }
+ local hexify = option == "hexadecimal"
+ for k, v in table.sortedhash(root.statistics.entities.names) do
+ if not builtinentities[k] then
+ local e = entities[k]
+ if not e then
+ e = format("[%s]",k)
+ elseif hexify then
+ e = format("&#%05X;",utfbyte(k))
+ end
+ list[#list+1] = format(" <!ENTITY %s %q >",k,e)
+ end
+ end
+ local dt = root.dt
+ local n = dt[1].tg == "@pi@" and 2 or 1
+ if #list > 0 then
+ insert(dt, n, { "\n" })
+ insert(dt, n, {
+ tg = "@dt@", -- beware, doctype is unparsed
+ dt = { format("Something [\n%s\n] ",concat(list)) },
+ ns = "",
+ special = true,
+ })
+ insert(dt, n, { "\n\n" })
+ else
+ -- insert(dt, n, { table.serialize(root.statistics) })
+ end
+ end
+end
+
+-- local str = [==[
+-- <?xml version='1.0' standalone='yes' ?>
+-- <root>
+-- <a>test &nbsp; test &#123; test</a>
+-- <b><![CDATA[oeps]]></b>
+-- </root>
+-- ]==]
+--
+-- local x = xml.convert(str)
+-- xml.addentitiesdoctype(x,"hexadecimal")
+-- print(x)
+
+--[[ldx--
<p>Here are a few synonyms.</p>
--ldx]]--
@@ -9378,6 +10181,53 @@ xml.remap_tag = xml.remaptag obsolete.remap_tag
xml.remap_name = xml.remapname obsolete.remap_name = xml.remapname
xml.remap_namespace = xml.remapnamespace obsolete.remap_namespace = xml.remapnamespace
+-- new (probably ok)
+
+function xml.cdata(e)
+ if e then
+ local dt = e.dt
+ if dt and #dt == 1 then
+ local first = dt[1]
+ return first.tg == "@cd@" and first.dt[1] or ""
+ end
+ end
+ return ""
+end
+
+function xml.finalizers.xml.cdata(collected)
+ if collected then
+ local e = collected[1]
+ if e then
+ local dt = e.dt
+ if dt and #dt == 1 then
+ local first = dt[1]
+ return first.tg == "@cd@" and first.dt[1] or ""
+ end
+ end
+ end
+ return ""
+end
+
+function xml.insertcomment(e,str,n) -- also insertcdata
+ table.insert(e.dt,n or 1,{
+ tg = "@cm@",
+ ns = "",
+ special = true,
+ at = { },
+ dt = { str },
+ })
+end
+
+function xml.setcdata(e,str) -- also setcomment
+ e.dt = { {
+ tg = "@cd@",
+ ns = "",
+ special = true,
+ at = { },
+ dt = { str },
+ } }
+end
+
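-- Rough illustration of the two new helpers (root, the "script" elements and
-- their content are hypothetical):
--
-- for e in xml.collected(root,"script") do
--     xml.setcdata(e,"if a < b then run() end")
--     xml.insertcomment(e,"generated",1)
-- end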
end -- of closure
@@ -9391,15 +10241,17 @@ if not modules then modules = { } end modules ['lxml-xml'] = {
license = "see context related readme files"
}
-local concat = string.concat
+local concat = table.concat
+local find = string.find
local xml = xml
-local finalizers = xml.finalizers.xml
-local xmlfilter = xml.filter -- we could inline this one for speed
-local xmltostring = xml.tostring
-local xmlserialize = xml.serialize
-local xmlcollected = xml.collected
+local finalizers = xml.finalizers.xml
+local xmlfilter = xml.filter -- we could inline this one for speed
+local xmltostring = xml.tostring
+local xmlserialize = xml.serialize
+local xmlcollected = xml.collected
+local xmlnewhandlers = xml.newhandlers
local function first(collected) -- wrong ?
return collected and collected[1]
@@ -9413,6 +10265,21 @@ local function all(collected)
return collected
end
+-- local function reverse(collected)
+-- if collected then
+-- local nc = #collected
+-- if nc > 0 then
+-- local reversed, r = { }, 0
+-- for c=nc,1,-1 do
+-- r = r + 1
+-- reversed[r] = collected[c]
+-- end
+-- return reversed
+-- else
+-- return collected
+-- end
+-- end
+-- end
local reverse = table.reversed
@@ -9429,34 +10296,37 @@ local function att(id,name)
end
local function count(collected)
- return (collected and #collected) or 0
+ return collected and #collected or 0
end
local function position(collected,n)
- if collected then
- n = tonumber(n) or 0
- if n < 0 then
- return collected[#collected + n + 1]
- elseif n > 0 then
- return collected[n]
- else
- return collected[1].mi or 0
- end
+ if not collected then
+ return 0
+ end
+ local nc = #collected
+ if nc == 0 then
+ return 0
+ end
+ n = tonumber(n) or 0
+ if n < 0 then
+ return collected[nc + n + 1]
+ elseif n > 0 then
+ return collected[n]
+ else
+ return collected[1].mi or 0
end
end
local function match(collected)
- return (collected and collected[1].mi) or 0 -- match
+ return collected and #collected > 0 and collected[1].mi or 0 -- match
end
local function index(collected)
- if collected then
- return collected[1].ni
- end
+ return collected and #collected > 0 and collected[1].ni or 0 -- 0 is new
end
local function attributes(collected,arguments)
- if collected then
+ if collected and #collected > 0 then
local at = collected[1].at
if arguments then
return at[arguments]
@@ -9467,7 +10337,7 @@ local function attributes(collected,arguments)
end
local function chainattribute(collected,arguments) -- todo: optional levels
- if collected then
+ if collected and #collected > 0 then
local e = collected[1]
while e do
local at = e.at
@@ -9485,108 +10355,169 @@ local function chainattribute(collected,arguments) -- todo: optional levels
return ""
end
-local function raw(collected) -- hybrid
- if collected then
+local function raw(collected) -- hybrid (not much different from text so it might go)
+ if collected and #collected > 0 then
local e = collected[1] or collected
- return (e and xmlserialize(e)) or "" -- only first as we cannot concat function
+ return e and xmltostring(e) or "" -- only first as we cannot concat function
else
return ""
end
end
+--
+
+local xmltexthandler = xmlnewhandlers {
+ name = "string",
+ initialize = function()
+ result = { }
+ return result
+ end,
+ finalize = function()
+ return concat(result)
+ end,
+ handle = function(...)
+ result[#result+1] = concat { ... }
+ end,
+ escape = false,
+}
+
+local function xmltotext(root)
+ local dt = root.dt
+ if not dt then
+ return ""
+ end
+ local nt = #dt -- string or table
+ if nt == 0 then
+ return ""
+ elseif nt == 1 and type(dt[1]) == "string" then
+ return dt[1] -- no escaping of " ' < > &
+ else
+ return xmlserialize(root,xmltexthandler) or ""
+ end
+end
+
+--
+
local function text(collected) -- hybrid
- if collected then
- local e = collected[1] or collected
- return (e and xmltostring(e.dt)) or ""
+ if collected then -- no # test here !
+ local e = collected[1] or collected -- why fallback to element, how about cdata
+ return e and xmltotext(e) or ""
else
return ""
end
end
local function texts(collected)
- if collected then
- local t, n = { }, 0
- for c=1,#collected do
- local e = collected[c]
- if e and e.dt then
- n = n + 1
- t[n] = e.dt
- end
+ if not collected then
+ return { } -- why no nil
+ end
+ local nc = #collected
+ if nc == 0 then
+ return { } -- why no nil
+ end
+ local t, n = { }, 0
+ for c=1,nc do
+ local e = collected[c]
+ if e and e.dt then
+ n = n + 1
+ t[n] = e.dt
end
- return t
end
+ return t
end
local function tag(collected,n)
- if collected then
- local c
- if n == 0 or not n then
- c = collected[1]
- elseif n > 1 then
- c = collected[n]
- else
- c = collected[#collected-n+1]
- end
- return c and c.tg
+ if not collected then
+ return
+ end
+ local nc = #collected
+ if nc == 0 then
+ return
end
+ local c
+ if n == 0 or not n then
+ c = collected[1]
+ elseif n > 1 then
+ c = collected[n]
+ else
+ c = collected[nc-n+1]
+ end
+ return c and c.tg
end
local function name(collected,n)
- if collected then
- local c
- if n == 0 or not n then
- c = collected[1]
- elseif n > 1 then
- c = collected[n]
- else
- c = collected[#collected-n+1]
- end
- if c then
- if c.ns == "" then
- return c.tg
- else
- return c.ns .. ":" .. c.tg
- end
- end
+ if not collected then
+ return
+ end
+ local nc = #collected
+ if nc == 0 then
+ return
+ end
+ local c
+ if n == 0 or not n then
+ c = collected[1]
+ elseif n > 1 then
+ c = collected[n]
+ else
+ c = collected[nc-n+1]
+ end
+ if not c then
+ -- sorry
+ elseif c.ns == "" then
+ return c.tg
+ else
+ return c.ns .. ":" .. c.tg
end
end
local function tags(collected,nonamespace)
- if collected then
- local t, n = { }, 0
- for c=1,#collected do
- local e = collected[c]
- local ns, tg = e.ns, e.tg
- n = n + 1
- if nonamespace or ns == "" then
- t[n] = tg
- else
- t[n] = ns .. ":" .. tg
- end
+ if not collected then
+ return
+ end
+ local nc = #collected
+ if nc == 0 then
+ return
+ end
+ local t, n = { }, 0
+ for c=1,nc do
+ local e = collected[c]
+ local ns, tg = e.ns, e.tg
+ n = n + 1
+ if nonamespace or ns == "" then
+ t[n] = tg
+ else
+ t[n] = ns .. ":" .. tg
end
- return t
end
+ return t
end
-local function empty(collected)
- if collected then
- for c=1,#collected do
- local e = collected[c]
- if e then
- local edt = e.dt
- if edt then
- local n = #edt
- if n == 1 then
- local edk = edt[1]
- local typ = type(edk)
- if typ == "table" then
- return false
- elseif edk ~= "" then -- maybe an extra tester for spacing only
- return false
- end
- elseif n > 1 then
+local function empty(collected,spacesonly)
+ if not collected then
+ return true
+ end
+ local nc = #collected
+ if nc == 0 then
+ return true
+ end
+ for c=1,nc do
+ local e = collected[c]
+ if e then
+ local edt = e.dt
+ if edt then
+ local n = #edt
+ if n == 1 then
+ local edk = edt[1]
+ local typ = type(edk)
+ if typ == "table" then
+ return false
+ elseif edk ~= "" then
+ return false
+ elseif spacesonly and not find(edk,"%S") then
return false
end
+ elseif n > 1 then
+ return false
end
end
end
@@ -9641,14 +10572,14 @@ function xml.raw(id,pattern)
end
end
-function xml.text(id,pattern)
+function xml.text(id,pattern) -- brrr either content or element (when cdata)
if pattern then
-- return text(xmlfilter(id,pattern))
local collected = xmlfilter(id,pattern)
- return (collected and xmltostring(collected[1].dt)) or ""
+ return collected and #collected > 0 and xmltotext(collected[1]) or ""
elseif id then
-- return text(id)
- return xmltostring(id.dt) or ""
+ return xmltotext(id) or ""
else
return ""
end
@@ -9656,6 +10587,8 @@ end
xml.content = text
+--
+
function xml.position(id,pattern,n) -- element
return position(xmlfilter(id,pattern),n)
end
@@ -9664,8 +10597,8 @@ function xml.match(id,pattern) -- number
return match(xmlfilter(id,pattern))
end
-function xml.empty(id,pattern)
- return empty(xmlfilter(id,pattern))
+function xml.empty(id,pattern,spacesonly)
+ return empty(xmlfilter(id,pattern),spacesonly)
end
xml.all = xml.filter
@@ -9719,7 +10652,7 @@ if not modules then modules = { } end modules ['data-ini'] = {
license = "see context related readme files",
}
-local gsub, find, gmatch = string.gsub, string.find, string.gmatch
+local gsub, find, gmatch, char = string.gsub, string.find, string.gmatch, string.char
local concat = table.concat
local next, type = next, type
@@ -9781,7 +10714,7 @@ do
local homedir = osgetenv(ostype == "windows" and 'USERPROFILE' or 'HOME') or ''
if not homedir or homedir == "" then
- homedir = string.char(127) -- we need a value, later we wil trigger on it
+ homedir = char(127) -- we need a value, later we will trigger on it
end
homedir = file.collapsepath(homedir)
@@ -9954,7 +10887,7 @@ if not modules then modules = { } end modules ['data-exp'] = {
license = "see context related readme files",
}
-local format, find, gmatch, lower = string.format, string.find, string.gmatch, string.lower
+local format, find, gmatch, lower, char, sub = string.format, string.find, string.gmatch, string.lower, string.char, string.sub
local concat, sort = table.concat, table.sort
local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
local Ct, Cs, Cc, P, C, S = lpeg.Ct, lpeg.Cs, lpeg.Cc, lpeg.P, lpeg.C, lpeg.S
@@ -9974,19 +10907,6 @@ local resolvers = resolvers
-- all, when working on the main resolver code, I don't want to scroll
-- past this every time. See data-obs.lua for the gsub variant.
--- {a,b,c,d}
--- a,b,c/{p,q,r},d
--- a,b,c/{p,q,r}/d/{x,y,z}//
--- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
--- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
--- a{b,c}{d,e}f
--- {a,b,c,d}
--- {a,b,c/{p,q,r},d}
--- {a,b,c/{p,q,r}/d/{x,y,z}//}
--- {a,b,c/{p,q/{x,y,z}},d/{p,q,r}}
--- {a,b,c/{p,q/{x,y,z},w}v,d/{p,q,r}}
--- {$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,.local,}/web2c}
-
local function f_first(a,b)
local t, n = { }, 0
for s in gmatch(b,"[^,]+") do
@@ -10043,12 +10963,9 @@ local function splitpathexpr(str, newlist, validate) -- I couldn't resist lpeggi
str = lpegmatch(stripper_1,str)
if validate then
for s in gmatch(str,"[^,]+") do
- local restoreslashes = false
- if find(s, "//$") then restoreslashes = true end
s = validate(s)
if s then
n = n + 1 ; t[n] = s
- if restoreslashes and not find(s, "//$") then t[n] = t[n] .. '//' end
end
end
else
@@ -10081,41 +10998,56 @@ function resolvers.expandedpathfromlist(pathlist)
return newlist
end
+-- {a,b,c,d}
+-- a,b,c/{p,q,r},d
+-- a,b,c/{p,q,r}/d/{x,y,z}//
+-- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
+-- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
+-- a{b,c}{d,e}f
+-- {a,b,c,d}
+-- {a,b,c/{p,q,r},d}
+-- {a,b,c/{p,q,r}/d/{x,y,z}//}
+-- {a,b,c/{p,q/{x,y,z}},d/{p,q,r}}
+-- {a,b,c/{p,q/{x,y,z},w}v,d/{p,q,r}}
+-- {$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,.local,}/web2c}
+
local cleanup = lpeg.replacer {
{ "!" , "" },
{ "\\" , "/" },
}
-local homedir
-
-function resolvers.cleanpath(str)
- if not homedir then
- homedir = lpegmatch(cleanup,environment.homedir or "")
- if homedir == string.char(127) or homedir == "" or not lfs.isdir(homedir) then
- if trace_expansions then
- report_expansions("no home dir set, ignoring dependent paths")
- end
- function resolvers.cleanpath(str)
- if find(str,"~") then
- return "" -- special case
- else
- return str and lpegmatch(cleanup,str)
- end
- end
- else
- cleanup = lpeg.replacer {
- { "!" , "" },
- { "\\" , "/" },
- { "~" , homedir },
- }
- function resolvers.cleanpath(str)
- return str and lpegmatch(cleanup,str)
+function resolvers.cleanpath(str) -- tricky, maybe only simple paths
+ local doslashes = (P("\\")/"/" + 1)^0
+ local donegation = (P("!") /"" )^0
+ local homedir = lpegmatch(Cs(donegation * doslashes),environment.homedir or "")
+ if homedir == "~" or homedir == "" or not lfs.isdir(homedir) then
+ if trace_expansions then
+ report_expansions("no home dir set, ignoring dependent paths")
+ end
+ function resolvers.cleanpath(str)
+ if not str or find(str,"~") then
+ return "" -- special case
+ else
+ return lpegmatch(cleanup,str)
end
end
+ else
+ local dohome = ((P("~")+P("$HOME"))/homedir)^0
+ local cleanup = Cs(donegation * dohome * doslashes)
+ function resolvers.cleanpath(str)
+ return str and lpegmatch(cleanup,str) or ""
+ end
end
return resolvers.cleanpath(str)
end
+-- print(resolvers.cleanpath(""))
+-- print(resolvers.cleanpath("!"))
+-- print(resolvers.cleanpath("~"))
+-- print(resolvers.cleanpath("~/test"))
+-- print(resolvers.cleanpath("!~/test"))
+-- print(resolvers.cleanpath("~/test~test"))
+
-- This one strips quotes and funny tokens.
local expandhome = P("~") / "$HOME" -- environment.homedir
@@ -10140,8 +11072,8 @@ end
local cache = { }
----- splitter = Ct(lpeg.splitat(S(ostype == "windows" and ";" or ":;"))) -- maybe add ,
-local splitter = Ct(lpeg.splitat(";")) -- as we move towards urls, prefixes and use tables we no longer do :
+----- splitter = lpeg.tsplitat(S(ostype == "windows" and ";" or ":;")) -- maybe add ,
+local splitter = lpeg.tsplitat(";") -- as we move towards urls, prefixes and use tables we no longer do :
local backslashswapper = lpeg.replacer("\\","/")
@@ -10201,13 +11133,22 @@ end
-local weird = P(".")^1 + lpeg.anywhere(S("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t"))
+-- a lot of this caching can be stripped away when we have ssd's everywhere
+--
+-- we could cache all the (sub)paths here if needed
local attributes, directory = lfs.attributes, lfs.dir
+local weird = P(".")^1 + lpeg.anywhere(S("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t"))
+local timer = { }
+local scanned = { }
+local nofscans = 0
+local scancache = { }
+
local function scan(files,spec,path,n,m,r)
- local full = (path == "" and spec) or (spec .. path .. '/')
- local dirs, nofdirs = { }, 0
+ local full = (path == "" and spec) or (spec .. path .. '/')
+ local dirs = { }
+ local nofdirs = 0
for name in directory(full) do
if not lpegmatch(weird,name) then
local mode = attributes(full..name,'mode')
@@ -10245,25 +11186,122 @@ local function scan(files,spec,path,n,m,r)
files, n, m, r = scan(files,spec,dirs[i],n,m,r)
end
end
+ scancache[sub(full,1,-2)] = files
return files, n, m, r
end
-function resolvers.scanfiles(path,branch)
+local fullcache = { }
+
+function resolvers.scanfiles(path,branch,usecache)
+ statistics.starttiming(timer)
+ local realpath = resolvers.resolve(path) -- no shortcut
+ if usecache then
+ local files = fullcache[realpath]
+ if files then
+ if trace_locating then
+ report_expansions("using caches scan of path '%s', branch '%s'",path,branch or path)
+ end
+ return files
+ end
+ end
if trace_locating then
- report_expansions("scanning path '%s', branch '%s'",path, branch or path)
+ report_expansions("scanning path '%s', branch '%s'",path,branch or path)
end
- local realpath = resolvers.resolve(path) -- no shortcut
local files, n, m, r = scan({ },realpath .. '/',"",0,0,0)
- files.__path__ = path -- can be selfautoparent:texmf-whatever
- files.__files__ = n
- files.__directories__ = m
- files.__remappings__ = r
+ files.__path__ = path -- can be selfautoparent:texmf-whatever
+ files.__files__ = n
+ files.__directories__ = m
+ files.__remappings__ = r
if trace_locating then
report_expansions("%s files found on %s directories with %s uppercase remappings",n,m,r)
end
+ if usecache then
+ scanned[#scanned+1] = realpath
+ fullcache[realpath] = files
+ end
+ nofscans = nofscans + 1
+ statistics.stoptiming(timer)
+ return files
+end
+
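-- Hedged usage sketch: with usecache set, a second scan of the same (resolved)
-- path is served from fullcache instead of hitting the filesystem again; the
-- path below is just an example:
--
-- local one = resolvers.scanfiles("/usr/local/texmf",nil,true) -- real scan
-- local two = resolvers.scanfiles("/usr/local/texmf",nil,true) -- from the cache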
+local function simplescan(files,spec,path) -- first match only, no map and such
+ local full = (path == "" and spec) or (spec .. path .. '/')
+ local dirs = { }
+ local nofdirs = 0
+ for name in directory(full) do
+ if not lpegmatch(weird,name) then
+ local mode = attributes(full..name,'mode')
+ if mode == 'file' then
+ if not files[name] then
+ -- only first match
+ files[name] = path
+ end
+ elseif mode == 'directory' then
+ nofdirs = nofdirs + 1
+ if path ~= "" then
+ dirs[nofdirs] = path..'/'..name
+ else
+ dirs[nofdirs] = name
+ end
+ end
+ end
+ end
+ if nofdirs > 0 then
+ sort(dirs)
+ for i=1,nofdirs do
+ files = simplescan(files,spec,dirs[i])
+ end
+ end
+ return files
+end
+
+local simplecache = { }
+local nofsharedscans = 0
+
+function resolvers.simplescanfiles(path,branch,usecache)
+ statistics.starttiming(timer)
+ local realpath = resolvers.resolve(path) -- no shortcut
+ if usecache then
+ local files = simplecache[realpath]
+ if not files then
+ files = scancache[realpath]
+ if files then
+ nofsharedscans = nofsharedscans + 1
+ end
+ end
+ if files then
+ if trace_locating then
+ report_expansions("using caches scan of path '%s', branch '%s'",path,branch or path)
+ end
+ return files
+ end
+ end
+ if trace_locating then
+ report_expansions("scanning path '%s', branch '%s'",path,branch or path)
+ end
+ local files = simplescan({ },realpath .. '/',"")
+ if trace_locating then
+ report_expansions("%s files found",table.count(files))
+ end
+ if usecache then
+ scanned[#scanned+1] = realpath
+ simplecache[realpath] = files
+ end
+ nofscans = nofscans + 1
+ statistics.stoptiming(timer)
return files
end
+function resolvers.scandata()
+ table.sort(scanned)
+ return {
+ n = nofscans,
+ shared = nofsharedscans,
+ time = statistics.elapsedtime(timer),
+ paths = scanned,
+ }
+end
+
end -- of closure
@@ -10361,12 +11399,12 @@ local relations = allocate { -- todo: handlers also here
mp = {
names = { "mp" },
variable = 'MPINPUTS',
- suffixes = { 'mp' },
+ suffixes = { 'mp', 'mpvi', 'mpiv', 'mpii' },
},
tex = {
names = { "tex" },
variable = 'TEXINPUTS',
- suffixes = { 'tex', "mkiv", "mkiv", "mkii" },
+ suffixes = { 'tex', "mkvi", "mkiv", "mkii" },
},
icc = {
names = { "icc", "icc profile", "icc profiles" },
@@ -10503,7 +11541,7 @@ end
resolvers.updaterelations() -- push this in the metatable -> newindex
local function simplified(t,k)
- return rawget(t,lower(gsub(k," ","")))
+ return k and rawget(t,lower(gsub(k," ",""))) or nil
end
setmetatableindex(formats, simplified)
@@ -10589,6 +11627,7 @@ luatools with a recache feature.</p>
--ldx]]--
local format, lower, gsub, concat = string.format, string.lower, string.gsub, table.concat
+local serialize, serializetofile = table.serialize, table.tofile
local mkdirs, isdir = dir.mkdirs, lfs.isdir
local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
@@ -10651,11 +11690,7 @@ local function identify()
end
elseif not writable and caches.force then
local cacheparent = file.dirname(cachepath)
- -- TH: added 'or true' for deeply buried non-existent caches.
- -- file.is_writable() is not really important here, since this
- -- branch is only checked for as-yet non-existent paths, and
- -- it guards against mkdirs() failing, anyway.
- if file.is_writable(cacheparent) or true then
+ if file.is_writable(cacheparent) and true then -- we go on anyway (needed for mojca's kind of paths)
if not caches.ask or io.ask(format("\nShould I create the cache path %s?",cachepath), "no", { "yes", "no" }) == "yes" then
mkdirs(cachepath)
if isdir(cachepath) and file.is_writable(cachepath) then
@@ -10746,7 +11781,7 @@ function caches.usedpaths()
end
function caches.configfiles()
- return table.concat(resolvers.instance.specification,";")
+ return concat(resolvers.instance.specification,";")
end
function caches.hashed(tree)
@@ -10870,9 +11905,9 @@ function caches.savedata(filepath,filename,data,raw)
end
data.cache_uuid = os.uuid()
if caches.direct then
- file.savedata(tmaname,table.serialize(data,true,saveoptions))
+ file.savedata(tmaname,serialize(data,true,saveoptions))
else
- table.tofile(tmaname,data,true,saveoptions)
+ serializetofile(tmaname,data,true,saveoptions)
end
utilities.lua.compile(tmaname,tmcname)
end
@@ -10939,7 +11974,7 @@ function caches.savecontent(cachename,dataname,content)
content = content,
uuid = os.uuid(),
}
- local ok = io.savedata(luaname,table.serialize(data,true))
+ local ok = io.savedata(luaname,serialize(data,true))
if ok then
if trace_locating then
report_resolvers("category '%s', cachename '%s' saved in '%s'",dataname,cachename,luaname)
@@ -11112,7 +12147,7 @@ if not modules then modules = { } end modules ['data-res'] = {
-- instance but for practical purposes we now avoid this and use a
-- instance variable. We always have one instance active (sort of global).
--- todo: cache:/// home:///
+-- todo: cache:/// home:/// selfautoparent:/// (sometime end 2012)
local format, gsub, find, lower, upper, match, gmatch = string.format, string.gsub, string.find, string.lower, string.upper, string.match, string.gmatch
local concat, insert, sortedkeys = table.concat, table.insert, table.sortedkeys
@@ -11155,7 +12190,58 @@ resolvers.criticalvars = allocate { "SELFAUTOLOC", "SELFAUTODIR", "SELFAUTOPARE
resolvers.luacnfname = 'texmfcnf.lua'
resolvers.luacnfstate = "unknown"
-resolvers.luacnfspec = '{selfautodir:,selfautoparent:};{selfautodir:,selfautoparent:}{/share,}/texmf{-local,};{selfautodir:,selfautoparent:}{/share,}/texmf{-local,}/web2c'
+-- The web2c tex binaries as well as kpse have built in paths for the configuration
+-- files and there can be a depressing truckload of them. This is actually the weak
+-- spot of a distribution. So we don't want:
+--
+-- resolvers.luacnfspec = '{$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,}/web2c}'
+--
+-- but instead use:
+--
+-- resolvers.luacnfspec = 'selfautoparent:{/texmf{-local,}{,/web2c}}'
+--
+-- which does not make texlive happy as there is a texmf-local tree one level up
+-- (sigh), so we need this. (We can assume web2c as mkiv does not run on older
+-- texlives anyway.)
+--
+-- texlive:
+--
+-- selfautodir:
+-- selfautoparent:
+-- selfautodir:share/texmf-local/web2c
+-- selfautodir:share/texmf/web2c
+-- selfautodir:texmf-local/web2c
+-- selfautodir:texmf/web2c
+-- selfautoparent:share/texmf-local/web2c
+-- selfautoparent:share/texmf/web2c
+-- selfautoparent:texmf-local/web2c
+-- selfautoparent:texmf/web2c
+--
+-- minimals:
+--
+-- home:texmf/web2c
+-- selfautoparent:texmf-local/web2c
+-- selfautoparent:texmf-context/web2c
+-- selfautoparent:texmf/web2c
+
+if this_is_texlive then
+ -- resolvers.luacnfspec = '{selfautodir:,selfautoparent:}{,{/share,}/texmf{-local,}/web2c}'
+ -- resolvers.luacnfspec = '{selfautodir:{/share,}/texmf-local/web2c,selfautoparent:{/share,}/texmf{-local,}/web2c}'
+ -- resolvers.luacnfspec = 'selfautodir:/texmf-local/web2c;selfautoparent:/texmf{-local,}/web2c'
+ resolvers.luacnfspec = 'selfautodir:;selfautoparent:;{selfautodir:,selfautoparent:}{/share,}/texmf{-local,}/web2c'
+else
+ resolvers.luacnfspec = 'home:texmf/web2c;selfautoparent:texmf{-local,-context,}/web2c'
+end
+
+-- which (as we want users to use the web2c path) can be simplified to this:
+--
+-- if environment and environment.ownpath and string.find(environment.ownpath,"[\\/]texlive[\\/]") then
+-- resolvers.luacnfspec = 'selfautodir:/texmf-local/web2c,selfautoparent:/texmf-local/web2c,selfautoparent:/texmf/web2c'
+-- else
+-- resolvers.luacnfspec = 'selfautoparent:/texmf-local/web2c,selfautoparent:/texmf/web2c'
+-- end
+
+
local unset_variable = "unset"
@@ -11177,7 +12263,7 @@ local instance = resolvers.instance or nil -- the current one (fast access)
function resolvers.setenv(key,value,raw)
if instance then
-- this one will be consulted first when we stay inside
- -- the current environment
+ -- the current environment (prefixes are not resolved here)
instance.environment[key] = value
-- we feed back into the environment, and as this is used
-- by other applications (via os.execute) we need to make
@@ -11257,6 +12343,7 @@ function resolvers.newinstance() -- todo: all vars will become lowercase and alp
remember = true,
diskcache = true,
renewcache = false,
+ renewtree = false,
loaderror = false,
savelists = true,
pattern = nil, -- lists
@@ -11348,7 +12435,7 @@ local function makepathexpression(str)
end
end
-local function reportcriticalvariables()
+local function reportcriticalvariables(cnfspec)
if trace_locating then
for i=1,#resolvers.criticalvars do
local k = resolvers.criticalvars[i]
@@ -11356,6 +12443,14 @@ local function reportcriticalvariables()
report_resolving("variable '%s' set to '%s'",k,v)
end
report_resolving()
+ if cnfspec then
+ if type(cnfspec) == "table" then
+ report_resolving("using configuration specification '%s'",concat(cnfspec,","))
+ else
+ report_resolving("using configuration specification '%s'",cnfspec)
+ end
+ end
+ report_resolving()
end
reportcriticalvariables = function() end
end
@@ -11370,7 +12465,7 @@ local function identify_configuration_files()
else
resolvers.luacnfstate = "environment"
end
- reportcriticalvariables()
+ reportcriticalvariables(cnfspec)
local cnfpaths = expandedpathfromlist(resolvers.splitpath(cnfspec))
local luacnfname = resolvers.luacnfname
for i=1,#cnfpaths do
@@ -11406,6 +12501,19 @@ local function load_configuration_files()
if blob then
local setups = instance.setups
local data = blob()
+ local parent = data and data.parent
+ if parent then
+ local filename = filejoin(pathname,parent)
+ local realname = resolvers.resolve(filename) -- no shortcut
+ local blob = loadfile(realname)
+ if blob then
+ local parentdata = blob()
+ if parentdata then
+ report_resolving("loading configuration file '%s'",filename)
+ data = table.merged(parentdata,data)
+ end
+ end
+ end
data = data and data.content
if data then
if trace_locating then
@@ -11495,11 +12603,6 @@ local function locate_file_databases()
local runtime = stripped == path
path = resolvers.cleanpath(path)
local spec = resolvers.splitmethod(stripped)
- -- TH Original did this the other way around (elseif and if tests
- -- reversed) but then the else branch was never reached, as 'runtime'
- -- would effectively be ignored. In turn, that meant that the wrong
- -- locator method was used, such that tree: and file: were treated
- -- identically (cached and no runtime search).
if runtime and (spec.noscheme or spec.scheme == "file") then
stripped = "tree:///" .. stripped
elseif spec.scheme == "cache" or spec.scheme == "file" then
@@ -11507,7 +12610,7 @@ local function locate_file_databases()
end
if trace_locating then
if runtime then
- report_resolving("locating list of '%s' (runtime)",path)
+ report_resolving("locating list of '%s' (runtime) (%s)",path,stripped)
else
report_resolving("locating list of '%s' (cached)",path)
end
@@ -11551,6 +12654,39 @@ local function save_file_databases() -- will become cachers
end
end
+function resolvers.renew(hashname)
+ if hashname and hashname ~= "" then
+ local expanded = resolvers.expansion(hashname) or ""
+ if expanded ~= "" then
+ if trace_locating then
+ report_resolving("identifying tree '%s' from '%s'",expanded,hashname)
+ end
+ hashname = expanded
+ else
+ if trace_locating then
+ report_resolving("identifying tree '%s'",hashname)
+ end
+ end
+ local realpath = resolvers.resolve(hashname)
+ if lfs.isdir(realpath) then
+ if trace_locating then
+ report_resolving("using path '%s'",realpath)
+ end
+ methodhandler('generators',hashname)
+ -- could be shared
+ local content = instance.files[hashname]
+ caches.collapsecontent(content)
+ if trace_locating then
+ report_resolving("saving tree '%s'",hashname)
+ end
+ caches.savecontent(hashname,"files",content)
+ -- till here
+ else
+ report_resolving("invalid path '%s'",realpath)
+ end
+ end
+end
+
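-- Possible usage (the tree name is hypothetical): renew one tree's file
-- database in place instead of regenerating all caches:
--
-- resolvers.renew("texmf-local")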
local function load_databases()
locate_file_databases()
if instance.diskcache and not instance.renewcache then
@@ -11838,6 +12974,7 @@ local function collect_files(names)
if dname == "" or find(dname,"^%.") then
dname = false
else
+dname = gsub(dname,"*","%.*")
dname = "/" .. dname .. "$"
end
local hashes = instance.hashes
@@ -11897,9 +13034,22 @@ local function collect_files(names)
return noffiles > 0 and filelist or nil
end
-function resolvers.registerintrees(name)
- if not find(name,"^%.") then
- instance.foundintrees[name] = (instance.foundintrees[name] or 0) + 1 -- maybe only one
+local fit = { }
+
+function resolvers.registerintrees(filename,format,filetype,usedmethod,foundname)
+ local foundintrees = instance.foundintrees
+ if usedmethod == "direct" and filename == foundname and fit[foundname] then
+ -- just an extra lookup after a test on presence
+ else
+ local t = {
+ filename = filename,
+ format = format ~= "" and format or nil,
+ filetype = filetype ~= "" and filetype or nil,
+ usedmethod = usedmethod,
+ foundname = foundname,
+ }
+ fit[foundname] = t
+ foundintrees[#foundintrees+1] = t
end
end
@@ -11919,297 +13069,423 @@ end
local preparetreepattern = Cs((P(".")/"%%." + P("-")/"%%-" + P(1))^0 * Cc("$"))
--- this one is split in smaller functions but it needs testing
+-- -- -- begin of main file search routing -- -- -- needs checking as previous has been patched
-local function collect_instance_files(filename,askedformat,allresults) -- todo : plugin (scanners, checkers etc)
- local result = { }
- local stamp = nil
- askedformat = askedformat or ""
- filename = collapsepath(filename)
- -- speed up / beware: format problem
- if instance.remember and not allresults then
- stamp = filename .. "--" .. askedformat
- if instance.found[stamp] then
+local collect_instance_files
+
+local function find_analyze(filename,askedformat,allresults)
+ local filetype, wantedfiles, ext = '', { }, fileextname(filename)
+ -- too tricky as filename can be bla.1.2.3:
+ --
+ -- if not suffixmap[ext] then
+ -- wantedfiles[#wantedfiles+1] = filename
+ -- end
+ wantedfiles[#wantedfiles+1] = filename
+ if askedformat == "" then
+ if ext == "" or not suffixmap[ext] then
+ local defaultsuffixes = resolvers.defaultsuffixes
+ for i=1,#defaultsuffixes do
+ local forcedname = filename .. '.' .. defaultsuffixes[i]
+ wantedfiles[#wantedfiles+1] = forcedname
+ filetype = resolvers.formatofsuffix(forcedname)
+ if trace_locating then
+ report_resolving("forcing filetype '%s'",filetype)
+ end
+ end
+ else
+ filetype = resolvers.formatofsuffix(filename)
if trace_locating then
- report_resolving("remembered file '%s'",filename)
+ report_resolving("using suffix based filetype '%s'",filetype)
end
- resolvers.registerintrees(filename) -- for tracing used files
- return instance.found[stamp]
end
- end
- if not dangerous[askedformat] then
- if isreadable(filename) then
- if trace_detail then
- report_resolving("file '%s' found directly",filename)
- end
- if stamp then
- instance.found[stamp] = { filename }
+ else
+ if ext == "" or not suffixmap[ext] then
+ local format_suffixes = suffixes[askedformat]
+ if format_suffixes then
+ for i=1,#format_suffixes do
+ wantedfiles[#wantedfiles+1] = filename .. "." .. format_suffixes[i]
+ end
end
- return { filename }
+ end
+ filetype = askedformat
+ if trace_locating then
+ report_resolving("using given filetype '%s'",filetype)
end
end
+ return filetype, wantedfiles
+end
+
+local function find_direct(filename,allresults)
+ if not dangerous[askedformat] and isreadable(filename) then
+ if trace_detail then
+ report_resolving("file '%s' found directly",filename)
+ end
+ return "direct", { filename }
+ end
+end
+
+local function find_wildcard(filename,allresults)
if find(filename,'%*') then
if trace_locating then
report_resolving("checking wildcard '%s'", filename)
end
- result = resolvers.findwildcardfiles(filename) -- we can use th elocal
- elseif file.is_qualified_path(filename) then
- if isreadable(filename) then
- if trace_locating then
- report_resolving("qualified name '%s'", filename)
- end
- result = { filename }
- else
- local forcedname, ok, suffix = "", false, fileextname(filename)
- if suffix == "" then -- why
- local format_suffixes = askedformat == "" and resolvers.defaultsuffixes or suffixes[askedformat]
- if format_suffixes then
- for i=1,#format_suffixes do
- local s = format_suffixes[i]
- forcedname = filename .. "." .. s
- if isreadable(forcedname) then
- if trace_locating then
- report_resolving("no suffix, forcing format filetype '%s'", s)
- end
- result, ok = { forcedname }, true
- break
- end
- end
- end
- end
- if not ok and suffix ~= "" then
- -- try to find in tree (no suffix manipulation), here we search for the
- -- matching last part of the name
- local basename = filebasename(filename)
- local pattern = lpegmatch(preparetreepattern,filename)
- -- messy .. to be sorted out
- local savedformat = askedformat
- local format = savedformat or ""
- if format == "" then
- askedformat = resolvers.formatofsuffix(suffix)
- end
- if not format then
- askedformat = "othertextfiles" -- kind of everything, maybe texinput is better
- end
- --
- if basename ~= filename then
- local resolved = collect_instance_files(basename,askedformat,allresults)
- if #result == 0 then -- shouldn't this be resolved ?
- local lowered = lower(basename)
- if filename ~= lowered then
- resolved = collect_instance_files(lowered,askedformat,allresults)
- end
- end
- resolvers.format = savedformat
- --
- for r=1,#resolved do
- local rr = resolved[r]
- if find(rr,pattern) then
- result[#result+1], ok = rr, true
- end
- end
- end
- -- a real wildcard:
- --
- -- if not ok then
- -- local filelist = collect_files({basename})
- -- for f=1,#filelist do
- -- local ff = filelist[f][3] or ""
- -- if find(ff,pattern) then
- -- result[#result+1], ok = ff, true
- -- end
- -- end
- -- end
- end
- if not ok and trace_locating then
- report_resolving("qualified name '%s'", filename)
- end
+ local method, result = resolvers.findwildcardfiles(filename)
+ if result then
+ return "wildcard", result
end
- else
- -- search spec
- local filetype, done, wantedfiles, ext = '', false, { }, fileextname(filename)
- -- -- tricky as filename can be bla.1.2.3
- -- if not suffixmap[ext] then --- probably needs to be done elsewhere too
- -- wantedfiles[#wantedfiles+1] = filename
- -- end
- wantedfiles[#wantedfiles+1] = filename
- if askedformat == "" then
- if ext == "" or not suffixmap[ext] then
- local defaultsuffixes = resolvers.defaultsuffixes
- for i=1,#defaultsuffixes do
- local forcedname = filename .. '.' .. defaultsuffixes[i]
- wantedfiles[#wantedfiles+1] = forcedname
- filetype = resolvers.formatofsuffix(forcedname)
+ end
+end
+
+local function find_qualified(filename,allresults) -- this one will be split too
+ if not file.is_qualified_path(filename) then
+ return
+ end
+ if trace_locating then
+ report_resolving("checking qualified name '%s'", filename)
+ end
+ if isreadable(filename) then
+ if trace_detail then
+ report_resolving("qualified file '%s' found", filename)
+ end
+ return "qualified", { filename }
+ end
+ if trace_detail then
+ report_resolving("locating qualified file '%s'", filename)
+ end
+ local forcedname, suffix = "", fileextname(filename)
+ if suffix == "" then -- why
+ local format_suffixes = askedformat == "" and resolvers.defaultsuffixes or suffixes[askedformat]
+ if format_suffixes then
+ for i=1,#format_suffixes do
+ local s = format_suffixes[i]
+ forcedname = filename .. "." .. s
+ if isreadable(forcedname) then
if trace_locating then
- report_resolving("forcing filetype '%s'",filetype)
+ report_resolving("no suffix, forcing format filetype '%s'", s)
end
+ return "qualified", { forcedname }
end
- else
- filetype = resolvers.formatofsuffix(filename)
- if trace_locating then
- report_resolving("using suffix based filetype '%s'",filetype)
+ end
+ end
+ end
+ if suffix and suffix ~= "" then
+ -- try to find in tree (no suffix manipulation), here we search for the
+ -- matching last part of the name
+ local basename = filebasename(filename)
+ local pattern = lpegmatch(preparetreepattern,filename)
+ -- messy .. to be sorted out
+ local savedformat = askedformat
+ local format = savedformat or ""
+ if format == "" then
+ askedformat = resolvers.formatofsuffix(suffix)
+ end
+ if not format then
+ askedformat = "othertextfiles" -- kind of everything, maybe all
+ end
+ --
+ if basename ~= filename then
+ local resolved = collect_instance_files(basename,askedformat,allresults)
+ if #resolved == 0 then
+ local lowered = lower(basename)
+ if filename ~= lowered then
+ resolved = collect_instance_files(lowered,askedformat,allresults)
end
end
- else
- if ext == "" or not suffixmap[ext] then
- local format_suffixes = suffixes[askedformat]
- if format_suffixes then
- for i=1,#format_suffixes do
- wantedfiles[#wantedfiles+1] = filename .. "." .. format_suffixes[i]
+ resolvers.format = savedformat
+ --
+ if #resolved > 0 then
+ local result = { }
+ for r=1,#resolved do
+ local rr = resolved[r]
+ if find(rr,pattern) then
+ result[#result+1] = rr
end
end
+ if #result > 0 then
+ return "qualified", result
+ end
end
- filetype = askedformat
- if trace_locating then
- report_resolving("using given filetype '%s'",filetype)
- end
end
- local typespec = resolvers.variableofformat(filetype)
- local pathlist = resolvers.expandedpathlist(typespec)
- if not pathlist or #pathlist == 0 then
- -- no pathlist, access check only / todo == wildcard
- if trace_detail then
- report_resolving("checking filename '%s', filetype '%s', wanted files '%s'",filename, filetype or '?',concat(wantedfiles," | "))
+ -- a real wildcard:
+ --
+ -- local filelist = collect_files({basename})
+ -- result = { }
+ -- for f=1,#filelist do
+ -- local ff = filelist[f][3] or ""
+ -- if find(ff,pattern) then
+ -- result[#result+1], ok = ff, true
+ -- end
+ -- end
+ -- if #result > 0 then
+ -- return "qualified", result
+ -- end
+ end
+end
+
+local function check_subpath(fname)
+ if isreadable(fname) then
+ if trace_detail then
+ report_resolving("found '%s' by deep scanning",fname)
+ end
+ return fname
+ end
+end
+
+local function find_intree(filename,filetype,wantedfiles,allresults)
+ local typespec = resolvers.variableofformat(filetype)
+ local pathlist = resolvers.expandedpathlist(typespec)
+ local method = "intree"
+ if pathlist and #pathlist > 0 then
+ -- list search
+ local filelist = collect_files(wantedfiles)
+ local dirlist = { }
+ if filelist then
+ for i=1,#filelist do
+ dirlist[i] = filedirname(filelist[i][3]) .. "/" -- was [2] .. gamble
end
- for k=1,#wantedfiles do
- local fname = wantedfiles[k]
- if fname and isreadable(fname) then
- filename, done = fname, true
- result[#result+1] = filejoin('.',fname)
- break
+ end
+ if trace_detail then
+ report_resolving("checking filename '%s'",filename)
+ end
+ local result = { }
+ for k=1,#pathlist do
+ local path = pathlist[k]
+ local pathname = lpegmatch(inhibitstripper,path)
+ local doscan = path == pathname -- no ^!!
+ if not find (pathname,'//$') then
+ doscan = false -- we check directly on the path
+ end
+ local done = false
+ -- using file list
+ if filelist then -- database
+ -- compare list entries with permitted pattern -- /xx /xx//
+ local expression = makepathexpression(pathname)
+ if trace_detail then
+ report_resolving("using pattern '%s' for path '%s'",expression,pathname)
end
- end
- -- this is actually 'other text files' or 'any' or 'whatever'
- local filelist = collect_files(wantedfiles)
- local fl = filelist and filelist[1]
- if fl then
- filename = fl[3] -- not local?
- result[#result+1] = resolvers.resolve(filename)
- done = true
- end
- else
- -- list search
- local filelist = collect_files(wantedfiles)
- local dirlist = { }
- if filelist then
- for i=1,#filelist do
- dirlist[i] = filedirname(filelist[i][3]) .. "/" -- was [2] .. gamble
- end
- end
- if trace_detail then
- report_resolving("checking filename '%s'",filename)
- end
- for k=1,#pathlist do
- local path = pathlist[k]
- local pathname = lpegmatch(inhibitstripper,path)
- local doscan = path == pathname -- no ^!!
- if not find (pathname, '//$') then doscan = false end
- done = false
- -- using file list
- if filelist then
- -- compare list entries with permitted pattern -- /xx /xx//
- local expression = makepathexpression(pathname)
- if trace_detail then
- report_resolving("using pattern '%s' for path '%s'",expression,pathname)
- end
- for k=1,#filelist do
- local fl = filelist[k]
- local f = fl[2]
- local d = dirlist[k]
- if find(d,expression) then
- -- todo, test for readable
- result[#result+1] = resolvers.resolve(fl[3]) -- no shortcut
- done = true
- if allresults then
- if trace_detail then
- report_resolving("match to '%s' in hash for file '%s' and path '%s', continue scanning",expression,f,d)
- end
- else
- if trace_detail then
- report_resolving("match to '%s' in hash for file '%s' and path '%s', quit scanning",expression,f,d)
- end
- break
+ for k=1,#filelist do
+ local fl = filelist[k]
+ local f = fl[2]
+ local d = dirlist[k]
+ if find(d,expression) then
+ -- todo, test for readable
+ result[#result+1] = resolvers.resolve(fl[3]) -- no shortcut
+ done = true
+ if allresults then
+ if trace_detail then
+ report_resolving("match to '%s' in hash for file '%s' and path '%s', continue scanning",expression,f,d)
end
- elseif trace_detail then
- report_resolving("no match to '%s' in hash for file '%s' and path '%s'",expression,f,d)
+ else
+ if trace_detail then
+ report_resolving("match to '%s' in hash for file '%s' and path '%s', quit scanning",expression,f,d)
+ end
+ break
end
+ elseif trace_detail then
+ report_resolving("no match to '%s' in hash for file '%s' and path '%s'",expression,f,d)
end
end
- if not done then
- -- check if on disk / unchecked / does not work at all / also zips
- -- TH perhaps it did not work because of missing resolvers.resolve()
- -- and resolvers.scanfiles() calls ...
- pathname = resolvers.resolve(pathname)
- local scheme = url.hasscheme(pathname)
- if not scheme or scheme == "file" then
- local pname = gsub(pathname,"%.%*$",'')
- if not find(pname,"%*") then
- local ppname = gsub(pname,"/+$","")
- if can_be_dir(ppname) then
- local files = {}
- if doscan then files = resolvers.scanfiles(ppname) end
+ end
+ if done then
+ method = "database"
+ else
+ method = "filesystem" -- bonus, even when !! is specified
+ pathname = gsub(pathname,"/+$","")
+ pathname = resolvers.resolve(pathname)
+ local scheme = url.hasscheme(pathname)
+ if not scheme or scheme == "file" then
+ local pname = gsub(pathname,"%.%*$",'')
+ if not find(pname,"%*") then
+ if can_be_dir(pname) then
+ -- quick root scan first
+ for k=1,#wantedfiles do
+ local w = wantedfiles[k]
+ local fname = check_subpath(filejoin(pname,w))
+ if fname then
+ result[#result+1] = fname
+ done = true
+ if not allresults then
+ break
+ end
+ end
+ end
+ if not done and doscan then
+ -- collect files in path (and cache the result)
+ local files = resolvers.simplescanfiles(pname,false,true)
for k=1,#wantedfiles do
local w = wantedfiles[k]
- local subpath = ''
- if files[w] then
- subpath = type(files[w]) == "table" and files[w][1] or files[w]
- end
- local fname = filejoin(ppname,subpath,w)
- if isreadable(fname) then
- if trace_detail then
- report_resolving("found '%s' by scanning",fname)
+ local subpath = files[w]
+ if not subpath or subpath == "" then
+ -- rootscan already done
+ elseif type(subpath) == "string" then
+ local fname = check_subpath(filejoin(pname,subpath,w))
+ if fname then
+ result[#result+1] = fname
+ done = true
+ if not allresults then
+ break
+ end
+ end
+ else
+ for i=1,#subpath do
+ local sp = subpath[i]
+ if sp == "" then
+ -- roottest already done
+ else
+ local fname = check_subpath(filejoin(pname,sp,w))
+ if fname then
+ result[#result+1] = fname
+ done = true
+ if not allresults then
+ break
+ end
+ end
+ end
+ end
+ if done and not allresults then
+ break
end
- result[#result+1] = fname
- done = true
- if not allresults then break end
end
end
- else
- -- no access needed for non existing path, speedup (esp in large tree with lots of fake)
end
end
+ else
+ -- no access needed for non existing path, speedup (esp in large tree with lots of fake)
end
end
- if not done and doscan then
- -- todo: slow path scanning ... although we now have tree:// supported in $TEXMF
- end
- if done and not allresults then break end
+ end
+ -- todo recursive scanning
+ if done and not allresults then
+ break
end
end
+ if #result > 0 then
+ return method, result
+ end
end
- for k=1,#result do
- local rk = collapsepath(result[k])
- result[k] = rk
- resolvers.registerintrees(rk) -- for tracing used files
+end
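
Two markers steer the per-path behaviour in find_intree: a leading '!!' (stripped by inhibitstripper) apparently means "trust the file database, do not scan the disk", and only a trailing '//' permits a deeper scan; a plain path is checked directly. A small stand-alone classification sketch, for illustration only:

local find, gsub = string.find, string.gsub

local function classify(path)
  local pathname = gsub(path,"^!!","")   -- what inhibitstripper effectively does
  local doscan   = path == pathname      -- no '!!' prefix present
  if not find(pathname,"//$") then
    doscan = false                       -- plain path: checked directly, never scanned
  end
  return pathname, doscan
end

for _, spec in ipairs { "!!/texmf//", "/texmf//", "/texmf/fonts" } do
  print(spec, classify(spec))
end
-- !!/texmf//     /texmf//       false
-- /texmf//       /texmf//       true
-- /texmf/fonts   /texmf/fonts   false
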
+
+local function find_onpath(filename,filetype,wantedfiles,allresults)
+ if trace_detail then
+ report_resolving("checking filename '%s', filetype '%s', wanted files '%s'",filename, filetype or '?',concat(wantedfiles," | "))
+ end
+ local result = { }
+ for k=1,#wantedfiles do
+ local fname = wantedfiles[k]
+ if fname and isreadable(fname) then
+ filename = fname
+ result[#result+1] = filejoin('.',fname)
+ if not allresults then
+ break
+ end
+ end
end
- if stamp then
- instance.found[stamp] = result
+ if #result > 0 then
+ return "onpath", result
end
- return result
end
--- -- -- begin of main file search routing -- -- --
-
-
-
-
-
-
-
+local function find_otherwise(filename,filetype,wantedfiles,allresults) -- other text files | any | whatever
+ local filelist = collect_files(wantedfiles)
+ local fl = filelist and filelist[1]
+ if fl then
+ return "otherwise", { resolvers.resolve(fl[3]) } -- filename
+ end
+end
+-- we could have a loop over the six finder functions (see the sketch following
+-- collect_instance_files below) but then we would always have to analyze first
+collect_instance_files = function(filename,askedformat,allresults) -- uses nested
+ askedformat = askedformat or ""
+ filename = collapsepath(filename)
+ if allresults then
+ -- no need for caching, only used for tracing
+ local filetype, wantedfiles = find_analyze(filename,askedformat)
+ local results = {
+ { find_direct (filename,true) },
+ { find_wildcard (filename,true) },
+ { find_qualified(filename,true) },
+ { find_intree (filename,filetype,wantedfiles,true) },
+ { find_onpath (filename,filetype,wantedfiles,true) },
+ { find_otherwise(filename,filetype,wantedfiles,true) },
+ }
+ local result, status, done = { }, { }, { }
+ for k, r in next, results do
+ local method, list = r[1], r[2]
+ if method and list then
+ for i=1,#list do
+ local c = collapsepath(list[i])
+ if not done[c] then
+ result[#result+1] = c
+ done[c] = true
+ end
+ status[#status+1] = format("%-10s: %s",method,c)
+ end
+ end
+ end
+ if trace_detail then
+ report_resolving("lookup status: %s",table.serialize(status,filename))
+ end
+ return result, status
+ else
+ local method, result, stamp, filetype, wantedfiles
+ if instance.remember then
+ stamp = format("%s--%s", filename, askedformat)
+ result = stamp and instance.found[stamp]
+ if result then
+ if trace_locating then
+ report_resolving("remembered file '%s'",filename)
+ end
+ return result
+ end
+ end
+ method, result = find_direct(filename)
+ if not result then
+ method, result = find_wildcard(filename)
+ if not result then
+ method, result = find_qualified(filename)
+ if not result then
+ filetype, wantedfiles = find_analyze(filename,askedformat)
+ method, result = find_intree(filename,filetype,wantedfiles)
+ if not result then
+ method, result = find_onpath(filename,filetype,wantedfiles)
+ if not result then
+ method, result = find_otherwise(filename,filetype,wantedfiles)
+ end
+ end
+ end
+ end
+ end
+ if result and #result > 0 then
+ local foundname = collapsepath(result[1])
+ resolvers.registerintrees(filename,askedformat,filetype,method,foundname)
+ result = { foundname }
+ else
+ result = { } -- maybe false
+ end
+ if stamp then
+ if trace_locating then
+ report_resolving("remembering file '%s'",filename)
+ end
+ instance.found[stamp] = result
+ end
+ return result
+ end
+end
-- -- -- end of main file search routing -- -- --
+
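
The comment before collect_instance_files mentions the rejected alternative of looping over the six finders instead of nesting the calls. A minimal, self-contained sketch of what such a dispatch loop could look like; the finder stubs below are placeholders, not the real resolver functions:

local function find_direct(name)    return nil end
local function find_wildcard(name)  return nil end
local function find_qualified(name) return nil end
local function find_intree(name)    return "intree", { "/tree/" .. name } end
local function find_onpath(name)    return nil end
local function find_otherwise(name) return nil end

local finders = {
  find_direct, find_wildcard, find_qualified,
  find_intree, find_onpath, find_otherwise,
}

local function collect(name)
  for i=1,#finders do
    local method, result = finders[i](name)
    if result then
      return method, result -- the first finder that succeeds wins
    end
  end
  return "none", { }
end

print(collect("context.mkiv")) -- intree followed by the result table
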
local function findfiles(filename,filetype,allresults)
- local result = collect_instance_files(filename,filetype or "",allresults)
- if #result == 0 then
+ local result, status = collect_instance_files(filename,filetype or "",allresults)
+ if not result or #result == 0 then
local lowered = lower(filename)
if filename ~= lowered then
- return collect_instance_files(lowered,filetype or "",allresults)
+ result, status = collect_instance_files(lowered,filetype or "",allresults)
end
end
- return result
+ return result or { }, status
end
function resolvers.findfiles(filename,filetype)
@@ -12370,6 +13646,10 @@ function resolvers.load(option)
return files and next(files) and true
end
+function resolvers.loadtime()
+ return statistics.elapsedtime(instance)
+end
+
local function report(str)
if trace_locating then
report_resolving(str) -- has already verbose
@@ -12383,6 +13663,9 @@ function resolvers.dowithfilesandreport(command, files, ...) -- will move
if trace_locating then
report('') -- ?
end
+ if type(files) == "string" then
+ files = { files }
+ end
for f=1,#files do
local file = files[f]
local result = command(file,...)
@@ -12540,6 +13823,8 @@ local cleanpath, findgivenfile, expansion = resolvers.cleanpath, resolvers.findg
local getenv = resolvers.getenv -- we can probably also use resolvers.expansion
local P, Cs, lpegmatch = lpeg.P, lpeg.Cs, lpeg.match
+-- getenv = function(...) return resolvers.getenv(...) end -- needs checking (the definition changes later on)
+
prefixes.environment = function(str)
return cleanpath(expansion(str))
end
@@ -12578,7 +13863,7 @@ end
prefixes.filename = function(str)
local fullname = findgivenfile(str) or ""
- return cleanpath(file.basename((fullname ~= "" and fullname) or str))
+ return cleanpath(file.basename((fullname ~= "" and fullname) or str)) -- no cleanpath needed here
end
prefixes.pathname = function(str)
@@ -12621,8 +13906,9 @@ function resolvers.allprefixes(separator)
end
local function _resolve_(method,target)
- if prefixes[method] then
- return prefixes[method](target)
+ local action = prefixes[method]
+ if action then
+ return action(target)
else
return method .. ":" .. target
end
@@ -12637,7 +13923,7 @@ end
local function resolve(str) -- use schemes, this one is then for the commandline only
local res = resolved[str]
if not res then
- res = gsub(str,"([a-z][a-z]+):([^ \"\';]*)",_resolve_)
+ res = gsub(str,"([a-z][a-z]+):([^ \"\';]*)",_resolve_) -- home:xx;selfautoparent:xx; etc
resolved[str] = res
abstract[res] = str
end
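
The resolve helper above expands every prefix:value pair (home:..., environment:..., selfautoparent:... and so on) through the prefixes table with a single gsub. A small stand-alone sketch with two toy handlers; the /home/user path is an arbitrary example, not what the real handlers return:

local gsub = string.gsub

local prefixes = {
  home        = function(str) return "/home/user/" .. str end,
  environment = function(str) return os.getenv(str) or "" end,
}

local function _resolve_(method,target)
  local action = prefixes[method]
  if action then
    return action(target)
  else
    return method .. ":" .. target -- unknown prefixes are left untouched
  end
end

local function resolve(str)
  return (gsub(str,"([a-z][a-z]+):([^ \"';]*)",_resolve_))
end

print(resolve("home:texmf/tex")) -- /home/user/texmf/tex
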
@@ -12797,7 +14083,7 @@ end
function generators.file(specification)
local path = specification.filename
- local content = resolvers.scanfiles(path)
+ local content = resolvers.scanfiles(path,false,true) -- scan once
resolvers.registerfilehash(path,content,true)
end
@@ -13152,6 +14438,18 @@ local archives = zip.archives
zip.registeredfiles = zip.registeredfiles or { }
local registeredfiles = zip.registeredfiles
+local limited = false
+
+directives.register("system.inputmode", function(v)
+ if not limited then
+ local i_limiter = io.i_limiter(v)
+ if i_limiter then
+ zip.open = i_limiter.protect(zip.open)
+ limited = true
+ end
+ end
+end)
+
local function validzip(str) -- todo: use url splitter
if not find(str,"^zip://") then
return "zip:///" .. str
@@ -13419,11 +14717,11 @@ function resolvers.finders.tree(specification)
end
function resolvers.locators.tree(specification)
- -- TH: resolvers.resolve() call was missing here
- local name = resolvers.resolve(specification.filename)
- if name ~= '' and lfs.isdir(name) then
+ local name = specification.filename
+ local realname = resolvers.resolve(name) -- no shortcut
+ if realname and realname ~= '' and lfs.isdir(realname) then
if trace_locating then
- report_trees("locator '%s' found",name)
+ report_trees("locator '%s' found",realname)
end
resolvers.appendhash('tree',name,false) -- don't cache
elseif trace_locating then
@@ -13437,6 +14735,8 @@ function resolvers.hashers.tree(specification)
report_trees("analysing '%s'",name)
end
resolvers.methodhandler("hashers",name)
+
+ resolvers.generators.file(specification)
end
resolvers.concatinators.tree = resolvers.concatinators.file
@@ -13596,8 +14896,15 @@ local function loaded(libpaths,name,simple)
end
package.loaders[2] = function(name) -- was [#package.loaders+1]
- if trace_locating then -- mode detail
- report_libraries("! locating '%s'",name)
+ if file.suffix(name) == "" then
+ name = file.addsuffix(name,"lua") -- maybe a list
+ if trace_locating then -- mode detail
+ report_libraries("! locating '%s' with forced suffix",name)
+ end
+ else
+ if trace_locating then -- mode detail
+ report_libraries("! locating '%s'",name)
+ end
end
for i=1,#libformats do
local format = libformats[i]
@@ -13804,7 +15111,13 @@ function resolvers.load_tree(tree,resolve)
-- Beware, we need to obey the relocatable autoparent so we
-- set TEXMFCNF to its raw value. This is somewhat tricky when
-- we run a mkii job from within. Therefore, in mtxrun, there
- -- is a resolve applied when we're in mkii/kpse mode.
+ -- is a resolve applied when we're in mkii/kpse mode or when
+ -- --resolve is passed to mtxrun. Maybe we should also set the
+  -- local AUTOPARENT etc. although these are always set anew.
+
+ if resolve then
+ resolvers.luacnfspec = resolvers.resolve(resolvers.luacnfspec)
+ end
setenv('SELFAUTOPARENT', newroot)
setenv('SELFAUTODIR', newtree)
@@ -13812,11 +15125,12 @@ function resolvers.load_tree(tree,resolve)
setenv('TEXROOT', newroot)
setenv('TEXOS', texos)
setenv('TEXMFOS', texmfos)
- setenv('TEXMFCNF', resolvers.luacnfspec, not resolve)
- setenv("PATH", newpath .. io.pathseparator .. getenv("PATH"))
+ setenv('TEXMFCNF', resolvers.luacnfspec,true) -- already resolved
+ setenv('PATH', newpath .. io.pathseparator .. getenv('PATH'))
report_tds("changing from root '%s' to '%s'",oldroot,newroot)
- report_tds("prepending '%s' to binary path",newpath)
+ report_tds("prepending '%s' to PATH",newpath)
+ report_tds("setting TEXMFCNF to '%s'",resolvers.luacnfspec)
report_tds()
end
end
@@ -13837,6 +15151,7 @@ if not modules then modules = { } end modules ['data-lst'] = {
-- used in mtxrun, can be loaded later .. todo
local find, concat, upper, format = string.find, table.concat, string.upper, string.format
+local fastcopy, sortedpairs = table.fastcopy, table.sortedpairs
resolvers.listers = resolvers.listers or { }
@@ -13867,10 +15182,10 @@ function resolvers.listers.variables(pattern)
end
end
end
- local env = table.fastcopy(environment)
- local var = table.fastcopy(variables)
- local exp = table.fastcopy(expansions)
- for key, value in table.sortedpairs(configured) do
+ local env = fastcopy(environment)
+ local var = fastcopy(variables)
+ local exp = fastcopy(expansions)
+ for key, value in sortedpairs(configured) do
if key ~= "" and (pattern == "" or find(upper(key),pattern)) then
report_lists(key)
report_lists(" env: %s",tabstr(rawget(environment,key)) or "unset")
@@ -13879,9 +15194,9 @@ function resolvers.listers.variables(pattern)
report_lists(" res: %s",resolvers.resolve(expansions[key]) or "unset")
end
end
- instance.environment = table.fastcopy(env)
- instance.variables = table.fastcopy(var)
- instance.expansions = table.fastcopy(exp)
+ instance.environment = fastcopy(env)
+ instance.variables = fastcopy(var)
+ instance.expansions = fastcopy(exp)
end
function resolvers.listers.configurations(report)
@@ -14168,8 +15483,8 @@ own = { } -- not local, might change
own.libs = { -- order can be made better
'l-string.lua',
- 'l-lpeg.lua',
'l-table.lua',
+ 'l-lpeg.lua',
'l-io.lua',
'l-number.lua',
'l-set.lua',
@@ -14240,7 +15555,7 @@ end
-- End of hack.
-local format, gsub, gmatch, match = string.format, string.gsub, string.gmatch, string.match
+local format, gsub, gmatch, match, find = string.format, string.gsub, string.gmatch, string.match, string.find
local concat = table.concat
own.name = (environment and environment.ownname) or arg[0] or 'mtxrun.lua'
@@ -14362,7 +15677,7 @@ local helpinfo = [[
--resolve resolve prefixed arguments
--ctxlua run internally (using preloaded libs)
--internal run script using built in libraries (same as --ctxlua)
---locate locate given filename
+--locate locate given filename in database (default) or system (--first --all --detail)
--autotree use texmf tree cf. env 'texmfstart_tree' or 'texmfstarttree'
--tree=pathtotree use given texmf tree (default file: 'setuptex.tmf')
@@ -14410,7 +15725,7 @@ local helpinfo = [[
local application = logs.application {
name = "mtxrun",
- banner = "ConTeXt TDS Runner Tool 1.30",
+ banner = "ConTeXt TDS Runner Tool 1.31",
helpinfo = helpinfo,
}
@@ -14455,12 +15770,12 @@ runners.registered = {
xmltools = { 'xmltools.rb', true },
-- luatools = { 'luatools.lua', true },
mtxtools = { 'mtxtools.rb', true },
- pdftrimwhite = { 'pdftrimwhite.pl', false }
+ pdftrimwhite = { 'pdftrimwhite.pl', false },
}
runners.launchers = {
windows = { },
- unix = { }
+ unix = { },
}
-- like runners.libpath("framework"): looks on script's subpath
@@ -14487,7 +15802,7 @@ function runners.prepare()
end
local touchname = environment.argument("iftouched")
if type(touchname) == "string" and touchname ~= "" then
- local oldname, newname = string.split(touchname, ",")
+ local oldname, newname = string.splitup(touchname, ",")
if oldname and newname and oldname ~= "" and newname ~= "" then
if not file.needs_updating(oldname,newname) then
if e_verbose then
@@ -14521,11 +15836,11 @@ function runners.execute_script(fullname,internal,nosplit)
if path ~= "" then
result = fullname
elseif name then
- name = name:gsub("^int[%a]*:",function()
+ name = gsub(name,"^int[%a]*:",function()
internal = true
return ""
end )
- name = name:gsub("^script:","")
+ name = gsub(name,"^script:","")
if suffix == "" and runners.registered[name] and runners.registered[name][1] then
name = runners.registered[name][1]
suffix = file.extname(name)
@@ -14555,13 +15870,15 @@ function runners.execute_script(fullname,internal,nosplit)
environment.ownscript = result
dofile(result)
else
- local quotedresult = result
- if string.match(result,' ') and not string.match(result,"^\".*\"$") then quotedresult = '"' .. result .. '"' end
local binary = runners.applications[file.extname(result)]
+ result = string.quoted(string.unquoted(result))
+ -- if string.match(result,' ') and not string.match(result,"^\".*\"$") then
+ -- result = '"' .. result .. '"'
+ -- end
if binary and binary ~= "" then
- quotedresult = binary .. " " .. quotedresult
+ result = binary .. " " .. result
end
- local command = quotedresult .. " " .. environment.reconstructcommandline(environment.arguments_after,noquote)
+ local command = result .. " " .. environment.reconstructcommandline(environment.arguments_after,noquote)
if e_verbose then
report()
report("executing: %s",command)
@@ -14606,7 +15923,7 @@ function runners.execute_program(fullname)
local before, after = environment.splitarguments(fullname)
for k=1,#after do after[k] = resolvers.resolve(after[k]) end
environment.initializearguments(after)
- fullname = fullname:gsub("^bin:","")
+ fullname = gsub(fullname,"^bin:","")
local command = fullname .. " " .. (environment.reconstructcommandline(after or "",noquote) or "")
report()
report("executing: %s",command)
@@ -14667,10 +15984,24 @@ function runners.resolve_string(filename)
end
end
-function runners.locate_file(filename)
- -- differs from texmfstart where locate appends .com .exe .bat ... todo
+-- differs from texmfstart where locate appends .com .exe .bat ... todo
+
+function runners.locate_file(filename) -- was given file but only searches in tree
if filename and filename ~= "" then
- runners.report_location(resolvers.findgivenfile(filename))
+ if environment.argument("first") then
+ runners.report_location(resolvers.findfile(filename))
+ -- resolvers.dowithfilesandreport(resolvers.findfile,filename)
+ elseif environment.argument("all") then
+ local result, status = resolvers.findfiles(filename)
+ if status and environment.argument("detail") then
+ runners.report_location(status)
+ else
+ runners.report_location(result)
+ end
+ else
+ runners.report_location(resolvers.findgivenfile(filename))
+ -- resolvers.dowithfilesandreport(resolvers.findgivenfile,filename)
+ end
end
end
@@ -14679,12 +16010,12 @@ function runners.locate_platform()
end
function runners.report_location(result)
- if e_verbose then
- reportline()
- if result and result ~= "" then
- report(result)
- else
- report("not found")
+ if type(result) == "table" then
+ for i=1,#result do
+ if i > 1 then
+ io.write("\n")
+ end
+ io.write(result[i])
end
else
io.write(result)
@@ -14742,8 +16073,8 @@ function resolvers.launch(str)
end
function runners.launch_file(filename)
- instance.allresults = true
trackers.enable("resolvers.locating")
+ local allresults = environment.arguments["all"]
local pattern = environment.arguments["pattern"]
if not pattern or pattern == "" then
pattern = filename
@@ -14751,15 +16082,15 @@ function runners.launch_file(filename)
if not pattern or pattern == "" then
report("provide name or --pattern=")
else
- local t = resolvers.findfiles(pattern)
+ local t = resolvers.findfiles(pattern,nil,allresults)
if not t or #t == 0 then
- t = resolvers.findfiles("*/" .. pattern)
+ t = resolvers.findfiles("*/" .. pattern,nil,allresults)
end
if not t or #t == 0 then
- t = resolvers.findfiles("*/" .. pattern .. "*")
+ t = resolvers.findfiles("*/" .. pattern .. "*",nil,allresults)
end
if t and #t > 0 then
- if environment.arguments["all"] then
+ if allresults then
for _, v in pairs(t) do
report("launching %s", v)
resolvers.launch(v)
@@ -14774,6 +16105,11 @@ function runners.launch_file(filename)
end
end
+local mtxprefixes = {
+ { "^mtx%-", "mtx-" },
+ { "^mtx%-t%-", "mtx-t-" },
+}
+
function runners.find_mtx_script(filename)
local function found(name)
local path = file.dirname(name)
@@ -14799,24 +16135,27 @@ function runners.find_mtx_script(filename)
return fullname
end
-- mtx- prefix checking
- local mtxprefix = (filename:find("^mtx%-") and "") or "mtx-"
- -- context namespace, mtx-<filename>
- fullname = mtxprefix .. filename
- fullname = found(fullname) or resolvers.findfile(fullname)
- if fullname and fullname ~= "" then
- return fullname
- end
- -- context namespace, mtx-<filename>s
- fullname = mtxprefix .. basename .. "s" .. "." .. suffix
- fullname = found(fullname) or resolvers.findfile(fullname)
- if fullname and fullname ~= "" then
- return fullname
- end
- -- context namespace, mtx-<filename minus trailing s>
- fullname = mtxprefix .. basename:gsub("s$","") .. "." .. suffix
- fullname = found(fullname) or resolvers.findfile(fullname)
- if fullname and fullname ~= "" then
- return fullname
+ for i=1,#mtxprefixes do
+ local mtxprefix = mtxprefixes[i]
+ mtxprefix = find(filename,mtxprefix[1]) and "" or mtxprefix[2]
+ -- context namespace, mtx-<filename>
+ fullname = mtxprefix .. filename
+ fullname = found(fullname) or resolvers.findfile(fullname)
+ if fullname and fullname ~= "" then
+ return fullname
+ end
+ -- context namespace, mtx-<filename>s
+ fullname = mtxprefix .. basename .. "s" .. "." .. suffix
+ fullname = found(fullname) or resolvers.findfile(fullname)
+ if fullname and fullname ~= "" then
+ return fullname
+ end
+ -- context namespace, mtx-<filename minus trailing s>
+ fullname = mtxprefix .. gsub(basename,"s$","") .. "." .. suffix
+ fullname = found(fullname) or resolvers.findfile(fullname)
+ if fullname and fullname ~= "" then
+ return fullname
+ end
end
-- context namespace, just <filename>
fullname = resolvers.findfile(filename)
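
The reworked prefix loop above tries, for each registered prefix ("mtx-" and "mtx-t-"), three candidates: the prefixed name, the prefixed basename with a trailing "s" added, and the prefixed basename with a trailing "s" removed. A stand-alone sketch that merely lists those candidates; "fonts.lua" is an arbitrary example input:

local gsub, find = string.gsub, string.find

local mtxprefixes = {
  { "^mtx%-",    "mtx-"   },
  { "^mtx%-t%-", "mtx-t-" },
}

local function candidates(filename)
  local basename = gsub(filename,"%.%w+$","")          -- strip the suffix
  local suffix   = filename:match("%.(%w+)$") or "lua"
  local list = { }
  for i=1,#mtxprefixes do
    local p = mtxprefixes[i]
    local prefix = find(filename,p[1]) and "" or p[2]
    list[#list+1] = prefix .. filename                                -- mtx-<name>
    list[#list+1] = prefix .. basename .. "s" .. "." .. suffix        -- mtx-<name>s
    list[#list+1] = prefix .. gsub(basename,"s$","") .. "." .. suffix -- minus trailing s
  end
  return list
end

for _, name in ipairs(candidates("fonts.lua")) do
  print(name) -- mtx-fonts.lua, mtx-fontss.lua, mtx-font.lua, mtx-t-fonts.lua, ...
end
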
@@ -14963,9 +16302,11 @@ instance.lsrmode = environment.argument("lsr") or false
local is_mkii_stub = runners.registered[file.removesuffix(file.basename(filename))]
-if environment.argument("usekpse") or environment.argument("forcekpse") or is_mkii_stub then
+local e_argument = environment.argument
+
+if e_argument("usekpse") or e_argument("forcekpse") or is_mkii_stub then
- resolvers.load_tree(environment.argument('tree'),true) -- force resolve of TEXMFCNF
+ resolvers.load_tree(e_argument('tree'),true) -- force resolve of TEXMFCNF
os.setenv("engine","")
os.setenv("progname","")
@@ -14978,7 +16319,7 @@ if environment.argument("usekpse") or environment.argument("forcekpse") or is_mk
other = "other text files",
}
- local progname = environment.argument("progname") or 'context'
+ local progname = e_argument("progname") or 'context'
local function kpse_initialized()
texconfig.kpse_init = true
@@ -14993,7 +16334,7 @@ if environment.argument("usekpse") or environment.argument("forcekpse") or is_mk
local findfile = resolvers.findfile
local showpath = resolvers.showpath
- if environment.argument("forcekpse") then
+ if e_argument("forcekpse") then
function resolvers.findfile(name,kind)
return (kpse_initialized():find_file(resolvers.cleanpath(name),(kind ~= "" and (remapper[kind] or kind)) or "tex") or "") or ""
@@ -15002,7 +16343,7 @@ if environment.argument("usekpse") or environment.argument("forcekpse") or is_mk
return (kpse_initialized():show_path(name)) or ""
end
- elseif environment.argument("usekpse") or is_mkii_stub then
+ elseif e_argument("usekpse") or is_mkii_stub then
resolvers.load()
@@ -15041,12 +16382,12 @@ else
end
end
- resolvers.load_tree(environment.argument('tree'))
+ resolvers.load_tree(e_argument('tree'),e_argument("resolve"))
end
-if environment.argument("selfmerge") then
+if e_argument("selfmerge") then
-- embed used libraries
@@ -15056,27 +16397,27 @@ if environment.argument("selfmerge") then
utilities.merger.selfmerge(own.name,own.libs,{ found })
end
-elseif environment.argument("selfclean") then
+elseif e_argument("selfclean") then
-- remove embedded libraries
runners.loadbase()
utilities.merger.selfclean(own.name)
-elseif environment.argument("selfupdate") then
+elseif e_argument("selfupdate") then
runners.loadbase()
trackers.enable("resolvers.locating")
resolvers.updatescript(own.name,"mtxrun")
-elseif environment.argument("ctxlua") or environment.argument("internal") then
+elseif e_argument("ctxlua") or e_argument("internal") then
-- run a script by loading it (using libs)
runners.loadbase()
ok = runners.execute_script(filename,true)
-elseif environment.argument("script") or environment.argument("scripts") then
+elseif e_argument("script") or e_argument("scripts") then
-- run a script by loading it (using libs), pass args
@@ -15087,109 +16428,110 @@ elseif environment.argument("script") or environment.argument("scripts") then
ok = runners.execute_ctx_script(filename)
end
-elseif environment.argument("execute") then
+elseif e_argument("execute") then
-- execute script
runners.loadbase()
ok = runners.execute_script(filename)
-elseif environment.argument("direct") then
+elseif e_argument("direct") then
-- equals bin:
runners.loadbase()
ok = runners.execute_program(filename)
-elseif environment.argument("edit") then
+elseif e_argument("edit") then
-- edit file
runners.loadbase()
runners.edit_script(filename)
-elseif environment.argument("launch") then
+elseif e_argument("launch") then
runners.loadbase()
runners.launch_file(filename)
-elseif environment.argument("makestubs") then
+elseif e_argument("makestubs") then
    -- make stubs (deprecated)
runners.handle_stubs(true)
-elseif environment.argument("removestubs") then
+elseif e_argument("removestubs") then
    -- remove stubs (deprecated)
runners.loadbase()
runners.handle_stubs(false)
-elseif environment.argument("resolve") then
+elseif e_argument("resolve") then
-- resolve string
runners.loadbase()
runners.resolve_string(filename)
-elseif environment.argument("locate") then
+elseif e_argument("locate") then
- -- locate file
+ -- locate file (only database)
runners.loadbase()
runners.locate_file(filename)
-elseif environment.argument("platform") or environment.argument("show-platform") then
+elseif e_argument("platform") or e_argument("show-platform") then
-- locate platform
runners.loadbase()
runners.locate_platform()
-elseif environment.argument("prefixes") then
+elseif e_argument("prefixes") then
runners.loadbase()
runners.prefixes()
-elseif environment.argument("timedrun") then
+elseif e_argument("timedrun") then
  -- timed run
runners.loadbase()
runners.timedrun(filename)
-elseif environment.argument("variables") or environment.argument("show-variables") or environment.argument("expansions") or environment.argument("show-expansions") then
+elseif e_argument("variables") or e_argument("show-variables") or e_argument("expansions") or e_argument("show-expansions") then
-- luatools: runners.execute_ctx_script("mtx-base","--expansions",filename)
resolvers.load("nofiles")
- resolvers.listers.variables(environment.argument("pattern"))
+ resolvers.listers.variables(e_argument("pattern"))
-elseif environment.argument("configurations") or environment.argument("show-configurations") then
+elseif e_argument("configurations") or e_argument("show-configurations") then
-- luatools: runners.execute_ctx_script("mtx-base","--configurations",filename)
resolvers.load("nofiles")
resolvers.listers.configurations()
-elseif environment.argument("find-file") then
+elseif e_argument("find-file") then
-- luatools: runners.execute_ctx_script("mtx-base","--find-file",filename)
resolvers.load()
- local e_pattern = environment.argument("pattern")
- local e_format = environment.argument("format")
+ local e_all = e_argument("all")
+ local e_pattern = e_argument("pattern")
+ local e_format = e_argument("format")
+ local finder = e_all and resolvers.findfiles or resolvers.findfile
if not e_pattern then
runners.register_arguments(filename)
environment.initializearguments(environment.arguments_after)
- resolvers.dowithfilesandreport(resolvers.findfiles,environment.files,e_format)
+ resolvers.dowithfilesandreport(finder,environment.files,e_format)
elseif type(e_pattern) == "string" then
- instance.allresults = true -- brrrr
- resolvers.dowithfilesandreport(resolvers.findfiles,{ e_pattern }, e_format)
+ resolvers.dowithfilesandreport(finder,{ e_pattern },e_format)
end
-elseif environment.argument("find-path") then
+elseif e_argument("find-path") then
-- luatools: runners.execute_ctx_script("mtx-base","--find-path",filename)
@@ -15201,7 +16543,7 @@ elseif environment.argument("find-path") then
print(path)
end
-elseif environment.argument("expand-braces") then
+elseif e_argument("expand-braces") then
-- luatools: runners.execute_ctx_script("mtx-base","--expand-braces",filename)
@@ -15210,7 +16552,7 @@ elseif environment.argument("expand-braces") then
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.expandbraces, environment.files)
-elseif environment.argument("expand-path") then
+elseif e_argument("expand-path") then
-- luatools: runners.execute_ctx_script("mtx-base","--expand-path",filename)
@@ -15219,7 +16561,7 @@ elseif environment.argument("expand-path") then
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.expandpath, environment.files)
-elseif environment.argument("expand-var") or environment.argument("expand-variable") then
+elseif e_argument("expand-var") or e_argument("expand-variable") then
-- luatools: runners.execute_ctx_script("mtx-base","--expand-var",filename)
@@ -15228,7 +16570,7 @@ elseif environment.argument("expand-var") or environment.argument("expand-variab
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.expansion, environment.files)
-elseif environment.argument("show-path") or environment.argument("path-value") then
+elseif e_argument("show-path") or e_argument("path-value") then
-- luatools: runners.execute_ctx_script("mtx-base","--show-path",filename)
@@ -15237,7 +16579,7 @@ elseif environment.argument("show-path") or environment.argument("path-value") t
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.showpath, environment.files)
-elseif environment.argument("var-value") or environment.argument("show-value") then
+elseif e_argument("var-value") or e_argument("show-value") then
-- luatools: runners.execute_ctx_script("mtx-base","--show-value",filename)
@@ -15246,28 +16588,36 @@ elseif environment.argument("var-value") or environment.argument("show-value") t
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.variable,environment.files)
-elseif environment.argument("format-path") then
+elseif e_argument("format-path") then
-- luatools: runners.execute_ctx_script("mtx-base","--format-path",filename)
resolvers.load()
report(caches.getwritablepath("format"))
-elseif environment.argument("pattern") then
+elseif e_argument("pattern") then
-- luatools
- runners.execute_ctx_script("mtx-base","--pattern='" .. environment.argument("pattern") .. "'",filename)
+ runners.execute_ctx_script("mtx-base","--pattern='" .. e_argument("pattern") .. "'",filename)
-elseif environment.argument("generate") then
+elseif e_argument("generate") then
-- luatools
- instance.renewcache = true
- trackers.enable("resolvers.locating")
- resolvers.load()
+ if filename and filename ~= "" then
+ resolvers.load("nofiles")
+ trackers.enable("resolvers.locating")
+ resolvers.renew(filename)
+ else
+ instance.renewcache = true
+ trackers.enable("resolvers.locating")
+ resolvers.load()
+ end
-elseif environment.argument("make") or environment.argument("ini") or environment.argument("compile") then
+ e_verbose = true
+
+elseif e_argument("make") or e_argument("ini") or e_argument("compile") then
-- luatools: runners.execute_ctx_script("mtx-base","--make",filename)
@@ -15275,29 +16625,33 @@ elseif environment.argument("make") or environment.argument("ini") or environmen
trackers.enable("resolvers.locating")
environment.make_format(filename)
-elseif environment.argument("run") then
+elseif e_argument("run") then
-- luatools
runners.execute_ctx_script("mtx-base","--run",filename)
-elseif environment.argument("fmt") then
+elseif e_argument("fmt") then
-- luatools
runners.execute_ctx_script("mtx-base","--fmt",filename)
-elseif environment.argument("help") and filename=='base' then
+elseif e_argument("help") and filename=='base' then
-- luatools
runners.execute_ctx_script("mtx-base","--help")
-elseif environment.argument("help") or filename=='help' or filename == "" then
+elseif e_argument("version") then
+
+ application.version()
+
+elseif e_argument("help") or filename=='help' or filename == "" then
application.help()
-elseif filename:find("^bin:") then
+elseif find(filename,"^bin:") then
runners.loadbase()
ok = runners.execute_program(filename)
diff --git a/Master/bin/x86_64-linux/mtxrun b/Master/bin/x86_64-linux/mtxrun
index 7adcd3023a8..335c4fcb9e0 100755
--- a/Master/bin/x86_64-linux/mtxrun
+++ b/Master/bin/x86_64-linux/mtxrun
@@ -160,509 +160,6 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['l-lpeg'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local lpeg = require("lpeg")
-
-local type = type
-
--- Beware, we predefine a bunch of patterns here and one reason for doing so
--- is that we get consistent behaviour in some of the visualizers.
-
-lpeg.patterns = lpeg.patterns or { } -- so that we can share
-local patterns = lpeg.patterns
-
-local P, R, S, V, match = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.match
-local Ct, C, Cs, Cc = lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc
-local lpegtype = lpeg.type
-
-local utfcharacters = string.utfcharacters
-local utfgmatch = unicode and unicode.utf8.gmatch
-
-local anything = P(1)
-local endofstring = P(-1)
-local alwaysmatched = P(true)
-
-patterns.anything = anything
-patterns.endofstring = endofstring
-patterns.beginofstring = alwaysmatched
-patterns.alwaysmatched = alwaysmatched
-
-local digit, sign = R('09'), S('+-')
-local cr, lf, crlf = P("\r"), P("\n"), P("\r\n")
-local newline = crlf + cr + lf
-local escaped = P("\\") * anything
-local squote = P("'")
-local dquote = P('"')
-local space = P(" ")
-
-local utfbom_32_be = P('\000\000\254\255')
-local utfbom_32_le = P('\255\254\000\000')
-local utfbom_16_be = P('\255\254')
-local utfbom_16_le = P('\254\255')
-local utfbom_8 = P('\239\187\191')
-local utfbom = utfbom_32_be + utfbom_32_le
- + utfbom_16_be + utfbom_16_le
- + utfbom_8
-local utftype = utfbom_32_be / "utf-32-be" + utfbom_32_le / "utf-32-le"
- + utfbom_16_be / "utf-16-be" + utfbom_16_le / "utf-16-le"
- + utfbom_8 / "utf-8" + alwaysmatched / "unknown"
-
-local utf8next = R("\128\191")
-
-patterns.utf8one = R("\000\127")
-patterns.utf8two = R("\194\223") * utf8next
-patterns.utf8three = R("\224\239") * utf8next * utf8next
-patterns.utf8four = R("\240\244") * utf8next * utf8next * utf8next
-patterns.utfbom = utfbom
-patterns.utftype = utftype
-
-local utf8char = patterns.utf8one + patterns.utf8two + patterns.utf8three + patterns.utf8four
-local validutf8char = utf8char^0 * endofstring * Cc(true) + Cc(false)
-
-patterns.utf8 = utf8char
-patterns.utf8char = utf8char
-patterns.validutf8 = validutf8char
-patterns.validutf8char = validutf8char
-
-patterns.digit = digit
-patterns.sign = sign
-patterns.cardinal = sign^0 * digit^1
-patterns.integer = sign^0 * digit^1
-patterns.float = sign^0 * digit^0 * P('.') * digit^1
-patterns.cfloat = sign^0 * digit^0 * P(',') * digit^1
-patterns.number = patterns.float + patterns.integer
-patterns.cnumber = patterns.cfloat + patterns.integer
-patterns.oct = P("0") * R("07")^1
-patterns.octal = patterns.oct
-patterns.HEX = P("0x") * R("09","AF")^1
-patterns.hex = P("0x") * R("09","af")^1
-patterns.hexadecimal = P("0x") * R("09","AF","af")^1
-patterns.lowercase = R("az")
-patterns.uppercase = R("AZ")
-patterns.letter = patterns.lowercase + patterns.uppercase
-patterns.space = space
-patterns.tab = P("\t")
-patterns.spaceortab = patterns.space + patterns.tab
-patterns.eol = S("\n\r")
-patterns.spacer = S(" \t\f\v") -- + string.char(0xc2, 0xa0) if we want utf (cf mail roberto)
-patterns.newline = newline
-patterns.emptyline = newline^1
-patterns.nonspacer = 1 - patterns.spacer
-patterns.whitespace = patterns.eol + patterns.spacer
-patterns.nonwhitespace = 1 - patterns.whitespace
-patterns.equal = P("=")
-patterns.comma = P(",")
-patterns.commaspacer = P(",") * patterns.spacer^0
-patterns.period = P(".")
-patterns.colon = P(":")
-patterns.semicolon = P(";")
-patterns.underscore = P("_")
-patterns.escaped = escaped
-patterns.squote = squote
-patterns.dquote = dquote
-patterns.nosquote = (escaped + (1-squote))^0
-patterns.nodquote = (escaped + (1-dquote))^0
-patterns.unsingle = (squote/"") * patterns.nosquote * (squote/"")
-patterns.undouble = (dquote/"") * patterns.nodquote * (dquote/"")
-patterns.unquoted = patterns.undouble + patterns.unsingle -- more often undouble
-patterns.unspacer = ((patterns.spacer^1)/"")^0
-
-patterns.somecontent = (anything - newline - space)^1 -- (utf8char - newline - space)^1
-patterns.beginline = #(1-newline)
-
-local unquoted = Cs(patterns.unquoted * endofstring) -- not C
-
-function string.unquoted(str)
- return match(unquoted,str) or str
-end
-
-
-function lpeg.anywhere(pattern) --slightly adapted from website
- return P { P(pattern) + 1 * V(1) } -- why so complex?
-end
-
-function lpeg.splitter(pattern, action)
- return (((1-P(pattern))^1)/action+1)^0
-end
-
-local splitters_s, splitters_m = { }, { }
-
-local function splitat(separator,single)
- local splitter = (single and splitters_s[separator]) or splitters_m[separator]
- if not splitter then
- separator = P(separator)
- local other = C((1 - separator)^0)
- if single then
- local any = anything
- splitter = other * (separator * C(any^0) + "") -- ?
- splitters_s[separator] = splitter
- else
- splitter = other * (separator * other)^0
- splitters_m[separator] = splitter
- end
- end
- return splitter
-end
-
-lpeg.splitat = splitat
-
-
-local cache = { }
-
-function lpeg.split(separator,str)
- local c = cache[separator]
- if not c then
- c = Ct(splitat(separator))
- cache[separator] = c
- end
- return match(c,str)
-end
-
-function string.split(str,separator)
- local c = cache[separator]
- if not c then
- c = Ct(splitat(separator))
- cache[separator] = c
- end
- return match(c,str)
-end
-
-local spacing = patterns.spacer^0 * newline -- sort of strip
-local empty = spacing * Cc("")
-local nonempty = Cs((1-spacing)^1) * spacing^-1
-local content = (empty + nonempty)^1
-
-patterns.textline = content
-
-
-local linesplitter = Ct(splitat(newline))
-
-patterns.linesplitter = linesplitter
-
-function string.splitlines(str)
- return match(linesplitter,str)
-end
-
-local utflinesplitter = utfbom^-1 * Ct(splitat(newline))
-
-patterns.utflinesplitter = utflinesplitter
-
-function string.utfsplitlines(str)
- return match(utflinesplitter,str)
-end
-
-
-local cache = { }
-
-function lpeg.checkedsplit(separator,str)
- local c = cache[separator]
- if not c then
- separator = P(separator)
- local other = C((1 - separator)^1)
- c = Ct(separator^0 * other * (separator^1 * other)^0)
- cache[separator] = c
- end
- return match(c,str)
-end
-
-function string.checkedsplit(str,separator)
- local c = cache[separator]
- if not c then
- separator = P(separator)
- local other = C((1 - separator)^1)
- c = Ct(separator^0 * other * (separator^1 * other)^0)
- cache[separator] = c
- end
- return match(c,str)
-end
-
-
-local f1 = string.byte
-
-local function f2(s) local c1, c2 = f1(s,1,2) return c1 * 64 + c2 - 12416 end
-local function f3(s) local c1, c2, c3 = f1(s,1,3) return (c1 * 64 + c2) * 64 + c3 - 925824 end
-local function f4(s) local c1, c2, c3, c4 = f1(s,1,4) return ((c1 * 64 + c2) * 64 + c3) * 64 + c4 - 63447168 end
-
-local utf8byte = patterns.utf8one/f1 + patterns.utf8two/f2 + patterns.utf8three/f3 + patterns.utf8four/f4
-
-patterns.utf8byte = utf8byte
-
-
-
-local cache = { }
-
-function lpeg.stripper(str)
- if type(str) == "string" then
- local s = cache[str]
- if not s then
- s = Cs(((S(str)^1)/"" + 1)^0)
- cache[str] = s
- end
- return s
- else
- return Cs(((str^1)/"" + 1)^0)
- end
-end
-
-local cache = { }
-
-function lpeg.keeper(str)
- if type(str) == "string" then
- local s = cache[str]
- if not s then
- s = Cs((((1-S(str))^1)/"" + 1)^0)
- cache[str] = s
- end
- return s
- else
- return Cs((((1-str)^1)/"" + 1)^0)
- end
-end
-
-function lpeg.frontstripper(str) -- or pattern (yet undocumented)
- return (P(str) + P(true)) * Cs(P(1)^0)
-end
-
-function lpeg.endstripper(str) -- or pattern (yet undocumented)
- return Cs((1 - P(str) * P(-1))^0)
-end
-
--- Just for fun I looked at the used bytecode and
--- p = (p and p + pp) or pp gets one more (testset).
-
-function lpeg.replacer(one,two)
- if type(one) == "table" then
- local no = #one
- if no > 0 then
- local p
- for i=1,no do
- local o = one[i]
- local pp = P(o[1]) / o[2]
- if p then
- p = p + pp
- else
- p = pp
- end
- end
- return Cs((p + 1)^0)
- end
- else
- two = two or ""
- return Cs((P(one)/two + 1)^0)
- end
-end
-
-local splitters_f, splitters_s = { }, { }
-
-function lpeg.firstofsplit(separator) -- always return value
- local splitter = splitters_f[separator]
- if not splitter then
- separator = P(separator)
- splitter = C((1 - separator)^0)
- splitters_f[separator] = splitter
- end
- return splitter
-end
-
-function lpeg.secondofsplit(separator) -- nil if not split
- local splitter = splitters_s[separator]
- if not splitter then
- separator = P(separator)
- splitter = (1 - separator)^0 * separator * C(anything^0)
- splitters_s[separator] = splitter
- end
- return splitter
-end
-
-function lpeg.balancer(left,right)
- left, right = P(left), P(right)
- return P { left * ((1 - left - right) + V(1))^0 * right }
-end
-
-
-
-local nany = utf8char/""
-
-function lpeg.counter(pattern)
- pattern = Cs((P(pattern)/" " + nany)^0)
- return function(str)
- return #match(pattern,str)
- end
-end
-
-if utfgmatch then
-
- function lpeg.count(str,what) -- replaces string.count
- if type(what) == "string" then
- local n = 0
- for _ in utfgmatch(str,what) do
- n = n + 1
- end
- return n
- else -- 4 times slower but still faster than / function
- return #match(Cs((P(what)/" " + nany)^0),str)
- end
- end
-
-else
-
- local cache = { }
-
- function lpeg.count(str,what) -- replaces string.count
- if type(what) == "string" then
- local p = cache[what]
- if not p then
- p = Cs((P(what)/" " + nany)^0)
- cache[p] = p
- end
- return #match(p,str)
- else -- 4 times slower but still faster than / function
- return #match(Cs((P(what)/" " + nany)^0),str)
- end
- end
-
-end
-
-local patterns_escapes = { -- also defines in l-string
- ["%"] = "%%",
- ["."] = "%.",
- ["+"] = "%+", ["-"] = "%-", ["*"] = "%*",
- ["["] = "%[", ["]"] = "%]",
- ["("] = "%)", [")"] = "%)",
- -- ["{"] = "%{", ["}"] = "%}"
- -- ["^"] = "%^", ["$"] = "%$",
-}
-
-local simple_escapes = { -- also defines in l-string
- ["-"] = "%-",
- ["."] = "%.",
- ["?"] = ".",
- ["*"] = ".*",
-}
-
-local p = Cs((S("-.+*%()[]") / patterns_escapes + anything)^0)
-local s = Cs((S("-.+*%()[]") / simple_escapes + anything)^0)
-
-function string.escapedpattern(str,simple)
- return match(simple and s or p,str)
-end
-
--- utf extensies
-
-lpeg.UP = lpeg.P
-
-if utfcharacters then
-
- function lpeg.US(str)
- local p
- for uc in utfcharacters(str) do
- if p then
- p = p + P(uc)
- else
- p = P(uc)
- end
- end
- return p
- end
-
-
-elseif utfgmatch then
-
- function lpeg.US(str)
- local p
- for uc in utfgmatch(str,".") do
- if p then
- p = p + P(uc)
- else
- p = P(uc)
- end
- end
- return p
- end
-
-else
-
- function lpeg.US(str)
- local p
- local f = function(uc)
- if p then
- p = p + P(uc)
- else
- p = P(uc)
- end
- end
- match((utf8char/f)^0,str)
- return p
- end
-
-end
-
-local range = Cs(utf8byte) * (Cs(utf8byte) + Cc(false))
-
-local utfchar = unicode and unicode.utf8 and unicode.utf8.char
-
-function lpeg.UR(str,more)
- local first, last
- if type(str) == "number" then
- first = str
- last = more or first
- else
- first, last = match(range,str)
- if not last then
- return P(str)
- end
- end
- if first == last then
- return P(str)
- elseif utfchar and last - first < 8 then -- a somewhat arbitrary criterium
- local p
- for i=first,last do
- if p then
- p = p + P(utfchar(i))
- else
- p = P(utfchar(i))
- end
- end
- return p -- nil when invalid range
- else
- local f = function(b)
- return b >= first and b <= last
- end
- return utf8byte / f -- nil when invalid range
- end
-end
-
-
-
-function lpeg.oneof(list,...) -- lpeg.oneof("elseif","else","if","then")
- if type(list) ~= "table" then
- list = { list, ... }
- end
- -- sort(list) -- longest match first
- local p = P(list[1])
- for l=2,#list do
- p = p + P(list[l])
- end
- return p
-end
-
-function lpeg.is_lpeg(p)
- return p and lpegtype(p) == "pattern"
-end
-
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
if not modules then modules = { } end modules ['l-table'] = {
version = 1.001,
comment = "companion to luat-lib.mkiv",
@@ -675,11 +172,12 @@ local type, next, tostring, tonumber, ipairs, table, string = type, next, tostri
local concat, sort, insert, remove = table.concat, table.sort, table.insert, table.remove
local format, find, gsub, lower, dump, match = string.format, string.find, string.gsub, string.lower, string.dump, string.match
local getmetatable, setmetatable = getmetatable, setmetatable
+local getinfo = debug.getinfo
 -- Starting with version 5.2 Lua no longer provides ipairs, which makes
-- sense. As we already used the for loop and # in most places the
-- impact on ConTeXt was not that large; the remaining ipairs already
--- have been replaced. In a similar fashio we also hardly used pairs.
+-- have been replaced. In a similar fashion we also hardly used pairs.
--
-- Just in case, we provide the fallbacks as discussed in Programming
-- in Lua (http://www.lua.org/pil/7.3.html):
@@ -1022,6 +520,8 @@ end
-- problem: there no good number_to_string converter with the best resolution
+local function dummy() end
+
local function do_serialize(root,name,depth,level,indexed)
if level > 0 then
depth = depth .. " "
@@ -1214,19 +714,20 @@ local function do_serialize(root,name,depth,level,indexed)
end
elseif t == "function" then
if functions then
+ local f = getinfo(v).what == "C" and dump(dummy) or dump(v)
+ -- local f = getinfo(v).what == "C" and dump(function(...) return v(...) end) or dump(v)
if tk == "number" then -- or find(k,"^%d+$") then
if hexify then
- handle(format("%s [0x%04X]=loadstring(%q),",depth,k,dump(v)))
+ handle(format("%s [0x%04X]=loadstring(%q),",depth,k,f))
else
- handle(format("%s [%s]=loadstring(%q),",depth,k,dump(v)))
+ handle(format("%s [%s]=loadstring(%q),",depth,k,f))
end
elseif tk == "boolean" then
- handle(format("%s [%s]=loadstring(%q),",depth,tostring(k),dump(v)))
+ handle(format("%s [%s]=loadstring(%q),",depth,tostring(k),f))
elseif noquotes and not reserved[k] and find(k,"^%a[%w%_]*$") then
- handle(format("%s %s=loadstring(%q),",depth,k,dump(v)))
+ handle(format("%s %s=loadstring(%q),",depth,k,f))
else
- -- handle(format("%s [%q]=loadstring(%q),",depth,k,dump(v)))
- handle(format("%s [%q]=loadstring(%q),",depth,k,debug.getinfo(v).what == "C" and "C code" or dump(v)))
+ handle(format("%s [%q]=loadstring(%q),",depth,k,f))
end
end
else
@@ -1571,6 +1072,730 @@ function table.has_one_entry(t)
return t and not next(t,next(t))
end
+-- new
+
+function table.loweredkeys(t) -- maybe utf
+ local l = { }
+ for k, v in next, t do
+ l[lower(k)] = v
+ end
+ return l
+end
+
+-- new, might move (maybe duplicate)
+
+function table.unique(old)
+ local hash = { }
+ local new = { }
+ local n = 0
+ for i=1,#old do
+ local oi = old[i]
+ if not hash[oi] then
+ n = n + 1
+ new[n] = oi
+ hash[oi] = true
+ end
+ end
+ return new
+end
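+
+-- for instance (illustrative only, expected results shown as comments):
+--
+-- table.unique { "a", "b", "a", "c" }    -- { "a", "b", "c" }
+-- table.loweredkeys { Foo = 1, BAR = 2 } -- { foo = 1, bar = 2 }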
+
+-- function table.sorted(t,...)
+-- table.sort(t,...)
+-- return t -- still sorts in-place
+-- end
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+if not modules then modules = { } end modules ['l-lpeg'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+
+-- a new lpeg fails on a #(1-P(":")) test and really needs a + P(-1)
+
+local lpeg = require("lpeg")
+
+-- tracing (only used when we encounter a problem in integration of lpeg in luatex)
+
+local report = texio and texio.write_nl or print
+
+
+
+
+local type = type
+local byte, char, gmatch = string.byte, string.char, string.gmatch
+
+-- Beware, we predefine a bunch of patterns here and one reason for doing so
+-- is that we get consistent behaviour in some of the visualizers.
+
+lpeg.patterns = lpeg.patterns or { } -- so that we can share
+local patterns = lpeg.patterns
+
+local P, R, S, V, match = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.match
+local Ct, C, Cs, Cc = lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc
+local lpegtype = lpeg.type
+
+local utfcharacters = string.utfcharacters
+local utfgmatch = unicode and unicode.utf8.gmatch
+
+local anything = P(1)
+local endofstring = P(-1)
+local alwaysmatched = P(true)
+
+patterns.anything = anything
+patterns.endofstring = endofstring
+patterns.beginofstring = alwaysmatched
+patterns.alwaysmatched = alwaysmatched
+
+local digit, sign = R('09'), S('+-')
+local cr, lf, crlf = P("\r"), P("\n"), P("\r\n")
+local newline = crlf + S("\r\n") -- cr + lf
+local escaped = P("\\") * anything
+local squote = P("'")
+local dquote = P('"')
+local space = P(" ")
+
+local utfbom_32_be = P('\000\000\254\255')
+local utfbom_32_le = P('\255\254\000\000')
+local utfbom_16_be = P('\255\254')
+local utfbom_16_le = P('\254\255')
+local utfbom_8 = P('\239\187\191')
+local utfbom = utfbom_32_be + utfbom_32_le
+ + utfbom_16_be + utfbom_16_le
+ + utfbom_8
+local utftype = utfbom_32_be / "utf-32-be" + utfbom_32_le / "utf-32-le"
+ + utfbom_16_be / "utf-16-be" + utfbom_16_le / "utf-16-le"
+ + utfbom_8 / "utf-8" + alwaysmatched / "unknown"
+
+local utf8next = R("\128\191")
+
+patterns.utf8one = R("\000\127")
+patterns.utf8two = R("\194\223") * utf8next
+patterns.utf8three = R("\224\239") * utf8next * utf8next
+patterns.utf8four = R("\240\244") * utf8next * utf8next * utf8next
+patterns.utfbom = utfbom
+patterns.utftype = utftype
+
+local utf8char = patterns.utf8one + patterns.utf8two + patterns.utf8three + patterns.utf8four
+local validutf8char = utf8char^0 * endofstring * Cc(true) + Cc(false)
+
+patterns.utf8 = utf8char
+patterns.utf8char = utf8char
+patterns.validutf8 = validutf8char
+patterns.validutf8char = validutf8char
+
+patterns.digit = digit
+patterns.sign = sign
+patterns.cardinal = sign^0 * digit^1
+patterns.integer = sign^0 * digit^1
+patterns.float = sign^0 * digit^0 * P('.') * digit^1
+patterns.cfloat = sign^0 * digit^0 * P(',') * digit^1
+patterns.number = patterns.float + patterns.integer
+patterns.cnumber = patterns.cfloat + patterns.integer
+patterns.oct = P("0") * R("07")^1
+patterns.octal = patterns.oct
+patterns.HEX = P("0x") * R("09","AF")^1
+patterns.hex = P("0x") * R("09","af")^1
+patterns.hexadecimal = P("0x") * R("09","AF","af")^1
+patterns.lowercase = R("az")
+patterns.uppercase = R("AZ")
+patterns.letter = patterns.lowercase + patterns.uppercase
+patterns.space = space
+patterns.tab = P("\t")
+patterns.spaceortab = patterns.space + patterns.tab
+patterns.eol = S("\n\r")
+patterns.spacer = S(" \t\f\v") -- + char(0xc2, 0xa0) if we want utf (cf mail roberto)
+patterns.newline = newline
+patterns.emptyline = newline^1
+patterns.nonspacer = 1 - patterns.spacer
+patterns.whitespace = patterns.eol + patterns.spacer
+patterns.nonwhitespace = 1 - patterns.whitespace
+patterns.equal = P("=")
+patterns.comma = P(",")
+patterns.commaspacer = P(",") * patterns.spacer^0
+patterns.period = P(".")
+patterns.colon = P(":")
+patterns.semicolon = P(";")
+patterns.underscore = P("_")
+patterns.escaped = escaped
+patterns.squote = squote
+patterns.dquote = dquote
+patterns.nosquote = (escaped + (1-squote))^0
+patterns.nodquote = (escaped + (1-dquote))^0
+patterns.unsingle = (squote/"") * patterns.nosquote * (squote/"")
+patterns.undouble = (dquote/"") * patterns.nodquote * (dquote/"")
+patterns.unquoted = patterns.undouble + patterns.unsingle -- more often undouble
+patterns.unspacer = ((patterns.spacer^1)/"")^0
+
+patterns.somecontent = (anything - newline - space)^1 -- (utf8char - newline - space)^1
+patterns.beginline = #(1-newline)
+
+-- print(string.unquoted("test"))
+-- print(string.unquoted([["t\"est"]]))
+-- print(string.unquoted([["t\"est"x]]))
+-- print(string.unquoted("\'test\'"))
+-- print(string.unquoted('"test"'))
+-- print(string.unquoted('"test"'))
+
+function lpeg.anywhere(pattern) --slightly adapted from website
+ return P { P(pattern) + 1 * V(1) } -- why so complex?
+end
+
+function lpeg.splitter(pattern, action)
+ return (((1-P(pattern))^1)/action+1)^0
+end
+
+function lpeg.tsplitter(pattern, action)
+ return Ct((((1-P(pattern))^1)/action+1)^0)
+end
+
+-- problem: separator can be an lpeg pattern and that does not hash too well, but
+-- it's quite okay as the key is then not garbage collected
+
+local splitters_s, splitters_m, splitters_t = { }, { }, { }
+
+local function splitat(separator,single)
+ local splitter = (single and splitters_s[separator]) or splitters_m[separator]
+ if not splitter then
+ separator = P(separator)
+ local other = C((1 - separator)^0)
+ if single then
+ local any = anything
+ splitter = other * (separator * C(any^0) + "") -- ?
+ splitters_s[separator] = splitter
+ else
+ splitter = other * (separator * other)^0
+ splitters_m[separator] = splitter
+ end
+ end
+ return splitter
+end
+
+local function tsplitat(separator)
+ local splitter = splitters_t[separator]
+ if not splitter then
+ splitter = Ct(splitat(separator))
+ splitters_t[separator] = splitter
+ end
+ return splitter
+end
+
+lpeg.splitat = splitat
+lpeg.tsplitat = tsplitat
+
+function string.splitup(str,separator)
+ if not separator then
+ separator = ","
+ end
+ return match(splitters_m[separator] or splitat(separator),str)
+end
+
+
+local cache = { }
+
+function lpeg.split(separator,str)
+ local c = cache[separator]
+ if not c then
+ c = tsplitat(separator)
+ cache[separator] = c
+ end
+ return match(c,str)
+end
+
+function string.split(str,separator)
+ local c = cache[separator]
+ if not c then
+ c = tsplitat(separator)
+ cache[separator] = c
+ end
+ return match(c,str)
+end
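+
+-- typical usage (illustrative only): empty substrings are kept
+--
+-- lpeg.split(",","a,b,,c")     -- { "a", "b", "", "c" }
+-- string.split("a,b,,c",",")   -- { "a", "b", "", "c" }
+-- string.splitup("a=b","=")    -- "a", "b" (multiple return values)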
+
+local spacing = patterns.spacer^0 * newline -- sort of strip
+local empty = spacing * Cc("")
+local nonempty = Cs((1-spacing)^1) * spacing^-1
+local content = (empty + nonempty)^1
+
+patterns.textline = content
+
+
+local linesplitter = tsplitat(newline)
+
+patterns.linesplitter = linesplitter
+
+function string.splitlines(str)
+ return match(linesplitter,str)
+end
+
+local utflinesplitter = utfbom^-1 * tsplitat(newline)
+
+patterns.utflinesplitter = utflinesplitter
+
+function string.utfsplitlines(str)
+ return match(utflinesplitter,str or "")
+end
+
+
+local cache = { }
+
+function lpeg.checkedsplit(separator,str)
+ local c = cache[separator]
+ if not c then
+ separator = P(separator)
+ local other = C((1 - separator)^1)
+ c = Ct(separator^0 * other * (separator^1 * other)^0)
+ cache[separator] = c
+ end
+ return match(c,str)
+end
+
+function string.checkedsplit(str,separator)
+ local c = cache[separator]
+ if not c then
+ separator = P(separator)
+ local other = C((1 - separator)^1)
+ c = Ct(separator^0 * other * (separator^1 * other)^0)
+ cache[separator] = c
+ end
+ return match(c,str)
+end
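+
+-- unlike the plain splitters, checkedsplit skips empty substrings (illustrative only):
+--
+-- lpeg.checkedsplit(",","a,b,,c")   -- { "a", "b", "c" }
+-- string.checkedsplit("a,b,,c",",") -- { "a", "b", "c" }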
+
+
+local function f2(s) local c1, c2 = byte(s,1,2) return c1 * 64 + c2 - 12416 end
+local function f3(s) local c1, c2, c3 = byte(s,1,3) return (c1 * 64 + c2) * 64 + c3 - 925824 end
+local function f4(s) local c1, c2, c3, c4 = byte(s,1,4) return ((c1 * 64 + c2) * 64 + c3) * 64 + c4 - 63447168 end
+
+local utf8byte = patterns.utf8one/byte + patterns.utf8two/f2 + patterns.utf8three/f3 + patterns.utf8four/f4
+
+patterns.utf8byte = utf8byte
+
+
+
+local cache = { }
+
+function lpeg.stripper(str)
+ if type(str) == "string" then
+ local s = cache[str]
+ if not s then
+ s = Cs(((S(str)^1)/"" + 1)^0)
+ cache[str] = s
+ end
+ return s
+ else
+ return Cs(((str^1)/"" + 1)^0)
+ end
+end
+
+local cache = { }
+
+function lpeg.keeper(str)
+ if type(str) == "string" then
+ local s = cache[str]
+ if not s then
+ s = Cs((((1-S(str))^1)/"" + 1)^0)
+ cache[str] = s
+ end
+ return s
+ else
+ return Cs((((1-str)^1)/"" + 1)^0)
+ end
+end
+
+function lpeg.frontstripper(str) -- or pattern (yet undocumented)
+ return (P(str) + P(true)) * Cs(P(1)^0)
+end
+
+function lpeg.endstripper(str) -- or pattern (yet undocumented)
+ return Cs((1 - P(str) * P(-1))^0)
+end
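+
+-- the four helpers above at a glance (illustrative only):
+--
+-- lpeg.match(lpeg.stripper("ab"),"banana")        -- "nn"   (a and b removed)
+-- lpeg.match(lpeg.keeper("ab"),"banana")          -- "baaa" (only a and b kept)
+-- lpeg.match(lpeg.frontstripper("--"),"--foo")    -- "foo"
+-- lpeg.match(lpeg.endstripper(".tmp"),"foo.tmp")  -- "foo"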
+
+-- Just for fun I looked at the generated bytecode and
+-- p = (p and p + pp) or pp gets one more instruction (testset).
+
+function lpeg.replacer(one,two)
+ if type(one) == "table" then
+ local no = #one
+ if no > 0 then
+ local p
+ for i=1,no do
+ local o = one[i]
+ local pp = P(o[1]) / o[2]
+ if p then
+ p = p + pp
+ else
+ p = pp
+ end
+ end
+ return Cs((p + 1)^0)
+ end
+ else
+ two = two or ""
+ return Cs((P(one)/two + 1)^0)
+ end
+end
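+
+-- for instance (illustrative only):
+--
+-- lpeg.match(lpeg.replacer("-","_"),"a-b-c")                      -- "a_b_c"
+-- lpeg.match(lpeg.replacer { { "a", "A" }, { "b", "B" } },"abc")  -- "ABc"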
+
+local splitters_f, splitters_s = { }, { }
+
+function lpeg.firstofsplit(separator) -- always return value
+ local splitter = splitters_f[separator]
+ if not splitter then
+ separator = P(separator)
+ splitter = C((1 - separator)^0)
+ splitters_f[separator] = splitter
+ end
+ return splitter
+end
+
+function lpeg.secondofsplit(separator) -- nil if not split
+ local splitter = splitters_s[separator]
+ if not splitter then
+ separator = P(separator)
+ splitter = (1 - separator)^0 * separator * C(anything^0)
+ splitters_s[separator] = splitter
+ end
+ return splitter
+end
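+
+-- for instance (illustrative only):
+--
+-- lpeg.match(lpeg.firstofsplit(":"),"foo:bar")   -- "foo"
+-- lpeg.match(lpeg.secondofsplit(":"),"foo:bar")  -- "bar"
+-- lpeg.match(lpeg.secondofsplit(":"),"foo")      -- nil (no separator present)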
+
+function lpeg.balancer(left,right)
+ left, right = P(left), P(right)
+ return P { left * ((1 - left - right) + V(1))^0 * right }
+end
+
+
+
+local nany = utf8char/""
+
+function lpeg.counter(pattern)
+ pattern = Cs((P(pattern)/" " + nany)^0)
+ return function(str)
+ return #match(pattern,str)
+ end
+end
+
+if utfgmatch then
+
+ function lpeg.count(str,what) -- replaces string.count
+ if type(what) == "string" then
+ local n = 0
+ for _ in utfgmatch(str,what) do
+ n = n + 1
+ end
+ return n
+ else -- 4 times slower but still faster than / function
+ return #match(Cs((P(what)/" " + nany)^0),str)
+ end
+ end
+
+else
+
+ local cache = { }
+
+ function lpeg.count(str,what) -- replaces string.count
+ if type(what) == "string" then
+ local p = cache[what]
+ if not p then
+ p = Cs((P(what)/" " + nany)^0)
+ cache[what] = p
+ end
+ return #match(p,str)
+ else -- 4 times slower but still faster than / function
+ return #match(Cs((P(what)/" " + nany)^0),str)
+ end
+ end
+
+end
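+
+-- both count non-overlapping occurrences (illustrative only):
+--
+-- lpeg.count("banana","an")     -- 2
+-- lpeg.counter("an")("banana")  -- 2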
+
+local patterns_escapes = { -- also defined in l-string
+ ["%"] = "%%",
+ ["."] = "%.",
+ ["+"] = "%+", ["-"] = "%-", ["*"] = "%*",
+ ["["] = "%[", ["]"] = "%]",
+ ["("] = "%)", [")"] = "%)",
+ -- ["{"] = "%{", ["}"] = "%}"
+ -- ["^"] = "%^", ["$"] = "%$",
+}
+
+local simple_escapes = { -- also defined in l-string
+ ["-"] = "%-",
+ ["."] = "%.",
+ ["?"] = ".",
+ ["*"] = ".*",
+}
+
+local p = Cs((S("-.+*%()[]") / patterns_escapes + anything)^0)
+local s = Cs((S("-.+*%()[]") / simple_escapes + anything)^0)
+
+function string.escapedpattern(str,simple)
+ return match(simple and s or p,str)
+end
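+
+-- for instance (illustrative only):
+--
+-- string.escapedpattern("1.5-2")       -- "1%.5%-2" (safe for string.find)
+-- string.escapedpattern("*.lua",true)  -- ".*%.lua" (simple wildcard form)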
+
+-- utf extensions
+
+lpeg.UP = lpeg.P
+
+if utfcharacters then
+
+ function lpeg.US(str)
+ local p
+ for uc in utfcharacters(str) do
+ if p then
+ p = p + P(uc)
+ else
+ p = P(uc)
+ end
+ end
+ return p
+ end
+
+
+elseif utfgmatch then
+
+ function lpeg.US(str)
+ local p
+ for uc in utfgmatch(str,".") do
+ if p then
+ p = p + P(uc)
+ else
+ p = P(uc)
+ end
+ end
+ return p
+ end
+
+else
+
+ function lpeg.US(str)
+ local p
+ local f = function(uc)
+ if p then
+ p = p + P(uc)
+ else
+ p = P(uc)
+ end
+ end
+ match((utf8char/f)^0,str)
+ return p
+ end
+
+end
+
+local range = Cs(utf8byte) * (Cs(utf8byte) + Cc(false))
+
+local utfchar = unicode and unicode.utf8 and unicode.utf8.char
+
+function lpeg.UR(str,more)
+ local first, last
+ if type(str) == "number" then
+ first = str
+ last = more or first
+ else
+ first, last = match(range,str)
+ if not last then
+ return P(str)
+ end
+ end
+ if first == last then
+ return P(str)
+ elseif utfchar and last - first < 8 then -- a somewhat arbitrary criterion
+ local p
+ for i=first,last do
+ if p then
+ p = p + P(utfchar(i))
+ else
+ p = P(utfchar(i))
+ end
+ end
+ return p -- nil when invalid range
+ else
+ local f = function(b)
+ return b >= first and b <= last
+ end
+ return utf8byte / f -- nil when invalid range
+ end
+end
+
+
+
+function lpeg.oneof(list,...) -- lpeg.oneof("elseif","else","if","then")
+ if type(list) ~= "table" then
+ list = { list, ... }
+ end
+ -- sort(list) -- longest match first
+ local p = P(list[1])
+ for l=2,#list do
+ p = p + P(list[l])
+ end
+ return p
+end
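+
+-- note that order matters when one string is a prefix of another (illustrative only):
+--
+-- lpeg.match(lpeg.oneof("elseif","else","if"),"elseif")  -- 7 (the full keyword matched)
+-- lpeg.match(lpeg.oneof("else","elseif","if"),"elseif")  -- 5 (only "else" matched)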
+
+function lpeg.is_lpeg(p)
+ return p and lpegtype(p) == "pattern"
+end
+
+-- For the moment here, but it might move to utilities. Beware, we need to
+-- have the longest keyword first, so 'aaa' comes before 'aa', which is why we
+-- loop back from the end, i.e. prepend.
+
+local sort, fastcopy, sortedkeys = table.sort, table.fastcopy, table.sortedkeys -- dependency!
+
+function lpeg.append(list,pp,delayed,checked)
+ local p = pp
+ if #list > 0 then
+ local keys = fastcopy(list)
+ sort(keys)
+ for i=#keys,1,-1 do
+ local k = keys[i]
+ if p then
+ p = P(k) + p
+ else
+ p = P(k)
+ end
+ end
+ elseif delayed then -- hm, it looks like the lpeg parser resolves anyway
+ local keys = sortedkeys(list)
+ if p then
+ for i=1,#keys,1 do
+ local k = keys[i]
+ local v = list[k]
+ p = P(k)/list + p
+ end
+ else
+ for i=1,#keys do
+ local k = keys[i]
+ local v = list[k]
+ if p then
+ p = P(k) + p
+ else
+ p = P(k)
+ end
+ end
+ if p then
+ p = p / list
+ end
+ end
+ elseif checked then
+ -- problem: substitution gives a capture
+ local keys = sortedkeys(list)
+ for i=1,#keys do
+ local k = keys[i]
+ local v = list[k]
+ if p then
+ if k == v then
+ p = P(k) + p
+ else
+ p = P(k)/v + p
+ end
+ else
+ if k == v then
+ p = P(k)
+ else
+ p = P(k)/v
+ end
+ end
+ end
+ else
+ local keys = sortedkeys(list)
+ for i=1,#keys do
+ local k = keys[i]
+ local v = list[k]
+ if p then
+ p = P(k)/v + p
+ else
+ p = P(k)/v
+ end
+ end
+ end
+ return p
+end
+
+-- inspect(lpeg.append({ a = "1", aa = "1", aaa = "1" } ,nil,true))
+-- inspect(lpeg.append({ ["degree celsius"] = "1", celsius = "1", degree = "1" } ,nil,true))
+
+-- function lpeg.exact_match(words,case_insensitive)
+-- local pattern = concat(words)
+-- if case_insensitive then
+-- local pattern = S(upper(characters)) + S(lower(characters))
+-- local list = { }
+-- for i=1,#words do
+-- list[lower(words[i])] = true
+-- end
+-- return Cmt(pattern^1, function(_,i,s)
+-- return list[lower(s)] and i
+-- end)
+-- else
+-- local pattern = S(concat(words))
+-- local list = { }
+-- for i=1,#words do
+-- list[words[i]] = true
+-- end
+-- return Cmt(pattern^1, function(_,i,s)
+-- return list[s] and i
+-- end)
+-- end
+-- end
+
+-- experiment:
+
+local function make(t)
+ local p
+-- for k, v in next, t do
+ for k, v in table.sortedhash(t) do
+ if not p then
+ if next(v) then
+ p = P(k) * make(v)
+ else
+ p = P(k)
+ end
+ else
+ if next(v) then
+ p = p + P(k) * make(v)
+ else
+ p = p + P(k)
+ end
+ end
+ end
+ return p
+end
+
+function lpeg.utfchartabletopattern(list)
+ local tree = { }
+ for i=1,#list do
+ local t = tree
+ for c in gmatch(list[i],".") do
+ if not t[c] then
+ t[c] = { }
+ end
+ t = t[c]
+ end
+ end
+ return make(tree)
+end
+
+-- inspect ( lpeg.utfchartabletopattern {
+-- utfchar(0x00A0), -- nbsp
+-- utfchar(0x2000), -- enquad
+-- utfchar(0x2001), -- emquad
+-- utfchar(0x2002), -- enspace
+-- utfchar(0x2003), -- emspace
+-- utfchar(0x2004), -- threeperemspace
+-- utfchar(0x2005), -- fourperemspace
+-- utfchar(0x2006), -- sixperemspace
+-- utfchar(0x2007), -- figurespace
+-- utfchar(0x2008), -- punctuationspace
+-- utfchar(0x2009), -- breakablethinspace
+-- utfchar(0x200A), -- hairspace
+-- utfchar(0x200B), -- zerowidthspace
+-- utfchar(0x202F), -- narrownobreakspace
+-- utfchar(0x205F), -- math thinspace
+-- } )
+
end -- of closure
@@ -1831,6 +2056,11 @@ function io.readstring(f,n,m)
return str
end
+--
+
+if not io.i_limiter then function io.i_limiter() end end -- dummy so we can test safely
+if not io.o_limiter then function io.o_limiter() end end -- dummy so we can test safely
+
end -- of closure
@@ -2061,7 +2291,7 @@ if not modules then modules = { } end modules ['l-os'] = {
-- maybe build io.flush in os.execute
local os = os
-local find, format, gsub, upper = string.find, string.format, string.gsub, string.upper
+local find, format, gsub, upper, gmatch = string.find, string.format, string.gsub, string.upper, string.gmatch
local concat = table.concat
local random, ceil = math.random, math.ceil
local rawget, rawset, type, getmetatable, setmetatable, tonumber = rawget, rawset, type, getmetatable, setmetatable, tonumber
@@ -2167,9 +2397,9 @@ os.type = os.type or (io.pathseparator == ";" and "windows") or "unix"
os.name = os.name or (os.type == "windows" and "mswin" ) or "linux"
if os.type == "windows" then
- os.libsuffix, os.binsuffix = 'dll', 'exe'
+ os.libsuffix, os.binsuffix, os.binsuffixes = 'dll', 'exe', { 'exe', 'cmd', 'bat' }
else
- os.libsuffix, os.binsuffix = 'so', ''
+ os.libsuffix, os.binsuffix, os.binsuffixes = 'so', '', { '' }
end
function os.launch(str)
@@ -2400,6 +2630,39 @@ function os.timezone(delta)
end
end
+local memory = { }
+
+local function which(filename)
+ local fullname = memory[filename]
+ if fullname == nil then
+ local suffix = file.suffix(filename)
+ local suffixes = suffix == "" and os.binsuffixes or { suffix }
+ for directory in gmatch(os.getenv("PATH"),"[^" .. io.pathseparator .."]+") do
+ local df = file.join(directory,filename)
+ for i=1,#suffixes do
+ local dfs = file.addsuffix(df,suffixes[i])
+ if io.exists(dfs) then
+ fullname = dfs
+ break
+ end
+ end
+ end
+ if not fullname then
+ fullname = false
+ end
+ memory[filename] = fullname
+ end
+ return fullname
+end
+
+os.which = which
+os.where = which
+
+-- print(os.which("inkscape.exe"))
+-- print(os.which("inkscape"))
+-- print(os.which("gs.exe"))
+-- print(os.which("ps2pdf"))
+
end -- of closure
@@ -2499,10 +2762,10 @@ end
local trick_1 = char(1)
local trick_2 = "^" .. trick_1 .. "/+"
-function file.join(...)
+function file.join(...) -- rather dirty
local lst = { ... }
local a, b = lst[1], lst[2]
- if a == "" then
+ if not a or a == "" then -- not a added
lst[1] = trick_1
elseif b and find(a,"^/+$") and find(b,"^/") then
lst[1] = ""
@@ -2523,6 +2786,15 @@ function file.join(...)
end
+-- We should be able to use:
+--
+-- function file.is_writable(name)
+-- local a = attributes(name) or attributes(dirname(name,"."))
+-- return a and sub(a.permissions,2,2) == "w"
+-- end
+--
+-- But after some testing Taco and I came up with:
+
function file.is_writable(name)
if lfs.isdir(name) then
name = name .. "/m_t_x_t_e_s_t.tmp"
@@ -2532,12 +2804,17 @@ function file.is_writable(name)
os.remove(name)
return true
end
- else
- local existing = lfs.isfile(name)
- f = io.open(name,"ab")
+ elseif lfs.isfile(name) then
+ local f = io.open(name,"ab")
if f then
f:close()
- if not existing then os.remove(name) end
+ return true
+ end
+ else
+ local f = io.open(name,"ab")
+ if f then
+ f:close()
+ os.remove(name)
return true
end
end
@@ -2594,7 +2871,7 @@ function file.collapsepath(str,anchor)
if element == '.' then
-- do nothing
elseif element == '..' then
- local n = i -1
+ local n = i - 1
while n > 0 do
local element = oldelements[n]
if element ~= '..' and element ~= '.' then
@@ -2690,10 +2967,42 @@ local path = C(((1-slash)^0 * slash)^0)
local suffix = period * C(P(1-period)^0 * P(-1))
local base = C((1-suffix)^0)
-local pattern = (drive + Cc("")) * (path + Cc("")) * (base + Cc("")) * (suffix + Cc(""))
+drive = drive + Cc("")
+path = path + Cc("")
+base = base + Cc("")
+suffix = suffix + Cc("")
-function file.splitname(str) -- returns drive, path, base, suffix
- return lpegmatch(pattern,str)
+local pattern_a = drive * path * base * suffix
+local pattern_b = path * base * suffix
+local pattern_c = C(drive * path) * C(base * suffix)
+
+function file.splitname(str,splitdrive)
+ if splitdrive then
+ return lpegmatch(pattern_a,str) -- returns drive, path, base, suffix
+ else
+ return lpegmatch(pattern_b,str) -- returns path, base, suffix
+ end
+end
+
+function file.nametotable(str,splitdrive) -- returns table
+ local path, drive, subpath, name, base, suffix = lpegmatch(pattern_c,str)
+ if splitdrive then
+ return {
+ path = path,
+ drive = drive,
+ subpath = subpath,
+ name = name,
+ base = base,
+ suffix = suffix,
+ }
+ else
+ return {
+ path = path,
+ name = name,
+ base = base,
+ suffix = suffix,
+ }
+ end
end
-- function test(t) for k, v in next, t do print(v, "=>", file.splitname(v)) end end
@@ -2828,12 +3137,19 @@ local nothing = Cc("")
local escaped = (plus / " ") + (percent * C(hexdigit * hexdigit) / tochar)
-- we assume schemes with more than 1 character (in order to avoid problems with windows disks)
+-- we also assume that when we have a scheme, we also have an authority
+
+local schemestr = Cs((escaped+(1-colon-slash-qmark-hash))^2)
+local authoritystr = Cs((escaped+(1- slash-qmark-hash))^0)
+local pathstr = Cs((escaped+(1- qmark-hash))^0)
+local querystr = Cs((escaped+(1- hash))^0)
+local fragmentstr = Cs((escaped+(1- endofstring))^0)
-local scheme = Cs((escaped+(1-colon-slash-qmark-hash))^2) * colon * slash * slash + nothing
-local authority = Cs((escaped+(1- slash-qmark-hash))^0) + nothing
-local path = slash * Cs((escaped+(1- qmark-hash))^0) + nothing
-local query = qmark * Cs((escaped+(1- hash))^0) + nothing
-local fragment = hash * Cs((escaped+(1- endofstring))^0) + nothing
+local scheme = schemestr * colon + nothing
+local authority = slash * slash * authoritystr + nothing
+local path = slash * pathstr + nothing
+local query = qmark * querystr + nothing
+local fragment = hash * fragmentstr + nothing
local validurl = scheme * authority * path * query * fragment
local parser = Ct(validurl)
@@ -2854,11 +3170,14 @@ local function split(str)
return (type(str) == "string" and lpegmatch(parser,str)) or str
end
+local isscheme = schemestr * colon * slash * slash -- this test also assumes authority
+
local function hasscheme(str)
- local scheme = lpegmatch(scheme,str) -- at least one character
- return scheme and scheme ~= ""
+ local scheme = lpegmatch(isscheme,str) -- at least one character
+ return scheme ~= "" and scheme or false
end
+
-- todo: cache them
local rootletter = R("az","AZ")
@@ -3007,8 +3326,6 @@ local attributes = lfs.attributes
local walkdir = lfs.dir
local isdir = lfs.isdir
local isfile = lfs.isfile
-local mkdir = lfs.mkdir
-local chdir = lfs.chdir
local currentdir = lfs.currentdir
-- handy
@@ -3056,13 +3373,13 @@ local function collectpattern(path,patt,recurse,result)
local ok, scanner
result = result or { }
if path == "/" then
- ok, scanner = xpcall(function() return walkdir(path..".") end, function() end) -- kepler safe
+ ok, scanner, first = xpcall(function() return walkdir(path..".") end, function() end) -- kepler safe
else
- ok, scanner = xpcall(function() return walkdir(path) end, function() end) -- kepler safe
+ ok, scanner, first = xpcall(function() return walkdir(path) end, function() end) -- kepler safe
end
if ok and type(scanner) == "function" then
if not find(path,"/$") then path = path .. '/' end
- for name in scanner do
+ for name in scanner, first do
local full = path .. name
local attr = attributes(full)
local mode = attr.mode
@@ -3245,7 +3562,7 @@ if onwindows then
pth = pth .. "/" .. s
end
if make_indeed and not isdir(pth) then
- mkdir(pth)
+ lfs.mkdir(pth)
end
end
return pth, (isdir(pth) == true)
@@ -3277,7 +3594,7 @@ else
pth = pth .. "/" .. s
end
if make_indeed and not first and not isdir(pth) then
- mkdir(pth)
+ lfs.mkdir(pth)
end
end
else
@@ -3285,7 +3602,7 @@ else
for s in gmatch(str,"[^/]+") do
pth = pth .. "/" .. s
if make_indeed and not isdir(pth) then
- mkdir(pth)
+ lfs.mkdir(pth)
end
end
end
@@ -3313,10 +3630,10 @@ if onwindows then
first, last = match(str,"^([a-zA-Z]:)(.*)$")
if first and not find(last,"^/") then
local d = currentdir()
- if chdir(first) then
+ if lfs.chdir(first) then
first = dir.current()
end
- chdir(d)
+ lfs.chdir(d)
end
end
if not first then
@@ -3381,10 +3698,6 @@ local type, tonumber = type, tonumber
boolean = boolean or { }
local boolean = boolean
--- function boolean.tonumber(b)
--- return b and 1 or 0 -- test and test and return or return
--- end
-
function boolean.tonumber(b)
if b then return 1 else return 0 end -- test and return or return
end
@@ -3545,7 +3858,7 @@ local function utf16_to_utf8_be(t)
if right then
local now = 256*left + right
if more > 0 then
- now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000
+ now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000 -- the 0x10000 smells wrong
more = 0
r = r + 1
result[r] = utfchar(now)
@@ -3573,7 +3886,7 @@ local function utf16_to_utf8_le(t)
if right then
local now = 256*right + left
if more > 0 then
- now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000
+ now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000 -- the 0x10000 smells wrong
more = 0
r = r + 1
result[r] = utfchar(now)
@@ -3590,14 +3903,14 @@ local function utf16_to_utf8_le(t)
return t
end
-local function utf32_to_utf8_be(str)
+local function utf32_to_utf8_be(t)
if type(t) == "string" then
- t = utfsplitlines(str)
+ t = utfsplitlines(t)
end
local result = { } -- we reuse result
for i=1,#t do
local r, more = 0, -1
- for a,b in bytepairs(str) do
+ for a,b in bytepairs(t[i]) do
if a and b then
if more < 0 then
more = 256*256*256*a + 256*256*b
@@ -3612,17 +3925,17 @@ local function utf32_to_utf8_be(str)
end
t[i] = concat(result,"",1,r)
end
- return result
+ return t
end
-local function utf32_to_utf8_le(str)
+local function utf32_to_utf8_le(t)
if type(t) == "string" then
- t = utfsplitlines(str)
+ t = utfsplitlines(t)
end
local result = { } -- we reuse result
for i=1,#t do
local r, more = 0, -1
- for a,b in bytepairs(str) do
+ for a,b in bytepairs(t[i]) do
if a and b then
if more < 0 then
more = 256*b + a
@@ -3637,7 +3950,7 @@ local function utf32_to_utf8_le(str)
end
t[i] = concat(result,"",1,r)
end
- return result
+ return t
end
unicode.utf32_to_utf8_be = utf32_to_utf8_be
@@ -3706,15 +4019,67 @@ end
local lpegmatch = lpeg.match
-local utftype = lpeg.patterns.utftype
+local patterns = lpeg.patterns
+local utftype = patterns.utftype
function unicode.filetype(data)
return data and lpegmatch(utftype,data) or "unknown"
end
+local toentities = lpeg.Cs (
+ (
+ patterns.utf8one
+ + (
+ patterns.utf8two
+ + patterns.utf8three
+ + patterns.utf8four
+ ) / function(s) local b = utfbyte(s) if b < 127 then return s else return format("&#%X;",b) end end
+ )^0
+)
+
+patterns.toentities = toentities
+
+function utf.toentities(str)
+ return lpegmatch(toentities,str)
+end
+
+local P, C, R, Cs = lpeg.P, lpeg.C, lpeg.R, lpeg.Cs
+
+local one = P(1)
+local two = C(1) * C(1)
+local four = C(R(utfchar(0xD8),utfchar(0xFF))) * C(1) * C(1) * C(1)
+
+local pattern = P("\254\255") * Cs( (
+ four / function(a,b,c,d)
+ local ab = 0x100 * byte(a) + byte(b)
+ local cd = 0x100 * byte(c) + byte(d)
+ return utfchar((ab-0xD800)*0x400 + (cd-0xDC00) + 0x10000)
+ end
+ + two / function(a,b)
+ return utfchar(byte(a)*256 + byte(b))
+ end
+ + one
+ )^1 )
+ + P("\255\254") * Cs( (
+ four / function(b,a,d,c)
+ local ab = 0x100 * byte(a) + byte(b)
+ local cd = 0x100 * byte(c) + byte(d)
+ return utfchar((ab-0xD800)*0x400 + (cd-0xDC00) + 0x10000)
+ end
+ + two / function(b,a)
+ return utfchar(byte(a)*256 + byte(b))
+ end
+ + one
+ )^1 )
+
+function string.toutf(s)
+ return lpegmatch(pattern,s) or s -- todo: utf32
+end
+
+
end -- of closure
do -- create closure to overcome 200 locals limit
@@ -3750,8 +4115,8 @@ if not math.sind then
end
if not math.odd then
- function math.odd (n) return n % 2 == 0 end
- function math.even(n) return n % 2 ~= 0 end
+ function math.odd (n) return n % 2 ~= 0 end
+ function math.even(n) return n % 2 == 0 end
end
@@ -3771,9 +4136,10 @@ utilities = utilities or {}
utilities.tables = utilities.tables or { }
local tables = utilities.tables
-local format, gmatch = string.format, string.gmatch
+local format, gmatch, rep = string.format, string.gmatch, string.rep
local concat, insert, remove = table.concat, table.insert, table.remove
local setmetatable, getmetatable, tonumber, tostring = setmetatable, getmetatable, tonumber, tostring
+local type, next, rawset, tonumber = type, next, rawset, tonumber
function tables.definetable(target) -- defines undefined tables
local composed, t, n = nil, { }, 0
@@ -3789,14 +4155,31 @@ function tables.definetable(target) -- defines undefined tables
return concat(t,"\n")
end
-function tables.accesstable(target)
- local t = _G
+function tables.accesstable(target,root)
+ local t = root or _G
for name in gmatch(target,"([^%.]+)") do
t = t[name]
+ if not t then
+ return
+ end
end
return t
end
+function tables.migratetable(target,v,root)
+ local t = root or _G
+ local names = string.split(target,".")
+ for i=1,#names-1 do
+ local name = names[i]
+ t[name] = t[name] or { }
+ t = t[name]
+ if not t then
+ return
+ end
+ end
+ t[names[#names]] = v
+end
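+
+-- for instance (illustrative only):
+--
+-- tables.accesstable("x.y.z", { x = { y = { z = 42 } } })  -- 42
+-- local t = { } tables.migratetable("x.y.z",42,t)          -- t.x.y.z == 42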
+
function tables.removevalue(t,value) -- todo: n
if value then
for i=1,#t do
@@ -3840,13 +4223,19 @@ end
-- experimental
-local function toxml(t,d,result)
+local function toxml(t,d,result,step)
for k, v in table.sortedpairs(t) do
if type(v) == "table" then
- result[#result+1] = format("%s<%s>",d,k)
- toxml(v,d.." ",result)
- result[#result+1] = format("%s</%s>",d,k)
- elseif tonumber(k) then
+ if type(k) == "number" then
+ result[#result+1] = format("%s<entry n='%s'>",d,k)
+ toxml(v,d..step,result,step)
+ result[#result+1] = format("%s</entry>",d,k)
+ else
+ result[#result+1] = format("%s<%s>",d,k)
+ toxml(v,d..step,result,step)
+ result[#result+1] = format("%s</%s>",d,k)
+ end
+ elseif type(k) == "number" then
result[#result+1] = format("%s<entry n='%s'>%s</entry>",d,k,v,k)
else
result[#result+1] = format("%s<%s>%s</%s>",d,k,tostring(v),k)
@@ -3854,17 +4243,56 @@ local function toxml(t,d,result)
end
end
-function table.toxml(t,name,nobanner)
+function table.toxml(t,name,nobanner,indent,spaces)
local noroot = name == false
local result = (nobanner or noroot) and { } or { "<?xml version='1.0' standalone='yes' ?>" }
+ local indent = rep(" ",indent or 0)
+ local spaces = rep(" ",spaces or 1)
if noroot then
- toxml( t, "", result)
+ toxml( t, indent, result, spaces)
else
- toxml( { [name or "root"] = t }, "", result)
+ toxml( { [name or "root"] = t }, indent, result, spaces)
end
return concat(result,"\n")
end
+-- also experimental
+
+-- encapsulate(table,utilities.tables)
+-- encapsulate(table,utilities.tables,true)
+-- encapsulate(table,true)
+
+function tables.encapsulate(core,capsule,protect)
+ if type(capsule) ~= "table" then
+ protect = true
+ capsule = { }
+ end
+ for key, value in next, core do
+ if capsule[key] then
+ print(format("\ninvalid inheritance '%s' in '%s': %s",key,tostring(core)))
+ os.exit()
+ else
+ capsule[key] = value
+ end
+ end
+ if protect then
+ for key, value in next, core do
+ core[key] = nil
+ end
+ setmetatable(core, {
+ __index = capsule,
+ __newindex = function(t,key,value)
+ if capsule[key] then
+ print(format("\ninvalid overload '%s' in '%s'",key,tostring(core)))
+ os.exit()
+ else
+ rawset(t,key,value)
+ end
+ end
+ } )
+ end
+end
+
end -- of closure
@@ -3886,8 +4314,8 @@ local storage = utilities.storage
function storage.mark(t)
if not t then
- texio.write_nl("fatal error: storage '%s' cannot be marked",t)
- os.exit()
+ texio.write_nl("fatal error: storage cannot be marked")
+ return -- os.exit()
end
local m = getmetatable(t)
if not m then
@@ -3916,8 +4344,8 @@ end
function storage.checked(t)
if not t then
- texio.write_nl("fatal error: storage '%s' has not been allocated",t)
- os.exit()
+ texio.write_nl("fatal error: storage has not been allocated")
+ return -- os.exit()
end
return t
end
@@ -3946,13 +4374,13 @@ end
-- table namespace ?
-local function f_empty () return "" end -- t,k
-local function f_self (t,k) t[k] = k return k end
-local function f_ignore() end -- t,k,v
+local function f_empty () return "" end -- t,k
+local function f_self (t,k) t[k] = k return k end
+local function f_ignore() end -- t,k,v
-local t_empty = { __index = empty }
-local t_self = { __index = self }
-local t_ignore = { __newindex = ignore }
+local t_empty = { __index = f_empty }
+local t_self = { __index = f_self }
+local t_ignore = { __newindex = f_ignore }
function table.setmetatableindex(t,f)
local m = getmetatable(t)
@@ -3973,6 +4401,7 @@ function table.setmetatableindex(t,f)
setmetatable(t,{ __index = f })
end
end
+ return t
end
function table.setmetatablenewindex(t,f)
@@ -3990,6 +4419,7 @@ function table.setmetatablenewindex(t,f)
setmetatable(t,{ __newindex = f })
end
end
+ return t
end
function table.setmetatablecall(t,f)
@@ -3999,6 +4429,7 @@ function table.setmetatablecall(t,f)
else
setmetatable(t,{ __call = f })
end
+ return t
end
function table.setmetatablekey(t,key,value)
@@ -4008,6 +4439,7 @@ function table.setmetatablekey(t,key,value)
setmetatable(t,m)
end
m[key] = value
+ return t
end
function table.getmetatablekey(t,key,value)
@@ -4176,14 +4608,29 @@ utilities = utilities or {}
utilities.lua = utilities.lua or { }
utilities.report = logs and logs.reporter("system") or print
-function utilities.lua.compile(luafile,lucfile,cleanup,strip) -- defaults: cleanup=false strip=true
+local function stupidcompile(luafile,lucfile)
+ local data = io.loaddata(luafile)
+ if data and data ~= "" then
+ local chunk = loadstring(data)
+ data = chunk and string.dump(chunk)
+ if data and data ~= "" then
+ io.savedata(lucfile,data)
+ end
+ end
+end
+
+function utilities.lua.compile(luafile,lucfile,cleanup,strip,fallback) -- defaults: cleanup=false strip=true
utilities.report("lua: compiling %s into %s",luafile,lucfile)
os.remove(lucfile)
local command = "-o " .. string.quoted(lucfile) .. " " .. string.quoted(luafile)
if strip ~= false then
command = "-s " .. command
end
- local done = os.spawn("texluac " .. command) == 0 or os.spawn("luac " .. command) == 0
+ local done = os.spawn("texluac " .. command) == 0 -- or os.spawn("luac " .. command) == 0
+ if not done and fallback then
+ utilities.report("lua: dumping %s into %s (unstripped)",luafile,lucfile)
+ stupidcompile(luafile,lucfile) -- maybe use the stripper we have elsewhere
+ cleanup = false -- better see how much worse it is
+ end
if done and cleanup == true and lfs.isfile(lucfile) and lfs.isfile(luafile) then
utilities.report("lua: removing %s",luafile)
os.remove(luafile)
@@ -4210,9 +4657,9 @@ if not modules then modules = { } end modules ['util-prs'] = {
license = "see context related readme files"
}
-local P, R, V, C, Ct, Carg = lpeg.P, lpeg.R, lpeg.V, lpeg.C, lpeg.Ct, lpeg.Carg
+local P, R, V, C, Ct, Cs, Carg = lpeg.P, lpeg.R, lpeg.V, lpeg.C, lpeg.Ct, lpeg.Cs, lpeg.Carg
local lpegmatch = lpeg.match
-local concat, format, gmatch = table.concat, string.format, string.gmatch
+local concat, format, gmatch, find = table.concat, string.format, string.gmatch, string.find
local tostring, type, next = tostring, type, next
utilities = utilities or {}
@@ -4240,8 +4687,12 @@ local rbrace = P("}")
local nobrace = 1 - (lbrace+rbrace)
local nested = P { lbrace * (nobrace + V(1))^0 * rbrace }
local spaces = space^0
+local argument = Cs((lbrace/"") * ((nobrace + nested)^0) * (rbrace/""))
+local content = (1-P(-1))^0
-lpeg.patterns.nested = nested
+lpeg.patterns.nested = nested -- no capture
+lpeg.patterns.argument = argument -- argument after e.g. =
+lpeg.patterns.content = content -- rest after e.g. =
local value = P(lbrace * C((nobrace + nested)^0) * rbrace) + C((nested + (1-comma))^0)
@@ -4322,9 +4773,15 @@ parsers.patterns.settings_to_array = pattern
-- we could use a weak table as cache
-function parsers.settings_to_array(str)
+function parsers.settings_to_array(str,strict)
if not str or str == "" then
return { }
+ elseif strict then
+ if find(str,"{") then
+ return lpegmatch(pattern,str)
+ else
+ return { str }
+ end
else
return lpegmatch(pattern,str)
end
@@ -4562,61 +5019,55 @@ local names = { }
-- one
local function hook()
- local f = getinfo(2,"f").func
- local n = getinfo(2,"Sn")
--- if n.what == "C" and n.name then print (n.namewhat .. ': ' .. n.name) end
+ local f = getinfo(2) -- "nS"
if f then
- local cf = counters[f]
- if cf == nil then
- counters[f] = 1
- names[f] = n
- else
- counters[f] = cf + 1
- end
- end
-end
-
-local function getname(func)
- local n = names[func]
- if n then
- if n.what == "C" then
- return n.name or '<anonymous>'
+ local n = "unknown"
+ if f.what == "C" then
+ n = f.name or '<anonymous>'
+ if not names[n] then
+ names[n] = format("%42s",n)
+ end
else
-- source short_src linedefined what name namewhat nups func
- local name = n.name or n.namewhat or n.what
- if not name or name == "" then name = "?" end
- return format("%s : %s : %s", n.short_src or "unknown source", n.linedefined or "--", name)
+ n = f.name or f.namewhat or f.what
+ if not n or n == "" then
+ n = "?"
+ end
+ if not names[n] then
+ names[n] = format("%42s : % 5i : %s",n,f.linedefined or 0,f.short_src or "unknown source")
+ end
end
- else
- return "unknown"
+ counters[n] = (counters[n] or 0) + 1
end
end
-function debugger.showstats(printer,threshold)
+function debugger.showstats(printer,threshold) -- hm, something has changed, rubbish now
printer = printer or texio.write or print
threshold = threshold or 0
local total, grandtotal, functions = 0, 0, 0
- printer("\n") -- ugly but ok
- -- table.sort(counters)
- for func, count in next, counters do
- if count > threshold then
- local name = getname(func)
- if not find(name,"for generator") then
- printer(format("%8i %s", count, name))
- total = total + count
- end
+ local dataset = { }
+ for name, count in next, counters do
+ dataset[#dataset+1] = { name, count }
+ end
+ table.sort(dataset,function(a,b) return a[2] == b[2] and b[1] > a[1] or a[2] > b[2] end)
+ for i=1,#dataset do
+ local d = dataset[i]
+ local name = d[1]
+ local count = d[2]
+ if count > threshold and not find(name,"for generator") then -- move up
+ printer(format("%8i %s\n", count, names[name]))
+ total = total + count
end
grandtotal = grandtotal + count
functions = functions + 1
end
- printer(format("functions: %s, total: %s, grand total: %s, threshold: %s\n", functions, total, grandtotal, threshold))
+ printer("\n")
+ printer(format("functions : % 10i\n", functions))
+ printer(format("total : % 10i\n", total))
+ printer(format("grand total: % 10i\n", grandtotal))
+ printer(format("threshold : % 10i\n", threshold))
end
--- two
-
-
--- rest
-
function debugger.savestats(filename,threshold)
local f = io.open(filename,'w')
if f then
@@ -4638,6 +5089,7 @@ end
local is_node = node and node.is_node
+local is_lpeg = lpeg and lpeg.type
function inspect(i) -- global function
local ti = type(i)
@@ -4645,11 +5097,30 @@ function inspect(i) -- global function
table.print(i,"table")
elseif is_node and is_node(i) then
table.print(nodes.astable(i),tostring(i))
+ elseif is_lpeg and is_lpeg(i) then
+ lpeg.print(i)
else
print(tostring(i))
end
end
+-- from the lua book:
+
+function traceback()
+ local level = 1
+ while true do
+ local info = debug.getinfo(level, "Sl")
+ if not info then
+ break
+ elseif info.what == "C" then
+ print(format("%3i : C function",level))
+ else
+ print(format("%3i : [%s]:%d",level,info.short_src,info.currentline))
+ end
+ level = level + 1
+ end
+end
+
end -- of closure
@@ -4668,7 +5139,7 @@ if not modules then modules = { } end modules ['trac-inf'] = {
-- get warnings about assignments. This is more efficient than using rawset
-- and rawget.
-local format = string.format
+local format, lower = string.format, string.lower
local clock = os.gettimeofday or os.clock -- should go in environment
local write_nl = texio.write_nl
@@ -4770,10 +5241,10 @@ function statistics.show(reporter)
-- this code will move
local register = statistics.register
register("luatex banner", function()
- return string.lower(status.banner)
+ return lower(status.banner)
end)
register("control sequences", function()
- return format("%s of %s", status.cs_count, status.hash_size+status.hash_extra)
+ return format("%s of %s + %s", status.cs_count, status.hash_size,status.hash_extra)
end)
register("callbacks", function()
local total, indirect = status.callbacks or 0, status.indirect_callbacks or 0
@@ -4847,7 +5318,7 @@ end
function commands.elapsedtime(name)
stoptiming(name or "whatever")
- tex.sprint(elapsedtime(name or "whatever"))
+ context(elapsedtime(name or "whatever"))
end
@@ -4931,6 +5402,10 @@ local function set(t,what,newvalue)
if type(what) ~= "table" then
return
end
+ if not done then -- catch ... why not set?
+ done = { }
+ t.done = done
+ end
for w, value in next, what do
if value == "" then
value = newvalue
@@ -5167,6 +5642,8 @@ end
if texconfig then
+ -- this happens too late in ini mode but that is no problem
+
local function set(k,v)
v = tonumber(v)
if v then
@@ -5237,7 +5714,7 @@ local report, subreport, status, settarget, setformats, settranslations
local direct, subdirect, writer, pushtarget, poptarget
-if tex and tex.jobname or tex.formatname then
+if tex and (tex.jobname or tex.formatname) then
local valueiskey = { __index = function(t,k) t[k] = k return k end } -- will be helper
@@ -5252,6 +5729,10 @@ if tex and tex.jobname or tex.formatname then
write_nl(target,...)
end
+ newline = function()
+ write_nl(target,"\n")
+ end
+
report = function(a,b,c,...)
if c then
write_nl(target,format("%-15s > %s\n",translations[a],format(formats[b],c,...)))
@@ -5357,6 +5838,10 @@ else
writer = write_nl
+ newline = function()
+ write_nl("\n")
+ end
+
report = function(a,b,c,...)
if c then
write_nl(format("%-15s | %s",a,format(b,c,...)))
@@ -5416,6 +5901,7 @@ logs.settranslations = settranslations
logs.direct = direct
logs.subdirect = subdirect
logs.writer = writer
+logs.newline = newline
-- installer
@@ -5585,21 +6071,58 @@ local real, user, sub
function logs.start_page_number()
real, user, sub = texcount.realpageno, texcount.userpageno, texcount.subpageno
+-- real, user, sub = 0, 0, 0
end
-function logs.stop_page_number()
- if real > 0 then
- if user > 0 then
- if sub > 0 then
- report_pages("flushing realpage %s, userpage %s, subpage %s",real,user,sub)
+local timing = false
+local starttime = nil
+local lasttime = nil
+
+trackers.register("pages.timing", function(v) -- only for myself (diagnostics)
+ starttime = os.clock()
+ timing = true
+end)
+
+function logs.stop_page_number() -- the first page can include the initialization so we omit it from the average
+ if timing then
+ local elapsed, average
+ local stoptime = os.clock()
+ if not lasttime or real < 2 then
+ elapsed = stoptime
+ average = stoptime
+ starttime = stoptime
+ else
+ elapsed = stoptime - lasttime
+ average = (stoptime - starttime) / (real - 1)
+ end
+ lasttime = stoptime
+ if real > 0 then
+ if user > 0 then
+ if sub > 0 then
+ report_pages("flushing realpage %s, userpage %s, subpage %s, time %0.04f / %0.04f",real,user,sub,elapsed,average)
+ else
+ report_pages("flushing realpage %s, userpage %s, time %0.04f / %0.04f",real,user,elapsed,average)
+ end
else
- report_pages("flushing realpage %s, userpage %s",real,user)
+ report_pages("flushing realpage %s, time %0.04f / %0.04f",real,elapsed,average)
end
else
- report_pages("flushing realpage %s",real)
+ report_pages("flushing page, time %0.04f / %0.04f",elapsed,average)
end
else
- report_pages("flushing page")
+ if real > 0 then
+ if user > 0 then
+ if sub > 0 then
+ report_pages("flushing realpage %s, userpage %s, subpage %s",real,user,sub)
+ else
+ report_pages("flushing realpage %s, userpage %s",real,user)
+ end
+ else
+ report_pages("flushing realpage %s",real)
+ end
+ else
+ report_pages("flushing page")
+ end
end
logs.flush()
end
@@ -5681,6 +6204,13 @@ local function reportbanner(t)
end
end
+local function reportversion(t)
+ local banner = t.banner
+ if banner then
+ t.report(banner)
+ end
+end
+
local function reporthelp(t,...)
local helpinfo = t.helpinfo
if type(helpinfo) == "string" then
@@ -5707,6 +6237,7 @@ function logs.application(t)
t.report = logs.reporter(t.name)
t.help = function(...) reportbanner(t) ; reporthelp(t,...) ; reportinfo(t) end
t.identify = function() reportbanner(t) end
+ t.version = function() reportversion(t) end
return t
end
@@ -5772,6 +6303,11 @@ else
end
end
+-- do we still need io.flush then?
+
+io.stdout:setvbuf('no')
+io.stderr:setvbuf('no')
+
end -- of closure
@@ -6277,6 +6813,10 @@ if not modules then modules = { } end modules ['lxml-tab'] = {
-- stripping spaces from e.g. cont-en.xml saves .2 sec runtime so it's not worth the
-- trouble
+-- todo: when serializing optionally remap named entities to hex (if known in char-ent.lua)
+-- maybe when letter -> utf, else name .. then we need an option to the serializer .. a bit
+-- of work so we delay this until we clean up
+
local trace_entities = false trackers.register("xml.entities", function(v) trace_entities = v end)
local report_xml = logs and logs.reporter("xml","core") or function(...) print(format(...)) end
@@ -6301,10 +6841,11 @@ xml = xml or { }
local xml = xml
+local utf = unicode.utf8
local concat, remove, insert = table.concat, table.remove, table.insert
local type, next, setmetatable, getmetatable, tonumber = type, next, setmetatable, getmetatable, tonumber
local format, lower, find, match, gsub = string.format, string.lower, string.find, string.match, string.gsub
-local utfchar = unicode.utf8.char
+local utfchar, utffind, utfgsub = utf.char, utf.find, utf.gsub
local lpegmatch = lpeg.match
local P, S, R, C, V, C, Cs = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.V, lpeg.C, lpeg.Cs
@@ -6417,9 +6958,22 @@ element.</p>
local nsremap, resolvens = xml.xmlns, xml.resolvens
-local stack, top, dt, at, xmlns, errorstr, entities = { }, { }, { }, { }, { }, nil, { }
-local strip, cleanup, utfize, resolve, resolve_predefined, unify_predefined = false, false, false, false, false, false
-local dcache, hcache, acache = { }, { }, { }
+local stack = { }
+local top = { }
+local dt = { }
+local at = { }
+local xmlns = { }
+local errorstr = nil
+local entities = { }
+local strip = false
+local cleanup = false
+local utfize = false
+local resolve = false
+local resolve_predefined = false
+local unify_predefined = false
+
+local dcache = { }
+local hcache = { }
+local acache = { }
local mt = { }
@@ -6585,7 +7139,7 @@ local predefined_unified = {
[42] = "&quot;",
[47] = "&apos;",
[74] = "&lt;",
- [76] = "&gr;",
+ [76] = "&gt;",
}
local predefined_simplified = {
@@ -6596,6 +7150,58 @@ local predefined_simplified = {
[76] = ">", gt = ">",
}
+local nofprivates = 0xF0000 -- shared but seldom used
+
+local privates_u = { -- unescaped
+ [ [[&]] ] = "&amp;",
+ [ [["]] ] = "&quot;",
+ [ [[']] ] = "&apos;",
+ [ [[<]] ] = "&lt;",
+ [ [[>]] ] = "&gt;",
+}
+
+local privates_p = {
+}
+
+local privates_n = {
+ -- keeps track of defined ones
+}
+
+local function escaped(s)
+ if s == "" then
+ return ""
+ else -- if utffind(s,privates_u) then
+ return (utfgsub(s,".",privates_u))
+ -- else
+ -- return s
+ end
+end
+
+local function unescaped(s)
+ local p = privates_n[s]
+ if not p then
+ nofprivates = nofprivates + 1
+ p = utfchar(nofprivates)
+ privates_n[s] = p
+ s = "&" .. s .. ";" -- todo: use char-ent to map to hex
+ privates_u[p] = s
+ privates_p[p] = s
+ end
+ return p
+end
+
+local function unprivatized(s,resolve)
+ if s == "" then
+ return ""
+ else
+ return (utfgsub(s,".",privates_p))
+ end
+end
+
+xml.privatetoken = unescaped
+xml.unprivatized = unprivatized
+xml.privatecodes = privates_n
+
local function handle_hex_entity(str)
local h = hcache[str]
if not h then
@@ -6637,7 +7243,7 @@ local function handle_dec_entity(str)
if not n then
report_xml("utfize, ignoring dec entity &#%s;",str)
elseif trace_entities then
- report_xml("utfize, converting dec entity &#%s; into %s",str,h)
+ report_xml("utfize, converting dec entity &#%s; into %s",str,d)
end
else
if trace_entities then
@@ -6658,34 +7264,44 @@ local function handle_any_entity(str)
if not a then
a = resolve_predefined and predefined_simplified[str]
if a then
- -- one of the predefined
- elseif type(resolve) == "function" then
- a = resolve(str) or entities[str]
- else
- a = entities[str]
- end
- if a then
if trace_entities then
- report_xml("resolved entity &%s; -> %s (internal)",str,a)
+ report_xml("resolved entity &%s; -> %s (predefined)",str,a)
end
- a = lpegmatch(parsedentity,a) or a
else
- local unknown_any_entity = placeholders.unknown_any_entity
- if unknown_any_entity then
- a = unknown_any_entity(str) or ""
+ if type(resolve) == "function" then
+ a = resolve(str) or entities[str]
+ else
+ a = entities[str]
end
if a then
+ if type(a) == "function" then
+ if trace_entities then
+ report_xml("expanding entity &%s; (function)",str)
+ end
+ a = a(str) or ""
+ end
+ a = lpegmatch(parsedentity,a) or a -- for nested
if trace_entities then
- report_xml("resolved entity &%s; -> %s (external)",str,a)
+ report_xml("resolved entity &%s; -> %s (internal)",str,a)
end
else
- if trace_entities then
- report_xml("keeping entity &%s;",str)
+ local unknown_any_entity = placeholders.unknown_any_entity
+ if unknown_any_entity then
+ a = unknown_any_entity(str) or ""
end
- if str == "" then
- a = "&error;"
+ if a then
+ if trace_entities then
+ report_xml("resolved entity &%s; -> %s (external)",str,a)
+ end
else
- a = "&" .. str .. ";"
+ if trace_entities then
+ report_xml("keeping entity &%s;",str)
+ end
+ if str == "" then
+ a = "&error;"
+ else
+ a = "&" .. str .. ";"
+ end
end
end
end
@@ -6700,18 +7316,25 @@ local function handle_any_entity(str)
else
local a = acache[str]
if not a then
- if trace_entities then
- report_xml("found entity &%s;",str)
- end
a = resolve_predefined and predefined_simplified[str]
if a then
-- one of the predefined
acache[str] = a
+ if trace_entities then
+ report_xml("entity &%s; becomes %s",str,tostring(a))
+ end
elseif str == "" then
+ if trace_entities then
+ report_xml("invalid entity &%s;",str)
+ end
a = "&error;"
acache[str] = a
else
- a = "&" .. str .. ";"
+ if trace_entities then
+ report_xml("entity &%s; is made private",str)
+ end
+ -- a = "&" .. str .. ";"
+ a = unescaped(str)
acache[str] = a
end
end
@@ -6761,7 +7384,7 @@ local value = (squote * Cs((entity + (1 - squote))^0) * squote) + (dq
local endofattributes = slash * close + close -- recovery of flacky html
local whatever = space * name * optionalspace * equal
-local wrongvalue = C(P(1-whatever-close)^1 + P(1-close)^1) / attribute_value_error
+----- wrongvalue = C(P(1-whatever-close)^1 + P(1-close)^1) / attribute_value_error
----- wrongvalue = C(P(1-whatever-endofattributes)^1 + P(1-endofattributes)^1) / attribute_value_error
----- wrongvalue = C(P(1-space-endofattributes)^1) / attribute_value_error
local wrongvalue = Cs(P(entity + (1-space-endofattributes))^1) / attribute_value_error
@@ -6796,6 +7419,8 @@ local function normalentity(k,v ) entities[k] = v end
local function systementity(k,v,n) entities[k] = v end
local function publicentity(k,v,n) entities[k] = v end
+-- todo: separate dtd parser
+
local begindoctype = open * P("!DOCTYPE")
local enddoctype = close
local beginset = P("[")
@@ -6803,17 +7428,22 @@ local endset = P("]")
local doctypename = C((1-somespace-close)^0)
local elementdoctype = optionalspace * P("<!ELEMENT") * (1-close)^0 * close
+local basiccomment = begincomment * ((1 - endcomment)^0) * endcomment
+
local normalentitytype = (doctypename * somespace * value)/normalentity
local publicentitytype = (doctypename * somespace * P("PUBLIC") * somespace * value)/publicentity
local systementitytype = (doctypename * somespace * P("SYSTEM") * somespace * value * somespace * P("NDATA") * somespace * doctypename)/systementity
local entitydoctype = optionalspace * P("<!ENTITY") * somespace * (systementitytype + publicentitytype + normalentitytype) * optionalspace * close
-local doctypeset = beginset * optionalspace * P(elementdoctype + entitydoctype + space)^0 * optionalspace * endset
+-- we accept comments in doctypes
+
+local doctypeset = beginset * optionalspace * P(elementdoctype + entitydoctype + basiccomment + space)^0 * optionalspace * endset
local definitiondoctype= doctypename * somespace * doctypeset
local publicdoctype = doctypename * somespace * P("PUBLIC") * somespace * value * somespace * value * somespace * doctypeset
local systemdoctype = doctypename * somespace * P("SYSTEM") * somespace * value * somespace * doctypeset
local simpledoctype = (1-close)^1 -- * balanced^0
local somedoctype = C((somespace * (publicdoctype + systemdoctype + definitiondoctype + simpledoctype) * optionalspace)^0)
local instruction = (spacing * begininstruction * someinstruction * endinstruction) / function(...) add_special("@pi@",...) end
local comment = (spacing * begincomment * somecomment * endcomment ) / function(...) add_special("@cm@",...) end
@@ -6845,17 +7475,30 @@ local grammar_unparsed_text = P { "preamble",
children = unparsedtext + V("parent") + emptyelement + comment + cdata + instruction,
}
--- maybe we will add settinsg to result as well
+-- maybe we will add settings to result as well
-local function xmlconvert(data, settings)
- settings = settings or { } -- no_root strip_cm_and_dt given_entities parent_root error_handler
- strip = settings.strip_cm_and_dt
- utfize = settings.utfize_entities
- resolve = settings.resolve_entities
+local function _xmlconvert_(data, settings)
+ settings = settings or { } -- no_root strip_cm_and_dt given_entities parent_root error_handler
+ --
+ strip = settings.strip_cm_and_dt
+ utfize = settings.utfize_entities
+ resolve = settings.resolve_entities
resolve_predefined = settings.resolve_predefined_entities -- in case we have escaped entities
- unify_predefined = settings.unify_predefined_entities -- &#038; -> &amp;
- cleanup = settings.text_cleanup
- stack, top, at, xmlns, errorstr, entities = { }, { }, { }, { }, nil, settings.entities or { }
+ unify_predefined = settings.unify_predefined_entities -- &#038; -> &amp;
+ cleanup = settings.text_cleanup
+ entities = settings.entities or { }
+ --
+ if utfize == nil then
+ settings.utfize_entities = true
+ utfize = true
+ end
+ if resolve_predefined == nil then
+ settings.resolve_predefined_entities = true
+ resolve_predefined = true
+ end
+ --
+ --
+ stack, top, at, xmlns, errorstr = { }, { }, { }, { }, nil
acache, hcache, dcache = { }, { }, { } -- not stored
reported_attribute_errors = { }
if settings.parent_root then
@@ -6893,7 +7536,7 @@ local function xmlconvert(data, settings)
else
errorhandler = errorhandler or xml.errorhandler
if errorhandler then
- xml.errorhandler("load",errorstr)
+ xml.errorhandler(format("load error: %s",errorstr))
end
end
else
@@ -6907,7 +7550,7 @@ local function xmlconvert(data, settings)
local v = rdt[k]
if type(v) == "table" and not v.special then -- always table -)
result.ri = k -- rootindex
-v.__p__ = result -- new, experiment, else we cannot go back to settings, we need to test this !
+ v.__p__ = result -- new, experiment, else we cannot go back to settings, we need to test this !
break
end
end
@@ -6915,16 +7558,42 @@ v.__p__ = result -- new, experiment, else we cannot go back to settings, we nee
if errorstr and errorstr ~= "" then
result.error = true
end
+ result.statistics = {
+ entities = {
+ decimals = dcache,
+ hexadecimals = hcache,
+ names = acache,
+ }
+ }
+ strip, utfize, resolve, resolve_predefined = nil, nil, nil, nil
+ unify_predefined, cleanup, entities = nil, nil, nil
+ stack, top, at, xmlns, errorstr = nil, nil, nil, nil, nil
+ acache, hcache, dcache = nil, nil, nil
+ reported_attribute_errors, mt, errorhandler = nil, nil, nil
return result
end
+-- Because we can have a crash (stack issues) with faulty xml, we wrap this one
+-- in a protector:
+
+function xmlconvert(data,settings)
+ local ok, result = pcall(function() return _xmlconvert_(data,settings) end)
+ if ok then
+ return result
+ else
+ return _xmlconvert_("")
+ end
+end
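+
+-- usage sketch: the protected call always yields a table, so a caller can test
+-- the error flag instead of wrapping the conversion in a pcall itself, e.g.
+--
+-- local root = xml.convert("<a>hello</a>")
+-- if root.error then
+--     -- report or fall back
+-- end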
+
xml.convert = xmlconvert
-function xml.inheritedconvert(data,xmldata)
+function xml.inheritedconvert(data,xmldata) -- xmldata is parent
local settings = xmldata.settings
- settings.parent_root = xmldata -- to be tested
+ if settings then
+ settings.parent_root = xmldata -- to be tested
+ end
-- settings.no_root = true
- local xc = xmlconvert(data,settings)
+ local xc = xmlconvert(data,settings) -- hm, we might need to locate settings
-- xc.settings = nil
-- xc.entities = nil
-- xc.special = nil
@@ -6953,7 +7622,7 @@ function xml.is_valid(root)
return root and not root.error
end
-xml.errorhandler = report
+xml.errorhandler = report_xml
--[[ldx--
<p>We cannot load an <l n='lpeg'/> from a filehandle so we need to load
@@ -7032,18 +7701,15 @@ alternative.</p>
function xml.checkbom(root) -- can be made faster
if root.ri then
- local dt, found = root.dt, false
+ local dt = root.dt
for k=1,#dt do
local v = dt[k]
if type(v) == "table" and v.special and v.tg == "@pi@" and find(v.dt[1],"xml.*version=") then
- found = true
- break
+ return
end
end
- if not found then
- insert(dt, 1, { special=true, ns="", tg="@pi@", dt = { "xml version='1.0' standalone='yes'"} } )
- insert(dt, 2, "\n" )
- end
+ insert(dt, 1, { special=true, ns="", tg="@pi@", dt = { "xml version='1.0' standalone='yes'"} } )
+ insert(dt, 2, "\n" )
end
end
@@ -7054,14 +7720,14 @@ and then handle the lot.</p>
-- new experimental reorganized serialize
-local function verbose_element(e,handlers)
+local function verbose_element(e,handlers) -- options
local handle = handlers.handle
local serialize = handlers.serialize
local ens, etg, eat, edt, ern = e.ns, e.tg, e.at, e.dt, e.rn
local ats = eat and next(eat) and { }
if ats then
for k,v in next, eat do
- ats[#ats+1] = format('%s=%q',k,v)
+ ats[#ats+1] = format('%s=%q',k,escaped(v))
end
end
if ern and trace_entities and ern ~= ens then
@@ -7077,7 +7743,7 @@ local function verbose_element(e,handlers)
for i=1,#edt do
local e = edt[i]
if type(e) == "string" then
- handle(e)
+ handle(escaped(e))
else
serialize(e,handlers)
end
@@ -7098,11 +7764,11 @@ local function verbose_element(e,handlers)
handle("<",etg,">")
end
for i=1,#edt do
- local ei = edt[i]
- if type(ei) == "string" then
- handle(ei)
+ local e = edt[i]
+ if type(e) == "string" then
+ handle(escaped(e)) -- option: hexify escaped entities
else
- serialize(ei,handlers)
+ serialize(e,handlers)
end
end
handle("</",etg,">")
@@ -7137,7 +7803,7 @@ local function verbose_root(e,handlers)
end
local function verbose_text(e,handlers)
- handlers.handle(e)
+ handlers.handle(escaped(e))
end
local function verbose_document(e,handlers)
@@ -7265,20 +7931,33 @@ local result
local xmlfilehandler = newhandlers {
name = "file",
- initialize = function(name) result = io.open(name,"wb") return result end,
- finalize = function() result:close() return true end,
- handle = function(...) result:write(...) end,
+ initialize = function(name)
+ result = io.open(name,"wb")
+ return result
+ end,
+ finalize = function()
+ result:close()
+ return true
+ end,
+ handle = function(...)
+ result:write(...)
+ end,
}
-- no checking on writeability here but not faster either
--
-- local xmlfilehandler = newhandlers {
--- initialize = function(name) io.output(name,"wb") return true end,
--- finalize = function() io.close() return true end,
+-- initialize = function(name)
+-- io.output(name,"wb")
+-- return true
+-- end,
+-- finalize = function()
+-- io.close()
+-- return true
+-- end,
-- handle = io.write,
-- }
-
function xml.save(root,name)
serialize(root,xmlfilehandler,name)
end
@@ -7287,28 +7966,34 @@ local result
local xmlstringhandler = newhandlers {
name = "string",
- initialize = function() result = { } return result end,
- finalize = function() return concat(result) end,
- handle = function(...) result[#result+1] = concat { ... } end
+ initialize = function()
+ result = { }
+ return result
+ end,
+ finalize = function()
+ return concat(result)
+ end,
+ handle = function(...)
+ result[#result+1] = concat { ... }
+ end,
}
local function xmltostring(root) -- 25% overhead due to collecting
- if root then
- if type(root) == 'string' then
- return root
- else -- if next(root) then -- next is faster than type (and >0 test)
- return serialize(root,xmlstringhandler) or ""
- end
+ if not root then
+ return ""
+ elseif type(root) == 'string' then
+ return root
+ else -- if next(root) then -- next is faster than type (and >0 test)
+ return serialize(root,xmlstringhandler) or ""
end
- return ""
end
-local function xmltext(root) -- inline
+local function __tostring(root) -- inline
return (root and xmltostring(root)) or ""
end
initialize_mt = function(root) -- redefinition
- mt = { __tostring = xmltext, __index = root }
+ mt = { __tostring = __tostring, __index = root }
end
xml.defaulthandlers = handlers
@@ -7428,7 +8113,7 @@ xml.tocdata(e,"error")
--ldx]]--
function xml.tocdata(e,wrapper)
- local whatever = xmltostring(e.dt)
+ local whatever = type(e) == "table" and xmltostring(e.dt) or e or ""
if wrapper then
whatever = format("<%s>%s</%s>",wrapper,whatever,wrapper)
end
@@ -7620,8 +8305,8 @@ apply_axis['child'] = function(list)
c = c + 1
collected[c] = dk
dk.ni = k -- refresh
- en = en + 1
- dk.ei = en
+ en = en + 1
+ dk.ei = en
end
end
ll.en = en
@@ -7649,6 +8334,7 @@ local function collect(list,collected,c)
end
return c
end
+
apply_axis['descendant'] = function(list)
local collected, c = { }, 0
for l=1,#list do
@@ -7940,6 +8626,12 @@ local lp_or = P("|") / " or "
local lp_and = P("&") / " and "
local lp_builtin = P (
+ P("text") / "(ll.dt[1] or '')" + -- fragile
+ P("content") / "ll.dt" +
+ -- P("name") / "(ll.ns~='' and ll.ns..':'..ll.tg)" +
+ P("name") / "((ll.ns~='' and ll.ns..':'..ll.tg) or ll.tg)" +
+ P("tag") / "ll.tg" +
+ P("position") / "l" + -- is element in finalizer
P("firstindex") / "1" +
P("lastindex") / "(#ll.__p__.dt or 1)" +
P("firstelement") / "1" +
@@ -7947,15 +8639,11 @@ local lp_builtin = P (
P("first") / "1" +
P("last") / "#list" +
P("rootposition") / "order" +
- P("position") / "l" + -- is element in finalizer
P("order") / "order" +
P("element") / "(ll.ei or 1)" +
P("index") / "(ll.ni or 1)" +
P("match") / "(ll.mi or 1)" +
- P("text") / "(ll.dt[1] or '')" +
- -- P("name") / "(ll.ns~='' and ll.ns..':'..ll.tg)" +
- P("name") / "((ll.ns~='' and ll.ns..':'..ll.tg) or ll.tg)" +
- P("tag") / "ll.tg" +
+ -- P("namespace") / "ll.ns" +
P("ns") / "ll.ns"
) * ((spaces * P("(") * spaces * P(")"))/"")
@@ -7979,11 +8667,11 @@ local lp_function = C(R("az","AZ","__")^1) * P("(") / function(t) -- todo: bett
end
end
-local lparent = lpeg.P("(")
-local rparent = lpeg.P(")")
+local lparent = P("(")
+local rparent = P(")")
local noparent = 1 - (lparent+rparent)
-local nested = lpeg.P{lparent * (noparent + lpeg.V(1))^0 * rparent}
-local value = lpeg.P(lparent * lpeg.C((noparent + nested)^0) * rparent) -- lpeg.P{"("*C(((1-S("()"))+V(1))^0)*")"}
+local nested = P{lparent * (noparent + V(1))^0 * rparent}
+local value = P(lparent * C((noparent + nested)^0) * rparent) -- P{"("*C(((1-S("()"))+V(1))^0)*")"}
local lp_child = Cc("expr.child(ll,'") * R("az","AZ","--","__")^1 * Cc("')")
local lp_number = S("+-") * R("09")^1
@@ -8135,7 +8823,10 @@ local special_1 = P("*") * Cc(register_auto_descendant) * Cc(register_all_nodes
local special_2 = P("/") * Cc(register_auto_self)
local special_3 = P("") * Cc(register_auto_self)
-local pathparser = Ct { "patterns", -- can be made a bit faster by moving pattern outside
+local no_nextcolon = P(-1) + #(1-P(":")) -- newer lpeg needs the P(-1)
+local no_nextlparent = P(-1) + #(1-P("(")) -- newer lpeg needs the P(-1)
+
+local pathparser = Ct { "patterns", -- can be made a bit faster by moving some patterns outside
patterns = spaces * V("protocol") * spaces * (
( V("special") * spaces * P(-1) ) +
@@ -8164,10 +8855,8 @@ local pathparser = Ct { "patterns", -- can be made a bit faster by moving patter
shortcuts = V("shortcuts_a") * (spaces * "/" * spaces * V("shortcuts_a"))^0,
s_descendant_or_self = (P("***/") + P("/")) * Cc(register_descendant_or_self), --- *** is a bonus
- -- s_descendant_or_self = P("/") * Cc(register_descendant_or_self),
s_descendant = P("**") * Cc(register_descendant),
- s_child = P("*") * #(1-P(":")) * Cc(register_child ),
--- s_child = P("*") * #(P("/")+P(-1)) * Cc(register_child ),
+ s_child = P("*") * no_nextcolon * Cc(register_child ),
s_parent = P("..") * Cc(register_parent ),
s_self = P("." ) * Cc(register_self ),
s_root = P("^^") * Cc(register_root ),
@@ -8194,13 +8883,13 @@ local pathparser = Ct { "patterns", -- can be made a bit faster by moving patter
expressions = expression / register_expression,
letters = R("az")^1,
- name = (1-lpeg.S("/[]()|:*!"))^1,
+ name = (1-S("/[]()|:*!"))^1, -- make inline
negate = P("!") * Cc(false),
nodefunction = V("negate") + P("not") * Cc(false) + Cc(true),
nodetest = V("negate") + Cc(true),
nodename = (V("negate") + Cc(true)) * spaces * ((V("wildnodename") * P(":") * V("wildnodename")) + (Cc(false) * V("wildnodename"))),
- wildnodename = (C(V("name")) + P("*") * Cc(false)) * #(1-P("(")),
+ wildnodename = (C(V("name")) + P("*") * Cc(false)) * no_nextlparent,
nodeset = spaces * Ct(V("nodename") * (spaces * P("|") * spaces * V("nodename"))^0) * spaces,
finalizer = (Cb("protocol") * P("/")^-1 * C(V("name")) * arguments * P(-1)) / register_finalizer,
@@ -8364,7 +9053,7 @@ local function profiled_apply(list,parsed,nofparsed,order)
elseif kind == "expression" then
collected = apply_expression(collected,pi.evaluator,order)
elseif kind == "finalizer" then
- collected = pi.finalizer(collected)
+ collected = pi.finalizer(collected) -- no check on # here
p.matched = p.matched + 1
p.finalized = p.finalized + 1
return collected
@@ -8497,9 +9186,9 @@ end
expressions.child = function(e,pattern)
return applylpath(e,pattern) -- todo: cache
end
-expressions.count = function(e,pattern)
+expressions.count = function(e,pattern) -- what if pattern == empty or nil
local collected = applylpath(e,pattern) -- todo: cache
- return (collected and #collected) or 0
+ return pattern and (collected and #collected) or 0
end
-- external
@@ -8508,7 +9197,7 @@ expressions.oneof = function(s,...) -- slow
local t = {...} for i=1,#t do if s == t[i] then return true end end return false
end
expressions.error = function(str)
- xml.errorhandler("unknown function in lpath expression",tostring(str or "?"))
+ xml.errorhandler(format("unknown function in lpath expression: %s",tostring(str or "?")))
return false
end
expressions.undefined = function(s)
@@ -8534,6 +9223,23 @@ expressions.lower = lower
expressions.number = tonumber
expressions.boolean = toboolean
+function expressions.contains(str,pattern)
+ local t = type(str)
+ if t == "string" then
+ if find(str,pattern) then
+ return true
+ end
+ elseif t == "table" then
+ for i=1,#str do
+ local d = str[i]
+ if type(d) == "string" and find(d,pattern) then
+ return true
+ end
+ end
+ end
+ return false
+end
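+
+-- sketch, assuming the usual lpath predicate syntax: select elements whose text
+-- matches a (Lua) pattern, e.g.
+--
+-- xml.filter(root,"/a/b[contains(text(),'foo')]")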
+
-- user interface
local function traverse(root,pattern,handle)
@@ -8856,10 +9562,12 @@ local xml = xml
local xmlconvert, xmlcopy, xmlname = xml.convert, xml.copy, xml.name
local xmlinheritedconvert = xml.inheritedconvert
local xmlapplylpath = xml.applylpath
+local xmlfilter = xml.filter
local type, setmetatable, getmetatable = type, setmetatable, getmetatable
-local insert, remove, fastcopy = table.insert, table.remove, table.fastcopy
-local gmatch, gsub = string.gmatch, string.gsub
+local insert, remove, fastcopy, concat = table.insert, table.remove, table.fastcopy, table.concat
+local gmatch, gsub, format = string.gmatch, string.gsub, string.format
+local utfbyte = utf.byte
local function report(what,pattern,c,e)
report_xml("%s element '%s' (root: '%s', position: %s, index: %s, pattern: %s)",what,xmlname(e),xmlname(e.__p__),c,e.ni,pattern)
@@ -9004,7 +9712,7 @@ local function xmltoelement(whatever,root)
end
local element
if type(whatever) == "string" then
- element = xmlinheritedconvert(whatever,root)
+ element = xmlinheritedconvert(whatever,root) -- beware, not really a root
else
element = whatever -- we assume a table
end
@@ -9031,18 +9739,30 @@ local function copiedelement(element,newparent)
end
function xml.delete(root,pattern)
- local collected = xmlapplylpath(root,pattern)
- if collected then
- for c=1,#collected do
- local e = collected[c]
- local p = e.__p__
- if p then
- if trace_manipulations then
- report('deleting',pattern,c,e)
+ if not pattern or pattern == "" then
+ local p = root.__p__
+ if p then
+ if trace_manipulations then
+ report('deleting',"--",c,root)
+ end
+ local d = p.dt
+ remove(d,root.ni)
+ redo_ni(d) -- can be made faster and inlined
+ end
+ else
+ local collected = xmlapplylpath(root,pattern)
+ if collected then
+ for c=1,#collected do
+ local e = collected[c]
+ local p = e.__p__
+ if p then
+ if trace_manipulations then
+ report('deleting',pattern,c,e)
+ end
+ local d = p.dt
+ remove(d,e.ni)
+ redo_ni(d) -- can be made faster and inlined
end
- local d = p.dt
- remove(d,e.ni)
- redo_ni(d) -- can be made faster and inlined
end
end
end
@@ -9105,32 +9825,39 @@ end
local function inject_element(root,pattern,whatever,prepend)
local element = root and xmltoelement(whatever,root)
local collected = element and xmlapplylpath(root,pattern)
- if collected then
- for c=1,#collected do
- local e = collected[c]
- local r = e.__p__
- local d, k, rri = r.dt, e.ni, r.ri
- local edt = (rri and d[rri].dt) or (d and d[k] and d[k].dt)
- if edt then
- local be, af
- local cp = copiedelement(element,e)
- if prepend then
- be, af = cp, edt
- else
- be, af = edt, cp
- end
- local bn = #be
- for i=1,#af do
- bn = bn + 1
- be[bn] = af[i]
- end
- if rri then
- r.dt[rri].dt = be
- else
- d[k].dt = be
- end
- redo_ni(d)
+ local function inject_e(e)
+ local r = e.__p__
+ local d, k, rri = r.dt, e.ni, r.ri
+ local edt = (rri and d[rri].dt) or (d and d[k] and d[k].dt)
+ if edt then
+ local be, af
+ local cp = copiedelement(element,e)
+ if prepend then
+ be, af = cp, edt
+ else
+ be, af = edt, cp
end
+ local bn = #be
+ for i=1,#af do
+ bn = bn + 1
+ be[bn] = af[i]
+ end
+ if rri then
+ r.dt[rri].dt = be
+ else
+ d[k].dt = be
+ end
+ redo_ni(d)
+ end
+ end
+ if not collected then
+ -- nothing
+ elseif collected.tg then
+ -- first or so
+ inject_e(collected)
+ else
+ for c=1,#collected do
+ inject_e(collected[c])
end
end
end
@@ -9138,16 +9865,23 @@ end
local function insert_element(root,pattern,whatever,before) -- todo: element als functie
local element = root and xmltoelement(whatever,root)
local collected = element and xmlapplylpath(root,pattern)
- if collected then
+ local function insert_e(e)
+ local r = e.__p__
+ local d, k = r.dt, e.ni
+ if not before then
+ k = k + 1
+ end
+ insert(d,k,copiedelement(element,r))
+ redo_ni(d)
+ end
+ if not collected then
+ -- nothing
+ elseif collected.tg then
+ -- first or so
+ insert_e(collected)
+ else
for c=1,#collected do
- local e = collected[c]
- local r = e.__p__
- local d, k = r.dt, e.ni
- if not before then
- k = k + 1
- end
- insert(d,k,copiedelement(element,r))
- redo_ni(d)
+ insert_e(collected[c])
end
end
end
@@ -9346,6 +10080,75 @@ function xml.remapname(root, pattern, newtg, newns, newrn)
end
--[[ldx--
+<p>Helper (for q2p).</p>
+--ldx]]--
+
+function xml.cdatatotext(e)
+ local dt = e.dt
+ if #dt == 1 then
+ local first = dt[1]
+ if first.tg == "@cd@" then
+ e.dt = first.dt
+ end
+ else
+ -- maybe option
+ end
+end
+
+xml.builtinentities = table.tohash { "amp", "quot", "apos", "lt", "gt" } -- used often so share
+
+local entities = characters and characters.entities or nil
+local builtinentities = xml.builtinentities
+
+function xml.addentitiesdoctype(root,option) -- we could also have a 'resolve' i.e. inline hex
+ if not entities then
+ require("char-ent")
+ entities = characters.entities
+ end
+ if entities and root and root.tg == "@rt@" and root.statistics then
+ local list = { }
+ local hexify = option == "hexadecimal"
+ for k, v in table.sortedhash(root.statistics.entities.names) do
+ if not builtinentities[k] then
+ local e = entities[k]
+ if not e then
+ e = format("[%s]",k)
+ elseif hexify then
+ e = format("&#%05X;",utfbyte(k))
+ end
+ list[#list+1] = format(" <!ENTITY %s %q >",k,e)
+ end
+ end
+ local dt = root.dt
+ local n = dt[1].tg == "@pi@" and 2 or 1
+ if #list > 0 then
+ insert(dt, n, { "\n" })
+ insert(dt, n, {
+ tg = "@dt@", -- beware, doctype is unparsed
+ dt = { format("Something [\n%s\n] ",concat(list)) },
+ ns = "",
+ special = true,
+ })
+ insert(dt, n, { "\n\n" })
+ else
+ -- insert(dt, n, { table.serialize(root.statistics) })
+ end
+ end
+end
+
+-- local str = [==[
+-- <?xml version='1.0' standalone='yes' ?>
+-- <root>
+-- <a>test &nbsp; test &#123; test</a>
+-- <b><![CDATA[oeps]]></b>
+-- </root>
+-- ]==]
+--
+-- local x = xml.convert(str)
+-- xml.addentitiesdoctype(x,"hexadecimal")
+-- print(x)
+
+--[[ldx--
<p>Here are a few synonyms.</p>
--ldx]]--
@@ -9378,6 +10181,53 @@ xml.remap_tag = xml.remaptag obsolete.remap_tag
xml.remap_name = xml.remapname obsolete.remap_name = xml.remapname
xml.remap_namespace = xml.remapnamespace obsolete.remap_namespace = xml.remapnamespace
+-- new (probably ok)
+
+function xml.cdata(e)
+ if e then
+ local dt = e.dt
+ if dt and #dt == 1 then
+ local first = dt[1]
+ return first.tg == "@cd@" and first.dt[1] or ""
+ end
+ end
+ return ""
+end
+
+function xml.finalizers.xml.cdata(collected)
+ if collected then
+ local e = collected[1]
+ if e then
+ local dt = e.dt
+ if dt and #dt == 1 then
+ local first = dt[1]
+ return first.tg == "@cd@" and first.dt[1] or ""
+ end
+ end
+ end
+ return ""
+end
+
+function xml.insertcomment(e,str,n) -- also insertcdata
+ table.insert(e.dt,n or 1,{
+ tg = "@cm@",
+ ns = "",
+ special = true,
+ at = { },
+ dt = { str },
+ })
+end
+
+function xml.setcdata(e,str) -- also setcomment
+ e.dt = { {
+ tg = "@cd@",
+ ns = "",
+ special = true,
+ at = { },
+ dt = { str },
+ } }
+end
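+
+-- sketch of intended use (a hypothetical element e as obtained from a filter):
+--
+-- xml.insertcomment(e,"generated",1) -- prepend a comment node to e.dt
+-- xml.setcdata(e,"raw text")         -- replace the content of e by a single cdata node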
+
end -- of closure
@@ -9391,15 +10241,17 @@ if not modules then modules = { } end modules ['lxml-xml'] = {
license = "see context related readme files"
}
-local concat = string.concat
+local concat = table.concat
+local find = string.find
local xml = xml
-local finalizers = xml.finalizers.xml
-local xmlfilter = xml.filter -- we could inline this one for speed
-local xmltostring = xml.tostring
-local xmlserialize = xml.serialize
-local xmlcollected = xml.collected
+local finalizers = xml.finalizers.xml
+local xmlfilter = xml.filter -- we could inline this one for speed
+local xmltostring = xml.tostring
+local xmlserialize = xml.serialize
+local xmlcollected = xml.collected
+local xmlnewhandlers = xml.newhandlers
local function first(collected) -- wrong ?
return collected and collected[1]
@@ -9413,6 +10265,21 @@ local function all(collected)
return collected
end
+-- local function reverse(collected)
+-- if collected then
+-- local nc = #collected
+-- if nc > 0 then
+-- local reversed, r = { }, 0
+-- for c=nc,1,-1 do
+-- r = r + 1
+-- reversed[r] = collected[c]
+-- end
+-- return reversed
+-- else
+-- return collected
+-- end
+-- end
+-- end
local reverse = table.reversed
@@ -9429,34 +10296,37 @@ local function att(id,name)
end
local function count(collected)
- return (collected and #collected) or 0
+ return collected and #collected or 0
end
local function position(collected,n)
- if collected then
- n = tonumber(n) or 0
- if n < 0 then
- return collected[#collected + n + 1]
- elseif n > 0 then
- return collected[n]
- else
- return collected[1].mi or 0
- end
+ if not collected then
+ return 0
+ end
+ local nc = #collected
+ if nc == 0 then
+ return 0
+ end
+ n = tonumber(n) or 0
+ if n < 0 then
+ return collected[nc + n + 1]
+ elseif n > 0 then
+ return collected[n]
+ else
+ return collected[1].mi or 0
end
end
local function match(collected)
- return (collected and collected[1].mi) or 0 -- match
+ return collected and #collected > 0 and collected[1].mi or 0 -- match
end
local function index(collected)
- if collected then
- return collected[1].ni
- end
+ return collected and #collected > 0 and collected[1].ni or 0 -- 0 is new
end
local function attributes(collected,arguments)
- if collected then
+ if collected and #collected > 0 then
local at = collected[1].at
if arguments then
return at[arguments]
@@ -9467,7 +10337,7 @@ local function attributes(collected,arguments)
end
local function chainattribute(collected,arguments) -- todo: optional levels
- if collected then
+ if collected and #collected > 0 then
local e = collected[1]
while e do
local at = e.at
@@ -9485,108 +10355,169 @@ local function chainattribute(collected,arguments) -- todo: optional levels
return ""
end
-local function raw(collected) -- hybrid
- if collected then
+local function raw(collected) -- hybrid (not much different from text so it might go)
+ if collected and #collected > 0 then
local e = collected[1] or collected
- return (e and xmlserialize(e)) or "" -- only first as we cannot concat function
+ return e and xmltostring(e) or "" -- only first as we cannot concat function
else
return ""
end
end
+--
+
+local xmltexthandler = xmlnewhandlers {
+ name = "string",
+ initialize = function()
+ result = { }
+ return result
+ end,
+ finalize = function()
+ return concat(result)
+ end,
+ handle = function(...)
+ result[#result+1] = concat { ... }
+ end,
+ escape = false,
+}
+
+local function xmltotext(root)
+ local dt = root.dt
+ if not dt then
+ return ""
+ end
+ local nt = #dt -- string or table
+ if nt == 0 then
+ return ""
+ elseif nt == 1 and type(dt[1]) == "string" then
+ return dt[1] -- no escaping of " ' < > &
+ else
+ return xmlserialize(root,xmltexthandler) or ""
+ end
+end
+
+--
+
local function text(collected) -- hybrid
- if collected then
- local e = collected[1] or collected
- return (e and xmltostring(e.dt)) or ""
+ if collected then -- no # test here !
+ local e = collected[1] or collected -- why fallback to element, how about cdata
+ return e and xmltotext(e) or ""
else
return ""
end
end
local function texts(collected)
- if collected then
- local t, n = { }, 0
- for c=1,#collected do
- local e = collected[c]
- if e and e.dt then
- n = n + 1
- t[n] = e.dt
- end
+ if not collected then
+ return { } -- why no nil
+ end
+ local nc = #collected
+ if nc == 0 then
+ return { } -- why no nil
+ end
+ local t, n = { }, 0
+ for c=1,nc do
+ local e = collected[c]
+ if e and e.dt then
+ n = n + 1
+ t[n] = e.dt
end
- return t
end
+ return t
end
local function tag(collected,n)
- if collected then
- local c
- if n == 0 or not n then
- c = collected[1]
- elseif n > 1 then
- c = collected[n]
- else
- c = collected[#collected-n+1]
- end
- return c and c.tg
+ if not collected then
+ return
+ end
+ local nc = #collected
+ if nc == 0 then
+ return
end
+ local c
+ if n == 0 or not n then
+ c = collected[1]
+ elseif n > 1 then
+ c = collected[n]
+ else
+ c = collected[nc-n+1]
+ end
+ return c and c.tg
end
local function name(collected,n)
- if collected then
- local c
- if n == 0 or not n then
- c = collected[1]
- elseif n > 1 then
- c = collected[n]
- else
- c = collected[#collected-n+1]
- end
- if c then
- if c.ns == "" then
- return c.tg
- else
- return c.ns .. ":" .. c.tg
- end
- end
+ if not collected then
+ return
+ end
+ local nc = #collected
+ if nc == 0 then
+ return
+ end
+ local c
+ if n == 0 or not n then
+ c = collected[1]
+ elseif n > 1 then
+ c = collected[n]
+ else
+ c = collected[nc-n+1]
+ end
+ if not c then
+ -- sorry
+ elseif c.ns == "" then
+ return c.tg
+ else
+ return c.ns .. ":" .. c.tg
end
end
local function tags(collected,nonamespace)
- if collected then
- local t, n = { }, 0
- for c=1,#collected do
- local e = collected[c]
- local ns, tg = e.ns, e.tg
- n = n + 1
- if nonamespace or ns == "" then
- t[n] = tg
- else
- t[n] = ns .. ":" .. tg
- end
+ if not collected then
+ return
+ end
+ local nc = #collected
+ if nc == 0 then
+ return
+ end
+ local t, n = { }, 0
+ for c=1,nc do
+ local e = collected[c]
+ local ns, tg = e.ns, e.tg
+ n = n + 1
+ if nonamespace or ns == "" then
+ t[n] = tg
+ else
+ t[n] = ns .. ":" .. tg
end
- return t
end
+ return t
end
-local function empty(collected)
- if collected then
- for c=1,#collected do
- local e = collected[c]
- if e then
- local edt = e.dt
- if edt then
- local n = #edt
- if n == 1 then
- local edk = edt[1]
- local typ = type(edk)
- if typ == "table" then
- return false
- elseif edk ~= "" then -- maybe an extra tester for spacing only
- return false
- end
- elseif n > 1 then
+local function empty(collected,spacesonly)
+ if not collected then
+ return true
+ end
+ local nc = #collected
+ if nc == 0 then
+ return true
+ end
+ for c=1,nc do
+ local e = collected[c]
+ if e then
+ local edt = e.dt
+ if edt then
+ local n = #edt
+ if n == 1 then
+ local edk = edt[1]
+ local typ = type(edk)
+ if typ == "table" then
+ return false
+ elseif edk ~= "" then
+ return false
+ elseif spacesonly and not find(edk,"%S") then
return false
end
+ elseif n > 1 then
+ return false
end
end
end
@@ -9641,14 +10572,14 @@ function xml.raw(id,pattern)
end
end
-function xml.text(id,pattern)
+function xml.text(id,pattern) -- brrr either content or element (when cdata)
if pattern then
-- return text(xmlfilter(id,pattern))
local collected = xmlfilter(id,pattern)
- return (collected and xmltostring(collected[1].dt)) or ""
+ return collected and #collected > 0 and xmltotext(collected[1]) or ""
elseif id then
-- return text(id)
- return xmltostring(id.dt) or ""
+ return xmltotext(id) or ""
else
return ""
end
@@ -9656,6 +10587,8 @@ end
xml.content = text
+--
+
function xml.position(id,pattern,n) -- element
return position(xmlfilter(id,pattern),n)
end
@@ -9664,8 +10597,8 @@ function xml.match(id,pattern) -- number
return match(xmlfilter(id,pattern))
end
-function xml.empty(id,pattern)
- return empty(xmlfilter(id,pattern))
+function xml.empty(id,pattern,spacesonly)
+ return empty(xmlfilter(id,pattern),spacesonly)
end
xml.all = xml.filter
@@ -9719,7 +10652,7 @@ if not modules then modules = { } end modules ['data-ini'] = {
license = "see context related readme files",
}
-local gsub, find, gmatch = string.gsub, string.find, string.gmatch
+local gsub, find, gmatch, char = string.gsub, string.find, string.gmatch, string.char
local concat = table.concat
local next, type = next, type
@@ -9781,7 +10714,7 @@ do
local homedir = osgetenv(ostype == "windows" and 'USERPROFILE' or 'HOME') or ''
if not homedir or homedir == "" then
- homedir = string.char(127) -- we need a value, later we wil trigger on it
+ homedir = char(127) -- we need a value, later we will trigger on it
end
homedir = file.collapsepath(homedir)
@@ -9954,7 +10887,7 @@ if not modules then modules = { } end modules ['data-exp'] = {
license = "see context related readme files",
}
-local format, find, gmatch, lower = string.format, string.find, string.gmatch, string.lower
+local format, find, gmatch, lower, char, sub = string.format, string.find, string.gmatch, string.lower, string.char, string.sub
local concat, sort = table.concat, table.sort
local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
local Ct, Cs, Cc, P, C, S = lpeg.Ct, lpeg.Cs, lpeg.Cc, lpeg.P, lpeg.C, lpeg.S
@@ -9974,19 +10907,6 @@ local resolvers = resolvers
-- all, when working on the main resolver code, I don't want to scroll
-- past this every time. See data-obs.lua for the gsub variant.
--- {a,b,c,d}
--- a,b,c/{p,q,r},d
--- a,b,c/{p,q,r}/d/{x,y,z}//
--- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
--- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
--- a{b,c}{d,e}f
--- {a,b,c,d}
--- {a,b,c/{p,q,r},d}
--- {a,b,c/{p,q,r}/d/{x,y,z}//}
--- {a,b,c/{p,q/{x,y,z}},d/{p,q,r}}
--- {a,b,c/{p,q/{x,y,z},w}v,d/{p,q,r}}
--- {$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,.local,}/web2c}
-
local function f_first(a,b)
local t, n = { }, 0
for s in gmatch(b,"[^,]+") do
@@ -10043,12 +10963,9 @@ local function splitpathexpr(str, newlist, validate) -- I couldn't resist lpeggi
str = lpegmatch(stripper_1,str)
if validate then
for s in gmatch(str,"[^,]+") do
- local restoreslashes = false
- if find(s, "//$") then restoreslashes = true end
s = validate(s)
if s then
n = n + 1 ; t[n] = s
- if restoreslashes and not find(s, "//$") then t[n] = t[n] .. '//' end
end
end
else
@@ -10081,41 +10998,56 @@ function resolvers.expandedpathfromlist(pathlist)
return newlist
end
+-- {a,b,c,d}
+-- a,b,c/{p,q,r},d
+-- a,b,c/{p,q,r}/d/{x,y,z}//
+-- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
+-- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
+-- a{b,c}{d,e}f
+-- {a,b,c,d}
+-- {a,b,c/{p,q,r},d}
+-- {a,b,c/{p,q,r}/d/{x,y,z}//}
+-- {a,b,c/{p,q/{x,y,z}},d/{p,q,r}}
+-- {a,b,c/{p,q/{x,y,z},w}v,d/{p,q,r}}
+-- {$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,.local,}/web2c}
+
local cleanup = lpeg.replacer {
{ "!" , "" },
{ "\\" , "/" },
}
-local homedir
-
-function resolvers.cleanpath(str)
- if not homedir then
- homedir = lpegmatch(cleanup,environment.homedir or "")
- if homedir == string.char(127) or homedir == "" or not lfs.isdir(homedir) then
- if trace_expansions then
- report_expansions("no home dir set, ignoring dependent paths")
- end
- function resolvers.cleanpath(str)
- if find(str,"~") then
- return "" -- special case
- else
- return str and lpegmatch(cleanup,str)
- end
- end
- else
- cleanup = lpeg.replacer {
- { "!" , "" },
- { "\\" , "/" },
- { "~" , homedir },
- }
- function resolvers.cleanpath(str)
- return str and lpegmatch(cleanup,str)
+function resolvers.cleanpath(str) -- tricky, maybe only simple paths
+ local doslashes = (P("\\")/"/" + 1)^0
+ local donegation = (P("!") /"" )^0
+ local homedir = lpegmatch(Cs(donegation * doslashes),environment.homedir or "")
+ if homedir == "~" or homedir == "" or not lfs.isdir(homedir) then
+ if trace_expansions then
+ report_expansions("no home dir set, ignoring dependent paths")
+ end
+ function resolvers.cleanpath(str)
+ if not str or find(str,"~") then
+ return "" -- special case
+ else
+ return lpegmatch(cleanup,str)
end
end
+ else
+ local dohome = ((P("~")+P("$HOME"))/homedir)^0
+ local cleanup = Cs(donegation * dohome * doslashes)
+ function resolvers.cleanpath(str)
+ return str and lpegmatch(cleanup,str) or ""
+ end
end
return resolvers.cleanpath(str)
end
+-- print(resolvers.cleanpath(""))
+-- print(resolvers.cleanpath("!"))
+-- print(resolvers.cleanpath("~"))
+-- print(resolvers.cleanpath("~/test"))
+-- print(resolvers.cleanpath("!~/test"))
+-- print(resolvers.cleanpath("~/test~test"))
+
-- This one strips quotes and funny tokens.
local expandhome = P("~") / "$HOME" -- environment.homedir
@@ -10140,8 +11072,8 @@ end
local cache = { }
----- splitter = Ct(lpeg.splitat(S(ostype == "windows" and ";" or ":;"))) -- maybe add ,
-local splitter = Ct(lpeg.splitat(";")) -- as we move towards urls, prefixes and use tables we no longer do :
+----- splitter = lpeg.tsplitat(S(ostype == "windows" and ";" or ":;")) -- maybe add ,
+local splitter = lpeg.tsplitat(";") -- as we move towards urls, prefixes and use tables we no longer do :
local backslashswapper = lpeg.replacer("\\","/")
@@ -10201,13 +11133,22 @@ end
-local weird = P(".")^1 + lpeg.anywhere(S("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t"))
+-- a lot of this caching can be stripped away when we have ssd's everywhere
+--
+-- we could cache all the (sub)paths here if needed
local attributes, directory = lfs.attributes, lfs.dir
+local weird = P(".")^1 + lpeg.anywhere(S("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t"))
+local timer = { }
+local scanned = { }
+local nofscans = 0
+local scancache = { }
+
local function scan(files,spec,path,n,m,r)
- local full = (path == "" and spec) or (spec .. path .. '/')
- local dirs, nofdirs = { }, 0
+ local full = (path == "" and spec) or (spec .. path .. '/')
+ local dirs = { }
+ local nofdirs = 0
for name in directory(full) do
if not lpegmatch(weird,name) then
local mode = attributes(full..name,'mode')
@@ -10245,25 +11186,122 @@ local function scan(files,spec,path,n,m,r)
files, n, m, r = scan(files,spec,dirs[i],n,m,r)
end
end
+ scancache[sub(full,1,-2)] = files
return files, n, m, r
end
-function resolvers.scanfiles(path,branch)
+local fullcache = { }
+
+function resolvers.scanfiles(path,branch,usecache)
+ statistics.starttiming(timer)
+ local realpath = resolvers.resolve(path) -- no shortcut
+ if usecache then
+ local files = fullcache[realpath]
+ if files then
+ if trace_locating then
+ report_expansions("using cached scan of path '%s', branch '%s'",path,branch or path)
+ end
+ return files
+ end
+ end
if trace_locating then
- report_expansions("scanning path '%s', branch '%s'",path, branch or path)
+ report_expansions("scanning path '%s', branch '%s'",path,branch or path)
end
- local realpath = resolvers.resolve(path) -- no shortcut
local files, n, m, r = scan({ },realpath .. '/',"",0,0,0)
- files.__path__ = path -- can be selfautoparent:texmf-whatever
- files.__files__ = n
- files.__directories__ = m
- files.__remappings__ = r
+ files.__path__ = path -- can be selfautoparent:texmf-whatever
+ files.__files__ = n
+ files.__directories__ = m
+ files.__remappings__ = r
if trace_locating then
report_expansions("%s files found on %s directories with %s uppercase remappings",n,m,r)
end
+ if usecache then
+ scanned[#scanned+1] = realpath
+ fullcache[realpath] = files
+ end
+ nofscans = nofscans + 1
+ statistics.stoptiming(timer)
+ return files
+end
+
+local function simplescan(files,spec,path) -- first match only, no map and such
+ local full = (path == "" and spec) or (spec .. path .. '/')
+ local dirs = { }
+ local nofdirs = 0
+ for name in directory(full) do
+ if not lpegmatch(weird,name) then
+ local mode = attributes(full..name,'mode')
+ if mode == 'file' then
+ if not files[name] then
+ -- only first match
+ files[name] = path
+ end
+ elseif mode == 'directory' then
+ nofdirs = nofdirs + 1
+ if path ~= "" then
+ dirs[nofdirs] = path..'/'..name
+ else
+ dirs[nofdirs] = name
+ end
+ end
+ end
+ end
+ if nofdirs > 0 then
+ sort(dirs)
+ for i=1,nofdirs do
+ files = simplescan(files,spec,dirs[i])
+ end
+ end
+ return files
+end
+
+local simplecache = { }
+local nofsharedscans = 0
+
+function resolvers.simplescanfiles(path,branch,usecache)
+ statistics.starttiming(timer)
+ local realpath = resolvers.resolve(path) -- no shortcut
+ if usecache then
+ local files = simplecache[realpath]
+ if not files then
+ files = scancache[realpath]
+ if files then
+ nofsharedscans = nofsharedscans + 1
+ end
+ end
+ if files then
+ if trace_locating then
+ report_expansions("using cached scan of path '%s', branch '%s'",path,branch or path)
+ end
+ return files
+ end
+ end
+ if trace_locating then
+ report_expansions("scanning path '%s', branch '%s'",path,branch or path)
+ end
+ local files = simplescan({ },realpath .. '/',"")
+ if trace_locating then
+ report_expansions("%s files found",table.count(files))
+ end
+ if usecache then
+ scanned[#scanned+1] = realpath
+ simplecache[realpath] = files
+ end
+ nofscans = nofscans + 1
+ statistics.stoptiming(timer)
return files
end
+function resolvers.scandata()
+ table.sort(scanned)
+ return {
+ n = nofscans,
+ shared = nofsharedscans,
+ time = statistics.elapsedtime(timer),
+ paths = scanned,
+ }
+end
+
end -- of closure
@@ -10361,12 +11399,12 @@ local relations = allocate { -- todo: handlers also here
mp = {
names = { "mp" },
variable = 'MPINPUTS',
- suffixes = { 'mp' },
+ suffixes = { 'mp', 'mpvi', 'mpiv', 'mpii' },
},
tex = {
names = { "tex" },
variable = 'TEXINPUTS',
- suffixes = { 'tex', "mkiv", "mkiv", "mkii" },
+ suffixes = { 'tex', "mkvi", "mkiv", "mkii" },
},
icc = {
names = { "icc", "icc profile", "icc profiles" },
@@ -10503,7 +11541,7 @@ end
resolvers.updaterelations() -- push this in the metatable -> newindex
local function simplified(t,k)
- return rawget(t,lower(gsub(k," ","")))
+ return k and rawget(t,lower(gsub(k," ",""))) or nil
end
setmetatableindex(formats, simplified)
@@ -10589,6 +11627,7 @@ luatools with a recache feature.</p>
--ldx]]--
local format, lower, gsub, concat = string.format, string.lower, string.gsub, table.concat
+local serialize, serializetofile = table.serialize, table.tofile
local mkdirs, isdir = dir.mkdirs, lfs.isdir
local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
@@ -10651,11 +11690,7 @@ local function identify()
end
elseif not writable and caches.force then
local cacheparent = file.dirname(cachepath)
- -- TH: added 'or true' for deeply buried non-existent caches.
- -- file.is_writable() is not really important here, since this
- -- branch is only checked for as-yet non-existent paths, and
- -- it guards against mkdirs() failing, anyway.
- if file.is_writable(cacheparent) or true then
+ if file.is_writable(cacheparent) and true then -- we go on anyway (needed for mojca's kind of paths)
if not caches.ask or io.ask(format("\nShould I create the cache path %s?",cachepath), "no", { "yes", "no" }) == "yes" then
mkdirs(cachepath)
if isdir(cachepath) and file.is_writable(cachepath) then
@@ -10746,7 +11781,7 @@ function caches.usedpaths()
end
function caches.configfiles()
- return table.concat(resolvers.instance.specification,";")
+ return concat(resolvers.instance.specification,";")
end
function caches.hashed(tree)
@@ -10870,9 +11905,9 @@ function caches.savedata(filepath,filename,data,raw)
end
data.cache_uuid = os.uuid()
if caches.direct then
- file.savedata(tmaname,table.serialize(data,true,saveoptions))
+ file.savedata(tmaname,serialize(data,true,saveoptions))
else
- table.tofile(tmaname,data,true,saveoptions)
+ serializetofile(tmaname,data,true,saveoptions)
end
utilities.lua.compile(tmaname,tmcname)
end
@@ -10939,7 +11974,7 @@ function caches.savecontent(cachename,dataname,content)
content = content,
uuid = os.uuid(),
}
- local ok = io.savedata(luaname,table.serialize(data,true))
+ local ok = io.savedata(luaname,serialize(data,true))
if ok then
if trace_locating then
report_resolvers("category '%s', cachename '%s' saved in '%s'",dataname,cachename,luaname)
@@ -11112,7 +12147,7 @@ if not modules then modules = { } end modules ['data-res'] = {
-- instance but for practical purposes we now avoid this and use a
-- instance variable. We always have one instance active (sort of global).
--- todo: cache:/// home:///
+-- todo: cache:/// home:/// selfautoparent:/// (sometime end 2012)
local format, gsub, find, lower, upper, match, gmatch = string.format, string.gsub, string.find, string.lower, string.upper, string.match, string.gmatch
local concat, insert, sortedkeys = table.concat, table.insert, table.sortedkeys
@@ -11155,7 +12190,58 @@ resolvers.criticalvars = allocate { "SELFAUTOLOC", "SELFAUTODIR", "SELFAUTOPARE
resolvers.luacnfname = 'texmfcnf.lua'
resolvers.luacnfstate = "unknown"
-resolvers.luacnfspec = '{selfautodir:,selfautoparent:};{selfautodir:,selfautoparent:}{/share,}/texmf{-local,};{selfautodir:,selfautoparent:}{/share,}/texmf{-local,}/web2c'
+-- The web2c tex binaries as well as kpse have built in paths for the configuration
+-- files and there can be a depressing truckload of them. This is actually the weak
+-- spot of a distribution. So we don't want:
+--
+-- resolvers.luacnfspec = '{$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,}/web2c}'
+--
+-- but instead use:
+--
+-- resolvers.luacnfspec = 'selfautoparent:{/texmf{-local,}{,/web2c}}'
+--
+-- which does not make texlive happy as there is a texmf-local tree one level up
+-- (sigh), so we need this. (We can assume web2c as mkiv does not run on older
+-- texlives anyway.)
+--
+-- texlive:
+--
+-- selfautodir:
+-- selfautoparent:
+-- selfautodir:share/texmf-local/web2c
+-- selfautodir:share/texmf/web2c
+-- selfautodir:texmf-local/web2c
+-- selfautodir:texmf/web2c
+-- selfautoparent:share/texmf-local/web2c
+-- selfautoparent:share/texmf/web2c
+-- selfautoparent:texmf-local/web2c
+-- selfautoparent:texmf/web2c
+--
+-- minimals:
+--
+-- home:texmf/web2c
+-- selfautoparent:texmf-local/web2c
+-- selfautoparent:texmf-context/web2c
+-- selfautoparent:texmf/web2c
+
+if this_is_texlive then
+ -- resolvers.luacnfspec = '{selfautodir:,selfautoparent:}{,{/share,}/texmf{-local,}/web2c}'
+ -- resolvers.luacnfspec = '{selfautodir:{/share,}/texmf-local/web2c,selfautoparent:{/share,}/texmf{-local,}/web2c}'
+ -- resolvers.luacnfspec = 'selfautodir:/texmf-local/web2c;selfautoparent:/texmf{-local,}/web2c'
+ resolvers.luacnfspec = 'selfautodir:;selfautoparent:;{selfautodir:,selfautoparent:}{/share,}/texmf{-local,}/web2c'
+else
+ resolvers.luacnfspec = 'home:texmf/web2c;selfautoparent:texmf{-local,-context,}/web2c'
+end
+
+-- which (as we want users to use the web2c path) can be simplified to this:
+--
+-- if environment and environment.ownpath and string.find(environment.ownpath,"[\\/]texlive[\\/]") then
+-- resolvers.luacnfspec = 'selfautodir:/texmf-local/web2c,selfautoparent:/texmf-local/web2c,selfautoparent:/texmf/web2c'
+-- else
+-- resolvers.luacnfspec = 'selfautoparent:/texmf-local/web2c,selfautoparent:/texmf/web2c'
+-- end
+
+
local unset_variable = "unset"
@@ -11177,7 +12263,7 @@ local instance = resolvers.instance or nil -- the current one (fast access)
function resolvers.setenv(key,value,raw)
if instance then
-- this one will be consulted first when we stay inside
- -- the current environment
+ -- the current environment (prefixes are not resolved here)
instance.environment[key] = value
-- we feed back into the environment, and as this is used
-- by other applications (via os.execute) we need to make
@@ -11257,6 +12343,7 @@ function resolvers.newinstance() -- todo: all vars will become lowercase and alp
remember = true,
diskcache = true,
renewcache = false,
+ renewtree = false,
loaderror = false,
savelists = true,
pattern = nil, -- lists
@@ -11348,7 +12435,7 @@ local function makepathexpression(str)
end
end
-local function reportcriticalvariables()
+local function reportcriticalvariables(cnfspec)
if trace_locating then
for i=1,#resolvers.criticalvars do
local k = resolvers.criticalvars[i]
@@ -11356,6 +12443,14 @@ local function reportcriticalvariables()
report_resolving("variable '%s' set to '%s'",k,v)
end
report_resolving()
+ if cnfspec then
+ if type(cnfspec) == "table" then
+ report_resolving("using configuration specification '%s'",concat(cnfspec,","))
+ else
+ report_resolving("using configuration specification '%s'",cnfspec)
+ end
+ end
+ report_resolving()
end
reportcriticalvariables = function() end
end
@@ -11370,7 +12465,7 @@ local function identify_configuration_files()
else
resolvers.luacnfstate = "environment"
end
- reportcriticalvariables()
+ reportcriticalvariables(cnfspec)
local cnfpaths = expandedpathfromlist(resolvers.splitpath(cnfspec))
local luacnfname = resolvers.luacnfname
for i=1,#cnfpaths do
@@ -11406,6 +12501,19 @@ local function load_configuration_files()
if blob then
local setups = instance.setups
local data = blob()
+ local parent = data and data.parent
+ if parent then
+ local filename = filejoin(pathname,parent)
+ local realname = resolvers.resolve(filename) -- no shortcut
+ local blob = loadfile(realname)
+ if blob then
+ local parentdata = blob()
+ if parentdata then
+ report_resolving("loading configuration file '%s'",filename)
+ data = table.merged(parentdata,data)
+ end
+ end
+ end
data = data and data.content
if data then
if trace_locating then
@@ -11495,11 +12603,6 @@ local function locate_file_databases()
local runtime = stripped == path
path = resolvers.cleanpath(path)
local spec = resolvers.splitmethod(stripped)
- -- TH Original did this the other way around (elseif and if tests
- -- reversed) but then the else branch was never reached, as 'runtime'
- -- would effectively be ignored. In turn, that meant that the wrong
- -- locator method was used, such that tree: and file: were treated
- -- identically (cached and no runtime search).
if runtime and (spec.noscheme or spec.scheme == "file") then
stripped = "tree:///" .. stripped
elseif spec.scheme == "cache" or spec.scheme == "file" then
@@ -11507,7 +12610,7 @@ local function locate_file_databases()
end
if trace_locating then
if runtime then
- report_resolving("locating list of '%s' (runtime)",path)
+ report_resolving("locating list of '%s' (runtime) (%s)",path,stripped)
else
report_resolving("locating list of '%s' (cached)",path)
end
@@ -11551,6 +12654,39 @@ local function save_file_databases() -- will become cachers
end
end
+function resolvers.renew(hashname)
+ if hashname and hashname ~= "" then
+ local expanded = resolvers.expansion(hashname) or ""
+ if expanded ~= "" then
+ if trace_locating then
+ report_resolving("identifying tree '%s' from '%s'",expanded,hashname)
+ end
+ hashname = expanded
+ else
+ if trace_locating then
+ report_resolving("identifying tree '%s'",hashname)
+ end
+ end
+ local realpath = resolvers.resolve(hashname)
+ if lfs.isdir(realpath) then
+ if trace_locating then
+ report_resolving("using path '%s'",realpath)
+ end
+ methodhandler('generators',hashname)
+ -- could be shared
+ local content = instance.files[hashname]
+ caches.collapsecontent(content)
+ if trace_locating then
+ report_resolving("saving tree '%s'",hashname)
+ end
+ caches.savecontent(hashname,"files",content)
+ -- till here
+ else
+ report_resolving("invalid path '%s'",realpath)
+ end
+ end
+end
+
local function load_databases()
locate_file_databases()
if instance.diskcache and not instance.renewcache then
@@ -11838,6 +12974,7 @@ local function collect_files(names)
if dname == "" or find(dname,"^%.") then
dname = false
else
+dname = gsub(dname,"*","%.*")
dname = "/" .. dname .. "$"
end
local hashes = instance.hashes
@@ -11897,9 +13034,22 @@ local function collect_files(names)
return noffiles > 0 and filelist or nil
end
-function resolvers.registerintrees(name)
- if not find(name,"^%.") then
- instance.foundintrees[name] = (instance.foundintrees[name] or 0) + 1 -- maybe only one
+local fit = { }
+
+function resolvers.registerintrees(filename,format,filetype,usedmethod,foundname)
+ local foundintrees = instance.foundintrees
+ if usedmethod == "direct" and filename == foundname and fit[foundname] then
+ -- just an extra lookup after a test on presence
+ else
+ local t = {
+ filename = filename,
+ format = format ~= "" and format or nil,
+ filetype = filetype ~= "" and filetype or nil,
+ usedmethod = usedmethod,
+ foundname = foundname,
+ }
+ fit[foundname] = t
+ foundintrees[#foundintrees+1] = t
end
end
@@ -11919,297 +13069,423 @@ end
local preparetreepattern = Cs((P(".")/"%%." + P("-")/"%%-" + P(1))^0 * Cc("$"))
--- this one is split in smaller functions but it needs testing
+-- -- -- begin of main file search routing -- -- -- needs checking as previous has been patched
-local function collect_instance_files(filename,askedformat,allresults) -- todo : plugin (scanners, checkers etc)
- local result = { }
- local stamp = nil
- askedformat = askedformat or ""
- filename = collapsepath(filename)
- -- speed up / beware: format problem
- if instance.remember and not allresults then
- stamp = filename .. "--" .. askedformat
- if instance.found[stamp] then
+local collect_instance_files
+
+local function find_analyze(filename,askedformat,allresults)
+ local filetype, wantedfiles, ext = '', { }, fileextname(filename)
+ -- too tricky as filename can be bla.1.2.3:
+ --
+ -- if not suffixmap[ext] then
+ -- wantedfiles[#wantedfiles+1] = filename
+ -- end
+ wantedfiles[#wantedfiles+1] = filename
+ if askedformat == "" then
+ if ext == "" or not suffixmap[ext] then
+ local defaultsuffixes = resolvers.defaultsuffixes
+ for i=1,#defaultsuffixes do
+ local forcedname = filename .. '.' .. defaultsuffixes[i]
+ wantedfiles[#wantedfiles+1] = forcedname
+ filetype = resolvers.formatofsuffix(forcedname)
+ if trace_locating then
+ report_resolving("forcing filetype '%s'",filetype)
+ end
+ end
+ else
+ filetype = resolvers.formatofsuffix(filename)
if trace_locating then
- report_resolving("remembered file '%s'",filename)
+ report_resolving("using suffix based filetype '%s'",filetype)
end
- resolvers.registerintrees(filename) -- for tracing used files
- return instance.found[stamp]
end
- end
- if not dangerous[askedformat] then
- if isreadable(filename) then
- if trace_detail then
- report_resolving("file '%s' found directly",filename)
- end
- if stamp then
- instance.found[stamp] = { filename }
+ else
+ if ext == "" or not suffixmap[ext] then
+ local format_suffixes = suffixes[askedformat]
+ if format_suffixes then
+ for i=1,#format_suffixes do
+ wantedfiles[#wantedfiles+1] = filename .. "." .. format_suffixes[i]
+ end
end
- return { filename }
+ end
+ filetype = askedformat
+ if trace_locating then
+ report_resolving("using given filetype '%s'",filetype)
end
end
+ return filetype, wantedfiles
+end
+
+local function find_direct(filename,allresults)
+ if not dangerous[askedformat] and isreadable(filename) then
+ if trace_detail then
+ report_resolving("file '%s' found directly",filename)
+ end
+ return "direct", { filename }
+ end
+end
+
+local function find_wildcard(filename,allresults)
if find(filename,'%*') then
if trace_locating then
report_resolving("checking wildcard '%s'", filename)
end
- result = resolvers.findwildcardfiles(filename) -- we can use th elocal
- elseif file.is_qualified_path(filename) then
- if isreadable(filename) then
- if trace_locating then
- report_resolving("qualified name '%s'", filename)
- end
- result = { filename }
- else
- local forcedname, ok, suffix = "", false, fileextname(filename)
- if suffix == "" then -- why
- local format_suffixes = askedformat == "" and resolvers.defaultsuffixes or suffixes[askedformat]
- if format_suffixes then
- for i=1,#format_suffixes do
- local s = format_suffixes[i]
- forcedname = filename .. "." .. s
- if isreadable(forcedname) then
- if trace_locating then
- report_resolving("no suffix, forcing format filetype '%s'", s)
- end
- result, ok = { forcedname }, true
- break
- end
- end
- end
- end
- if not ok and suffix ~= "" then
- -- try to find in tree (no suffix manipulation), here we search for the
- -- matching last part of the name
- local basename = filebasename(filename)
- local pattern = lpegmatch(preparetreepattern,filename)
- -- messy .. to be sorted out
- local savedformat = askedformat
- local format = savedformat or ""
- if format == "" then
- askedformat = resolvers.formatofsuffix(suffix)
- end
- if not format then
- askedformat = "othertextfiles" -- kind of everything, maybe texinput is better
- end
- --
- if basename ~= filename then
- local resolved = collect_instance_files(basename,askedformat,allresults)
- if #result == 0 then -- shouldn't this be resolved ?
- local lowered = lower(basename)
- if filename ~= lowered then
- resolved = collect_instance_files(lowered,askedformat,allresults)
- end
- end
- resolvers.format = savedformat
- --
- for r=1,#resolved do
- local rr = resolved[r]
- if find(rr,pattern) then
- result[#result+1], ok = rr, true
- end
- end
- end
- -- a real wildcard:
- --
- -- if not ok then
- -- local filelist = collect_files({basename})
- -- for f=1,#filelist do
- -- local ff = filelist[f][3] or ""
- -- if find(ff,pattern) then
- -- result[#result+1], ok = ff, true
- -- end
- -- end
- -- end
- end
- if not ok and trace_locating then
- report_resolving("qualified name '%s'", filename)
- end
+ local method, result = resolvers.findwildcardfiles(filename)
+ if result then
+ return "wildcard", result
end
- else
- -- search spec
- local filetype, done, wantedfiles, ext = '', false, { }, fileextname(filename)
- -- -- tricky as filename can be bla.1.2.3
- -- if not suffixmap[ext] then --- probably needs to be done elsewhere too
- -- wantedfiles[#wantedfiles+1] = filename
- -- end
- wantedfiles[#wantedfiles+1] = filename
- if askedformat == "" then
- if ext == "" or not suffixmap[ext] then
- local defaultsuffixes = resolvers.defaultsuffixes
- for i=1,#defaultsuffixes do
- local forcedname = filename .. '.' .. defaultsuffixes[i]
- wantedfiles[#wantedfiles+1] = forcedname
- filetype = resolvers.formatofsuffix(forcedname)
+ end
+end
+
+local function find_qualified(filename,allresults) -- this one will be split too
+ if not file.is_qualified_path(filename) then
+ return
+ end
+ if trace_locating then
+ report_resolving("checking qualified name '%s'", filename)
+ end
+ if isreadable(filename) then
+ if trace_detail then
+ report_resolving("qualified file '%s' found", filename)
+ end
+ return "qualified", { filename }
+ end
+ if trace_detail then
+ report_resolving("locating qualified file '%s'", filename)
+ end
+ local forcedname, suffix = "", fileextname(filename)
+ if suffix == "" then -- why
+ local format_suffixes = askedformat == "" and resolvers.defaultsuffixes or suffixes[askedformat]
+ if format_suffixes then
+ for i=1,#format_suffixes do
+ local s = format_suffixes[i]
+ forcedname = filename .. "." .. s
+ if isreadable(forcedname) then
if trace_locating then
- report_resolving("forcing filetype '%s'",filetype)
+ report_resolving("no suffix, forcing format filetype '%s'", s)
end
+ return "qualified", { forcedname }
end
- else
- filetype = resolvers.formatofsuffix(filename)
- if trace_locating then
- report_resolving("using suffix based filetype '%s'",filetype)
+ end
+ end
+ end
+ if suffix and suffix ~= "" then
+ -- try to find in tree (no suffix manipulation), here we search for the
+ -- matching last part of the name
+ local basename = filebasename(filename)
+ local pattern = lpegmatch(preparetreepattern,filename)
+ -- messy .. to be sorted out
+ local savedformat = askedformat
+ local format = savedformat or ""
+ if format == "" then
+ askedformat = resolvers.formatofsuffix(suffix)
+ end
+ if not format then
+ askedformat = "othertextfiles" -- kind of everything, maybe all
+ end
+ --
+ if basename ~= filename then
+ local resolved = collect_instance_files(basename,askedformat,allresults)
+ if #resolved == 0 then
+ local lowered = lower(basename)
+ if filename ~= lowered then
+ resolved = collect_instance_files(lowered,askedformat,allresults)
end
end
- else
- if ext == "" or not suffixmap[ext] then
- local format_suffixes = suffixes[askedformat]
- if format_suffixes then
- for i=1,#format_suffixes do
- wantedfiles[#wantedfiles+1] = filename .. "." .. format_suffixes[i]
+ resolvers.format = savedformat
+ --
+ if #resolved > 0 then
+ local result = { }
+ for r=1,#resolved do
+ local rr = resolved[r]
+ if find(rr,pattern) then
+ result[#result+1] = rr
end
end
+ if #result > 0 then
+ return "qualified", result
+ end
end
- filetype = askedformat
- if trace_locating then
- report_resolving("using given filetype '%s'",filetype)
- end
end
- local typespec = resolvers.variableofformat(filetype)
- local pathlist = resolvers.expandedpathlist(typespec)
- if not pathlist or #pathlist == 0 then
- -- no pathlist, access check only / todo == wildcard
- if trace_detail then
- report_resolving("checking filename '%s', filetype '%s', wanted files '%s'",filename, filetype or '?',concat(wantedfiles," | "))
+ -- a real wildcard:
+ --
+ -- local filelist = collect_files({basename})
+ -- result = { }
+ -- for f=1,#filelist do
+ -- local ff = filelist[f][3] or ""
+ -- if find(ff,pattern) then
+ -- result[#result+1], ok = ff, true
+ -- end
+ -- end
+ -- if #result > 0 then
+ -- return "qualified", result
+ -- end
+ end
+end
+
+local function check_subpath(fname)
+ if isreadable(fname) then
+ if trace_detail then
+ report_resolving("found '%s' by deep scanning",fname)
+ end
+ return fname
+ end
+end
+
+local function find_intree(filename,filetype,wantedfiles,allresults)
+ local typespec = resolvers.variableofformat(filetype)
+ local pathlist = resolvers.expandedpathlist(typespec)
+ local method = "intree"
+ if pathlist and #pathlist > 0 then
+ -- list search
+ local filelist = collect_files(wantedfiles)
+ local dirlist = { }
+ if filelist then
+ for i=1,#filelist do
+ dirlist[i] = filedirname(filelist[i][3]) .. "/" -- was [2] .. gamble
end
- for k=1,#wantedfiles do
- local fname = wantedfiles[k]
- if fname and isreadable(fname) then
- filename, done = fname, true
- result[#result+1] = filejoin('.',fname)
- break
+ end
+ if trace_detail then
+ report_resolving("checking filename '%s'",filename)
+ end
+ local result = { }
+ for k=1,#pathlist do
+ local path = pathlist[k]
+ local pathname = lpegmatch(inhibitstripper,path)
+ local doscan = path == pathname -- no ^!!
+ if not find (pathname,'//$') then
+ doscan = false -- we check directly on the path
+ end
+ local done = false
+ -- using file list
+ if filelist then -- database
+ -- compare list entries with permitted pattern -- /xx /xx//
+ local expression = makepathexpression(pathname)
+ if trace_detail then
+ report_resolving("using pattern '%s' for path '%s'",expression,pathname)
end
- end
- -- this is actually 'other text files' or 'any' or 'whatever'
- local filelist = collect_files(wantedfiles)
- local fl = filelist and filelist[1]
- if fl then
- filename = fl[3] -- not local?
- result[#result+1] = resolvers.resolve(filename)
- done = true
- end
- else
- -- list search
- local filelist = collect_files(wantedfiles)
- local dirlist = { }
- if filelist then
- for i=1,#filelist do
- dirlist[i] = filedirname(filelist[i][3]) .. "/" -- was [2] .. gamble
- end
- end
- if trace_detail then
- report_resolving("checking filename '%s'",filename)
- end
- for k=1,#pathlist do
- local path = pathlist[k]
- local pathname = lpegmatch(inhibitstripper,path)
- local doscan = path == pathname -- no ^!!
- if not find (pathname, '//$') then doscan = false end
- done = false
- -- using file list
- if filelist then
- -- compare list entries with permitted pattern -- /xx /xx//
- local expression = makepathexpression(pathname)
- if trace_detail then
- report_resolving("using pattern '%s' for path '%s'",expression,pathname)
- end
- for k=1,#filelist do
- local fl = filelist[k]
- local f = fl[2]
- local d = dirlist[k]
- if find(d,expression) then
- -- todo, test for readable
- result[#result+1] = resolvers.resolve(fl[3]) -- no shortcut
- done = true
- if allresults then
- if trace_detail then
- report_resolving("match to '%s' in hash for file '%s' and path '%s', continue scanning",expression,f,d)
- end
- else
- if trace_detail then
- report_resolving("match to '%s' in hash for file '%s' and path '%s', quit scanning",expression,f,d)
- end
- break
+ for k=1,#filelist do
+ local fl = filelist[k]
+ local f = fl[2]
+ local d = dirlist[k]
+ if find(d,expression) then
+ -- todo, test for readable
+ result[#result+1] = resolvers.resolve(fl[3]) -- no shortcut
+ done = true
+ if allresults then
+ if trace_detail then
+ report_resolving("match to '%s' in hash for file '%s' and path '%s', continue scanning",expression,f,d)
end
- elseif trace_detail then
- report_resolving("no match to '%s' in hash for file '%s' and path '%s'",expression,f,d)
+ else
+ if trace_detail then
+ report_resolving("match to '%s' in hash for file '%s' and path '%s', quit scanning",expression,f,d)
+ end
+ break
end
+ elseif trace_detail then
+ report_resolving("no match to '%s' in hash for file '%s' and path '%s'",expression,f,d)
end
end
- if not done then
- -- check if on disk / unchecked / does not work at all / also zips
- -- TH perhaps it did not work because of missing resolvers.resolve()
- -- and resolvers.scanfiles() calls ...
- pathname = resolvers.resolve(pathname)
- local scheme = url.hasscheme(pathname)
- if not scheme or scheme == "file" then
- local pname = gsub(pathname,"%.%*$",'')
- if not find(pname,"%*") then
- local ppname = gsub(pname,"/+$","")
- if can_be_dir(ppname) then
- local files = {}
- if doscan then files = resolvers.scanfiles(ppname) end
+ end
+ if done then
+ method = "database"
+ else
+ method = "filesystem" -- bonus, even when !! is specified
+ pathname = gsub(pathname,"/+$","")
+ pathname = resolvers.resolve(pathname)
+ local scheme = url.hasscheme(pathname)
+ if not scheme or scheme == "file" then
+ local pname = gsub(pathname,"%.%*$",'')
+ if not find(pname,"%*") then
+ if can_be_dir(pname) then
+ -- quick root scan first
+ for k=1,#wantedfiles do
+ local w = wantedfiles[k]
+ local fname = check_subpath(filejoin(pname,w))
+ if fname then
+ result[#result+1] = fname
+ done = true
+ if not allresults then
+ break
+ end
+ end
+ end
+ if not done and doscan then
+ -- collect files in path (and cache the result)
+ local files = resolvers.simplescanfiles(pname,false,true)
for k=1,#wantedfiles do
local w = wantedfiles[k]
- local subpath = ''
- if files[w] then
- subpath = type(files[w]) == "table" and files[w][1] or files[w]
- end
- local fname = filejoin(ppname,subpath,w)
- if isreadable(fname) then
- if trace_detail then
- report_resolving("found '%s' by scanning",fname)
+ local subpath = files[w]
+ if not subpath or subpath == "" then
+ -- rootscan already done
+ elseif type(subpath) == "string" then
+ local fname = check_subpath(filejoin(pname,subpath,w))
+ if fname then
+ result[#result+1] = fname
+ done = true
+ if not allresults then
+ break
+ end
+ end
+ else
+ for i=1,#subpath do
+ local sp = subpath[i]
+ if sp == "" then
+ -- roottest already done
+ else
+ local fname = check_subpath(filejoin(pname,sp,w))
+ if fname then
+ result[#result+1] = fname
+ done = true
+ if not allresults then
+ break
+ end
+ end
+ end
+ end
+ if done and not allresults then
+ break
end
- result[#result+1] = fname
- done = true
- if not allresults then break end
end
end
- else
- -- no access needed for non existing path, speedup (esp in large tree with lots of fake)
end
end
+ else
+ -- no access needed for non existing path, speedup (esp in large tree with lots of fake)
end
end
- if not done and doscan then
- -- todo: slow path scanning ... although we now have tree:// supported in $TEXMF
- end
- if done and not allresults then break end
+ end
+ -- todo recursive scanning
+ if done and not allresults then
+ break
end
end
+ if #result > 0 then
+ return method, result
+ end
end
- for k=1,#result do
- local rk = collapsepath(result[k])
- result[k] = rk
- resolvers.registerintrees(rk) -- for tracing used files
+end
+
+local function find_onpath(filename,filetype,wantedfiles,allresults)
+ if trace_detail then
+ report_resolving("checking filename '%s', filetype '%s', wanted files '%s'",filename, filetype or '?',concat(wantedfiles," | "))
+ end
+ local result = { }
+ for k=1,#wantedfiles do
+ local fname = wantedfiles[k]
+ if fname and isreadable(fname) then
+ filename = fname
+ result[#result+1] = filejoin('.',fname)
+ if not allresults then
+ break
+ end
+ end
end
- if stamp then
- instance.found[stamp] = result
+ if #result > 0 then
+ return "onpath", result
end
- return result
end
--- -- -- begin of main file search routing -- -- --
-
-
-
-
-
-
-
+local function find_otherwise(filename,filetype,wantedfiles,allresults) -- other text files | any | whatever
+ local filelist = collect_files(wantedfiles)
+ local fl = filelist and filelist[1]
+ if fl then
+ return "otherwise", { resolvers.resolve(fl[3]) } -- filename
+ end
+end
+-- we could have a loop over the 6 functions but then we'd have to
+-- always analyze
+collect_instance_files = function(filename,askedformat,allresults) -- uses nested
+ askedformat = askedformat or ""
+ filename = collapsepath(filename)
+ if allresults then
+ -- no need for caching, only used for tracing
+ local filetype, wantedfiles = find_analyze(filename,askedformat)
+ local results = {
+ { find_direct (filename,true) },
+ { find_wildcard (filename,true) },
+ { find_qualified(filename,true) },
+ { find_intree (filename,filetype,wantedfiles,true) },
+ { find_onpath (filename,filetype,wantedfiles,true) },
+ { find_otherwise(filename,filetype,wantedfiles,true) },
+ }
+ local result, status, done = { }, { }, { }
+ for k, r in next, results do
+ local method, list = r[1], r[2]
+ if method and list then
+ for i=1,#list do
+ local c = collapsepath(list[i])
+ if not done[c] then
+ result[#result+1] = c
+ done[c] = true
+ end
+ status[#status+1] = format("%-10s: %s",method,c)
+ end
+ end
+ end
+ if trace_detail then
+ report_resolving("lookup status: %s",table.serialize(status,filename))
+ end
+ return result, status
+ else
+ local method, result, stamp, filetype, wantedfiles
+ if instance.remember then
+ stamp = format("%s--%s", filename, askedformat)
+ result = stamp and instance.found[stamp]
+ if result then
+ if trace_locating then
+ report_resolving("remembered file '%s'",filename)
+ end
+ return result
+ end
+ end
+ method, result = find_direct(filename)
+ if not result then
+ method, result = find_wildcard(filename)
+ if not result then
+ method, result = find_qualified(filename)
+ if not result then
+ filetype, wantedfiles = find_analyze(filename,askedformat)
+ method, result = find_intree(filename,filetype,wantedfiles)
+ if not result then
+ method, result = find_onpath(filename,filetype,wantedfiles)
+ if not result then
+ method, result = find_otherwise(filename,filetype,wantedfiles)
+ end
+ end
+ end
+ end
+ end
+ if result and #result > 0 then
+ local foundname = collapsepath(result[1])
+ resolvers.registerintrees(filename,askedformat,filetype,method,foundname)
+ result = { foundname }
+ else
+ result = { } -- maybe false
+ end
+ if stamp then
+ if trace_locating then
+ report_resolving("remembering file '%s'",filename)
+ end
+ instance.found[stamp] = result
+ end
+ return result
+ end
+end
-- -- -- end of main file search routing -- -- --
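
-- illustrative sketch (hypothetical file names): the cascade above is reached
-- through the public helpers; the first finder that yields a result wins and,
-- when instance.remember is set, the answer is cached under filename--format:
--
-- local one = resolvers.findfile ("context.mkiv")   -- first hit (or "" when nothing is found)
-- local all = resolvers.findfiles("texmf.cnf")      -- list of all hits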
+
local function findfiles(filename,filetype,allresults)
- local result = collect_instance_files(filename,filetype or "",allresults)
- if #result == 0 then
+ local result, status = collect_instance_files(filename,filetype or "",allresults)
+ if not result or #result == 0 then
local lowered = lower(filename)
if filename ~= lowered then
- return collect_instance_files(lowered,filetype or "",allresults)
+ result, status = collect_instance_files(lowered,filetype or "",allresults)
end
end
- return result
+ return result or { }, status
end
function resolvers.findfiles(filename,filetype)
@@ -12370,6 +13646,10 @@ function resolvers.load(option)
return files and next(files) and true
end
+function resolvers.loadtime()
+ return statistics.elapsedtime(instance)
+end
+
local function report(str)
if trace_locating then
report_resolving(str) -- has already verbose
@@ -12383,6 +13663,9 @@ function resolvers.dowithfilesandreport(command, files, ...) -- will move
if trace_locating then
report('') -- ?
end
+ if type(files) == "string" then
+ files = { files }
+ end
for f=1,#files do
local file = files[f]
local result = command(file,...)
@@ -12540,6 +13823,8 @@ local cleanpath, findgivenfile, expansion = resolvers.cleanpath, resolvers.findg
local getenv = resolvers.getenv -- we can probably also use resolvers.expansion
local P, Cs, lpegmatch = lpeg.P, lpeg.Cs, lpeg.match
+-- getenv = function(...) return resolvers.getenv(...) end -- needs checking (definitions changes later on)
+
prefixes.environment = function(str)
return cleanpath(expansion(str))
end
@@ -12578,7 +13863,7 @@ end
prefixes.filename = function(str)
local fullname = findgivenfile(str) or ""
- return cleanpath(file.basename((fullname ~= "" and fullname) or str))
+ return cleanpath(file.basename((fullname ~= "" and fullname) or str)) -- no cleanpath needed here
end
prefixes.pathname = function(str)
@@ -12621,8 +13906,9 @@ function resolvers.allprefixes(separator)
end
local function _resolve_(method,target)
- if prefixes[method] then
- return prefixes[method](target)
+ local action = prefixes[method]
+ if action then
+ return action(target)
else
return method .. ":" .. target
end
@@ -12637,7 +13923,7 @@ end
local function resolve(str) -- use schemes, this one is then for the commandline only
local res = resolved[str]
if not res then
- res = gsub(str,"([a-z][a-z]+):([^ \"\';]*)",_resolve_)
+ res = gsub(str,"([a-z][a-z]+):([^ \"\';]*)",_resolve_) -- home:xx;selfautoparent:xx; etc
resolved[str] = res
abstract[res] = str
end
@@ -12797,7 +14083,7 @@ end
function generators.file(specification)
local path = specification.filename
- local content = resolvers.scanfiles(path)
+ local content = resolvers.scanfiles(path,false,true) -- scan once
resolvers.registerfilehash(path,content,true)
end
@@ -13152,6 +14438,18 @@ local archives = zip.archives
zip.registeredfiles = zip.registeredfiles or { }
local registeredfiles = zip.registeredfiles
+local limited = false
+
+directives.register("system.inputmode", function(v)
+ if not limited then
+ local i_limiter = io.i_limiter(v)
+ if i_limiter then
+ zip.open = i_limiter.protect(zip.open)
+ limited = true
+ end
+ end
+end)
+
local function validzip(str) -- todo: use url splitter
if not find(str,"^zip://") then
return "zip:///" .. str
@@ -13419,11 +14717,11 @@ function resolvers.finders.tree(specification)
end
function resolvers.locators.tree(specification)
- -- TH: resolvers.resolve() call was missing here
- local name = resolvers.resolve(specification.filename)
- if name ~= '' and lfs.isdir(name) then
+ local name = specification.filename
+ local realname = resolvers.resolve(name) -- no shortcut
+ if realname and realname ~= '' and lfs.isdir(realname) then
if trace_locating then
- report_trees("locator '%s' found",name)
+ report_trees("locator '%s' found",realname)
end
resolvers.appendhash('tree',name,false) -- don't cache
elseif trace_locating then
@@ -13437,6 +14735,8 @@ function resolvers.hashers.tree(specification)
report_trees("analysing '%s'",name)
end
resolvers.methodhandler("hashers",name)
+
+ resolvers.generators.file(specification)
end
resolvers.concatinators.tree = resolvers.concatinators.file
@@ -13596,8 +14896,15 @@ local function loaded(libpaths,name,simple)
end
package.loaders[2] = function(name) -- was [#package.loaders+1]
- if trace_locating then -- mode detail
- report_libraries("! locating '%s'",name)
+ if file.suffix(name) == "" then
+ name = file.addsuffix(name,"lua") -- maybe a list
+ if trace_locating then -- mode detail
+ report_libraries("! locating '%s' with forced suffix",name)
+ end
+ else
+ if trace_locating then -- mode detail
+ report_libraries("! locating '%s'",name)
+ end
end
for i=1,#libformats do
local format = libformats[i]
@@ -13804,7 +15111,13 @@ function resolvers.load_tree(tree,resolve)
-- Beware, we need to obey the relocatable autoparent so we
-- set TEXMFCNF to its raw value. This is somewhat tricky when
-- we run a mkii job from within. Therefore, in mtxrun, there
- -- is a resolve applied when we're in mkii/kpse mode.
+ -- is a resolve applied when we're in mkii/kpse mode or when
+ -- --resolve is passed to mtxrun. Maybe we should also set the
+ -- local AUTOPARENT etc. although these are always set anew.
+
+ if resolve then
+ resolvers.luacnfspec = resolvers.resolve(resolvers.luacnfspec)
+ end
setenv('SELFAUTOPARENT', newroot)
setenv('SELFAUTODIR', newtree)
@@ -13812,11 +15125,12 @@ function resolvers.load_tree(tree,resolve)
setenv('TEXROOT', newroot)
setenv('TEXOS', texos)
setenv('TEXMFOS', texmfos)
- setenv('TEXMFCNF', resolvers.luacnfspec, not resolve)
- setenv("PATH", newpath .. io.pathseparator .. getenv("PATH"))
+ setenv('TEXMFCNF', resolvers.luacnfspec,true) -- already resolved
+ setenv('PATH', newpath .. io.pathseparator .. getenv('PATH'))
report_tds("changing from root '%s' to '%s'",oldroot,newroot)
- report_tds("prepending '%s' to binary path",newpath)
+ report_tds("prepending '%s' to PATH",newpath)
+ report_tds("setting TEXMFCNF to '%s'",resolvers.luacnfspec)
report_tds()
end
end
@@ -13837,6 +15151,7 @@ if not modules then modules = { } end modules ['data-lst'] = {
-- used in mtxrun, can be loaded later .. todo
local find, concat, upper, format = string.find, table.concat, string.upper, string.format
+local fastcopy, sortedpairs = table.fastcopy, table.sortedpairs
resolvers.listers = resolvers.listers or { }
@@ -13867,10 +15182,10 @@ function resolvers.listers.variables(pattern)
end
end
end
- local env = table.fastcopy(environment)
- local var = table.fastcopy(variables)
- local exp = table.fastcopy(expansions)
- for key, value in table.sortedpairs(configured) do
+ local env = fastcopy(environment)
+ local var = fastcopy(variables)
+ local exp = fastcopy(expansions)
+ for key, value in sortedpairs(configured) do
if key ~= "" and (pattern == "" or find(upper(key),pattern)) then
report_lists(key)
report_lists(" env: %s",tabstr(rawget(environment,key)) or "unset")
@@ -13879,9 +15194,9 @@ function resolvers.listers.variables(pattern)
report_lists(" res: %s",resolvers.resolve(expansions[key]) or "unset")
end
end
- instance.environment = table.fastcopy(env)
- instance.variables = table.fastcopy(var)
- instance.expansions = table.fastcopy(exp)
+ instance.environment = fastcopy(env)
+ instance.variables = fastcopy(var)
+ instance.expansions = fastcopy(exp)
end
function resolvers.listers.configurations(report)
@@ -14168,8 +15483,8 @@ own = { } -- not local, might change
own.libs = { -- order can be made better
'l-string.lua',
- 'l-lpeg.lua',
'l-table.lua',
+ 'l-lpeg.lua',
'l-io.lua',
'l-number.lua',
'l-set.lua',
@@ -14240,7 +15555,7 @@ end
-- End of hack.
-local format, gsub, gmatch, match = string.format, string.gsub, string.gmatch, string.match
+local format, gsub, gmatch, match, find = string.format, string.gsub, string.gmatch, string.match, string.find
local concat = table.concat
own.name = (environment and environment.ownname) or arg[0] or 'mtxrun.lua'
@@ -14362,7 +15677,7 @@ local helpinfo = [[
--resolve resolve prefixed arguments
--ctxlua run internally (using preloaded libs)
--internal run script using built in libraries (same as --ctxlua)
---locate locate given filename
+--locate locate given filename in database (default) or system (--first --all --detail)
--autotree use texmf tree cf. env 'texmfstart_tree' or 'texmfstarttree'
--tree=pathtotree use given texmf tree (default file: 'setuptex.tmf')
@@ -14410,7 +15725,7 @@ local helpinfo = [[
local application = logs.application {
name = "mtxrun",
- banner = "ConTeXt TDS Runner Tool 1.30",
+ banner = "ConTeXt TDS Runner Tool 1.31",
helpinfo = helpinfo,
}
@@ -14455,12 +15770,12 @@ runners.registered = {
xmltools = { 'xmltools.rb', true },
-- luatools = { 'luatools.lua', true },
mtxtools = { 'mtxtools.rb', true },
- pdftrimwhite = { 'pdftrimwhite.pl', false }
+ pdftrimwhite = { 'pdftrimwhite.pl', false },
}
runners.launchers = {
windows = { },
- unix = { }
+ unix = { },
}
-- like runners.libpath("framework"): looks on script's subpath
@@ -14487,7 +15802,7 @@ function runners.prepare()
end
local touchname = environment.argument("iftouched")
if type(touchname) == "string" and touchname ~= "" then
- local oldname, newname = string.split(touchname, ",")
+ local oldname, newname = string.splitup(touchname, ",")
if oldname and newname and oldname ~= "" and newname ~= "" then
if not file.needs_updating(oldname,newname) then
if e_verbose then
@@ -14521,11 +15836,11 @@ function runners.execute_script(fullname,internal,nosplit)
if path ~= "" then
result = fullname
elseif name then
- name = name:gsub("^int[%a]*:",function()
+ name = gsub(name,"^int[%a]*:",function()
internal = true
return ""
end )
- name = name:gsub("^script:","")
+ name = gsub(name,"^script:","")
if suffix == "" and runners.registered[name] and runners.registered[name][1] then
name = runners.registered[name][1]
suffix = file.extname(name)
@@ -14555,13 +15870,15 @@ function runners.execute_script(fullname,internal,nosplit)
environment.ownscript = result
dofile(result)
else
- local quotedresult = result
- if string.match(result,' ') and not string.match(result,"^\".*\"$") then quotedresult = '"' .. result .. '"' end
local binary = runners.applications[file.extname(result)]
+ result = string.quoted(string.unquoted(result))
+ -- if string.match(result,' ') and not string.match(result,"^\".*\"$") then
+ -- result = '"' .. result .. '"'
+ -- end
if binary and binary ~= "" then
- quotedresult = binary .. " " .. quotedresult
+ result = binary .. " " .. result
end
- local command = quotedresult .. " " .. environment.reconstructcommandline(environment.arguments_after,noquote)
+ local command = result .. " " .. environment.reconstructcommandline(environment.arguments_after,noquote)
if e_verbose then
report()
report("executing: %s",command)
@@ -14606,7 +15923,7 @@ function runners.execute_program(fullname)
local before, after = environment.splitarguments(fullname)
for k=1,#after do after[k] = resolvers.resolve(after[k]) end
environment.initializearguments(after)
- fullname = fullname:gsub("^bin:","")
+ fullname = gsub(fullname,"^bin:","")
local command = fullname .. " " .. (environment.reconstructcommandline(after or "",noquote) or "")
report()
report("executing: %s",command)
@@ -14667,10 +15984,24 @@ function runners.resolve_string(filename)
end
end
-function runners.locate_file(filename)
- -- differs from texmfstart where locate appends .com .exe .bat ... todo
+-- differs from texmfstart where locate appends .com .exe .bat ... todo
+
+function runners.locate_file(filename) -- was given file but only searches in tree
if filename and filename ~= "" then
- runners.report_location(resolvers.findgivenfile(filename))
+ if environment.argument("first") then
+ runners.report_location(resolvers.findfile(filename))
+ -- resolvers.dowithfilesandreport(resolvers.findfile,filename)
+ elseif environment.argument("all") then
+ local result, status = resolvers.findfiles(filename)
+ if status and environment.argument("detail") then
+ runners.report_location(status)
+ else
+ runners.report_location(result)
+ end
+ else
+ runners.report_location(resolvers.findgivenfile(filename))
+ -- resolvers.dowithfilesandreport(resolvers.findgivenfile,filename)
+ end
end
end
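
-- illustrative command lines (hypothetical file name) for the branches above:
--
-- mtxrun --locate context.mkiv                   -- default: database lookup via findgivenfile
-- mtxrun --locate --first context.mkiv           -- first match via findfile
-- mtxrun --locate --all --detail context.mkiv    -- all matches, with per-method lookup status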
@@ -14679,12 +16010,12 @@ function runners.locate_platform()
end
function runners.report_location(result)
- if e_verbose then
- reportline()
- if result and result ~= "" then
- report(result)
- else
- report("not found")
+ if type(result) == "table" then
+ for i=1,#result do
+ if i > 1 then
+ io.write("\n")
+ end
+ io.write(result[i])
end
else
io.write(result)
@@ -14742,8 +16073,8 @@ function resolvers.launch(str)
end
function runners.launch_file(filename)
- instance.allresults = true
trackers.enable("resolvers.locating")
+ local allresults = environment.arguments["all"]
local pattern = environment.arguments["pattern"]
if not pattern or pattern == "" then
pattern = filename
@@ -14751,15 +16082,15 @@ function runners.launch_file(filename)
if not pattern or pattern == "" then
report("provide name or --pattern=")
else
- local t = resolvers.findfiles(pattern)
+ local t = resolvers.findfiles(pattern,nil,allresults)
if not t or #t == 0 then
- t = resolvers.findfiles("*/" .. pattern)
+ t = resolvers.findfiles("*/" .. pattern,nil,allresults)
end
if not t or #t == 0 then
- t = resolvers.findfiles("*/" .. pattern .. "*")
+ t = resolvers.findfiles("*/" .. pattern .. "*",nil,allresults)
end
if t and #t > 0 then
- if environment.arguments["all"] then
+ if allresults then
for _, v in pairs(t) do
report("launching %s", v)
resolvers.launch(v)
@@ -14774,6 +16105,11 @@ function runners.launch_file(filename)
end
end
+local mtxprefixes = {
+ { "^mtx%-", "mtx-" },
+ { "^mtx%-t%-", "mtx-t-" },
+}
+
function runners.find_mtx_script(filename)
local function found(name)
local path = file.dirname(name)
@@ -14799,24 +16135,27 @@ function runners.find_mtx_script(filename)
return fullname
end
-- mtx- prefix checking
- local mtxprefix = (filename:find("^mtx%-") and "") or "mtx-"
- -- context namespace, mtx-<filename>
- fullname = mtxprefix .. filename
- fullname = found(fullname) or resolvers.findfile(fullname)
- if fullname and fullname ~= "" then
- return fullname
- end
- -- context namespace, mtx-<filename>s
- fullname = mtxprefix .. basename .. "s" .. "." .. suffix
- fullname = found(fullname) or resolvers.findfile(fullname)
- if fullname and fullname ~= "" then
- return fullname
- end
- -- context namespace, mtx-<filename minus trailing s>
- fullname = mtxprefix .. basename:gsub("s$","") .. "." .. suffix
- fullname = found(fullname) or resolvers.findfile(fullname)
- if fullname and fullname ~= "" then
- return fullname
+ for i=1,#mtxprefixes do
+ local mtxprefix = mtxprefixes[i]
+ mtxprefix = find(filename,mtxprefix[1]) and "" or mtxprefix[2]
+ -- context namespace, mtx-<filename>
+ fullname = mtxprefix .. filename
+ fullname = found(fullname) or resolvers.findfile(fullname)
+ if fullname and fullname ~= "" then
+ return fullname
+ end
+ -- context namespace, mtx-<filename>s
+ fullname = mtxprefix .. basename .. "s" .. "." .. suffix
+ fullname = found(fullname) or resolvers.findfile(fullname)
+ if fullname and fullname ~= "" then
+ return fullname
+ end
+ -- context namespace, mtx-<filename minus trailing s>
+ fullname = mtxprefix .. gsub(basename,"s$","") .. "." .. suffix
+ fullname = found(fullname) or resolvers.findfile(fullname)
+ if fullname and fullname ~= "" then
+ return fullname
+ end
end
-- context namespace, just <filename>
fullname = resolvers.findfile(filename)
@@ -14963,9 +16302,11 @@ instance.lsrmode = environment.argument("lsr") or false
local is_mkii_stub = runners.registered[file.removesuffix(file.basename(filename))]
-if environment.argument("usekpse") or environment.argument("forcekpse") or is_mkii_stub then
+local e_argument = environment.argument
+
+if e_argument("usekpse") or e_argument("forcekpse") or is_mkii_stub then
- resolvers.load_tree(environment.argument('tree'),true) -- force resolve of TEXMFCNF
+ resolvers.load_tree(e_argument('tree'),true) -- force resolve of TEXMFCNF
os.setenv("engine","")
os.setenv("progname","")
@@ -14978,7 +16319,7 @@ if environment.argument("usekpse") or environment.argument("forcekpse") or is_mk
other = "other text files",
}
- local progname = environment.argument("progname") or 'context'
+ local progname = e_argument("progname") or 'context'
local function kpse_initialized()
texconfig.kpse_init = true
@@ -14993,7 +16334,7 @@ if environment.argument("usekpse") or environment.argument("forcekpse") or is_mk
local findfile = resolvers.findfile
local showpath = resolvers.showpath
- if environment.argument("forcekpse") then
+ if e_argument("forcekpse") then
function resolvers.findfile(name,kind)
return (kpse_initialized():find_file(resolvers.cleanpath(name),(kind ~= "" and (remapper[kind] or kind)) or "tex") or "") or ""
@@ -15002,7 +16343,7 @@ if environment.argument("usekpse") or environment.argument("forcekpse") or is_mk
return (kpse_initialized():show_path(name)) or ""
end
- elseif environment.argument("usekpse") or is_mkii_stub then
+ elseif e_argument("usekpse") or is_mkii_stub then
resolvers.load()
@@ -15041,12 +16382,12 @@ else
end
end
- resolvers.load_tree(environment.argument('tree'))
+ resolvers.load_tree(e_argument('tree'),e_argument("resolve"))
end
-if environment.argument("selfmerge") then
+if e_argument("selfmerge") then
-- embed used libraries
@@ -15056,27 +16397,27 @@ if environment.argument("selfmerge") then
utilities.merger.selfmerge(own.name,own.libs,{ found })
end
-elseif environment.argument("selfclean") then
+elseif e_argument("selfclean") then
-- remove embedded libraries
runners.loadbase()
utilities.merger.selfclean(own.name)
-elseif environment.argument("selfupdate") then
+elseif e_argument("selfupdate") then
runners.loadbase()
trackers.enable("resolvers.locating")
resolvers.updatescript(own.name,"mtxrun")
-elseif environment.argument("ctxlua") or environment.argument("internal") then
+elseif e_argument("ctxlua") or e_argument("internal") then
-- run a script by loading it (using libs)
runners.loadbase()
ok = runners.execute_script(filename,true)
-elseif environment.argument("script") or environment.argument("scripts") then
+elseif e_argument("script") or e_argument("scripts") then
-- run a script by loading it (using libs), pass args
@@ -15087,109 +16428,110 @@ elseif environment.argument("script") or environment.argument("scripts") then
ok = runners.execute_ctx_script(filename)
end
-elseif environment.argument("execute") then
+elseif e_argument("execute") then
-- execute script
runners.loadbase()
ok = runners.execute_script(filename)
-elseif environment.argument("direct") then
+elseif e_argument("direct") then
-- equals bin:
runners.loadbase()
ok = runners.execute_program(filename)
-elseif environment.argument("edit") then
+elseif e_argument("edit") then
-- edit file
runners.loadbase()
runners.edit_script(filename)
-elseif environment.argument("launch") then
+elseif e_argument("launch") then
runners.loadbase()
runners.launch_file(filename)
-elseif environment.argument("makestubs") then
+elseif e_argument("makestubs") then
 -- make stubs (deprecated)
runners.handle_stubs(true)
-elseif environment.argument("removestubs") then
+elseif e_argument("removestubs") then
 -- remove stubs (deprecated)
runners.loadbase()
runners.handle_stubs(false)
-elseif environment.argument("resolve") then
+elseif e_argument("resolve") then
-- resolve string
runners.loadbase()
runners.resolve_string(filename)
-elseif environment.argument("locate") then
+elseif e_argument("locate") then
- -- locate file
+ -- locate file (only database)
runners.loadbase()
runners.locate_file(filename)
-elseif environment.argument("platform") or environment.argument("show-platform") then
+elseif e_argument("platform") or e_argument("show-platform") then
-- locate platform
runners.loadbase()
runners.locate_platform()
-elseif environment.argument("prefixes") then
+elseif e_argument("prefixes") then
runners.loadbase()
runners.prefixes()
-elseif environment.argument("timedrun") then
+elseif e_argument("timedrun") then
-- locate platform
runners.loadbase()
runners.timedrun(filename)
-elseif environment.argument("variables") or environment.argument("show-variables") or environment.argument("expansions") or environment.argument("show-expansions") then
+elseif e_argument("variables") or e_argument("show-variables") or e_argument("expansions") or e_argument("show-expansions") then
-- luatools: runners.execute_ctx_script("mtx-base","--expansions",filename)
resolvers.load("nofiles")
- resolvers.listers.variables(environment.argument("pattern"))
+ resolvers.listers.variables(e_argument("pattern"))
-elseif environment.argument("configurations") or environment.argument("show-configurations") then
+elseif e_argument("configurations") or e_argument("show-configurations") then
-- luatools: runners.execute_ctx_script("mtx-base","--configurations",filename)
resolvers.load("nofiles")
resolvers.listers.configurations()
-elseif environment.argument("find-file") then
+elseif e_argument("find-file") then
-- luatools: runners.execute_ctx_script("mtx-base","--find-file",filename)
resolvers.load()
- local e_pattern = environment.argument("pattern")
- local e_format = environment.argument("format")
+ local e_all = e_argument("all")
+ local e_pattern = e_argument("pattern")
+ local e_format = e_argument("format")
+ local finder = e_all and resolvers.findfiles or resolvers.findfile
if not e_pattern then
runners.register_arguments(filename)
environment.initializearguments(environment.arguments_after)
- resolvers.dowithfilesandreport(resolvers.findfiles,environment.files,e_format)
+ resolvers.dowithfilesandreport(finder,environment.files,e_format)
elseif type(e_pattern) == "string" then
- instance.allresults = true -- brrrr
- resolvers.dowithfilesandreport(resolvers.findfiles,{ e_pattern }, e_format)
+ resolvers.dowithfilesandreport(finder,{ e_pattern },e_format)
end
-elseif environment.argument("find-path") then
+elseif e_argument("find-path") then
-- luatools: runners.execute_ctx_script("mtx-base","--find-path",filename)
@@ -15201,7 +16543,7 @@ elseif environment.argument("find-path") then
print(path)
end
-elseif environment.argument("expand-braces") then
+elseif e_argument("expand-braces") then
-- luatools: runners.execute_ctx_script("mtx-base","--expand-braces",filename)
@@ -15210,7 +16552,7 @@ elseif environment.argument("expand-braces") then
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.expandbraces, environment.files)
-elseif environment.argument("expand-path") then
+elseif e_argument("expand-path") then
-- luatools: runners.execute_ctx_script("mtx-base","--expand-path",filename)
@@ -15219,7 +16561,7 @@ elseif environment.argument("expand-path") then
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.expandpath, environment.files)
-elseif environment.argument("expand-var") or environment.argument("expand-variable") then
+elseif e_argument("expand-var") or e_argument("expand-variable") then
-- luatools: runners.execute_ctx_script("mtx-base","--expand-var",filename)
@@ -15228,7 +16570,7 @@ elseif environment.argument("expand-var") or environment.argument("expand-variab
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.expansion, environment.files)
-elseif environment.argument("show-path") or environment.argument("path-value") then
+elseif e_argument("show-path") or e_argument("path-value") then
-- luatools: runners.execute_ctx_script("mtx-base","--show-path",filename)
@@ -15237,7 +16579,7 @@ elseif environment.argument("show-path") or environment.argument("path-value") t
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.showpath, environment.files)
-elseif environment.argument("var-value") or environment.argument("show-value") then
+elseif e_argument("var-value") or e_argument("show-value") then
-- luatools: runners.execute_ctx_script("mtx-base","--show-value",filename)
@@ -15246,28 +16588,36 @@ elseif environment.argument("var-value") or environment.argument("show-value") t
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.variable,environment.files)
-elseif environment.argument("format-path") then
+elseif e_argument("format-path") then
-- luatools: runners.execute_ctx_script("mtx-base","--format-path",filename)
resolvers.load()
report(caches.getwritablepath("format"))
-elseif environment.argument("pattern") then
+elseif e_argument("pattern") then
-- luatools
- runners.execute_ctx_script("mtx-base","--pattern='" .. environment.argument("pattern") .. "'",filename)
+ runners.execute_ctx_script("mtx-base","--pattern='" .. e_argument("pattern") .. "'",filename)
-elseif environment.argument("generate") then
+elseif e_argument("generate") then
-- luatools
- instance.renewcache = true
- trackers.enable("resolvers.locating")
- resolvers.load()
+ if filename and filename ~= "" then
+ resolvers.load("nofiles")
+ trackers.enable("resolvers.locating")
+ resolvers.renew(filename)
+ else
+ instance.renewcache = true
+ trackers.enable("resolvers.locating")
+ resolvers.load()
+ end
-elseif environment.argument("make") or environment.argument("ini") or environment.argument("compile") then
+ e_verbose = true
+
+elseif e_argument("make") or e_argument("ini") or e_argument("compile") then
-- luatools: runners.execute_ctx_script("mtx-base","--make",filename)
@@ -15275,29 +16625,33 @@ elseif environment.argument("make") or environment.argument("ini") or environmen
trackers.enable("resolvers.locating")
environment.make_format(filename)
-elseif environment.argument("run") then
+elseif e_argument("run") then
-- luatools
runners.execute_ctx_script("mtx-base","--run",filename)
-elseif environment.argument("fmt") then
+elseif e_argument("fmt") then
-- luatools
runners.execute_ctx_script("mtx-base","--fmt",filename)
-elseif environment.argument("help") and filename=='base' then
+elseif e_argument("help") and filename=='base' then
-- luatools
runners.execute_ctx_script("mtx-base","--help")
-elseif environment.argument("help") or filename=='help' or filename == "" then
+elseif e_argument("version") then
+
+ application.version()
+
+elseif e_argument("help") or filename=='help' or filename == "" then
application.help()
-elseif filename:find("^bin:") then
+elseif find(filename,"^bin:") then
runners.loadbase()
ok = runners.execute_program(filename)
diff --git a/Master/bin/x86_64-solaris/mtxrun b/Master/bin/x86_64-solaris/mtxrun
index 7adcd3023a8..335c4fcb9e0 100755
--- a/Master/bin/x86_64-solaris/mtxrun
+++ b/Master/bin/x86_64-solaris/mtxrun
@@ -160,509 +160,6 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['l-lpeg'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local lpeg = require("lpeg")
-
-local type = type
-
--- Beware, we predefine a bunch of patterns here and one reason for doing so
--- is that we get consistent behaviour in some of the visualizers.
-
-lpeg.patterns = lpeg.patterns or { } -- so that we can share
-local patterns = lpeg.patterns
-
-local P, R, S, V, match = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.match
-local Ct, C, Cs, Cc = lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc
-local lpegtype = lpeg.type
-
-local utfcharacters = string.utfcharacters
-local utfgmatch = unicode and unicode.utf8.gmatch
-
-local anything = P(1)
-local endofstring = P(-1)
-local alwaysmatched = P(true)
-
-patterns.anything = anything
-patterns.endofstring = endofstring
-patterns.beginofstring = alwaysmatched
-patterns.alwaysmatched = alwaysmatched
-
-local digit, sign = R('09'), S('+-')
-local cr, lf, crlf = P("\r"), P("\n"), P("\r\n")
-local newline = crlf + cr + lf
-local escaped = P("\\") * anything
-local squote = P("'")
-local dquote = P('"')
-local space = P(" ")
-
-local utfbom_32_be = P('\000\000\254\255')
-local utfbom_32_le = P('\255\254\000\000')
-local utfbom_16_be = P('\255\254')
-local utfbom_16_le = P('\254\255')
-local utfbom_8 = P('\239\187\191')
-local utfbom = utfbom_32_be + utfbom_32_le
- + utfbom_16_be + utfbom_16_le
- + utfbom_8
-local utftype = utfbom_32_be / "utf-32-be" + utfbom_32_le / "utf-32-le"
- + utfbom_16_be / "utf-16-be" + utfbom_16_le / "utf-16-le"
- + utfbom_8 / "utf-8" + alwaysmatched / "unknown"
-
-local utf8next = R("\128\191")
-
-patterns.utf8one = R("\000\127")
-patterns.utf8two = R("\194\223") * utf8next
-patterns.utf8three = R("\224\239") * utf8next * utf8next
-patterns.utf8four = R("\240\244") * utf8next * utf8next * utf8next
-patterns.utfbom = utfbom
-patterns.utftype = utftype
-
-local utf8char = patterns.utf8one + patterns.utf8two + patterns.utf8three + patterns.utf8four
-local validutf8char = utf8char^0 * endofstring * Cc(true) + Cc(false)
-
-patterns.utf8 = utf8char
-patterns.utf8char = utf8char
-patterns.validutf8 = validutf8char
-patterns.validutf8char = validutf8char
-
-patterns.digit = digit
-patterns.sign = sign
-patterns.cardinal = sign^0 * digit^1
-patterns.integer = sign^0 * digit^1
-patterns.float = sign^0 * digit^0 * P('.') * digit^1
-patterns.cfloat = sign^0 * digit^0 * P(',') * digit^1
-patterns.number = patterns.float + patterns.integer
-patterns.cnumber = patterns.cfloat + patterns.integer
-patterns.oct = P("0") * R("07")^1
-patterns.octal = patterns.oct
-patterns.HEX = P("0x") * R("09","AF")^1
-patterns.hex = P("0x") * R("09","af")^1
-patterns.hexadecimal = P("0x") * R("09","AF","af")^1
-patterns.lowercase = R("az")
-patterns.uppercase = R("AZ")
-patterns.letter = patterns.lowercase + patterns.uppercase
-patterns.space = space
-patterns.tab = P("\t")
-patterns.spaceortab = patterns.space + patterns.tab
-patterns.eol = S("\n\r")
-patterns.spacer = S(" \t\f\v") -- + string.char(0xc2, 0xa0) if we want utf (cf mail roberto)
-patterns.newline = newline
-patterns.emptyline = newline^1
-patterns.nonspacer = 1 - patterns.spacer
-patterns.whitespace = patterns.eol + patterns.spacer
-patterns.nonwhitespace = 1 - patterns.whitespace
-patterns.equal = P("=")
-patterns.comma = P(",")
-patterns.commaspacer = P(",") * patterns.spacer^0
-patterns.period = P(".")
-patterns.colon = P(":")
-patterns.semicolon = P(";")
-patterns.underscore = P("_")
-patterns.escaped = escaped
-patterns.squote = squote
-patterns.dquote = dquote
-patterns.nosquote = (escaped + (1-squote))^0
-patterns.nodquote = (escaped + (1-dquote))^0
-patterns.unsingle = (squote/"") * patterns.nosquote * (squote/"")
-patterns.undouble = (dquote/"") * patterns.nodquote * (dquote/"")
-patterns.unquoted = patterns.undouble + patterns.unsingle -- more often undouble
-patterns.unspacer = ((patterns.spacer^1)/"")^0
-
-patterns.somecontent = (anything - newline - space)^1 -- (utf8char - newline - space)^1
-patterns.beginline = #(1-newline)
-
-local unquoted = Cs(patterns.unquoted * endofstring) -- not C
-
-function string.unquoted(str)
- return match(unquoted,str) or str
-end
-
-
-function lpeg.anywhere(pattern) --slightly adapted from website
- return P { P(pattern) + 1 * V(1) } -- why so complex?
-end
-
-function lpeg.splitter(pattern, action)
- return (((1-P(pattern))^1)/action+1)^0
-end
-
-local splitters_s, splitters_m = { }, { }
-
-local function splitat(separator,single)
- local splitter = (single and splitters_s[separator]) or splitters_m[separator]
- if not splitter then
- separator = P(separator)
- local other = C((1 - separator)^0)
- if single then
- local any = anything
- splitter = other * (separator * C(any^0) + "") -- ?
- splitters_s[separator] = splitter
- else
- splitter = other * (separator * other)^0
- splitters_m[separator] = splitter
- end
- end
- return splitter
-end
-
-lpeg.splitat = splitat
-
-
-local cache = { }
-
-function lpeg.split(separator,str)
- local c = cache[separator]
- if not c then
- c = Ct(splitat(separator))
- cache[separator] = c
- end
- return match(c,str)
-end
-
-function string.split(str,separator)
- local c = cache[separator]
- if not c then
- c = Ct(splitat(separator))
- cache[separator] = c
- end
- return match(c,str)
-end
-
-local spacing = patterns.spacer^0 * newline -- sort of strip
-local empty = spacing * Cc("")
-local nonempty = Cs((1-spacing)^1) * spacing^-1
-local content = (empty + nonempty)^1
-
-patterns.textline = content
-
-
-local linesplitter = Ct(splitat(newline))
-
-patterns.linesplitter = linesplitter
-
-function string.splitlines(str)
- return match(linesplitter,str)
-end
-
-local utflinesplitter = utfbom^-1 * Ct(splitat(newline))
-
-patterns.utflinesplitter = utflinesplitter
-
-function string.utfsplitlines(str)
- return match(utflinesplitter,str)
-end
-
-
-local cache = { }
-
-function lpeg.checkedsplit(separator,str)
- local c = cache[separator]
- if not c then
- separator = P(separator)
- local other = C((1 - separator)^1)
- c = Ct(separator^0 * other * (separator^1 * other)^0)
- cache[separator] = c
- end
- return match(c,str)
-end
-
-function string.checkedsplit(str,separator)
- local c = cache[separator]
- if not c then
- separator = P(separator)
- local other = C((1 - separator)^1)
- c = Ct(separator^0 * other * (separator^1 * other)^0)
- cache[separator] = c
- end
- return match(c,str)
-end
-
-
-local f1 = string.byte
-
-local function f2(s) local c1, c2 = f1(s,1,2) return c1 * 64 + c2 - 12416 end
-local function f3(s) local c1, c2, c3 = f1(s,1,3) return (c1 * 64 + c2) * 64 + c3 - 925824 end
-local function f4(s) local c1, c2, c3, c4 = f1(s,1,4) return ((c1 * 64 + c2) * 64 + c3) * 64 + c4 - 63447168 end
-
-local utf8byte = patterns.utf8one/f1 + patterns.utf8two/f2 + patterns.utf8three/f3 + patterns.utf8four/f4
-
-patterns.utf8byte = utf8byte
-
-
-
-local cache = { }
-
-function lpeg.stripper(str)
- if type(str) == "string" then
- local s = cache[str]
- if not s then
- s = Cs(((S(str)^1)/"" + 1)^0)
- cache[str] = s
- end
- return s
- else
- return Cs(((str^1)/"" + 1)^0)
- end
-end
-
-local cache = { }
-
-function lpeg.keeper(str)
- if type(str) == "string" then
- local s = cache[str]
- if not s then
- s = Cs((((1-S(str))^1)/"" + 1)^0)
- cache[str] = s
- end
- return s
- else
- return Cs((((1-str)^1)/"" + 1)^0)
- end
-end
-
-function lpeg.frontstripper(str) -- or pattern (yet undocumented)
- return (P(str) + P(true)) * Cs(P(1)^0)
-end
-
-function lpeg.endstripper(str) -- or pattern (yet undocumented)
- return Cs((1 - P(str) * P(-1))^0)
-end
-
--- Just for fun I looked at the used bytecode and
--- p = (p and p + pp) or pp gets one more (testset).
-
-function lpeg.replacer(one,two)
- if type(one) == "table" then
- local no = #one
- if no > 0 then
- local p
- for i=1,no do
- local o = one[i]
- local pp = P(o[1]) / o[2]
- if p then
- p = p + pp
- else
- p = pp
- end
- end
- return Cs((p + 1)^0)
- end
- else
- two = two or ""
- return Cs((P(one)/two + 1)^0)
- end
-end
-
-local splitters_f, splitters_s = { }, { }
-
-function lpeg.firstofsplit(separator) -- always return value
- local splitter = splitters_f[separator]
- if not splitter then
- separator = P(separator)
- splitter = C((1 - separator)^0)
- splitters_f[separator] = splitter
- end
- return splitter
-end
-
-function lpeg.secondofsplit(separator) -- nil if not split
- local splitter = splitters_s[separator]
- if not splitter then
- separator = P(separator)
- splitter = (1 - separator)^0 * separator * C(anything^0)
- splitters_s[separator] = splitter
- end
- return splitter
-end
-
-function lpeg.balancer(left,right)
- left, right = P(left), P(right)
- return P { left * ((1 - left - right) + V(1))^0 * right }
-end
-
-
-
-local nany = utf8char/""
-
-function lpeg.counter(pattern)
- pattern = Cs((P(pattern)/" " + nany)^0)
- return function(str)
- return #match(pattern,str)
- end
-end
-
-if utfgmatch then
-
- function lpeg.count(str,what) -- replaces string.count
- if type(what) == "string" then
- local n = 0
- for _ in utfgmatch(str,what) do
- n = n + 1
- end
- return n
- else -- 4 times slower but still faster than / function
- return #match(Cs((P(what)/" " + nany)^0),str)
- end
- end
-
-else
-
- local cache = { }
-
- function lpeg.count(str,what) -- replaces string.count
- if type(what) == "string" then
- local p = cache[what]
- if not p then
- p = Cs((P(what)/" " + nany)^0)
- cache[p] = p
- end
- return #match(p,str)
- else -- 4 times slower but still faster than / function
- return #match(Cs((P(what)/" " + nany)^0),str)
- end
- end
-
-end
-
-local patterns_escapes = { -- also defines in l-string
- ["%"] = "%%",
- ["."] = "%.",
- ["+"] = "%+", ["-"] = "%-", ["*"] = "%*",
- ["["] = "%[", ["]"] = "%]",
- ["("] = "%)", [")"] = "%)",
- -- ["{"] = "%{", ["}"] = "%}"
- -- ["^"] = "%^", ["$"] = "%$",
-}
-
-local simple_escapes = { -- also defines in l-string
- ["-"] = "%-",
- ["."] = "%.",
- ["?"] = ".",
- ["*"] = ".*",
-}
-
-local p = Cs((S("-.+*%()[]") / patterns_escapes + anything)^0)
-local s = Cs((S("-.+*%()[]") / simple_escapes + anything)^0)
-
-function string.escapedpattern(str,simple)
- return match(simple and s or p,str)
-end
-
--- utf extensies
-
-lpeg.UP = lpeg.P
-
-if utfcharacters then
-
- function lpeg.US(str)
- local p
- for uc in utfcharacters(str) do
- if p then
- p = p + P(uc)
- else
- p = P(uc)
- end
- end
- return p
- end
-
-
-elseif utfgmatch then
-
- function lpeg.US(str)
- local p
- for uc in utfgmatch(str,".") do
- if p then
- p = p + P(uc)
- else
- p = P(uc)
- end
- end
- return p
- end
-
-else
-
- function lpeg.US(str)
- local p
- local f = function(uc)
- if p then
- p = p + P(uc)
- else
- p = P(uc)
- end
- end
- match((utf8char/f)^0,str)
- return p
- end
-
-end
-
-local range = Cs(utf8byte) * (Cs(utf8byte) + Cc(false))
-
-local utfchar = unicode and unicode.utf8 and unicode.utf8.char
-
-function lpeg.UR(str,more)
- local first, last
- if type(str) == "number" then
- first = str
- last = more or first
- else
- first, last = match(range,str)
- if not last then
- return P(str)
- end
- end
- if first == last then
- return P(str)
- elseif utfchar and last - first < 8 then -- a somewhat arbitrary criterium
- local p
- for i=first,last do
- if p then
- p = p + P(utfchar(i))
- else
- p = P(utfchar(i))
- end
- end
- return p -- nil when invalid range
- else
- local f = function(b)
- return b >= first and b <= last
- end
- return utf8byte / f -- nil when invalid range
- end
-end
-
-
-
-function lpeg.oneof(list,...) -- lpeg.oneof("elseif","else","if","then")
- if type(list) ~= "table" then
- list = { list, ... }
- end
- -- sort(list) -- longest match first
- local p = P(list[1])
- for l=2,#list do
- p = p + P(list[l])
- end
- return p
-end
-
-function lpeg.is_lpeg(p)
- return p and lpegtype(p) == "pattern"
-end
-
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
if not modules then modules = { } end modules ['l-table'] = {
version = 1.001,
comment = "companion to luat-lib.mkiv",
@@ -675,11 +172,12 @@ local type, next, tostring, tonumber, ipairs, table, string = type, next, tostri
local concat, sort, insert, remove = table.concat, table.sort, table.insert, table.remove
local format, find, gsub, lower, dump, match = string.format, string.find, string.gsub, string.lower, string.dump, string.match
local getmetatable, setmetatable = getmetatable, setmetatable
+local getinfo = debug.getinfo
 -- Starting with version 5.2 Lua no longer provides ipairs, which makes
-- sense. As we already used the for loop and # in most places the
-- impact on ConTeXt was not that large; the remaining ipairs already
--- have been replaced. In a similar fashio we also hardly used pairs.
+-- have been replaced. In a similar fashion we also hardly used pairs.
--
-- Just in case, we provide the fallbacks as discussed in Programming
-- in Lua (http://www.lua.org/pil/7.3.html):
@@ -1022,6 +520,8 @@ end
-- problem: there no good number_to_string converter with the best resolution
+local function dummy() end
+
local function do_serialize(root,name,depth,level,indexed)
if level > 0 then
depth = depth .. " "
@@ -1214,19 +714,20 @@ local function do_serialize(root,name,depth,level,indexed)
end
elseif t == "function" then
if functions then
+ local f = getinfo(v).what == "C" and dump(dummy) or dump(v)
+ -- local f = getinfo(v).what == "C" and dump(function(...) return v(...) end) or dump(v)
if tk == "number" then -- or find(k,"^%d+$") then
if hexify then
- handle(format("%s [0x%04X]=loadstring(%q),",depth,k,dump(v)))
+ handle(format("%s [0x%04X]=loadstring(%q),",depth,k,f))
else
- handle(format("%s [%s]=loadstring(%q),",depth,k,dump(v)))
+ handle(format("%s [%s]=loadstring(%q),",depth,k,f))
end
elseif tk == "boolean" then
- handle(format("%s [%s]=loadstring(%q),",depth,tostring(k),dump(v)))
+ handle(format("%s [%s]=loadstring(%q),",depth,tostring(k),f))
elseif noquotes and not reserved[k] and find(k,"^%a[%w%_]*$") then
- handle(format("%s %s=loadstring(%q),",depth,k,dump(v)))
+ handle(format("%s %s=loadstring(%q),",depth,k,f))
else
- -- handle(format("%s [%q]=loadstring(%q),",depth,k,dump(v)))
- handle(format("%s [%q]=loadstring(%q),",depth,k,debug.getinfo(v).what == "C" and "C code" or dump(v)))
+ handle(format("%s [%q]=loadstring(%q),",depth,k,f))
end
end
else
@@ -1571,6 +1072,730 @@ function table.has_one_entry(t)
return t and not next(t,next(t))
end
+-- new
+
+function table.loweredkeys(t) -- maybe utf
+ local l = { }
+ for k, v in next, t do
+ l[lower(k)] = v
+ end
+ return l
+end
+
+-- new, might move (maybe duplicate)
+
+function table.unique(old)
+ local hash = { }
+ local new = { }
+ local n = 0
+ for i=1,#old do
+ local oi = old[i]
+ if not hash[oi] then
+ n = n + 1
+ new[n] = oi
+ hash[oi] = true
+ end
+ end
+ return new
+end
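
-- illustrative sketch (hypothetical input):
--
-- table.unique { "a", "b", "a", "c", "b" } -- { "a", "b", "c" }, first occurrences kept in order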
+
+-- function table.sorted(t,...)
+-- table.sort(t,...)
+-- return t -- still sorts in-place
+-- end
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+if not modules then modules = { } end modules ['l-lpeg'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+
+-- a new lpeg fails on a #(1-P(":")) test and really needs a + P(-1)
+
+local lpeg = require("lpeg")
+
+-- tracing (only used when we encounter a problem in integration of lpeg in luatex)
+
+local report = texio and texio.write_nl or print
+
+
+
+
+local type = type
+local byte, char, gmatch = string.byte, string.char, string.gmatch
+
+-- Beware, we predefine a bunch of patterns here and one reason for doing so
+-- is that we get consistent behaviour in some of the visualizers.
+
+lpeg.patterns = lpeg.patterns or { } -- so that we can share
+local patterns = lpeg.patterns
+
+local P, R, S, V, match = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.match
+local Ct, C, Cs, Cc = lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc
+local lpegtype = lpeg.type
+
+local utfcharacters = string.utfcharacters
+local utfgmatch = unicode and unicode.utf8.gmatch
+
+local anything = P(1)
+local endofstring = P(-1)
+local alwaysmatched = P(true)
+
+patterns.anything = anything
+patterns.endofstring = endofstring
+patterns.beginofstring = alwaysmatched
+patterns.alwaysmatched = alwaysmatched
+
+local digit, sign = R('09'), S('+-')
+local cr, lf, crlf = P("\r"), P("\n"), P("\r\n")
+local newline = crlf + S("\r\n") -- cr + lf
+local escaped = P("\\") * anything
+local squote = P("'")
+local dquote = P('"')
+local space = P(" ")
+
+local utfbom_32_be = P('\000\000\254\255')
+local utfbom_32_le = P('\255\254\000\000')
+local utfbom_16_be = P('\255\254')
+local utfbom_16_le = P('\254\255')
+local utfbom_8 = P('\239\187\191')
+local utfbom = utfbom_32_be + utfbom_32_le
+ + utfbom_16_be + utfbom_16_le
+ + utfbom_8
+local utftype = utfbom_32_be / "utf-32-be" + utfbom_32_le / "utf-32-le"
+ + utfbom_16_be / "utf-16-be" + utfbom_16_le / "utf-16-le"
+ + utfbom_8 / "utf-8" + alwaysmatched / "unknown"
+
+local utf8next = R("\128\191")
+
+patterns.utf8one = R("\000\127")
+patterns.utf8two = R("\194\223") * utf8next
+patterns.utf8three = R("\224\239") * utf8next * utf8next
+patterns.utf8four = R("\240\244") * utf8next * utf8next * utf8next
+patterns.utfbom = utfbom
+patterns.utftype = utftype
+
+local utf8char = patterns.utf8one + patterns.utf8two + patterns.utf8three + patterns.utf8four
+local validutf8char = utf8char^0 * endofstring * Cc(true) + Cc(false)
+
+patterns.utf8 = utf8char
+patterns.utf8char = utf8char
+patterns.validutf8 = validutf8char
+patterns.validutf8char = validutf8char
+
+patterns.digit = digit
+patterns.sign = sign
+patterns.cardinal = sign^0 * digit^1
+patterns.integer = sign^0 * digit^1
+patterns.float = sign^0 * digit^0 * P('.') * digit^1
+patterns.cfloat = sign^0 * digit^0 * P(',') * digit^1
+patterns.number = patterns.float + patterns.integer
+patterns.cnumber = patterns.cfloat + patterns.integer
+patterns.oct = P("0") * R("07")^1
+patterns.octal = patterns.oct
+patterns.HEX = P("0x") * R("09","AF")^1
+patterns.hex = P("0x") * R("09","af")^1
+patterns.hexadecimal = P("0x") * R("09","AF","af")^1
+patterns.lowercase = R("az")
+patterns.uppercase = R("AZ")
+patterns.letter = patterns.lowercase + patterns.uppercase
+patterns.space = space
+patterns.tab = P("\t")
+patterns.spaceortab = patterns.space + patterns.tab
+patterns.eol = S("\n\r")
+patterns.spacer = S(" \t\f\v") -- + char(0xc2, 0xa0) if we want utf (cf mail roberto)
+patterns.newline = newline
+patterns.emptyline = newline^1
+patterns.nonspacer = 1 - patterns.spacer
+patterns.whitespace = patterns.eol + patterns.spacer
+patterns.nonwhitespace = 1 - patterns.whitespace
+patterns.equal = P("=")
+patterns.comma = P(",")
+patterns.commaspacer = P(",") * patterns.spacer^0
+patterns.period = P(".")
+patterns.colon = P(":")
+patterns.semicolon = P(";")
+patterns.underscore = P("_")
+patterns.escaped = escaped
+patterns.squote = squote
+patterns.dquote = dquote
+patterns.nosquote = (escaped + (1-squote))^0
+patterns.nodquote = (escaped + (1-dquote))^0
+patterns.unsingle = (squote/"") * patterns.nosquote * (squote/"")
+patterns.undouble = (dquote/"") * patterns.nodquote * (dquote/"")
+patterns.unquoted = patterns.undouble + patterns.unsingle -- more often undouble
+patterns.unspacer = ((patterns.spacer^1)/"")^0
+
+patterns.somecontent = (anything - newline - space)^1 -- (utf8char - newline - space)^1
+patterns.beginline = #(1-newline)
+
+-- print(string.unquoted("test"))
+-- print(string.unquoted([["t\"est"]]))
+-- print(string.unquoted([["t\"est"x]]))
+-- print(string.unquoted("\'test\'"))
+-- print(string.unquoted('"test"'))
+-- print(string.unquoted('"test"'))
+
+function lpeg.anywhere(pattern) --slightly adapted from website
+ return P { P(pattern) + 1 * V(1) } -- why so complex?
+end
+
+function lpeg.splitter(pattern, action)
+ return (((1-P(pattern))^1)/action+1)^0
+end
+
+function lpeg.tsplitter(pattern, action)
+ return Ct((((1-P(pattern))^1)/action+1)^0)
+end
+
+-- problem: separator can be lpeg and that does not hash too well, but
+-- it's quite okay as the key is then not garbage collected
+
+local splitters_s, splitters_m, splitters_t = { }, { }, { }
+
+local function splitat(separator,single)
+ local splitter = (single and splitters_s[separator]) or splitters_m[separator]
+ if not splitter then
+ separator = P(separator)
+ local other = C((1 - separator)^0)
+ if single then
+ local any = anything
+ splitter = other * (separator * C(any^0) + "") -- ?
+ splitters_s[separator] = splitter
+ else
+ splitter = other * (separator * other)^0
+ splitters_m[separator] = splitter
+ end
+ end
+ return splitter
+end
+
+local function tsplitat(separator)
+ local splitter = splitters_t[separator]
+ if not splitter then
+ splitter = Ct(splitat(separator))
+ splitters_t[separator] = splitter
+ end
+ return splitter
+end
+
+lpeg.splitat = splitat
+lpeg.tsplitat = tsplitat
+
+function string.splitup(str,separator)
+ if not separator then
+ separator = ","
+ end
+ return match(splitters_m[separator] or splitat(separator),str)
+end
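
-- illustrative sketches (hypothetical input):
--
-- lpeg.match(lpeg.splitat(","),"a,b,c")       -- "a", "b", "c"   (all fields as multiple values)
-- lpeg.match(lpeg.splitat(",",true),"a,b,c")  -- "a", "b,c"      (split only once)
-- lpeg.match(lpeg.tsplitat(","),"a,b,c")      -- { "a", "b", "c" }
-- string.splitup("key=value","=")             -- "key", "value"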
+
+
+local cache = { }
+
+function lpeg.split(separator,str)
+ local c = cache[separator]
+ if not c then
+ c = tsplitat(separator)
+ cache[separator] = c
+ end
+ return match(c,str)
+end
+
+function string.split(str,separator)
+ local c = cache[separator]
+ if not c then
+ c = tsplitat(separator)
+ cache[separator] = c
+ end
+ return match(c,str)
+end
+
+local spacing = patterns.spacer^0 * newline -- sort of strip
+local empty = spacing * Cc("")
+local nonempty = Cs((1-spacing)^1) * spacing^-1
+local content = (empty + nonempty)^1
+
+patterns.textline = content
+
+
+local linesplitter = tsplitat(newline)
+
+patterns.linesplitter = linesplitter
+
+function string.splitlines(str)
+ return match(linesplitter,str)
+end
+
+local utflinesplitter = utfbom^-1 * tsplitat(newline)
+
+patterns.utflinesplitter = utflinesplitter
+
+function string.utfsplitlines(str)
+ return match(utflinesplitter,str or "")
+end
+
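+-- for instance (sketch):
+--
+-- string.splitlines("one\ntwo\nthree")   -- { "one", "two", "three" }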
+
+local cache = { }
+
+function lpeg.checkedsplit(separator,str)
+ local c = cache[separator]
+ if not c then
+ separator = P(separator)
+ local other = C((1 - separator)^1)
+ c = Ct(separator^0 * other * (separator^1 * other)^0)
+ cache[separator] = c
+ end
+ return match(c,str)
+end
+
+function string.checkedsplit(str,separator)
+ local c = cache[separator]
+ if not c then
+ separator = P(separator)
+ local other = C((1 - separator)^1)
+ c = Ct(separator^0 * other * (separator^1 * other)^0)
+ cache[separator] = c
+ end
+ return match(c,str)
+end
+
+
+local function f2(s) local c1, c2 = byte(s,1,2) return c1 * 64 + c2 - 12416 end
+local function f3(s) local c1, c2, c3 = byte(s,1,3) return (c1 * 64 + c2) * 64 + c3 - 925824 end
+local function f4(s) local c1, c2, c3, c4 = byte(s,1,4) return ((c1 * 64 + c2) * 64 + c3) * 64 + c4 - 63447168 end
+
+local utf8byte = patterns.utf8one/byte + patterns.utf8two/f2 + patterns.utf8three/f3 + patterns.utf8four/f4
+
+patterns.utf8byte = utf8byte
+
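+-- utf8byte maps one utf-8 sequence onto its unicode code point, for instance (sketch):
+--
+-- lpeg.match(lpeg.patterns.utf8byte,"a")   -- 97
+-- lpeg.match(lpeg.patterns.utf8byte,"é")   -- 233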
+
+
+local cache = { }
+
+function lpeg.stripper(str)
+ if type(str) == "string" then
+ local s = cache[str]
+ if not s then
+ s = Cs(((S(str)^1)/"" + 1)^0)
+ cache[str] = s
+ end
+ return s
+ else
+ return Cs(((str^1)/"" + 1)^0)
+ end
+end
+
+local cache = { }
+
+function lpeg.keeper(str)
+ if type(str) == "string" then
+ local s = cache[str]
+ if not s then
+ s = Cs((((1-S(str))^1)/"" + 1)^0)
+ cache[str] = s
+ end
+ return s
+ else
+ return Cs((((1-str)^1)/"" + 1)^0)
+ end
+end
+
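+-- for instance (sketch):
+--
+-- lpeg.match(lpeg.stripper("_"),"some_file_name")         -- "somefilename"
+-- lpeg.match(lpeg.keeper("0123456789"),"page 12 of 34")   -- "1234"
+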
+function lpeg.frontstripper(str) -- or pattern (yet undocumented)
+ return (P(str) + P(true)) * Cs(P(1)^0)
+end
+
+function lpeg.endstripper(str) -- or pattern (yet undocumented)
+ return Cs((1 - P(str) * P(-1))^0)
+end
+
+-- Just for fun I looked at the generated bytecode:
+-- p = (p and p + pp) or pp gets one extra instruction (testset).
+
+function lpeg.replacer(one,two)
+ if type(one) == "table" then
+ local no = #one
+ if no > 0 then
+ local p
+ for i=1,no do
+ local o = one[i]
+ local pp = P(o[1]) / o[2]
+ if p then
+ p = p + pp
+ else
+ p = pp
+ end
+ end
+ return Cs((p + 1)^0)
+ end
+ else
+ two = two or ""
+ return Cs((P(one)/two + 1)^0)
+ end
+end
+
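+-- for instance (sketch):
+--
+-- lpeg.match(lpeg.replacer("abc","xyz"),"aabcc")                    -- "axyzc"
+-- lpeg.match(lpeg.replacer{ { "a", "p" }, { "b", "q" } },"aabbcc")  -- "ppqqcc"
+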
+local splitters_f, splitters_s = { }, { }
+
+function lpeg.firstofsplit(separator) -- always return value
+ local splitter = splitters_f[separator]
+ if not splitter then
+ separator = P(separator)
+ splitter = C((1 - separator)^0)
+ splitters_f[separator] = splitter
+ end
+ return splitter
+end
+
+function lpeg.secondofsplit(separator) -- nil if not split
+ local splitter = splitters_s[separator]
+ if not splitter then
+ separator = P(separator)
+ splitter = (1 - separator)^0 * separator * C(anything^0)
+ splitters_s[separator] = splitter
+ end
+ return splitter
+end
+
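+-- for instance (sketch):
+--
+-- lpeg.match(lpeg.firstofsplit(":"),"before:after")    -- "before"
+-- lpeg.match(lpeg.secondofsplit(":"),"before:after")   -- "after"
+-- lpeg.match(lpeg.secondofsplit(":"),"no colon here")  -- nil
+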
+function lpeg.balancer(left,right)
+ left, right = P(left), P(right)
+ return P { left * ((1 - left - right) + V(1))^0 * right }
+end
+
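+-- for instance (sketch):
+--
+-- lpeg.match(lpeg.balancer("{","}"),"{a{b}c}x")   -- 8, the position just after the balanced part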
+
+
+local nany = utf8char/""
+
+function lpeg.counter(pattern)
+ pattern = Cs((P(pattern)/" " + nany)^0)
+ return function(str)
+ return #match(pattern,str)
+ end
+end
+
+if utfgmatch then
+
+ function lpeg.count(str,what) -- replaces string.count
+ if type(what) == "string" then
+ local n = 0
+ for _ in utfgmatch(str,what) do
+ n = n + 1
+ end
+ return n
+ else -- 4 times slower but still faster than / function
+ return #match(Cs((P(what)/" " + nany)^0),str)
+ end
+ end
+
+else
+
+ local cache = { }
+
+ function lpeg.count(str,what) -- replaces string.count
+ if type(what) == "string" then
+ local p = cache[what]
+ if not p then
+ p = Cs((P(what)/" " + nany)^0)
+ cache[what] = p -- index by the string, not the pattern, otherwise the cache never hits
+ end
+ return #match(p,str)
+ else -- 4 times slower but still faster than / function
+ return #match(Cs((P(what)/" " + nany)^0),str)
+ end
+ end
+
+end
+
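+-- for instance (sketch):
+--
+-- lpeg.count("another and another","an")      -- 3
+-- lpeg.counter("an")("another and another")   -- 3
+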
+local patterns_escapes = { -- also defined in l-string
+ ["%"] = "%%",
+ ["."] = "%.",
+ ["+"] = "%+", ["-"] = "%-", ["*"] = "%*",
+ ["["] = "%[", ["]"] = "%]",
+ ["("] = "%)", [")"] = "%)",
+ -- ["{"] = "%{", ["}"] = "%}"
+ -- ["^"] = "%^", ["$"] = "%$",
+}
+
+local simple_escapes = { -- also defined in l-string
+ ["-"] = "%-",
+ ["."] = "%.",
+ ["?"] = ".",
+ ["*"] = ".*",
+}
+
+local p = Cs((S("-.+*%()[]") / patterns_escapes + anything)^0)
+local s = Cs((S("-.+*%()[]") / simple_escapes + anything)^0)
+
+function string.escapedpattern(str,simple)
+ return match(simple and s or p,str)
+end
+
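+-- for instance (sketch):
+--
+-- string.escapedpattern("1.2-3")        -- "1%.2%-3"
+-- string.escapedpattern("*.lua",true)   -- ".*%.lua"
+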
+-- utf extensions
+
+lpeg.UP = lpeg.P
+
+if utfcharacters then
+
+ function lpeg.US(str)
+ local p
+ for uc in utfcharacters(str) do
+ if p then
+ p = p + P(uc)
+ else
+ p = P(uc)
+ end
+ end
+ return p
+ end
+
+
+elseif utfgmatch then
+
+ function lpeg.US(str)
+ local p
+ for uc in utfgmatch(str,".") do
+ if p then
+ p = p + P(uc)
+ else
+ p = P(uc)
+ end
+ end
+ return p
+ end
+
+else
+
+ function lpeg.US(str)
+ local p
+ local f = function(uc)
+ if p then
+ p = p + P(uc)
+ else
+ p = P(uc)
+ end
+ end
+ match((utf8char/f)^0,str)
+ return p
+ end
+
+end
+
+local range = Cs(utf8byte) * (Cs(utf8byte) + Cc(false))
+
+local utfchar = unicode and unicode.utf8 and unicode.utf8.char
+
+function lpeg.UR(str,more)
+ local first, last
+ if type(str) == "number" then
+ first = str
+ last = more or first
+ else
+ first, last = match(range,str)
+ if not last then
+ return P(str)
+ end
+ end
+ if first == last then
+ return P(str)
+ elseif utfchar and last - first < 8 then -- a somewhat arbitrary criterion
+ local p
+ for i=first,last do
+ if p then
+ p = p + P(utfchar(i))
+ else
+ p = P(utfchar(i))
+ end
+ end
+ return p -- nil when invalid range
+ else
+ local f = function(b)
+ return b >= first and b <= last
+ end
+ return utf8byte / f -- nil when invalid range
+ end
+end
+
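+-- US matches one of the given utf characters, UR one character from a range of
+-- code points; for instance (sketch):
+--
+-- lpeg.match(lpeg.US("+-"),"-123")      -- 2
+-- lpeg.match(lpeg.UR(0x30,0x32),"2")    -- 2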
+
+
+function lpeg.oneof(list,...) -- lpeg.oneof("elseif","else","if","then")
+ if type(list) ~= "table" then
+ list = { list, ... }
+ end
+ -- sort(list) -- longest match first
+ local p = P(list[1])
+ for l=2,#list do
+ p = p + P(list[l])
+ end
+ return p
+end
+
+function lpeg.is_lpeg(p)
+ return p and lpegtype(p) == "pattern"
+end
+
+-- For the moment here, but it might move to utilities. Beware, we need to
+-- have the longest keyword first, so 'aaa' comes before 'aa', which is why we
+-- loop back from the end, i.e. prepend.
+
+local sort, fastcopy, sortedkeys = table.sort, table.fastcopy, table.sortedkeys -- dependency!
+
+function lpeg.append(list,pp,delayed,checked)
+ local p = pp
+ if #list > 0 then
+ local keys = fastcopy(list)
+ sort(keys)
+ for i=#keys,1,-1 do
+ local k = keys[i]
+ if p then
+ p = P(k) + p
+ else
+ p = P(k)
+ end
+ end
+ elseif delayed then -- hm, it looks like the lpeg parser resolves anyway
+ local keys = sortedkeys(list)
+ if p then
+ for i=1,#keys,1 do
+ local k = keys[i]
+ local v = list[k]
+ p = P(k)/list + p
+ end
+ else
+ for i=1,#keys do
+ local k = keys[i]
+ local v = list[k]
+ if p then
+ p = P(k) + p
+ else
+ p = P(k)
+ end
+ end
+ if p then
+ p = p / list
+ end
+ end
+ elseif checked then
+ -- problem: substitution gives a capture
+ local keys = sortedkeys(list)
+ for i=1,#keys do
+ local k = keys[i]
+ local v = list[k]
+ if p then
+ if k == v then
+ p = P(k) + p
+ else
+ p = P(k)/v + p
+ end
+ else
+ if k == v then
+ p = P(k)
+ else
+ p = P(k)/v
+ end
+ end
+ end
+ else
+ local keys = sortedkeys(list)
+ for i=1,#keys do
+ local k = keys[i]
+ local v = list[k]
+ if p then
+ p = P(k)/v + p
+ else
+ p = P(k)/v
+ end
+ end
+ end
+ return p
+end
+
+-- inspect(lpeg.append({ a = "1", aa = "1", aaa = "1" } ,nil,true))
+-- inspect(lpeg.append({ ["degree celsius"] = "1", celsius = "1", degree = "1" } ,nil,true))
+
+-- function lpeg.exact_match(words,case_insensitive)
+-- local pattern = concat(words)
+-- if case_insensitive then
+-- local pattern = S(upper(characters)) + S(lower(characters))
+-- local list = { }
+-- for i=1,#words do
+-- list[lower(words[i])] = true
+-- end
+-- return Cmt(pattern^1, function(_,i,s)
+-- return list[lower(s)] and i
+-- end)
+-- else
+-- local pattern = S(concat(words))
+-- local list = { }
+-- for i=1,#words do
+-- list[words[i]] = true
+-- end
+-- return Cmt(pattern^1, function(_,i,s)
+-- return list[s] and i
+-- end)
+-- end
+-- end
+
+-- experiment:
+
+local function make(t)
+ local p
+-- for k, v in next, t do
+ for k, v in table.sortedhash(t) do
+ if not p then
+ if next(v) then
+ p = P(k) * make(v)
+ else
+ p = P(k)
+ end
+ else
+ if next(v) then
+ p = p + P(k) * make(v)
+ else
+ p = p + P(k)
+ end
+ end
+ end
+ return p
+end
+
+function lpeg.utfchartabletopattern(list)
+ local tree = { }
+ for i=1,#list do
+ local t = tree
+ for c in gmatch(list[i],".") do
+ if not t[c] then
+ t[c] = { }
+ end
+ t = t[c]
+ end
+ end
+ return make(tree)
+end
+
+-- inspect ( lpeg.utfchartabletopattern {
+-- utfchar(0x00A0), -- nbsp
+-- utfchar(0x2000), -- enquad
+-- utfchar(0x2001), -- emquad
+-- utfchar(0x2002), -- enspace
+-- utfchar(0x2003), -- emspace
+-- utfchar(0x2004), -- threeperemspace
+-- utfchar(0x2005), -- fourperemspace
+-- utfchar(0x2006), -- sixperemspace
+-- utfchar(0x2007), -- figurespace
+-- utfchar(0x2008), -- punctuationspace
+-- utfchar(0x2009), -- breakablethinspace
+-- utfchar(0x200A), -- hairspace
+-- utfchar(0x200B), -- zerowidthspace
+-- utfchar(0x202F), -- narrownobreakspace
+-- utfchar(0x205F), -- math thinspace
+-- } )
+
end -- of closure
@@ -1831,6 +2056,11 @@ function io.readstring(f,n,m)
return str
end
+--
+
+if not io.i_limiter then function io.i_limiter() end end -- dummy so we can test safely
+if not io.o_limiter then function io.o_limiter() end end -- dummy so we can test safely
+
end -- of closure
@@ -2061,7 +2291,7 @@ if not modules then modules = { } end modules ['l-os'] = {
-- maybe build io.flush in os.execute
local os = os
-local find, format, gsub, upper = string.find, string.format, string.gsub, string.upper
+local find, format, gsub, upper, gmatch = string.find, string.format, string.gsub, string.upper, string.gmatch
local concat = table.concat
local random, ceil = math.random, math.ceil
local rawget, rawset, type, getmetatable, setmetatable, tonumber = rawget, rawset, type, getmetatable, setmetatable, tonumber
@@ -2167,9 +2397,9 @@ os.type = os.type or (io.pathseparator == ";" and "windows") or "unix"
os.name = os.name or (os.type == "windows" and "mswin" ) or "linux"
if os.type == "windows" then
- os.libsuffix, os.binsuffix = 'dll', 'exe'
+ os.libsuffix, os.binsuffix, os.binsuffixes = 'dll', 'exe', { 'exe', 'cmd', 'bat' }
else
- os.libsuffix, os.binsuffix = 'so', ''
+ os.libsuffix, os.binsuffix, os.binsuffixes = 'so', '', { '' }
end
function os.launch(str)
@@ -2400,6 +2630,39 @@ function os.timezone(delta)
end
end
+local memory = { }
+
+local function which(filename)
+ local fullname = memory[filename]
+ if fullname == nil then
+ local suffix = file.suffix(filename)
+ local suffixes = suffix == "" and os.binsuffixes or { suffix }
+ for directory in gmatch(os.getenv("PATH"),"[^" .. io.pathseparator .."]+") do
+ local df = file.join(directory,filename)
+ for i=1,#suffixes do
+ local dfs = file.addsuffix(df,suffixes[i])
+ if io.exists(dfs) then
+ fullname = dfs
+ break
+ end
+ end
+ end
+ if not fullname then
+ fullname = false
+ end
+ memory[filename] = fullname
+ end
+ return fullname
+end
+
+os.which = which
+os.where = which
+
+-- print(os.which("inkscape.exe"))
+-- print(os.which("inkscape"))
+-- print(os.which("gs.exe"))
+-- print(os.which("ps2pdf"))
+
end -- of closure
@@ -2499,10 +2762,10 @@ end
local trick_1 = char(1)
local trick_2 = "^" .. trick_1 .. "/+"
-function file.join(...)
+function file.join(...) -- rather dirty
local lst = { ... }
local a, b = lst[1], lst[2]
- if a == "" then
+ if not a or a == "" then -- not a added
lst[1] = trick_1
elseif b and find(a,"^/+$") and find(b,"^/") then
lst[1] = ""
@@ -2523,6 +2786,15 @@ function file.join(...)
end
+-- We should be able to use:
+--
+-- function file.is_writable(name)
+-- local a = attributes(name) or attributes(dirname(name,"."))
+-- return a and sub(a.permissions,2,2) == "w"
+-- end
+--
+-- But after some testing Taco and I came up with:
+
function file.is_writable(name)
if lfs.isdir(name) then
name = name .. "/m_t_x_t_e_s_t.tmp"
@@ -2532,12 +2804,17 @@ function file.is_writable(name)
os.remove(name)
return true
end
- else
- local existing = lfs.isfile(name)
- f = io.open(name,"ab")
+ elseif lfs.isfile(name) then
+ local f = io.open(name,"ab")
if f then
f:close()
- if not existing then os.remove(name) end
+ return true
+ end
+ else
+ local f = io.open(name,"ab")
+ if f then
+ f:close()
+ os.remove(name)
return true
end
end
@@ -2594,7 +2871,7 @@ function file.collapsepath(str,anchor)
if element == '.' then
-- do nothing
elseif element == '..' then
- local n = i -1
+ local n = i - 1
while n > 0 do
local element = oldelements[n]
if element ~= '..' and element ~= '.' then
@@ -2690,10 +2967,42 @@ local path = C(((1-slash)^0 * slash)^0)
local suffix = period * C(P(1-period)^0 * P(-1))
local base = C((1-suffix)^0)
-local pattern = (drive + Cc("")) * (path + Cc("")) * (base + Cc("")) * (suffix + Cc(""))
+drive = drive + Cc("")
+path = path + Cc("")
+base = base + Cc("")
+suffix = suffix + Cc("")
-function file.splitname(str) -- returns drive, path, base, suffix
- return lpegmatch(pattern,str)
+local pattern_a = drive * path * base * suffix
+local pattern_b = path * base * suffix
+local pattern_c = C(drive * path) * C(base * suffix)
+
+function file.splitname(str,splitdrive)
+ if splitdrive then
+ return lpegmatch(pattern_a,str) -- returns drive, path, base, suffix
+ else
+ return lpegmatch(pattern_b,str) -- returns path, base, suffix
+ end
+end
+
+function file.nametotable(str,splitdrive) -- returns table
+ local path, drive, subpath, name, base, suffix = lpegmatch(pattern_c,str)
+ if splitdrive then
+ return {
+ path = path,
+ drive = drive,
+ subpath = subpath,
+ name = name,
+ base = base,
+ suffix = suffix,
+ }
+ else
+ return {
+ path = path,
+ name = name,
+ base = base,
+ suffix = suffix,
+ }
+ end
end
-- function test(t) for k, v in next, t do print(v, "=>", file.splitname(v)) end end
@@ -2828,12 +3137,19 @@ local nothing = Cc("")
local escaped = (plus / " ") + (percent * C(hexdigit * hexdigit) / tochar)
-- we assume schemes with more than 1 character (in order to avoid problems with windows disks)
+-- we also assume that when we have a scheme, we also have an authority
+
+local schemestr = Cs((escaped+(1-colon-slash-qmark-hash))^2)
+local authoritystr = Cs((escaped+(1- slash-qmark-hash))^0)
+local pathstr = Cs((escaped+(1- qmark-hash))^0)
+local querystr = Cs((escaped+(1- hash))^0)
+local fragmentstr = Cs((escaped+(1- endofstring))^0)
-local scheme = Cs((escaped+(1-colon-slash-qmark-hash))^2) * colon * slash * slash + nothing
-local authority = Cs((escaped+(1- slash-qmark-hash))^0) + nothing
-local path = slash * Cs((escaped+(1- qmark-hash))^0) + nothing
-local query = qmark * Cs((escaped+(1- hash))^0) + nothing
-local fragment = hash * Cs((escaped+(1- endofstring))^0) + nothing
+local scheme = schemestr * colon + nothing
+local authority = slash * slash * authoritystr + nothing
+local path = slash * pathstr + nothing
+local query = qmark * querystr + nothing
+local fragment = hash * fragmentstr + nothing
local validurl = scheme * authority * path * query * fragment
local parser = Ct(validurl)
@@ -2854,11 +3170,14 @@ local function split(str)
return (type(str) == "string" and lpegmatch(parser,str)) or str
end
+local isscheme = schemestr * colon * slash * slash -- this test also assumes authority
+
local function hasscheme(str)
- local scheme = lpegmatch(scheme,str) -- at least one character
- return scheme and scheme ~= ""
+ local scheme = lpegmatch(isscheme,str) -- at least one character
+ return scheme ~= "" and scheme or false
end
+
-- todo: cache them
local rootletter = R("az","AZ")
@@ -3007,8 +3326,6 @@ local attributes = lfs.attributes
local walkdir = lfs.dir
local isdir = lfs.isdir
local isfile = lfs.isfile
-local mkdir = lfs.mkdir
-local chdir = lfs.chdir
local currentdir = lfs.currentdir
-- handy
@@ -3056,13 +3373,13 @@ local function collectpattern(path,patt,recurse,result)
local ok, scanner
result = result or { }
if path == "/" then
- ok, scanner = xpcall(function() return walkdir(path..".") end, function() end) -- kepler safe
+ ok, scanner, first = xpcall(function() return walkdir(path..".") end, function() end) -- kepler safe
else
- ok, scanner = xpcall(function() return walkdir(path) end, function() end) -- kepler safe
+ ok, scanner, first = xpcall(function() return walkdir(path) end, function() end) -- kepler safe
end
if ok and type(scanner) == "function" then
if not find(path,"/$") then path = path .. '/' end
- for name in scanner do
+ for name in scanner, first do
local full = path .. name
local attr = attributes(full)
local mode = attr.mode
@@ -3245,7 +3562,7 @@ if onwindows then
pth = pth .. "/" .. s
end
if make_indeed and not isdir(pth) then
- mkdir(pth)
+ lfs.mkdir(pth)
end
end
return pth, (isdir(pth) == true)
@@ -3277,7 +3594,7 @@ else
pth = pth .. "/" .. s
end
if make_indeed and not first and not isdir(pth) then
- mkdir(pth)
+ lfs.mkdir(pth)
end
end
else
@@ -3285,7 +3602,7 @@ else
for s in gmatch(str,"[^/]+") do
pth = pth .. "/" .. s
if make_indeed and not isdir(pth) then
- mkdir(pth)
+ lfs.mkdir(pth)
end
end
end
@@ -3313,10 +3630,10 @@ if onwindows then
first, last = match(str,"^([a-zA-Z]:)(.*)$")
if first and not find(last,"^/") then
local d = currentdir()
- if chdir(first) then
+ if lfs.chdir(first) then
first = dir.current()
end
- chdir(d)
+ lfs.chdir(d)
end
end
if not first then
@@ -3381,10 +3698,6 @@ local type, tonumber = type, tonumber
boolean = boolean or { }
local boolean = boolean
--- function boolean.tonumber(b)
--- return b and 1 or 0 -- test and test and return or return
--- end
-
function boolean.tonumber(b)
if b then return 1 else return 0 end -- test and return or return
end
@@ -3545,7 +3858,7 @@ local function utf16_to_utf8_be(t)
if right then
local now = 256*left + right
if more > 0 then
- now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000
+ now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000 -- the 0x10000 smells wrong
more = 0
r = r + 1
result[r] = utfchar(now)
@@ -3573,7 +3886,7 @@ local function utf16_to_utf8_le(t)
if right then
local now = 256*right + left
if more > 0 then
- now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000
+ now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000 -- the 0x10000 smells wrong
more = 0
r = r + 1
result[r] = utfchar(now)
@@ -3590,14 +3903,14 @@ local function utf16_to_utf8_le(t)
return t
end
-local function utf32_to_utf8_be(str)
+local function utf32_to_utf8_be(t)
if type(t) == "string" then
- t = utfsplitlines(str)
+ t = utfsplitlines(t)
end
local result = { } -- we reuse result
for i=1,#t do
local r, more = 0, -1
- for a,b in bytepairs(str) do
+ for a,b in bytepairs(t[i]) do
if a and b then
if more < 0 then
more = 256*256*256*a + 256*256*b
@@ -3612,17 +3925,17 @@ local function utf32_to_utf8_be(str)
end
t[i] = concat(result,"",1,r)
end
- return result
+ return t
end
-local function utf32_to_utf8_le(str)
+local function utf32_to_utf8_le(t)
if type(t) == "string" then
- t = utfsplitlines(str)
+ t = utfsplitlines(t)
end
local result = { } -- we reuse result
for i=1,#t do
local r, more = 0, -1
- for a,b in bytepairs(str) do
+ for a,b in bytepairs(t[i]) do
if a and b then
if more < 0 then
more = 256*b + a
@@ -3637,7 +3950,7 @@ local function utf32_to_utf8_le(str)
end
t[i] = concat(result,"",1,r)
end
- return result
+ return t
end
unicode.utf32_to_utf8_be = utf32_to_utf8_be
@@ -3706,15 +4019,67 @@ end
local lpegmatch = lpeg.match
-local utftype = lpeg.patterns.utftype
+local patterns = lpeg.patterns
+local utftype = patterns.utftype
function unicode.filetype(data)
return data and lpegmatch(utftype,data) or "unknown"
end
+local toentities = lpeg.Cs (
+ (
+ patterns.utf8one
+ + (
+ patterns.utf8two
+ + patterns.utf8three
+ + patterns.utf8four
+ ) / function(s) local b = utfbyte(s) if b < 127 then return s else return format("&#%X;",b) end end
+ )^0
+)
+
+patterns.toentities = toentities
+
+function utf.toentities(str)
+ return lpegmatch(toentities,str)
+end
+
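+-- for instance (sketch):
+--
+-- utf.toentities("café")   -- "caf&#E9;"
+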
+local P, C, R, Cs = lpeg.P, lpeg.C, lpeg.R, lpeg.Cs
+
+local one = P(1)
+local two = C(1) * C(1)
+local four = C(R(utfchar(0xD8),utfchar(0xFF))) * C(1) * C(1) * C(1)
+
+local pattern = P("\254\255") * Cs( (
+ four / function(a,b,c,d)
+ local ab = 0x100 * byte(a) + byte(b)
+ local cd = 0x100 * byte(c) + byte(d)
+ return utfchar((ab-0xD800)*0x400 + (cd-0xDC00) + 0x10000)
+ end
+ + two / function(a,b)
+ return utfchar(byte(a)*256 + byte(b))
+ end
+ + one
+ )^1 )
+ + P("\255\254") * Cs( (
+ four / function(b,a,d,c)
+ local ab = 0x100 * byte(a) + byte(b)
+ local cd = 0x100 * byte(c) + byte(d)
+ return utfchar((ab-0xD800)*0x400 + (cd-0xDC00) + 0x10000)
+ end
+ + two / function(b,a)
+ return utfchar(byte(a)*256 + byte(b))
+ end
+ + one
+ )^1 )
+
+function string.toutf(s)
+ return lpegmatch(pattern,s) or s -- todo: utf32
+end
+
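+-- for instance (sketch, utf-16 be input with a bom):
+--
+-- string.toutf("\254\255\000A\000b")   -- "Ab"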
+
end -- of closure
do -- create closure to overcome 200 locals limit
@@ -3750,8 +4115,8 @@ if not math.sind then
end
if not math.odd then
- function math.odd (n) return n % 2 == 0 end
- function math.even(n) return n % 2 ~= 0 end
+ function math.odd (n) return n % 2 ~= 0 end
+ function math.even(n) return n % 2 == 0 end
end
@@ -3771,9 +4136,10 @@ utilities = utilities or {}
utilities.tables = utilities.tables or { }
local tables = utilities.tables
-local format, gmatch = string.format, string.gmatch
+local format, gmatch, rep = string.format, string.gmatch, string.rep
local concat, insert, remove = table.concat, table.insert, table.remove
local setmetatable, getmetatable, tonumber, tostring = setmetatable, getmetatable, tonumber, tostring
+local type, next, rawset, tonumber = type, next, rawset, tonumber
function tables.definetable(target) -- defines undefined tables
local composed, t, n = nil, { }, 0
@@ -3789,14 +4155,31 @@ function tables.definetable(target) -- defines undefined tables
return concat(t,"\n")
end
-function tables.accesstable(target)
- local t = _G
+function tables.accesstable(target,root)
+ local t = root or _G
for name in gmatch(target,"([^%.]+)") do
t = t[name]
+ if not t then
+ return
+ end
end
return t
end
+function tables.migratetable(target,v,root)
+ local t = root or _G
+ local names = string.split(target,".")
+ for i=1,#names-1 do
+ local name = names[i]
+ t[name] = t[name] or { }
+ t = t[name]
+ if not t then
+ return
+ end
+ end
+ t[names[#names]] = v
+end
+
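+-- for instance (sketch):
+--
+-- local t = { a = { b = { c = 1 } } }
+-- tables.accesstable("a.b.c",t)       -- 1
+-- tables.migratetable("a.x.y",2,t)    -- afterwards t.a.x.y == 2
+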
function tables.removevalue(t,value) -- todo: n
if value then
for i=1,#t do
@@ -3840,13 +4223,19 @@ end
-- experimental
-local function toxml(t,d,result)
+local function toxml(t,d,result,step)
for k, v in table.sortedpairs(t) do
if type(v) == "table" then
- result[#result+1] = format("%s<%s>",d,k)
- toxml(v,d.." ",result)
- result[#result+1] = format("%s</%s>",d,k)
- elseif tonumber(k) then
+ if type(k) == "number" then
+ result[#result+1] = format("%s<entry n='%s'>",d,k)
+ toxml(v,d..step,result,step)
+ result[#result+1] = format("%s</entry>",d,k)
+ else
+ result[#result+1] = format("%s<%s>",d,k)
+ toxml(v,d..step,result,step)
+ result[#result+1] = format("%s</%s>",d,k)
+ end
+ elseif type(k) == "number" then
result[#result+1] = format("%s<entry n='%s'>%s</entry>",d,k,v,k)
else
result[#result+1] = format("%s<%s>%s</%s>",d,k,tostring(v),k)
@@ -3854,17 +4243,56 @@ local function toxml(t,d,result)
end
end
-function table.toxml(t,name,nobanner)
+function table.toxml(t,name,nobanner,indent,spaces)
local noroot = name == false
local result = (nobanner or noroot) and { } or { "<?xml version='1.0' standalone='yes' ?>" }
+ local indent = rep(" ",indent or 0)
+ local spaces = rep(" ",spaces or 1)
if noroot then
- toxml( t, "", result)
+ toxml( t, indent, result, spaces)
else
- toxml( { [name or "root"] = t }, "", result)
+ toxml( { [name or "root"] = t }, indent, result, spaces)
end
return concat(result,"\n")
end
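+
+-- a sketch of what a call produces (indent and spaces count spaces):
+--
+-- table.toxml({ a = { "x", "y" } },"root")
+--
+-- <?xml version='1.0' standalone='yes' ?>
+-- <root>
+--  <a>
+--   <entry n='1'>x</entry>
+--   <entry n='2'>y</entry>
+--  </a>
+-- </root>
+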
+-- also experimental
+
+-- encapsulate(table,utilities.tables)
+-- encapsulate(table,utilities.tables,true)
+-- encapsulate(table,true)
+
+function tables.encapsulate(core,capsule,protect)
+ if type(capsule) ~= "table" then
+ protect = true
+ capsule = { }
+ end
+ for key, value in next, core do
+ if capsule[key] then
+ print(format("\ninvalid inheritance '%s' in '%s': %s",key,tostring(core)))
+ os.exit()
+ else
+ capsule[key] = value
+ end
+ end
+ if protect then
+ for key, value in next, core do
+ core[key] = nil
+ end
+ setmetatable(core, {
+ __index = capsule,
+ __newindex = function(t,key,value)
+ if capsule[key] then
+ print(format("\ninvalid overload '%s' in '%s'",key,tostring(core)))
+ os.exit()
+ else
+ rawset(t,key,value)
+ end
+ end
+ } )
+ end
+end
+
end -- of closure
@@ -3886,8 +4314,8 @@ local storage = utilities.storage
function storage.mark(t)
if not t then
- texio.write_nl("fatal error: storage '%s' cannot be marked",t)
- os.exit()
+ texio.write_nl("fatal error: storage cannot be marked")
+ return -- os.exit()
end
local m = getmetatable(t)
if not m then
@@ -3916,8 +4344,8 @@ end
function storage.checked(t)
if not t then
- texio.write_nl("fatal error: storage '%s' has not been allocated",t)
- os.exit()
+ texio.write_nl("fatal error: storage has not been allocated")
+ return -- os.exit()
end
return t
end
@@ -3946,13 +4374,13 @@ end
-- table namespace ?
-local function f_empty () return "" end -- t,k
-local function f_self (t,k) t[k] = k return k end
-local function f_ignore() end -- t,k,v
+local function f_empty () return "" end -- t,k
+local function f_self (t,k) t[k] = k return k end
+local function f_ignore() end -- t,k,v
-local t_empty = { __index = empty }
-local t_self = { __index = self }
-local t_ignore = { __newindex = ignore }
+local t_empty = { __index = f_empty }
+local t_self = { __index = f_self }
+local t_ignore = { __newindex = f_ignore }
function table.setmetatableindex(t,f)
local m = getmetatable(t)
@@ -3973,6 +4401,7 @@ function table.setmetatableindex(t,f)
setmetatable(t,{ __index = f })
end
end
+ return t
end
function table.setmetatablenewindex(t,f)
@@ -3990,6 +4419,7 @@ function table.setmetatablenewindex(t,f)
setmetatable(t,{ __newindex = f })
end
end
+ return t
end
function table.setmetatablecall(t,f)
@@ -3999,6 +4429,7 @@ function table.setmetatablecall(t,f)
else
setmetatable(t,{ __call = f })
end
+ return t
end
function table.setmetatablekey(t,key,value)
@@ -4008,6 +4439,7 @@ function table.setmetatablekey(t,key,value)
setmetatable(t,m)
end
m[key] = value
+ return t
end
function table.getmetatablekey(t,key,value)
@@ -4176,14 +4608,29 @@ utilities = utilities or {}
utilities.lua = utilities.lua or { }
utilities.report = logs and logs.reporter("system") or print
-function utilities.lua.compile(luafile,lucfile,cleanup,strip) -- defaults: cleanup=false strip=true
+local function stupidcompile(luafile,lucfile)
+ local data = io.loaddata(luafile)
+ if data and data ~= "" then
+ local chunk = loadstring(data)
+ data = chunk and string.dump(chunk) or nil -- dump wants a compiled chunk, not the source string
+ if data and data ~= "" then
+ io.savedata(lucfile,data)
+ end
+ end
+end
+
+function utilities.lua.compile(luafile,lucfile,cleanup,strip,fallback) -- defaults: cleanup=false strip=true
utilities.report("lua: compiling %s into %s",luafile,lucfile)
os.remove(lucfile)
local command = "-o " .. string.quoted(lucfile) .. " " .. string.quoted(luafile)
if strip ~= false then
command = "-s " .. command
end
- local done = os.spawn("texluac " .. command) == 0 or os.spawn("luac " .. command) == 0
+ local done = os.spawn("texluac " .. command) == 0 -- or os.spawn("luac " .. command) == 0
+ if not done and fallback then
+ utilities.report("lua: dumping %s into %s (unstripped)",luafile,lucfile)
+ stupidcompile(luafile,lucfile) -- maybe use the stripper we have elsewhere
+ cleanup = false -- better to first see how bad it is
+ end
if done and cleanup == true and lfs.isfile(lucfile) and lfs.isfile(luafile) then
utilities.report("lua: removing %s",luafile)
os.remove(luafile)
@@ -4210,9 +4657,9 @@ if not modules then modules = { } end modules ['util-prs'] = {
license = "see context related readme files"
}
-local P, R, V, C, Ct, Carg = lpeg.P, lpeg.R, lpeg.V, lpeg.C, lpeg.Ct, lpeg.Carg
+local P, R, V, C, Ct, Cs, Carg = lpeg.P, lpeg.R, lpeg.V, lpeg.C, lpeg.Ct, lpeg.Cs, lpeg.Carg
local lpegmatch = lpeg.match
-local concat, format, gmatch = table.concat, string.format, string.gmatch
+local concat, format, gmatch, find = table.concat, string.format, string.gmatch, string.find
local tostring, type, next = tostring, type, next
utilities = utilities or {}
@@ -4240,8 +4687,12 @@ local rbrace = P("}")
local nobrace = 1 - (lbrace+rbrace)
local nested = P { lbrace * (nobrace + V(1))^0 * rbrace }
local spaces = space^0
+local argument = Cs((lbrace/"") * ((nobrace + nested)^0) * (rbrace/""))
+local content = (1-P(-1))^0
-lpeg.patterns.nested = nested
+lpeg.patterns.nested = nested -- no capture
+lpeg.patterns.argument = argument -- argument after e.g. =
+lpeg.patterns.content = content -- rest after e.g =
local value = P(lbrace * C((nobrace + nested)^0) * rbrace) + C((nested + (1-comma))^0)
@@ -4322,9 +4773,15 @@ parsers.patterns.settings_to_array = pattern
-- we could use a weak table as cache
-function parsers.settings_to_array(str)
+function parsers.settings_to_array(str,strict)
if not str or str == "" then
return { }
+ elseif strict then
+ if find(str,"{") then
+ return lpegmatch(pattern,str)
+ else
+ return { str }
+ end
else
return lpegmatch(pattern,str)
end
@@ -4562,61 +5019,55 @@ local names = { }
-- one
local function hook()
- local f = getinfo(2,"f").func
- local n = getinfo(2,"Sn")
--- if n.what == "C" and n.name then print (n.namewhat .. ': ' .. n.name) end
+ local f = getinfo(2) -- "nS"
if f then
- local cf = counters[f]
- if cf == nil then
- counters[f] = 1
- names[f] = n
- else
- counters[f] = cf + 1
- end
- end
-end
-
-local function getname(func)
- local n = names[func]
- if n then
- if n.what == "C" then
- return n.name or '<anonymous>'
+ local n = "unknown"
+ if f.what == "C" then
+ n = f.name or '<anonymous>'
+ if not names[n] then
+ names[n] = format("%42s",n)
+ end
else
-- source short_src linedefined what name namewhat nups func
- local name = n.name or n.namewhat or n.what
- if not name or name == "" then name = "?" end
- return format("%s : %s : %s", n.short_src or "unknown source", n.linedefined or "--", name)
+ n = f.name or f.namewhat or f.what
+ if not n or n == "" then
+ n = "?"
+ end
+ if not names[n] then
+ names[n] = format("%42s : % 5i : %s",n,f.linedefined or 0,f.short_src or "unknown source")
+ end
end
- else
- return "unknown"
+ counters[n] = (counters[n] or 0) + 1
end
end
-function debugger.showstats(printer,threshold)
+function debugger.showstats(printer,threshold) -- hm, something has changed, this is rubbish now
printer = printer or texio.write or print
threshold = threshold or 0
local total, grandtotal, functions = 0, 0, 0
- printer("\n") -- ugly but ok
- -- table.sort(counters)
- for func, count in next, counters do
- if count > threshold then
- local name = getname(func)
- if not find(name,"for generator") then
- printer(format("%8i %s", count, name))
- total = total + count
- end
+ local dataset = { }
+ for name, count in next, counters do
+ dataset[#dataset+1] = { name, count }
+ end
+ table.sort(dataset,function(a,b) return a[2] == b[2] and b[1] > a[1] or a[2] > b[2] end)
+ for i=1,#dataset do
+ local d = dataset[i]
+ local name = d[1]
+ local count = d[2]
+ if count > threshold and not find(name,"for generator") then -- move up
+ printer(format("%8i %s\n", count, names[name]))
+ total = total + count
end
grandtotal = grandtotal + count
functions = functions + 1
end
- printer(format("functions: %s, total: %s, grand total: %s, threshold: %s\n", functions, total, grandtotal, threshold))
+ printer("\n")
+ printer(format("functions : % 10i\n", functions))
+ printer(format("total : % 10i\n", total))
+ printer(format("grand total: % 10i\n", grandtotal))
+ printer(format("threshold : % 10i\n", threshold))
end
--- two
-
-
--- rest
-
function debugger.savestats(filename,threshold)
local f = io.open(filename,'w')
if f then
@@ -4638,6 +5089,7 @@ end
local is_node = node and node.is_node
+local is_lpeg = lpeg and lpeg.type
function inspect(i) -- global function
local ti = type(i)
@@ -4645,11 +5097,30 @@ function inspect(i) -- global function
table.print(i,"table")
elseif is_node and is_node(i) then
table.print(nodes.astable(i),tostring(i))
+ elseif is_lpeg and is_lpeg(i) then
+ lpeg.print(i)
else
print(tostring(i))
end
end
+-- from the lua book:
+
+function traceback()
+ local level = 1
+ while true do
+ local info = debug.getinfo(level, "Sl")
+ if not info then
+ break
+ elseif info.what == "C" then
+ print(format("%3i : C function",level))
+ else
+ print(format("%3i : [%s]:%d",level,info.short_src,info.currentline))
+ end
+ level = level + 1
+ end
+end
+
end -- of closure
@@ -4668,7 +5139,7 @@ if not modules then modules = { } end modules ['trac-inf'] = {
-- get warnings about assignments. This is more efficient than using rawset
-- and rawget.
-local format = string.format
+local format, lower = string.format, string.lower
local clock = os.gettimeofday or os.clock -- should go in environment
local write_nl = texio.write_nl
@@ -4770,10 +5241,10 @@ function statistics.show(reporter)
-- this code will move
local register = statistics.register
register("luatex banner", function()
- return string.lower(status.banner)
+ return lower(status.banner)
end)
register("control sequences", function()
- return format("%s of %s", status.cs_count, status.hash_size+status.hash_extra)
+ return format("%s of %s + %s", status.cs_count, status.hash_size,status.hash_extra)
end)
register("callbacks", function()
local total, indirect = status.callbacks or 0, status.indirect_callbacks or 0
@@ -4847,7 +5318,7 @@ end
function commands.elapsedtime(name)
stoptiming(name or "whatever")
- tex.sprint(elapsedtime(name or "whatever"))
+ context(elapsedtime(name or "whatever"))
end
@@ -4931,6 +5402,10 @@ local function set(t,what,newvalue)
if type(what) ~= "table" then
return
end
+ if not done then -- catch ... why not set?
+ done = { }
+ t.done = done
+ end
for w, value in next, what do
if value == "" then
value = newvalue
@@ -5167,6 +5642,8 @@ end
if texconfig then
+ -- this happens too late in ini mode but that is no problem
+
local function set(k,v)
v = tonumber(v)
if v then
@@ -5237,7 +5714,7 @@ local report, subreport, status, settarget, setformats, settranslations
local direct, subdirect, writer, pushtarget, poptarget
-if tex and tex.jobname or tex.formatname then
+if tex and (tex.jobname or tex.formatname) then
local valueiskey = { __index = function(t,k) t[k] = k return k end } -- will be helper
@@ -5252,6 +5729,10 @@ if tex and tex.jobname or tex.formatname then
write_nl(target,...)
end
+ newline = function()
+ write_nl(target,"\n")
+ end
+
report = function(a,b,c,...)
if c then
write_nl(target,format("%-15s > %s\n",translations[a],format(formats[b],c,...)))
@@ -5357,6 +5838,10 @@ else
writer = write_nl
+ newline = function()
+ write_nl("\n")
+ end
+
report = function(a,b,c,...)
if c then
write_nl(format("%-15s | %s",a,format(b,c,...)))
@@ -5416,6 +5901,7 @@ logs.settranslations = settranslations
logs.direct = direct
logs.subdirect = subdirect
logs.writer = writer
+logs.newline = newline
-- installer
@@ -5585,21 +6071,58 @@ local real, user, sub
function logs.start_page_number()
real, user, sub = texcount.realpageno, texcount.userpageno, texcount.subpageno
+-- real, user, sub = 0, 0, 0
end
-function logs.stop_page_number()
- if real > 0 then
- if user > 0 then
- if sub > 0 then
- report_pages("flushing realpage %s, userpage %s, subpage %s",real,user,sub)
+local timing = false
+local starttime = nil
+local lasttime = nil
+
+trackers.register("pages.timing", function(v) -- only for myself (diagnostics)
+ starttime = os.clock()
+ timing = true
+end)
+
+function logs.stop_page_number() -- the first page can include the initialization so we omit it from the average
+ if timing then
+ local elapsed, average
+ local stoptime = os.clock()
+ if not lasttime or real < 2 then
+ elapsed = stoptime
+ average = stoptime
+ starttime = stoptime
+ else
+ elapsed = stoptime - lasttime
+ average = (stoptime - starttime) / (real - 1)
+ end
+ lasttime = stoptime
+ if real > 0 then
+ if user > 0 then
+ if sub > 0 then
+ report_pages("flushing realpage %s, userpage %s, subpage %s, time %0.04f / %0.04f",real,user,sub,elapsed,average)
+ else
+ report_pages("flushing realpage %s, userpage %s, time %0.04f / %0.04f",real,user,elapsed,average)
+ end
else
- report_pages("flushing realpage %s, userpage %s",real,user)
+ report_pages("flushing realpage %s, time %0.04f / %0.04f",real,elapsed,average)
end
else
- report_pages("flushing realpage %s",real)
+ report_pages("flushing page, time %0.04f / %0.04f",elapsed,average)
end
else
- report_pages("flushing page")
+ if real > 0 then
+ if user > 0 then
+ if sub > 0 then
+ report_pages("flushing realpage %s, userpage %s, subpage %s",real,user,sub)
+ else
+ report_pages("flushing realpage %s, userpage %s",real,user)
+ end
+ else
+ report_pages("flushing realpage %s",real)
+ end
+ else
+ report_pages("flushing page")
+ end
end
logs.flush()
end
@@ -5681,6 +6204,13 @@ local function reportbanner(t)
end
end
+local function reportversion(t)
+ local banner = t.banner
+ if banner then
+ t.report(banner)
+ end
+end
+
local function reporthelp(t,...)
local helpinfo = t.helpinfo
if type(helpinfo) == "string" then
@@ -5707,6 +6237,7 @@ function logs.application(t)
t.report = logs.reporter(t.name)
t.help = function(...) reportbanner(t) ; reporthelp(t,...) ; reportinfo(t) end
t.identify = function() reportbanner(t) end
+ t.version = function() reportversion(t) end
return t
end
@@ -5772,6 +6303,11 @@ else
end
end
+-- do we still need io.flush then?
+
+io.stdout:setvbuf('no')
+io.stderr:setvbuf('no')
+
end -- of closure
@@ -6277,6 +6813,10 @@ if not modules then modules = { } end modules ['lxml-tab'] = {
-- stripping spaces from e.g. cont-en.xml saves .2 sec runtime so it's not worth the
-- trouble
+-- todo: when serializing optionally remap named entities to hex (if known in char-ent.lua)
+-- maybe when letter -> utf, else name .. then we need an option to the serializer .. a bit
+-- of work so we delay this until we clean up
+
local trace_entities = false trackers.register("xml.entities", function(v) trace_entities = v end)
local report_xml = logs and logs.reporter("xml","core") or function(...) print(format(...)) end
@@ -6301,10 +6841,11 @@ xml = xml or { }
local xml = xml
+local utf = unicode.utf8
local concat, remove, insert = table.concat, table.remove, table.insert
local type, next, setmetatable, getmetatable, tonumber = type, next, setmetatable, getmetatable, tonumber
local format, lower, find, match, gsub = string.format, string.lower, string.find, string.match, string.gsub
-local utfchar = unicode.utf8.char
+local utfchar, utffind, utfgsub = utf.char, utf.find, utf.gsub
local lpegmatch = lpeg.match
local P, S, R, C, V, C, Cs = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.V, lpeg.C, lpeg.Cs
@@ -6417,9 +6958,22 @@ element.</p>
local nsremap, resolvens = xml.xmlns, xml.resolvens
-local stack, top, dt, at, xmlns, errorstr, entities = { }, { }, { }, { }, { }, nil, { }
-local strip, cleanup, utfize, resolve, resolve_predefined, unify_predefined = false, false, false, false, false, false
-local dcache, hcache, acache = { }, { }, { }
+local stack = { }
+local top = { }
+local dt = { }
+local at = { }
+local xmlns = { }
+local errorstr = nil
+local entities = { }
+local strip = false
+local cleanup = false
+local utfize = false
+local resolve_predefined = false
+local unify_predefined = false
+
+local dcache = { }
+local hcache = { }
+local acache = { }
local mt = { }
@@ -6585,7 +7139,7 @@ local predefined_unified = {
[42] = "&quot;",
[47] = "&apos;",
[74] = "&lt;",
- [76] = "&gr;",
+ [76] = "&gt;",
}
local predefined_simplified = {
@@ -6596,6 +7150,58 @@ local predefined_simplified = {
[76] = ">", gt = ">",
}
+local nofprivates = 0xF0000 -- shared but seldom used
+
+local privates_u = { -- unescaped
+ [ [[&]] ] = "&amp;",
+ [ [["]] ] = "&quot;",
+ [ [[']] ] = "&apos;",
+ [ [[<]] ] = "&lt;",
+ [ [[>]] ] = "&gt;",
+}
+
+local privates_p = {
+}
+
+local privates_n = {
+ -- keeps track of defined ones
+}
+
+local function escaped(s)
+ if s == "" then
+ return ""
+ else -- if utffind(s,privates_u) then
+ return (utfgsub(s,".",privates_u))
+ -- else
+ -- return s
+ end
+end
+
+local function unescaped(s)
+ local p = privates_n[s]
+ if not p then
+ nofprivates = nofprivates + 1
+ p = utfchar(nofprivates)
+ privates_n[s] = p
+ s = "&" .. s .. ";" -- todo: use char-ent to map to hex
+ privates_u[p] = s
+ privates_p[p] = s
+ end
+ return p
+end
+
+local function unprivatized(s,resolve)
+ if s == "" then
+ return ""
+ else
+ return (utfgsub(s,".",privates_p))
+ end
+end
+
+xml.privatetoken = unescaped
+xml.unprivatized = unprivatized
+xml.privatecodes = privates_n
+
local function handle_hex_entity(str)
local h = hcache[str]
if not h then
@@ -6637,7 +7243,7 @@ local function handle_dec_entity(str)
if not n then
report_xml("utfize, ignoring dec entity &#%s;",str)
elseif trace_entities then
- report_xml("utfize, converting dec entity &#%s; into %s",str,h)
+ report_xml("utfize, converting dec entity &#%s; into %s",str,d)
end
else
if trace_entities then
@@ -6658,34 +7264,44 @@ local function handle_any_entity(str)
if not a then
a = resolve_predefined and predefined_simplified[str]
if a then
- -- one of the predefined
- elseif type(resolve) == "function" then
- a = resolve(str) or entities[str]
- else
- a = entities[str]
- end
- if a then
if trace_entities then
- report_xml("resolved entity &%s; -> %s (internal)",str,a)
+ report_xml("resolved entity &%s; -> %s (predefined)",str,a)
end
- a = lpegmatch(parsedentity,a) or a
else
- local unknown_any_entity = placeholders.unknown_any_entity
- if unknown_any_entity then
- a = unknown_any_entity(str) or ""
+ if type(resolve) == "function" then
+ a = resolve(str) or entities[str]
+ else
+ a = entities[str]
end
if a then
+ if type(a) == "function" then
+ if trace_entities then
+ report_xml("expanding entity &%s; (function)",str)
+ end
+ a = a(str) or ""
+ end
+ a = lpegmatch(parsedentity,a) or a -- for nested
if trace_entities then
- report_xml("resolved entity &%s; -> %s (external)",str,a)
+ report_xml("resolved entity &%s; -> %s (internal)",str,a)
end
else
- if trace_entities then
- report_xml("keeping entity &%s;",str)
+ local unknown_any_entity = placeholders.unknown_any_entity
+ if unknown_any_entity then
+ a = unknown_any_entity(str) or ""
end
- if str == "" then
- a = "&error;"
+ if a then
+ if trace_entities then
+ report_xml("resolved entity &%s; -> %s (external)",str,a)
+ end
else
- a = "&" .. str .. ";"
+ if trace_entities then
+ report_xml("keeping entity &%s;",str)
+ end
+ if str == "" then
+ a = "&error;"
+ else
+ a = "&" .. str .. ";"
+ end
end
end
end
@@ -6700,18 +7316,25 @@ local function handle_any_entity(str)
else
local a = acache[str]
if not a then
- if trace_entities then
- report_xml("found entity &%s;",str)
- end
a = resolve_predefined and predefined_simplified[str]
if a then
-- one of the predefined
acache[str] = a
+ if trace_entities then
+ report_xml("entity &%s; becomes %s",str,tostring(a))
+ end
elseif str == "" then
+ if trace_entities then
+ report_xml("invalid entity &%s;",str)
+ end
a = "&error;"
acache[str] = a
else
- a = "&" .. str .. ";"
+ if trace_entities then
+ report_xml("entity &%s; is made private",str)
+ end
+ -- a = "&" .. str .. ";"
+ a = unescaped(str)
acache[str] = a
end
end
@@ -6761,7 +7384,7 @@ local value = (squote * Cs((entity + (1 - squote))^0) * squote) + (dq
 local endofattributes = slash * close + close -- recovery of flaky html
local whatever = space * name * optionalspace * equal
-local wrongvalue = C(P(1-whatever-close)^1 + P(1-close)^1) / attribute_value_error
+----- wrongvalue = C(P(1-whatever-close)^1 + P(1-close)^1) / attribute_value_error
----- wrongvalue = C(P(1-whatever-endofattributes)^1 + P(1-endofattributes)^1) / attribute_value_error
----- wrongvalue = C(P(1-space-endofattributes)^1) / attribute_value_error
local wrongvalue = Cs(P(entity + (1-space-endofattributes))^1) / attribute_value_error
@@ -6796,6 +7419,8 @@ local function normalentity(k,v ) entities[k] = v end
local function systementity(k,v,n) entities[k] = v end
local function publicentity(k,v,n) entities[k] = v end
+-- todo: separate dtd parser
+
local begindoctype = open * P("!DOCTYPE")
local enddoctype = close
local beginset = P("[")
@@ -6803,17 +7428,22 @@ local endset = P("]")
local doctypename = C((1-somespace-close)^0)
local elementdoctype = optionalspace * P("<!ELEMENT") * (1-close)^0 * close
+local basiccomment = begincomment * ((1 - endcomment)^0) * endcomment
+
local normalentitytype = (doctypename * somespace * value)/normalentity
local publicentitytype = (doctypename * somespace * P("PUBLIC") * somespace * value)/publicentity
local systementitytype = (doctypename * somespace * P("SYSTEM") * somespace * value * somespace * P("NDATA") * somespace * doctypename)/systementity
local entitydoctype = optionalspace * P("<!ENTITY") * somespace * (systementitytype + publicentitytype + normalentitytype) * optionalspace * close
-local doctypeset = beginset * optionalspace * P(elementdoctype + entitydoctype + space)^0 * optionalspace * endset
+-- we accept comments in doctypes
+
+local doctypeset = beginset * optionalspace * P(elementdoctype + entitydoctype + basiccomment + space)^0 * optionalspace * endset
local definitiondoctype= doctypename * somespace * doctypeset
local publicdoctype = doctypename * somespace * P("PUBLIC") * somespace * value * somespace * value * somespace * doctypeset
local systemdoctype = doctypename * somespace * P("SYSTEM") * somespace * value * somespace * doctypeset
local simpledoctype = (1-close)^1 -- * balanced^0
local somedoctype = C((somespace * (publicdoctype + systemdoctype + definitiondoctype + simpledoctype) * optionalspace)^0)
+local somedoctype = C((somespace * (publicdoctype + systemdoctype + definitiondoctype + simpledoctype) * optionalspace)^0)
local instruction = (spacing * begininstruction * someinstruction * endinstruction) / function(...) add_special("@pi@",...) end
local comment = (spacing * begincomment * somecomment * endcomment ) / function(...) add_special("@cm@",...) end
@@ -6845,17 +7475,30 @@ local grammar_unparsed_text = P { "preamble",
children = unparsedtext + V("parent") + emptyelement + comment + cdata + instruction,
}
--- maybe we will add settinsg to result as well
+-- maybe we will add settings to result as well
-local function xmlconvert(data, settings)
- settings = settings or { } -- no_root strip_cm_and_dt given_entities parent_root error_handler
- strip = settings.strip_cm_and_dt
- utfize = settings.utfize_entities
- resolve = settings.resolve_entities
+local function _xmlconvert_(data, settings)
+ settings = settings or { } -- no_root strip_cm_and_dt given_entities parent_root error_handler
+ --
+ strip = settings.strip_cm_and_dt
+ utfize = settings.utfize_entities
+ resolve = settings.resolve_entities
resolve_predefined = settings.resolve_predefined_entities -- in case we have escaped entities
- unify_predefined = settings.unify_predefined_entities -- &#038; -> &amp;
- cleanup = settings.text_cleanup
- stack, top, at, xmlns, errorstr, entities = { }, { }, { }, { }, nil, settings.entities or { }
+ unify_predefined = settings.unify_predefined_entities -- &#038; -> &amp;
+ cleanup = settings.text_cleanup
+ entities = settings.entities or { }
+ --
+ if utfize == nil then
+ settings.utfize_entities = true
+ utfize = true
+ end
+ if resolve_predefined == nil then
+ settings.resolve_predefined_entities = true
+ resolve_predefined = true
+ end
+ --
+ --
+ stack, top, at, xmlns, errorstr = { }, { }, { }, { }, nil
acache, hcache, dcache = { }, { }, { } -- not stored
reported_attribute_errors = { }
if settings.parent_root then
@@ -6893,7 +7536,7 @@ local function xmlconvert(data, settings)
else
errorhandler = errorhandler or xml.errorhandler
if errorhandler then
- xml.errorhandler("load",errorstr)
+ xml.errorhandler(format("load error: %s",errorstr))
end
end
else
@@ -6907,7 +7550,7 @@ local function xmlconvert(data, settings)
local v = rdt[k]
if type(v) == "table" and not v.special then -- always table -)
result.ri = k -- rootindex
-v.__p__ = result -- new, experiment, else we cannot go back to settings, we need to test this !
+ v.__p__ = result -- new, experiment, else we cannot go back to settings, we need to test this !
break
end
end
@@ -6915,16 +7558,42 @@ v.__p__ = result -- new, experiment, else we cannot go back to settings, we nee
if errorstr and errorstr ~= "" then
result.error = true
end
+ result.statistics = {
+ entities = {
+ decimals = dcache,
+ hexadecimals = hcache,
+ names = acache,
+ }
+ }
+ strip, utfize, resolve, resolve_predefined = nil, nil, nil, nil
+ unify_predefined, cleanup, entities = nil, nil, nil
+ stack, top, at, xmlns, errorstr = nil, nil, nil, nil, nil
+ acache, hcache, dcache = nil, nil, nil
+ reported_attribute_errors, mt, errorhandler = nil, nil, nil
return result
end
+-- Because we can have a crash (stack issues) with faulty xml, we wrap this one
+-- in a protector:
+
+function xmlconvert(data,settings)
+ local ok, result = pcall(function() return _xmlconvert_(data,settings) end)
+ if ok then
+ return result
+ else
+ return _xmlconvert_("")
+ end
+end
+
xml.convert = xmlconvert
-function xml.inheritedconvert(data,xmldata)
+function xml.inheritedconvert(data,xmldata) -- xmldata is parent
local settings = xmldata.settings
- settings.parent_root = xmldata -- to be tested
+ if settings then
+ settings.parent_root = xmldata -- to be tested
+ end
-- settings.no_root = true
- local xc = xmlconvert(data,settings)
+ local xc = xmlconvert(data,settings) -- hm, we might need to locate settings
-- xc.settings = nil
-- xc.entities = nil
-- xc.special = nil
@@ -6953,7 +7622,7 @@ function xml.is_valid(root)
return root and not root.error
end
-xml.errorhandler = report
+xml.errorhandler = report_xml
--[[ldx--
<p>We cannot load an <l n='lpeg'/> from a filehandle so we need to load
@@ -7032,18 +7701,15 @@ alternative.</p>
function xml.checkbom(root) -- can be made faster
if root.ri then
- local dt, found = root.dt, false
+ local dt = root.dt
for k=1,#dt do
local v = dt[k]
if type(v) == "table" and v.special and v.tg == "@pi@" and find(v.dt[1],"xml.*version=") then
- found = true
- break
+ return
end
end
- if not found then
- insert(dt, 1, { special=true, ns="", tg="@pi@", dt = { "xml version='1.0' standalone='yes'"} } )
- insert(dt, 2, "\n" )
- end
+ insert(dt, 1, { special=true, ns="", tg="@pi@", dt = { "xml version='1.0' standalone='yes'"} } )
+ insert(dt, 2, "\n" )
end
end
@@ -7054,14 +7720,14 @@ and then handle the lot.</p>
-- new experimental reorganized serialize
-local function verbose_element(e,handlers)
+local function verbose_element(e,handlers) -- options
local handle = handlers.handle
local serialize = handlers.serialize
local ens, etg, eat, edt, ern = e.ns, e.tg, e.at, e.dt, e.rn
local ats = eat and next(eat) and { }
if ats then
for k,v in next, eat do
- ats[#ats+1] = format('%s=%q',k,v)
+ ats[#ats+1] = format('%s=%q',k,escaped(v))
end
end
if ern and trace_entities and ern ~= ens then
@@ -7077,7 +7743,7 @@ local function verbose_element(e,handlers)
for i=1,#edt do
local e = edt[i]
if type(e) == "string" then
- handle(e)
+ handle(escaped(e))
else
serialize(e,handlers)
end
@@ -7098,11 +7764,11 @@ local function verbose_element(e,handlers)
handle("<",etg,">")
end
for i=1,#edt do
- local ei = edt[i]
- if type(ei) == "string" then
- handle(ei)
+ local e = edt[i]
+ if type(e) == "string" then
+ handle(escaped(e)) -- option: hexify escaped entities
else
- serialize(ei,handlers)
+ serialize(e,handlers)
end
end
handle("</",etg,">")
@@ -7137,7 +7803,7 @@ local function verbose_root(e,handlers)
end
local function verbose_text(e,handlers)
- handlers.handle(e)
+ handlers.handle(escaped(e))
end
local function verbose_document(e,handlers)
@@ -7265,20 +7931,33 @@ local result
local xmlfilehandler = newhandlers {
name = "file",
- initialize = function(name) result = io.open(name,"wb") return result end,
- finalize = function() result:close() return true end,
- handle = function(...) result:write(...) end,
+ initialize = function(name)
+ result = io.open(name,"wb")
+ return result
+ end,
+ finalize = function()
+ result:close()
+ return true
+ end,
+ handle = function(...)
+ result:write(...)
+ end,
}
-- no checking on writeability here but not faster either
--
-- local xmlfilehandler = newhandlers {
--- initialize = function(name) io.output(name,"wb") return true end,
--- finalize = function() io.close() return true end,
+-- initialize = function(name)
+-- io.output(name,"wb")
+-- return true
+-- end,
+-- finalize = function()
+-- io.close()
+-- return true
+-- end,
-- handle = io.write,
-- }
-
function xml.save(root,name)
serialize(root,xmlfilehandler,name)
end
@@ -7287,28 +7966,34 @@ local result
local xmlstringhandler = newhandlers {
name = "string",
- initialize = function() result = { } return result end,
- finalize = function() return concat(result) end,
- handle = function(...) result[#result+1] = concat { ... } end
+ initialize = function()
+ result = { }
+ return result
+ end,
+ finalize = function()
+ return concat(result)
+ end,
+ handle = function(...)
+ result[#result+1] = concat { ... }
+ end,
}
local function xmltostring(root) -- 25% overhead due to collecting
- if root then
- if type(root) == 'string' then
- return root
- else -- if next(root) then -- next is faster than type (and >0 test)
- return serialize(root,xmlstringhandler) or ""
- end
+ if not root then
+ return ""
+ elseif type(root) == 'string' then
+ return root
+ else -- if next(root) then -- next is faster than type (and >0 test)
+ return serialize(root,xmlstringhandler) or ""
end
- return ""
end
-local function xmltext(root) -- inline
+local function __tostring(root) -- inline
return (root and xmltostring(root)) or ""
end
initialize_mt = function(root) -- redefinition
- mt = { __tostring = xmltext, __index = root }
+ mt = { __tostring = __tostring, __index = root }
end
xml.defaulthandlers = handlers
@@ -7428,7 +8113,7 @@ xml.tocdata(e,"error")
--ldx]]--
function xml.tocdata(e,wrapper)
- local whatever = xmltostring(e.dt)
+ local whatever = type(e) == "table" and xmltostring(e.dt) or e or ""
if wrapper then
whatever = format("<%s>%s</%s>",wrapper,whatever,wrapper)
end
@@ -7620,8 +8305,8 @@ apply_axis['child'] = function(list)
c = c + 1
collected[c] = dk
dk.ni = k -- refresh
- en = en + 1
- dk.ei = en
+ en = en + 1
+ dk.ei = en
end
end
ll.en = en
@@ -7649,6 +8334,7 @@ local function collect(list,collected,c)
end
return c
end
+
apply_axis['descendant'] = function(list)
local collected, c = { }, 0
for l=1,#list do
@@ -7940,6 +8626,12 @@ local lp_or = P("|") / " or "
local lp_and = P("&") / " and "
local lp_builtin = P (
+ P("text") / "(ll.dt[1] or '')" + -- fragile
+ P("content") / "ll.dt" +
+ -- P("name") / "(ll.ns~='' and ll.ns..':'..ll.tg)" +
+ P("name") / "((ll.ns~='' and ll.ns..':'..ll.tg) or ll.tg)" +
+ P("tag") / "ll.tg" +
+ P("position") / "l" + -- is element in finalizer
P("firstindex") / "1" +
P("lastindex") / "(#ll.__p__.dt or 1)" +
P("firstelement") / "1" +
@@ -7947,15 +8639,11 @@ local lp_builtin = P (
P("first") / "1" +
P("last") / "#list" +
P("rootposition") / "order" +
- P("position") / "l" + -- is element in finalizer
P("order") / "order" +
P("element") / "(ll.ei or 1)" +
P("index") / "(ll.ni or 1)" +
P("match") / "(ll.mi or 1)" +
- P("text") / "(ll.dt[1] or '')" +
- -- P("name") / "(ll.ns~='' and ll.ns..':'..ll.tg)" +
- P("name") / "((ll.ns~='' and ll.ns..':'..ll.tg) or ll.tg)" +
- P("tag") / "ll.tg" +
+ -- P("namespace") / "ll.ns" +
P("ns") / "ll.ns"
) * ((spaces * P("(") * spaces * P(")"))/"")
@@ -7979,11 +8667,11 @@ local lp_function = C(R("az","AZ","__")^1) * P("(") / function(t) -- todo: bett
end
end
-local lparent = lpeg.P("(")
-local rparent = lpeg.P(")")
+local lparent = P("(")
+local rparent = P(")")
local noparent = 1 - (lparent+rparent)
-local nested = lpeg.P{lparent * (noparent + lpeg.V(1))^0 * rparent}
-local value = lpeg.P(lparent * lpeg.C((noparent + nested)^0) * rparent) -- lpeg.P{"("*C(((1-S("()"))+V(1))^0)*")"}
+local nested = P{lparent * (noparent + V(1))^0 * rparent}
+local value = P(lparent * C((noparent + nested)^0) * rparent) -- P{"("*C(((1-S("()"))+V(1))^0)*")"}
local lp_child = Cc("expr.child(ll,'") * R("az","AZ","--","__")^1 * Cc("')")
local lp_number = S("+-") * R("09")^1
@@ -8135,7 +8823,10 @@ local special_1 = P("*") * Cc(register_auto_descendant) * Cc(register_all_nodes
local special_2 = P("/") * Cc(register_auto_self)
local special_3 = P("") * Cc(register_auto_self)
-local pathparser = Ct { "patterns", -- can be made a bit faster by moving pattern outside
+local no_nextcolon = P(-1) + #(1-P(":")) -- newer lpeg needs the P(-1)
+local no_nextlparent = P(-1) + #(1-P("(")) -- newer lpeg needs the P(-1)
+
+local pathparser = Ct { "patterns", -- can be made a bit faster by moving some patterns outside
patterns = spaces * V("protocol") * spaces * (
( V("special") * spaces * P(-1) ) +
@@ -8164,10 +8855,8 @@ local pathparser = Ct { "patterns", -- can be made a bit faster by moving patter
shortcuts = V("shortcuts_a") * (spaces * "/" * spaces * V("shortcuts_a"))^0,
s_descendant_or_self = (P("***/") + P("/")) * Cc(register_descendant_or_self), --- *** is a bonus
- -- s_descendant_or_self = P("/") * Cc(register_descendant_or_self),
s_descendant = P("**") * Cc(register_descendant),
- s_child = P("*") * #(1-P(":")) * Cc(register_child ),
--- s_child = P("*") * #(P("/")+P(-1)) * Cc(register_child ),
+ s_child = P("*") * no_nextcolon * Cc(register_child ),
s_parent = P("..") * Cc(register_parent ),
s_self = P("." ) * Cc(register_self ),
s_root = P("^^") * Cc(register_root ),
@@ -8194,13 +8883,13 @@ local pathparser = Ct { "patterns", -- can be made a bit faster by moving patter
expressions = expression / register_expression,
letters = R("az")^1,
- name = (1-lpeg.S("/[]()|:*!"))^1,
+ name = (1-S("/[]()|:*!"))^1, -- make inline
negate = P("!") * Cc(false),
nodefunction = V("negate") + P("not") * Cc(false) + Cc(true),
nodetest = V("negate") + Cc(true),
nodename = (V("negate") + Cc(true)) * spaces * ((V("wildnodename") * P(":") * V("wildnodename")) + (Cc(false) * V("wildnodename"))),
- wildnodename = (C(V("name")) + P("*") * Cc(false)) * #(1-P("(")),
+ wildnodename = (C(V("name")) + P("*") * Cc(false)) * no_nextlparent,
nodeset = spaces * Ct(V("nodename") * (spaces * P("|") * spaces * V("nodename"))^0) * spaces,
finalizer = (Cb("protocol") * P("/")^-1 * C(V("name")) * arguments * P(-1)) / register_finalizer,
@@ -8364,7 +9053,7 @@ local function profiled_apply(list,parsed,nofparsed,order)
elseif kind == "expression" then
collected = apply_expression(collected,pi.evaluator,order)
elseif kind == "finalizer" then
- collected = pi.finalizer(collected)
+ collected = pi.finalizer(collected) -- no check on # here
p.matched = p.matched + 1
p.finalized = p.finalized + 1
return collected
@@ -8497,9 +9186,9 @@ end
expressions.child = function(e,pattern)
return applylpath(e,pattern) -- todo: cache
end
-expressions.count = function(e,pattern)
+expressions.count = function(e,pattern) -- what if pattern == empty or nil
local collected = applylpath(e,pattern) -- todo: cache
- return (collected and #collected) or 0
+ return pattern and (collected and #collected) or 0
end
-- external
@@ -8508,7 +9197,7 @@ expressions.oneof = function(s,...) -- slow
local t = {...} for i=1,#t do if s == t[i] then return true end end return false
end
expressions.error = function(str)
- xml.errorhandler("unknown function in lpath expression",tostring(str or "?"))
+ xml.errorhandler(format("unknown function in lpath expression: %s",tostring(str or "?")))
return false
end
expressions.undefined = function(s)
@@ -8534,6 +9223,23 @@ expressions.lower = lower
expressions.number = tonumber
expressions.boolean = toboolean
+function expressions.contains(str,pattern)
+ local t = type(str)
+ if t == "string" then
+ if find(str,pattern) then
+ return true
+ end
+ elseif t == "table" then
+ for i=1,#str do
+ local d = str[i]
+ if type(d) == "string" and find(d,pattern) then
+ return true
+ end
+ end
+ end
+ return false
+end
+
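+-- so a predicate can test for substrings, e.g. (sketch):
+--
+-- xml.filter(root,"/list/entry[contains(text(),'foo')]")
+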
-- user interface
local function traverse(root,pattern,handle)
@@ -8856,10 +9562,12 @@ local xml = xml
local xmlconvert, xmlcopy, xmlname = xml.convert, xml.copy, xml.name
local xmlinheritedconvert = xml.inheritedconvert
local xmlapplylpath = xml.applylpath
+local xmlfilter = xml.filter
local type, setmetatable, getmetatable = type, setmetatable, getmetatable
-local insert, remove, fastcopy = table.insert, table.remove, table.fastcopy
-local gmatch, gsub = string.gmatch, string.gsub
+local insert, remove, fastcopy, concat = table.insert, table.remove, table.fastcopy, table.concat
+local gmatch, gsub, format = string.gmatch, string.gsub, string.format
+local utfbyte = utf.byte
local function report(what,pattern,c,e)
report_xml("%s element '%s' (root: '%s', position: %s, index: %s, pattern: %s)",what,xmlname(e),xmlname(e.__p__),c,e.ni,pattern)
@@ -9004,7 +9712,7 @@ local function xmltoelement(whatever,root)
end
local element
if type(whatever) == "string" then
- element = xmlinheritedconvert(whatever,root)
+ element = xmlinheritedconvert(whatever,root) -- beware, not really a root
else
element = whatever -- we assume a table
end
@@ -9031,18 +9739,30 @@ local function copiedelement(element,newparent)
end
function xml.delete(root,pattern)
- local collected = xmlapplylpath(root,pattern)
- if collected then
- for c=1,#collected do
- local e = collected[c]
- local p = e.__p__
- if p then
- if trace_manipulations then
- report('deleting',pattern,c,e)
+ if not pattern or pattern == "" then
+ local p = root.__p__
+ if p then
+ if trace_manipulations then
+                report('deleting',"--",1,root) -- no loop counter in this branch
+ end
+ local d = p.dt
+ remove(d,root.ni)
+ redo_ni(d) -- can be made faster and inlined
+ end
+ else
+ local collected = xmlapplylpath(root,pattern)
+ if collected then
+ for c=1,#collected do
+ local e = collected[c]
+ local p = e.__p__
+ if p then
+ if trace_manipulations then
+ report('deleting',pattern,c,e)
+ end
+ local d = p.dt
+ remove(d,e.ni)
+ redo_ni(d) -- can be made faster and inlined
end
- local d = p.dt
- remove(d,e.ni)
- redo_ni(d) -- can be made faster and inlined
end
end
end
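+-- both call styles are supported now (sketch):
+--
+-- xml.delete(root,"/a/b") -- delete all matches of the pattern
+-- xml.delete(child)       -- delete the given element from its parent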
@@ -9105,32 +9825,39 @@ end
local function inject_element(root,pattern,whatever,prepend)
local element = root and xmltoelement(whatever,root)
local collected = element and xmlapplylpath(root,pattern)
- if collected then
- for c=1,#collected do
- local e = collected[c]
- local r = e.__p__
- local d, k, rri = r.dt, e.ni, r.ri
- local edt = (rri and d[rri].dt) or (d and d[k] and d[k].dt)
- if edt then
- local be, af
- local cp = copiedelement(element,e)
- if prepend then
- be, af = cp, edt
- else
- be, af = edt, cp
- end
- local bn = #be
- for i=1,#af do
- bn = bn + 1
- be[bn] = af[i]
- end
- if rri then
- r.dt[rri].dt = be
- else
- d[k].dt = be
- end
- redo_ni(d)
+ local function inject_e(e)
+ local r = e.__p__
+ local d, k, rri = r.dt, e.ni, r.ri
+ local edt = (rri and d[rri].dt) or (d and d[k] and d[k].dt)
+ if edt then
+ local be, af
+ local cp = copiedelement(element,e)
+ if prepend then
+ be, af = cp, edt
+ else
+ be, af = edt, cp
end
+ local bn = #be
+ for i=1,#af do
+ bn = bn + 1
+ be[bn] = af[i]
+ end
+ if rri then
+ r.dt[rri].dt = be
+ else
+ d[k].dt = be
+ end
+ redo_ni(d)
+ end
+ end
+ if not collected then
+ -- nothing
+ elseif collected.tg then
+ -- first or so
+ inject_e(collected)
+ else
+ for c=1,#collected do
+ inject_e(collected[c])
end
end
end
@@ -9138,16 +9865,23 @@ end
local function insert_element(root,pattern,whatever,before) -- todo: element as function
local element = root and xmltoelement(whatever,root)
local collected = element and xmlapplylpath(root,pattern)
- if collected then
+ local function insert_e(e)
+ local r = e.__p__
+ local d, k = r.dt, e.ni
+ if not before then
+ k = k + 1
+ end
+ insert(d,k,copiedelement(element,r))
+ redo_ni(d)
+ end
+ if not collected then
+ -- nothing
+ elseif collected.tg then
+ -- first or so
+ insert_e(collected)
+ else
for c=1,#collected do
- local e = collected[c]
- local r = e.__p__
- local d, k = r.dt, e.ni
- if not before then
- k = k + 1
- end
- insert(d,k,copiedelement(element,r))
- redo_ni(d)
+ insert_e(collected[c])
end
end
end
@@ -9346,6 +10080,75 @@ function xml.remapname(root, pattern, newtg, newns, newrn)
end
--[[ldx--
+<p>Helper (for q2p).</p>
+--ldx]]--
+
+function xml.cdatatotext(e)
+ local dt = e.dt
+ if #dt == 1 then
+ local first = dt[1]
+ if first.tg == "@cd@" then
+ e.dt = first.dt
+ end
+ else
+ -- maybe option
+ end
+end
+
+xml.builtinentities = table.tohash { "amp", "quot", "apos", "lt", "gt" } -- used often so share
+
+local entities = characters and characters.entities or nil
+local builtinentities = xml.builtinentities
+
+function xml.addentitiesdoctype(root,option) -- we could also have a 'resolve' i.e. inline hex
+ if not entities then
+ require("char-ent")
+ entities = characters.entities
+ end
+ if entities and root and root.tg == "@rt@" and root.statistics then
+ local list = { }
+ local hexify = option == "hexadecimal"
+ for k, v in table.sortedhash(root.statistics.entities.names) do
+ if not builtinentities[k] then
+ local e = entities[k]
+ if not e then
+ e = format("[%s]",k)
+ elseif hexify then
+ e = format("&#%05X;",utfbyte(k))
+ end
+ list[#list+1] = format(" <!ENTITY %s %q >",k,e)
+ end
+ end
+ local dt = root.dt
+ local n = dt[1].tg == "@pi@" and 2 or 1
+ if #list > 0 then
+ insert(dt, n, { "\n" })
+ insert(dt, n, {
+ tg = "@dt@", -- beware, doctype is unparsed
+ dt = { format("Something [\n%s\n] ",concat(list)) },
+ ns = "",
+ special = true,
+ })
+ insert(dt, n, { "\n\n" })
+ else
+ -- insert(dt, n, { table.serialize(root.statistics) })
+ end
+ end
+end
+
+-- local str = [==[
+-- <?xml version='1.0' standalone='yes' ?>
+-- <root>
+-- <a>test &nbsp; test &#123; test</a>
+-- <b><![CDATA[oeps]]></b>
+-- </root>
+-- ]==]
+--
+-- local x = xml.convert(str)
+-- xml.addentitiesdoctype(x,"hexadecimal")
+-- print(x)
+
+--[[ldx--
<p>Here are a few synonyms.</p>
--ldx]]--
@@ -9378,6 +10181,53 @@ xml.remap_tag = xml.remaptag obsolete.remap_tag
xml.remap_name = xml.remapname obsolete.remap_name = xml.remapname
xml.remap_namespace = xml.remapnamespace obsolete.remap_namespace = xml.remapnamespace
+-- new (probably ok)
+
+function xml.cdata(e)
+ if e then
+ local dt = e.dt
+ if dt and #dt == 1 then
+ local first = dt[1]
+ return first.tg == "@cd@" and first.dt[1] or ""
+ end
+ end
+ return ""
+end
+
+function xml.finalizers.xml.cdata(collected)
+ if collected then
+ local e = collected[1]
+ if e then
+ local dt = e.dt
+ if dt and #dt == 1 then
+ local first = dt[1]
+ return first.tg == "@cd@" and first.dt[1] or ""
+ end
+ end
+ end
+ return ""
+end
+
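+-- the finalizer permits patterns like (sketch):
+--
+-- local e = xml.convert("<x><![CDATA[raw]]></x>")
+-- print(xml.filter(e,"x/cdata()")) -- raw
+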
+function xml.insertcomment(e,str,n) -- also insertcdata
+ table.insert(e.dt,n or 1,{
+ tg = "@cm@",
+ ns = "",
+ special = true,
+ at = { },
+ dt = { str },
+ })
+end
+
+function xml.setcdata(e,str) -- also setcomment
+ e.dt = { {
+ tg = "@cd@",
+ ns = "",
+ special = true,
+ at = { },
+ dt = { str },
+ } }
+end
+
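+-- typical use (sketch):
+--
+-- xml.setcdata(e,"<raw/>")         -- e.dt now holds one @cd@ child
+-- xml.insertcomment(e,"remark",1)  -- prepends an @cm@ (comment) child
+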
end -- of closure
@@ -9391,15 +10241,17 @@ if not modules then modules = { } end modules ['lxml-xml'] = {
license = "see context related readme files"
}
-local concat = string.concat
+local concat = table.concat
+local find = string.find
local xml = xml
-local finalizers = xml.finalizers.xml
-local xmlfilter = xml.filter -- we could inline this one for speed
-local xmltostring = xml.tostring
-local xmlserialize = xml.serialize
-local xmlcollected = xml.collected
+local finalizers = xml.finalizers.xml
+local xmlfilter = xml.filter -- we could inline this one for speed
+local xmltostring = xml.tostring
+local xmlserialize = xml.serialize
+local xmlcollected = xml.collected
+local xmlnewhandlers = xml.newhandlers
local function first(collected) -- wrong ?
return collected and collected[1]
@@ -9413,6 +10265,21 @@ local function all(collected)
return collected
end
+-- local function reverse(collected)
+-- if collected then
+-- local nc = #collected
+-- if nc > 0 then
+-- local reversed, r = { }, 0
+-- for c=nc,1,-1 do
+-- r = r + 1
+-- reversed[r] = collected[c]
+-- end
+-- return reversed
+-- else
+-- return collected
+-- end
+-- end
+-- end
local reverse = table.reversed
@@ -9429,34 +10296,37 @@ local function att(id,name)
end
local function count(collected)
- return (collected and #collected) or 0
+ return collected and #collected or 0
end
local function position(collected,n)
- if collected then
- n = tonumber(n) or 0
- if n < 0 then
- return collected[#collected + n + 1]
- elseif n > 0 then
- return collected[n]
- else
- return collected[1].mi or 0
- end
+ if not collected then
+ return 0
+ end
+ local nc = #collected
+ if nc == 0 then
+ return 0
+ end
+ n = tonumber(n) or 0
+ if n < 0 then
+ return collected[nc + n + 1]
+ elseif n > 0 then
+ return collected[n]
+ else
+ return collected[1].mi or 0
end
end
local function match(collected)
- return (collected and collected[1].mi) or 0 -- match
+ return collected and #collected > 0 and collected[1].mi or 0 -- match
end
local function index(collected)
- if collected then
- return collected[1].ni
- end
+ return collected and #collected > 0 and collected[1].ni or 0 -- 0 is new
end
local function attributes(collected,arguments)
- if collected then
+ if collected and #collected > 0 then
local at = collected[1].at
if arguments then
return at[arguments]
@@ -9467,7 +10337,7 @@ local function attributes(collected,arguments)
end
local function chainattribute(collected,arguments) -- todo: optional levels
- if collected then
+ if collected and #collected > 0 then
local e = collected[1]
while e do
local at = e.at
@@ -9485,108 +10355,169 @@ local function chainattribute(collected,arguments) -- todo: optional levels
return ""
end
-local function raw(collected) -- hybrid
- if collected then
+local function raw(collected) -- hybrid (not much different from text so it might go)
+ if collected and #collected > 0 then
local e = collected[1] or collected
- return (e and xmlserialize(e)) or "" -- only first as we cannot concat function
+ return e and xmltostring(e) or "" -- only first as we cannot concat function
else
return ""
end
end
+--
+
+local xmltexthandler = xmlnewhandlers {
+ name = "string",
+ initialize = function()
+ result = { }
+ return result
+ end,
+ finalize = function()
+ return concat(result)
+ end,
+ handle = function(...)
+ result[#result+1] = concat { ... }
+ end,
+ escape = false,
+}
+
+local function xmltotext(root)
+ local dt = root.dt
+ if not dt then
+ return ""
+ end
+ local nt = #dt -- string or table
+ if nt == 0 then
+ return ""
+ elseif nt == 1 and type(dt[1]) == "string" then
+ return dt[1] -- no escaping of " ' < > &
+ else
+ return xmlserialize(root,xmltexthandler) or ""
+ end
+end
+
+--
+
local function text(collected) -- hybrid
- if collected then
- local e = collected[1] or collected
- return (e and xmltostring(e.dt)) or ""
+ if collected then -- no # test here !
+ local e = collected[1] or collected -- why fallback to element, how about cdata
+ return e and xmltotext(e) or ""
else
return ""
end
end
local function texts(collected)
- if collected then
- local t, n = { }, 0
- for c=1,#collected do
- local e = collected[c]
- if e and e.dt then
- n = n + 1
- t[n] = e.dt
- end
+ if not collected then
+ return { } -- why no nil
+ end
+ local nc = #collected
+ if nc == 0 then
+ return { } -- why no nil
+ end
+ local t, n = { }, 0
+ for c=1,nc do
+ local e = collected[c]
+ if e and e.dt then
+ n = n + 1
+ t[n] = e.dt
end
- return t
end
+ return t
end
local function tag(collected,n)
- if collected then
- local c
- if n == 0 or not n then
- c = collected[1]
- elseif n > 1 then
- c = collected[n]
- else
- c = collected[#collected-n+1]
- end
- return c and c.tg
+ if not collected then
+ return
+ end
+ local nc = #collected
+ if nc == 0 then
+ return
end
+ local c
+ if n == 0 or not n then
+ c = collected[1]
+ elseif n > 1 then
+ c = collected[n]
+ else
+ c = collected[nc-n+1]
+ end
+ return c and c.tg
end
local function name(collected,n)
- if collected then
- local c
- if n == 0 or not n then
- c = collected[1]
- elseif n > 1 then
- c = collected[n]
- else
- c = collected[#collected-n+1]
- end
- if c then
- if c.ns == "" then
- return c.tg
- else
- return c.ns .. ":" .. c.tg
- end
- end
+ if not collected then
+ return
+ end
+ local nc = #collected
+ if nc == 0 then
+ return
+ end
+ local c
+ if n == 0 or not n then
+ c = collected[1]
+ elseif n > 1 then
+ c = collected[n]
+ else
+ c = collected[nc-n+1]
+ end
+ if not c then
+ -- sorry
+ elseif c.ns == "" then
+ return c.tg
+ else
+ return c.ns .. ":" .. c.tg
end
end
local function tags(collected,nonamespace)
- if collected then
- local t, n = { }, 0
- for c=1,#collected do
- local e = collected[c]
- local ns, tg = e.ns, e.tg
- n = n + 1
- if nonamespace or ns == "" then
- t[n] = tg
- else
- t[n] = ns .. ":" .. tg
- end
+ if not collected then
+ return
+ end
+ local nc = #collected
+ if nc == 0 then
+ return
+ end
+ local t, n = { }, 0
+ for c=1,nc do
+ local e = collected[c]
+ local ns, tg = e.ns, e.tg
+ n = n + 1
+ if nonamespace or ns == "" then
+ t[n] = tg
+ else
+ t[n] = ns .. ":" .. tg
end
- return t
end
+ return t
end
-local function empty(collected)
- if collected then
- for c=1,#collected do
- local e = collected[c]
- if e then
- local edt = e.dt
- if edt then
- local n = #edt
- if n == 1 then
- local edk = edt[1]
- local typ = type(edk)
- if typ == "table" then
- return false
- elseif edk ~= "" then -- maybe an extra tester for spacing only
- return false
- end
- elseif n > 1 then
+local function empty(collected,spacesonly)
+ if not collected then
+ return true
+ end
+ local nc = #collected
+ if nc == 0 then
+ return true
+ end
+ for c=1,nc do
+ local e = collected[c]
+ if e then
+ local edt = e.dt
+ if edt then
+ local n = #edt
+ if n == 1 then
+ local edk = edt[1]
+ local typ = type(edk)
+ if typ == "table" then
+ return false
+ elseif edk ~= "" then
+ return false
+ elseif spacesonly and not find(edk,"%S") then
return false
end
+ elseif n > 1 then
+ return false
end
end
end
@@ -9641,14 +10572,14 @@ function xml.raw(id,pattern)
end
end
-function xml.text(id,pattern)
+function xml.text(id,pattern) -- brrr either content or element (when cdata)
if pattern then
-- return text(xmlfilter(id,pattern))
local collected = xmlfilter(id,pattern)
- return (collected and xmltostring(collected[1].dt)) or ""
+ return collected and #collected > 0 and xmltotext(collected[1]) or ""
elseif id then
-- return text(id)
- return xmltostring(id.dt) or ""
+ return xmltotext(id) or ""
else
return ""
end
@@ -9656,6 +10587,8 @@ end
xml.content = text
+--
+
function xml.position(id,pattern,n) -- element
return position(xmlfilter(id,pattern),n)
end
@@ -9664,8 +10597,8 @@ function xml.match(id,pattern) -- number
return match(xmlfilter(id,pattern))
end
-function xml.empty(id,pattern)
- return empty(xmlfilter(id,pattern))
+function xml.empty(id,pattern,spacesonly)
+ return empty(xmlfilter(id,pattern),spacesonly)
end
xml.all = xml.filter
@@ -9719,7 +10652,7 @@ if not modules then modules = { } end modules ['data-ini'] = {
license = "see context related readme files",
}
-local gsub, find, gmatch = string.gsub, string.find, string.gmatch
+local gsub, find, gmatch, char = string.gsub, string.find, string.gmatch, string.char
local concat = table.concat
local next, type = next, type
@@ -9781,7 +10714,7 @@ do
local homedir = osgetenv(ostype == "windows" and 'USERPROFILE' or 'HOME') or ''
if not homedir or homedir == "" then
- homedir = string.char(127) -- we need a value, later we wil trigger on it
+        homedir = char(127) -- we need a value, later we will trigger on it
end
homedir = file.collapsepath(homedir)
@@ -9954,7 +10887,7 @@ if not modules then modules = { } end modules ['data-exp'] = {
license = "see context related readme files",
}
-local format, find, gmatch, lower = string.format, string.find, string.gmatch, string.lower
+local format, find, gmatch, lower, char, sub = string.format, string.find, string.gmatch, string.lower, string.char, string.sub
local concat, sort = table.concat, table.sort
local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
local Ct, Cs, Cc, P, C, S = lpeg.Ct, lpeg.Cs, lpeg.Cc, lpeg.P, lpeg.C, lpeg.S
@@ -9974,19 +10907,6 @@ local resolvers = resolvers
-- all, when working on the main resolver code, I don't want to scroll
-- past this every time. See data-obs.lua for the gsub variant.
--- {a,b,c,d}
--- a,b,c/{p,q,r},d
--- a,b,c/{p,q,r}/d/{x,y,z}//
--- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
--- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
--- a{b,c}{d,e}f
--- {a,b,c,d}
--- {a,b,c/{p,q,r},d}
--- {a,b,c/{p,q,r}/d/{x,y,z}//}
--- {a,b,c/{p,q/{x,y,z}},d/{p,q,r}}
--- {a,b,c/{p,q/{x,y,z},w}v,d/{p,q,r}}
--- {$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,.local,}/web2c}
-
local function f_first(a,b)
local t, n = { }, 0
for s in gmatch(b,"[^,]+") do
@@ -10043,12 +10963,9 @@ local function splitpathexpr(str, newlist, validate) -- I couldn't resist lpeggi
str = lpegmatch(stripper_1,str)
if validate then
for s in gmatch(str,"[^,]+") do
- local restoreslashes = false
- if find(s, "//$") then restoreslashes = true end
s = validate(s)
if s then
n = n + 1 ; t[n] = s
- if restoreslashes and not find(s, "//$") then t[n] = t[n] .. '//' end
end
end
else
@@ -10081,41 +10998,56 @@ function resolvers.expandedpathfromlist(pathlist)
return newlist
end
+-- {a,b,c,d}
+-- a,b,c/{p,q,r},d
+-- a,b,c/{p,q,r}/d/{x,y,z}//
+-- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
+-- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
+-- a{b,c}{d,e}f
+-- {a,b,c,d}
+-- {a,b,c/{p,q,r},d}
+-- {a,b,c/{p,q,r}/d/{x,y,z}//}
+-- {a,b,c/{p,q/{x,y,z}},d/{p,q,r}}
+-- {a,b,c/{p,q/{x,y,z},w}v,d/{p,q,r}}
+-- {$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,.local,}/web2c}
+
local cleanup = lpeg.replacer {
{ "!" , "" },
{ "\\" , "/" },
}
-local homedir
-
-function resolvers.cleanpath(str)
- if not homedir then
- homedir = lpegmatch(cleanup,environment.homedir or "")
- if homedir == string.char(127) or homedir == "" or not lfs.isdir(homedir) then
- if trace_expansions then
- report_expansions("no home dir set, ignoring dependent paths")
- end
- function resolvers.cleanpath(str)
- if find(str,"~") then
- return "" -- special case
- else
- return str and lpegmatch(cleanup,str)
- end
- end
- else
- cleanup = lpeg.replacer {
- { "!" , "" },
- { "\\" , "/" },
- { "~" , homedir },
- }
- function resolvers.cleanpath(str)
- return str and lpegmatch(cleanup,str)
+function resolvers.cleanpath(str) -- tricky, maybe only simple paths
+ local doslashes = (P("\\")/"/" + 1)^0
+ local donegation = (P("!") /"" )^0
+ local homedir = lpegmatch(Cs(donegation * doslashes),environment.homedir or "")
+ if homedir == "~" or homedir == "" or not lfs.isdir(homedir) then
+ if trace_expansions then
+ report_expansions("no home dir set, ignoring dependent paths")
+ end
+ function resolvers.cleanpath(str)
+ if not str or find(str,"~") then
+ return "" -- special case
+ else
+ return lpegmatch(cleanup,str)
end
end
+ else
+ local dohome = ((P("~")+P("$HOME"))/homedir)^0
+ local cleanup = Cs(donegation * dohome * doslashes)
+ function resolvers.cleanpath(str)
+ return str and lpegmatch(cleanup,str) or ""
+ end
end
return resolvers.cleanpath(str)
end
+-- print(resolvers.cleanpath(""))
+-- print(resolvers.cleanpath("!"))
+-- print(resolvers.cleanpath("~"))
+-- print(resolvers.cleanpath("~/test"))
+-- print(resolvers.cleanpath("!~/test"))
+-- print(resolvers.cleanpath("~/test~test"))
+
-- This one strips quotes and funny tokens.
local expandhome = P("~") / "$HOME" -- environment.homedir
@@ -10140,8 +11072,8 @@ end
local cache = { }
----- splitter = Ct(lpeg.splitat(S(ostype == "windows" and ";" or ":;"))) -- maybe add ,
-local splitter = Ct(lpeg.splitat(";")) -- as we move towards urls, prefixes and use tables we no longer do :
+----- splitter = lpeg.tsplitat(S(ostype == "windows" and ";" or ":;")) -- maybe add ,
+local splitter = lpeg.tsplitat(";") -- as we move towards urls, prefixes and use tables we no longer do :
local backslashswapper = lpeg.replacer("\\","/")
@@ -10201,13 +11133,22 @@ end
-local weird = P(".")^1 + lpeg.anywhere(S("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t"))
+-- a lot of this caching can be stripped away when we have ssd's everywhere
+--
+-- we could cache all the (sub)paths here if needed
local attributes, directory = lfs.attributes, lfs.dir
+local weird = P(".")^1 + lpeg.anywhere(S("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t"))
+local timer = { }
+local scanned = { }
+local nofscans = 0
+local scancache = { }
+
local function scan(files,spec,path,n,m,r)
- local full = (path == "" and spec) or (spec .. path .. '/')
- local dirs, nofdirs = { }, 0
+ local full = (path == "" and spec) or (spec .. path .. '/')
+ local dirs = { }
+ local nofdirs = 0
for name in directory(full) do
if not lpegmatch(weird,name) then
local mode = attributes(full..name,'mode')
@@ -10245,25 +11186,122 @@ local function scan(files,spec,path,n,m,r)
files, n, m, r = scan(files,spec,dirs[i],n,m,r)
end
end
+ scancache[sub(full,1,-2)] = files
return files, n, m, r
end
-function resolvers.scanfiles(path,branch)
+local fullcache = { }
+
+function resolvers.scanfiles(path,branch,usecache)
+ statistics.starttiming(timer)
+ local realpath = resolvers.resolve(path) -- no shortcut
+ if usecache then
+ local files = fullcache[realpath]
+ if files then
+ if trace_locating then
+                report_expansions("using cached scan of path '%s', branch '%s'",path,branch or path)
+ end
+ return files
+ end
+ end
if trace_locating then
- report_expansions("scanning path '%s', branch '%s'",path, branch or path)
+ report_expansions("scanning path '%s', branch '%s'",path,branch or path)
end
- local realpath = resolvers.resolve(path) -- no shortcut
local files, n, m, r = scan({ },realpath .. '/',"",0,0,0)
- files.__path__ = path -- can be selfautoparent:texmf-whatever
- files.__files__ = n
- files.__directories__ = m
- files.__remappings__ = r
+ files.__path__ = path -- can be selfautoparent:texmf-whatever
+ files.__files__ = n
+ files.__directories__ = m
+ files.__remappings__ = r
if trace_locating then
report_expansions("%s files found on %s directories with %s uppercase remappings",n,m,r)
end
+ if usecache then
+ scanned[#scanned+1] = realpath
+ fullcache[realpath] = files
+ end
+ nofscans = nofscans + 1
+ statistics.stoptiming(timer)
+ return files
+end
+
+local function simplescan(files,spec,path) -- first match only, no map and such
+ local full = (path == "" and spec) or (spec .. path .. '/')
+ local dirs = { }
+ local nofdirs = 0
+ for name in directory(full) do
+ if not lpegmatch(weird,name) then
+ local mode = attributes(full..name,'mode')
+ if mode == 'file' then
+ if not files[name] then
+ -- only first match
+ files[name] = path
+ end
+ elseif mode == 'directory' then
+ nofdirs = nofdirs + 1
+ if path ~= "" then
+ dirs[nofdirs] = path..'/'..name
+ else
+ dirs[nofdirs] = name
+ end
+ end
+ end
+ end
+ if nofdirs > 0 then
+ sort(dirs)
+ for i=1,nofdirs do
+ files = simplescan(files,spec,dirs[i])
+ end
+ end
+ return files
+end
+
+local simplecache = { }
+local nofsharedscans = 0
+
+function resolvers.simplescanfiles(path,branch,usecache)
+ statistics.starttiming(timer)
+ local realpath = resolvers.resolve(path) -- no shortcut
+ if usecache then
+ local files = simplecache[realpath]
+ if not files then
+ files = scancache[realpath]
+ if files then
+ nofsharedscans = nofsharedscans + 1
+ end
+ end
+ if files then
+ if trace_locating then
+                report_expansions("using cached scan of path '%s', branch '%s'",path,branch or path)
+ end
+ return files
+ end
+ end
+ if trace_locating then
+ report_expansions("scanning path '%s', branch '%s'",path,branch or path)
+ end
+ local files = simplescan({ },realpath .. '/',"")
+ if trace_locating then
+ report_expansions("%s files found",table.count(files))
+ end
+ if usecache then
+ scanned[#scanned+1] = realpath
+ simplecache[realpath] = files
+ end
+ nofscans = nofscans + 1
+ statistics.stoptiming(timer)
return files
end
+function resolvers.scandata()
+ table.sort(scanned)
+ return {
+ n = nofscans,
+ shared = nofsharedscans,
+ time = statistics.elapsedtime(timer),
+ paths = scanned,
+ }
+end
+
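+-- usage sketch (inspect stands for whatever table dumper is at hand):
+--
+-- local files = resolvers.scanfiles("/some/texmf",nil,true) -- cached variant
+-- inspect(resolvers.scandata()) -- n, shared, time, paths
+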
end -- of closure
@@ -10361,12 +11399,12 @@ local relations = allocate { -- todo: handlers also here
mp = {
names = { "mp" },
variable = 'MPINPUTS',
- suffixes = { 'mp' },
+ suffixes = { 'mp', 'mpvi', 'mpiv', 'mpii' },
},
tex = {
names = { "tex" },
variable = 'TEXINPUTS',
- suffixes = { 'tex', "mkiv", "mkiv", "mkii" },
+ suffixes = { 'tex', "mkvi", "mkiv", "mkii" },
},
icc = {
names = { "icc", "icc profile", "icc profiles" },
@@ -10503,7 +11541,7 @@ end
resolvers.updaterelations() -- push this in the metatable -> newindex
local function simplified(t,k)
- return rawget(t,lower(gsub(k," ","")))
+ return k and rawget(t,lower(gsub(k," ",""))) or nil
end
setmetatableindex(formats, simplified)
@@ -10589,6 +11627,7 @@ luatools with a recache feature.</p>
--ldx]]--
local format, lower, gsub, concat = string.format, string.lower, string.gsub, table.concat
+local serialize, serializetofile = table.serialize, table.tofile
local mkdirs, isdir = dir.mkdirs, lfs.isdir
local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
@@ -10651,11 +11690,7 @@ local function identify()
end
elseif not writable and caches.force then
local cacheparent = file.dirname(cachepath)
- -- TH: added 'or true' for deeply buried non-existent caches.
- -- file.is_writable() is not really important here, since this
- -- branch is only checked for as-yet non-existent paths, and
- -- it guards against mkdirs() failing, anyway.
- if file.is_writable(cacheparent) or true then
+ if file.is_writable(cacheparent) and true then -- we go on anyway (needed for mojca's kind of paths)
if not caches.ask or io.ask(format("\nShould I create the cache path %s?",cachepath), "no", { "yes", "no" }) == "yes" then
mkdirs(cachepath)
if isdir(cachepath) and file.is_writable(cachepath) then
@@ -10746,7 +11781,7 @@ function caches.usedpaths()
end
function caches.configfiles()
- return table.concat(resolvers.instance.specification,";")
+ return concat(resolvers.instance.specification,";")
end
function caches.hashed(tree)
@@ -10870,9 +11905,9 @@ function caches.savedata(filepath,filename,data,raw)
end
data.cache_uuid = os.uuid()
if caches.direct then
- file.savedata(tmaname,table.serialize(data,true,saveoptions))
+ file.savedata(tmaname,serialize(data,true,saveoptions))
else
- table.tofile(tmaname,data,true,saveoptions)
+ serializetofile(tmaname,data,true,saveoptions)
end
utilities.lua.compile(tmaname,tmcname)
end
@@ -10939,7 +11974,7 @@ function caches.savecontent(cachename,dataname,content)
content = content,
uuid = os.uuid(),
}
- local ok = io.savedata(luaname,table.serialize(data,true))
+ local ok = io.savedata(luaname,serialize(data,true))
if ok then
if trace_locating then
report_resolvers("category '%s', cachename '%s' saved in '%s'",dataname,cachename,luaname)
@@ -11112,7 +12147,7 @@ if not modules then modules = { } end modules ['data-res'] = {
-- instance but for practical purposes we now avoid this and use a
-- instance variable. We always have one instance active (sort of global).
--- todo: cache:/// home:///
+-- todo: cache:/// home:/// selfautoparent:/// (sometime end 2012)
local format, gsub, find, lower, upper, match, gmatch = string.format, string.gsub, string.find, string.lower, string.upper, string.match, string.gmatch
local concat, insert, sortedkeys = table.concat, table.insert, table.sortedkeys
@@ -11155,7 +12190,58 @@ resolvers.criticalvars = allocate { "SELFAUTOLOC", "SELFAUTODIR", "SELFAUTOPARE
resolvers.luacnfname = 'texmfcnf.lua'
resolvers.luacnfstate = "unknown"
-resolvers.luacnfspec = '{selfautodir:,selfautoparent:};{selfautodir:,selfautoparent:}{/share,}/texmf{-local,};{selfautodir:,selfautoparent:}{/share,}/texmf{-local,}/web2c'
+-- The web2c tex binaries as well as kpse have built in paths for the configuration
+-- files and there can be a depressing truckload of them. This is actually the weak
+-- spot of a distribution. So we don't want:
+--
+-- resolvers.luacnfspec = '{$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,}/web2c}'
+--
+-- but instead use:
+--
+-- resolvers.luacnfspec = 'selfautoparent:{/texmf{-local,}{,/web2c}}'
+--
+-- which does not make texlive happy as there is a texmf-local tree one level up
+-- (sigh), so we need this. (We can assume web2c as mkiv does not run on older
+-- texlives anyway.)
+--
+-- texlive:
+--
+-- selfautodir:
+-- selfautoparent:
+-- selfautodir:share/texmf-local/web2c
+-- selfautodir:share/texmf/web2c
+-- selfautodir:texmf-local/web2c
+-- selfautodir:texmf/web2c
+-- selfautoparent:share/texmf-local/web2c
+-- selfautoparent:share/texmf/web2c
+-- selfautoparent:texmf-local/web2c
+-- selfautoparent:texmf/web2c
+--
+-- minimals:
+--
+-- home:texmf/web2c
+-- selfautoparent:texmf-local/web2c
+-- selfautoparent:texmf-context/web2c
+-- selfautoparent:texmf/web2c
+
+if this_is_texlive then
+ -- resolvers.luacnfspec = '{selfautodir:,selfautoparent:}{,{/share,}/texmf{-local,}/web2c}'
+ -- resolvers.luacnfspec = '{selfautodir:{/share,}/texmf-local/web2c,selfautoparent:{/share,}/texmf{-local,}/web2c}'
+ -- resolvers.luacnfspec = 'selfautodir:/texmf-local/web2c;selfautoparent:/texmf{-local,}/web2c'
+ resolvers.luacnfspec = 'selfautodir:;selfautoparent:;{selfautodir:,selfautoparent:}{/share,}/texmf{-local,}/web2c'
+else
+ resolvers.luacnfspec = 'home:texmf/web2c;selfautoparent:texmf{-local,-context,}/web2c'
+end
+
+-- which (as we want users to use the web2c path) can be simplified to this:
+--
+-- if environment and environment.ownpath and string.find(environment.ownpath,"[\\/]texlive[\\/]") then
+-- resolvers.luacnfspec = 'selfautodir:/texmf-local/web2c,selfautoparent:/texmf-local/web2c,selfautoparent:/texmf/web2c'
+-- else
+-- resolvers.luacnfspec = 'selfautoparent:/texmf-local/web2c,selfautoparent:/texmf/web2c'
+-- end
+
+
local unset_variable = "unset"
@@ -11177,7 +12263,7 @@ local instance = resolvers.instance or nil -- the current one (fast access)
function resolvers.setenv(key,value,raw)
if instance then
-- this one will be consulted first when we stay inside
- -- the current environment
+ -- the current environment (prefixes are not resolved here)
instance.environment[key] = value
-- we feed back into the environment, and as this is used
-- by other applications (via os.execute) we need to make
@@ -11257,6 +12343,7 @@ function resolvers.newinstance() -- todo: all vars will become lowercase and alp
remember = true,
diskcache = true,
renewcache = false,
+ renewtree = false,
loaderror = false,
savelists = true,
pattern = nil, -- lists
@@ -11348,7 +12435,7 @@ local function makepathexpression(str)
end
end
-local function reportcriticalvariables()
+local function reportcriticalvariables(cnfspec)
if trace_locating then
for i=1,#resolvers.criticalvars do
local k = resolvers.criticalvars[i]
@@ -11356,6 +12443,14 @@ local function reportcriticalvariables()
report_resolving("variable '%s' set to '%s'",k,v)
end
report_resolving()
+ if cnfspec then
+ if type(cnfspec) == "table" then
+ report_resolving("using configuration specification '%s'",concat(cnfspec,","))
+ else
+ report_resolving("using configuration specification '%s'",cnfspec)
+ end
+ end
+ report_resolving()
end
reportcriticalvariables = function() end
end
@@ -11370,7 +12465,7 @@ local function identify_configuration_files()
else
resolvers.luacnfstate = "environment"
end
- reportcriticalvariables()
+ reportcriticalvariables(cnfspec)
local cnfpaths = expandedpathfromlist(resolvers.splitpath(cnfspec))
local luacnfname = resolvers.luacnfname
for i=1,#cnfpaths do
@@ -11406,6 +12501,19 @@ local function load_configuration_files()
if blob then
local setups = instance.setups
local data = blob()
+ local parent = data and data.parent
+ if parent then
+ local filename = filejoin(pathname,parent)
+ local realname = resolvers.resolve(filename) -- no shortcut
+ local blob = loadfile(realname)
+ if blob then
+ local parentdata = blob()
+ if parentdata then
+ report_resolving("loading configuration file '%s'",filename)
+ data = table.merged(parentdata,data)
+ end
+ end
+ end
data = data and data.content
if data then
if trace_locating then
@@ -11495,11 +12603,6 @@ local function locate_file_databases()
local runtime = stripped == path
path = resolvers.cleanpath(path)
local spec = resolvers.splitmethod(stripped)
- -- TH Original did this the other way around (elseif and if tests
- -- reversed) but then the else branch was never reached, as 'runtime'
- -- would effectively be ignored. In turn, that meant that the wrong
- -- locator method was used, such that tree: and file: were treated
- -- identically (cached and no runtime search).
if runtime and (spec.noscheme or spec.scheme == "file") then
stripped = "tree:///" .. stripped
elseif spec.scheme == "cache" or spec.scheme == "file" then
@@ -11507,7 +12610,7 @@ local function locate_file_databases()
end
if trace_locating then
if runtime then
- report_resolving("locating list of '%s' (runtime)",path)
+ report_resolving("locating list of '%s' (runtime) (%s)",path,stripped)
else
report_resolving("locating list of '%s' (cached)",path)
end
@@ -11551,6 +12654,39 @@ local function save_file_databases() -- will become cachers
end
end
+function resolvers.renew(hashname)
+ if hashname and hashname ~= "" then
+ local expanded = resolvers.expansion(hashname) or ""
+ if expanded ~= "" then
+ if trace_locating then
+ report_resolving("identifying tree '%s' from '%s'",expanded,hashname)
+ end
+ hashname = expanded
+ else
+ if trace_locating then
+ report_resolving("identifying tree '%s'",hashname)
+ end
+ end
+ local realpath = resolvers.resolve(hashname)
+ if lfs.isdir(realpath) then
+ if trace_locating then
+ report_resolving("using path '%s'",realpath)
+ end
+ methodhandler('generators',hashname)
+ -- could be shared
+ local content = instance.files[hashname]
+ caches.collapsecontent(content)
+ if trace_locating then
+ report_resolving("saving tree '%s'",hashname)
+ end
+ caches.savecontent(hashname,"files",content)
+ -- till here
+ else
+ report_resolving("invalid path '%s'",realpath)
+ end
+ end
+end
+
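+-- usage sketch:
+--
+-- resolvers.renew("selfautoparent:texmf-local") -- rescan and resave that tree
+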
local function load_databases()
locate_file_databases()
if instance.diskcache and not instance.renewcache then
@@ -11838,6 +12974,7 @@ local function collect_files(names)
if dname == "" or find(dname,"^%.") then
dname = false
else
+            dname = gsub(dname,"*","%.*")
dname = "/" .. dname .. "$"
end
local hashes = instance.hashes
@@ -11897,9 +13034,22 @@ local function collect_files(names)
return noffiles > 0 and filelist or nil
end
-function resolvers.registerintrees(name)
- if not find(name,"^%.") then
- instance.foundintrees[name] = (instance.foundintrees[name] or 0) + 1 -- maybe only one
+local fit = { }
+
+function resolvers.registerintrees(filename,format,filetype,usedmethod,foundname)
+ local foundintrees = instance.foundintrees
+ if usedmethod == "direct" and filename == foundname and fit[foundname] then
+ -- just an extra lookup after a test on presence
+ else
+ local t = {
+ filename = filename,
+ format = format ~= "" and format or nil,
+ filetype = filetype ~= "" and filetype or nil,
+ usedmethod = usedmethod,
+ foundname = foundname,
+ }
+ fit[foundname] = t
+ foundintrees[#foundintrees+1] = t
end
end
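+-- a registered entry then looks something like (sketch):
+--
+-- { filename = "context.mkiv", format = "tex", filetype = "tex",
+--   usedmethod = "database", foundname = "<path>/context.mkiv" }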
@@ -11919,297 +13069,423 @@ end
local preparetreepattern = Cs((P(".")/"%%." + P("-")/"%%-" + P(1))^0 * Cc("$"))
--- this one is split in smaller functions but it needs testing
+-- -- -- begin of main file search routing -- -- -- needs checking as previous has been patched
-local function collect_instance_files(filename,askedformat,allresults) -- todo : plugin (scanners, checkers etc)
- local result = { }
- local stamp = nil
- askedformat = askedformat or ""
- filename = collapsepath(filename)
- -- speed up / beware: format problem
- if instance.remember and not allresults then
- stamp = filename .. "--" .. askedformat
- if instance.found[stamp] then
+local collect_instance_files
+
+local function find_analyze(filename,askedformat,allresults)
+ local filetype, wantedfiles, ext = '', { }, fileextname(filename)
+ -- too tricky as filename can be bla.1.2.3:
+ --
+ -- if not suffixmap[ext] then
+ -- wantedfiles[#wantedfiles+1] = filename
+ -- end
+ wantedfiles[#wantedfiles+1] = filename
+ if askedformat == "" then
+ if ext == "" or not suffixmap[ext] then
+ local defaultsuffixes = resolvers.defaultsuffixes
+ for i=1,#defaultsuffixes do
+ local forcedname = filename .. '.' .. defaultsuffixes[i]
+ wantedfiles[#wantedfiles+1] = forcedname
+ filetype = resolvers.formatofsuffix(forcedname)
+ if trace_locating then
+ report_resolving("forcing filetype '%s'",filetype)
+ end
+ end
+ else
+ filetype = resolvers.formatofsuffix(filename)
if trace_locating then
- report_resolving("remembered file '%s'",filename)
+ report_resolving("using suffix based filetype '%s'",filetype)
end
- resolvers.registerintrees(filename) -- for tracing used files
- return instance.found[stamp]
end
- end
- if not dangerous[askedformat] then
- if isreadable(filename) then
- if trace_detail then
- report_resolving("file '%s' found directly",filename)
- end
- if stamp then
- instance.found[stamp] = { filename }
+ else
+ if ext == "" or not suffixmap[ext] then
+ local format_suffixes = suffixes[askedformat]
+ if format_suffixes then
+ for i=1,#format_suffixes do
+ wantedfiles[#wantedfiles+1] = filename .. "." .. format_suffixes[i]
+ end
end
- return { filename }
+ end
+ filetype = askedformat
+ if trace_locating then
+ report_resolving("using given filetype '%s'",filetype)
end
end
+ return filetype, wantedfiles
+end
+
+local function find_direct(filename,allresults)
+ if not dangerous[askedformat] and isreadable(filename) then
+ if trace_detail then
+ report_resolving("file '%s' found directly",filename)
+ end
+ return "direct", { filename }
+ end
+end
+
+local function find_wildcard(filename,allresults)
if find(filename,'%*') then
if trace_locating then
report_resolving("checking wildcard '%s'", filename)
end
- result = resolvers.findwildcardfiles(filename) -- we can use th elocal
- elseif file.is_qualified_path(filename) then
- if isreadable(filename) then
- if trace_locating then
- report_resolving("qualified name '%s'", filename)
- end
- result = { filename }
- else
- local forcedname, ok, suffix = "", false, fileextname(filename)
- if suffix == "" then -- why
- local format_suffixes = askedformat == "" and resolvers.defaultsuffixes or suffixes[askedformat]
- if format_suffixes then
- for i=1,#format_suffixes do
- local s = format_suffixes[i]
- forcedname = filename .. "." .. s
- if isreadable(forcedname) then
- if trace_locating then
- report_resolving("no suffix, forcing format filetype '%s'", s)
- end
- result, ok = { forcedname }, true
- break
- end
- end
- end
- end
- if not ok and suffix ~= "" then
- -- try to find in tree (no suffix manipulation), here we search for the
- -- matching last part of the name
- local basename = filebasename(filename)
- local pattern = lpegmatch(preparetreepattern,filename)
- -- messy .. to be sorted out
- local savedformat = askedformat
- local format = savedformat or ""
- if format == "" then
- askedformat = resolvers.formatofsuffix(suffix)
- end
- if not format then
- askedformat = "othertextfiles" -- kind of everything, maybe texinput is better
- end
- --
- if basename ~= filename then
- local resolved = collect_instance_files(basename,askedformat,allresults)
- if #result == 0 then -- shouldn't this be resolved ?
- local lowered = lower(basename)
- if filename ~= lowered then
- resolved = collect_instance_files(lowered,askedformat,allresults)
- end
- end
- resolvers.format = savedformat
- --
- for r=1,#resolved do
- local rr = resolved[r]
- if find(rr,pattern) then
- result[#result+1], ok = rr, true
- end
- end
- end
- -- a real wildcard:
- --
- -- if not ok then
- -- local filelist = collect_files({basename})
- -- for f=1,#filelist do
- -- local ff = filelist[f][3] or ""
- -- if find(ff,pattern) then
- -- result[#result+1], ok = ff, true
- -- end
- -- end
- -- end
- end
- if not ok and trace_locating then
- report_resolving("qualified name '%s'", filename)
- end
+ local method, result = resolvers.findwildcardfiles(filename)
+ if result then
+ return "wildcard", result
end
- else
- -- search spec
- local filetype, done, wantedfiles, ext = '', false, { }, fileextname(filename)
- -- -- tricky as filename can be bla.1.2.3
- -- if not suffixmap[ext] then --- probably needs to be done elsewhere too
- -- wantedfiles[#wantedfiles+1] = filename
- -- end
- wantedfiles[#wantedfiles+1] = filename
- if askedformat == "" then
- if ext == "" or not suffixmap[ext] then
- local defaultsuffixes = resolvers.defaultsuffixes
- for i=1,#defaultsuffixes do
- local forcedname = filename .. '.' .. defaultsuffixes[i]
- wantedfiles[#wantedfiles+1] = forcedname
- filetype = resolvers.formatofsuffix(forcedname)
+ end
+end
+
+local function find_qualified(filename,allresults) -- this one will be split too
+ if not file.is_qualified_path(filename) then
+ return
+ end
+ if trace_locating then
+ report_resolving("checking qualified name '%s'", filename)
+ end
+ if isreadable(filename) then
+ if trace_detail then
+ report_resolving("qualified file '%s' found", filename)
+ end
+ return "qualified", { filename }
+ end
+ if trace_detail then
+ report_resolving("locating qualified file '%s'", filename)
+ end
+ local forcedname, suffix = "", fileextname(filename)
+ if suffix == "" then -- why
+ local format_suffixes = askedformat == "" and resolvers.defaultsuffixes or suffixes[askedformat]
+ if format_suffixes then
+ for i=1,#format_suffixes do
+ local s = format_suffixes[i]
+ forcedname = filename .. "." .. s
+ if isreadable(forcedname) then
if trace_locating then
- report_resolving("forcing filetype '%s'",filetype)
+ report_resolving("no suffix, forcing format filetype '%s'", s)
end
+ return "qualified", { forcedname }
end
- else
- filetype = resolvers.formatofsuffix(filename)
- if trace_locating then
- report_resolving("using suffix based filetype '%s'",filetype)
+ end
+ end
+ end
+ if suffix and suffix ~= "" then
+ -- try to find in tree (no suffix manipulation), here we search for the
+ -- matching last part of the name
+ local basename = filebasename(filename)
+ local pattern = lpegmatch(preparetreepattern,filename)
+ -- messy .. to be sorted out
+ local savedformat = askedformat
+ local format = savedformat or ""
+ if format == "" then
+ askedformat = resolvers.formatofsuffix(suffix)
+ end
+ if not format then
+ askedformat = "othertextfiles" -- kind of everything, maybe all
+ end
+ --
+ if basename ~= filename then
+ local resolved = collect_instance_files(basename,askedformat,allresults)
+ if #resolved == 0 then
+ local lowered = lower(basename)
+ if filename ~= lowered then
+ resolved = collect_instance_files(lowered,askedformat,allresults)
end
end
- else
- if ext == "" or not suffixmap[ext] then
- local format_suffixes = suffixes[askedformat]
- if format_suffixes then
- for i=1,#format_suffixes do
- wantedfiles[#wantedfiles+1] = filename .. "." .. format_suffixes[i]
+ resolvers.format = savedformat
+ --
+ if #resolved > 0 then
+ local result = { }
+ for r=1,#resolved do
+ local rr = resolved[r]
+ if find(rr,pattern) then
+ result[#result+1] = rr
end
end
+ if #result > 0 then
+ return "qualified", result
+ end
end
- filetype = askedformat
- if trace_locating then
- report_resolving("using given filetype '%s'",filetype)
- end
end
- local typespec = resolvers.variableofformat(filetype)
- local pathlist = resolvers.expandedpathlist(typespec)
- if not pathlist or #pathlist == 0 then
- -- no pathlist, access check only / todo == wildcard
- if trace_detail then
- report_resolving("checking filename '%s', filetype '%s', wanted files '%s'",filename, filetype or '?',concat(wantedfiles," | "))
+ -- a real wildcard:
+ --
+ -- local filelist = collect_files({basename})
+ -- result = { }
+ -- for f=1,#filelist do
+ -- local ff = filelist[f][3] or ""
+ -- if find(ff,pattern) then
+ -- result[#result+1], ok = ff, true
+ -- end
+ -- end
+ -- if #result > 0 then
+ -- return "qualified", result
+ -- end
+ end
+end
+
+local function check_subpath(fname)
+ if isreadable(fname) then
+ if trace_detail then
+ report_resolving("found '%s' by deep scanning",fname)
+ end
+ return fname
+ end
+end
+
+local function find_intree(filename,filetype,wantedfiles,allresults)
+ local typespec = resolvers.variableofformat(filetype)
+ local pathlist = resolvers.expandedpathlist(typespec)
+ local method = "intree"
+ if pathlist and #pathlist > 0 then
+ -- list search
+ local filelist = collect_files(wantedfiles)
+ local dirlist = { }
+ if filelist then
+ for i=1,#filelist do
+ dirlist[i] = filedirname(filelist[i][3]) .. "/" -- was [2] .. gamble
end
- for k=1,#wantedfiles do
- local fname = wantedfiles[k]
- if fname and isreadable(fname) then
- filename, done = fname, true
- result[#result+1] = filejoin('.',fname)
- break
+ end
+ if trace_detail then
+ report_resolving("checking filename '%s'",filename)
+ end
+ local result = { }
+ for k=1,#pathlist do
+ local path = pathlist[k]
+ local pathname = lpegmatch(inhibitstripper,path)
+ local doscan = path == pathname -- no ^!!
+ if not find (pathname,'//$') then
+ doscan = false -- we check directly on the path
+ end
+ local done = false
+ -- using file list
+ if filelist then -- database
+ -- compare list entries with permitted pattern -- /xx /xx//
+ local expression = makepathexpression(pathname)
+ if trace_detail then
+ report_resolving("using pattern '%s' for path '%s'",expression,pathname)
end
- end
- -- this is actually 'other text files' or 'any' or 'whatever'
- local filelist = collect_files(wantedfiles)
- local fl = filelist and filelist[1]
- if fl then
- filename = fl[3] -- not local?
- result[#result+1] = resolvers.resolve(filename)
- done = true
- end
- else
- -- list search
- local filelist = collect_files(wantedfiles)
- local dirlist = { }
- if filelist then
- for i=1,#filelist do
- dirlist[i] = filedirname(filelist[i][3]) .. "/" -- was [2] .. gamble
- end
- end
- if trace_detail then
- report_resolving("checking filename '%s'",filename)
- end
- for k=1,#pathlist do
- local path = pathlist[k]
- local pathname = lpegmatch(inhibitstripper,path)
- local doscan = path == pathname -- no ^!!
- if not find (pathname, '//$') then doscan = false end
- done = false
- -- using file list
- if filelist then
- -- compare list entries with permitted pattern -- /xx /xx//
- local expression = makepathexpression(pathname)
- if trace_detail then
- report_resolving("using pattern '%s' for path '%s'",expression,pathname)
- end
- for k=1,#filelist do
- local fl = filelist[k]
- local f = fl[2]
- local d = dirlist[k]
- if find(d,expression) then
- -- todo, test for readable
- result[#result+1] = resolvers.resolve(fl[3]) -- no shortcut
- done = true
- if allresults then
- if trace_detail then
- report_resolving("match to '%s' in hash for file '%s' and path '%s', continue scanning",expression,f,d)
- end
- else
- if trace_detail then
- report_resolving("match to '%s' in hash for file '%s' and path '%s', quit scanning",expression,f,d)
- end
- break
+ for k=1,#filelist do
+ local fl = filelist[k]
+ local f = fl[2]
+ local d = dirlist[k]
+ if find(d,expression) then
+ -- todo, test for readable
+ result[#result+1] = resolvers.resolve(fl[3]) -- no shortcut
+ done = true
+ if allresults then
+ if trace_detail then
+ report_resolving("match to '%s' in hash for file '%s' and path '%s', continue scanning",expression,f,d)
end
- elseif trace_detail then
- report_resolving("no match to '%s' in hash for file '%s' and path '%s'",expression,f,d)
+ else
+ if trace_detail then
+ report_resolving("match to '%s' in hash for file '%s' and path '%s', quit scanning",expression,f,d)
+ end
+ break
end
+ elseif trace_detail then
+ report_resolving("no match to '%s' in hash for file '%s' and path '%s'",expression,f,d)
end
end
- if not done then
- -- check if on disk / unchecked / does not work at all / also zips
- -- TH perhaps it did not work because of missing resolvers.resolve()
- -- and resolvers.scanfiles() calls ...
- pathname = resolvers.resolve(pathname)
- local scheme = url.hasscheme(pathname)
- if not scheme or scheme == "file" then
- local pname = gsub(pathname,"%.%*$",'')
- if not find(pname,"%*") then
- local ppname = gsub(pname,"/+$","")
- if can_be_dir(ppname) then
- local files = {}
- if doscan then files = resolvers.scanfiles(ppname) end
+ end
+ if done then
+ method = "database"
+ else
+ method = "filesystem" -- bonus, even when !! is specified
+ pathname = gsub(pathname,"/+$","")
+ pathname = resolvers.resolve(pathname)
+ local scheme = url.hasscheme(pathname)
+ if not scheme or scheme == "file" then
+ local pname = gsub(pathname,"%.%*$",'')
+ if not find(pname,"%*") then
+ if can_be_dir(pname) then
+ -- quick root scan first
+ for k=1,#wantedfiles do
+ local w = wantedfiles[k]
+ local fname = check_subpath(filejoin(pname,w))
+ if fname then
+ result[#result+1] = fname
+ done = true
+ if not allresults then
+ break
+ end
+ end
+ end
+ if not done and doscan then
+ -- collect files in path (and cache the result)
+ local files = resolvers.simplescanfiles(pname,false,true)
for k=1,#wantedfiles do
local w = wantedfiles[k]
- local subpath = ''
- if files[w] then
- subpath = type(files[w]) == "table" and files[w][1] or files[w]
- end
- local fname = filejoin(ppname,subpath,w)
- if isreadable(fname) then
- if trace_detail then
- report_resolving("found '%s' by scanning",fname)
+ local subpath = files[w]
+ if not subpath or subpath == "" then
+ -- rootscan already done
+ elseif type(subpath) == "string" then
+ local fname = check_subpath(filejoin(pname,subpath,w))
+ if fname then
+ result[#result+1] = fname
+ done = true
+ if not allresults then
+ break
+ end
+ end
+ else
+ for i=1,#subpath do
+ local sp = subpath[i]
+ if sp == "" then
+ -- roottest already done
+ else
+ local fname = check_subpath(filejoin(pname,sp,w))
+ if fname then
+ result[#result+1] = fname
+ done = true
+ if not allresults then
+ break
+ end
+ end
+ end
+ end
+ if done and not allresults then
+ break
end
- result[#result+1] = fname
- done = true
- if not allresults then break end
end
end
- else
- -- no access needed for non existing path, speedup (esp in large tree with lots of fake)
end
end
+ else
+ -- no access needed for non existing path, speedup (esp in large tree with lots of fake)
end
end
- if not done and doscan then
- -- todo: slow path scanning ... although we now have tree:// supported in $TEXMF
- end
- if done and not allresults then break end
+ end
+ -- todo recursive scanning
+ if done and not allresults then
+ break
end
end
+ if #result > 0 then
+ return method, result
+ end
end
- for k=1,#result do
- local rk = collapsepath(result[k])
- result[k] = rk
- resolvers.registerintrees(rk) -- for tracing used files
+end
+
+local function find_onpath(filename,filetype,wantedfiles,allresults)
+ if trace_detail then
+ report_resolving("checking filename '%s', filetype '%s', wanted files '%s'",filename, filetype or '?',concat(wantedfiles," | "))
+ end
+ local result = { }
+ for k=1,#wantedfiles do
+ local fname = wantedfiles[k]
+ if fname and isreadable(fname) then
+ filename = fname
+ result[#result+1] = filejoin('.',fname)
+ if not allresults then
+ break
+ end
+ end
end
- if stamp then
- instance.found[stamp] = result
+ if #result > 0 then
+ return "onpath", result
end
- return result
end
--- -- -- begin of main file search routing -- -- --
-
-
-
-
-
-
-
+local function find_otherwise(filename,filetype,wantedfiles,allresults) -- other text files | any | whatever
+ local filelist = collect_files(wantedfiles)
+ local fl = filelist and filelist[1]
+ if fl then
+ return "otherwise", { resolvers.resolve(fl[3]) } -- filename
+ end
+end
+-- we could have a loop over the 6 functions but then we'd have to
+-- always analyze
+collect_instance_files = function(filename,askedformat,allresults) -- uses nested
+ askedformat = askedformat or ""
+ filename = collapsepath(filename)
+ if allresults then
+ -- no need for caching, only used for tracing
+ local filetype, wantedfiles = find_analyze(filename,askedformat)
+ local results = {
+ { find_direct (filename,true) },
+ { find_wildcard (filename,true) },
+ { find_qualified(filename,true) },
+ { find_intree (filename,filetype,wantedfiles,true) },
+ { find_onpath (filename,filetype,wantedfiles,true) },
+ { find_otherwise(filename,filetype,wantedfiles,true) },
+ }
+ local result, status, done = { }, { }, { }
+ for k, r in next, results do
+ local method, list = r[1], r[2]
+ if method and list then
+ for i=1,#list do
+ local c = collapsepath(list[i])
+ if not done[c] then
+ result[#result+1] = c
+ done[c] = true
+ end
+ status[#status+1] = format("%-10s: %s",method,c)
+ end
+ end
+ end
+ if trace_detail then
+ report_resolving("lookup status: %s",table.serialize(status,filename))
+ end
+ return result, status
+ else
+ local method, result, stamp, filetype, wantedfiles
+ if instance.remember then
+ stamp = format("%s--%s", filename, askedformat)
+ result = stamp and instance.found[stamp]
+ if result then
+ if trace_locating then
+ report_resolving("remembered file '%s'",filename)
+ end
+ return result
+ end
+ end
+ method, result = find_direct(filename)
+ if not result then
+ method, result = find_wildcard(filename)
+ if not result then
+ method, result = find_qualified(filename)
+ if not result then
+ filetype, wantedfiles = find_analyze(filename,askedformat)
+ method, result = find_intree(filename,filetype,wantedfiles)
+ if not result then
+ method, result = find_onpath(filename,filetype,wantedfiles)
+ if not result then
+ method, result = find_otherwise(filename,filetype,wantedfiles)
+ end
+ end
+ end
+ end
+ end
+ if result and #result > 0 then
+ local foundname = collapsepath(result[1])
+ resolvers.registerintrees(filename,askedformat,filetype,method,foundname)
+ result = { foundname }
+ else
+ result = { } -- maybe false
+ end
+ if stamp then
+ if trace_locating then
+ report_resolving("remembering file '%s'",filename)
+ end
+ instance.found[stamp] = result
+ end
+ return result
+ end
+end
-- -- -- end of main file search routing -- -- --
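
-- editor's note (minimal sketch, not part of the patch): in
-- collect_instance_files above the six finders (direct, wildcard,
-- qualified, intree, onpath, otherwise) are tried in that order and the
-- first hit wins; when instance.remember is set the outcome is cached
-- under the key format("%s--%s",filename,askedformat). The 'lookup'
-- argument below is a generic stand-in for that finder chain.
local found = { }

local function remembered(filename, askedformat, lookup)
    local stamp = string.format("%s--%s", filename, askedformat or "")
    local result = found[stamp]
    if not result then
        result = lookup(filename, askedformat) or { }
        found[stamp] = result
    end
    return result
end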
+
local function findfiles(filename,filetype,allresults)
- local result = collect_instance_files(filename,filetype or "",allresults)
- if #result == 0 then
+ local result, status = collect_instance_files(filename,filetype or "",allresults)
+ if not result or #result == 0 then
local lowered = lower(filename)
if filename ~= lowered then
- return collect_instance_files(lowered,filetype or "",allresults)
+ result, status = collect_instance_files(lowered,filetype or "",allresults)
end
end
- return result
+ return result or { }, status
end
function resolvers.findfiles(filename,filetype)
@@ -12370,6 +13646,10 @@ function resolvers.load(option)
return files and next(files) and true
end
+function resolvers.loadtime()
+ return statistics.elapsedtime(instance)
+end
+
local function report(str)
if trace_locating then
report_resolving(str) -- has already verbose
@@ -12383,6 +13663,9 @@ function resolvers.dowithfilesandreport(command, files, ...) -- will move
if trace_locating then
report('') -- ?
end
+ if type(files) == "string" then
+ files = { files }
+ end
for f=1,#files do
local file = files[f]
local result = command(file,...)
@@ -12540,6 +13823,8 @@ local cleanpath, findgivenfile, expansion = resolvers.cleanpath, resolvers.findg
local getenv = resolvers.getenv -- we can probably also use resolvers.expansion
local P, Cs, lpegmatch = lpeg.P, lpeg.Cs, lpeg.match
+-- getenv = function(...) return resolvers.getenv(...) end -- needs checking (definitions changes later on)
+
prefixes.environment = function(str)
return cleanpath(expansion(str))
end
@@ -12578,7 +13863,7 @@ end
prefixes.filename = function(str)
local fullname = findgivenfile(str) or ""
- return cleanpath(file.basename((fullname ~= "" and fullname) or str))
+ return cleanpath(file.basename((fullname ~= "" and fullname) or str)) -- no cleanpath needed here
end
prefixes.pathname = function(str)
@@ -12621,8 +13906,9 @@ function resolvers.allprefixes(separator)
end
local function _resolve_(method,target)
- if prefixes[method] then
- return prefixes[method](target)
+ local action = prefixes[method]
+ if action then
+ return action(target)
else
return method .. ":" .. target
end
@@ -12637,7 +13923,7 @@ end
local function resolve(str) -- use schemes, this one is then for the commandline only
local res = resolved[str]
if not res then
- res = gsub(str,"([a-z][a-z]+):([^ \"\';]*)",_resolve_)
+ res = gsub(str,"([a-z][a-z]+):([^ \"\';]*)",_resolve_) -- home:xx;selfautoparent:xx; etc
resolved[str] = res
abstract[res] = str
end
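
-- editor's note (illustrative sketch, not part of the patch): how the
-- prefix dispatch above behaves; each "method:target" pair in a string
-- is handed to _resolve_, known prefixes run their handler, unknown
-- ones pass through unchanged. The prefix table here is a toy example.
local prefixes = {
    environment = function(str) return "<expanded " .. str .. ">" end,
}

local function _resolve_(method, target)
    local action = prefixes[method]
    if action then
        return action(target)
    else
        return method .. ":" .. target
    end
end

local res = string.gsub("environment:TEXMF;home:texmf",
    "([a-z][a-z]+):([^ \"\';]*)", _resolve_)
print(res) -- <expanded TEXMF>;home:texmf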
@@ -12797,7 +14083,7 @@ end
function generators.file(specification)
local path = specification.filename
- local content = resolvers.scanfiles(path)
+ local content = resolvers.scanfiles(path,false,true) -- scan once
resolvers.registerfilehash(path,content,true)
end
@@ -13152,6 +14438,18 @@ local archives = zip.archives
zip.registeredfiles = zip.registeredfiles or { }
local registeredfiles = zip.registeredfiles
+local limited = false
+
+directives.register("system.inputmode", function(v)
+ if not limited then
+ local i_limiter = io.i_limiter(v)
+ if i_limiter then
+ zip.open = i_limiter.protect(zip.open)
+ limited = true
+ end
+ end
+end)
+
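-- editor's note (illustrative sketch, not part of the patch): the
-- wrap-once pattern used above; an open function is replaced exactly one
-- time by a guarded wrapper, with a flag preventing double wrapping.
-- 'check_access' is a hypothetical stand-in for whatever the i_limiter
-- actually enforces.
local function protect(open, check_access)
    return function(name, ...)
        if check_access(name) then
            return open(name, ...)
        end
        return nil, name .. ": access restricted"
    end
end

-- applied once, e.g.: zip.open = protect(zip.open, check_access)
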
local function validzip(str) -- todo: use url splitter
if not find(str,"^zip://") then
return "zip:///" .. str
@@ -13419,11 +14717,11 @@ function resolvers.finders.tree(specification)
end
function resolvers.locators.tree(specification)
- -- TH: resolvers.resolve() call was missing here
- local name = resolvers.resolve(specification.filename)
- if name ~= '' and lfs.isdir(name) then
+ local name = specification.filename
+ local realname = resolvers.resolve(name) -- no shortcut
+ if realname and realname ~= '' and lfs.isdir(realname) then
if trace_locating then
- report_trees("locator '%s' found",name)
+ report_trees("locator '%s' found",realname)
end
resolvers.appendhash('tree',name,false) -- don't cache
elseif trace_locating then
@@ -13437,6 +14735,8 @@ function resolvers.hashers.tree(specification)
report_trees("analysing '%s'",name)
end
resolvers.methodhandler("hashers",name)
+
+ resolvers.generators.file(specification)
end
resolvers.concatinators.tree = resolvers.concatinators.file
@@ -13596,8 +14896,15 @@ local function loaded(libpaths,name,simple)
end
package.loaders[2] = function(name) -- was [#package.loaders+1]
- if trace_locating then -- mode detail
- report_libraries("! locating '%s'",name)
+ if file.suffix(name) == "" then
+ name = file.addsuffix(name,"lua") -- maybe a list
+ if trace_locating then -- mode detail
+ report_libraries("! locating '%s' with forced suffix",name)
+ end
+ else
+ if trace_locating then -- mode detail
+ report_libraries("! locating '%s'",name)
+ end
end
for i=1,#libformats do
local format = libformats[i]
@@ -13804,7 +15111,13 @@ function resolvers.load_tree(tree,resolve)
-- Beware, we need to obey the relocatable autoparent so we
-- set TEXMFCNF to its raw value. This is somewhat tricky when
-- we run a mkii job from within. Therefore, in mtxrun, there
- -- is a resolve applied when we're in mkii/kpse mode.
+ -- is a resolve applied when we're in mkii/kpse mode or when
+ -- --resolve is passed to mtxrun. Maybe we should also set the
+    -- local AUTOPARENT etc. although these are always set anew.
+
+ if resolve then
+ resolvers.luacnfspec = resolvers.resolve(resolvers.luacnfspec)
+ end
setenv('SELFAUTOPARENT', newroot)
setenv('SELFAUTODIR', newtree)
@@ -13812,11 +15125,12 @@ function resolvers.load_tree(tree,resolve)
setenv('TEXROOT', newroot)
setenv('TEXOS', texos)
setenv('TEXMFOS', texmfos)
- setenv('TEXMFCNF', resolvers.luacnfspec, not resolve)
- setenv("PATH", newpath .. io.pathseparator .. getenv("PATH"))
+ setenv('TEXMFCNF', resolvers.luacnfspec,true) -- already resolved
+ setenv('PATH', newpath .. io.pathseparator .. getenv('PATH'))
report_tds("changing from root '%s' to '%s'",oldroot,newroot)
- report_tds("prepending '%s' to binary path",newpath)
+ report_tds("prepending '%s' to PATH",newpath)
+ report_tds("setting TEXMFCNF to '%s'",resolvers.luacnfspec)
report_tds()
end
end
@@ -13837,6 +15151,7 @@ if not modules then modules = { } end modules ['data-lst'] = {
-- used in mtxrun, can be loaded later .. todo
local find, concat, upper, format = string.find, table.concat, string.upper, string.format
+local fastcopy, sortedpairs = table.fastcopy, table.sortedpairs
resolvers.listers = resolvers.listers or { }
@@ -13867,10 +15182,10 @@ function resolvers.listers.variables(pattern)
end
end
end
- local env = table.fastcopy(environment)
- local var = table.fastcopy(variables)
- local exp = table.fastcopy(expansions)
- for key, value in table.sortedpairs(configured) do
+ local env = fastcopy(environment)
+ local var = fastcopy(variables)
+ local exp = fastcopy(expansions)
+ for key, value in sortedpairs(configured) do
if key ~= "" and (pattern == "" or find(upper(key),pattern)) then
report_lists(key)
report_lists(" env: %s",tabstr(rawget(environment,key)) or "unset")
@@ -13879,9 +15194,9 @@ function resolvers.listers.variables(pattern)
report_lists(" res: %s",resolvers.resolve(expansions[key]) or "unset")
end
end
- instance.environment = table.fastcopy(env)
- instance.variables = table.fastcopy(var)
- instance.expansions = table.fastcopy(exp)
+ instance.environment = fastcopy(env)
+ instance.variables = fastcopy(var)
+ instance.expansions = fastcopy(exp)
end
function resolvers.listers.configurations(report)
@@ -14168,8 +15483,8 @@ own = { } -- not local, might change
own.libs = { -- order can be made better
'l-string.lua',
- 'l-lpeg.lua',
'l-table.lua',
+ 'l-lpeg.lua',
'l-io.lua',
'l-number.lua',
'l-set.lua',
@@ -14240,7 +15555,7 @@ end
-- End of hack.
-local format, gsub, gmatch, match = string.format, string.gsub, string.gmatch, string.match
+local format, gsub, gmatch, match, find = string.format, string.gsub, string.gmatch, string.match, string.find
local concat = table.concat
own.name = (environment and environment.ownname) or arg[0] or 'mtxrun.lua'
@@ -14362,7 +15677,7 @@ local helpinfo = [[
--resolve resolve prefixed arguments
--ctxlua run internally (using preloaded libs)
--internal run script using built in libraries (same as --ctxlua)
---locate locate given filename
+--locate locate given filename in database (default) or system (--first --all --detail)
--autotree use texmf tree cf. env 'texmfstart_tree' or 'texmfstarttree'
--tree=pathtotree use given texmf tree (default file: 'setuptex.tmf')
@@ -14410,7 +15725,7 @@ local helpinfo = [[
local application = logs.application {
name = "mtxrun",
- banner = "ConTeXt TDS Runner Tool 1.30",
+ banner = "ConTeXt TDS Runner Tool 1.31",
helpinfo = helpinfo,
}
@@ -14455,12 +15770,12 @@ runners.registered = {
xmltools = { 'xmltools.rb', true },
-- luatools = { 'luatools.lua', true },
mtxtools = { 'mtxtools.rb', true },
- pdftrimwhite = { 'pdftrimwhite.pl', false }
+ pdftrimwhite = { 'pdftrimwhite.pl', false },
}
runners.launchers = {
windows = { },
- unix = { }
+ unix = { },
}
-- like runners.libpath("framework"): looks on script's subpath
@@ -14487,7 +15802,7 @@ function runners.prepare()
end
local touchname = environment.argument("iftouched")
if type(touchname) == "string" and touchname ~= "" then
- local oldname, newname = string.split(touchname, ",")
+ local oldname, newname = string.splitup(touchname, ",")
if oldname and newname and oldname ~= "" and newname ~= "" then
if not file.needs_updating(oldname,newname) then
if e_verbose then
@@ -14521,11 +15836,11 @@ function runners.execute_script(fullname,internal,nosplit)
if path ~= "" then
result = fullname
elseif name then
- name = name:gsub("^int[%a]*:",function()
+ name = gsub(name,"^int[%a]*:",function()
internal = true
return ""
end )
- name = name:gsub("^script:","")
+ name = gsub(name,"^script:","")
if suffix == "" and runners.registered[name] and runners.registered[name][1] then
name = runners.registered[name][1]
suffix = file.extname(name)
@@ -14555,13 +15870,15 @@ function runners.execute_script(fullname,internal,nosplit)
environment.ownscript = result
dofile(result)
else
- local quotedresult = result
- if string.match(result,' ') and not string.match(result,"^\".*\"$") then quotedresult = '"' .. result .. '"' end
local binary = runners.applications[file.extname(result)]
+ result = string.quoted(string.unquoted(result))
+ -- if string.match(result,' ') and not string.match(result,"^\".*\"$") then
+ -- result = '"' .. result .. '"'
+ -- end
if binary and binary ~= "" then
- quotedresult = binary .. " " .. quotedresult
+ result = binary .. " " .. result
end
- local command = quotedresult .. " " .. environment.reconstructcommandline(environment.arguments_after,noquote)
+ local command = result .. " " .. environment.reconstructcommandline(environment.arguments_after,noquote)
if e_verbose then
report()
report("executing: %s",command)
@@ -14606,7 +15923,7 @@ function runners.execute_program(fullname)
local before, after = environment.splitarguments(fullname)
for k=1,#after do after[k] = resolvers.resolve(after[k]) end
environment.initializearguments(after)
- fullname = fullname:gsub("^bin:","")
+ fullname = gsub(fullname,"^bin:","")
local command = fullname .. " " .. (environment.reconstructcommandline(after or "",noquote) or "")
report()
report("executing: %s",command)
@@ -14667,10 +15984,24 @@ function runners.resolve_string(filename)
end
end
-function runners.locate_file(filename)
- -- differs from texmfstart where locate appends .com .exe .bat ... todo
+-- differs from texmfstart where locate appends .com .exe .bat ... todo
+
+function runners.locate_file(filename) -- was given file but only searches in tree
if filename and filename ~= "" then
- runners.report_location(resolvers.findgivenfile(filename))
+ if environment.argument("first") then
+ runners.report_location(resolvers.findfile(filename))
+ -- resolvers.dowithfilesandreport(resolvers.findfile,filename)
+ elseif environment.argument("all") then
+ local result, status = resolvers.findfiles(filename)
+ if status and environment.argument("detail") then
+ runners.report_location(status)
+ else
+ runners.report_location(result)
+ end
+ else
+ runners.report_location(resolvers.findgivenfile(filename))
+ -- resolvers.dowithfilesandreport(resolvers.findgivenfile,filename)
+ end
end
end
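
-- editor's note (usage sketch, not part of the patch; flags as listed in
-- the --locate helpinfo, file name is a placeholder, output depends on
-- the local tree):
--
--   mtxrun --locate somefile.tex                  database lookup (default)
--   mtxrun --locate --first somefile.tex          first match via findfile
--   mtxrun --locate --all somefile.tex            all matches via findfiles
--   mtxrun --locate --all --detail somefile.tex   matches with method status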
@@ -14679,12 +16010,12 @@ function runners.locate_platform()
end
function runners.report_location(result)
- if e_verbose then
- reportline()
- if result and result ~= "" then
- report(result)
- else
- report("not found")
+ if type(result) == "table" then
+ for i=1,#result do
+ if i > 1 then
+ io.write("\n")
+ end
+ io.write(result[i])
end
else
io.write(result)
@@ -14742,8 +16073,8 @@ function resolvers.launch(str)
end
function runners.launch_file(filename)
- instance.allresults = true
trackers.enable("resolvers.locating")
+ local allresults = environment.arguments["all"]
local pattern = environment.arguments["pattern"]
if not pattern or pattern == "" then
pattern = filename
@@ -14751,15 +16082,15 @@ function runners.launch_file(filename)
if not pattern or pattern == "" then
report("provide name or --pattern=")
else
- local t = resolvers.findfiles(pattern)
+ local t = resolvers.findfiles(pattern,nil,allresults)
if not t or #t == 0 then
- t = resolvers.findfiles("*/" .. pattern)
+ t = resolvers.findfiles("*/" .. pattern,nil,allresults)
end
if not t or #t == 0 then
- t = resolvers.findfiles("*/" .. pattern .. "*")
+ t = resolvers.findfiles("*/" .. pattern .. "*",nil,allresults)
end
if t and #t > 0 then
- if environment.arguments["all"] then
+ if allresults then
for _, v in pairs(t) do
report("launching %s", v)
resolvers.launch(v)
@@ -14774,6 +16105,11 @@ function runners.launch_file(filename)
end
end
+local mtxprefixes = {
+ { "^mtx%-", "mtx-" },
+ { "^mtx%-t%-", "mtx-t-" },
+}
+
function runners.find_mtx_script(filename)
local function found(name)
local path = file.dirname(name)
@@ -14799,24 +16135,27 @@ function runners.find_mtx_script(filename)
return fullname
end
-- mtx- prefix checking
- local mtxprefix = (filename:find("^mtx%-") and "") or "mtx-"
- -- context namespace, mtx-<filename>
- fullname = mtxprefix .. filename
- fullname = found(fullname) or resolvers.findfile(fullname)
- if fullname and fullname ~= "" then
- return fullname
- end
- -- context namespace, mtx-<filename>s
- fullname = mtxprefix .. basename .. "s" .. "." .. suffix
- fullname = found(fullname) or resolvers.findfile(fullname)
- if fullname and fullname ~= "" then
- return fullname
- end
- -- context namespace, mtx-<filename minus trailing s>
- fullname = mtxprefix .. basename:gsub("s$","") .. "." .. suffix
- fullname = found(fullname) or resolvers.findfile(fullname)
- if fullname and fullname ~= "" then
- return fullname
+ for i=1,#mtxprefixes do
+ local mtxprefix = mtxprefixes[i]
+ mtxprefix = find(filename,mtxprefix[1]) and "" or mtxprefix[2]
+ -- context namespace, mtx-<filename>
+ fullname = mtxprefix .. filename
+ fullname = found(fullname) or resolvers.findfile(fullname)
+ if fullname and fullname ~= "" then
+ return fullname
+ end
+ -- context namespace, mtx-<filename>s
+ fullname = mtxprefix .. basename .. "s" .. "." .. suffix
+ fullname = found(fullname) or resolvers.findfile(fullname)
+ if fullname and fullname ~= "" then
+ return fullname
+ end
+ -- context namespace, mtx-<filename minus trailing s>
+ fullname = mtxprefix .. gsub(basename,"s$","") .. "." .. suffix
+ fullname = found(fullname) or resolvers.findfile(fullname)
+ if fullname and fullname ~= "" then
+ return fullname
+ end
end
-- context namespace, just <filename>
fullname = resolvers.findfile(filename)
@@ -14963,9 +16302,11 @@ instance.lsrmode = environment.argument("lsr") or false
local is_mkii_stub = runners.registered[file.removesuffix(file.basename(filename))]
-if environment.argument("usekpse") or environment.argument("forcekpse") or is_mkii_stub then
+local e_argument = environment.argument
+
+if e_argument("usekpse") or e_argument("forcekpse") or is_mkii_stub then
- resolvers.load_tree(environment.argument('tree'),true) -- force resolve of TEXMFCNF
+ resolvers.load_tree(e_argument('tree'),true) -- force resolve of TEXMFCNF
os.setenv("engine","")
os.setenv("progname","")
@@ -14978,7 +16319,7 @@ if environment.argument("usekpse") or environment.argument("forcekpse") or is_mk
other = "other text files",
}
- local progname = environment.argument("progname") or 'context'
+ local progname = e_argument("progname") or 'context'
local function kpse_initialized()
texconfig.kpse_init = true
@@ -14993,7 +16334,7 @@ if environment.argument("usekpse") or environment.argument("forcekpse") or is_mk
local findfile = resolvers.findfile
local showpath = resolvers.showpath
- if environment.argument("forcekpse") then
+ if e_argument("forcekpse") then
function resolvers.findfile(name,kind)
return (kpse_initialized():find_file(resolvers.cleanpath(name),(kind ~= "" and (remapper[kind] or kind)) or "tex") or "") or ""
@@ -15002,7 +16343,7 @@ if environment.argument("usekpse") or environment.argument("forcekpse") or is_mk
return (kpse_initialized():show_path(name)) or ""
end
- elseif environment.argument("usekpse") or is_mkii_stub then
+ elseif e_argument("usekpse") or is_mkii_stub then
resolvers.load()
@@ -15041,12 +16382,12 @@ else
end
end
- resolvers.load_tree(environment.argument('tree'))
+ resolvers.load_tree(e_argument('tree'),e_argument("resolve"))
end
-if environment.argument("selfmerge") then
+if e_argument("selfmerge") then
-- embed used libraries
@@ -15056,27 +16397,27 @@ if environment.argument("selfmerge") then
utilities.merger.selfmerge(own.name,own.libs,{ found })
end
-elseif environment.argument("selfclean") then
+elseif e_argument("selfclean") then
-- remove embedded libraries
runners.loadbase()
utilities.merger.selfclean(own.name)
-elseif environment.argument("selfupdate") then
+elseif e_argument("selfupdate") then
runners.loadbase()
trackers.enable("resolvers.locating")
resolvers.updatescript(own.name,"mtxrun")
-elseif environment.argument("ctxlua") or environment.argument("internal") then
+elseif e_argument("ctxlua") or e_argument("internal") then
-- run a script by loading it (using libs)
runners.loadbase()
ok = runners.execute_script(filename,true)
-elseif environment.argument("script") or environment.argument("scripts") then
+elseif e_argument("script") or e_argument("scripts") then
-- run a script by loading it (using libs), pass args
@@ -15087,109 +16428,110 @@ elseif environment.argument("script") or environment.argument("scripts") then
ok = runners.execute_ctx_script(filename)
end
-elseif environment.argument("execute") then
+elseif e_argument("execute") then
-- execute script
runners.loadbase()
ok = runners.execute_script(filename)
-elseif environment.argument("direct") then
+elseif e_argument("direct") then
-- equals bin:
runners.loadbase()
ok = runners.execute_program(filename)
-elseif environment.argument("edit") then
+elseif e_argument("edit") then
-- edit file
runners.loadbase()
runners.edit_script(filename)
-elseif environment.argument("launch") then
+elseif e_argument("launch") then
runners.loadbase()
runners.launch_file(filename)
-elseif environment.argument("makestubs") then
+elseif e_argument("makestubs") then
    -- make stubs (deprecated)
runners.handle_stubs(true)
-elseif environment.argument("removestubs") then
+elseif e_argument("removestubs") then
    -- remove stubs (deprecated)
runners.loadbase()
runners.handle_stubs(false)
-elseif environment.argument("resolve") then
+elseif e_argument("resolve") then
-- resolve string
runners.loadbase()
runners.resolve_string(filename)
-elseif environment.argument("locate") then
+elseif e_argument("locate") then
- -- locate file
+    -- locate file in database (default) or system (--first --all --detail)
runners.loadbase()
runners.locate_file(filename)
-elseif environment.argument("platform") or environment.argument("show-platform") then
+elseif e_argument("platform") or e_argument("show-platform") then
-- locate platform
runners.loadbase()
runners.locate_platform()
-elseif environment.argument("prefixes") then
+elseif e_argument("prefixes") then
runners.loadbase()
runners.prefixes()
-elseif environment.argument("timedrun") then
+elseif e_argument("timedrun") then
    -- timed run
runners.loadbase()
runners.timedrun(filename)
-elseif environment.argument("variables") or environment.argument("show-variables") or environment.argument("expansions") or environment.argument("show-expansions") then
+elseif e_argument("variables") or e_argument("show-variables") or e_argument("expansions") or e_argument("show-expansions") then
-- luatools: runners.execute_ctx_script("mtx-base","--expansions",filename)
resolvers.load("nofiles")
- resolvers.listers.variables(environment.argument("pattern"))
+ resolvers.listers.variables(e_argument("pattern"))
-elseif environment.argument("configurations") or environment.argument("show-configurations") then
+elseif e_argument("configurations") or e_argument("show-configurations") then
-- luatools: runners.execute_ctx_script("mtx-base","--configurations",filename)
resolvers.load("nofiles")
resolvers.listers.configurations()
-elseif environment.argument("find-file") then
+elseif e_argument("find-file") then
-- luatools: runners.execute_ctx_script("mtx-base","--find-file",filename)
resolvers.load()
- local e_pattern = environment.argument("pattern")
- local e_format = environment.argument("format")
+ local e_all = e_argument("all")
+ local e_pattern = e_argument("pattern")
+ local e_format = e_argument("format")
+ local finder = e_all and resolvers.findfiles or resolvers.findfile
if not e_pattern then
runners.register_arguments(filename)
environment.initializearguments(environment.arguments_after)
- resolvers.dowithfilesandreport(resolvers.findfiles,environment.files,e_format)
+ resolvers.dowithfilesandreport(finder,environment.files,e_format)
elseif type(e_pattern) == "string" then
- instance.allresults = true -- brrrr
- resolvers.dowithfilesandreport(resolvers.findfiles,{ e_pattern }, e_format)
+ resolvers.dowithfilesandreport(finder,{ e_pattern },e_format)
end
-elseif environment.argument("find-path") then
+elseif e_argument("find-path") then
-- luatools: runners.execute_ctx_script("mtx-base","--find-path",filename)
@@ -15201,7 +16543,7 @@ elseif environment.argument("find-path") then
print(path)
end
-elseif environment.argument("expand-braces") then
+elseif e_argument("expand-braces") then
-- luatools: runners.execute_ctx_script("mtx-base","--expand-braces",filename)
@@ -15210,7 +16552,7 @@ elseif environment.argument("expand-braces") then
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.expandbraces, environment.files)
-elseif environment.argument("expand-path") then
+elseif e_argument("expand-path") then
-- luatools: runners.execute_ctx_script("mtx-base","--expand-path",filename)
@@ -15219,7 +16561,7 @@ elseif environment.argument("expand-path") then
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.expandpath, environment.files)
-elseif environment.argument("expand-var") or environment.argument("expand-variable") then
+elseif e_argument("expand-var") or e_argument("expand-variable") then
-- luatools: runners.execute_ctx_script("mtx-base","--expand-var",filename)
@@ -15228,7 +16570,7 @@ elseif environment.argument("expand-var") or environment.argument("expand-variab
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.expansion, environment.files)
-elseif environment.argument("show-path") or environment.argument("path-value") then
+elseif e_argument("show-path") or e_argument("path-value") then
-- luatools: runners.execute_ctx_script("mtx-base","--show-path",filename)
@@ -15237,7 +16579,7 @@ elseif environment.argument("show-path") or environment.argument("path-value") t
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.showpath, environment.files)
-elseif environment.argument("var-value") or environment.argument("show-value") then
+elseif e_argument("var-value") or e_argument("show-value") then
-- luatools: runners.execute_ctx_script("mtx-base","--show-value",filename)
@@ -15246,28 +16588,36 @@ elseif environment.argument("var-value") or environment.argument("show-value") t
environment.initializearguments(environment.arguments_after)
resolvers.dowithfilesandreport(resolvers.variable,environment.files)
-elseif environment.argument("format-path") then
+elseif e_argument("format-path") then
-- luatools: runners.execute_ctx_script("mtx-base","--format-path",filename)
resolvers.load()
report(caches.getwritablepath("format"))
-elseif environment.argument("pattern") then
+elseif e_argument("pattern") then
-- luatools
- runners.execute_ctx_script("mtx-base","--pattern='" .. environment.argument("pattern") .. "'",filename)
+ runners.execute_ctx_script("mtx-base","--pattern='" .. e_argument("pattern") .. "'",filename)
-elseif environment.argument("generate") then
+elseif e_argument("generate") then
-- luatools
- instance.renewcache = true
- trackers.enable("resolvers.locating")
- resolvers.load()
+ if filename and filename ~= "" then
+ resolvers.load("nofiles")
+ trackers.enable("resolvers.locating")
+ resolvers.renew(filename)
+ else
+ instance.renewcache = true
+ trackers.enable("resolvers.locating")
+ resolvers.load()
+ end
-elseif environment.argument("make") or environment.argument("ini") or environment.argument("compile") then
+ e_verbose = true
+
+elseif e_argument("make") or e_argument("ini") or e_argument("compile") then
-- luatools: runners.execute_ctx_script("mtx-base","--make",filename)
@@ -15275,29 +16625,33 @@ elseif environment.argument("make") or environment.argument("ini") or environmen
trackers.enable("resolvers.locating")
environment.make_format(filename)
-elseif environment.argument("run") then
+elseif e_argument("run") then
-- luatools
runners.execute_ctx_script("mtx-base","--run",filename)
-elseif environment.argument("fmt") then
+elseif e_argument("fmt") then
-- luatools
runners.execute_ctx_script("mtx-base","--fmt",filename)
-elseif environment.argument("help") and filename=='base' then
+elseif e_argument("help") and filename=='base' then
-- luatools
runners.execute_ctx_script("mtx-base","--help")
-elseif environment.argument("help") or filename=='help' or filename == "" then
+elseif e_argument("version") then
+
+ application.version()
+
+elseif e_argument("help") or filename=='help' or filename == "" then
application.help()
-elseif filename:find("^bin:") then
+elseif find(filename,"^bin:") then
runners.loadbase()
ok = runners.execute_program(filename)