From d7ccb42582f85acf30568913610ccf4d602023fb Mon Sep 17 00:00:00 2001
From: Taco Hoekwater
Date: Wed, 1 Jun 2011 08:54:21 +0000
Subject: commit context 2011.05.18
git-svn-id: svn://tug.org/texlive/trunk@22719 c570f23f-e606-0410-a88d-b1316a301751
---
.../texmf-dist/scripts/context/stubs/unix/mtxrun | 13162 +++++++++++--------
1 file changed, 7877 insertions(+), 5285 deletions(-)
(limited to 'Master/texmf-dist/scripts/context/stubs/unix/mtxrun')
diff --git a/Master/texmf-dist/scripts/context/stubs/unix/mtxrun b/Master/texmf-dist/scripts/context/stubs/unix/mtxrun
index 2bdd1b8f020..cc5ebb478ac 100755
--- a/Master/texmf-dist/scripts/context/stubs/unix/mtxrun
+++ b/Master/texmf-dist/scripts/context/stubs/unix/mtxrun
@@ -8,7 +8,6 @@ if not modules then modules = { } end modules ['mtxrun'] = {
license = "see context related readme files"
}
-
-- one can make a stub:
--
-- #!/bin/sh
@@ -38,8 +37,6 @@ if not modules then modules = { } end modules ['mtxrun'] = {
-- remember for subruns: _CTX_K_S_#{original}_
-- remember for subruns: TEXMFSTART.#{original} [tex.rb texmfstart.rb]
-texlua = true
-
-- begin library merge
@@ -54,8 +51,9 @@ if not modules then modules = { } end modules ['l-string'] = {
license = "see context related readme files"
}
+local string = string
local sub, gsub, find, match, gmatch, format, char, byte, rep, lower = string.sub, string.gsub, string.find, string.match, string.gmatch, string.format, string.char, string.byte, string.rep, string.lower
-local lpegmatch = lpeg.match
+local lpegmatch, S, C, Ct = lpeg.match, lpeg.S, lpeg.C, lpeg.Ct
-- some functions may disappear as they are not used anywhere
@@ -63,196 +61,68 @@ if not string.split then
-- this will be overloaded by a faster lpeg variant
- function string:split(pattern)
- if #self > 0 then
- local t = { }
- for s in gmatch(self..pattern,"(.-)"..pattern) do
- t[#t+1] = s
+ function string.split(str,pattern)
+ local t = { }
+ if #str > 0 then
+ local n = 1
+ for s in gmatch(str..pattern,"(.-)"..pattern) do
+ t[n] = s
+ n = n + 1
end
- return t
- else
- return { }
end
+ return t
end
end
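+--~ usage sketch (illustrative values):
+--~ string.split("a,b,c",",") -- { "a", "b", "c" }
+--~ string.split("",",")      -- { }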
-local chr_to_esc = {
- ["%"] = "%%",
- ["."] = "%.",
- ["+"] = "%+", ["-"] = "%-", ["*"] = "%*",
- ["^"] = "%^", ["$"] = "%$",
- ["["] = "%[", ["]"] = "%]",
- ["("] = "%(", [")"] = "%)",
- ["{"] = "%{", ["}"] = "%}"
-}
-
-string.chr_to_esc = chr_to_esc
-
-function string:esc() -- variant 2
- return (gsub(self,"(.)",chr_to_esc))
-end
-
-function string:unquote()
- return (gsub(self,"^([\"\'])(.*)%1$","%2"))
+function string.unquoted(str)
+ return (gsub(str,"^([\"\'])(.*)%1$","%2"))
end
---~ function string:unquote()
---~ if find(self,"^[\'\"]") then
---~ return sub(self,2,-2)
---~ else
---~ return self
---~ end
---~ end
-function string:quote() -- we could use format("%q")
- return format("%q",self)
+function string.quoted(str)
+ return format("%q",str) -- always "
end
-function string:count(pattern) -- variant 3
+function string.count(str,pattern) -- variant 3
local n = 0
- for _ in gmatch(self,pattern) do
+ for _ in gmatch(str,pattern) do -- not for utf
n = n + 1
end
return n
end
-function string:limit(n,sentinel)
- if #self > n then
- sentinel = sentinel or " ..."
- return sub(self,1,(n-#sentinel)) .. sentinel
- else
- return self
- end
-end
-
---~ function string:strip() -- the .- is quite efficient
---~ -- return match(self,"^%s*(.-)%s*$") or ""
---~ -- return match(self,'^%s*(.*%S)') or '' -- posted on lua list
---~ return find(s,'^%s*$') and '' or match(s,'^%s*(.*%S)')
---~ end
-
-do -- roberto's variant:
- local space = lpeg.S(" \t\v\n")
- local nospace = 1 - space
- local stripper = space^0 * lpeg.C((space^0 * nospace^1)^0)
- function string.strip(str)
- return lpegmatch(stripper,str) or ""
- end
-end
-
-function string:is_empty()
- return not find(self,"%S")
-end
-
-function string:enhance(pattern,action)
- local ok, n = true, 0
- while ok do
- ok = false
- self = gsub(self,pattern, function(...)
- ok, n = true, n + 1
- return action(...)
- end)
- end
- return self, n
-end
-
-local chr_to_hex, hex_to_chr = { }, { }
-
-for i=0,255 do
- local c, h = char(i), format("%02X",i)
- chr_to_hex[c], hex_to_chr[h] = h, c
-end
-
-function string:to_hex()
- return (gsub(self or "","(.)",chr_to_hex))
-end
-
-function string:from_hex()
- return (gsub(self or "","(..)",hex_to_chr))
-end
-
-if not string.characters then
-
- local function nextchar(str, index)
- index = index + 1
- return (index <= #str) and index or nil, sub(str,index,index)
- end
- function string:characters()
- return nextchar, self, 0
- end
- local function nextbyte(str, index)
- index = index + 1
- return (index <= #str) and index or nil, byte(sub(str,index,index))
- end
- function string:bytes()
- return nextbyte, self, 0
- end
-
-end
-
--- we can use format for this (neg n)
-
-function string:rpadd(n,chr)
- local m = n-#self
- if m > 0 then
- return self .. rep(chr or " ",m)
+function string.limit(str,n,sentinel) -- not utf proof
+ if #str > n then
+ sentinel = sentinel or "..."
+ return sub(str,1,(n-#sentinel)) .. sentinel
else
- return self
- end
-end
-
-function string:lpadd(n,chr)
- local m = n-#self
- if m > 0 then
- return rep(chr or " ",m) .. self
- else
- return self
+ return str
end
end
-string.padd = string.rpadd
+local space = S(" \t\v\n")
+local nospace = 1 - space
+local stripper = space^0 * C((space^0 * nospace^1)^0) -- roberto's code
-function is_number(str) -- tonumber
- return find(str,"^[%-%+]?[%d]-%.?[%d+]$") == 1
+function string.strip(str)
+ return lpegmatch(stripper,str) or ""
end
---~ print(is_number("1"))
---~ print(is_number("1.1"))
---~ print(is_number(".1"))
---~ print(is_number("-0.1"))
---~ print(is_number("+0.1"))
---~ print(is_number("-.1"))
---~ print(is_number("+.1"))
-
-function string:split_settings() -- no {} handling, see l-aux for lpeg variant
- if find(self,"=") then
- local t = { }
- for k,v in gmatch(self,"(%a+)=([^%,]*)") do
- t[k] = v
- end
- return t
- else
- return nil
- end
+function string.is_empty(str)
+ return not find(str,"%S")
end
local patterns_escapes = {
- ["-"] = "%-",
- ["."] = "%.",
- ["+"] = "%+",
- ["*"] = "%*",
["%"] = "%%",
- ["("] = "%)",
- [")"] = "%)",
- ["["] = "%[",
- ["]"] = "%]",
+ ["."] = "%.",
+ ["+"] = "%+", ["-"] = "%-", ["*"] = "%*",
+ ["["] = "%[", ["]"] = "%]",
+ ["("] = "%(", [")"] = "%)",
+ -- ["{"] = "%{", ["}"] = "%}"
+ -- ["^"] = "%^", ["$"] = "%$",
}
-function string:pattesc()
- return (gsub(self,".",patterns_escapes))
-end
-
local simple_escapes = {
["-"] = "%-",
["."] = "%.",
@@ -260,80 +130,30 @@ local simple_escapes = {
["*"] = ".*",
}
-function string:simpleesc()
- return (gsub(self,".",simple_escapes))
-end
-
-function string:tohash()
- local t = { }
- for s in gmatch(self,"([^, ]+)") do -- lpeg
- t[s] = true
- end
- return t
-end
-
-local pattern = lpeg.Ct(lpeg.C(1)^0)
-
-function string:totable()
- return lpegmatch(pattern,self)
+function string.escapedpattern(str,simple)
+ return (gsub(str,".",simple and simple_escapes or patterns_escapes))
end
---~ local t = {
---~ "1234567123456712345671234567",
---~ "a\tb\tc",
---~ "aa\tbb\tcc",
---~ "aaa\tbbb\tccc",
---~ "aaaa\tbbbb\tcccc",
---~ "aaaaa\tbbbbb\tccccc",
---~ "aaaaaa\tbbbbbb\tcccccc",
---~ }
---~ for k,v do
---~ print(string.tabtospace(t[k]))
---~ end
-
-function string.tabtospace(str,tab)
- -- we don't handle embedded newlines
- while true do
- local s = find(str,"\t")
- if s then
- if not tab then tab = 7 end -- only when found
- local d = tab-(s-1) % tab
- if d > 0 then
- str = gsub(str,"\t",rep(" ",d),1)
- else
- str = gsub(str,"\t","",1)
- end
+function string.topattern(str,lowercase,strict)
+ if str == "" then
+ return ".*"
+ else
+ str = gsub(str,".",simple_escapes)
+ if lowercase then
+ str = lower(str)
+ end
+ if strict then
+ return "^" .. str .. "$"
else
- break
+ return str
end
end
- return str
end
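+--~ usage sketch (illustrative values):
+--~ string.topattern("foo*.bar")       -- "foo.*%.bar"
+--~ string.topattern("FOO?",true,true) -- "^foo.$"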
-function string:compactlong() -- strips newlines and leading spaces
- self = gsub(self,"[\n\r]+ *","")
- self = gsub(self,"^ *","")
- return self
-end
-
-function string:striplong() -- strips newlines and leading spaces
- self = gsub(self,"^%s*","")
- self = gsub(self,"[\n\r]+ *","\n")
- return self
-end
+-- obsolete names:
-function string:topattern(lowercase,strict)
- if lowercase then
- self = lower(self)
- end
- self = gsub(self,".",simple_escapes)
- if self == "" then
- self = ".*"
- elseif strict then
- self = "^" .. self .. "$"
- end
- return self
-end
+string.quote = string.quoted
+string.unquote = string.unquoted
end -- of closure
@@ -350,28 +170,75 @@ if not modules then modules = { } end modules ['l-lpeg'] = {
local lpeg = require("lpeg")
+local type = type
+
+-- Beware, we predefine a bunch of patterns here and one reason for doing so
+-- is that we get consistent behaviour in some of the visualizers.
+
lpeg.patterns = lpeg.patterns or { } -- so that we can share
local patterns = lpeg.patterns
-local P, R, S, Ct, C, Cs, Cc, V = lpeg.P, lpeg.R, lpeg.S, lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc, lpeg.V
-local match = lpeg.match
+local P, R, S, V, match = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.match
+local Ct, C, Cs, Cc = lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc
+local lpegtype = lpeg.type
+
+local utfcharacters = string.utfcharacters
+local utfgmatch = unicode and unicode.utf8.gmatch
+
+local anything = P(1)
+local endofstring = P(-1)
+local alwaysmatched = P(true)
+
+patterns.anything = anything
+patterns.endofstring = endofstring
+patterns.beginofstring = alwaysmatched
+patterns.alwaysmatched = alwaysmatched
local digit, sign = R('09'), S('+-')
local cr, lf, crlf = P("\r"), P("\n"), P("\r\n")
-local utf8byte = R("\128\191")
+local newline = crlf + cr + lf
+local escaped = P("\\") * anything
+local squote = P("'")
+local dquote = P('"')
+local space = P(" ")
+
+local utfbom_32_be = P('\000\000\254\255')
+local utfbom_32_le = P('\255\254\000\000')
+local utfbom_16_be = P('\255\254')
+local utfbom_16_le = P('\254\255')
+local utfbom_8 = P('\239\187\191')
+local utfbom = utfbom_32_be + utfbom_32_le
+ + utfbom_16_be + utfbom_16_le
+ + utfbom_8
+local utftype = utfbom_32_be / "utf-32-be" + utfbom_32_le / "utf-32-le"
+ + utfbom_16_be / "utf-16-be" + utfbom_16_le / "utf-16-le"
+ + utfbom_8 / "utf-8" + alwaysmatched / "unknown"
+
+local utf8next = R("\128\191")
-patterns.utf8byte = utf8byte
patterns.utf8one = R("\000\127")
-patterns.utf8two = R("\194\223") * utf8byte
-patterns.utf8three = R("\224\239") * utf8byte * utf8byte
-patterns.utf8four = R("\240\244") * utf8byte * utf8byte * utf8byte
+patterns.utf8two = R("\194\223") * utf8next
+patterns.utf8three = R("\224\239") * utf8next * utf8next
+patterns.utf8four = R("\240\244") * utf8next * utf8next * utf8next
+patterns.utfbom = utfbom
+patterns.utftype = utftype
+
+local utf8char = patterns.utf8one + patterns.utf8two + patterns.utf8three + patterns.utf8four
+local validutf8char = utf8char^0 * endofstring * Cc(true) + Cc(false)
+
+patterns.utf8 = utf8char
+patterns.utf8char = utf8char
+patterns.validutf8 = validutf8char
+patterns.validutf8char = validutf8char
patterns.digit = digit
patterns.sign = sign
patterns.cardinal = sign^0 * digit^1
patterns.integer = sign^0 * digit^1
patterns.float = sign^0 * digit^0 * P('.') * digit^1
+patterns.cfloat = sign^0 * digit^0 * P(',') * digit^1
patterns.number = patterns.float + patterns.integer
+patterns.cnumber = patterns.cfloat + patterns.integer
patterns.oct = P("0") * R("07")^1
patterns.octal = patterns.oct
patterns.HEX = P("0x") * R("09","AF")^1
@@ -380,16 +247,42 @@ patterns.hexadecimal = P("0x") * R("09","AF","af")^1
patterns.lowercase = R("az")
patterns.uppercase = R("AZ")
patterns.letter = patterns.lowercase + patterns.uppercase
-patterns.space = S(" ")
+patterns.space = space
+patterns.tab = P("\t")
+patterns.spaceortab = patterns.space + patterns.tab
patterns.eol = S("\n\r")
patterns.spacer = S(" \t\f\v") -- + string.char(0xc2, 0xa0) if we want utf (cf mail roberto)
-patterns.newline = crlf + cr + lf
-patterns.nonspace = 1 - patterns.space
+patterns.newline = newline
+patterns.emptyline = newline^1
patterns.nonspacer = 1 - patterns.spacer
patterns.whitespace = patterns.eol + patterns.spacer
patterns.nonwhitespace = 1 - patterns.whitespace
-patterns.utf8 = patterns.utf8one + patterns.utf8two + patterns.utf8three + patterns.utf8four
-patterns.utfbom = P('\000\000\254\255') + P('\255\254\000\000') + P('\255\254') + P('\254\255') + P('\239\187\191')
+patterns.equal = P("=")
+patterns.comma = P(",")
+patterns.commaspacer = P(",") * patterns.spacer^0
+patterns.period = P(".")
+patterns.colon = P(":")
+patterns.semicolon = P(";")
+patterns.underscore = P("_")
+patterns.escaped = escaped
+patterns.squote = squote
+patterns.dquote = dquote
+patterns.nosquote = (escaped + (1-squote))^0
+patterns.nodquote = (escaped + (1-dquote))^0
+patterns.unsingle = (squote/"") * patterns.nosquote * (squote/"")
+patterns.undouble = (dquote/"") * patterns.nodquote * (dquote/"")
+patterns.unquoted = patterns.undouble + patterns.unsingle -- more often undouble
+patterns.unspacer = ((patterns.spacer^1)/"")^0
+
+patterns.somecontent = (anything - newline - space)^1 -- (utf8char - newline - space)^1
+patterns.beginline = #(1-newline)
+
+local unquoted = Cs(patterns.unquoted * endofstring) -- not C
+
+function string.unquoted(str)
+ return match(unquoted,str) or str
+end
+
function lpeg.anywhere(pattern) --slightly adapted from website
return P { P(pattern) + 1 * V(1) } -- why so complex?
@@ -399,36 +292,18 @@ function lpeg.splitter(pattern, action)
return (((1-P(pattern))^1)/action+1)^0
end
-local spacing = patterns.spacer^0 * patterns.newline -- sort of strip
-local empty = spacing * Cc("")
-local nonempty = Cs((1-spacing)^1) * spacing^-1
-local content = (empty + nonempty)^1
-
-local capture = Ct(content^0)
-
-function string:splitlines()
- return match(capture,self)
-end
-
-patterns.textline = content
-
---~ local p = lpeg.splitat("->",false) print(match(p,"oeps->what->more")) -- oeps what more
---~ local p = lpeg.splitat("->",true) print(match(p,"oeps->what->more")) -- oeps what->more
---~ local p = lpeg.splitat("->",false) print(match(p,"oeps")) -- oeps
---~ local p = lpeg.splitat("->",true) print(match(p,"oeps")) -- oeps
-
local splitters_s, splitters_m = { }, { }
local function splitat(separator,single)
local splitter = (single and splitters_s[separator]) or splitters_m[separator]
if not splitter then
separator = P(separator)
+ local other = C((1 - separator)^0)
if single then
- local other, any = C((1 - separator)^0), P(1)
+ local any = anything
splitter = other * (separator * C(any^0) + "") -- ?
splitters_s[separator] = splitter
else
- local other = C((1 - separator)^0)
splitter = other * (separator * other)^0
splitters_m[separator] = splitter
end
@@ -438,6 +313,7 @@ end
lpeg.splitat = splitat
+
local cache = { }
function lpeg.split(separator,str)
@@ -449,16 +325,39 @@ function lpeg.split(separator,str)
return match(c,str)
end
-function string:split(separator)
+function string.split(str,separator)
local c = cache[separator]
if not c then
c = Ct(splitat(separator))
cache[separator] = c
end
- return match(c,self)
+ return match(c,str)
+end
+
+local spacing = patterns.spacer^0 * newline -- sort of strip
+local empty = spacing * Cc("")
+local nonempty = Cs((1-spacing)^1) * spacing^-1
+local content = (empty + nonempty)^1
+
+patterns.textline = content
+
+
+local linesplitter = Ct(splitat(newline))
+
+patterns.linesplitter = linesplitter
+
+function string.splitlines(str)
+ return match(linesplitter,str)
+end
+
+local utflinesplitter = utfbom^-1 * Ct(splitat(newline))
+
+patterns.utflinesplitter = utflinesplitter
+
+function string.utfsplitlines(str)
+ return match(utflinesplitter,str)
end
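+--~ usage sketch (illustrative values):
+--~ string.splitlines("one\ntwo\nthree") -- { "one", "two", "three" }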
-lpeg.splitters = cache
local cache = { }
@@ -466,37 +365,24 @@ function lpeg.checkedsplit(separator,str)
local c = cache[separator]
if not c then
separator = P(separator)
- local other = C((1 - separator)^0)
+ local other = C((1 - separator)^1)
c = Ct(separator^0 * other * (separator^1 * other)^0)
cache[separator] = c
end
return match(c,str)
end
-function string:checkedsplit(separator)
+function string.checkedsplit(str,separator)
local c = cache[separator]
if not c then
separator = P(separator)
- local other = C((1 - separator)^0)
+ local other = C((1 - separator)^1)
c = Ct(separator^0 * other * (separator^1 * other)^0)
cache[separator] = c
end
- return match(c,self)
+ return match(c,str)
end
---~ function lpeg.append(list,pp)
---~ local p = pp
---~ for l=1,#list do
---~ if p then
---~ p = p + P(list[l])
---~ else
---~ p = P(list[l])
---~ end
---~ end
---~ return p
---~ end
-
---~ from roberto's site:
local f1 = string.byte
@@ -504,170 +390,459 @@ local function f2(s) local c1, c2 = f1(s,1,2) return c1 * 64 + c2
local function f3(s) local c1, c2, c3 = f1(s,1,3) return (c1 * 64 + c2) * 64 + c3 - 925824 end
local function f4(s) local c1, c2, c3, c4 = f1(s,1,4) return ((c1 * 64 + c2) * 64 + c3) * 64 + c4 - 63447168 end
-patterns.utf8byte = patterns.utf8one/f1 + patterns.utf8two/f2 + patterns.utf8three/f3 + patterns.utf8four/f4
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
-if not modules then modules = { } end modules ['l-table'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-table.join = table.concat
+local utf8byte = patterns.utf8one/f1 + patterns.utf8two/f2 + patterns.utf8three/f3 + patterns.utf8four/f4
-local concat, sort, insert, remove = table.concat, table.sort, table.insert, table.remove
-local format, find, gsub, lower, dump, match = string.format, string.find, string.gsub, string.lower, string.dump, string.match
-local getmetatable, setmetatable = getmetatable, setmetatable
-local type, next, tostring, tonumber, ipairs = type, next, tostring, tonumber, ipairs
+patterns.utf8byte = utf8byte
--- Starting with version 5.2 Lua no longer provide ipairs, which makes
--- sense. As we already used the for loop and # in most places the
--- impact on ConTeXt was not that large; the remaining ipairs already
--- have been replaced. In a similar fashio we also hardly used pairs.
---
--- Just in case, we provide the fallbacks as discussed in Programming
--- in Lua (http://www.lua.org/pil/7.3.html):
-if not ipairs then
- -- for k, v in ipairs(t) do ... end
- -- for k=1,#t do local v = t[k] ... end
+local cache = { }
- local function iterate(a,i)
- i = i + 1
- local v = a[i]
- if v ~= nil then
- return i, v --, nil
+function lpeg.stripper(str)
+ if type(str) == "string" then
+ local s = cache[str]
+ if not s then
+ s = Cs(((S(str)^1)/"" + 1)^0)
+ cache[str] = s
end
+ return s
+ else
+ return Cs(((str^1)/"" + 1)^0)
end
-
- function ipairs(a)
- return iterate, a, 0
- end
-
end
-if not pairs then
-
- -- for k, v in pairs(t) do ... end
- -- for k, v in next, t do ... end
+local cache = { }
- function pairs(t)
- return next, t -- , nil
+function lpeg.keeper(str)
+ if type(str) == "string" then
+ local s = cache[str]
+ if not s then
+ s = Cs((((1-S(str))^1)/"" + 1)^0)
+ cache[str] = s
+ end
+ return s
+ else
+ return Cs((((1-str)^1)/"" + 1)^0)
end
-
end
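+--~ usage sketch (illustrative values):
+--~ print(match(lpeg.stripper("ab"),"abcabd")) -- cd   (removes runs of a/b)
+--~ print(match(lpeg.keeper("ab"),"abcabd"))   -- abab (keeps only a/b)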
--- Also, unpack has been moved to the table table, and for compatiility
--- reasons we provide both now.
+function lpeg.frontstripper(str) -- or pattern (yet undocumented)
+ return (P(str) + P(true)) * Cs(P(1)^0)
+end
-if not table.unpack then
- table.unpack = _G.unpack
-elseif not unpack then
- _G.unpack = table.unpack
+function lpeg.endstripper(str) -- or pattern (yet undocumented)
+ return Cs((1 - P(str) * P(-1))^0)
end
--- extra functions, some might go (when not used)
+-- Just for fun I looked at the generated bytecode:
+-- p = (p and p + pp) or pp gets one more instruction (testset).
-function table.strip(tab)
- local lst = { }
- for i=1,#tab do
- local s = gsub(tab[i],"^%s*(.-)%s*$","%1")
- if s == "" then
- -- skip this one
- else
- lst[#lst+1] = s
+function lpeg.replacer(one,two)
+ if type(one) == "table" then
+ local no = #one
+ if no > 0 then
+ local p
+ for i=1,no do
+ local o = one[i]
+ local pp = P(o[1]) / o[2]
+ if p then
+ p = p + pp
+ else
+ p = pp
+ end
+ end
+ return Cs((p + 1)^0)
end
+ else
+ two = two or ""
+ return Cs((P(one)/two + 1)^0)
end
- return lst
end
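+--~ usage sketch (illustrative values):
+--~ print(match(lpeg.replacer("a","*"),"banana"))                    -- b*n*n*
+--~ print(match(lpeg.replacer { { "a", "x" }, { "b", "y" } },"abc")) -- xyc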
-function table.keys(t)
- local k = { }
- for key, _ in next, t do
- k[#k+1] = key
- end
- return k
-end
+local splitters_f, splitters_s = { }, { }
-local function compare(a,b)
- return (tostring(a) < tostring(b))
+function lpeg.firstofsplit(separator) -- always return value
+ local splitter = splitters_f[separator]
+ if not splitter then
+ separator = P(separator)
+ splitter = C((1 - separator)^0)
+ splitters_f[separator] = splitter
+ end
+ return splitter
end
-local function sortedkeys(tab)
- local srt, kind = { }, 0 -- 0=unknown 1=string, 2=number 3=mixed
- for key,_ in next, tab do
- srt[#srt+1] = key
- if kind == 3 then
- -- no further check
- else
- local tkey = type(key)
- if tkey == "string" then
- -- if kind == 2 then kind = 3 else kind = 1 end
- kind = (kind == 2 and 3) or 1
- elseif tkey == "number" then
- -- if kind == 1 then kind = 3 else kind = 2 end
- kind = (kind == 1 and 3) or 2
- else
- kind = 3
- end
- end
- end
- if kind == 0 or kind == 3 then
- sort(srt,compare)
- else
- sort(srt)
+function lpeg.secondofsplit(separator) -- nil if not split
+ local splitter = splitters_s[separator]
+ if not splitter then
+ separator = P(separator)
+ splitter = (1 - separator)^0 * separator * C(anything^0)
+ splitters_s[separator] = splitter
end
- return srt
+ return splitter
end
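+--~ usage sketch (illustrative values):
+--~ print(match(lpeg.firstofsplit(":"),"before:after"))  -- before
+--~ print(match(lpeg.secondofsplit(":"),"before:after")) -- after
+--~ print(match(lpeg.secondofsplit(":"),"nosplit"))      -- nil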
-local function sortedhashkeys(tab) -- fast one
- local srt = { }
- for key,_ in next, tab do
- srt[#srt+1] = key
- end
- sort(srt)
- return srt
+function lpeg.balancer(left,right)
+ left, right = P(left), P(right)
+ return P { left * ((1 - left - right) + V(1))^0 * right }
end
-table.sortedkeys = sortedkeys
-table.sortedhashkeys = sortedhashkeys
-function table.sortedhash(t)
- local s = sortedhashkeys(t) -- maybe just sortedkeys
- local n = 0
- local function kv(s)
- n = n + 1
- local k = s[n]
- return k, t[k]
- end
- return kv, s
-end
-table.sortedpairs = table.sortedhash
+local nany = utf8char/""
-function table.append(t, list)
- for _,v in next, list do
- insert(t,v)
+function lpeg.counter(pattern)
+ pattern = Cs((P(pattern)/" " + nany)^0)
+ return function(str)
+ return #match(pattern,str)
end
end
-function table.prepend(t, list)
- for k,v in next, list do
- insert(t,k,v)
- end
-end
+if utfgmatch then
-function table.merge(t, ...) -- first one is target
- t = t or {}
- local lst = {...}
- for i=1,#lst do
+ function lpeg.count(str,what) -- replaces string.count
+ if type(what) == "string" then
+ local n = 0
+ for _ in utfgmatch(str,what) do
+ n = n + 1
+ end
+ return n
+ else -- 4 times slower but still faster than / function
+ return #match(Cs((P(what)/" " + nany)^0),str)
+ end
+ end
+
+else
+
+ local cache = { }
+
+ function lpeg.count(str,what) -- replaces string.count
+ if type(what) == "string" then
+ local p = cache[what]
+ if not p then
+ p = Cs((P(what)/" " + nany)^0)
+                cache[what] = p
+ end
+ return #match(p,str)
+ else -- 4 times slower but still faster than / function
+ return #match(Cs((P(what)/" " + nany)^0),str)
+ end
+ end
+
+end
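+--~ usage sketch (illustrative values):
+--~ print(lpeg.count("banana","a")) -- 3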
+
+local patterns_escapes = { -- also defined in l-string
+ ["%"] = "%%",
+ ["."] = "%.",
+ ["+"] = "%+", ["-"] = "%-", ["*"] = "%*",
+ ["["] = "%[", ["]"] = "%]",
+    ["("] = "%(", [")"] = "%)",
+ -- ["{"] = "%{", ["}"] = "%}"
+ -- ["^"] = "%^", ["$"] = "%$",
+}
+
+local simple_escapes = { -- also defined in l-string
+ ["-"] = "%-",
+ ["."] = "%.",
+ ["?"] = ".",
+ ["*"] = ".*",
+}
+
+local p = Cs((S("-.+*%()[]") / patterns_escapes + anything)^0)
+local s = Cs((S("-.+*%()[]") / simple_escapes + anything)^0)
+
+function string.escapedpattern(str,simple)
+ return match(simple and s or p,str)
+end
+
+-- utf extensions
+
+lpeg.UP = lpeg.P
+
+if utfcharacters then
+
+ function lpeg.US(str)
+ local p
+ for uc in utfcharacters(str) do
+ if p then
+ p = p + P(uc)
+ else
+ p = P(uc)
+ end
+ end
+ return p
+ end
+
+
+elseif utfgmatch then
+
+ function lpeg.US(str)
+ local p
+ for uc in utfgmatch(str,".") do
+ if p then
+ p = p + P(uc)
+ else
+ p = P(uc)
+ end
+ end
+ return p
+ end
+
+else
+
+ function lpeg.US(str)
+ local p
+ local f = function(uc)
+ if p then
+ p = p + P(uc)
+ else
+ p = P(uc)
+ end
+ end
+ match((utf8char/f)^0,str)
+ return p
+ end
+
+end
+
+local range = Cs(utf8byte) * (Cs(utf8byte) + Cc(false))
+
+local utfchar = unicode and unicode.utf8 and unicode.utf8.char
+
+function lpeg.UR(str,more)
+ local first, last
+ if type(str) == "number" then
+ first = str
+ last = more or first
+ else
+ first, last = match(range,str)
+ if not last then
+ return P(str)
+ end
+ end
+ if first == last then
+ return P(str)
+    elseif utfchar and last - first < 8 then -- a somewhat arbitrary criterion
+ local p
+ for i=first,last do
+ if p then
+ p = p + P(utfchar(i))
+ else
+ p = P(utfchar(i))
+ end
+ end
+ return p -- nil when invalid range
+ else
+ local f = function(b)
+ return b >= first and b <= last
+ end
+ return utf8byte / f -- nil when invalid range
+ end
+end
+
+
+
+function lpeg.oneof(list,...) -- lpeg.oneof("elseif","else","if","then")
+ if type(list) ~= "table" then
+ list = { list, ... }
+ end
+ -- sort(list) -- longest match first
+ local p = P(list[1])
+ for l=2,#list do
+ p = p + P(list[l])
+ end
+ return p
+end
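+--~ usage sketch (illustrative values):
+--~ local p = lpeg.oneof("elseif","else","if","then")
+--~ print(match(p,"elseif true then")) -- 7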
+
+function lpeg.is_lpeg(p)
+ return p and lpegtype(p) == "pattern"
+end
+
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+if not modules then modules = { } end modules ['l-table'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local type, next, tostring, tonumber, ipairs, table, string = type, next, tostring, tonumber, ipairs, table, string
+local concat, sort, insert, remove = table.concat, table.sort, table.insert, table.remove
+local format, find, gsub, lower, dump, match = string.format, string.find, string.gsub, string.lower, string.dump, string.match
+local getmetatable, setmetatable = getmetatable, setmetatable
+
+-- Starting with version 5.2 Lua no longer provides ipairs, which makes
+-- sense. As we already used the for loop and # in most places, the
+-- impact on ConTeXt was not that large; the remaining ipairs have already
+-- been replaced. In a similar fashion we also hardly used pairs.
+--
+-- Just in case, we provide the fallbacks as discussed in Programming
+-- in Lua (http://www.lua.org/pil/7.3.html):
+
+if not ipairs then
+
+ -- for k, v in ipairs(t) do ... end
+ -- for k=1,#t do local v = t[k] ... end
+
+ local function iterate(a,i)
+ i = i + 1
+ local v = a[i]
+ if v ~= nil then
+ return i, v --, nil
+ end
+ end
+
+ function ipairs(a)
+ return iterate, a, 0
+ end
+
+end
+
+if not pairs then
+
+ -- for k, v in pairs(t) do ... end
+ -- for k, v in next, t do ... end
+
+ function pairs(t)
+ return next, t -- , nil
+ end
+
+end
+
+-- Also, unpack has been moved to the table table, and for compatibility
+-- reasons we provide both now.
+
+if not table.unpack then
+ table.unpack = _G.unpack
+elseif not unpack then
+ _G.unpack = table.unpack
+end
+
+-- extra functions, some might go (when not used)
+
+function table.strip(tab)
+ local lst, l = { }, 0
+ for i=1,#tab do
+ local s = gsub(tab[i],"^%s*(.-)%s*$","%1")
+ if s == "" then
+ -- skip this one
+ else
+ l = l + 1
+ lst[l] = s
+ end
+ end
+ return lst
+end
+
+function table.keys(t)
+ local keys, k = { }, 0
+ for key, _ in next, t do
+ k = k + 1
+ keys[k] = key
+ end
+ return keys
+end
+
+local function compare(a,b)
+ local ta, tb = type(a), type(b) -- needed, else 11 < 2
+ if ta == tb then
+ return a < b
+ else
+ return tostring(a) < tostring(b)
+ end
+end
+
+local function sortedkeys(tab)
+ local srt, category, s = { }, 0, 0 -- 0=unknown 1=string, 2=number 3=mixed
+ for key,_ in next, tab do
+ s = s + 1
+ srt[s] = key
+ if category == 3 then
+ -- no further check
+ else
+ local tkey = type(key)
+ if tkey == "string" then
+ category = (category == 2 and 3) or 1
+ elseif tkey == "number" then
+ category = (category == 1 and 3) or 2
+ else
+ category = 3
+ end
+ end
+ end
+ if category == 0 or category == 3 then
+ sort(srt,compare)
+ else
+ sort(srt)
+ end
+ return srt
+end
+
+local function sortedhashkeys(tab) -- fast one
+ local srt, s = { }, 0
+ for key,_ in next, tab do
+ if key then
+            s = s + 1
+ srt[s] = key
+ end
+ end
+ sort(srt)
+ return srt
+end
+
+table.sortedkeys = sortedkeys
+table.sortedhashkeys = sortedhashkeys
+
+local function nothing() end
+
+local function sortedhash(t)
+ if t then
+ local n, s = 0, sortedkeys(t) -- the robust one
+ local function kv(s)
+ n = n + 1
+ local k = s[n]
+ return k, t[k]
+ end
+ return kv, s
+ else
+ return nothing
+ end
+end
+
+table.sortedhash = sortedhash
+table.sortedpairs = sortedhash
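+--~ usage sketch (illustrative values):
+--~ for k, v in table.sortedhash { c = 3, a = 1, b = 2 } do
+--~     print(k,v) -- a 1, then b 2, then c 3
+--~ end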
+
+function table.append(t, list)
+ local n = #t
+ for i=1,#list do
+ n = n + 1
+ t[n] = list[i]
+ end
+ return t
+end
+
+function table.prepend(t, list)
+ local nl = #list
+ local nt = nl + #t
+ for i=#t,1,-1 do
+ t[nt] = t[i]
+ nt = nt - 1
+ end
+ for i=1,#list do
+ t[i] = list[i]
+ end
+ return t
+end
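+--~ usage sketch (illustrative values):
+--~ table.append ({ 1, 2 },{ 3, 4 }) -- { 1, 2, 3, 4 }
+--~ table.prepend({ 3, 4 },{ 1, 2 }) -- { 1, 2, 3, 4 }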
+
+function table.merge(t, ...) -- first one is target
+ t = t or { }
+ local lst = { ... }
+ for i=1,#lst do
for k, v in next, lst[i] do
t[k] = v
end
@@ -676,7 +851,7 @@ function table.merge(t, ...) -- first one is target
end
function table.merged(...)
- local tmp, lst = { }, {...}
+ local tmp, lst = { }, { ... }
for i=1,#lst do
for k, v in next, lst[i] do
tmp[k] = v
@@ -686,41 +861,45 @@ function table.merged(...)
end
function table.imerge(t, ...)
- local lst = {...}
+ local lst, nt = { ... }, #t
for i=1,#lst do
local nst = lst[i]
for j=1,#nst do
- t[#t+1] = nst[j]
+ nt = nt + 1
+ t[nt] = nst[j]
end
end
return t
end
function table.imerged(...)
- local tmp, lst = { }, {...}
+ local tmp, ntmp, lst = { }, 0, {...}
for i=1,#lst do
local nst = lst[i]
for j=1,#nst do
- tmp[#tmp+1] = nst[j]
+ ntmp = ntmp + 1
+ tmp[ntmp] = nst[j]
end
end
return tmp
end
-local function fastcopy(old) -- fast one
+local function fastcopy(old,metatabletoo) -- fast one
if old then
local new = { }
for k,v in next, old do
if type(v) == "table" then
- new[k] = fastcopy(v) -- was just table.copy
+ new[k] = fastcopy(v,metatabletoo) -- was just table.copy
else
new[k] = v
end
end
- -- optional second arg
- local mt = getmetatable(old)
- if mt then
- setmetatable(new,mt)
+ if metatabletoo then
+ -- optional second arg
+ local mt = getmetatable(old)
+ if mt then
+ setmetatable(new,mt)
+ end
end
return new
else
@@ -728,6 +907,8 @@ local function fastcopy(old) -- fast one
end
end
+-- todo : copy without metatable
+
local function copy(t, tables) -- taken from lua wiki, slightly adapted
tables = tables or { }
local tcopy = {}
@@ -760,33 +941,14 @@ end
table.fastcopy = fastcopy
table.copy = copy
--- roughly: copy-loop : unpack : sub == 0.9 : 0.4 : 0.45 (so in critical apps, use unpack)
-
-function table.sub(t,i,j)
- return { unpack(t,i,j) }
-end
-
-function table.replace(a,b)
- for k,v in next, b do
- a[k] = v
+function table.derive(parent)
+ local child = { }
+ if parent then
+ setmetatable(child,{ __index = parent })
end
+ return child
end
--- slower than #t on indexed tables (#t only returns the size of the numerically indexed slice)
-
-function table.is_empty(t) -- obolete, use inline code instead
- return not t or not next(t)
-end
-
-function table.one_entry(t) -- obolete, use inline code instead
- local n = next(t)
- return n and not next(t,n)
-end
-
---~ function table.starts_at(t) -- obsolete, not nice anyway
---~ return ipairs(t,1)(t,0)
---~ end
-
function table.tohash(t,value)
local h = { }
if t then
@@ -799,27 +961,19 @@ function table.tohash(t,value)
end
function table.fromhash(t)
- local h = { }
+ local hsh, h = { }, 0
for k, v in next, t do -- no ipairs here
- if v then h[#h+1] = k end
+ if v then
+ h = h + 1
+ hsh[h] = k
+ end
end
- return h
+ return hsh
end
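+--~ usage sketch (illustrative values):
+--~ table.fromhash { a = true, b = false, c = true } -- { "a", "c" } (order follows next)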
---~ print(table.serialize(t), "\n")
---~ print(table.serialize(t,"name"), "\n")
---~ print(table.serialize(t,false), "\n")
---~ print(table.serialize(t,true), "\n")
---~ print(table.serialize(t,"name",true), "\n")
---~ print(table.serialize(t,"name",true,true), "\n")
-
-table.serialize_functions = true
-table.serialize_compact = true
-table.serialize_inline = true
-
local noquotes, hexify, handle, reduce, compact, inline, functions
-local reserved = table.tohash { -- intercept a language flaw, no reserved words as key
+local reserved = table.tohash { -- intercept a language inconvenience: no reserved words as key
'and', 'break', 'do', 'else', 'elseif', 'end', 'false', 'for', 'function', 'if',
'in', 'local', 'nil', 'not', 'or', 'repeat', 'return', 'then', 'true', 'until', 'while',
}
@@ -831,20 +985,23 @@ local function simple_table(t)
n = n + 1
end
if n == #t then
- local tt = { }
+ local tt, nt = { }, 0
for i=1,#t do
local v = t[i]
local tv = type(v)
if tv == "number" then
+ nt = nt + 1
if hexify then
- tt[#tt+1] = format("0x%04X",v)
+ tt[nt] = format("0x%04X",v)
else
- tt[#tt+1] = tostring(v) -- tostring not needed
+ tt[nt] = tostring(v) -- tostring not needed
end
elseif tv == "boolean" then
- tt[#tt+1] = tostring(v)
+ nt = nt + 1
+ tt[nt] = tostring(v)
elseif tv == "string" then
- tt[#tt+1] = format("%q",v)
+ nt = nt + 1
+ tt[nt] = format("%q",v)
else
tt = nil
break
@@ -870,21 +1027,25 @@ local function do_serialize(root,name,depth,level,indexed)
depth = depth .. " "
if indexed then
handle(format("%s{",depth))
- elseif name then
- --~ handle(format("%s%s={",depth,key(name)))
- if type(name) == "number" then -- or find(k,"^%d+$") then
+ else
+ local tn = type(name)
+ if tn == "number" then -- or find(k,"^%d+$") then
if hexify then
handle(format("%s[0x%04X]={",depth,name))
else
handle(format("%s[%s]={",depth,name))
end
- elseif noquotes and not reserved[name] and find(name,"^%a[%w%_]*$") then
- handle(format("%s%s={",depth,name))
+ elseif tn == "string" then
+ if noquotes and not reserved[name] and find(name,"^%a[%w%_]*$") then
+ handle(format("%s%s={",depth,name))
+ else
+ handle(format("%s[%q]={",depth,name))
+ end
+ elseif tn == "boolean" then
+ handle(format("%s[%s]={",depth,tostring(name)))
else
- handle(format("%s[%q]={",depth,name))
+ handle(format("%s{",depth))
end
- else
- handle(format("%s{",depth))
end
end
-- we could check for k (index) being number (cardinal)
@@ -901,11 +1062,9 @@ local function do_serialize(root,name,depth,level,indexed)
for i=1,#sk do
local k = sk[i]
local v = root[k]
- --~ if v == root then
- -- circular
- --~ else
- local t = type(v)
- if compact and first and type(k) == "number" and k >= first and k <= last then
+ -- circular
+ local t, tk = type(v), type(k)
+ if compact and first and tk == "number" and k >= first and k <= last then
if t == "number" then
if hexify then
handle(format("%s 0x%04X,",depth,v))
@@ -947,17 +1106,18 @@ local function do_serialize(root,name,depth,level,indexed)
handle(format("%s __p__=nil,",depth))
end
elseif t == "number" then
- --~ if hexify then
- --~ handle(format("%s %s=0x%04X,",depth,key(k),v))
- --~ else
- --~ handle(format("%s %s=%s,",depth,key(k),v)) -- %.99g
- --~ end
- if type(k) == "number" then -- or find(k,"^%d+$") then
+ if tk == "number" then -- or find(k,"^%d+$") then
if hexify then
handle(format("%s [0x%04X]=0x%04X,",depth,k,v))
else
handle(format("%s [%s]=%s,",depth,k,v)) -- %.99g
end
+ elseif tk == "boolean" then
+ if hexify then
+ handle(format("%s [%s]=0x%04X,",depth,tostring(k),v))
+ else
+ handle(format("%s [%s]=%s,",depth,tostring(k),v)) -- %.99g
+ end
elseif noquotes and not reserved[k] and find(k,"^%a[%w%_]*$") then
if hexify then
handle(format("%s %s=0x%04X,",depth,k,v))
@@ -973,26 +1133,28 @@ local function do_serialize(root,name,depth,level,indexed)
end
elseif t == "string" then
if reduce and tonumber(v) then
- --~ handle(format("%s %s=%s,",depth,key(k),v))
- if type(k) == "number" then -- or find(k,"^%d+$") then
+ if tk == "number" then -- or find(k,"^%d+$") then
if hexify then
handle(format("%s [0x%04X]=%s,",depth,k,v))
else
handle(format("%s [%s]=%s,",depth,k,v))
end
+ elseif tk == "boolean" then
+ handle(format("%s [%s]=%s,",depth,tostring(k),v))
elseif noquotes and not reserved[k] and find(k,"^%a[%w%_]*$") then
handle(format("%s %s=%s,",depth,k,v))
else
handle(format("%s [%q]=%s,",depth,k,v))
end
else
- --~ handle(format("%s %s=%q,",depth,key(k),v))
- if type(k) == "number" then -- or find(k,"^%d+$") then
+ if tk == "number" then -- or find(k,"^%d+$") then
if hexify then
handle(format("%s [0x%04X]=%q,",depth,k,v))
else
handle(format("%s [%s]=%q,",depth,k,v))
end
+ elseif tk == "boolean" then
+ handle(format("%s [%s]=%q,",depth,tostring(k),v))
elseif noquotes and not reserved[k] and find(k,"^%a[%w%_]*$") then
handle(format("%s %s=%q,",depth,k,v))
else
@@ -1001,13 +1163,14 @@ local function do_serialize(root,name,depth,level,indexed)
end
elseif t == "table" then
if not next(v) then
- --~ handle(format("%s %s={},",depth,key(k)))
- if type(k) == "number" then -- or find(k,"^%d+$") then
+ if tk == "number" then -- or find(k,"^%d+$") then
if hexify then
handle(format("%s [0x%04X]={},",depth,k))
else
handle(format("%s [%s]={},",depth,k))
end
+ elseif tk == "boolean" then
+ handle(format("%s [%s]={},",depth,tostring(k)))
elseif noquotes and not reserved[k] and find(k,"^%a[%w%_]*$") then
handle(format("%s %s={},",depth,k))
else
@@ -1016,13 +1179,14 @@ local function do_serialize(root,name,depth,level,indexed)
elseif inline then
local st = simple_table(v)
if st then
- --~ handle(format("%s %s={ %s },",depth,key(k),concat(st,", ")))
- if type(k) == "number" then -- or find(k,"^%d+$") then
+ if tk == "number" then -- or find(k,"^%d+$") then
if hexify then
handle(format("%s [0x%04X]={ %s },",depth,k,concat(st,", ")))
else
handle(format("%s [%s]={ %s },",depth,k,concat(st,", ")))
end
+ elseif tk == "boolean" then -- or find(k,"^%d+$") then
+ handle(format("%s [%s]={ %s },",depth,tostring(k),concat(st,", ")))
elseif noquotes and not reserved[k] and find(k,"^%a[%w%_]*$") then
handle(format("%s %s={ %s },",depth,k,concat(st,", ")))
else
@@ -1035,13 +1199,14 @@ local function do_serialize(root,name,depth,level,indexed)
do_serialize(v,k,depth,level+1)
end
elseif t == "boolean" then
- --~ handle(format("%s %s=%s,",depth,key(k),tostring(v)))
- if type(k) == "number" then -- or find(k,"^%d+$") then
+ if tk == "number" then -- or find(k,"^%d+$") then
if hexify then
handle(format("%s [0x%04X]=%s,",depth,k,tostring(v)))
else
handle(format("%s [%s]=%s,",depth,k,tostring(v)))
end
+ elseif tk == "boolean" then -- or find(k,"^%d+$") then
+ handle(format("%s [%s]=%s,",depth,tostring(k),tostring(v)))
elseif noquotes and not reserved[k] and find(k,"^%a[%w%_]*$") then
handle(format("%s %s=%s,",depth,k,tostring(v)))
else
@@ -1049,35 +1214,37 @@ local function do_serialize(root,name,depth,level,indexed)
end
elseif t == "function" then
if functions then
- --~ handle(format('%s %s=loadstring(%q),',depth,key(k),dump(v)))
- if type(k) == "number" then -- or find(k,"^%d+$") then
+ if tk == "number" then -- or find(k,"^%d+$") then
if hexify then
handle(format("%s [0x%04X]=loadstring(%q),",depth,k,dump(v)))
else
handle(format("%s [%s]=loadstring(%q),",depth,k,dump(v)))
end
+ elseif tk == "boolean" then
+ handle(format("%s [%s]=loadstring(%q),",depth,tostring(k),dump(v)))
elseif noquotes and not reserved[k] and find(k,"^%a[%w%_]*$") then
handle(format("%s %s=loadstring(%q),",depth,k,dump(v)))
else
- handle(format("%s [%q]=loadstring(%q),",depth,k,dump(v)))
+ -- handle(format("%s [%q]=loadstring(%q),",depth,k,dump(v)))
+ handle(format("%s [%q]=loadstring(%q),",depth,k,debug.getinfo(v).what == "C" and "C code" or dump(v)))
end
end
else
- --~ handle(format("%s %s=%q,",depth,key(k),tostring(v)))
- if type(k) == "number" then -- or find(k,"^%d+$") then
+ if tk == "number" then -- or find(k,"^%d+$") then
if hexify then
handle(format("%s [0x%04X]=%q,",depth,k,tostring(v)))
else
handle(format("%s [%s]=%q,",depth,k,tostring(v)))
end
+ elseif tk == "boolean" then -- or find(k,"^%d+$") then
+ handle(format("%s [%s]=%q,",depth,tostring(k),tostring(v)))
elseif noquotes and not reserved[k] and find(k,"^%a[%w%_]*$") then
handle(format("%s %s=%q,",depth,k,tostring(v)))
else
handle(format("%s [%q]=%q,",depth,k,tostring(v)))
end
end
- --~ end
- end
+ end
end
if level > 0 then
handle(format("%s},",depth))
@@ -1087,15 +1254,34 @@ end
-- replacing handle by a direct t[#t+1] = ... (plus test) is not much
-- faster (0.03 on 1.00 for zapfino.tma)
-local function serialize(root,name,_handle,_reduce,_noquotes,_hexify)
- noquotes = _noquotes
- hexify = _hexify
- handle = _handle or print
- reduce = _reduce or false
- compact = table.serialize_compact
- inline = compact and table.serialize_inline
- functions = table.serialize_functions
+local function serialize(_handle,root,name,specification) -- handle wins
local tname = type(name)
+ if type(specification) == "table" then
+ noquotes = specification.noquotes
+ hexify = specification.hexify
+ handle = _handle or specification.handle or print
+ reduce = specification.reduce or false
+ functions = specification.functions
+ compact = specification.compact
+ inline = specification.inline and compact
+ if functions == nil then
+ functions = true
+ end
+ if compact == nil then
+ compact = true
+ end
+ if inline == nil then
+ inline = compact
+ end
+ else
+ noquotes = false
+ hexify = false
+ handle = _handle or print
+ reduce = false
+ compact = true
+ inline = true
+ functions = true
+ end
if tname == "string" then
if name == "return" then
handle("return {")
@@ -1117,33 +1303,33 @@ local function serialize(root,name,_handle,_reduce,_noquotes,_hexify)
else
handle("t={")
end
- if root and next(root) then
- do_serialize(root,name,"",0,indexed)
+ if root then
+ -- The dummy access will initialize a table that has a delayed initialization
+ -- using a metatable. (maybe explicitly test for metatable)
+ if getmetatable(root) then -- todo: make this an option, maybe even per subtable
+ local dummy = root._w_h_a_t_e_v_e_r_
+ root._w_h_a_t_e_v_e_r_ = nil
+ end
+ -- Let's forget about empty tables.
+ if next(root) then
+ do_serialize(root,name,"",0)
+ end
end
handle("}")
end
---~ name:
---~
---~ true : return { }
---~ false : { }
---~ nil : t = { }
---~ string : string = { }
---~ 'return' : return { }
---~ number : [number] = { }
-function table.serialize(root,name,reduce,noquotes,hexify)
- local t = { }
+function table.serialize(root,name,specification)
+ local t, n = { }, 0
local function flush(s)
- t[#t+1] = s
+ n = n + 1
+ t[n] = s
end
- serialize(root,name,flush,reduce,noquotes,hexify)
+ serialize(flush,root,name,specification)
return concat(t,"\n")
end
-function table.tohandle(handle,root,name,reduce,noquotes,hexify)
- serialize(root,name,handle,reduce,noquotes,hexify)
-end
+table.tohandle = serialize
-- sometimes tables are really large (zapfino extra pro is some 85M) in which
-- case a stepwise serialization is nice; actually, we could consider:
@@ -1154,73 +1340,63 @@ end
--
-- so this is on the todo list
-table.tofile_maxtab = 2*1024
+local maxtab = 2*1024
-function table.tofile(filename,root,name,reduce,noquotes,hexify)
+function table.tofile(filename,root,name,specification)
local f = io.open(filename,'w')
if f then
- local maxtab = table.tofile_maxtab
if maxtab > 1 then
- local t = { }
+ local t, n = { }, 0
local function flush(s)
- t[#t+1] = s
- if #t > maxtab then
+ n = n + 1
+ t[n] = s
+ if n > maxtab then
f:write(concat(t,"\n"),"\n") -- hm, write(sometable) should be nice
- t = { }
+ t, n = { }, 0 -- we could recycle t if needed
end
end
- serialize(root,name,flush,reduce,noquotes,hexify)
+ serialize(flush,root,name,specification)
f:write(concat(t,"\n"),"\n")
else
local function flush(s)
f:write(s,"\n")
end
- serialize(root,name,flush,reduce,noquotes,hexify)
+ serialize(flush,root,name,specification)
end
f:close()
+ io.flush()
end
end
-local function flatten(t,f,complete) -- is this used? meybe a variant with next, ...
- for i=1,#t do
- local v = t[i]
- if type(v) == "table" then
- if complete or type(v[1]) == "table" then
- flatten(v,f,complete)
+local function flattened(t,f,depth)
+ if f == nil then
+ f = { }
+ depth = 0xFFFF
+ elseif tonumber(f) then
+        -- assume that only two arguments are given
+ depth = f
+ f = { }
+ elseif not depth then
+ depth = 0xFFFF
+ end
+ for k, v in next, t do
+ if type(k) ~= "number" then
+ if depth > 0 and type(v) == "table" then
+ flattened(v,f,depth-1)
else
- f[#f+1] = v
+ f[k] = v
end
- else
- f[#f+1] = v
end
end
-end
-
-function table.flatten(t)
- local f = { }
- flatten(t,f,true)
- return f
-end
-
-function table.unnest(t) -- bad name
- local f = { }
- flatten(t,f,false)
- return f
-end
-
-table.flatten_one_level = table.unnest
-
--- a better one:
-
-local function flattened(t,f)
- if not f then
- f = { }
- end
- for k, v in next, t do
- if type(v) == "table" then
- flattened(v,f)
+ local n = #f
+ for k=1,#t do
+ local v = t[k]
+ if depth > 0 and type(v) == "table" then
+ flattened(v,f,depth-1)
+ n = #f
else
- f[k] = v
+ n = n + 1
+ f[n] = v
end
end
return f
@@ -1228,49 +1404,27 @@ end
table.flattened = flattened
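+--~ usage sketch (illustrative values):
+--~ table.flattened { 1, { 2, { 3 } }, a = "x" } -- { 1, 2, 3, a = "x" }
+--~ table.flattened({ 1, { 2, { 3 } } },1)       -- { 1, 2, { 3 } } (depth one)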
--- the next three may disappear
-
-function table.remove_value(t,value) -- todo: n
- if value then
- for i=1,#t do
- if t[i] == value then
- remove(t,i)
- -- remove all, so no: return
- end
- end
+local function unnest(t,f) -- only used in mk, for old times sake
+ if not f then -- and only relevant for token lists
+ f = { }
end
-end
-
-function table.insert_before_value(t,value,str)
- if str then
- if value then
- for i=1,#t do
- if t[i] == value then
- insert(t,i,str)
- return
- end
+ for i=1,#t do
+ local v = t[i]
+ if type(v) == "table" then
+ if type(v[1]) == "table" then
+ unnest(v,f)
+ else
+ f[#f+1] = v
end
+ else
+ f[#f+1] = v
end
- insert(t,1,str)
- elseif value then
- insert(t,1,value)
end
+ return f
end
-function table.insert_after_value(t,value,str)
- if str then
- if value then
- for i=1,#t do
- if t[i] == value then
- insert(t,i+1,str)
- return
- end
- end
- end
- t[#t+1] = str
- elseif value then
- t[#t+1] = value
- end
+function table.unnest(t) -- bad name
+ return unnest(t)
end
local function are_equal(a,b,n,m) -- indexed
@@ -1297,7 +1451,7 @@ end
local function identical(a,b) -- assumes same structure
for ka, va in next, a do
- local vb = b[k]
+ local vb = b[ka]
if va == vb then
-- same
elseif type(va) == "table" and type(vb) == "table" then
@@ -1311,8 +1465,8 @@ local function identical(a,b) -- assumes same structure
return true
end
-table.are_equal = are_equal
table.identical = identical
+table.are_equal = are_equal
-- maybe also make a combined one
@@ -1338,89 +1492,84 @@ function table.contains(t, v)
end
function table.count(t)
- local n, e = 0, next(t)
- while e do
- n, e = n + 1, next(t,e)
+ local n = 0
+ for k, v in next, t do
+ n = n + 1
end
return n
end
-function table.swapped(t)
- local s = { }
+function table.swapped(t,s) -- hash
+ local n = { }
+ if s then
+ for k, v in next, s do
+ n[k] = v
+ end
+ end
for k, v in next, t do
- s[v] = k
+ n[v] = k
end
- return s
+ return n
end
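+--~ usage sketch (illustrative values):
+--~ table.swapped { a = 1, b = 2 } -- { [1] = "a", [2] = "b" }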
---~ function table.are_equal(a,b)
---~ return table.serialize(a) == table.serialize(b)
---~ end
-
-function table.clone(t,p) -- t is optional or nil or table
- if not p then
- t, p = { }, t or { }
- elseif not t then
- t = { }
+function table.reversed(t)
+ if t then
+ local tt, tn = { }, #t
+ if tn > 0 then
+ local ttn = 0
+ for i=tn,1,-1 do
+ ttn = ttn + 1
+ tt[ttn] = t[i]
+ end
+ end
+ return tt
end
- setmetatable(t, { __index = function(_,key) return p[key] end }) -- why not __index = p ?
- return t
end
-function table.hexed(t,seperator)
- local tt = { }
- for i=1,#t do tt[i] = format("0x%04X",t[i]) end
- return concat(tt,seperator or " ")
-end
-
-function table.reverse_hash(h)
- local r = { }
- for k,v in next, h do
- r[v] = lower(gsub(k," ",""))
+function table.sequenced(t,sep,simple) -- hash only
+ local s, n = { }, 0
+ for k, v in sortedhash(t) do
+ if simple then
+ if v == true then
+ n = n + 1
+ s[n] = k
+ elseif v and v~= "" then
+ n = n + 1
+ s[n] = k .. "=" .. tostring(v)
+ end
+ else
+ n = n + 1
+ s[n] = k .. "=" .. tostring(v)
+ end
end
- return r
+ return concat(s, sep or " | ")
end
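+--~ usage sketch (illustrative values):
+--~ print(table.sequenced({ a = 1, b = 2 }," "))           -- a=1 b=2
+--~ print(table.sequenced({ a = true, b = "x" },",",true)) -- a,b=x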
-function table.reverse(t)
- local tt = { }
- if #t > 0 then
- for i=#t,1,-1 do
- tt[#tt+1] = t[i]
- end
+function table.print(t,...)
+ if type(t) ~= "table" then
+ print(tostring(t))
+ else
+ table.tohandle(print,t,...)
end
- return tt
end
-function table.insert_before_value(t,value,extra)
- for i=1,#t do
- if t[i] == extra then
- remove(t,i)
- end
- end
- for i=1,#t do
- if t[i] == value then
- insert(t,i,extra)
- return
- end
- end
- insert(t,1,extra)
+-- -- -- obsolete but we keep them for a while and might comment them later -- -- --
+
+-- roughly: copy-loop : unpack : sub == 0.9 : 0.4 : 0.45 (so in critical apps, use unpack)
+
+function table.sub(t,i,j)
+ return { unpack(t,i,j) }
end
-function table.insert_after_value(t,value,extra)
- for i=1,#t do
- if t[i] == extra then
- remove(t,i)
- end
- end
- for i=1,#t do
- if t[i] == value then
- insert(t,i+1,extra)
- return
- end
- end
- insert(t,#t+1,extra)
+-- slower than #t on indexed tables (#t only returns the size of the numerically indexed slice)
+
+function table.is_empty(t)
+ return not t or not next(t)
end
+function table.has_one_entry(t)
+ return t and not next(t,next(t))
+end
end -- of closure
@@ -1435,7 +1584,10 @@ if not modules then modules = { } end modules ['l-io'] = {
license = "see context related readme files"
}
-local byte, find, gsub = string.byte, string.find, string.gsub
+local io = io
+local byte, find, gsub, format = string.byte, string.find, string.gsub, string.format
+local concat = table.concat
+local type = type
if string.find(os.getenv("PATH"),";") then
io.fileseparator, io.pathseparator = "\\", ";"
@@ -1446,9 +1598,7 @@ end
function io.loaddata(filename,textmode)
local f = io.open(filename,(textmode and 'r') or 'rb')
if f then
- -- collectgarbage("step") -- sometimes makes a big difference in mem consumption
local data = f:read('*all')
- -- garbagecollector.check(data)
f:close()
return data
else
@@ -1460,13 +1610,14 @@ function io.savedata(filename,data,joiner)
local f = io.open(filename,"wb")
if f then
if type(data) == "table" then
- f:write(table.join(data,joiner or ""))
+ f:write(concat(data,joiner or ""))
elseif type(data) == "function" then
data(f)
else
f:write(data or "")
end
f:close()
+ io.flush()
return true
else
return false
@@ -1495,12 +1646,19 @@ function io.size(filename)
end
function io.noflines(f)
- local n = 0
- for _ in f:lines() do
- n = n + 1
+ if type(f) == "string" then
+        local f = io.open(f)
+        local n = f and io.noflines(f) or 0
+        if f then f:close() end
+ return n
+ else
+ local n = 0
+ for _ in f:lines() do
+ n = n + 1
+ end
+ f:seek('set',0)
+ return n
end
- f:seek('set',0)
- return n
end
local nextchar = {
@@ -1526,8 +1684,6 @@ local nextchar = {
function io.characters(f,n)
if f then
return nextchar[n or 1], f
- else
- return nil, nil
end
end
@@ -1536,40 +1692,42 @@ local nextbyte = {
local a, b, c, d = f:read(1,1,1,1)
if d then
return byte(a), byte(b), byte(c), byte(d)
- else
- return nil, nil, nil, nil
+ end
+ end,
+ [3] = function(f)
+ local a, b, c = f:read(1,1,1)
+ if b then
+ return byte(a), byte(b), byte(c)
end
end,
[2] = function(f)
local a, b = f:read(1,1)
if b then
return byte(a), byte(b)
- else
- return nil, nil
end
end,
[1] = function (f)
local a = f:read(1)
if a then
return byte(a)
- else
- return nil
end
end,
[-2] = function (f)
local a, b = f:read(1,1)
if b then
return byte(b), byte(a)
- else
- return nil, nil
+ end
+ end,
+ [-3] = function(f)
+ local a, b, c = f:read(1,1,1)
+ if b then
+ return byte(c), byte(b), byte(a)
end
end,
[-4] = function(f)
local a, b, c, d = f:read(1,1,1,1)
if d then
return byte(d), byte(c), byte(b), byte(a)
- else
- return nil, nil, nil, nil
end
end
}
@@ -1586,12 +1744,13 @@ function io.ask(question,default,options)
while true do
io.write(question)
if options then
- io.write(string.format(" [%s]",table.concat(options,"|")))
+ io.write(format(" [%s]",concat(options,"|")))
end
if default then
- io.write(string.format(" [%s]",default))
+ io.write(format(" [%s]",default))
end
- io.write(string.format(" "))
+ io.write(format(" "))
+ io.flush()
local answer = io.read()
answer = gsub(answer,"^%s*(.*)%s*$","%1")
if answer == "" and default then
@@ -1615,6 +1774,63 @@ function io.ask(question,default,options)
end
end
+local function readnumber(f,n,m)
+ if m then
+ f:seek("set",n)
+ n = m
+ end
+ if n == 1 then
+ return byte(f:read(1))
+ elseif n == 2 then
+ local a, b = byte(f:read(2),1,2)
+ return 256 * a + b
+ elseif n == 3 then
+ local a, b, c = byte(f:read(3),1,3)
+ return 256*256 * a + 256 * b + c
+ elseif n == 4 then
+ local a, b, c, d = byte(f:read(4),1,4)
+ return 256*256*256 * a + 256*256 * b + 256 * c + d
+ elseif n == 8 then
+ local a, b = readnumber(f,4), readnumber(f,4)
+        return 256*256*256*256 * a + b
+ elseif n == 12 then
+ local a, b, c = readnumber(f,4), readnumber(f,4), readnumber(f,4)
+        return 256*256*256*256*256*256*256*256 * a + 256*256*256*256 * b + c
+ elseif n == -2 then
+ local b, a = byte(f:read(2),1,2)
+ return 256*a + b
+ elseif n == -3 then
+ local c, b, a = byte(f:read(3),1,3)
+ return 256*256 * a + 256 * b + c
+ elseif n == -4 then
+ local d, c, b, a = byte(f:read(4),1,4)
+ return 256*256*256 * a + 256*256 * b + 256*c + d
+ elseif n == -8 then
+ local h, g, f, e, d, c, b, a = byte(f:read(8),1,8)
+ return 256*256*256*256*256*256*256 * a +
+ 256*256*256*256*256*256 * b +
+ 256*256*256*256*256 * c +
+ 256*256*256*256 * d +
+ 256*256*256 * e +
+ 256*256 * f +
+ 256 * g +
+ h
+ else
+ return 0
+ end
+end
+
+io.readnumber = readnumber
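+--~ usage sketch (big endian by default, negative n reads little endian;
+--~ the file name below is hypothetical):
+--~ local f = io.open("somefile.bin","rb")
+--~ local version = io.readnumber(f,0,4) -- seek to offset 0, then read 4 bytes
+--~ local flags   = io.readnumber(f,2)   -- read the next 2 bytes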
+
+function io.readstring(f,n,m)
+ if m then
+ f:seek("set",n)
+ n = m
+ end
+ local str = gsub(f:read(n),"%z","")
+ return str
+end
+
end -- of closure
@@ -1628,11 +1844,15 @@ if not modules then modules = { } end modules ['l-number'] = {
license = "see context related readme files"
}
+-- this module will be replaced when we have the bit library
+
local tostring = tostring
-local format, floor, insert, match = string.format, math.floor, table.insert, string.match
+local format, floor, match, rep = string.format, math.floor, string.match, string.rep
+local concat, insert = table.concat, table.insert
local lpegmatch = lpeg.match
-number = number or { }
+number = number or { }
+local number = number
-- a,b,c,d,e,f = number.toset(100101)
@@ -1680,6 +1900,54 @@ function number.bits(n,zero)
end
+function number.bit(p)
+ return 2 ^ (p - 1) -- 1-based indexing
+end
+
+function number.hasbit(x, p) -- typical call: if hasbit(x, bit(3)) then ...
+ return x % (p + p) >= p
+end
+
+function number.setbit(x, p)
+    return number.hasbit(x, p) and x or x + p
+end
+
+function number.clearbit(x, p)
+    return number.hasbit(x, p) and x - p or x
+end
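+--~ usage sketch (p is a bit value as returned by number.bit):
+--~ print(number.hasbit(5,number.bit(1))) -- true  (0b101 has bit 1 set)
+--~ print(number.hasbit(5,number.bit(2))) -- false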
+
+
+function number.tobitstring(n,m)
+ if n == 0 then
+ if m then
+            return rep("00000000",m)
+ else
+ return "00000000"
+ end
+ else
+ local t = { }
+ while n > 0 do
+ insert(t,1,n % 2 > 0 and 1 or 0)
+ n = floor(n/2)
+ end
+ local nn = 8 - #t % 8
+ if nn > 0 and nn < 8 then
+ for i=1,nn do
+ insert(t,1,0)
+ end
+ end
+ if m then
+ m = m * 8 - #t
+ if m > 0 then
+ insert(t,1,rep("0",m))
+ end
+ end
+ return concat(t)
+ end
+end
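+--~ usage sketch (illustrative values):
+--~ print(number.tobitstring(5))   -- 00000101
+--~ print(number.tobitstring(5,2)) -- 0000000000000101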
+
+
+
end -- of closure
do -- create closure to overcome 200 locals limit
@@ -1692,6 +1960,8 @@ if not modules then modules = { } end modules ['l-set'] = {
license = "see context related readme files"
}
+-- This will become obsolete when we have the bitset library embedded.
+
set = set or { }
local nums = { }
@@ -1735,10 +2005,11 @@ function set.tolist(n)
if n == 0 or not tabs[n] then
return ""
else
- local t = { }
+ local t, n = { }, 0
for k, v in next, tabs[n] do
if v then
- t[#t+1] = k
+ n = n + 1
+ t[n] = k
end
end
return concat(t," ")
@@ -1756,17 +2027,6 @@ function set.contains(n,s)
end
end
---~ local c = set.create{'aap','noot','mies'}
---~ local s = set.tonumber(c)
---~ local t = set.totable(s)
---~ print(t['aap'])
---~ local c = set.create{'zus','wim','jet'}
---~ local s = set.tonumber(c)
---~ local t = set.totable(s)
---~ print(t['aap'])
---~ print(t['jet'])
---~ print(set.contains(t,'jet'))
---~ print(set.contains(t,'aap'))
@@ -1782,32 +2042,119 @@ if not modules then modules = { } end modules ['l-os'] = {
license = "see context related readme files"
}
+-- This file deals with some operating system issues. Please don't bother me
+-- with the pros and cons of operating systems as they all have their flaws
+-- and benefits. Bashing one of them won't help solve problems or fix
+-- bugs faster and is a waste of time and energy.
+--
+-- path separators: / or \ ... we can use / everywhere
+-- suffixes : dll so exe ... no big deal
+-- quotes : we can use "" in most cases
+-- expansion : unless "" are used * might give side effects
+-- piping/threads : somewhat different for each os
+-- locations : specific user file locations and settings can change over time
+--
+-- os.type : windows | unix (new, we already guessed os.platform)
+-- os.name : windows | msdos | linux | macosx | solaris | .. | generic (new)
+-- os.platform : extended os.name with architecture
+
-- maybe build io.flush in os.execute
-local find, format, gsub = string.find, string.format, string.gsub
+local os = os
+local find, format, gsub, upper = string.find, string.format, string.gsub, string.upper
+local concat = table.concat
local random, ceil = math.random, math.ceil
+local rawget, rawset, type, getmetatable, setmetatable, tonumber = rawget, rawset, type, getmetatable, setmetatable, tonumber
+
+-- The following code permits traversing the environment table, at least
+-- in luatex. Internally all environment names are uppercase.
+
+if not os.__getenv__ then
+
+ os.__getenv__ = os.getenv
+ os.__setenv__ = os.setenv
+
+ if os.env then
+
+ local osgetenv = os.getenv
+ local ossetenv = os.setenv
+ local osenv = os.env local _ = osenv.PATH -- initialize the table
+
+ function os.setenv(k,v)
+ if v == nil then
+ v = ""
+ end
+ local K = upper(k)
+ osenv[K] = v
+ if type(v) == "table" then
+ v = concat(v,";") -- path
+ end
+ ossetenv(K,v)
+ end
+
+ function os.getenv(k)
+ local K = upper(k)
+ local v = osenv[K] or osenv[k] or osgetenv(K) or osgetenv(k)
+ if v == "" then
+ return nil
+ else
+ return v
+ end
+ end
+
+ else
+
+ local ossetenv = os.setenv
+ local osgetenv = os.getenv
+ local osenv = { }
+
+ function os.setenv(k,v)
+ if v == nil then
+ v = ""
+ end
+ local K = upper(k)
+ osenv[K] = v
+ end
+
+ function os.getenv(k)
+ local K = upper(k)
+ local v = osenv[K] or osgetenv(K) or osgetenv(k)
+ if v == "" then
+ return nil
+ else
+ return v
+ end
+ end
+
+ local function __index(t,k)
+ return os.getenv(k)
+ end
+ local function __newindex(t,k,v)
+ os.setenv(k,v)
+ end
+
+ os.env = { }
+
+ setmetatable(os.env, { __index = __index, __newindex = __newindex } )
+
+ end
+
+end
+
+-- end of environment hack
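+-- An illustrative sketch (not part of the upstream sources): with the hack
+-- above, variable names are effectively case insensitive and os.env can be
+-- used as a (pseudo) table; the variable name is hypothetical.
+--
+--   os.setenv("MTX_TEST_VALUE","okay")
+--   print(os.getenv("mtx_test_value")) -- okay (names are uppercased internally)
+--   print(os.env.MTX_TEST_VALUE)       -- okay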
-local execute, spawn, exec, ioflush = os.execute, os.spawn or os.execute, os.exec or os.execute, io.flush
+local execute, spawn, exec, iopopen, ioflush = os.execute, os.spawn or os.execute, os.exec or os.execute, io.popen, io.flush
function os.execute(...) ioflush() return execute(...) end
function os.spawn (...) ioflush() return spawn (...) end
function os.exec (...) ioflush() return exec (...) end
+function io.popen (...) ioflush() return iopopen(...) end
function os.resultof(command)
- ioflush() -- else messed up logging
local handle = io.popen(command,"r")
- if not handle then
- -- print("unknown command '".. command .. "' in os.resultof")
- return ""
- else
- return handle:read("*all") or ""
- end
+ return handle and handle:read("*all") or ""
end
---~ os.type : windows | unix (new, we already guessed os.platform)
---~ os.name : windows | msdos | linux | macosx | solaris | .. | generic (new)
---~ os.platform : extended os.name with architecture
-
if not io.fileseparator then
if find(os.getenv("PATH"),";") then
io.fileseparator, io.pathseparator, os.type = "\\", ";", os.type or "mswin"
@@ -1856,16 +2203,11 @@ function os.runtime()
return os.gettimeofday() - startuptime
end
---~ print(os.gettimeofday()-os.time())
---~ os.sleep(1.234)
---~ print (">>",os.runtime())
---~ print(os.date("%H:%M:%S",os.gettimeofday()))
---~ print(os.date("%H:%M:%S",os.time()))
-- no need for function anymore as we have more clever code and helpers now
-- this metatable trickery might as well disappear
-os.resolvers = os.resolvers or { }
+os.resolvers = os.resolvers or { } -- will become private
local resolvers = os.resolvers
@@ -1878,24 +2220,6 @@ end
setmetatable(os,osmt)
-if not os.setenv then
-
- -- we still store them but they won't be seen in
- -- child processes although we might pass them some day
- -- using command concatination
-
- local env, getenv = { }, os.getenv
-
- function os.setenv(k,v)
- env[k] = v
- end
-
- function os.getenv(k)
- return env[k] or getenv(k)
- end
-
-end
-
-- we can use HOSTTYPE on some platforms
local name, platform = os.name or "linux", os.getenv("MTX_PLATFORM") or ""
@@ -1935,7 +2259,7 @@ elseif os.type == "windows" then
elseif name == "linux" then
function os.resolvers.platform(t,k)
- -- we sometims have HOSTTYPE set so let's check that first
+ -- we sometimes have HOSTTYPE set so let's check that first
local platform, architecture = "", os.getenv("HOSTTYPE") or os.resultof("uname -m") or ""
if find(architecture,"x86_64") then
platform = "linux-64"
@@ -2013,10 +2337,10 @@ elseif name == "freebsd" then
elseif name == "kfreebsd" then
function os.resolvers.platform(t,k)
- -- we sometims have HOSTTYPE set so let's check that first
+ -- we sometimes have HOSTTYPE set so let's check that first
local platform, architecture = "", os.getenv("HOSTTYPE") or os.resultof("uname -m") or ""
if find(architecture,"x86_64") then
- platform = "kfreebsd-64"
+ platform = "kfreebsd-amd64"
else
platform = "kfreebsd-i386"
end
@@ -2091,61 +2415,86 @@ if not modules then modules = { } end modules ['l-file'] = {
-- needs a cleanup
-file = file or { }
+file = file or { }
+local file = file
-local concat = table.concat
-local find, gmatch, match, gsub, sub, char = string.find, string.gmatch, string.match, string.gsub, string.sub, string.char
+local insert, concat = table.insert, table.concat
+local find, gmatch, match, gsub, sub, char, lower = string.find, string.gmatch, string.match, string.gsub, string.sub, string.char, string.lower
local lpegmatch = lpeg.match
+local getcurrentdir, attributes = lfs.currentdir, lfs.attributes
-function file.removesuffix(filename)
- return (gsub(filename,"%.[%a%d]+$",""))
+local P, R, S, C, Cs, Cp, Cc = lpeg.P, lpeg.R, lpeg.S, lpeg.C, lpeg.Cs, lpeg.Cp, lpeg.Cc
+
+local function dirname(name,default)
+ return match(name,"^(.+)[/\\].-$") or (default or "")
end
-function file.addsuffix(filename, suffix)
- if not suffix or suffix == "" then
- return filename
- elseif not find(filename,"%.[%a%d]+$") then
- return filename .. "." .. suffix
- else
- return filename
- end
+local function basename(name)
+ return match(name,"^.+[/\\](.-)$") or name
end
-function file.replacesuffix(filename, suffix)
- return (gsub(filename,"%.[%a%d]+$","")) .. "." .. suffix
+local function nameonly(name)
+ return (gsub(match(name,"^.+[/\\](.-)$") or name,"%..*$",""))
end
-function file.dirname(name,default)
- return match(name,"^(.+)[/\\].-$") or (default or "")
+local function extname(name,default)
+ return match(name,"^.+%.([^/\\]-)$") or default or ""
end
-function file.basename(name)
- return match(name,"^.+[/\\](.-)$") or name
+local function splitname(name)
+ local n, s = match(name,"^(.+)%.([^/\\]-)$")
+ return n or name, s or ""
end
-function file.nameonly(name)
- return (gsub(match(name,"^.+[/\\](.-)$") or name,"%..*$",""))
+file.basename = basename
+file.dirname = dirname
+file.nameonly = nameonly
+file.extname = extname
+file.suffix = extname
+
+function file.removesuffix(filename)
+ return (gsub(filename,"%.[%a%d]+$",""))
end
-function file.extname(name,default)
- return match(name,"^.+%.([^/\\]-)$") or default or ""
+function file.addsuffix(filename, suffix, criterium)
+ if not suffix or suffix == "" then
+ return filename
+ elseif criterium == true then
+ return filename .. "." .. suffix
+ elseif not criterium then
+ local n, s = splitname(filename)
+ if not s or s == "" then
+ return filename .. "." .. suffix
+ else
+ return filename
+ end
+ else
+ local n, s = splitname(filename)
+ if s and s ~= "" then
+ local t = type(criterium)
+ if t == "table" then
+ -- keep if in criterium
+ for i=1,#criterium do
+ if s == criterium[i] then
+ return filename
+ end
+ end
+ elseif t == "string" then
+ -- keep if criterium
+ if s == criterium then
+ return filename
+ end
+ end
+ end
+ return n .. "." .. suffix
+ end
end
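+-- A few illustrative calls (not part of the upstream sources) showing how the
+-- criterium argument of file.addsuffix is interpreted:
+--
+--   file.addsuffix("name","tex")                   -- name.tex (no suffix present)
+--   file.addsuffix("name.tex","pdf")               -- name.tex (existing suffix kept)
+--   file.addsuffix("name.tex","pdf",true)          -- name.tex.pdf (always appended)
+--   file.addsuffix("name.xml","tex",{"tex","xml"}) -- name.xml (listed, so kept)
+--   file.addsuffix("name.txt","tex",{"tex","xml"}) -- name.tex (replaced)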
-file.suffix = file.extname
---~ function file.join(...)
---~ local pth = concat({...},"/")
---~ pth = gsub(pth,"\\","/")
---~ local a, b = match(pth,"^(.*://)(.*)$")
---~ if a and b then
---~ return a .. gsub(b,"//+","/")
---~ end
---~ a, b = match(pth,"^(//)(.*)$")
---~ if a and b then
---~ return a .. gsub(b,"//+","/")
---~ end
---~ return (gsub(pth,"//+","/"))
---~ end
+function file.replacesuffix(filename, suffix)
+ return (gsub(filename,"%.[%a%d]+$","")) .. "." .. suffix
+end
+
local trick_1 = char(1)
local trick_2 = "^" .. trick_1 .. "/+"
@@ -2173,60 +2522,37 @@ function file.join(...)
return (gsub(pth,"//+","/"))
end
---~ print(file.join("//","/y"))
---~ print(file.join("/","/y"))
---~ print(file.join("","/y"))
---~ print(file.join("/x/","/y"))
---~ print(file.join("x/","/y"))
---~ print(file.join("http://","/y"))
---~ print(file.join("http://a","/y"))
---~ print(file.join("http:///a","/y"))
---~ print(file.join("//nas-1","/y"))
-function file.iswritable(name)
- local a = lfs.attributes(name) or lfs.attributes(file.dirname(name,"."))
+function file.is_writable(name)
+ local a = attributes(name) or attributes(dirname(name,"."))
return a and sub(a.permissions,2,2) == "w"
end
-function file.isreadable(name)
- local a = lfs.attributes(name)
+function file.is_readable(name)
+ local a = attributes(name)
return a and sub(a.permissions,1,1) == "r"
end
-file.is_readable = file.isreadable
-file.is_writable = file.iswritable
+file.isreadable = file.is_readable -- deprecated
+file.iswritable = file.is_writable -- deprecated
--- todo: lpeg
-
---~ function file.split_path(str)
---~ local t = { }
---~ str = gsub(str,"\\", "/")
---~ str = gsub(str,"(%a):([;/])", "%1\001%2")
---~ for name in gmatch(str,"([^;:]+)") do
---~ if name ~= "" then
---~ t[#t+1] = gsub(name,"\001",":")
---~ end
---~ end
---~ return t
---~ end
+-- todo: lpeg \\ / .. does not save much
local checkedsplit = string.checkedsplit
-function file.split_path(str,separator)
+function file.splitpath(str,separator) -- string
str = gsub(str,"\\","/")
return checkedsplit(str,separator or io.pathseparator)
end
-function file.join_path(tab)
- return concat(tab,io.pathseparator) -- can have trailing //
+function file.joinpath(tab,separator) -- table
+ return concat(tab,separator or io.pathseparator) -- can have trailing //
end
-- we can hash them weakly
-local insert = table.insert
-
-function file.collapse_path(str,anchor)
+function file.collapsepath(str,anchor)
if anchor and not find(str,"^/") and not find(str,"^%a:") then
str = getcurrentdir() .. "/" .. str
end
@@ -2280,62 +2606,15 @@ function file.collapse_path(str,anchor)
return concat(newelements, '/')
end
end
-function file.xcollapse_path(str)
- print('collapse', str)
- if str == "" or str =="." then
- return "."
- elseif find(str,"^%.%.") then
- return str
- elseif not find(str,"%.") then
- return str
- end
- str = gsub(str,"^%./",lfs.currentdir() .. "/") -- ./xx in qualified
- str = gsub(str,"\\","/")
- local oldelements = checkedsplit(str,"/")
- local newelements = { }
- local i = #oldelements
- while i > 0 do
- local element = oldelements[i]
- if element == '.' then
- -- do nothing
- elseif element == '..' then
- local n = i -1
- while n > 0 do
- local element = oldelements[n]
- if element ~= '..' and element ~= '.' then
- oldelements[n] = '.'
- break
- else
- n = n - 1
- end
- end
- if n<1 then
- insert(newelements,1,'..')
- end
- elseif element ~= "" then
- insert(newelements,1,element)
- end
- i = i - 1
- end
- if #newelements == 0 then
- return "."
- elseif find(str,"^/") then
- return "/" .. concat(newelements,'/')
- else
- return concat(newelements, '/')
- end
-end
---~ print(file.collapse_path("/a"))
---~ print(file.collapse_path("a/./b/.."))
---~ print(file.collapse_path("a/aa/../b/bb"))
---~ print(file.collapse_path("a/../.."))
---~ print(file.collapse_path("a/.././././b/.."))
---~ print(file.collapse_path("a/./././b/.."))
---~ print(file.collapse_path("a/b/c/../.."))
-function file.robustname(str)
- return (gsub(str,"[^%a%d%/%-%.\\]+","-"))
+function file.robustname(str,strict)
+ str = gsub(str,"[^%a%d%/%-%.\\]+","-")
+ if strict then
+ return lower(gsub(str,"^%-*(.-)%-*$","%1"))
+ else
+ return str
+ end
end
file.readdata = io.loaddata
@@ -2347,100 +2626,34 @@ end
-- lpeg variants, slightly faster, not always
---~ local period = lpeg.P(".")
---~ local slashes = lpeg.S("\\/")
---~ local noperiod = 1-period
---~ local noslashes = 1-slashes
---~ local name = noperiod^1
-
---~ local pattern = (noslashes^0 * slashes)^0 * (noperiod^1 * period)^1 * lpeg.C(noperiod^1) * -1
-
---~ function file.extname(name)
---~ return lpegmatch(pattern,name) or ""
---~ end
-
---~ local pattern = lpeg.Cs(((period * noperiod^1 * -1)/"" + 1)^1)
-
---~ function file.removesuffix(name)
---~ return lpegmatch(pattern,name)
---~ end
-
---~ local pattern = (noslashes^0 * slashes)^1 * lpeg.C(noslashes^1) * -1
-
---~ function file.basename(name)
---~ return lpegmatch(pattern,name) or name
---~ end
-
---~ local pattern = (noslashes^0 * slashes)^1 * lpeg.Cp() * noslashes^1 * -1
-
---~ function file.dirname(name)
---~ local p = lpegmatch(pattern,name)
---~ if p then
---~ return sub(name,1,p-2)
---~ else
---~ return ""
---~ end
---~ end
-
---~ local pattern = (noslashes^0 * slashes)^0 * (noperiod^1 * period)^1 * lpeg.Cp() * noperiod^1 * -1
-
---~ function file.addsuffix(name, suffix)
---~ local p = lpegmatch(pattern,name)
---~ if p then
---~ return name
---~ else
---~ return name .. "." .. suffix
---~ end
---~ end
-
---~ local pattern = (noslashes^0 * slashes)^0 * (noperiod^1 * period)^1 * lpeg.Cp() * noperiod^1 * -1
-
---~ function file.replacesuffix(name,suffix)
---~ local p = lpegmatch(pattern,name)
---~ if p then
---~ return sub(name,1,p-2) .. "." .. suffix
---~ else
---~ return name .. "." .. suffix
---~ end
---~ end
-
---~ local pattern = (noslashes^0 * slashes)^0 * lpeg.Cp() * ((noperiod^1 * period)^1 * lpeg.Cp() + lpeg.P(true)) * noperiod^1 * -1
-
---~ function file.nameonly(name)
---~ local a, b = lpegmatch(pattern,name)
---~ if b then
---~ return sub(name,a,b-2)
---~ elseif a then
---~ return sub(name,a)
---~ else
---~ return name
---~ end
---~ end
-
---~ local test = file.extname
---~ local test = file.basename
---~ local test = file.dirname
---~ local test = file.addsuffix
---~ local test = file.replacesuffix
---~ local test = file.nameonly
-
---~ print(1,test("./a/b/c/abd.def.xxx","!!!"))
---~ print(2,test("./../b/c/abd.def.xxx","!!!"))
---~ print(3,test("a/b/c/abd.def.xxx","!!!"))
---~ print(4,test("a/b/c/def.xxx","!!!"))
---~ print(5,test("a/b/c/def","!!!"))
---~ print(6,test("def","!!!"))
---~ print(7,test("def.xxx","!!!"))
-
---~ local tim = os.clock() for i=1,250000 do local ext = test("abd.def.xxx","!!!") end print(os.clock()-tim)
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
-- also rewrite previous
-local letter = lpeg.R("az","AZ") + lpeg.S("_-+")
-local separator = lpeg.P("://")
+local letter = R("az","AZ") + S("_-+")
+local separator = P("://")
+
+local qualified = P(".")^0 * P("/") + letter*P(":") + letter^1*separator + letter^1 * P("/")
+local rootbased = P("/") + letter*P(":")
-local qualified = lpeg.P(".")^0 * lpeg.P("/") + letter*lpeg.P(":") + letter^1*separator + letter^1 * lpeg.P("/")
-local rootbased = lpeg.P("/") + letter*lpeg.P(":")
+lpeg.patterns.qualified = qualified
+lpeg.patterns.rootbased = rootbased
-- ./name ../name /name c: :// name/name
@@ -2452,14 +2665,16 @@ function file.is_rootbased_path(filename)
return lpegmatch(rootbased,filename) ~= nil
end
-local slash = lpeg.S("\\/")
-local period = lpeg.P(".")
-local drive = lpeg.C(lpeg.R("az","AZ")) * lpeg.P(":")
-local path = lpeg.C(((1-slash)^0 * slash)^0)
-local suffix = period * lpeg.C(lpeg.P(1-period)^0 * lpeg.P(-1))
-local base = lpeg.C((1-suffix)^0)
+-- actually these are schemes
-local pattern = (drive + lpeg.Cc("")) * (path + lpeg.Cc("")) * (base + lpeg.Cc("")) * (suffix + lpeg.Cc(""))
+local slash = S("\\/")
+local period = P(".")
+local drive = C(R("az","AZ")) * P(":")
+local path = C(((1-slash)^0 * slash)^0)
+local suffix = period * C(P(1-period)^0 * P(-1))
+local base = C((1-suffix)^0)
+
+local pattern = (drive + Cc("")) * (path + Cc("")) * (base + Cc("")) * (suffix + Cc(""))
function file.splitname(str) -- returns drive, path, base, suffix
return lpegmatch(pattern,str)
@@ -2472,14 +2687,13 @@ end
-- test { "/aa", "/aa/bb", "/aa/bb/cc", "/aa/bb/cc.dd", "/aa/bb/cc.dd.ee" }
-- test { "aa", "aa/bb", "aa/bb/cc", "aa/bb/cc.dd", "aa/bb/cc.dd.ee" }
---~ -- todo:
---~
---~ if os.type == "windows" then
---~ local currentdir = lfs.currentdir
---~ function lfs.currentdir()
---~ return (gsub(currentdir(),"\\","/"))
---~ end
---~ end
+
+-- for myself:
+
+function file.strip(name,dir)
+ local b, a = match(name,"^(.-)" .. dir .. "(.*)$")
+ return a ~= "" and a or name
+end
end -- of closure
@@ -2495,6 +2709,7 @@ if not modules then modules = { } end modules ['l-md5'] = {
-- This also provides file checksums and checkers.
+local md5, file = md5, file
local gsub, format, byte = string.gsub, string.format, string.byte
local function convert(str,fmt)
@@ -2505,27 +2720,13 @@ if not md5.HEX then function md5.HEX(str) return convert(str,"%02X") end end
if not md5.hex then function md5.hex(str) return convert(str,"%02x") end end
if not md5.dec then function md5.dec(str) return convert(str,"%03i") end end
---~ if not md5.HEX then
---~ local function remap(chr) return format("%02X",byte(chr)) end
---~ function md5.HEX(str) return (gsub(md5.sum(str),".",remap)) end
---~ end
---~ if not md5.hex then
---~ local function remap(chr) return format("%02x",byte(chr)) end
---~ function md5.hex(str) return (gsub(md5.sum(str),".",remap)) end
---~ end
---~ if not md5.dec then
---~ local function remap(chr) return format("%03i",byte(chr)) end
---~ function md5.dec(str) return (gsub(md5.sum(str),".",remap)) end
---~ end
-
-file.needs_updating_threshold = 1
-
-function file.needs_updating(oldname,newname) -- size modification access change
+
+function file.needs_updating(oldname,newname,threshold) -- size modification access change
local oldtime = lfs.attributes(oldname, modification)
local newtime = lfs.attributes(newname, modification)
if newtime >= oldtime then
return false
- elseif oldtime - newtime < file.needs_updating_threshold then
+ elseif oldtime - newtime < (threshold or 1) then
return false
else
return true
@@ -2572,86 +2773,171 @@ if not modules then modules = { } end modules ['l-url'] = {
license = "see context related readme files"
}
-local char, gmatch, gsub = string.char, string.gmatch, string.gsub
+local char, gmatch, gsub, format, byte, find = string.char, string.gmatch, string.gsub, string.format, string.byte, string.find
+local concat = table.concat
local tonumber, type = tonumber, type
-local lpegmatch = lpeg.match
+local P, C, R, S, Cs, Cc, Ct = lpeg.P, lpeg.C, lpeg.R, lpeg.S, lpeg.Cs, lpeg.Cc, lpeg.Ct
+local lpegmatch, lpegpatterns, replacer = lpeg.match, lpeg.patterns, lpeg.replacer
--- from the spec (on the web):
+-- from wikipedia:
--
--- foo://example.com:8042/over/there?name=ferret#nose
--- \_/ \______________/\_________/ \_________/ \__/
--- | | | | |
--- scheme authority path query fragment
--- | _____________________|__
--- / \ / \
--- urn:example:animal:ferret:nose
+-- foo://username:password@example.com:8042/over/there/index.dtb?type=animal;name=narwhal#nose
+-- \_/ \_______________/ \_________/ \__/ \___/ \_/ \______________________/ \__/
+-- | | | | | | | |
+-- | userinfo hostname port | | query fragment
+-- | \________________________________/\_____________|____|/
+-- scheme | | | |
+-- | authority path | |
+-- | | |
+-- | path interpretable as filename
+-- | ___________|____________ |
+-- / \ / \ |
+-- urn:example:animal:ferret:nose interpretable as extension
+
+url = url or { }
+local url = url
+
+local tochar = function(s) return char(tonumber(s,16)) end
+
+local colon = P(":")
+local qmark = P("?")
+local hash = P("#")
+local slash = P("/")
+local percent = P("%")
+local endofstring = P(-1)
+
+local hexdigit = R("09","AF","af")
+local plus = P("+")
+local nothing = Cc("")
+local escaped = (plus / " ") + (percent * C(hexdigit * hexdigit) / tochar)
-url = url or { }
+-- we assume schemes with more than 1 character (in order to avoid problems with windows disks)
-local function tochar(s)
- return char(tonumber(s,16))
-end
+local scheme = Cs((escaped+(1-colon-slash-qmark-hash))^2) * colon + nothing
+local authority = slash * slash * Cs((escaped+(1- slash-qmark-hash))^0) + nothing
+local path = slash * Cs((escaped+(1- qmark-hash))^0) + nothing
+local query = qmark * Cs((escaped+(1- hash))^0) + nothing
+local fragment = hash * Cs((escaped+(1- endofstring))^0) + nothing
-local colon, qmark, hash, slash, percent, endofstring = lpeg.P(":"), lpeg.P("?"), lpeg.P("#"), lpeg.P("/"), lpeg.P("%"), lpeg.P(-1)
+local validurl = scheme * authority * path * query * fragment
+local parser = Ct(validurl)
-local hexdigit = lpeg.R("09","AF","af")
-local plus = lpeg.P("+")
-local escaped = (plus / " ") + (percent * lpeg.C(hexdigit * hexdigit) / tochar)
+lpegpatterns.url = validurl
+lpegpatterns.urlsplitter = parser
--- we assume schemes with more than 1 character (in order to avoid problems with windows disks)
+local escapes = { } ; for i=0,255 do escapes[i] = format("%%%02X",i) end
-local scheme = lpeg.Cs((escaped+(1-colon-slash-qmark-hash))^2) * colon + lpeg.Cc("")
-local authority = slash * slash * lpeg.Cs((escaped+(1- slash-qmark-hash))^0) + lpeg.Cc("")
-local path = slash * lpeg.Cs((escaped+(1- qmark-hash))^0) + lpeg.Cc("")
-local query = qmark * lpeg.Cs((escaped+(1- hash))^0) + lpeg.Cc("")
-local fragment = hash * lpeg.Cs((escaped+(1- endofstring))^0) + lpeg.Cc("")
+local escaper = Cs((R("09","AZ","az") + S("-./_") + P(1) / escapes)^0)
-local parser = lpeg.Ct(scheme * authority * path * query * fragment)
+lpegpatterns.urlescaper = escaper
-- todo: reconsider Ct as we can as well have five return values (saves a table)
-- so we can have two parsers, one with and one without
-function url.split(str)
+local function split(str)
return (type(str) == "string" and lpegmatch(parser,str)) or str
end
+local function hasscheme(str)
+ local scheme = lpegmatch(scheme,str) -- at least one character
+ return scheme and scheme ~= ""
+end
+
-- todo: cache them
-function url.hashed(str)
- local s = url.split(str)
+local rootletter = R("az","AZ")
+ + S("_-+")
+local separator = P("://")
+local qualified = P(".")^0 * P("/")
+ + rootletter * P(":")
+ + rootletter^1 * separator
+ + rootletter^1 * P("/")
+local rootbased = P("/")
+ + rootletter * P(":")
+
+local barswapper = replacer("|",":")
+local backslashswapper = replacer("\\","/")
+
+local function hashed(str) -- not yet ok (/test?test)
+ local s = split(str)
local somescheme = s[1] ~= ""
- return {
- scheme = (somescheme and s[1]) or "file",
- authority = s[2],
- path = s[3],
- query = s[4],
- fragment = s[5],
- original = str,
- noscheme = not somescheme,
- }
+ local somequery = s[4] ~= ""
+ if not somescheme and not somequery then
+ s = {
+ scheme = "file",
+ authority = "",
+ path = str,
+ query = "",
+ fragment = "",
+ original = str,
+ noscheme = true,
+ filename = str,
+ }
+ else -- not always a filename but handy anyway
+ local authority, path, filename = s[2], s[3]
+ if authority == "" then
+ filename = path
+ else
+ filename = authority .. "/" .. path
+ end
+ s = {
+ scheme = s[1],
+ authority = authority,
+ path = path,
+ query = s[4],
+ fragment = s[5],
+ original = str,
+ noscheme = false,
+ filename = filename,
+ }
+ end
+ return s
end
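+-- An illustrative sketch (not part of the upstream sources; the urls are
+-- hypothetical) of what the hashed splitter returns:
+--
+--   local t = url.hashed("http://www.pragma-ade.com/show-man.pdf?name=metafun")
+--   -- scheme: http, authority: www.pragma-ade.com, path: show-man.pdf,
+--   -- query: name=metafun, filename: www.pragma-ade.com/show-man.pdf
+--
+--   local f = url.hashed("oeps/test.tex")
+--   -- scheme: file, noscheme: true, filename: oeps/test.tex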
-function url.hasscheme(str)
- return url.split(str)[1] ~= ""
-end
+-- Here we assume:
+--
+-- files: /// = relative
+-- files: //// = absolute (!)
+
-function url.addscheme(str,scheme)
- return (url.hasscheme(str) and str) or ((scheme or "file:///") .. str)
+
+url.split = split
+url.hasscheme = hasscheme
+url.hashed = hashed
+
+function url.addscheme(str,scheme) -- no authority
+ if hasscheme(str) then
+ return str
+ elseif not scheme then
+ return "file:///" .. str
+ else
+ return scheme .. ":///" .. str
+ end
end
-function url.construct(hash)
- local fullurl = hash.sheme .. "://".. hash.authority .. hash.path
- if hash.query then
- fullurl = fullurl .. "?".. hash.query
+function url.construct(hash) -- todo: we need to escape !
+ local fullurl, f = { }, 0
+ local scheme, authority, path, query, fragment = hash.scheme, hash.authority, hash.path, hash.query, hash.fragment
+ if scheme and scheme ~= "" then
+ f = f + 1 ; fullurl[f] = scheme .. "://"
+ end
+ if authority and authority ~= "" then
+ f = f + 1 ; fullurl[f] = authority
+ end
+ if path and path ~= "" then
+ f = f + 1 ; fullurl[f] = "/" .. path
+ end
+ if query and query ~= "" then
+ f = f + 1 ; fullurl[f] = "?".. query
end
- if hash.fragment then
- fullurl = fullurl .. "?".. hash.fragment
+ if fragment and fragment ~= "" then
+ f = f + 1 ; fullurl[f] = "#".. fragment
end
- return fullurl
+ return lpegmatch(escaper,concat(fullurl))
end
function url.filename(filename)
- local t = url.hashed(filename)
+ local t = hashed(filename)
return (t.scheme == "file" and (gsub(t.path,"^/([a-zA-Z])([:|])/)","%1:"))) or filename
end
@@ -2667,37 +2953,13 @@ function url.query(str)
end
end
---~ print(url.filename("file:///c:/oeps.txt"))
---~ print(url.filename("c:/oeps.txt"))
---~ print(url.filename("file:///oeps.txt"))
---~ print(url.filename("file:///etc/test.txt"))
---~ print(url.filename("/oeps.txt"))
-
---~ from the spec on the web (sort of):
---~
---~ function test(str)
---~ print(table.serialize(url.hashed(str)))
---~ end
---~
---~ test("%56pass%20words")
---~ test("file:///c:/oeps.txt")
---~ test("file:///c|/oeps.txt")
---~ test("file:///etc/oeps.txt")
---~ test("file://./etc/oeps.txt")
---~ test("file:////etc/oeps.txt")
---~ test("ftp://ftp.is.co.za/rfc/rfc1808.txt")
---~ test("http://www.ietf.org/rfc/rfc2396.txt")
---~ test("ldap://[2001:db8::7]/c=GB?objectClass?one#what")
---~ test("mailto:John.Doe@example.com")
---~ test("news:comp.infosystems.www.servers.unix")
---~ test("tel:+1-816-555-1212")
---~ test("telnet://192.0.2.16:80/")
---~ test("urn:oasis:names:specification:docbook:dtd:xml:4.1.2")
---~ test("/etc/passwords")
---~ test("http://www.pragma-ade.com/spaced%20name")
-
---~ test("zip:///oeps/oeps.zip#bla/bla.tex")
---~ test("zip:///oeps/oeps.zip?bla/bla.tex")
+
+
+
+
+
+
+
end -- of closure
@@ -2712,35 +2974,53 @@ if not modules then modules = { } end modules ['l-dir'] = {
license = "see context related readme files"
}
--- dir.expand_name will be merged with cleanpath and collapsepath
+-- dir.expandname will be merged with cleanpath and collapsepath
local type = type
local find, gmatch, match, gsub = string.find, string.gmatch, string.match, string.gsub
+local concat, insert, remove = table.concat, table.insert, table.remove
local lpegmatch = lpeg.match
+local P, S, R, C, Cc, Cs, Ct, Cv, V = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.Cc, lpeg.Cs, lpeg.Ct, lpeg.Cv, lpeg.V
+
dir = dir or { }
+local dir = dir
+local lfs = lfs
+
+local attributes = lfs.attributes
+local walkdir = lfs.dir
+local isdir = lfs.isdir
+local isfile = lfs.isfile
+local mkdir = lfs.mkdir
+local chdir = lfs.chdir
+local currentdir = lfs.currentdir
-- handy
function dir.current()
- return (gsub(lfs.currentdir(),"\\","/"))
+ return (gsub(currentdir(),"\\","/"))
end
--- optimizing for no string.find (*) does not save time
+-- optimizing for no find (*) does not save time
-local attributes = lfs.attributes
-local walkdir = lfs.dir
-local function glob_pattern(path,patt,recurse,action)
- local ok, scanner
+local lfsisdir = isdir
+
+local function isdir(path)
+ path = gsub(path,"[/\\]+$","")
+ return lfsisdir(path)
+end
+
+lfs.isdir = isdir
+
+local function globpattern(path,patt,recurse,action)
if path == "/" then
- ok, scanner = xpcall(function() return walkdir(path..".") end, function() end) -- kepler safe
- else
- ok, scanner = xpcall(function() return walkdir(path) end, function() end) -- kepler safe
+ path = path .. "."
+ elseif not find(path,"/$") then
+ path = path .. '/'
end
- if ok and type(scanner) == "function" then
- if not find(path,"/$") then path = path .. '/' end
- for name in scanner do
+ if isdir(path) then -- lfs.isdir does not like trailing /
+ for name in walkdir(path) do -- lfs.dir accepts trailing /
local full = path .. name
local mode = attributes(full,'mode')
if mode == 'file' then
@@ -2748,15 +3028,15 @@ local function glob_pattern(path,patt,recurse,action)
action(full)
end
elseif recurse and (mode == "directory") and (name ~= '.') and (name ~= "..") then
- glob_pattern(full,patt,recurse,action)
+ globpattern(full,patt,recurse,action)
end
end
end
end
-dir.glob_pattern = glob_pattern
+dir.globpattern = globpattern
-local function collect_pattern(path,patt,recurse,result)
+local function collectpattern(path,patt,recurse,result)
local ok, scanner
result = result or { }
if path == "/" then
@@ -2775,7 +3055,7 @@ local function collect_pattern(path,patt,recurse,result)
result[name] = attr
end
elseif recurse and (mode == "directory") and (name ~= '.') and (name ~= "..") then
- attr.list = collect_pattern(full,patt,recurse)
+ attr.list = collectpattern(full,patt,recurse)
result[name] = attr
end
end
@@ -2783,9 +3063,7 @@ local function collect_pattern(path,patt,recurse,result)
return result
end
-dir.collect_pattern = collect_pattern
-
-local P, S, R, C, Cc, Cs, Ct, Cv, V = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.Cc, lpeg.Cs, lpeg.Ct, lpeg.Cv, lpeg.V
+dir.collectpattern = collectpattern
local pattern = Ct {
[1] = (C(P(".") + P("/")^1) + C(R("az","AZ") * P(":") * P("/")^0) + Cc("./")) * V(2) * V(3),
@@ -2809,16 +3087,16 @@ local function glob(str,t)
for s=1,#str do
glob(str[s],t)
end
- elseif lfs.isfile(str) then
+ elseif isfile(str) then
t(str)
else
- local split = lpegmatch(pattern,str)
+ local split = lpegmatch(pattern,str) -- we could use the file splitter
if split then
local root, path, base = split[1], split[2], split[3]
local recurse = find(base,"%*%*")
local start = root .. path
local result = lpegmatch(filter,start .. base)
- glob_pattern(start,result,recurse,t)
+ globpattern(start,result,recurse,t)
end
end
else
@@ -2828,12 +3106,15 @@ local function glob(str,t)
glob(str[s],t)
end
return t
- elseif lfs.isfile(str) then
- local t = t or { }
- t[#t+1] = str
- return t
+ elseif isfile(str) then
+ if t then
+ t[#t+1] = str
+ return t
+ else
+ return { str }
+ end
else
- local split = lpegmatch(pattern,str)
+ local split = lpegmatch(pattern,str) -- we could use the file splitter
if split then
local t = t or { }
local action = action or function(name) t[#t+1] = name end
@@ -2841,7 +3122,7 @@ local function glob(str,t)
local recurse = find(base,"%*%*")
local start = root .. path
local result = lpegmatch(filter,start .. base)
- glob_pattern(start,result,recurse,action)
+ globpattern(start,result,recurse,action)
return t
else
return { }
@@ -2852,18 +3133,14 @@ end
dir.glob = glob
---~ list = dir.glob("**/*.tif")
---~ list = dir.glob("/**/*.tif")
---~ list = dir.glob("./**/*.tif")
---~ list = dir.glob("oeps/**/*.tif")
---~ list = dir.glob("/oeps/**/*.tif")
local function globfiles(path,recurse,func,files) -- func == pattern or function
if type(func) == "string" then
- local s = func -- alas, we need this indirect way
+ local s = func
func = function(name) return find(name,s) end
end
files = files or { }
+ local noffiles = #files
for name in walkdir(path) do
if find(name,"^%.") then
--- skip
@@ -2874,12 +3151,9 @@ local function globfiles(path,recurse,func,files) -- func == pattern or function
globfiles(path .. "/" .. name,recurse,func,files)
end
elseif mode == "file" then
- if func then
- if func(name) then
- files[#files+1] = path .. "/" .. name
- end
- else
- files[#files+1] = path .. "/" .. name
+ if not func or func(name) then
+ noffiles = noffiles + 1
+ files[noffiles] = path .. "/" .. name
end
end
end
@@ -2897,17 +3171,15 @@ dir.globfiles = globfiles
-- print(dir.ls("*.tex"))
function dir.ls(pattern)
- return table.concat(glob(pattern),"\n")
+ return concat(glob(pattern),"\n")
end
---~ mkdirs("temp")
---~ mkdirs("a/b/c")
---~ mkdirs(".","/a/b/c")
---~ mkdirs("a","b","c")
local make_indeed = true -- false
-if string.find(os.getenv("PATH"),";") then -- os.type == "windows"
+local onwindows = os.type == "windows" or find(os.getenv("PATH"),";")
+
+if onwindows then
function dir.mkdirs(...)
local str, pth, t = "", "", { ... }
@@ -2956,64 +3228,21 @@ if string.find(os.getenv("PATH"),";") then -- os.type == "windows"
else
pth = pth .. "/" .. s
end
- if make_indeed and not lfs.isdir(pth) then
- lfs.mkdir(pth)
- end
- end
- return pth, (lfs.isdir(pth) == true)
- end
-
---~ print(dir.mkdirs("","","a","c"))
---~ print(dir.mkdirs("a"))
---~ print(dir.mkdirs("a:"))
---~ print(dir.mkdirs("a:/b/c"))
---~ print(dir.mkdirs("a:b/c"))
---~ print(dir.mkdirs("a:/bbb/c"))
---~ print(dir.mkdirs("/a/b/c"))
---~ print(dir.mkdirs("/aaa/b/c"))
---~ print(dir.mkdirs("//a/b/c"))
---~ print(dir.mkdirs("///a/b/c"))
---~ print(dir.mkdirs("a/bbb//ccc/"))
-
- function dir.expand_name(str) -- will be merged with cleanpath and collapsepath
- local first, nothing, last = match(str,"^(//)(//*)(.*)$")
- if first then
- first = dir.current() .. "/"
- end
- if not first then
- first, last = match(str,"^(//)/*(.*)$")
- end
- if not first then
- first, last = match(str,"^([a-zA-Z]:)(.*)$")
- if first and not find(last,"^/") then
- local d = lfs.currentdir()
- if lfs.chdir(first) then
- first = dir.current()
- end
- lfs.chdir(d)
+ if make_indeed and not isdir(pth) then
+ mkdir(pth)
end
end
- if not first then
- first, last = dir.current(), str
- end
- last = gsub(last,"//","/")
- last = gsub(last,"/%./","/")
- last = gsub(last,"^/*","")
- first = gsub(first,"/*$","")
- if last == "" then
- return first
- else
- return first .. "/" .. last
- end
+ return pth, (isdir(pth) == true)
end
+
else
function dir.mkdirs(...)
local str, pth, t = "", "", { ... }
for i=1,#t do
local s = t[i]
- if s ~= "" then
+ if s and s ~= "" then -- we catch nil and false
if str ~= "" then
str = str .. "/" .. s
else
@@ -3031,42 +3260,92 @@ else
else
pth = pth .. "/" .. s
end
- if make_indeed and not first and not lfs.isdir(pth) then
- lfs.mkdir(pth)
+ if make_indeed and not first and not isdir(pth) then
+ mkdir(pth)
end
end
else
pth = "."
for s in gmatch(str,"[^/]+") do
pth = pth .. "/" .. s
- if make_indeed and not lfs.isdir(pth) then
- lfs.mkdir(pth)
+ if make_indeed and not isdir(pth) then
+ mkdir(pth)
+ end
+ end
+ end
+ return pth, (isdir(pth) == true)
+ end
+
+
+end
+
+dir.makedirs = dir.mkdirs
+
+-- we can only define it here as it uses dir.current
+
+if onwindows then
+
+ function dir.expandname(str) -- will be merged with cleanpath and collapsepath
+ local first, nothing, last = match(str,"^(//)(//*)(.*)$")
+ if first then
+ first = dir.current() .. "/"
+ end
+ if not first then
+ first, last = match(str,"^(//)/*(.*)$")
+ end
+ if not first then
+ first, last = match(str,"^([a-zA-Z]:)(.*)$")
+ if first and not find(last,"^/") then
+ local d = currentdir()
+ if chdir(first) then
+ first = dir.current()
end
+ chdir(d)
end
end
- return pth, (lfs.isdir(pth) == true)
+ if not first then
+ first, last = dir.current(), str
+ end
+ last = gsub(last,"//","/")
+ last = gsub(last,"/%./","/")
+ last = gsub(last,"^/*","")
+ first = gsub(first,"/*$","")
+ if last == "" or last == "." then
+ return first
+ else
+ return first .. "/" .. last
+ end
end
---~ print(dir.mkdirs("","","a","c"))
---~ print(dir.mkdirs("a"))
---~ print(dir.mkdirs("/a/b/c"))
---~ print(dir.mkdirs("/aaa/b/c"))
---~ print(dir.mkdirs("//a/b/c"))
---~ print(dir.mkdirs("///a/b/c"))
---~ print(dir.mkdirs("a/bbb//ccc/"))
+else
- function dir.expand_name(str) -- will be merged with cleanpath and collapsepath
+ function dir.expandname(str) -- will be merged with cleanpath and collapsepath
if not find(str,"^/") then
- str = lfs.currentdir() .. "/" .. str
+ str = currentdir() .. "/" .. str
end
str = gsub(str,"//","/")
str = gsub(str,"/%./","/")
+ str = gsub(str,"(.)/%.$","%1")
return str
end
end
-dir.makedirs = dir.mkdirs
+file.expandname = dir.expandname -- for convenience
+
+local stack = { }
+
+function dir.push(newdir)
+ insert(stack,lfs.currentdir())
+end
+
+function dir.pop()
+ local d = remove(stack)
+ if d then
+ lfs.chdir(d)
+ end
+ return d
+end
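+-- A small usage sketch (not part of the upstream sources): dir.push only
+-- remembers the current directory (the newdir argument is not used here), so
+-- changing directory is still up to the caller; the path is hypothetical.
+--
+--   dir.push()
+--   lfs.chdir("subdir")
+--   -- ... do something there ...
+--   dir.pop() -- back to where we started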
end -- of closure
@@ -3081,12 +3360,17 @@ if not modules then modules = { } end modules ['l-boolean'] = {
license = "see context related readme files"
}
+local type, tonumber = type, tonumber
+
boolean = boolean or { }
+local boolean = boolean
-local type, tonumber = type, tonumber
+-- function boolean.tonumber(b)
+-- return b and 1 or 0 -- test and test and return or return
+-- end
function boolean.tonumber(b)
- if b then return 1 else return 0 end
+ if b then return 1 else return 0 end -- test and return or return
end
function toboolean(str,tolerant)
@@ -3110,7 +3394,9 @@ function toboolean(str,tolerant)
end
end
-function string.is_boolean(str)
+string.toboolean = toboolean
+
+function string.is_boolean(str,default)
if type(str) == "string" then
if str == "true" or str == "yes" or str == "on" or str == "t" then
return true
@@ -3118,15 +3404,7 @@ function string.is_boolean(str)
return false
end
end
- return nil
-end
-
-function boolean.alwaystrue()
- return true
-end
-
-function boolean.falsetrue()
- return false
+ return default
end
@@ -3134,7 +3412,7 @@ end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['l-math'] = {
+if not modules then modules = { } end modules ['l-unicode'] = {
version = 1.001,
comment = "companion to luat-lib.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
@@ -3142,228 +3420,290 @@ if not modules then modules = { } end modules ['l-math'] = {
license = "see context related readme files"
}
-local floor, sin, cos, tan = math.floor, math.sin, math.cos, math.tan
+if not unicode then
-if not math.round then
- function math.round(x)
- return floor(x + 0.5)
- end
-end
+ unicode = { utf8 = { } }
-if not math.div then
- function math.div(n,m)
- return floor(n/m)
- end
-end
+ local floor, char = math.floor, string.char
-if not math.mod then
- function math.mod(n,m)
- return n % m
+ function unicode.utf8.utfchar(n)
+ if n < 0x80 then
+ return char(n)
+ elseif n < 0x800 then
+ return char(
+ 0xC0 + floor(n/0x40),
+ 0x80 + (n % 0x40)
+ )
+ elseif n < 0x10000 then
+ return char(
+ 0xE0 + floor(n/0x1000),
+ 0x80 + (floor(n/0x40) % 0x40),
+ 0x80 + (n % 0x40)
+ )
+ elseif n < 0x40000 then
+ return char(
+ 0xF0 + floor(n/0x40000),
+ 0x80 + floor(n/0x1000),
+ 0x80 + (floor(n/0x40) % 0x40),
+ 0x80 + (n % 0x40)
+ )
+ else
+ -- return char(
+ -- 0xF1 + floor(n/0x1000000),
+ -- 0x80 + floor(n/0x40000),
+ -- 0x80 + floor(n/0x1000),
+ -- 0x80 + (floor(n/0x40) % 0x40),
+ -- 0x80 + (n % 0x40)
+ -- )
+ return "?"
+ end
end
+
end
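+-- An illustrative sketch (not part of the upstream sources): the fallback
+-- utfchar above encodes a code point as utf-8 bytes, for instance:
+--
+--   print(unicode.utf8.utfchar(0x41))   -- A (one byte)
+--   print(unicode.utf8.utfchar(0x20AC)) -- the euro sign (three bytes: E2 82 AC)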
-local pipi = 2*math.pi/360
+local unicode = unicode
-function math.sind(d)
- return sin(d*pipi)
-end
-
-function math.cosd(d)
- return cos(d*pipi)
-end
-
-function math.tand(d)
- return tan(d*pipi)
-end
+utf = utf or unicode.utf8
+local concat = table.concat
+local utfchar, utfbyte, utfgsub = utf.char, utf.byte, utf.gsub
+local char, byte, find, bytepairs, utfvalues, format = string.char, string.byte, string.find, string.bytepairs, string.utfvalues, string.format
+local type = type
-end -- of closure
+local utfsplitlines = string.utfsplitlines
-do -- create closure to overcome 200 locals limit
+-- 0 EF BB BF UTF-8
+-- 1 FF FE UTF-16-little-endian
+-- 2 FE FF UTF-16-big-endian
+-- 3 FF FE 00 00 UTF-32-little-endian
+-- 4 00 00 FE FF UTF-32-big-endian
-if not modules then modules = { } end modules ['l-utils'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
+unicode.utfname = {
+ [0] = 'utf-8',
+ [1] = 'utf-16-le',
+ [2] = 'utf-16-be',
+ [3] = 'utf-32-le',
+ [4] = 'utf-32-be'
}
--- hm, quite unreadable
+-- \000 fails in <= 5.0 but is valid in >=5.1 where %z is deprecated
-local gsub = string.gsub
-local concat = table.concat
-local type, next = type, next
+function unicode.utftype(f)
+ local str = f:read(4)
+ if not str then
+ f:seek('set')
+ return 0
+ -- elseif find(str,"^%z%z\254\255") then -- depricated
+ -- elseif find(str,"^\000\000\254\255") then -- not permitted and bugged
+ elseif find(str,"\000\000\254\255",1,true) then -- seems to work okay (TH)
+ return 4
+ -- elseif find(str,"^\255\254%z%z") then -- depricated
+ -- elseif find(str,"^\255\254\000\000") then -- not permitted and bugged
+ elseif find(str,"\255\254\000\000",1,true) then -- seems to work okay (TH)
+ return 3
+ elseif find(str,"^\254\255") then
+ f:seek('set',2)
+ return 2
+ elseif find(str,"^\255\254") then
+ f:seek('set',2)
+ return 1
+ elseif find(str,"^\239\187\191") then
+ f:seek('set',3)
+ return 0
+ else
+ f:seek('set')
+ return 0
+ end
+end
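+-- A minimal sketch (not part of the upstream sources) of how the bom check
+-- is meant to be used: utftype repositions the file just after the bom and
+-- returns an index into unicode.utfname; the filename is hypothetical.
+--
+--   local f = io.open("somefile.txt","rb")
+--   if f then
+--       local kind = unicode.utftype(f) -- 0..4
+--       print(unicode.utfname[kind])    -- e.g. utf-16-le
+--       local data = f:read("*all")     -- the rest, without the bom
+--       f:close()
+--   end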
-if not utils then utils = { } end
-if not utils.merger then utils.merger = { } end
-if not utils.lua then utils.lua = { } end
-utils.merger.m_begin = "begin library merge"
-utils.merger.m_end = "end library merge"
-utils.merger.pattern =
- "%c+" ..
- "%-%-%s+" .. utils.merger.m_begin ..
- "%c+(.-)%c+" ..
- "%-%-%s+" .. utils.merger.m_end ..
- "%c+"
-function utils.merger._self_fake_()
- return
- "-- " .. "created merged file" .. "\n\n" ..
- "-- " .. utils.merger.m_begin .. "\n\n" ..
- "-- " .. utils.merger.m_end .. "\n\n"
+local function utf16_to_utf8_be(t)
+ if type(t) == "string" then
+ t = utfsplitlines(t)
+ end
+ local result = { } -- we reuse result
+ for i=1,#t do
+ local r, more = 0, 0
+ for left, right in bytepairs(t[i]) do
+ if right then
+ local now = 256*left + right
+ if more > 0 then
+ now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000
+ more = 0
+ r = r + 1
+ result[r] = utfchar(now)
+ elseif now >= 0xD800 and now <= 0xDBFF then
+ more = now
+ else
+ r = r + 1
+ result[r] = utfchar(now)
+ end
+ end
+ end
+ t[i] = concat(result,"",1,r) -- we reused tmp, hence t
+ end
+ return t
end
-function utils.report(...)
- print(...)
+local function utf16_to_utf8_le(t)
+ if type(t) == "string" then
+ t = utfsplitlines(t)
+ end
+ local result = { } -- we reuse result
+ for i=1,#t do
+ local r, more = 0, 0
+ for left, right in bytepairs(t[i]) do
+ if right then
+ local now = 256*right + left
+ if more > 0 then
+ now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000
+ more = 0
+ r = r + 1
+ result[r] = utfchar(now)
+ elseif now >= 0xD800 and now <= 0xDBFF then
+ more = now
+ else
+ r = r + 1
+ result[r] = utfchar(now)
+ end
+ end
+ end
+ t[i] = concat(result,"",1,r) -- we reused tmp, hence t
+ end
+ return t
end
-utils.merger.strip_comment = true
-
-function utils.merger._self_load_(name)
- local f, data = io.open(name), ""
- if f then
- utils.report("reading merge from %s",name)
- data = f:read("*all")
- f:close()
- else
- utils.report("unknown file to merge %s",name)
+local function utf32_to_utf8_be(t)
+ if type(t) == "string" then
+ t = utfsplitlines(t)
+ end
- if data and utils.merger.strip_comment then
- -- saves some 20K
- data = gsub(data,"%-%-~[^\n\r]*[\r\n]", "")
+ local result = { } -- we reuse result
+ for i=1,#t do
+ local r, more = 0, -1
+ for a,b in bytepairs(t[i]) do
+ if a and b then
+ if more < 0 then
+ more = 256*256*256*a + 256*256*b
+ else
+ r = r + 1
+ result[r] = utfchar(more + 256*a + b)
+ more = -1
+ end
+ else
+ break
+ end
+ end
+ t[i] = concat(result,"",1,r)
end
- return data or ""
+ return t
end
-function utils.merger._self_save_(name, data)
- if data ~= "" then
- local f = io.open(name,'w')
- if f then
- utils.report("saving merge from %s",name)
- f:write(data)
- f:close()
+local function utf32_to_utf8_le(t)
+ if type(t) == "string" then
+ t = utfsplitlines(t)
+ end
+ local result = { } -- we reuse result
+ for i=1,#t do
+ local r, more = 0, -1
+ for a,b in bytepairs(t[i]) do
+ if a and b then
+ if more < 0 then
+ more = 256*b + a
+ else
+ r = r + 1
+ result[r] = utfchar(more + 256*256*256*b + 256*256*a)
+ more = -1
+ end
+ else
+ break
+ end
end
+ t[i] = concat(result,"",1,r)
end
+ return result
end
-function utils.merger._self_swap_(data,code)
- if data ~= "" then
- return (gsub(data,utils.merger.pattern, function(s)
- return "\n\n" .. "-- "..utils.merger.m_begin .. "\n" .. code .. "\n" .. "-- "..utils.merger.m_end .. "\n\n"
- end, 1))
+unicode.utf32_to_utf8_be = utf32_to_utf8_be
+unicode.utf32_to_utf8_le = utf32_to_utf8_le
+unicode.utf16_to_utf8_be = utf16_to_utf8_be
+unicode.utf16_to_utf8_le = utf16_to_utf8_le
+
+function unicode.utf8_to_utf8(t)
+ return type(t) == "string" and utfsplitlines(t) or t
+end
+
+function unicode.utf16_to_utf8(t,endian)
+ return endian and utf16_to_utf8_be(t) or utf16_to_utf8_le(t) or t
+end
+
+function unicode.utf32_to_utf8(t,endian)
+ return endian and utf32_to_utf8_be(t) or utf32_to_utf8_le(t) or t
+end
+
+local function little(c)
+ local b = byte(c)
+ if b < 0x10000 then
+ return char(b%256,b/256)
else
- return ""
+ b = b - 0x10000
+ local b1, b2 = b/1024 + 0xD800, b%1024 + 0xDC00
+ return char(b1%256,b1/256,b2%256,b2/256)
end
end
---~ stripper:
---~
---~ data = gsub(data,"%-%-~[^\n]*\n","")
---~ data = gsub(data,"\n\n+","\n")
-
-function utils.merger._self_libs_(libs,list)
- local result, f, frozen = { }, nil, false
- result[#result+1] = "\n"
- if type(libs) == 'string' then libs = { libs } end
- if type(list) == 'string' then list = { list } end
- local foundpath = nil
- for i=1,#libs do
- local lib = libs[i]
- for j=1,#list do
- local pth = gsub(list[j],"\\","/") -- file.clean_path
- utils.report("checking library path %s",pth)
- local name = pth .. "/" .. lib
- if lfs.isfile(name) then
- foundpath = pth
- end
- end
- if foundpath then break end
+local function big(c)
+ local b = byte(c)
+ if b < 0x10000 then
+ return char(b/256,b%256)
+ else
+ b = b - 0x10000
+ local b1, b2 = b/1024 + 0xD800, b%1024 + 0xDC00
+ return char(b1/256,b1%256,b2/256,b2%256)
end
- if foundpath then
- utils.report("using library path %s",foundpath)
- local right, wrong = { }, { }
- for i=1,#libs do
- local lib = libs[i]
- local fullname = foundpath .. "/" .. lib
- if lfs.isfile(fullname) then
- -- right[#right+1] = lib
- utils.report("merging library %s",fullname)
- result[#result+1] = "do -- create closure to overcome 200 locals limit"
- result[#result+1] = io.loaddata(fullname,true)
- result[#result+1] = "end -- of closure"
- else
- -- wrong[#wrong+1] = lib
- utils.report("no library %s",fullname)
- end
- end
- if #right > 0 then
- utils.report("merged libraries: %s",concat(right," "))
- end
- if #wrong > 0 then
- utils.report("skipped libraries: %s",concat(wrong," "))
- end
+end
+
+function unicode.utf8_to_utf16(str,littleendian)
+ if littleendian then
+ return char(255,254) .. utfgsub(str,".",little)
else
- utils.report("no valid library path found")
+ return char(254,255) .. utfgsub(str,".",big)
end
- return concat(result, "\n\n")
end
-function utils.merger.selfcreate(libs,list,target)
- if target then
- utils.merger._self_save_(
- target,
- utils.merger._self_swap_(
- utils.merger._self_fake_(),
- utils.merger._self_libs_(libs,list)
- )
- )
+function unicode.utfcodes(str,separator)
+ local t, n = { }, 0
+ for u in utfvalues(str) do
+ n = n + 1
+ t[n] = format("0x%04X",u)
end
+ return concat(t,separator or " ")
end
-function utils.merger.selfmerge(name,libs,list,target)
- utils.merger._self_save_(
- target or name,
- utils.merger._self_swap_(
- utils.merger._self_load_(name),
- utils.merger._self_libs_(libs,list)
- )
- )
+function unicode.ustring(s)
+ return format("U+%05X",type(s) == "number" and s or utfbyte(s))
end
-function utils.merger.selfclean(name)
- utils.merger._self_save_(
- name,
- utils.merger._self_swap_(
- utils.merger._self_load_(name),
- ""
- )
- )
+function unicode.xstring(s)
+ return format("0x%05X",type(s) == "number" and s or utfbyte(s))
end
-function utils.lua.compile(luafile, lucfile, cleanup, strip) -- defaults: cleanup=false strip=true
- -- utils.report("compiling",luafile,"into",lucfile)
- os.remove(lucfile)
- local command = "-o " .. string.quote(lucfile) .. " " .. string.quote(luafile)
- if strip ~= false then
- command = "-s " .. command
- end
- local done = (os.spawn("texluac " .. command) == 0) or (os.spawn("luac " .. command) == 0)
- if done and cleanup == true and lfs.isfile(lucfile) and lfs.isfile(luafile) then
- -- utils.report("removing",luafile)
- os.remove(luafile)
- end
- return done
+
+local lpegmatch = lpeg.match
+local utftype = lpeg.patterns.utftype
+
+function unicode.filetype(data)
+ return data and lpegmatch(utftype,data) or "unknown"
end
+
end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['l-aux'] = {
+if not modules then modules = { } end modules ['l-math'] = {
version = 1.001,
comment = "companion to luat-lib.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
@@ -3371,606 +3711,2538 @@ if not modules then modules = { } end modules ['l-aux'] = {
license = "see context related readme files"
}
--- for inline, no store split : for s in string.gmatch(str,",* *([^,]+)") do .. end
-
-aux = aux or { }
+local floor, sin, cos, tan = math.floor, math.sin, math.cos, math.tan
-local concat, format, gmatch = table.concat, string.format, string.gmatch
-local tostring, type = tostring, type
-local lpegmatch = lpeg.match
+if not math.round then
+ function math.round(x) return floor(x + 0.5) end
+end
-local P, R, V = lpeg.P, lpeg.R, lpeg.V
+if not math.div then
+ function math.div(n,m) return floor(n/m) end
+end
-local escape, left, right = P("\\"), P('{'), P('}')
+if not math.mod then
+ function math.mod(n,m) return n % m end
+end
-lpeg.patterns.balanced = P {
- [1] = ((escape * (left+right)) + (1 - (left+right)) + V(2))^0,
- [2] = left * V(1) * right
-}
+local pipi = 2*math.pi/360
-local space = lpeg.P(' ')
-local equal = lpeg.P("=")
-local comma = lpeg.P(",")
-local lbrace = lpeg.P("{")
-local rbrace = lpeg.P("}")
-local nobrace = 1 - (lbrace+rbrace)
-local nested = lpeg.P { lbrace * (nobrace + lpeg.V(1))^0 * rbrace }
-local spaces = space^0
+if not math.sind then
+ function math.sind(d) return sin(d*pipi) end
+ function math.cosd(d) return cos(d*pipi) end
+ function math.tand(d) return tan(d*pipi) end
+end
-local value = lpeg.P(lbrace * lpeg.C((nobrace + nested)^0) * rbrace) + lpeg.C((nested + (1-comma))^0)
+if not math.odd then
+ function math.odd (n) return n % 2 ~= 0 end
+ function math.even(n) return n % 2 == 0 end
+end
-local key = lpeg.C((1-equal-comma)^1)
-local pattern_a = (space+comma)^0 * (key * equal * value + key * lpeg.C(""))
-local pattern_c = (space+comma)^0 * (key * equal * value)
-local key = lpeg.C((1-space-equal-comma)^1)
-local pattern_b = spaces * comma^0 * spaces * (key * ((spaces * equal * spaces * value) + lpeg.C("")))
+end -- of closure
--- "a=1, b=2, c=3, d={a{b,c}d}, e=12345, f=xx{a{b,c}d}xx, g={}" : outer {} removes, leading spaces ignored
+do -- create closure to overcome 200 locals limit
-local hash = { }
+if not modules then modules = { } end modules ['util-tab'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
-local function set(key,value) -- using Carg is slower here
- hash[key] = value
-end
+utilities = utilities or {}
+utilities.tables = utilities.tables or { }
+local tables = utilities.tables
-local pattern_a_s = (pattern_a/set)^1
-local pattern_b_s = (pattern_b/set)^1
-local pattern_c_s = (pattern_c/set)^1
+local format, gmatch = string.format, string.gmatch
+local concat, insert, remove = table.concat, table.insert, table.remove
+local setmetatable, getmetatable, tonumber, tostring = setmetatable, getmetatable, tonumber, tostring
-aux.settings_to_hash_pattern_a = pattern_a_s
-aux.settings_to_hash_pattern_b = pattern_b_s
-aux.settings_to_hash_pattern_c = pattern_c_s
+function tables.definetable(target) -- defines undefined tables
+ local composed, t, n = nil, { }, 0
+ for name in gmatch(target,"([^%.]+)") do
+ n = n + 1
+ if composed then
+ composed = composed .. "." .. name
+ else
+ composed = name
+ end
+ t[n] = format("%s = %s or { }",composed,composed)
+ end
+ return concat(t,"\n")
+end
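+-- For illustration (not part of the upstream sources), the generated chunk
+-- for a nested name looks like this:
+--
+--   print(tables.definetable("one.two.three"))
+--   -- one = one or { }
+--   -- one.two = one.two or { }
+--   -- one.two.three = one.two.three or { }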
-function aux.make_settings_to_hash_pattern(set,how)
- if how == "strict" then
- return (pattern_c/set)^1
- elseif how == "tolerant" then
- return (pattern_b/set)^1
- else
- return (pattern_a/set)^1
+function tables.accesstable(target)
+ local t = _G
+ for name in gmatch(target,"([^%.]+)") do
+ t = t[name]
end
+ return t
end
-function aux.settings_to_hash(str,existing)
- if str and str ~= "" then
- hash = existing or { }
- if moretolerant then
- lpegmatch(pattern_b_s,str)
- else
- lpegmatch(pattern_a_s,str)
+function tables.removevalue(t,value) -- todo: n
+ if value then
+ for i=1,#t do
+ if t[i] == value then
+ remove(t,i)
+ -- remove all, so no: return
+ end
end
- return hash
- else
- return { }
end
end
-function aux.settings_to_hash_tolerant(str,existing)
- if str and str ~= "" then
- hash = existing or { }
- lpegmatch(pattern_b_s,str)
- return hash
- else
- return { }
+function tables.insertbeforevalue(t,value,extra)
+ for i=1,#t do
+ if t[i] == extra then
+ remove(t,i)
+ end
end
+ for i=1,#t do
+ if t[i] == value then
+ insert(t,i,extra)
+ return
+ end
+ end
+ insert(t,1,extra)
end
-function aux.settings_to_hash_strict(str,existing)
- if str and str ~= "" then
- hash = existing or { }
- lpegmatch(pattern_c_s,str)
- return next(hash) and hash
- else
- return nil
+function tables.insertaftervalue(t,value,extra)
+ for i=1,#t do
+ if t[i] == extra then
+ remove(t,i)
+ end
+ end
+ for i=1,#t do
+ if t[i] == value then
+ insert(t,i+1,extra)
+ return
+ end
end
+ insert(t,#t+1,extra)
end
-local separator = comma * space^0
-local value = lpeg.P(lbrace * lpeg.C((nobrace + nested)^0) * rbrace) + lpeg.C((nested + (1-comma))^0)
-local pattern = lpeg.Ct(value*(separator*value)^0)
-
--- "aap, {noot}, mies" : outer {} removes, leading spaces ignored
-
-aux.settings_to_array_pattern = pattern
+-- experimental
--- we could use a weak table as cache
+local function toxml(t,d,result)
+ for k, v in table.sortedpairs(t) do
+ if type(v) == "table" then
+ result[#result+1] = format("%s<%s>",d,k)
+ toxml(v,d.." ",result)
+ result[#result+1] = format("%s%s>",d,k)
+ elseif tonumber(k) then
+ result[#result+1] = format("%s%s",d,k,v,k)
+ else
+ result[#result+1] = format("%s<%s>%s%s>",d,k,tostring(v),k)
+ end
+ end
+end
-function aux.settings_to_array(str)
- if not str or str == "" then
- return { }
+function table.toxml(t,name,nobanner)
+ local noroot = name == false
+ local result = (nobanner or noroot) and { } or { "" }
+ if noroot then
+ toxml( t, "", result)
else
- return lpegmatch(pattern,str)
+ toxml( { [name or "root"] = t }, "", result)
end
+ return concat(result,"\n")
end
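+-- An illustrative call (not part of the upstream sources), assuming the
+-- format strings as reconstructed above; indentation grows per nesting level:
+--
+--   print(table.toxml({ a = "x", b = { c = "y" } },"test",true))
+--   -- <test>
+--   --  <a>x</a>
+--   --  <b>
+--   --   <c>y</c>
+--   --  </b>
+--   -- </test>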
-local function set(t,v)
- t[#t+1] = v
-end
-local value = lpeg.P(lpeg.Carg(1)*value) / set
-local pattern = value*(separator*value)^0 * lpeg.Carg(1)
+end -- of closure
-function aux.add_settings_to_array(t,str)
- return lpegmatch(pattern,str,nil,t)
-end
+do -- create closure to overcome 200 locals limit
-function aux.hash_to_string(h,separator,yes,no,strict,omit)
- if h then
- local t, s = { }, table.sortedkeys(h)
- omit = omit and table.tohash(omit)
- for i=1,#s do
- local key = s[i]
- if not omit or not omit[key] then
- local value = h[key]
- if type(value) == "boolean" then
- if yes and no then
- if value then
- t[#t+1] = key .. '=' .. yes
- elseif not strict then
- t[#t+1] = key .. '=' .. no
- end
- elseif value or not strict then
- t[#t+1] = key .. '=' .. tostring(value)
- end
- else
- t[#t+1] = key .. '=' .. value
- end
- end
- end
- return concat(t,separator or ",")
- else
- return ""
- end
-end
+if not modules then modules = { } end modules ['util-sto'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
-function aux.array_to_string(a,separator)
- if a then
- return concat(a,separator or ",")
- else
- return ""
+local setmetatable, getmetatable = setmetatable, getmetatable
+
+utilities = utilities or { }
+utilities.storage = utilities.storage or { }
+local storage = utilities.storage
+
+function storage.mark(t)
+ if not t then
+ texio.write_nl("fatal error: storage '%s' cannot be marked",t)
+ os.exit()
+ end
+ local m = getmetatable(t)
+ if not m then
+ m = { }
+ setmetatable(t,m)
end
+ m.__storage__ = true
+ return t
end
-function aux.settings_to_set(str,t)
+function storage.allocate(t)
t = t or { }
- for s in gmatch(str,"%s*([^,]+)") do
- t[s] = true
+ local m = getmetatable(t)
+ if not m then
+ m = { }
+ setmetatable(t,m)
end
+ m.__storage__ = true
return t
end
-local value = lbrace * lpeg.C((nobrace + nested)^0) * rbrace
-local pattern = lpeg.Ct((space + value)^0)
+function storage.marked(t)
+ local m = getmetatable(t)
+ return m and m.__storage__
+end
-function aux.arguments_to_table(str)
- return lpegmatch(pattern,str)
+function storage.checked(t)
+ if not t then
+ texio.write_nl("fatal error: storage '%s' has not been allocated",t)
+ os.exit()
+ end
+ return t
end
--- temporary here
-function aux.getparameters(self,class,parentclass,settings)
- local sc = self[class]
- if not sc then
- sc = table.clone(self[parent])
- self[class] = sc
+function storage.setinitializer(data,initialize)
+ local m = getmetatable(data) or { }
+ m.__index = function(data,k)
+ m.__index = nil -- so that we can access the entries during initializing
+ initialize()
+ return data[k]
end
- aux.settings_to_hash(settings,sc)
+ setmetatable(data, m)
end
--- temporary here
+local keyisvalue = { __index = function(t,k)
+ t[k] = k
+ return k
+end }
-local digit = lpeg.R("09")
-local period = lpeg.P(".")
-local zero = lpeg.P("0")
-local trailingzeros = zero^0 * -digit -- suggested by Roberto R
-local case_1 = period * trailingzeros / ""
-local case_2 = period * (digit - trailingzeros)^1 * (trailingzeros / "")
-local number = digit^1 * (case_1 + case_2)
-local stripper = lpeg.Cs((number + 1)^0)
+function storage.sparse(t)
+ t = t or { }
+ setmetatable(t,keyisvalue)
+ return t
+end
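+
+-- a quick illustration (hypothetical table):
+--
+--~ local codes = storage.sparse()
+--~ print(codes.whatever) -- "whatever" : missing keys become (and are stored as) their own value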
---~ local sample = "bla 11.00 bla 11 bla 0.1100 bla 1.00100 bla 0.00 bla 0.001 bla 1.1100 bla 0.100100100 bla 0.00100100100"
---~ collectgarbage("collect")
---~ str = string.rep(sample,10000)
---~ local ts = os.clock()
---~ lpegmatch(stripper,str)
---~ print(#str, os.clock()-ts, lpegmatch(stripper,sample))
+-- table namespace ?
-lpeg.patterns.strip_zeros = stripper
+local function f_empty () return "" end -- t,k
+local function f_self (t,k) t[k] = k return k end
+local function f_ignore() end -- t,k,v
-function aux.strip_zeros(str)
- return lpegmatch(stripper,str)
-end
+local t_empty = { __index = f_empty }
+local t_self = { __index = f_self }
+local t_ignore = { __newindex = f_ignore }
-function aux.definetable(target) -- defines undefined tables
- local composed, t = nil, { }
- for name in gmatch(target,"([^%.]+)") do
- if composed then
- composed = composed .. "." .. name
+function table.setmetatableindex(t,f)
+ local m = getmetatable(t)
+ if m then
+ if f == "empty" then
+ m.__index = f_empty
+ elseif f == "key" then
+ m.__index = f_self
else
- composed = name
+ m.__index = f
+ end
+ else
+ if f == "empty" then
+ setmetatable(t, t_empty)
+ elseif f == "key" then
+ setmetatable(t, t_self)
+ else
+ setmetatable(t,{ __index = f })
end
- t[#t+1] = format("%s = %s or { }",composed,composed)
end
- return concat(t,"\n")
end
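+
+-- a quick illustration (hypothetical table, using the "key" shortcut above):
+--
+--~ local t = { } table.setmetatableindex(t,"key")
+--~ print(t.whatever) -- "whatever" : unknown keys resolve to (and cache) themselves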
-function aux.accesstable(target)
- local t = _G
- for name in gmatch(target,"([^%.]+)") do
- t = t[name]
+function table.setmetatablenewindex(t,f)
+ local m = getmetatable(t)
+ if m then
+ if f == "ignore" then
+ m.__newindex = f_ignore
+ else
+ m.__newindex = f
+ end
+ else
+ if f == "ignore" then
+ setmetatable(t, t_ignore)
+ else
+ setmetatable(t,{ __newindex = f })
+ end
end
- return t
end
---~ function string.commaseparated(str)
---~ return gmatch(str,"([^,%s]+)")
---~ end
+function table.setmetatablecall(t,f)
+ local m = getmetatable(t)
+ if m then
+ m.__call = f
+ else
+ setmetatable(t,{ __call = f })
+ end
+end
--- as we use this a lot ...
+function table.setmetatablekey(t,key,value)
+ local m = getmetatable(t)
+ if not m then
+ m = { }
+ setmetatable(t,m)
+ end
+ m[key] = value
+end
---~ function aux.cachefunction(action,weak)
---~ local cache = { }
---~ if weak then
---~ setmetatable(cache, { __mode = "kv" } )
---~ end
---~ local function reminder(str)
---~ local found = cache[str]
---~ if not found then
---~ found = action(str)
---~ cache[str] = found
---~ end
---~ return found
---~ end
---~ return reminder, cache
---~ end
+function table.getmetatablekey(t,key,value)
+ local m = getmetatable(t)
+ return m and m[key]
+end
end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['trac-tra'] = {
+if not modules then modules = { } end modules ['util-mrg'] = {
version = 1.001,
- comment = "companion to trac-tra.mkiv",
+ comment = "companion to luat-lib.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
license = "see context related readme files"
}
--- the tag is kind of generic and used for functions that are not
--- bound to a variable, like node.new, node.copy etc (contrary to for instance
--- node.has_attribute which is bound to a has_attribute local variable in mkiv)
-
-local debug = require "debug"
+-- hm, quite unreadable
-local getinfo = debug.getinfo
-local type, next = type, next
+local gsub, format = string.gsub, string.format
local concat = table.concat
-local format, find, lower, gmatch, gsub = string.format, string.find, string.lower, string.gmatch, string.gsub
-
-debugger = debugger or { }
+local type, next = type, next
-local counters = { }
-local names = { }
+utilities = utilities or {}
+utilities.merger = utilities.merger or { } -- maybe mergers
+utilities.report = logs and logs.reporter("system") or print
--- one
+local merger = utilities.merger
-local function hook()
- local f = getinfo(2,"f").func
- local n = getinfo(2,"Sn")
--- if n.what == "C" and n.name then print (n.namewhat .. ': ' .. n.name) end
- if f then
- local cf = counters[f]
- if cf == nil then
- counters[f] = 1
- names[f] = n
- else
- counters[f] = cf + 1
- end
- end
-end
-local function getname(func)
- local n = names[func]
- if n then
- if n.what == "C" then
- return n.name or ''
- else
- -- source short_src linedefined what name namewhat nups func
- local name = n.name or n.namewhat or n.what
- if not name or name == "" then name = "?" end
- return format("%s : %s : %s", n.short_src or "unknown source", n.linedefined or "--", name)
- end
- else
- return "unknown"
- end
-end
-function debugger.showstats(printer,threshold)
- printer = printer or texio.write or print
- threshold = threshold or 0
- local total, grandtotal, functions = 0, 0, 0
- printer("\n") -- ugly but ok
- -- table.sort(counters)
- for func, count in next, counters do
- if count > threshold then
- local name = getname(func)
- if not find(name,"for generator") then
- printer(format("%8i %s", count, name))
- total = total + count
- end
- end
- grandtotal = grandtotal + count
- functions = functions + 1
- end
- printer(format("functions: %s, total: %s, grand total: %s, threshold: %s\n", functions, total, grandtotal, threshold))
-end
+merger.strip_comment = true
--- two
+local m_begin_merge = "begin library merge"
+local m_end_merge = "end library merge"
+local m_begin_closure = "do -- create closure to overcome 200 locals limit"
+local m_end_closure = "end -- of closure"
---~ local function hook()
---~ local n = getinfo(2)
---~ if n.what=="C" and not n.name then
---~ local f = tostring(debug.traceback())
---~ local cf = counters[f]
---~ if cf == nil then
---~ counters[f] = 1
---~ names[f] = n
---~ else
---~ counters[f] = cf + 1
---~ end
---~ end
---~ end
---~ function debugger.showstats(printer,threshold)
---~ printer = printer or texio.write or print
---~ threshold = threshold or 0
---~ local total, grandtotal, functions = 0, 0, 0
---~ printer("\n") -- ugly but ok
---~ -- table.sort(counters)
---~ for func, count in next, counters do
---~ if count > threshold then
---~ printer(format("%8i %s", count, func))
---~ total = total + count
---~ end
---~ grandtotal = grandtotal + count
---~ functions = functions + 1
---~ end
---~ printer(format("functions: %s, total: %s, grand total: %s, threshold: %s\n", functions, total, grandtotal, threshold))
---~ end
+local m_pattern =
+ "%c+" ..
+ "%-%-%s+" .. m_begin_merge ..
+ "%c+(.-)%c+" ..
+ "%-%-%s+" .. m_end_merge ..
+ "%c+"
--- rest
+local m_format =
+ "\n\n-- " .. m_begin_merge ..
+ "\n%s\n" ..
+ "-- " .. m_end_merge .. "\n\n"
-function debugger.savestats(filename,threshold)
- local f = io.open(filename,'w')
- if f then
- debugger.showstats(function(str) f:write(str) end,threshold)
- f:close()
- end
-end
+local m_faked =
+ "-- " .. "created merged file" .. "\n\n" ..
+ "-- " .. m_begin_merge .. "\n\n" ..
+ "-- " .. m_end_merge .. "\n\n"
-function debugger.enable()
- debug.sethook(hook,"c")
+local function self_fake()
+ return m_faked
end
-function debugger.disable()
- debug.sethook()
---~ counters[debug.getinfo(2,"f").func] = nil
+local function self_nothing()
+ return ""
end
-function debugger.tracing()
- local n = tonumber(os.env['MTX.TRACE.CALLS']) or tonumber(os.env['MTX_TRACE_CALLS']) or 0
- if n > 0 then
- function debugger.tracing() return true end ; return true
+local function self_load(name)
+ local data = io.loaddata(name) or ""
+ if data == "" then
+ utilities.report("merge: unknown file %s",name)
else
- function debugger.tracing() return false end ; return false
+ utilities.report("merge: inserting %s",name)
end
+ return data or ""
end
---~ debugger.enable()
-
---~ print(math.sin(1*.5))
---~ print(math.sin(1*.5))
---~ print(math.sin(1*.5))
---~ print(math.sin(1*.5))
---~ print(math.sin(1*.5))
-
---~ debugger.disable()
-
---~ print("")
---~ debugger.showstats()
---~ print("")
---~ debugger.showstats(print,3)
-
-setters = setters or { }
-setters.data = setters.data or { }
+local function self_save(name, data)
+ if data ~= "" then
+ if merger.strip_comment then
+ -- saves some 20K
+ local n = #data
+ data = gsub(data,"%-%-~[^\n\r]*[\r\n]","")
+ utilities.report("merge: %s bytes of comment stripped, %s bytes of code left",n-#data,#data)
+ end
+ io.savedata(name,data)
+ utilities.report("merge: saving %s",name)
+ end
+end
---~ local function set(t,what,value)
---~ local data, done = t.data, t.done
---~ if type(what) == "string" then
---~ what = aux.settings_to_array(what) -- inefficient but ok
---~ end
---~ for i=1,#what do
---~ local w = what[i]
---~ for d, f in next, data do
---~ if done[d] then
---~ -- prevent recursion due to wildcards
---~ elseif find(d,w) then
---~ done[d] = true
---~ for i=1,#f do
---~ f[i](value)
---~ end
---~ end
---~ end
---~ end
---~ end
+local function self_swap(data,code)
+ return data ~= "" and (gsub(data,m_pattern, function() return format(m_format,code) end, 1)) or ""
+end
-local function set(t,what,value)
- local data, done = t.data, t.done
- if type(what) == "string" then
- what = aux.settings_to_hash(what) -- inefficient but ok
- end
- for w, v in next, what do
- if v == "" then
- v = value
- else
- v = toboolean(v)
+local function self_libs(libs,list)
+ local result, f, frozen, foundpath = { }, nil, false, nil
+ result[#result+1] = "\n"
+ if type(libs) == 'string' then libs = { libs } end
+ if type(list) == 'string' then list = { list } end
+ for i=1,#libs do
+ local lib = libs[i]
+ for j=1,#list do
+ local pth = gsub(list[j],"\\","/") -- file.clean_path
+ utilities.report("merge: checking library path %s",pth)
+ local name = pth .. "/" .. lib
+ if lfs.isfile(name) then
+ foundpath = pth
+ end
end
- for d, f in next, data do
- if done[d] then
- -- prevent recursion due to wildcards
- elseif find(d,w) then
- done[d] = true
- for i=1,#f do
- f[i](v)
- end
+ if foundpath then break end
+ end
+ if foundpath then
+ utilities.report("merge: using library path %s",foundpath)
+ local right, wrong = { }, { }
+ for i=1,#libs do
+ local lib = libs[i]
+ local fullname = foundpath .. "/" .. lib
+ if lfs.isfile(fullname) then
+ utilities.report("merge: using library %s",fullname)
+ right[#right+1] = lib
+ result[#result+1] = m_begin_closure
+ result[#result+1] = io.loaddata(fullname,true)
+ result[#result+1] = m_end_closure
+ else
+ utilities.report("merge: skipping library %s",fullname)
+ wrong[#wrong+1] = lib
end
end
+ if #right > 0 then
+ utilities.report("merge: used libraries: %s",concat(right," "))
+ end
+ if #wrong > 0 then
+ utilities.report("merge: skipped libraries: %s",concat(wrong," "))
+ end
+ else
+ utilities.report("merge: no valid library path found")
end
+ return concat(result, "\n\n")
end
-local function reset(t)
- for d, f in next, t.data do
- for i=1,#f do
- f[i](false)
- end
+function merger.selfcreate(libs,list,target)
+ if target then
+ self_save(target,self_swap(self_fake(),self_libs(libs,list)))
end
end
-local function enable(t,what)
- set(t,what,true)
+function merger.selfmerge(name,libs,list,target)
+ self_save(target or name,self_swap(self_load(name),self_libs(libs,list)))
end
-local function disable(t,what)
- local data = t.data
- if not what or what == "" then
- t.done = { }
- reset(t)
- else
- set(t,what,false)
- end
+function merger.selfclean(name)
+ self_save(name,self_swap(self_load(name),self_nothing()))
end
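+
+-- a hypothetical call (library names and the search path are only meant as illustration):
+--
+--~ merger.selfmerge("mtxrun.lua", { "l-string.lua", "l-table.lua" }, { "data" }, "mtxrun")
+--~ -- replaces the marked merge section of mtxrun.lua and saves the result as 'mtxrun'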
-function setters.register(t,what,...)
- local data = t.data
- what = lower(what)
- local w = data[what]
- if not w then
- w = { }
- data[what] = w
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+if not modules then modules = { } end modules ['util-lua'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+utilities = utilities or {}
+utilities.lua = utilities.lua or { }
+utilities.report = logs and logs.reporter("system") or print
+
+function utilities.lua.compile(luafile,lucfile,cleanup,strip) -- defaults: cleanup=false strip=true
+ utilities.report("lua: compiling %s into %s",luafile,lucfile)
+ os.remove(lucfile)
+ local command = "-o " .. string.quoted(lucfile) .. " " .. string.quoted(luafile)
+ if strip ~= false then
+ command = "-s " .. command
end
- for _, fnc in next, { ... } do
- local typ = type(fnc)
- if typ == "function" then
- w[#w+1] = fnc
- elseif typ == "string" then
- w[#w+1] = function(value) set(t,fnc,value,nesting) end
- end
+ local done = os.spawn("texluac " .. command) == 0 or os.spawn("luac " .. command) == 0
+ if done and cleanup == true and lfs.isfile(lucfile) and lfs.isfile(luafile) then
+ utilities.report("lua: removing %s",luafile)
+ os.remove(luafile)
end
+ return done
end
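+
+-- hypothetical calls (filenames are only meant as illustration):
+--
+--~ utilities.lua.compile("whatever.lua","whatever.luc")      -- strip symbols, keep the lua file
+--~ utilities.lua.compile("whatever.lua","whatever.luc",true) -- also remove the lua file afterwards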
-function setters.enable(t,what)
- local e = t.enable
- t.enable, t.done = enable, { }
- enable(t,string.simpleesc(tostring(what)))
- t.enable, t.done = e, { }
-end
-function setters.disable(t,what)
- local e = t.disable
- t.disable, t.done = disable, { }
- disable(t,string.simpleesc(tostring(what)))
- t.disable, t.done = e, { }
-end
-function setters.reset(t)
- t.done = { }
- reset(t)
-end
-function setters.list(t) -- pattern
- local list = table.sortedkeys(t.data)
- local user, system = { }, { }
- for l=1,#list do
- local what = list[l]
- if find(what,"^%*") then
+
+
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+if not modules then modules = { } end modules ['util-prs'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local P, R, V, C, Ct, Carg = lpeg.P, lpeg.R, lpeg.V, lpeg.C, lpeg.Ct, lpeg.Carg
+local lpegmatch = lpeg.match
+local concat, format, gmatch = table.concat, string.format, string.gmatch
+local tostring, type, next = tostring, type, next
+
+utilities = utilities or {}
+utilities.parsers = utilities.parsers or { }
+local parsers = utilities.parsers
+parsers.patterns = parsers.patterns or { }
+
+local setmetatableindex = table.setmetatableindex
+local sortedhash = table.sortedhash
+
+-- we could use a Cf Cg construct
+
+local escape, left, right = P("\\"), P('{'), P('}')
+
+lpeg.patterns.balanced = P {
+ [1] = ((escape * (left+right)) + (1 - (left+right)) + V(2))^0,
+ [2] = left * V(1) * right
+}
+
+local space = P(' ')
+local equal = P("=")
+local comma = P(",")
+local lbrace = P("{")
+local rbrace = P("}")
+local nobrace = 1 - (lbrace+rbrace)
+local nested = P { lbrace * (nobrace + V(1))^0 * rbrace }
+local spaces = space^0
+
+lpeg.patterns.nested = nested
+
+local value = P(lbrace * C((nobrace + nested)^0) * rbrace) + C((nested + (1-comma))^0)
+
+local key = C((1-equal-comma)^1)
+local pattern_a = (space+comma)^0 * (key * equal * value + key * C(""))
+local pattern_c = (space+comma)^0 * (key * equal * value)
+
+local key = C((1-space-equal-comma)^1)
+local pattern_b = spaces * comma^0 * spaces * (key * ((spaces * equal * spaces * value) + C("")))
+
+-- "a=1, b=2, c=3, d={a{b,c}d}, e=12345, f=xx{a{b,c}d}xx, g={}" : outer {} removes, leading spaces ignored
+
+local hash = { }
+
+local function set(key,value)
+ hash[key] = value
+end
+
+local pattern_a_s = (pattern_a/set)^1
+local pattern_b_s = (pattern_b/set)^1
+local pattern_c_s = (pattern_c/set)^1
+
+parsers.patterns.settings_to_hash_a = pattern_a_s
+parsers.patterns.settings_to_hash_b = pattern_b_s
+parsers.patterns.settings_to_hash_c = pattern_c_s
+
+function parsers.make_settings_to_hash_pattern(set,how)
+ if how == "strict" then
+ return (pattern_c/set)^1
+ elseif how == "tolerant" then
+ return (pattern_b/set)^1
+ else
+ return (pattern_a/set)^1
+ end
+end
+
+function parsers.settings_to_hash(str,existing)
+ if str and str ~= "" then
+ hash = existing or { }
+ lpegmatch(pattern_a_s,str)
+ return hash
+ else
+ return { }
+ end
+end
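+
+-- a quick illustration (hypothetical input; outer braces around a value are stripped):
+--
+--~ parsers.settings_to_hash("a=1, b=2, c={x,y}") -- { a = "1", b = "2", c = "x,y" }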
+
+function parsers.settings_to_hash_tolerant(str,existing)
+ if str and str ~= "" then
+ hash = existing or { }
+ lpegmatch(pattern_b_s,str)
+ return hash
+ else
+ return { }
+ end
+end
+
+function parsers.settings_to_hash_strict(str,existing)
+ if str and str ~= "" then
+ hash = existing or { }
+ lpegmatch(pattern_c_s,str)
+ return next(hash) and hash
+ else
+ return nil
+ end
+end
+
+local separator = comma * space^0
+local value = P(lbrace * C((nobrace + nested)^0) * rbrace) + C((nested + (1-comma))^0)
+local pattern = Ct(value*(separator*value)^0)
+
+-- "aap, {noot}, mies" : outer {} removes, leading spaces ignored
+
+parsers.patterns.settings_to_array = pattern
+
+-- we could use a weak table as cache
+
+function parsers.settings_to_array(str)
+ if not str or str == "" then
+ return { }
+ else
+ return lpegmatch(pattern,str)
+ end
+end
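+
+-- a quick illustration (the sample mentioned above):
+--
+--~ parsers.settings_to_array("aap, {noot}, mies") -- { "aap", "noot", "mies" }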
+
+local function set(t,v)
+ t[#t+1] = v
+end
+
+local value = P(Carg(1)*value) / set
+local pattern = value*(separator*value)^0 * Carg(1)
+
+function parsers.add_settings_to_array(t,str)
+ return lpegmatch(pattern,str,nil,t)
+end
+
+function parsers.hash_to_string(h,separator,yes,no,strict,omit)
+ if h then
+ local t, tn, s = { }, 0, table.sortedkeys(h)
+ omit = omit and table.tohash(omit)
+ for i=1,#s do
+ local key = s[i]
+ if not omit or not omit[key] then
+ local value = h[key]
+ if type(value) == "boolean" then
+ if yes and no then
+ if value then
+ tn = tn + 1
+ t[tn] = key .. '=' .. yes
+ elseif not strict then
+ tn = tn + 1
+ t[tn] = key .. '=' .. no
+ end
+ elseif value or not strict then
+ tn = tn + 1
+ t[tn] = key .. '=' .. tostring(value)
+ end
+ else
+ tn = tn + 1
+ t[tn] = key .. '=' .. value
+ end
+ end
+ end
+ return concat(t,separator or ",")
+ else
+ return ""
+ end
+end
+
+function parsers.array_to_string(a,separator)
+ if a then
+ return concat(a,separator or ",")
+ else
+ return ""
+ end
+end
+
+function parsers.settings_to_set(str,t) -- tohash? -- todo: lpeg -- duplicate anyway
+ t = t or { }
+-- for s in gmatch(str,"%s*([^, ]+)") do -- space added
+ for s in gmatch(str,"[^, ]+") do -- space added
+ t[s] = true
+ end
+ return t
+end
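+
+-- a quick illustration:
+--
+--~ parsers.settings_to_set("a, b, c") -- { a = true, b = true, c = true }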
+
+function parsers.simple_hash_to_string(h, separator)
+ local t, tn = { }, 0
+ for k, v in sortedhash(h) do
+ if v then
+ tn = tn + 1
+ t[tn] = k
+ end
+ end
+ return concat(t,separator or ",")
+end
+
+local value = lbrace * C((nobrace + nested)^0) * rbrace
+local pattern = Ct((space + value)^0)
+
+function parsers.arguments_to_table(str)
+ return lpegmatch(pattern,str)
+end
+
+-- temporary here (unoptimized)
+
+function parsers.getparameters(self,class,parentclass,settings)
+ local sc = self[class]
+ if not sc then
+ sc = { }
+ self[class] = sc
+ if parentclass then
+ local sp = self[parentclass]
+ if not sp then
+ sp = { }
+ self[parentclass] = sp
+ end
+ setmetatableindex(sc,sp)
+ end
+ end
+ parsers.settings_to_hash(settings,sc)
+end
+
+function parsers.listitem(str)
+ return gmatch(str,"[^, ]+")
+end
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+if not modules then modules = { } end modules ['util-fmt'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+utilities = utilities or { }
+utilities.formatters = utilities.formatters or { }
+local formatters = utilities.formatters
+
+local concat, format = table.concat, string.format
+local tostring, type = tostring, type
+local strip = string.strip
+
+local P, R, Cs = lpeg.P, lpeg.R, lpeg.Cs
+local lpegmatch = lpeg.match
+
+-- temporary here
+
+local digit = R("09")
+local period = P(".")
+local zero = P("0")
+local trailingzeros = zero^0 * -digit -- suggested by Roberto R
+local case_1 = period * trailingzeros / ""
+local case_2 = period * (digit - trailingzeros)^1 * (trailingzeros / "")
+local number = digit^1 * (case_1 + case_2)
+local stripper = Cs((number + 1)^0)
+
+
+lpeg.patterns.stripzeros = stripper
+
+function formatters.stripzeros(str)
+ return lpegmatch(stripper,str)
+end
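+
+-- a quick illustration:
+--
+--~ formatters.stripzeros("10.00 pt and 0.1100 em") -- "10 pt and 0.11 em"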
+
+function formatters.formatcolumns(result,between)
+ if result and #result > 0 then
+ between = between or " "
+ local widths, numbers = { }, { }
+ local first = result[1]
+ local n = #first
+ for i=1,n do
+ widths[i] = 0
+ end
+ for i=1,#result do
+ local r = result[i]
+ for j=1,n do
+ local rj = r[j]
+ local tj = type(rj)
+ if tj == "number" then
+ numbers[j] = true
+ end
+ if tj ~= "string" then
+ rj = tostring(rj)
+ r[j] = rj
+ end
+ local w = #rj
+ if w > widths[j] then
+ widths[j] = w
+ end
+ end
+ end
+ for i=1,n do
+ local w = widths[i]
+ if numbers[i] then
+ if w > 80 then
+ widths[i] = "%s" .. between
+ else
+ widths[i] = "%0" .. w .. "i" .. between
+ end
+ else
+ if w > 80 then
+ widths[i] = "%s" .. between
+ elseif w > 0 then
+ widths[i] = "%-" .. w .. "s" .. between
+ else
+ widths[i] = "%s"
+ end
+ end
+ end
+ local template = strip(concat(widths))
+ for i=1,#result do
+ local str = format(template,unpack(result[i]))
+ result[i] = strip(str)
+ end
+ end
+ return result
+end
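+
+-- a rough illustration (hypothetical rows; the exact padding depends on the column widths):
+--
+--~ local rows = { { "alpha", 1 }, { "b", 22 } }
+--~ formatters.formatcolumns(rows)
+--~ -- each row is now one padded string, numbers zero padded to the widest entry in their column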
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+if not modules then modules = { } end modules ['util-deb'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- the tag is kind of generic and used for functions that are not
+-- bound to a variable, like node.new, node.copy etc (contrary to for instance
+-- node.has_attribute which is bound to a has_attribute local variable in mkiv)
+
+local debug = require "debug"
+
+local getinfo = debug.getinfo
+local type, next, tostring = type, next, tostring
+local format, find = string.format, string.find
+local is_boolean = string.is_boolean
+
+utilities = utilities or { }
+utilities.debugger = utilities.debugger or { }
+local debugger = utilities.debugger
+
+local counters = { }
+local names = { }
+
+-- one
+
+local function hook()
+ local f = getinfo(2,"f").func
+ local n = getinfo(2,"Sn")
+-- if n.what == "C" and n.name then print (n.namewhat .. ': ' .. n.name) end
+ if f then
+ local cf = counters[f]
+ if cf == nil then
+ counters[f] = 1
+ names[f] = n
+ else
+ counters[f] = cf + 1
+ end
+ end
+end
+
+local function getname(func)
+ local n = names[func]
+ if n then
+ if n.what == "C" then
+ return n.name or ''
+ else
+ -- source short_src linedefined what name namewhat nups func
+ local name = n.name or n.namewhat or n.what
+ if not name or name == "" then name = "?" end
+ return format("%s : %s : %s", n.short_src or "unknown source", n.linedefined or "--", name)
+ end
+ else
+ return "unknown"
+ end
+end
+
+function debugger.showstats(printer,threshold)
+ printer = printer or texio.write or print
+ threshold = threshold or 0
+ local total, grandtotal, functions = 0, 0, 0
+ printer("\n") -- ugly but ok
+ -- table.sort(counters)
+ for func, count in next, counters do
+ if count > threshold then
+ local name = getname(func)
+ if not find(name,"for generator") then
+ printer(format("%8i %s", count, name))
+ total = total + count
+ end
+ end
+ grandtotal = grandtotal + count
+ functions = functions + 1
+ end
+ printer(format("functions: %s, total: %s, grand total: %s, threshold: %s\n", functions, total, grandtotal, threshold))
+end
+
+-- two
+
+
+-- rest
+
+function debugger.savestats(filename,threshold)
+ local f = io.open(filename,'w')
+ if f then
+ debugger.showstats(function(str) f:write(str) end,threshold)
+ f:close()
+ end
+end
+
+function debugger.enable()
+ debug.sethook(hook,"c")
+end
+
+function debugger.disable()
+ debug.sethook()
+end
+
+
+
+
+
+local is_node = node and node.is_node
+
+function inspect(i) -- global function
+ local ti = type(i)
+ if ti == "table" then
+ table.print(i,"table")
+ elseif is_node and is_node(i) then
+ table.print(nodes.astable(i),tostring(i))
+ else
+ print(tostring(i))
+ end
+end
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+if not modules then modules = { } end modules ['trac-inf'] = {
+ version = 1.001,
+ comment = "companion to trac-inf.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- As we want to protect the global tables, we no longer store the timing
+-- in the tables themselves but in a hidden timers table so that we don't
+-- get warnings about assignments. This is more efficient than using rawset
+-- and rawget.
+
+local format = string.format
+local clock = os.gettimeofday or os.clock -- should go in environment
+local write_nl = texio.write_nl
+
+statistics = statistics or { }
+local statistics = statistics
+
+statistics.enable = true
+statistics.threshold = 0.05
+
+local statusinfo, n, registered, timers = { }, 0, { }, { }
+
+local function hastiming(instance)
+ return instance and timers[instance]
+end
+
+local function resettiming(instance)
+ timers[instance or "notimer"] = { timing = 0, loadtime = 0 }
+end
+
+local function starttiming(instance)
+ local timer = timers[instance or "notimer"]
+ if not timer then
+ timer = { }
+ timers[instance or "notimer"] = timer
+ end
+ local it = timer.timing
+ if not it then
+ it = 0
+ end
+ if it == 0 then
+ timer.starttime = clock()
+ if not timer.loadtime then
+ timer.loadtime = 0
+ end
+ end
+ timer.timing = it + 1
+end
+
+local function stoptiming(instance, report)
+ local timer = timers[instance or "notimer"]
+ local it = timer.timing
+ if it > 1 then
+ timer.timing = it - 1
+ else
+ local starttime = timer.starttime
+ if starttime then
+ local stoptime = clock()
+ local loadtime = stoptime - starttime
+ timer.stoptime = stoptime
+ timer.loadtime = timer.loadtime + loadtime
+ if report then
+ statistics.report("load time %0.3f",loadtime)
+ end
+ timer.timing = 0
+ return loadtime
+ end
+ end
+ return 0
+end
+
+local function elapsedtime(instance)
+ local timer = timers[instance or "notimer"]
+ return format("%0.3f",timer and timer.loadtime or 0)
+end
+
+local function elapsedindeed(instance)
+ local timer = timers[instance or "notimer"]
+ return (timer and timer.loadtime or 0) > statistics.threshold
+end
+
+local function elapsedseconds(instance,rest) -- returns nil if 0 seconds
+ if elapsedindeed(instance) then
+ return format("%s seconds %s", elapsedtime(instance),rest or "")
+ end
+end
+
+statistics.hastiming = hastiming
+statistics.resettiming = resettiming
+statistics.starttiming = starttiming
+statistics.stoptiming = stoptiming
+statistics.elapsedtime = elapsedtime
+statistics.elapsedindeed = elapsedindeed
+statistics.elapsedseconds = elapsedseconds
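+
+-- typical usage (the instance can be any value, here a hypothetical string):
+--
+--~ statistics.starttiming("myjob")
+--~ -- ... do some work ...
+--~ statistics.stoptiming("myjob")
+--~ print(statistics.elapsedtime("myjob")) -- e.g. "0.123"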
+
+-- general function
+
+function statistics.register(tag,fnc)
+ if statistics.enable and type(fnc) == "function" then
+ local rt = registered[tag] or (#statusinfo + 1)
+ statusinfo[rt] = { tag, fnc }
+ registered[tag] = rt
+ if #tag > n then n = #tag end
+ end
+end
+
+function statistics.show(reporter)
+ if statistics.enable then
+ if not reporter then reporter = function(tag,data,n) write_nl(tag .. " " .. data) end end
+ -- this code will move
+ local register = statistics.register
+ register("luatex banner", function()
+ return string.lower(status.banner)
+ end)
+ register("control sequences", function()
+ return format("%s of %s", status.cs_count, status.hash_size+status.hash_extra)
+ end)
+ register("callbacks", function()
+ local total, indirect = status.callbacks or 0, status.indirect_callbacks or 0
+ return format("%s direct, %s indirect, %s total", total-indirect, indirect, total)
+ end)
+ collectgarbage("collect")
+ register("current memory usage", statistics.memused)
+ register("runtime",statistics.runtime)
+ for i=1,#statusinfo do
+ local s = statusinfo[i]
+ local r = s[2]()
+ if r then
+ reporter(s[1],r,n)
+ end
+ end
+ write_nl("") -- final newline
+ statistics.enable = false
+ end
+end
+
+local template, report_statistics, nn = nil, nil, 0 -- we only calculate it once
+
+function statistics.showjobstat(tag,data,n)
+ if not logs then
+ -- sorry
+ elseif type(data) == "table" then
+ for i=1,#data do
+ statistics.showjobstat(tag,data[i],n)
+ end
+ else
+ if not template or n > nn then
+ template, nn = format("%%-%ss - %%s",n), n
+ report_statistics = logs.reporter("mkiv lua stats")
+ end
+ report_statistics(format(template,tag,data))
+ end
+end
+
+function statistics.memused() -- no math.round yet -)
+ local round = math.round or math.floor
+ return format("%s MB (ctx: %s MB)",round(collectgarbage("count")/1000), round(status.luastate_bytes/1000000))
+end
+
+starttiming(statistics)
+
+function statistics.formatruntime(runtime) -- indirect so it can be overloaded and
+ return format("%s seconds", runtime) -- indeed that happens in cure-uti.lua
+end
+
+function statistics.runtime()
+ stoptiming(statistics)
+ return statistics.formatruntime(elapsedtime(statistics))
+end
+
+function statistics.timed(action,report)
+ report = report or logs.reporter("system")
+ starttiming("run")
+ action()
+ stoptiming("run")
+ report("total runtime: %s",elapsedtime("run"))
+end
+
+-- where, not really the best spot for this:
+
+commands = commands or { }
+
+function commands.resettimer(name)
+ resettiming(name or "whatever")
+ starttiming(name or "whatever")
+end
+
+function commands.elapsedtime(name)
+ stoptiming(name or "whatever")
+ tex.sprint(elapsedtime(name or "whatever"))
+end
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+if not modules then modules = { } end modules ['trac-set'] = { -- might become util-set.lua
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local type, next, tostring = type, next, tostring
+local concat = table.concat
+local format, find, lower, gsub, escapedpattern = string.format, string.find, string.lower, string.gsub, string.escapedpattern
+local is_boolean = string.is_boolean
+local settings_to_hash = utilities.parsers.settings_to_hash
+local allocate = utilities.storage.allocate
+
+utilities = utilities or { }
+local utilities = utilities
+utilities.setters = utilities.setters or { }
+local setters = utilities.setters
+
+local data = { } -- maybe just local
+
+-- We can initialize from the cnf file. This is sort of tricky as
+-- later defined setters also need to be initialized then. If set
+-- this way, we need to ensure that they are not reset later on.
+
+local trace_initialize = false -- only for testing during development
+
+function setters.initialize(filename,name,values) -- filename only for diagnostics
+ local setter = data[name]
+ if setter then
+ local data = setter.data
+ if data then
+ for key, value in next, values do
+ -- key = gsub(key,"_",".")
+ value = is_boolean(value,value)
+ local functions = data[key]
+ if functions then
+ if #functions > 0 and not functions.value then
+ if trace_initialize then
+ setter.report("executing %s (%s -> %s)",key,filename,tostring(value))
+ end
+ for i=1,#functions do
+ functions[i](value)
+ end
+ functions.value = value
+ else
+ if trace_initialize then
+ setter.report("skipping %s (%s -> %s)",key,filename,tostring(value))
+ end
+ end
+ else
+ -- we do a simple preregistration i.e. not in the
+ -- list as it might be an obsolete entry
+ functions = { default = value }
+ data[key] = functions
+ if trace_initialize then
+ setter.report("storing %s (%s -> %s)",key,filename,tostring(value))
+ end
+ end
+ end
+ return true
+ end
+ end
+end
+
+-- user interface code
+
+local function set(t,what,newvalue)
+ local data, done = t.data, t.done
+ if type(what) == "string" then
+ what = settings_to_hash(what) -- inefficient but ok
+ end
+ if type(what) ~= "table" then
+ return
+ end
+ for w, value in next, what do
+ if value == "" then
+ value = newvalue
+ elseif not value then
+ value = false -- catch nil
+ else
+ value = is_boolean(value,value)
+ end
+ w = "^" .. escapedpattern(w,true) .. "$" -- new: anchored
+ for name, functions in next, data do
+ if done[name] then
+ -- prevent recursion due to wildcards
+ elseif find(name,w) then
+ done[name] = true
+ for i=1,#functions do
+ functions[i](value)
+ end
+ functions.value = value
+ end
+ end
+ end
+end
+
+local function reset(t)
+ for name, functions in next, t.data do
+ for i=1,#functions do
+ functions[i](false)
+ end
+ functions.value = false
+ end
+end
+
+local function enable(t,what)
+ set(t,what,true)
+end
+
+local function disable(t,what)
+ local data = t.data
+ if not what or what == "" then
+ t.done = { }
+ reset(t)
+ else
+ set(t,what,false)
+ end
+end
+
+function setters.register(t,what,...)
+ local data = t.data
+ what = lower(what)
+ local functions = data[what]
+ if not functions then
+ functions = { }
+ data[what] = functions
+ if trace_initialize then
+ t.report("defining %s",what)
+ end
+ end
+ local default = functions.default -- can be set from cnf file
+ for _, fnc in next, { ... } do
+ local typ = type(fnc)
+ if typ == "string" then
+ if trace_initialize then
+ t.report("coupling %s to %s",what,fnc)
+ end
+ local s = fnc -- else wrong reference
+ fnc = function(value) set(t,s,value) end
+ elseif typ ~= "function" then
+ fnc = nil
+ end
+ if fnc then
+ functions[#functions+1] = fnc
+ -- default: set at command line or in cnf file
+ -- value : set in tex run (needed when loading runtime)
+ local value = functions.value or default
+ if value ~= nil then
+ fnc(value)
+ functions.value = value
+ end
+ end
+ end
+ return false -- so we can use it in an assignment
+end
+
+function setters.enable(t,what)
+ local e = t.enable
+ t.enable, t.done = enable, { }
+ enable(t,what)
+ t.enable, t.done = e, { }
+end
+
+function setters.disable(t,what)
+ local e = t.disable
+ t.disable, t.done = disable, { }
+ disable(t,what)
+ t.disable, t.done = e, { }
+end
+
+function setters.reset(t)
+ t.done = { }
+ reset(t)
+end
+
+function setters.list(t) -- pattern
+ local list = table.sortedkeys(t.data)
+ local user, system = { }, { }
+ for l=1,#list do
+ local what = list[l]
+ if find(what,"^%*") then
system[#system+1] = what
else
- user[#user+1] = what
+ user[#user+1] = what
+ end
+ end
+ return user, system
+end
+
+function setters.show(t)
+ local category = t.name
+ local list = setters.list(t)
+ t.report()
+ for k=1,#list do
+ local name = list[k]
+ local functions = t.data[name]
+ if functions then
+ local value, default, modules = functions.value, functions.default, #functions
+ value = value == nil and "unset" or tostring(value)
+ default = default == nil and "unset" or tostring(default)
+ t.report("%-30s modules: %2i default: %6s value: %6s",name,modules,default,value)
+ end
+ end
+ t.report()
+end
+
+-- we could have used a bit of oo and the trackers:enable syntax but
+-- there is already a lot of code around using the singular tracker
+
+-- we could make this into a module but we also want the rest available
+
+local enable, disable, register, list, show = setters.enable, setters.disable, setters.register, setters.list, setters.show
+
+local function report(setter,...)
+ local report = logs and logs.report
+ if report then
+ report(setter.name,...)
+ else -- fallback, as this module is loaded before the logger
+ write_nl(format("%-15s : %s\n",setter.name,format(...)))
+ end
+end
+
+function setters.new(name)
+ local setter -- we need to access it in setter itself
+ setter = {
+ data = allocate(), -- indexed, but also default and value fields
+ name = name,
+ report = function(...) report (setter,...) end,
+ enable = function(...) enable (setter,...) end,
+ disable = function(...) disable (setter,...) end,
+ register = function(...) register(setter,...) end,
+ list = function(...) list (setter,...) end,
+ show = function(...) show (setter,...) end,
+ }
+ data[name] = setter
+ return setter
+end
+
+trackers = setters.new("trackers")
+directives = setters.new("directives")
+experiments = setters.new("experiments")
+
+local t_enable, t_disable, t_report = trackers .enable, trackers .disable, trackers .report
+local d_enable, d_disable, d_report = directives .enable, directives .disable, directives .report
+local e_enable, e_disable, e_report = experiments.enable, experiments.disable, experiments.report
+
+-- nice trick: we overload two of the directives related functions with variants that
+-- do tracing (itself using a tracker) .. proof of concept
+
+local trace_directives = false  trackers.register("system.directives", function(v) trace_directives = v end)
+local trace_experiments = false  trackers.register("system.experiments", function(v) trace_experiments = v end)
+
+function directives.enable(...)
+ if trace_directives then
+ d_report("enabling: %s",concat({...}," "))
+ end
+ d_enable(...)
+end
+
+function directives.disable(...)
+ if trace_directives then
+ d_report("disabling: %s",concat({...}," "))
+ end
+ d_disable(...)
+end
+
+function experiments.enable(...)
+ if trace_experiments then
+ e_report("enabling: %s",concat({...}," "))
+ end
+ e_enable(...)
+end
+
+function experiments.disable(...)
+ if trace_experiments then
+ e_report("disabling: %s",concat({...}," "))
+ end
+ e_disable(...)
+end
+
+-- a useful example
+
+directives.register("system.nostatistics", function(v)
+ statistics.enable = not v
+end)
+
+directives.register("system.nolibraries", function(v)
+ libraries = nil -- we discard this tracing for security
+end)
+
+-- experiment
+
+local flags = environment and environment.engineflags
+
+if flags then
+ if trackers and flags.trackers then
+ setters.initialize("flags","trackers", settings_to_hash(flags.trackers))
+ -- t_enable(flags.trackers)
+ end
+ if directives and flags.directives then
+ setters.initialize("flags","directives", settings_to_hash(flags.directives))
+ -- d_enable(flags.directives)
+ end
+end
+
+-- here
+
+if texconfig then
+
+ local function set(k,v)
+ v = tonumber(v)
+ if v then
+ texconfig[k] = v
+ end
+ end
+
+ directives.register("luatex.expanddepth", function(v) set("expand_depth",v) end)
+ directives.register("luatex.hashextra", function(v) set("hash_extra",v) end)
+ directives.register("luatex.nestsize", function(v) set("nest_size",v) end)
+ directives.register("luatex.maxinopen", function(v) set("max_in_open",v) end)
+ directives.register("luatex.maxprintline", function(v) set("max_print_line",v) end)
+ directives.register("luatex.maxstrings", function(v) set("max_strings",v) end)
+ directives.register("luatex.paramsize", function(v) set("param_size",v) end)
+ directives.register("luatex.savesize", function(v) set("save_size",v) end)
+ directives.register("luatex.stacksize", function(v) set("stack_size",v) end)
+
+end
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+if not modules then modules = { } end modules ['trac-log'] = {
+ version = 1.001,
+ comment = "companion to trac-log.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- todo: less categories, more subcategories (e.g. nodes)
+
+
+local write_nl, write = texio and texio.write_nl or print, texio and texio.write or io.write
+local format, gmatch, find = string.format, string.gmatch, string.find
+local concat, insert, remove = table.concat, table.insert, table.remove
+local escapedpattern = string.escapedpattern
+local texcount = tex and tex.count
+local next, type = next, type
+
+local setmetatableindex = table.setmetatableindex
+
+--[[ldx--
+This is a prelude to a more extensive logging module. We no longer
+provide xml based logging as parsing is relatively easy anyway.
+--ldx]]--
+
+logs = logs or { }
+local logs = logs
+
+local moreinfo = [[
+More information about ConTeXt and the tools that come with it can be found at:
+
+maillist : ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
+webpage : http://www.pragma-ade.nl / http://tex.aanhet.net
+wiki : http://contextgarden.net
+]]
+
+-- basic loggers
+
+local function ignore() end
+
+setmetatableindex(logs, function(t,k) t[k] = ignore ; return ignore end)
+
+local report, subreport, status, settarget, setformats, settranslations
+
+local direct, subdirect, writer, pushtarget, poptarget
+
+if tex and (tex.jobname or tex.formatname) then
+
+ local valueiskey = { __index = function(t,k) t[k] = k return k end } -- will be helper
+
+ local target = "term and log"
+
+ logs.flush = io.flush
+
+ local formats = { } setmetatable(formats, valueiskey)
+ local translations = { } setmetatable(translations,valueiskey)
+
+ writer = function(...)
+ write_nl(target,...)
+ end
+
+ report = function(a,b,c,...)
+ if c then
+ write_nl(target,format("%-15s > %s\n",translations[a],format(formats[b],c,...)))
+ elseif b then
+ write_nl(target,format("%-15s > %s\n",translations[a],formats[b]))
+ elseif a then
+ write_nl(target,format("%-15s >\n", translations[a]))
+ else
+ write_nl(target,"\n")
+ end
+ end
+
+ direct = function(a,b,c,...)
+ if c then
+ return format("%-15s > %s",translations[a],format(formats[b],c,...))
+ elseif b then
+ return format("%-15s > %s",translations[a],formats[b])
+ elseif a then
+ return format("%-15s >", translations[a])
+ else
+ return ""
+ end
+ end
+
+ subreport = function(a,s,b,c,...)
+ if c then
+ write_nl(target,format("%-15s > %s > %s\n",translations[a],translations[s],format(formats[b],c,...)))
+ elseif b then
+ write_nl(target,format("%-15s > %s > %s\n",translations[a],translations[s],formats[b]))
+ elseif a then
+ write_nl(target,format("%-15s > %s >\n", translations[a],translations[s]))
+ else
+ write_nl(target,"\n")
+ end
+ end
+
+ subdirect = function(a,s,b,c,...)
+ if c then
+ return format("%-15s > %s > %s",translations[a],translations[s],format(formats[b],c,...))
+ elseif b then
+ return format("%-15s > %s > %s",translations[a],translations[s],formats[b])
+ elseif a then
+ return format("%-15s > %s >", translations[a],translations[s])
+ else
+ return ""
+ end
+ end
+
+ status = function(a,b,c,...)
+ if c then
+ write_nl(target,format("%-15s : %s\n",translations[a],format(formats[b],c,...)))
+ elseif b then
+ write_nl(target,format("%-15s : %s\n",translations[a],formats[b]))
+ elseif a then
+ write_nl(target,format("%-15s :\n", translations[a]))
+ else
+ write_nl(target,"\n")
+ end
+ end
+
+ local targets = {
+ logfile = "log",
+ log = "log",
+ file = "log",
+ console = "term",
+ terminal = "term",
+ both = "term and log",
+ }
+
+ settarget = function(whereto)
+ target = targets[whereto or "both"] or targets.both
+ if target == "term" or target == "term and log" then
+ logs.flush = io.flush
+ else
+ logs.flush = ignore
+ end
+ end
+
+ local stack = { }
+
+ pushtarget = function(newtarget)
+ insert(stack,target)
+ settarget(newtarget)
+ end
+
+ poptarget = function()
+ if #stack > 0 then
+ settarget(remove(stack))
+ end
+ end
+
+ setformats = function(f)
+ formats = f
+ end
+
+ settranslations = function(t)
+ translations = t
+ end
+
+else
+
+ logs.flush = ignore
+
+ writer = write_nl
+
+ report = function(a,b,c,...)
+ if c then
+ write_nl(format("%-15s | %s",a,format(b,c,...)))
+ elseif b then
+ write_nl(format("%-15s | %s",a,b))
+ elseif a then
+ write_nl(format("%-15s |", a))
+ else
+ write_nl("")
+ end
+ end
+
+ subreport = function(a,sub,b,c,...)
+ if c then
+ write_nl(format("%-15s | %s | %s",a,sub,format(b,c,...)))
+ elseif b then
+ write_nl(format("%-15s | %s | %s",a,sub,b))
+ elseif a then
+ write_nl(format("%-15s | %s |", a,sub))
+ else
+ write_nl("")
+ end
+ end
+
+ status = function(a,b,c,...) -- not to be used in lua anyway
+ if c then
+ write_nl(format("%-15s : %s\n",a,format(b,c,...)))
+ elseif b then
+ write_nl(format("%-15s : %s\n",a,b)) -- b can have %'s
+ elseif a then
+ write_nl(format("%-15s :\n", a))
+ else
+ write_nl("\n")
+ end
+ end
+
+ direct = ignore
+ subdirect = ignore
+
+ settarget = ignore
+ pushtarget = ignore
+ poptarget = ignore
+ setformats = ignore
+ settranslations = ignore
+
+end
+
+logs.report = report
+logs.subreport = subreport
+logs.status = status
+logs.settarget = settarget
+logs.pushtarget = pushtarget
+logs.poptarget = poptarget
+logs.setformats = setformats
+logs.settranslations = settranslations
+
+logs.direct = direct
+logs.subdirect = subdirect
+logs.writer = writer
+
+-- installer
+
+-- todo: renew (un) locks when a new one is added and wildcard
+
+local data, states = { }, nil
+
+function logs.reporter(category,subcategory)
+ local logger = data[category]
+ if not logger then
+ local state = false
+ if states == true then
+ state = true
+ elseif type(states) == "table" then
+ for c, _ in next, states do
+ if find(category,c) then
+ state = true
+ break
+ end
+ end
+ end
+ logger = {
+ reporters = { },
+ state = state,
+ }
+ data[category] = logger
+ end
+ local reporter = logger.reporters[subcategory or "default"]
+ if not reporter then
+ if subcategory then
+ reporter = function(...)
+ if not logger.state then
+ subreport(category,subcategory,...)
+ end
+ end
+ logger.reporters[subcategory] = reporter
+ else
+ local tag = category
+ reporter = function(...)
+ if not logger.state then
+ report(category,...)
+ end
+ end
+ logger.reporters.default = reporter
+ end
+ end
+ return reporter
+end
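+
+-- typical usage (the category name is only meant as illustration):
+--
+--~ local report_demo = logs.reporter("demo")
+--~ report_demo("processed %s files",3) -- reports something like "demo > processed 3 files"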
+
+logs.new = logs.reporter -- for old times sake
+
+-- context specific: this ends up in the macro stream
+
+local ctxreport = logs.writer
+
+function logs.setmessenger(m)
+ ctxreport = m
+end
+
+function logs.messenger(category,subcategory)
+ -- we need to avoid catcode mess (todo: fast context)
+ if subcategory then
+ return function(...)
+ ctxreport(subdirect(category,subcategory,...))
+ end
+ else
+ return function(...)
+ ctxreport(direct(category,...))
+ end
+ end
+end
+
+-- so far
+
+local function setblocked(category,value)
+ if category == true then
+ -- lock all
+ category, value = "*", true
+ elseif category == false then
+ -- unlock all
+ category, value = "*", false
+ elseif value == nil then
+ -- lock selective
+ value = true
+ end
+ if category == "*" then
+ states = value
+ for k, v in next, data do
+ v.state = value
+ end
+ else
+ states = utilities.parsers.settings_to_hash(category)
+ for c, _ in next, states do
+ if data[c] then
+ data[c].state = value
+ else
+ c = escapedpattern(c,true)
+ for k, v in next, data do
+ if find(k,c) then
+ v.state = value
+ end
+ end
+ end
+ end
+ end
+end
+
+function logs.disable(category,value)
+ setblocked(category,value == nil and true or value)
+end
+
+function logs.enable(category)
+ setblocked(category,false)
+end
+
+function logs.categories()
+ return table.sortedkeys(data)
+end
+
+function logs.show()
+ local n, c, s, max = 0, 0, 0, 0
+ for category, v in table.sortedpairs(data) do
+ n = n + 1
+ local state = v.state
+ local reporters = v.reporters
+ local nc = #category
+ if nc > c then
+ c = nc
+ end
+ for subcategory, _ in next, reporters do
+ local ns = #subcategory
+ if ns > s then
+ s = ns
+ end
+ local m = nc + ns
+ if m > max then
+ max = m
+ end
+ end
+ local subcategories = concat(table.sortedkeys(reporters),", ")
+ if state == true then
+ state = "disabled"
+ elseif state == false then
+ state = "enabled"
+ else
+ state = "unknown"
+ end
+ -- no new here
+ report("logging","category: '%s', subcategories: '%s', state: '%s'",category,subcategories,state)
+ end
+ report("logging","categories: %s, max category: %s, max subcategory: %s, max combined: %s",n,c,s,max)
+end
+
+directives.register("logs.blocked", function(v)
+ setblocked(v,true)
+end)
+
+directives.register("logs.target", function(v)
+ settarget(v)
+end)
+
+-- tex specific loggers (might move elsewhere)
+
+local report_pages = logs.reporter("pages") -- not needed but saves checking when we grep for it
+
+local real, user, sub
+
+function logs.start_page_number()
+ real, user, sub = texcount.realpageno, texcount.userpageno, texcount.subpageno
+end
+
+function logs.stop_page_number()
+ if real > 0 then
+ if user > 0 then
+ if sub > 0 then
+ report_pages("flushing realpage %s, userpage %s, subpage %s",real,user,sub)
+ else
+ report_pages("flushing realpage %s, userpage %s",real,user)
+ end
+ else
+ report_pages("flushing realpage %s",real)
+ end
+ else
+ report_pages("flushing page")
+ end
+ logs.flush()
+end
+
+logs.report_job_stat = statistics and statistics.showjobstat
+
+local report_files = logs.reporter("files")
+
+local nesting = 0
+local verbose = false
+local hasscheme = url.hasscheme
+
+-- we don't have show_open and show_close callbacks yet
+
+function logs.show_open(name)
+ -- if hasscheme(name) ~= "virtual" then
+ -- if verbose then
+ -- nesting = nesting + 1
+ -- report_files("level %s, opening %s",nesting,name)
+ -- else
+ -- write(format("(%s",name)) -- tex adds a space
+ -- end
+ -- end
+end
+
+function logs.show_close(name)
+ -- if hasscheme(name) ~= "virtual" then
+ -- if verbose then
+ -- report_files("level %s, closing %s",nesting,name)
+ -- nesting = nesting - 1
+ -- else
+ -- write(")") -- tex adds a space
+ -- end
+ -- end
+end
+
+function logs.show_load(name)
+ -- if hasscheme(name) ~= "virtual" then
+ -- if verbose then
+ -- report_files("level %s, loading %s",nesting+1,name)
+ -- else
+ -- write(format("(%s)",name))
+ -- end
+ -- end
+end
+
+-- there may be scripts out there using this:
+
+local simple = logs.reporter("comment")
+
+logs.simple = simple
+logs.simpleline = simple
+
+-- obsolete
+
+function logs.setprogram () end -- obsolete
+function logs.extendbanner() end -- obsolete
+function logs.reportlines () end -- obsolete
+function logs.reportbanner() end -- obsolete
+function logs.reportline () end -- obsolete
+function logs.simplelines () end -- obsolete
+function logs.help () end -- obsolete
+
+-- applications
+
+local function reportlines(t,str)
+ if str then
+ for line in gmatch(str,"(.-)[\n\r]") do
+ t.report(line)
+ end
+ end
+end
+
+local function reportbanner(t)
+ local banner = t.banner
+ if banner then
+ t.report(banner)
+ t.report()
+ end
+end
+
+local function reporthelp(t,...)
+ local helpinfo = t.helpinfo
+ if type(helpinfo) == "string" then
+ reportlines(t,helpinfo)
+ elseif type(helpinfo) == "table" then
+ local tags = { ... }
+ for i=1,#tags do
+ reportlines(t,t.helpinfo[tags[i]])
+ if i < #tags then
+ t.report()
+ end
+ end
+ end
+end
+
+local function reportinfo(t)
+ t.report()
+ reportlines(t,moreinfo)
+end
+
+function logs.application(t)
+ t.name = t.name or "unknown"
+ t.banner = t.banner
+ t.report = logs.reporter(t.name)
+ t.help = function(...) reportbanner(t) ; reporthelp(t,...) ; reportinfo(t) end
+ t.identify = function() reportbanner(t) end
+ return t
+end
+
+-- somewhat special
+
+-- logging to a file
+
+
+function logs.system(whereto,process,jobname,category,...)
+ local message = format("%s %s => %s => %s => %s\r",os.date("%d/%m/%y %H:%M:%S"),process,jobname,category,format(...))
+ for i=1,10 do
+ local f = io.open(whereto,"a") -- we can consider keeping the file open
+ if f then
+ f:write(message)
+ f:close()
+ break
+ else
+ sleep(0.1)
+ end
+ end
+end
+
+local report_system = logs.reporter("system","logs")
+
+function logs.obsolete(old,new)
+ local o = loadstring("return " .. new)()
+ if type(o) == "function" then
+ return function(...)
+ report_system("function %s is obsolete, use %s",old,new)
+ loadstring(old .. "=" .. new .. " return ".. old)()(...)
+ end
+ elseif type(o) == "table" then
+ local t, m = { }, { }
+ m.__index = function(t,k)
+ report_system("table %s is obsolete, use %s",old,new)
+ m.__index, m.__newindex = o, o
+ return o[k]
+ end
+ m.__newindex = function(t,k,v)
+ report_system("table %s is obsolete, use %s",old,new)
+ m.__index, m.__newindex = o, o
+ o[k] = v
+ end
+ if libraries then
+ libraries.obsolete[old] = t -- true
+ end
+ setmetatable(t,m)
+ return t
+ end
+end
+
+if utilities then
+ utilities.report = report_system
+end
+
+if tex and tex.error then
+ function logs.texerrormessage(...) -- for the moment we put this function here
+ tex.error(format(...), { })
+ end
+else
+ function logs.texerrormessage(...)
+ print(format(...))
+ end
+end
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+if not modules then modules = { } end modules ['trac-pro'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local getmetatable, setmetatable, rawset, type = getmetatable, setmetatable, rawset, type
+
+-- The protection implemented here is probably not that tight but good enough to catch
+-- problems due to naive usage.
+--
+-- There's a more extensive version (trac-xxx.lua) that supports nesting.
+--
+-- This will change when we have _ENV in lua 5.2+
+
+local trace_namespaces = false trackers.register("system.namespaces", function(v) trace_namespaces = v end)
+
+local report_system = logs.reporter("system","protection")
+
+namespaces = namespaces or { }
+local namespaces = namespaces
+
+local registered = { }
+
+local function report_index(k,name)
+ if trace_namespaces then
+ report_system("reference to '%s' in protected namespace '%s', %s",k,name,debug.traceback())
+ else
+ report_system("reference to '%s' in protected namespace '%s'",k,name)
+ end
+end
+
+local function report_newindex(k,name)
+ if trace_namespaces then
+ report_system("assignment to '%s' in protected namespace '%s', %s",k,name,debug.traceback())
+ else
+ report_system("assignment to '%s' in protected namespace '%s'",k,name)
+ end
+end
+
+local function register(name)
+ local data = name == "global" and _G or _G[name]
+ if not data then
+ return -- error
+ end
+ registered[name] = data
+ local m = getmetatable(data)
+ if not m then
+ m = { }
+ setmetatable(data,m)
+ end
+ local index, newindex = { }, { }
+ m.__saved__index = m.__index
+ m.__no__index = function(t,k)
+ if not index[k] then
+ index[k] = true
+ report_index(k,name)
+ end
+ return nil
+ end
+ m.__saved__newindex = m.__newindex
+ m.__no__newindex = function(t,k,v)
+ if not newindex[k] then
+ newindex[k] = true
+ report_newindex(k,name)
+ end
+ rawset(t,k,v)
+ end
+ m.__protection__depth = 0
+end
+
+local function private(name) -- maybe save name
+ local data = registered[name]
+ if not data then
+ data = _G[name]
+ if not data then
+ data = { }
+ _G[name] = data
+ end
+ register(name)
+ end
+ return data
+end
+
+local function protect(name)
+ local data = registered[name]
+ if not data then
+ return
+ end
+ local m = getmetatable(data)
+ local pd = m.__protection__depth
+ if pd > 0 then
+ m.__protection__depth = pd + 1
+ else
+ m.__saved__index, m.__saved__newindex = m.__index, m.__newindex
+ m.__index, m.__newindex = m.__no__index, m.__no__newindex
+ m.__protection__depth = 1
+ end
+end
+
+local function unprotect(name)
+ local data = registered[name]
+ if not data then
+ return
+ end
+ local m = getmetatable(data)
+ local pd = m.__protection__depth
+ if pd > 1 then
+ m.__protection__depth = pd - 1
+ else
+ m.__index, m.__newindex = m.__saved__index, m.__saved__newindex
+ m.__protection__depth = 0
+ end
+end
+
+local function protectall()
+ for name, _ in next, registered do
+ if name ~= "global" then
+ protect(name)
+ end
+ end
+end
+
+local function unprotectall()
+ for name, _ in next, registered do
+ if name ~= "global" then
+ unprotect(name)
+ end
+ end
+end
+
+namespaces.register = register -- register when defined
+namespaces.private = private -- allocate and register if needed
+namespaces.protect = protect
+namespaces.unprotect = unprotect
+namespaces.protectall = protectall
+namespaces.unprotectall = unprotectall
+
+namespaces.private("namespaces") registered = { } register("global") -- unreachable
+
+directives.register("system.protect", function(v)
+ if v then
+ protectall()
+ else
+ unprotectall()
+ end
+end)
+
+directives.register("system.checkglobals", function(v)
+ if v then
+ report_system("enabling global namespace guard")
+ protect("global")
+ else
+ report_system("disabling global namespace guard")
+ unprotect("global")
+ end
+end)
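+
+-- a small usage sketch (the 'mylib' namespace is made up, not part of this module):
+--
+--~ local mylib = namespaces.private("mylib") -- allocate and register the table
+--~ mylib.helper = function() end             -- populate it as usual
+--~ namespaces.protect("mylib")               -- report stray reads and writes from now on
+--~ namespaces.unprotect("mylib")             -- and lift the guard again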
+
+-- dummy section (will go to luat-dum.lua)
+
+
+
+
+
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+if not modules then modules = { } end modules ['luat-env'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- A former version provided functionality for non embedded core
+-- scripts, i.e. runtime library loading. Given the amount of
+-- Lua code we use now, this no longer makes sense. Much of this
+-- evolved before bytecode arrays were available and so a lot of
+-- code has disappeared already.
+
+local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
+
+local report_lua = logs.reporter("resolvers","lua")
+
+local allocate, mark = utilities.storage.allocate, utilities.storage.mark
+
+local format, sub, match, gsub, find = string.format, string.sub, string.match, string.gsub, string.find
+local unquoted, quoted = string.unquoted, string.quoted
+local concat = table.concat
+
+-- precautions
+
+os.setlocale(nil,nil) -- useless feature and even dangerous in luatex
+
+function os.setlocale()
+ -- no way you can mess with it
+end
+
+-- dirty tricks
+
+if arg and (arg[0] == 'luatex' or arg[0] == 'luatex.exe') and arg[1] == "--luaonly" then
+ arg[-1] = arg[0]
+ arg[ 0] = arg[2]
+ for k=3,#arg do
+ arg[k-2] = arg[k]
+ end
+ arg[#arg] = nil -- last
+ arg[#arg] = nil -- pre-last
+end
+
+-- environment
+
+environment = environment or { }
+local environment = environment
+
+environment.arguments = allocate()
+environment.files = allocate()
+environment.sortedflags = nil
+
+local mt = {
+ __index = function(_,k)
+ if k == "version" then
+ local version = tex.toks and tex.toks.contextversiontoks
+ if version and version ~= "" then
+ rawset(environment,"version",version)
+ return version
+ else
+ return "unknown"
+ end
+ elseif k == "jobname" or k == "formatname" then
+ local name = tex and tex[k]
+ if name or name == "" then
+ rawset(environment,k,name)
+ return name
+ else
+ return "unknown"
+ end
+ elseif k == "outputfilename" then
+ local name = environment.jobname
+ rawset(environment,k,name)
+ return name
+ end
+ end
+}
+
+setmetatable(environment,mt)
+
+function environment.initializearguments(arg)
+ local arguments, files = { }, { }
+ environment.arguments, environment.files, environment.sortedflags = arguments, files, nil
+ for index=1,#arg do
+ local argument = arg[index]
+ if index > 0 then
+ local flag, value = match(argument,"^%-+(.-)=(.-)$")
+ if flag then
+ arguments[flag] = unquoted(value or "")
+ else
+ flag = match(argument,"^%-+(.+)")
+ if flag then
+ arguments[flag] = true
+ else
+ files[#files+1] = argument
+ end
+ end
+ end
+ end
+ environment.ownname = environment.ownname or arg[0] or 'unknown.lua'
+end
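+
+-- a sketch of the resulting mapping (the flag and file names are made up):
+--
+--~ environment.initializearguments { "--verbose", "--mode=draft", "myfile" }
+--~ -- environment.arguments : { verbose = true, mode = "draft" }
+--~ -- environment.files     : { "myfile" }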
+
+function environment.setargument(name,value)
+ environment.arguments[name] = value
+end
+
+-- todo: defaults, better checks e.g. on type (boolean versus string)
+--
+-- tricky: too many hits when we support partials unless we add
+-- a registration of arguments so from now on we have 'partial'
+
+function environment.argument(name,partial)
+ local arguments, sortedflags = environment.arguments, environment.sortedflags
+ if arguments[name] then
+ return arguments[name]
+ elseif partial then
+ if not sortedflags then
+ sortedflags = allocate(table.sortedkeys(arguments))
+ for k=1,#sortedflags do
+ sortedflags[k] = "^" .. sortedflags[k]
+ end
+ environment.sortedflags = sortedflags
+ end
+ -- example of potential clash: ^mode ^modefile
+ for k=1,#sortedflags do
+ local v = sortedflags[k]
+ if find(name,v) then
+ return arguments[sub(v,2,#v)]
+ end
end
end
- return user, system
+ return nil
end
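+
+-- a sketch of the partial lookup (the flag names are made up):
+--
+--~ environment.setargument("mode","draft")
+--~ environment.argument("mode")          -- "draft", an exact hit
+--~ environment.argument("modeset",true)  -- "draft" as well, since "^mode" matches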
-function setters.show(t)
- commands.writestatus("","")
- local list = setters.list(t)
- for k=1,#list do
- commands.writestatus(t.name,list[k])
+function environment.splitarguments(separator) -- rather special, cut-off before separator
+ local done, before, after = false, { }, { }
+ local originalarguments = environment.originalarguments
+ for k=1,#originalarguments do
+ local v = originalarguments[k]
+ if not done and v == separator then
+ done = true
+ elseif done then
+ after[#after+1] = v
+ else
+ before[#before+1] = v
+ end
end
- commands.writestatus("","")
+ return before, after
end
--- we could have used a bit of oo and the trackers:enable syntax but
--- there is already a lot of code around using the singular tracker
+function environment.reconstructcommandline(arg,noquote)
+ arg = arg or environment.originalarguments
+ if noquote and #arg == 1 then
+ -- we could just do: return unquoted(resolvers.resolve(arg[i]))
+ local a = arg[1]
+ a = resolvers.resolve(a)
+ a = unquoted(a)
+ return a
+ elseif #arg > 0 then
+ local result = { }
+ for i=1,#arg do
+ -- we could just do: result[#result+1] = format("%q",unquoted(resolvers.resolve(arg[i])))
+ local a = arg[i]
+ a = resolvers.resolve(a)
+ a = unquoted(a)
+ a = gsub(a,'"','\\"') -- tricky
+ if find(a," ") then
+ result[#result+1] = quoted(a)
+ else
+ result[#result+1] = a
+ end
+ end
+ return concat(result," ")
+ else
+ return ""
+ end
+end
--- we could make this into a module
-function setters.new(name)
- local t
- t = {
- data = { },
- name = name,
- enable = function(...) setters.enable (t,...) end,
- disable = function(...) setters.disable (t,...) end,
- register = function(...) setters.register(t,...) end,
- list = function(...) setters.list (t,...) end,
- show = function(...) setters.show (t,...) end,
- }
- setters.data[name] = t
- return t
-end
+if arg then
-trackers = setters.new("trackers")
-directives = setters.new("directives")
-experiments = setters.new("experiments")
+ -- new, reconstruct quoted snippets (maybe better just remove the " then and add them later)
+ local newarg, instring = { }, false
--- nice trick: we overload two of the directives related functions with variants that
--- do tracing (itself using a tracker) .. proof of concept
+ for index=1,#arg do
+ local argument = arg[index]
+ if find(argument,"^\"") then
+ newarg[#newarg+1] = gsub(argument,"^\"","")
+ if not find(argument,"\"$") then
+ instring = true
+ end
+ elseif find(argument,"\"$") then
+ newarg[#newarg] = newarg[#newarg] .. " " .. gsub(argument,"\"$","")
+ instring = false
+ elseif instring then
+ newarg[#newarg] = newarg[#newarg] .. " " .. argument
+ else
+ newarg[#newarg+1] = argument
+ end
+ end
+ for i=1,-5,-1 do
+ newarg[i] = arg[i]
+ end
-local trace_directives = false local trace_directives = false trackers.register("system.directives", function(v) trace_directives = v end)
-local trace_experiments = false local trace_experiments = false trackers.register("system.experiments", function(v) trace_experiments = v end)
+ environment.initializearguments(newarg)
-local e = directives.enable
-local d = directives.disable
+ environment.originalarguments = mark(newarg)
+ environment.rawarguments = mark(arg)
-function directives.enable(...)
- commands.writestatus("directives","enabling: %s",concat({...}," "))
- e(...)
-end
+ arg = { } -- prevent duplicate handling
-function directives.disable(...)
- commands.writestatus("directives","disabling: %s",concat({...}," "))
- d(...)
end
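+
+-- a sketch of the effect of this reconstruction (the values are made up): the
+-- shell-splitted pair [["--find=some]] and [[name"]] is glued back together
+-- into the single argument [[--find=some name]] before initializearguments runs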
-local e = experiments.enable
-local d = experiments.disable
+-- weird place ... depends on a not yet loaded module
-function experiments.enable(...)
- commands.writestatus("experiments","enabling: %s",concat({...}," "))
- e(...)
+function environment.texfile(filename)
+ return resolvers.findfile(filename,'tex')
end
-function experiments.disable(...)
- commands.writestatus("experiments","disabling: %s",concat({...}," "))
- d(...)
+function environment.luafile(filename)
+ local resolved = resolvers.findfile(filename,'tex') or ""
+ if resolved ~= "" then
+ return resolved
+ end
+ resolved = resolvers.findfile(filename,'texmfscripts') or ""
+ if resolved ~= "" then
+ return resolved
+ end
+ return resolvers.findfile(filename,'luatexlibs') or ""
end
--- a useful example
+environment.loadedluacode = loadfile -- can be overloaded
-directives.register("system.nostatistics", function(v)
- statistics.enable = not v
-end)
+function environment.luafilechunk(filename,silent) -- used for loading lua bytecode in the format
+ filename = file.replacesuffix(filename, "lua")
+ local fullname = environment.luafile(filename)
+ if fullname and fullname ~= "" then
+ local data = environment.loadedluacode(fullname)
+ if trace_locating then
+ report_lua("loading file %s%s", fullname, not data and " failed" or "")
+ elseif not silent then
+ texio.write("<",data and "+ " or "- ",fullname,">")
+ end
+ return data
+ else
+ if trace_locating then
+ report_lua("unknown file %s", filename)
+ end
+ return nil
+ end
+end
+
+-- the next ones can use the previous ones / combine
+function environment.loadluafile(filename, version)
+ local lucname, luaname, chunk
+ local basename = file.removesuffix(filename)
+ if basename == filename then
+ lucname, luaname = basename .. ".luc", basename .. ".lua"
+ else
+ lucname, luaname = nil, basename -- forced suffix
+ end
+ -- when not overloaded by explicit suffix we look for a luc file first
+ local fullname = (lucname and environment.luafile(lucname)) or ""
+ if fullname ~= "" then
+ if trace_locating then
+ report_lua("loading %s", fullname)
+ end
+ chunk = loadfile(fullname) -- this way we don't need a file exists check
+ end
+ if chunk then
+ assert(chunk)()
+ if version then
+ -- we check if the version number of this chunk matches
+ local v = version -- can be nil
+ if modules and modules[filename] then
+ v = modules[filename].version -- new method
+ elseif versions and versions[filename] then
+ v = versions[filename] -- old method
+ end
+ if v == version then
+ return true
+ else
+ if trace_locating then
+ report_lua("version mismatch for %s: lua=%s, luc=%s", filename, v, version)
+ end
+ environment.loadluafile(filename)
+ end
+ else
+ return true
+ end
+ end
+ fullname = (luaname and environment.luafile(luaname)) or ""
+ if fullname ~= "" then
+ if trace_locating then
+ report_lua("loading %s", fullname)
+ end
+ chunk = loadfile(fullname) -- this way we don't need a file exists check
+ if not chunk then
+ if trace_locating then
+ report_lua("unknown file %s", filename)
+ end
+ else
+ assert(chunk)()
+ return true
+ end
+ end
+ return false
+end
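+
+-- a usage sketch (the module name and version are made up): prefer "foo.luc",
+-- check its version, and fall back to "foo.lua" when that fails:
+--
+--~ if not environment.loadluafile("foo",1.001) then
+--~     report_lua("unable to load foo")
+--~ end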
end -- of closure
@@ -3991,6 +6263,8 @@ if not modules then modules = { } end modules ['lxml-tab'] = {
local trace_entities = false trackers.register("xml.entities", function(v) trace_entities = v end)
+local report_xml = logs and logs.reporter("xml","core") or function(...) print(format(...)) end
+
--[[ldx--
The parser used here is inspired by the variant discussed in the lua book, but
handles comments and processing instructions, has a different structure, provides
@@ -4001,11 +6275,15 @@ The find based parser can be found in l-xml-edu.lua along with other older code.
Beware, the interface may change. For instance at, ns, tg, dt may get more
verbose names. Once the code is stable we will also remove some tracing and
optimize the code.
+
+I might even decide to reimplement the parser using the latest trickery
+as the current variant was written when lpeg showed up and it's easier now to
+build tables in one go.
--ldx]]--
xml = xml or { }
+local xml = xml
---~ local xml = xml
local concat, remove, insert = table.concat, table.remove, table.insert
local type, next, setmetatable, getmetatable, tonumber = type, next, setmetatable, getmetatable, tonumber
@@ -4129,7 +6407,7 @@ local dcache, hcache, acache = { }, { }, { }
local mt = { }
-function initialize_mt(root)
+local function initialize_mt(root)
mt = { __index = root } -- will be redefined later
end
@@ -4137,8 +6415,8 @@ function xml.setproperty(root,k,v)
getmetatable(root).__index[k] = v
end
-function xml.check_error(top,toclose)
- return ""
+function xml.checkerror(top,toclose)
+ return "" -- can be set
end
local function add_attribute(namespace,tag,value)
@@ -4194,9 +6472,9 @@ local function add_end(spacing, namespace, tag)
local toclose = remove(stack)
top = stack[#stack]
if #stack < 1 then
- errorstr = format("nothing to close with %s %s", tag, xml.check_error(top,toclose) or "")
+ errorstr = format("nothing to close with %s %s", tag, xml.checkerror(top,toclose) or "")
elseif toclose.tg ~= tag then -- no namespace check
- errorstr = format("unable to close %s with %s %s", toclose.tg, tag, xml.check_error(top,toclose) or "")
+ errorstr = format("unable to close %s with %s %s", toclose.tg, tag, xml.checkerror(top,toclose) or "")
end
dt = top.dt
dt[#dt+1] = toclose
@@ -4233,24 +6511,29 @@ local reported_attribute_errors = { }
local function attribute_value_error(str)
if not reported_attribute_errors[str] then
- logs.report("xml","invalid attribute value: %q",str)
+ report_xml("invalid attribute value: %q",str)
reported_attribute_errors[str] = true
at._error_ = str
end
return str
end
+
local function attribute_specification_error(str)
if not reported_attribute_errors[str] then
- logs.report("xml","invalid attribute specification: %q",str)
+ report_xml("invalid attribute specification: %q",str)
reported_attribute_errors[str] = true
at._error_ = str
end
return str
end
-function xml.unknown_dec_entity_format(str) return (str == "" and "&error;") or format("&%s;",str) end
-function xml.unknown_hex_entity_format(str) return format("&#x%s;",str) end
-function xml.unknown_any_entity_format(str) return format("&#x%s;",str) end
+xml.placeholders = {
+ unknown_dec_entity = function(str) return (str == "" and "&error;") or format("&%s;",str) end,
+ unknown_hex_entity = function(str) return format("&#x%s;",str) end,
+ unknown_any_entity = function(str) return format("&#x%s;",str) end,
+}
+
+local placeholders = xml.placeholders
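+
+-- these placeholders can be overloaded; a sketch (the replacement text is made up):
+--
+--~ function placeholders.unknown_any_entity(str)
+--~     return "[unknown entity " .. str .. "]"
+--~ end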
local function fromhex(s)
local n = tonumber(s,16)
@@ -4304,18 +6587,18 @@ local function handle_hex_entity(str)
h = unify_predefined and predefined_unified[n]
if h then
if trace_entities then
- logs.report("xml","utfize, converting hex entity %s; into %s",str,h)
+ report_xml("utfize, converting hex entity %s; into %s",str,h)
end
elseif utfize then
- h = (n and utfchar(n)) or xml.unknown_hex_entity_format(str) or ""
+ h = (n and utfchar(n)) or placeholders.unknown_hex_entity(str) or ""
if not n then
- logs.report("xml","utfize, ignoring hex entity %s;",str)
+ report_xml("utfize, ignoring hex entity %s;",str)
elseif trace_entities then
- logs.report("xml","utfize, converting hex entity %s; into %s",str,h)
+ report_xml("utfize, converting hex entity %s; into %s",str,h)
end
else
if trace_entities then
- logs.report("xml","found entity %s;",str)
+ report_xml("found entity %s;",str)
end
h = "" .. str .. ";"
end
@@ -4331,18 +6614,18 @@ local function handle_dec_entity(str)
d = unify_predefined and predefined_unified[n]
if d then
if trace_entities then
- logs.report("xml","utfize, converting dec entity %s; into %s",str,d)
+ report_xml("utfize, converting dec entity %s; into %s",str,d)
end
elseif utfize then
- d = (n and utfchar(n)) or xml.unknown_dec_entity_format(str) or ""
+ d = (n and utfchar(n)) or placeholders.unknown_dec_entity(str) or ""
if not n then
- logs.report("xml","utfize, ignoring dec entity %s;",str)
+ report_xml("utfize, ignoring dec entity %s;",str)
elseif trace_entities then
- logs.report("xml","utfize, converting dec entity %s; into %s",str,h)
+ report_xml("utfize, converting dec entity %s; into %s",str,h)
end
else
if trace_entities then
- logs.report("xml","found entity %s;",str)
+ report_xml("found entity %s;",str)
end
d = "" .. str .. ";"
end
@@ -4367,20 +6650,21 @@ local function handle_any_entity(str)
end
if a then
if trace_entities then
- logs.report("xml","resolved entity &%s; -> %s (internal)",str,a)
+ report_xml("resolved entity &%s; -> %s (internal)",str,a)
end
a = lpegmatch(parsedentity,a) or a
else
- if xml.unknown_any_entity_format then
- a = xml.unknown_any_entity_format(str) or ""
+ local unknown_any_entity = placeholders.unknown_any_entity
+ if unknown_any_entity then
+ a = unknown_any_entity(str) or ""
end
if a then
if trace_entities then
- logs.report("xml","resolved entity &%s; -> %s (external)",str,a)
+ report_xml("resolved entity &%s; -> %s (external)",str,a)
end
else
if trace_entities then
- logs.report("xml","keeping entity &%s;",str)
+ report_xml("keeping entity &%s;",str)
end
if str == "" then
a = "&error;"
@@ -4392,7 +6676,7 @@ local function handle_any_entity(str)
acache[str] = a
elseif trace_entities then
if not acache[str] then
- logs.report("xml","converting entity &%s; into %s",str,a)
+ report_xml("converting entity &%s; into %s",str,a)
acache[str] = a
end
end
@@ -4401,7 +6685,7 @@ local function handle_any_entity(str)
local a = acache[str]
if not a then
if trace_entities then
- logs.report("xml","found entity &%s;",str)
+ report_xml("found entity &%s;",str)
end
a = resolve_predefined and predefined_simplified[str]
if a then
@@ -4420,7 +6704,7 @@ local function handle_any_entity(str)
end
local function handle_end_entity(chr)
- logs.report("xml","error in entity, %q found instead of ';'",chr)
+ report_xml("error in entity, %q found instead of ';'",chr)
end
local space = S(' \r\n\t')
@@ -4555,7 +6839,7 @@ local function xmlconvert(data, settings)
resolve_predefined = settings.resolve_predefined_entities -- in case we have escaped entities
unify_predefined = settings.unify_predefined_entities -- &#038; -> &amp;
cleanup = settings.text_cleanup
- stack, top, at, xmlns, errorstr, result, entities = { }, { }, { }, { }, nil, nil, settings.entities or { }
+ stack, top, at, xmlns, errorstr, entities = { }, { }, { }, { }, nil, settings.entities or { }
acache, hcache, dcache = { }, { }, { } -- not stored
reported_attribute_errors = { }
if settings.parent_root then
@@ -4583,16 +6867,17 @@ local function xmlconvert(data, settings)
else
errorstr = "invalid xml file - no text at all"
end
+ local result
if errorstr and errorstr ~= "" then
result = { dt = { { ns = "", tg = "error", dt = { errorstr }, at={ }, er = true } } }
setmetatable(stack, mt)
- local error_handler = settings.error_handler
- if error_handler == false then
+ local errorhandler = settings.error_handler
+ if errorhandler == false then
-- no error message
else
- error_handler = error_handler or xml.error_handler
- if error_handler then
- xml.error_handler("load",errorstr)
+ errorhandler = errorhandler or xml.errorhandler
+ if errorhandler then
+ xml.errorhandler("load",errorstr)
end
end
else
@@ -4652,7 +6937,7 @@ function xml.is_valid(root)
return root and not root.error
end
-xml.error_handler = (logs and logs.report) or (input and logs.report) or print
+xml.errorhandler = report_xml
--[[ldx--
We cannot load an xml file from a filehandle so we need to load
@@ -4763,7 +7048,7 @@ local function verbose_element(e,handlers)
ats[#ats+1] = format('%s=%q',k,v)
end
end
- if ern and trace_remap and ern ~= ens then
+ if ern and trace_entities and ern ~= ens then
ens = ern
end
if ens ~= "" then
@@ -4868,7 +7153,7 @@ local function serialize(e,handlers,...)
-- elseif type(e) == "string" then
-- functions["@tx@"](e,handlers)
else
- functions["@dc@"](e,handlers)
+ functions["@dc@"](e,handlers) -- dc ?
end
if finalize then
return finalize()
@@ -4894,7 +7179,7 @@ local function newhandlers(settings)
if settings then
for k,v in next, settings do
if type(v) == "table" then
- tk = t[k] if not tk then tk = { } t[k] = tk end
+ local tk = t[k] if not tk then tk = { } t[k] = tk end
for kk,vv in next, v do
tk[kk] = vv
end
@@ -4906,6 +7191,7 @@ local function newhandlers(settings)
handlers[settings.name] = t
end
end
+ utilities.storage.mark(t)
return t
end
@@ -5005,7 +7291,7 @@ local function xmltext(root) -- inline
return (root and xmltostring(root)) or ""
end
-function initialize_mt(root)
+initialize_mt = function(root) -- redefinition
mt = { __tostring = xmltext, __index = root }
end
@@ -5040,7 +7326,6 @@ xml.string = xmlstring
A few helpers:
--ldx]]--
---~ xmlsetproperty(root,"settings",settings)
function xml.settings(e)
while e do
@@ -5150,6 +7435,7 @@ function xml.makestandalone(root)
end
end
end
+ return root
end
@@ -5166,11 +7452,14 @@ if not modules then modules = { } end modules ['lxml-pth'] = {
}
-- e.ni is only valid after a filter run
+-- todo: B/C/[get first match]
local concat, remove, insert = table.concat, table.remove, table.insert
local type, next, tonumber, tostring, setmetatable, loadstring = type, next, tonumber, tostring, setmetatable, loadstring
local format, upper, lower, gmatch, gsub, find, rep = string.format, string.upper, string.lower, string.gmatch, string.gsub, string.find, string.rep
-local lpegmatch = lpeg.match
+local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
+
+local setmetatableindex = table.setmetatableindex
-- beware, this is not xpath ... e.g. position is different (currently) and
-- we have reverse-sibling as reversed preceding sibling
@@ -5202,23 +7491,33 @@ local trace_lpath = false if trackers then trackers.register("xml.path",
local trace_lparse = false if trackers then trackers.register("xml.parse", function(v) trace_lparse = v end) end
local trace_lprofile = false if trackers then trackers.register("xml.profile", function(v) trace_lpath = v trace_lparse = v trace_lprofile = v end) end
+local report_lpath = logs.reporter("xml","lpath")
+
--[[ldx--
We've now arrived at an interesting part: accessing the tree using a subset
of xpath and since we're not compatible we call it lpath. We
will explain more about its usage in other documents.
--ldx]]--
+local xml = xml
+
local lpathcalls = 0 function xml.lpathcalls () return lpathcalls end
local lpathcached = 0 function xml.lpathcached() return lpathcached end
-xml.functions = xml.functions or { } -- internal
-xml.expressions = xml.expressions or { } -- in expressions
-xml.finalizers = xml.finalizers or { } -- fast do-with ... (with return value other than collection)
-xml.specialhandler = xml.specialhandler or { }
+xml.functions = xml.functions or { } -- internal
+local functions = xml.functions
+
+xml.expressions = xml.expressions or { } -- in expressions
+local expressions = xml.expressions
+
+xml.finalizers = xml.finalizers or { } -- fast do-with ... (with return value other than collection)
+local finalizers = xml.finalizers
-local functions = xml.functions
-local expressions = xml.expressions
-local finalizers = xml.finalizers
+xml.specialhandler = xml.specialhandler or { }
+local specialhandler = xml.specialhandler
+
+lpegpatterns.xml = lpegpatterns.xml or { }
+local xmlpatterns = lpegpatterns.xml
finalizers.xml = finalizers.xml or { }
finalizers.tex = finalizers.tex or { }
@@ -5228,14 +7527,14 @@ local function fallback (t, name)
if fn then
t[name] = fn
else
- logs.report("xml","unknown sub finalizer '%s'",tostring(name))
+ report_lpath("unknown sub finalizer '%s'",tostring(name))
fn = function() end
end
return fn
end
-setmetatable(finalizers.xml, { __index = fallback })
-setmetatable(finalizers.tex, { __index = fallback })
+setmetatableindex(finalizers.xml, fallback)
+setmetatableindex(finalizers.tex, fallback)
xml.defaultprotocol = "xml"
@@ -5283,102 +7582,106 @@ apply_axis['root'] = function(list)
rt = ll
end
end
- collected[#collected+1] = rt
+ collected[l] = rt
end
return collected
end
apply_axis['self'] = function(list)
---~ local collected = { }
---~ for l=1,#list do
---~ collected[#collected+1] = list[l]
---~ end
---~ return collected
return list
end
apply_axis['child'] = function(list)
- local collected = { }
+ local collected, c = { }, 0
for l=1,#list do
local ll = list[l]
local dt = ll.dt
- local en = 0
- for k=1,#dt do
- local dk = dt[k]
- if dk.tg then
- collected[#collected+1] = dk
- dk.ni = k -- refresh
- en = en + 1
- dk.ei = en
+ if dt then -- weird that this is needed
+ local en = 0
+ for k=1,#dt do
+ local dk = dt[k]
+ if dk.tg then
+ c = c + 1
+ collected[c] = dk
+ dk.ni = k -- refresh
+ en = en + 1
+ dk.ei = en
+ end
end
+ ll.en = en
end
- ll.en = en
end
return collected
end
-local function collect(list,collected)
+local function collect(list,collected,c)
local dt = list.dt
if dt then
local en = 0
for k=1,#dt do
local dk = dt[k]
if dk.tg then
- collected[#collected+1] = dk
+ c = c + 1
+ collected[c] = dk
dk.ni = k -- refresh
en = en + 1
dk.ei = en
- collect(dk,collected)
+ c = collect(dk,collected,c)
end
end
list.en = en
end
+ return c
end
apply_axis['descendant'] = function(list)
- local collected = { }
+ local collected, c = { }, 0
for l=1,#list do
- collect(list[l],collected)
+ c = collect(list[l],collected,c)
end
return collected
end
-local function collect(list,collected)
+local function collect(list,collected,c)
local dt = list.dt
if dt then
local en = 0
for k=1,#dt do
local dk = dt[k]
if dk.tg then
- collected[#collected+1] = dk
+ c = c + 1
+ collected[c] = dk
dk.ni = k -- refresh
en = en + 1
dk.ei = en
- collect(dk,collected)
+ c = collect(dk,collected,c)
end
end
list.en = en
end
+ return c
end
apply_axis['descendant-or-self'] = function(list)
- local collected = { }
+ local collected, c = { }, 0
for l=1,#list do
local ll = list[l]
if ll.special ~= true then -- catch double root
- collected[#collected+1] = ll
+ c = c + 1
+ collected[c] = ll
end
- collect(ll,collected)
+ c = collect(ll,collected,c)
end
return collected
end
apply_axis['ancestor'] = function(list)
- local collected = { }
+ local collected, c = { }, 0
for l=1,#list do
local ll = list[l]
while ll do
ll = ll.__p__
if ll then
- collected[#collected+1] = ll
+ c = c + 1
+ collected[c] = ll
end
end
end
@@ -5386,14 +7689,16 @@ apply_axis['ancestor'] = function(list)
end
apply_axis['ancestor-or-self'] = function(list)
- local collected = { }
+ local collected, c = { }, 0
for l=1,#list do
local ll = list[l]
- collected[#collected+1] = ll
+ c = c + 1
+ collected[c] = ll
while ll do
ll = ll.__p__
if ll then
- collected[#collected+1] = ll
+ c = c + 1
+ collected[c] = ll
end
end
end
@@ -5401,11 +7706,12 @@ apply_axis['ancestor-or-self'] = function(list)
end
apply_axis['parent'] = function(list)
- local collected = { }
+ local collected, c = { }, 0
for l=1,#list do
local pl = list[l].__p__
if pl then
- collected[#collected+1] = pl
+ c = c + 1
+ collected[c] = pl
end
end
return collected
@@ -5420,43 +7726,15 @@ apply_axis['namespace'] = function(list)
end
apply_axis['following'] = function(list) -- incomplete
---~ local collected = { }
---~ for l=1,#list do
---~ local ll = list[l]
---~ local p = ll.__p__
---~ local d = p.dt
---~ for i=ll.ni+1,#d do
---~ local di = d[i]
---~ if type(di) == "table" then
---~ collected[#collected+1] = di
---~ break
---~ end
---~ end
---~ end
---~ return collected
return { }
end
apply_axis['preceding'] = function(list) -- incomplete
---~ local collected = { }
---~ for l=1,#list do
---~ local ll = list[l]
---~ local p = ll.__p__
---~ local d = p.dt
---~ for i=ll.ni-1,1,-1 do
---~ local di = d[i]
---~ if type(di) == "table" then
---~ collected[#collected+1] = di
---~ break
---~ end
---~ end
---~ end
---~ return collected
return { }
end
apply_axis['following-sibling'] = function(list)
- local collected = { }
+ local collected, c = { }, 0
for l=1,#list do
local ll = list[l]
local p = ll.__p__
@@ -5464,7 +7742,8 @@ apply_axis['following-sibling'] = function(list)
for i=ll.ni+1,#d do
local di = d[i]
if type(di) == "table" then
- collected[#collected+1] = di
+ c = c + 1
+ collected[c] = di
end
end
end
@@ -5472,7 +7751,7 @@ apply_axis['following-sibling'] = function(list)
end
apply_axis['preceding-sibling'] = function(list)
- local collected = { }
+ local collected, c = { }, 0
for l=1,#list do
local ll = list[l]
local p = ll.__p__
@@ -5480,7 +7759,8 @@ apply_axis['preceding-sibling'] = function(list)
for i=1,ll.ni-1 do
local di = d[i]
if type(di) == "table" then
- collected[#collected+1] = di
+ c = c + 1
+ collected[c] = di
end
end
end
@@ -5488,7 +7768,7 @@ apply_axis['preceding-sibling'] = function(list)
end
apply_axis['reverse-sibling'] = function(list) -- reverse preceding
- local collected = { }
+ local collected, c = { }, 0
for l=1,#list do
local ll = list[l]
local p = ll.__p__
@@ -5496,7 +7776,8 @@ apply_axis['reverse-sibling'] = function(list) -- reverse preceding
for i=ll.ni-1,1,-1 do
local di = d[i]
if type(di) == "table" then
- collected[#collected+1] = di
+ c = c + 1
+ collected[c] = di
end
end
end
@@ -5522,7 +7803,7 @@ local function apply_nodes(list,directive,nodes)
return { }
end
else
- local collected, m, p = { }, 0, nil
+ local collected, c, m, p = { }, 0, 0, nil
if not nns then -- only check tag
for l=1,#list do
local ll = list[l]
@@ -5531,11 +7812,13 @@ local function apply_nodes(list,directive,nodes)
if directive then
if ntg == ltg then
local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end
- collected[#collected+1], ll.mi = ll, m
+ c = c + 1
+ collected[c], ll.mi = ll, m
end
elseif ntg ~= ltg then
local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end
- collected[#collected+1], ll.mi = ll, m
+ c = c + 1
+ collected[c], ll.mi = ll, m
end
end
end
@@ -5547,11 +7830,13 @@ local function apply_nodes(list,directive,nodes)
if directive then
if lns == nns then
local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end
- collected[#collected+1], ll.mi = ll, m
+ c = c + 1
+ collected[c], ll.mi = ll, m
end
elseif lns ~= nns then
local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end
- collected[#collected+1], ll.mi = ll, m
+ c = c + 1
+ collected[c], ll.mi = ll, m
end
end
end
@@ -5565,11 +7850,13 @@ local function apply_nodes(list,directive,nodes)
if directive then
if ok then
local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end
- collected[#collected+1], ll.mi = ll, m
+ c = c + 1
+ collected[c], ll.mi = ll, m
end
elseif not ok then
local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end
- collected[#collected+1], ll.mi = ll, m
+ c = c + 1
+ collected[c], ll.mi = ll, m
end
end
end
@@ -5577,7 +7864,7 @@ local function apply_nodes(list,directive,nodes)
return collected
end
else
- local collected, m, p = { }, 0, nil
+ local collected, c, m, p = { }, 0, 0, nil
for l=1,#list do
local ll = list[l]
local ltg = ll.tg
@@ -5594,11 +7881,13 @@ local function apply_nodes(list,directive,nodes)
if directive then
if ok then
local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end
- collected[#collected+1], ll.mi = ll, m
+ c = c + 1
+ collected[c], ll.mi = ll, m
end
elseif not ok then
local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end
- collected[#collected+1], ll.mi = ll, m
+ c = c + 1
+ collected[c], ll.mi = ll, m
end
end
end
@@ -5609,12 +7898,13 @@ end
local quit_expression = false
local function apply_expression(list,expression,order)
- local collected = { }
+ local collected, c = { }, 0
quit_expression = false
for l=1,#list do
local ll = list[l]
if expression(list,ll,l,order) then -- nasty, order alleen valid als n=1
- collected[#collected+1] = ll
+ c = c + 1
+ collected[c] = ll
end
if quit_expression then
break
@@ -5653,7 +7943,9 @@ local lp_builtin = P (
P("ns") / "ll.ns"
) * ((spaces * P("(") * spaces * P(")"))/"")
-local lp_attribute = (P("@") + P("attribute::")) / "" * Cc("(ll.at and ll.at['") * R("az","AZ","--","__")^1 * Cc("'])")
+-- for the moment we keep namespaces with attributes
+
+local lp_attribute = (P("@") + P("attribute::")) / "" * Cc("(ll.at and ll.at['") * ((R("az","AZ") + S("-_:"))^1) * Cc("'])")
local lp_fastpos_p = ((P("+")^0 * R("09")^1 * P(-1)) / function(s) return "l==" .. s end)
local lp_fastpos_n = ((P("-") * R("09")^1 * P(-1)) / function(s) return "(" .. s .. "<0 and (#list+".. s .. "==l))" end)
local lp_fastpos = lp_fastpos_n + lp_fastpos_p
@@ -5714,14 +8006,12 @@ local converter = Cs (
)
cleaner = Cs ( (
---~ lp_fastpos +
lp_reserved +
lp_number +
lp_string +
1 )^1 )
---~ expr
local template_e = [[
local expr = xml.expressions
@@ -5772,13 +8062,13 @@ local skip = { }
local function errorrunner_e(str,cnv)
if not skip[str] then
- logs.report("lpath","error in expression: %s => %s",str,cnv)
+ report_lpath("error in expression: %s => %s",str,cnv)
skip[str] = cnv or str
end
return false
end
local function errorrunner_f(str,arg)
- logs.report("lpath","error in finalizer: %s(%s)",str,arg or "")
+ report_lpath("error in finalizer: %s(%s)",str,arg or "")
return false
end
@@ -5829,7 +8119,7 @@ local special_1 = P("*") * Cc(register_auto_descendant) * Cc(register_all_nodes
local special_2 = P("/") * Cc(register_auto_self)
local special_3 = P("") * Cc(register_auto_self)
-local parser = Ct { "patterns", -- can be made a bit faster by moving pattern outside
+local pathparser = Ct { "patterns", -- can be made a bit faster by moving pattern outside
patterns = spaces * V("protocol") * spaces * (
( V("special") * spaces * P(-1) ) +
@@ -5901,6 +8191,8 @@ local parser = Ct { "patterns", -- can be made a bit faster by moving pattern ou
}
+xmlpatterns.pathparser = pathparser
+
local cache = { }
local function nodesettostring(set,nodetest)
@@ -5910,7 +8202,7 @@ local function nodesettostring(set,nodetest)
if not ns or ns == "" then ns = "*" end
if not tg or tg == "" then tg = "*" end
tg = (tg == "@rt@" and "[root]") or format("%s:%s",ns,tg)
- t[#t+1] = (directive and tg) or format("not(%s)",tg)
+ t[i] = (directive and tg) or format("not(%s)",tg)
end
if nodetest == false then
return format("not(%s)",concat(t,"|"))
@@ -5929,7 +8221,7 @@ local function tagstostring(list)
local ns, tg = li.ns, li.tg
if not ns or ns == "" then ns = "*" end
if not tg or tg == "" then tg = "*" end
- t[#t+1] = (tg == "@rt@" and "[root]") or format("%s:%s",ns,tg)
+ t[i] = (tg == "@rt@" and "[root]") or format("%s:%s",ns,tg)
end
return concat(t," ")
end
@@ -5937,16 +8229,16 @@ end
xml.nodesettostring = nodesettostring
-local parse_pattern -- we have a harmless kind of circular reference
+local lpath -- we have a harmless kind of circular reference
+
+local lshowoptions = { functions = false }
local function lshow(parsed)
if type(parsed) == "string" then
- parsed = parse_pattern(parsed)
+ parsed = lpath(parsed)
end
- local s = table.serialize_functions -- ugly
- table.serialize_functions = false -- ugly
- logs.report("lpath","%s://%s => %s",parsed.protocol or xml.defaultprotocol,parsed.pattern,table.serialize(parsed,false))
- table.serialize_functions = s -- ugly
+ report_lpath("%s://%s => %s",parsed.protocol or xml.defaultprotocol,parsed.pattern,
+ table.serialize(parsed,false,lshowoptions))
end
xml.lshow = lshow
@@ -5960,7 +8252,7 @@ local function add_comment(p,str)
end
end
-parse_pattern = function (pattern) -- the gain of caching is rather minimal
+lpath = function (pattern) -- the gain of caching is rather minimal
lpathcalls = lpathcalls + 1
if type(pattern) == "table" then
return pattern
@@ -5969,13 +8261,13 @@ parse_pattern = function (pattern) -- the gain of caching is rather minimal
if parsed then
lpathcached = lpathcached + 1
else
- parsed = lpegmatch(parser,pattern)
+ parsed = lpegmatch(pathparser,pattern)
if parsed then
parsed.pattern = pattern
local np = #parsed
if np == 0 then
parsed = { pattern = pattern, register_self, state = "parsing error" }
- logs.report("lpath","parsing error in '%s'",pattern)
+ report_lpath("parsing error in '%s'",pattern)
lshow(parsed)
else
-- we could have done this with a more complex parser but this
@@ -6017,6 +8309,8 @@ parse_pattern = function (pattern) -- the gain of caching is rather minimal
end
end
+xml.lpath = lpath
+
-- we can move all calls inline and then merge the trace back
-- technically we can combine axis and the next nodes which is
-- what we did before but this a bit cleaner (but slower too)
@@ -6079,32 +8373,32 @@ local function traced_apply(list,parsed,nofparsed,order)
if trace_lparse then
lshow(parsed)
end
- logs.report("lpath", "collecting : %s",parsed.pattern)
- logs.report("lpath", " root tags : %s",tagstostring(list))
- logs.report("lpath", " order : %s",order or "unset")
+ report_lpath("collecting: %s",parsed.pattern)
+ report_lpath("root tags : %s",tagstostring(list))
+ report_lpath("order : %s",order or "unset")
local collected = list
for i=1,nofparsed do
local pi = parsed[i]
local kind = pi.kind
if kind == "axis" then
collected = apply_axis[pi.axis](collected)
- logs.report("lpath", "% 10i : ax : %s",(collected and #collected) or 0,pi.axis)
+ report_lpath("% 10i : ax : %s",(collected and #collected) or 0,pi.axis)
elseif kind == "nodes" then
collected = apply_nodes(collected,pi.nodetest,pi.nodes)
- logs.report("lpath", "% 10i : ns : %s",(collected and #collected) or 0,nodesettostring(pi.nodes,pi.nodetest))
+ report_lpath("% 10i : ns : %s",(collected and #collected) or 0,nodesettostring(pi.nodes,pi.nodetest))
elseif kind == "expression" then
collected = apply_expression(collected,pi.evaluator,order)
- logs.report("lpath", "% 10i : ex : %s -> %s",(collected and #collected) or 0,pi.expression,pi.converted)
+ report_lpath("% 10i : ex : %s -> %s",(collected and #collected) or 0,pi.expression,pi.converted)
elseif kind == "finalizer" then
collected = pi.finalizer(collected)
- logs.report("lpath", "% 10i : fi : %s : %s(%s)",(type(collected) == "table" and #collected) or 0,parsed.protocol or xml.defaultprotocol,pi.name,pi.arguments or "")
+ report_lpath("% 10i : fi : %s : %s(%s)",(type(collected) == "table" and #collected) or 0,parsed.protocol or xml.defaultprotocol,pi.name,pi.arguments or "")
return collected
end
if not collected or #collected == 0 then
local pn = i < nofparsed and parsed[nofparsed]
if pn and pn.kind == "finalizer" then
collected = pn.finalizer(collected)
- logs.report("lpath", "% 10i : fi : %s : %s(%s)",(type(collected) == "table" and #collected) or 0,parsed.protocol or xml.defaultprotocol,pn.name,pn.arguments or "")
+ report_lpath("% 10i : fi : %s : %s(%s)",(type(collected) == "table" and #collected) or 0,parsed.protocol or xml.defaultprotocol,pn.name,pn.arguments or "")
return collected
end
return nil
@@ -6141,8 +8435,11 @@ local function normal_apply(list,parsed,nofparsed,order)
return collected
end
-local function parse_apply(list,pattern)
- -- we avoid an extra call
+
+local function applylpath(list,pattern)
+ if not list then
+ return
+ end
local parsed = cache[pattern]
if parsed then
lpathcalls = lpathcalls + 1
@@ -6151,7 +8448,7 @@ local function parse_apply(list,pattern)
lpathcalls = lpathcalls + 1
parsed = pattern
else
- parsed = parse_pattern(pattern) or pattern
+ parsed = lpath(pattern) or pattern
end
if not parsed then
return
@@ -6160,25 +8457,32 @@ local function parse_apply(list,pattern)
if nofparsed == 0 then
return -- something is wrong
end
- local one = list[1]
- if not one then
- return -- something is wrong
- elseif not trace_lpath then
- return normal_apply(list,parsed,nofparsed,one.mi)
+ if not trace_lpath then
+ return normal_apply ({ list },parsed,nofparsed,list.mi)
elseif trace_lprofile then
- return profiled_apply(list,parsed,nofparsed,one.mi)
+ return profiled_apply({ list },parsed,nofparsed,list.mi)
else
- return traced_apply(list,parsed,nofparsed,one.mi)
+ return traced_apply ({ list },parsed,nofparsed,list.mi)
end
end
+xml.applylpath = applylpath -- takes a table as first argment, which is what xml.filter will do
+
+--[[ldx--
+This is the main filter function. It returns whatever is asked for.
+--ldx]]--
+
+function xml.filter(root,pattern) -- no longer funny attribute handling here
+ return applylpath(root,pattern)
+end
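+
+-- a small sketch (the file name and path pattern are made up):
+--
+--~ local root = xml.convert(io.loaddata("somefile.xml") or "")
+--~ local collected = xml.filter(root,"/document/section")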
+
-- internal (parsed)
expressions.child = function(e,pattern)
- return parse_apply({ e },pattern) -- todo: cache
+ return applylpath(e,pattern) -- todo: cache
end
expressions.count = function(e,pattern)
- local collected = parse_apply({ e },pattern) -- todo: cache
+ local collected = applylpath(e,pattern) -- todo: cache
return (collected and #collected) or 0
end
@@ -6188,7 +8492,7 @@ expressions.oneof = function(s,...) -- slow
local t = {...} for i=1,#t do if s == t[i] then return true end end return false
end
expressions.error = function(str)
- xml.error_handler("unknown function in lpath expression",tostring(str or "?"))
+ xml.errorhandler("unknown function in lpath expression",tostring(str or "?"))
return false
end
expressions.undefined = function(s)
@@ -6217,8 +8521,8 @@ expressions.boolean = toboolean
-- user interface
local function traverse(root,pattern,handle)
- logs.report("xml","use 'xml.selection' instead for '%s'",pattern)
- local collected = parse_apply({ root },pattern)
+ report_lpath("use 'xml.selection' instead for '%s'",pattern)
+ local collected = applylpath(root,pattern)
if collected then
for c=1,#collected do
local e = collected[c]
@@ -6229,7 +8533,7 @@ local function traverse(root,pattern,handle)
end
local function selection(root,pattern,handle)
- local collected = parse_apply({ root },pattern)
+ local collected = applylpath(root,pattern)
if collected then
if handle then
for c=1,#collected do
@@ -6241,19 +8545,9 @@ local function selection(root,pattern,handle)
end
end
-xml.parse_parser = parser
-xml.parse_pattern = parse_pattern
-xml.parse_apply = parse_apply
xml.traverse = traverse -- old method, r, d, k
xml.selection = selection -- new method, simple handle
-local lpath = parse_pattern
-
-xml.lpath = lpath
-
-function xml.cached_patterns()
- return cache
-end
-- generic function finalizer (independant namespace)
@@ -6265,13 +8559,13 @@ local function dofunction(collected,fnc)
f(collected[c])
end
else
- logs.report("xml","unknown function '%s'",fnc)
+ report_lpath("unknown function '%s'",fnc)
end
end
end
-xml.finalizers.xml["function"] = dofunction
-xml.finalizers.tex["function"] = dofunction
+finalizers.xml["function"] = dofunction
+finalizers.tex["function"] = dofunction
-- functions
@@ -6363,14 +8657,6 @@ expressions.tag = function(e,n) -- only tg
end
end
---[[ldx--
-This is the main filter function. It returns whatever is asked for.
---ldx]]--
-
-function xml.filter(root,pattern) -- no longer funny attribute handling here
- return parse_apply({ root },pattern)
-end
-
--[[ldx--
Often using an iterator looks nicer in the code than passing handler
functions. The lua book describes how to use coroutines for that
@@ -6390,7 +8676,7 @@ end
local wrap, yield = coroutine.wrap, coroutine.yield
function xml.elements(root,pattern,reverse) -- r, d, k
- local collected = parse_apply({ root },pattern)
+ local collected = applylpath(root,pattern)
if collected then
if reverse then
return wrap(function() for c=#collected,1,-1 do
@@ -6406,7 +8692,7 @@ function xml.elements(root,pattern,reverse) -- r, d, k
end
function xml.collected(root,pattern,reverse) -- e
- local collected = parse_apply({ root },pattern)
+ local collected = applylpath(root,pattern)
if collected then
if reverse then
return wrap(function() for c=#collected,1,-1 do yield(collected[c]) end end)
@@ -6417,6 +8703,15 @@ function xml.collected(root,pattern,reverse) -- e
return wrap(function() end)
end
+-- handy
+
+function xml.inspect(collection,pattern)
+ pattern = pattern or "."
+ for e in xml.collected(collection,pattern) do
+ report_lpath("pattern %q\n\n%s\n",pattern,xml.tostring(e))
+ end
+end
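+
+-- likewise, iterating over matches of a (made up) pattern on a converted tree 'root':
+--
+--~ for e in xml.collected(root,"/document/section") do
+--~     print(xml.tostring(e))
+--~ end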
+
end -- of closure
@@ -6430,10 +8725,16 @@ if not modules then modules = { } end modules ['lxml-mis'] = {
license = "see context related readme files"
}
+local xml, lpeg, string = xml, lpeg, string
+
local concat = table.concat
local type, next, tonumber, tostring, setmetatable, loadstring = type, next, tonumber, tostring, setmetatable, loadstring
local format, gsub, match = string.format, string.gsub, string.match
-local lpegmatch = lpeg.match
+local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
+local P, S, R, C, V, Cc, Cs = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.V, lpeg.Cc, lpeg.Cs
+
+lpegpatterns.xml = lpegpatterns.xml or { }
+local xmlpatterns = lpegpatterns.xml
--[[ldx--
The following helper functions best belong to the lxml-ini
@@ -6457,9 +8758,8 @@ local function xmlgsub(t,old,new) -- will be replaced
end
end
---~ xml.gsub = xmlgsub
-function xml.strip_leading_spaces(dk,d,k) -- cosmetic, for manual
+function xml.stripleadingspaces(dk,d,k) -- cosmetic, for manual
if d and k then
local dkm = d[k-1]
if dkm and type(dkm) == "string" then
@@ -6469,14 +8769,7 @@ function xml.strip_leading_spaces(dk,d,k) -- cosmetic, for manual
end
end
---~ xml.escapes = { ['&'] = '&amp;', ['<'] = '&lt;', ['>'] = '&gt;', ['"'] = '&quot;' }
---~ xml.unescapes = { } for k,v in next, xml.escapes do xml.unescapes[v] = k end
---~ function xml.escaped (str) return (gsub(str,"(.)" , xml.escapes )) end
---~ function xml.unescaped(str) return (gsub(str,"(&.-;)", xml.unescapes)) end
---~ function xml.cleansed (str) return (gsub(str,"<.->" , '' )) end -- "%b<>"
-
-local P, S, R, C, V, Cc, Cs = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.V, lpeg.Cc, lpeg.Cs
-- 100 * 2500 * "oeps< oeps> oeps&" : gsub:lpeg|lpeg|lpeg
--
@@ -6502,9 +8795,9 @@ local unescaped = Cs(normal * (special * normal)^0)
local cleansed = Cs(((P("<") * (1-P(">"))^0 * P(">"))/"" + 1)^0)
-xml.escaped_pattern = escaped
-xml.unescaped_pattern = unescaped
-xml.cleansed_pattern = cleansed
+xmlpatterns.escaped = escaped
+xmlpatterns.unescaped = unescaped
+xmlpatterns.cleansed = cleansed
function xml.escaped (str) return lpegmatch(escaped,str) end
function xml.unescaped(str) return lpegmatch(unescaped,str) end
@@ -6540,15 +8833,20 @@ if not modules then modules = { } end modules ['lxml-aux'] = {
local trace_manipulations = false trackers.register("lxml.manipulations", function(v) trace_manipulations = v end)
-local xmlparseapply, xmlconvert, xmlcopy, xmlname = xml.parse_apply, xml.convert, xml.copy, xml.name
+local report_xml = logs.reporter("xml")
+
+local xml = xml
+
+local xmlconvert, xmlcopy, xmlname = xml.convert, xml.copy, xml.name
local xmlinheritedconvert = xml.inheritedconvert
+local xmlapplylpath = xml.applylpath
-local type = type
-local insert, remove = table.insert, table.remove
+local type, setmetatable, getmetatable = type, setmetatable, getmetatable
+local insert, remove, fastcopy = table.insert, table.remove, table.fastcopy
local gmatch, gsub = string.gmatch, string.gsub
local function report(what,pattern,c,e)
- logs.report("xml","%s element '%s' (root: '%s', position: %s, index: %s, pattern: %s)",what,xmlname(e),xmlname(e.__p__),c,e.ni,pattern)
+ report_xml("%s element '%s' (root: '%s', position: %s, index: %s, pattern: %s)",what,xmlname(e),xmlname(e.__p__),c,e.ni,pattern)
end
local function withelements(e,handle,depth)
@@ -6602,10 +8900,8 @@ function xml.withelement(e,n,handle) -- slow
end
end
-xml.elements_only = xml.collected
-
-function xml.each_element(root,pattern,handle,reverse)
- local collected = xmlparseapply({ root },pattern)
+function xml.each(root,pattern,handle,reverse)
+ local collected = xmlapplylpath(root,pattern)
if collected then
if reverse then
for c=#collected,1,-1 do
@@ -6620,10 +8916,8 @@ function xml.each_element(root,pattern,handle,reverse)
end
end
-xml.process_elements = xml.each_element
-
-function xml.process_attributes(root,pattern,handle)
- local collected = xmlparseapply({ root },pattern)
+function xml.processattributes(root,pattern,handle)
+ local collected = xmlapplylpath(root,pattern)
if collected and handle then
for c=1,#collected do
handle(collected[c].at)
@@ -6638,12 +8932,12 @@ end
-- are these still needed -> lxml-cmp.lua
-function xml.collect_elements(root, pattern)
- return xmlparseapply({ root },pattern)
+function xml.collect(root, pattern)
+ return xmlapplylpath(root,pattern)
end
-function xml.collect_texts(root, pattern, flatten) -- todo: variant with handle
- local collected = xmlparseapply({ root },pattern)
+function xml.collecttexts(root, pattern, flatten) -- todo: variant with handle
+ local collected = xmlapplylpath(root,pattern)
if collected and flatten then
local xmltostring = xml.tostring
for c=1,#collected do
@@ -6654,18 +8948,19 @@ function xml.collect_texts(root, pattern, flatten) -- todo: variant with handle
end
function xml.collect_tags(root, pattern, nonamespace)
- local collected = xmlparseapply({ root },pattern)
+ local collected = xmlapplylpath(root,pattern)
if collected then
- local t = { }
+ local t, n = { }, 0
for c=1,#collected do
local e = collected[c]
local ns, tg = e.ns, e.tg
+ n = n + 1
if nonamespace then
- t[#t+1] = tg
+ t[n] = tg
elseif ns == "" then
- t[#t+1] = tg
+ t[n] = tg
else
- t[#t+1] = ns .. ":" .. tg
+ t[n] = ns .. ":" .. tg
end
end
return t
@@ -6678,7 +8973,7 @@ end
local no_root = { no_root = true }
-function xml.redo_ni(d)
+local function redo_ni(d)
for k=1,#d do
local dk = d[k]
if type(dk) == "table" then
@@ -6701,12 +8996,7 @@ local function xmltoelement(whatever,root)
return whatever -- string
end
if element then
- --~ if element.ri then
- --~ element = element.dt[element.ri].dt
- --~ else
- --~ element = element.dt
- --~ end
- end
+ end
return element
end
@@ -6724,8 +9014,8 @@ local function copiedelement(element,newparent)
end
end
-function xml.delete_element(root,pattern)
- local collected = xmlparseapply({ root },pattern)
+function xml.delete(root,pattern)
+ local collected = xmlapplylpath(root,pattern)
if collected then
for c=1,#collected do
local e = collected[c]
@@ -6736,15 +9026,15 @@ function xml.delete_element(root,pattern)
end
local d = p.dt
remove(d,e.ni)
- xml.redo_ni(d) -- can be made faster and inlined
+ redo_ni(d) -- can be made faster and inlined
end
end
end
end
-function xml.replace_element(root,pattern,whatever)
+function xml.replace(root,pattern,whatever)
local element = root and xmltoelement(whatever,root)
- local collected = element and xmlparseapply({ root },pattern)
+ local collected = element and xmlapplylpath(root,pattern)
if collected then
for c=1,#collected do
local e = collected[c]
@@ -6755,15 +9045,50 @@ function xml.replace_element(root,pattern,whatever)
end
local d = p.dt
d[e.ni] = copiedelement(element,p)
- xml.redo_ni(d) -- probably not needed
+ redo_ni(d) -- probably not needed
+ end
+ end
+ end
+end
+
+local function wrap(e,wrapper)
+ local t = {
+ rn = e.rn,
+ tg = e.tg,
+ ns = e.ns,
+ at = e.at,
+ dt = e.dt,
+ __p__ = e,
+ }
+ setmetatable(t,getmetatable(e))
+ e.rn = wrapper.rn or e.rn or ""
+ e.tg = wrapper.tg or e.tg or ""
+ e.ns = wrapper.ns or e.ns or ""
+ e.at = fastcopy(wrapper.at)
+ e.dt = { t }
+end
+
+function xml.wrap(root,pattern,whatever)
+ if whatever then
+ local wrapper = xmltoelement(whatever,root)
+ local collected = xmlapplylpath(root,pattern)
+ if collected then
+ for c=1,#collected do
+ local e = collected[c]
+ if trace_manipulations then
+ report('wrapping',pattern,c,e)
+ end
+ wrap(e,wrapper)
end
end
+ else
+ wrap(root,xmltoelement(pattern))
end
end
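+
+-- a sketch (the element names are made up): wrap every item element of a
+-- converted tree 'root' in an extra wrapper element, or wrap the root itself
+-- when no pattern is given:
+--
+--~ xml.wrap(root,"/document/item","<wrapper/>")
+--~ xml.wrap(root,"<wrapper/>")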
local function inject_element(root,pattern,whatever,prepend)
local element = root and xmltoelement(whatever,root)
- local collected = element and xmlparseapply({ root },pattern)
+ local collected = element and xmlapplylpath(root,pattern)
if collected then
for c=1,#collected do
local e = collected[c]
@@ -6778,15 +9103,17 @@ local function inject_element(root,pattern,whatever,prepend)
else
be, af = edt, cp
end
+ local bn = #be
for i=1,#af do
- be[#be+1] = af[i]
+ bn = bn + 1
+ be[bn] = af[i]
end
if rri then
r.dt[rri].dt = be
else
d[k].dt = be
end
- xml.redo_ni(d)
+ redo_ni(d)
end
end
end
@@ -6794,7 +9121,7 @@ end
local function insert_element(root,pattern,whatever,before) -- todo: element als functie
local element = root and xmltoelement(whatever,root)
- local collected = element and xmlparseapply({ root },pattern)
+ local collected = element and xmlapplylpath(root,pattern)
if collected then
for c=1,#collected do
local e = collected[c]
@@ -6804,24 +9131,23 @@ local function insert_element(root,pattern,whatever,before) -- todo: element als
k = k + 1
end
insert(d,k,copiedelement(element,r))
- xml.redo_ni(d)
+ redo_ni(d)
end
end
end
-xml.insert_element = insert_element
-xml.insert_element_after = insert_element
-xml.insert_element_before = function(r,p,e) insert_element(r,p,e,true) end
-xml.inject_element = inject_element
-xml.inject_element_after = inject_element
-xml.inject_element_before = function(r,p,e) inject_element(r,p,e,true) end
+xml.insert_element = insert_element
+xml.insertafter = insert_element
+xml.insertbefore = function(r,p,e) insert_element(r,p,e,true) end
+xml.injectafter = inject_element
+xml.injectbefore = function(r,p,e) inject_element(r,p,e,true) end
local function include(xmldata,pattern,attribute,recursive,loaddata)
-- parse="text" (default: xml), encoding="" (todo)
-- attribute = attribute or 'href'
pattern = pattern or 'include'
loaddata = loaddata or io.loaddata
- local collected = xmlparseapply({ xmldata },pattern)
+ local collected = xmlapplylpath(xmldata,pattern)
if collected then
for c=1,#collected do
local ek = collected[c]
@@ -6830,7 +9156,7 @@ local function include(xmldata,pattern,attribute,recursive,loaddata)
local ekat = ek.at
local epdt = ek.__p__.dt
if not attribute or attribute == "" then
- name = (type(ekdt) == "table" and ekdt[1]) or ekdt -- ckeck, probably always tab or str
+ name = (type(ekdt) == "table" and ekdt[1]) or ekdt -- check, probably always tab or str
end
if not name then
for a in gmatch(attribute or "href","([^|]+)") do
@@ -6845,9 +9171,6 @@ local function include(xmldata,pattern,attribute,recursive,loaddata)
-- for the moment hard coded
epdt[ek.ni] = xml.escaped(data) -- d[k] = xml.escaped(data)
else
---~ local settings = xmldata.settings
---~ settings.parent_root = xmldata -- to be tested
---~ local xi = xmlconvert(data,settings)
local xi = xmlinheritedconvert(data,xmldata)
if not xi then
epdt[ek.ni] = "" -- xml.empty(d,k)
@@ -6864,136 +9187,88 @@ end
xml.include = include
---~ local function manipulate(xmldata,pattern,manipulator) -- untested and might go away
---~ local collected = xmlparseapply({ xmldata },pattern)
---~ if collected then
---~ local xmltostring = xml.tostring
---~ for c=1,#collected do
---~ local e = collected[c]
---~ local data = manipulator(xmltostring(e))
---~ if data == "" then
---~ epdt[e.ni] = ""
---~ else
---~ local xi = xmlinheritedconvert(data,xmldata)
---~ if not xi then
---~ epdt[e.ni] = ""
---~ else
---~ epdt[e.ni] = xml.body(xi) -- xml.assign(d,k,xi)
---~ end
---~ end
---~ end
---~ end
---~ end
-
---~ xml.manipulate = manipulate
-
-function xml.strip_whitespace(root, pattern, nolines) -- strips all leading and trailing space !
- local collected = xmlparseapply({ root },pattern)
- if collected then
- for i=1,#collected do
- local e = collected[i]
- local edt = e.dt
- if edt then
- local t = { }
- for i=1,#edt do
- local str = edt[i]
- if type(str) == "string" then
- if str == "" then
- -- stripped
- else
- if nolines then
- str = gsub(str,"[ \n\r\t]+"," ")
- end
- if str == "" then
- -- stripped
- else
- t[#t+1] = str
- end
- end
+local function stripelement(e,nolines,anywhere)
+ local edt = e.dt
+ if edt then
+ if anywhere then
+ local t, n = { }, 0
+ for e=1,#edt do
+ local str = edt[e]
+ if type(str) ~= "string" then
+ n = n + 1
+ t[n] = str
+ elseif str ~= "" then
+ -- todo: lpeg for each case
+ if nolines then
+ str = gsub(str,"%s+"," ")
+ end
+ str = gsub(str,"^%s*(.-)%s*$","%1")
+ if str ~= "" then
+ n = n + 1
+ t[n] = str
+ end
+ end
+ end
+ e.dt = t
+ else
+ -- we can assume a regular sparse xml table with no successive strings
+ -- otherwise we should use a while loop
+ if #edt > 0 then
+ -- strip front
+ local str = edt[1]
+ if type(str) ~= "string" then
+ -- nothing
+ elseif str == "" then
+ remove(edt,1)
+ else
+ if nolines then
+ str = gsub(str,"%s+"," ")
+ end
+ str = gsub(str,"^%s+","")
+ if str == "" then
+ remove(edt,1)
+ else
+ edt[1] = str
+ end
+ end
+ end
+ local nedt = #edt
+ if nedt > 0 then
+ -- strip end
+ local str = edt[nedt]
+ if type(str) ~= "string" then
+ -- nothing
+ elseif str == "" then
+ remove(edt)
+ else
+ if nolines then
+ str = gsub(str,"%s+"," ")
+ end
+ str = gsub(str,"%s+$","")
+ if str == "" then
+ remove(edt)
else
- --~ str.ni = i
- t[#t+1] = str
+ edt[nedt] = str
end
end
- e.dt = t
end
end
end
+ return e -- convenient
end
-function xml.strip_whitespace(root, pattern, nolines, anywhere) -- strips all leading and trailing spacing
- local collected = xmlparseapply({ root },pattern) -- beware, indices no longer are valid now
+xml.stripelement = stripelement
+
+function xml.strip(root,pattern,nolines,anywhere) -- strips all leading and trailing spacing
+ local collected = xmlapplylpath(root,pattern) -- beware, indices no longer are valid now
if collected then
for i=1,#collected do
- local e = collected[i]
- local edt = e.dt
- if edt then
- if anywhere then
- local t = { }
- for e=1,#edt do
- local str = edt[e]
- if type(str) ~= "string" then
- t[#t+1] = str
- elseif str ~= "" then
- -- todo: lpeg for each case
- if nolines then
- str = gsub(str,"%s+"," ")
- end
- str = gsub(str,"^%s*(.-)%s*$","%1")
- if str ~= "" then
- t[#t+1] = str
- end
- end
- end
- e.dt = t
- else
- -- we can assume a regular sparse xml table with no successive strings
- -- otherwise we should use a while loop
- if #edt > 0 then
- -- strip front
- local str = edt[1]
- if type(str) ~= "string" then
- -- nothing
- elseif str == "" then
- remove(edt,1)
- else
- if nolines then
- str = gsub(str,"%s+"," ")
- end
- str = gsub(str,"^%s+","")
- if str == "" then
- remove(edt,1)
- else
- edt[1] = str
- end
- end
- end
- if #edt > 1 then
- -- strip end
- local str = edt[#edt]
- if type(str) ~= "string" then
- -- nothing
- elseif str == "" then
- remove(edt)
- else
- if nolines then
- str = gsub(str,"%s+"," ")
- end
- str = gsub(str,"%s+$","")
- if str == "" then
- remove(edt)
- else
- edt[#edt] = str
- end
- end
- end
- end
- end
+ stripelement(collected[i],nolines,anywhere)
end
end
end
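-- A minimal illustrative sketch of how the reworked strip can be called; the
-- sample document, the lpath pattern "a" and the serialized result are
-- assumptions, and xml.convert / xml.tostring are the helpers defined elsewhere
-- in this merged file. With anywhere=true every string child of a matched
-- element is trimmed, otherwise only the first and the last one.

local example = xml.convert("<a>  one  <b>  two  </b>  three  </a>")
xml.strip(example,"a",true,true) -- nolines=true, anywhere=true
print(xml.tostring(example))     -- roughly: <a>one<b>  two  </b>three</a>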
-local function rename_space(root, oldspace, newspace) -- fast variant
+local function renamespace(root, oldspace, newspace) -- fast variant
local ndt = #root.dt
for i=1,ndt or 0 do
local e = root[i]
@@ -7006,16 +9281,16 @@ local function rename_space(root, oldspace, newspace) -- fast variant
end
local edt = e.dt
if edt then
- rename_space(edt, oldspace, newspace)
+ renamespace(edt, oldspace, newspace)
end
end
end
end
-xml.rename_space = rename_space
+xml.renamespace = renamespace
-function xml.remap_tag(root, pattern, newtg)
- local collected = xmlparseapply({ root },pattern)
+function xml.remaptag(root, pattern, newtg)
+ local collected = xmlapplylpath(root,pattern)
if collected then
for c=1,#collected do
collected[c].tg = newtg
@@ -7023,8 +9298,8 @@ function xml.remap_tag(root, pattern, newtg)
end
end
-function xml.remap_namespace(root, pattern, newns)
- local collected = xmlparseapply({ root },pattern)
+function xml.remapnamespace(root, pattern, newns)
+ local collected = xmlapplylpath(root,pattern)
if collected then
for c=1,#collected do
collected[c].ns = newns
@@ -7032,8 +9307,8 @@ function xml.remap_namespace(root, pattern, newns)
end
end
-function xml.check_namespace(root, pattern, newns)
- local collected = xmlparseapply({ root },pattern)
+function xml.checknamespace(root, pattern, newns)
+ local collected = xmlapplylpath(root,pattern)
if collected then
for c=1,#collected do
local e = collected[c]
@@ -7044,8 +9319,8 @@ function xml.check_namespace(root, pattern, newns)
end
end
-function xml.remap_name(root, pattern, newtg, newns, newrn)
- local collected = xmlparseapply({ root },pattern)
+function xml.remapname(root, pattern, newtg, newns, newrn)
+ local collected = xmlapplylpath(root,pattern)
if collected then
for c=1,#collected do
local e = collected[c]
@@ -7058,18 +9333,34 @@ end
Here are a few synonyms.
--ldx]]--
-xml.each = xml.each_element
-xml.process = xml.process_element
-xml.strip = xml.strip_whitespace
-xml.collect = xml.collect_elements
-xml.all = xml.collect_elements
-
-xml.insert = xml.insert_element_after
-xml.inject = xml.inject_element_after
-xml.after = xml.insert_element_after
-xml.before = xml.insert_element_before
-xml.delete = xml.delete_element
-xml.replace = xml.replace_element
+xml.all = xml.each
+xml.insert = xml.insertafter
+xml.inject = xml.injectafter
+xml.after = xml.insertafter
+xml.before = xml.insertbefore
+xml.process = xml.each
+
+-- obsolete
+
+xml.obsolete = xml.obsolete or { }
+local obsolete = xml.obsolete
+
+xml.strip_whitespace = xml.strip obsolete.strip_whitespace = xml.strip
+xml.collect_elements = xml.collect obsolete.collect_elements = xml.collect
+xml.delete_element = xml.delete obsolete.delete_element = xml.delete
+xml.replace_element = xml.replace obsolete.replace_element = xml.replace
+xml.each_element = xml.each obsolete.each_element = xml.each
+xml.process_elements = xml.process obsolete.process_elements = xml.process
+xml.insert_element_after = xml.insertafter obsolete.insert_element_after = xml.insertafter
+xml.insert_element_before = xml.insertbefore obsolete.insert_element_before = xml.insertbefore
+xml.inject_element_after = xml.injectafter obsolete.inject_element_after = xml.injectafter
+xml.inject_element_before = xml.injectbefore obsolete.inject_element_before = xml.injectbefore
+xml.process_attributes = xml.processattributes obsolete.process_attributes = xml.processattributes
+xml.collect_texts = xml.collecttexts obsolete.collect_texts = xml.collecttexts
+xml.inject_element = xml.inject obsolete.inject_element = xml.inject
+xml.remap_tag = xml.remaptag obsolete.remap_tag = xml.remaptag
+xml.remap_name = xml.remapname obsolete.remap_name = xml.remapname
+xml.remap_namespace = xml.remapnamespace obsolete.remap_namespace = xml.remapnamespace
end -- of closure
@@ -7084,10 +9375,15 @@ if not modules then modules = { } end modules ['lxml-xml'] = {
license = "see context related readme files"
}
+local concat = table.concat
+
+local xml = xml
+
local finalizers = xml.finalizers.xml
local xmlfilter = xml.filter -- we could inline this one for speed
local xmltostring = xml.tostring
local xmlserialize = xml.serialize
+local xmlcollected = xml.collected
local function first(collected) -- wrong ?
return collected and collected[1]
@@ -7101,15 +9397,8 @@ local function all(collected)
return collected
end
-local function reverse(collected)
- if collected then
- local reversed = { }
- for c=#collected,1,-1 do
- reversed[#reversed+1] = collected[c]
- end
- return reversed
- end
-end
+
+local reverse = table.reversed
local function attribute(collected,name)
if collected and #collected > 0 then
@@ -7200,11 +9489,12 @@ end
local function texts(collected)
if collected then
- local t = { }
+ local t, n = { }, 0
for c=1,#collected do
- local e = collection[c]
+ local e = collected[c]
if e and e.dt then
- t[#t+1] = e.dt
+ n = n + 1
+ t[n] = e.dt
end
end
return t
@@ -7247,14 +9537,15 @@ end
local function tags(collected,nonamespace)
if collected then
- local t = { }
+ local t, n = { }, 0
for c=1,#collected do
local e = collected[c]
local ns, tg = e.ns, e.tg
+ n = n + 1
if nonamespace or ns == "" then
- t[#t+1] = tg
+ t[n] = tg
else
- t[#t+1] = ns .. ":" .. tg
+ t[n] = ns .. ":" .. tg
end
end
return t
@@ -7365,1812 +9656,1937 @@ xml.all = xml.filter
xml.index = xml.position
xml.found = xml.filter
+-- a nice one:
+
+local function totable(x)
+ local t = { }
+ for e in xmlcollected(x[1] or x,"/*") do
+ t[e.tg] = xmltostring(e.dt) or ""
+ end
+ return next(t) and t or nil
+end
+
+xml.table = totable
+finalizers.table = totable
+
+local function textonly(e,t)
+ if e then
+ local edt = e.dt
+ if edt then
+ for i=1,#edt do
+ local e = edt[i]
+ if type(e) == "table" then
+ textonly(e,t)
+ else
+ t[#t+1] = e
+ end
+ end
+ end
+ end
+ return t
+end
+
+function xml.textonly(e) -- no pattern
+ return concat(textonly(e,{}))
+end
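-- A small sketch of what the two new helpers deliver for a tiny record; the tag
-- names are made up and xml.convert is the converter from the lxml-tab section
-- of this merged file. xml.table maps each child tag onto its stringified
-- content, while xml.textonly concatenates all text and drops the markup.

local rec = xml.convert("<entry><author>Hagen</author><year>2011</year></entry>")
print(xml.textonly(rec)) -- Hagen2011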
+
end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['luat-env'] = {
+if not modules then modules = { } end modules ['data-ini'] = {
version = 1.001,
comment = "companion to luat-lib.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
+ license = "see context related readme files",
}
--- A former version provided functionality for non embeded core
--- scripts i.e. runtime library loading. Given the amount of
--- Lua code we use now, this no longer makes sense. Much of this
--- evolved before bytecode arrays were available and so a lot of
--- code has disappeared already.
+local gsub, find, gmatch = string.gsub, string.find, string.gmatch
+local concat = table.concat
+local next, type = next, type
-local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
+local filedirname, filebasename, fileextname, filejoin = file.dirname, file.basename, file.extname, file.join
-local format, sub, match, gsub, find = string.format, string.sub, string.match, string.gsub, string.find
-local unquote, quote = string.unquote, string.quote
+local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
+local trace_detail = false trackers.register("resolvers.details", function(v) trace_detail = v end)
+local trace_expansions = false trackers.register("resolvers.expansions", function(v) trace_expansions = v end)
--- precautions
+local report_initialization = logs.reporter("resolvers","initialization")
-os.setlocale(nil,nil) -- useless feature and even dangerous in luatex
+local ostype, osname, ossetenv, osgetenv = os.type, os.name, os.setenv, os.getenv
-function os.setlocale()
- -- no way you can mess with it
-end
+-- The code here used to be part of data-res but for convenience
+-- we now split it over multiple files. As this file is now the
+-- starting point we introduce resolvers here.
--- dirty tricks
+resolvers = resolvers or { }
+local resolvers = resolvers
+
+-- We don't want the kpse library to kick in. Also, we want to be able to
+-- execute programs. Control over execution is implemented later.
+
+texconfig.kpse_init = false
+texconfig.shell_escape = 't'
+
+kpse = { original = kpse }
+
+setmetatable(kpse, {
+ __index = function(kp,name)
+ report_initialization("fatal error: kpse library is accessed (key: %s)",name)
+ os.exit()
+ end
+} )
+
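-- Illustration of the trap set up above: any stray access becomes a fatal
-- report, so a (hypothetical) classic call like the next inert line would log
-- the message with key 'find_file' and terminate instead of silently falling
-- back to the kpathsea library.

--~ kpse.find_file("texmf.cnf")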
+-- First we check a couple of environment variables. Some might be
+-- set already but we need them later on. We start with the system
+-- font path.
+
+do
+
+ local osfontdir = osgetenv("OSFONTDIR")
+
+ if osfontdir and osfontdir ~= "" then
+ -- ok
+ elseif osname == "windows" then
+ ossetenv("OSFONTDIR","c:/windows/fonts//")
+ elseif osname == "macosx" then
+ ossetenv("OSFONTDIR","$HOME/Library/Fonts//;/Library/Fonts//;/System/Library/Fonts//")
+ end
-if arg and (arg[0] == 'luatex' or arg[0] == 'luatex.exe') and arg[1] == "--luaonly" then
- arg[-1]=arg[0] arg[0]=arg[2] for k=3,#arg do arg[k-2]=arg[k] end arg[#arg]=nil arg[#arg]=nil
end
-if profiler and os.env["MTX_PROFILE_RUN"] == "YES" then
- profiler.start("luatex-profile.log")
+-- Next comes the user's home path. We need this as later on we have
+-- to replace ~ with its value.
+
+do
+
+ local homedir = osgetenv(ostype == "windows" and 'USERPROFILE' or 'HOME') or ''
+
+ if not homedir or homedir == "" then
+ homedir = string.char(127) -- we need a value, later we will trigger on it
+ end
+
+ homedir = file.collapsepath(homedir)
+
+ ossetenv("HOME", homedir) -- can be used in unix cnf files
+ ossetenv("USERPROFILE",homedir) -- can be used in windows cnf files
+
+ environment.homedir = homedir
+
end
--- environment
+-- The following code sets the name and the path of the binary
+-- itself. This is fallback code as we now have os.selfdir.
-environment = environment or { }
-environment.arguments = { }
-environment.files = { }
-environment.sortedflags = nil
+do
-if not environment.jobname or environment.jobname == "" then if tex then environment.jobname = tex.jobname end end
-if not environment.version or environment.version == "" then environment.version = "unknown" end
-if not environment.jobname then environment.jobname = "unknown" end
+ local args = environment.originalarguments or arg -- this needs a cleanup
-function environment.initialize_arguments(arg)
- local arguments, files = { }, { }
- environment.arguments, environment.files, environment.sortedflags = arguments, files, nil
- for index=1,#arg do
- local argument = arg[index]
- if index > 0 then
- local flag, value = match(argument,"^%-+(.-)=(.-)$")
- if flag then
- arguments[flag] = unquote(value or "")
- else
- flag = match(argument,"^%-+(.+)")
- if flag then
- arguments[flag] = true
- else
- files[#files+1] = argument
+ local ownbin = environment.ownbin or args[-2] or arg[-2] or args[-1] or arg[-1] or arg[0] or "luatex"
+ local ownpath = environment.ownpath or os.selfdir
+
+ ownbin = file.collapsepath(ownbin)
+ ownpath = file.collapsepath(ownpath)
+
+ if not ownpath or ownpath == "" or ownpath == "unset" then
+ ownpath = args[-1] or arg[-1]
+ ownpath = ownpath and filedirname(gsub(ownpath,"\\","/"))
+ if not ownpath or ownpath == "" then
+ ownpath = args[-0] or arg[-0]
+ ownpath = ownpath and filedirname(gsub(ownpath,"\\","/"))
+ end
+ local binary = ownbin
+ if not ownpath or ownpath == "" then
+ ownpath = ownpath and filedirname(binary)
+ end
+ if not ownpath or ownpath == "" then
+ if os.binsuffix ~= "" then
+ binary = file.replacesuffix(binary,os.binsuffix)
+ end
+ local path = osgetenv("PATH")
+ if path then
+ for p in gmatch(path,"[^"..io.pathseparator.."]+") do
+ local b = filejoin(p,binary)
+ if lfs.isfile(b) then
+ -- we assume that after changing to the path the currentdir function
+ -- resolves to the real location and use this side effect here; this
+ -- trick is needed because on the mac, installations use symlinks in the
+ -- path instead of real locations
+ local olddir = lfs.currentdir()
+ if lfs.chdir(p) then
+ local pp = lfs.currentdir()
+ if trace_locating and p ~= pp then
+ report_initialization("following symlink '%s' to '%s'",p,pp)
+ end
+ ownpath = pp
+ lfs.chdir(olddir)
+ else
+ if trace_locating then
+ report_initialization("unable to check path '%s'",p)
+ end
+ ownpath = p
+ end
+ break
+ end
end
end
end
+ if not ownpath or ownpath == "" then
+ ownpath = "."
+ report_initialization("forcing fallback ownpath .")
+ elseif trace_locating then
+ report_initialization("using ownpath '%s'",ownpath)
+ end
end
- environment.ownname = environment.ownname or arg[0] or 'unknown.lua'
+
+ environment.ownbin = ownbin
+ environment.ownpath = ownpath
+
end
-function environment.setargument(name,value)
- environment.arguments[name] = value
+resolvers.ownpath = environment.ownpath
+
+function resolvers.getownpath()
+ return environment.ownpath
end
--- todo: defaults, better checks e.g on type (boolean versus string)
---
--- tricky: too many hits when we support partials unless we add
--- a registration of arguments so from now on we have 'partial'
+-- The self variables permit us to use only a few (or even no)
+-- environment variables.
-function environment.argument(name,partial)
- local arguments, sortedflags = environment.arguments, environment.sortedflags
- if arguments[name] then
- return arguments[name]
- elseif partial then
- if not sortedflags then
- sortedflags = table.sortedkeys(arguments)
- for k=1,#sortedflags do
- sortedflags[k] = "^" .. sortedflags[k]
- end
- environment.sortedflags = sortedflags
- end
- -- example of potential clash: ^mode ^modefile
- for k=1,#sortedflags do
- local v = sortedflags[k]
- if find(name,v) then
- return arguments[sub(v,2,#v)]
- end
- end
+do
+
+ local ownpath = environment.ownpath or dir.current()
+
+ if ownpath then
+ ossetenv('SELFAUTOLOC', file.collapsepath(ownpath))
+ ossetenv('SELFAUTODIR', file.collapsepath(ownpath .. "/.."))
+ ossetenv('SELFAUTOPARENT', file.collapsepath(ownpath .. "/../.."))
+ else
+ report_initialization("error: unable to locate ownpath")
+ os.exit()
end
- return nil
+
end
-environment.argument("x",true)
+-- The running os:
-function environment.split_arguments(separator) -- rather special, cut-off before separator
- local done, before, after = false, { }, { }
- local original_arguments = environment.original_arguments
- for k=1,#original_arguments do
- local v = original_arguments[k]
- if not done and v == separator then
- done = true
- elseif done then
- after[#after+1] = v
- else
- before[#before+1] = v
- end
- end
- return before, after
+-- todo: check if context sits here; os.platform is more trustworthy
+-- than the bin check as mtx-update runs from another path
+
+local texos = environment.texos or osgetenv("TEXOS")
+local texmfos = environment.texmfos or osgetenv('SELFAUTODIR')
+
+if not texos or texos == "" then
+ texos = file.basename(texmfos)
end
-function environment.reconstruct_commandline(arg,noquote)
- arg = arg or environment.original_arguments
- if noquote and #arg == 1 then
- local a = arg[1]
- a = resolvers.resolve(a)
- a = unquote(a)
- return a
- elseif #arg > 0 then
- local result = { }
- for i=1,#arg do
- local a = arg[i]
- a = resolvers.resolve(a)
- a = unquote(a)
- a = gsub(a,'"','\\"') -- tricky
- if find(a," ") then
- result[#result+1] = quote(a)
- else
- result[#result+1] = a
- end
- end
- return table.join(result," ")
- else
- return ""
+ossetenv('TEXMFOS', texmfos) -- full bin path
+ossetenv('TEXOS', texos) -- partial bin parent
+ossetenv('SELFAUTOSYSTEM',os.platform) -- bonus
+
+environment.texos = texos
+environment.texmfos = texmfos
+
+-- The current root:
+
+local texroot = environment.texroot or osgetenv("TEXROOT")
+
+if not texroot or texroot == "" then
+ texroot = osgetenv('SELFAUTOPARENT')
+ ossetenv('TEXROOT',texroot)
+end
+
+environment.texroot = file.collapsepath(texroot)
+
+-- Tracing. Todo ...
+
+function resolvers.settrace(n) -- no longer number but: 'locating' or 'detail'
+ if n then
+ trackers.disable("resolvers.*")
+ trackers.enable("resolvers."..n)
end
end
-if arg then
+resolvers.settrace(osgetenv("MTX_INPUT_TRACE"))
+
+-- todo:
+
+-- if profiler and osgetenv("MTX_PROFILE_RUN") == "YES" then
+-- profiler.start("luatex-profile.log")
+-- end
+
+-- a forward definition
+
+if not resolvers.resolve then
+ function resolvers.resolve (s) return s end
+ function resolvers.unresolve(s) return s end
+ function resolvers.repath (s) return s end
+end
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+if not modules then modules = { } end modules ['data-exp'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files",
+}
+
+local format, find, gmatch, lower = string.format, string.find, string.gmatch, string.lower
+local concat, sort = table.concat, table.sort
+local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
+local Ct, Cs, Cc, P, C, S = lpeg.Ct, lpeg.Cs, lpeg.Cc, lpeg.P, lpeg.C, lpeg.S
+local type, next = type, next
- -- new, reconstruct quoted snippets (maybe better just remove the " then and add them later)
- local newarg, instring = { }, false
+local ostype = os.type
+local collapsepath = file.collapsepath
- for index=1,#arg do
- local argument = arg[index]
- if find(argument,"^\"") then
- newarg[#newarg+1] = gsub(argument,"^\"","")
- if not find(argument,"\"$") then
- instring = true
- end
- elseif find(argument,"\"$") then
- newarg[#newarg] = newarg[#newarg] .. " " .. gsub(argument,"\"$","")
- instring = false
- elseif instring then
- newarg[#newarg] = newarg[#newarg] .. " " .. argument
- else
- newarg[#newarg+1] = argument
- end
- end
- for i=1,-5,-1 do
- newarg[i] = arg[i]
- end
+local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
+local trace_expansions = false trackers.register("resolvers.expansions", function(v) trace_expansions = v end)
- environment.initialize_arguments(newarg)
- environment.original_arguments = newarg
- environment.raw_arguments = arg
+local report_expansions = logs.reporter("resolvers","expansions")
- arg = { } -- prevent duplicate handling
+local resolvers = resolvers
-end
+-- As this bit of code is somewhat special it gets its own module. After
+-- all, when working on the main resolver code, I don't want to scroll
+-- past this every time. See data-obs.lua for the gsub variant.
--- weird place ... depends on a not yet loaded module
+-- {a,b,c,d}
+-- a,b,c/{p,q,r},d
+-- a,b,c/{p,q,r}/d/{x,y,z}//
+-- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
+-- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
+-- a{b,c}{d,e}f
+-- {a,b,c,d}
+-- {a,b,c/{p,q,r},d}
+-- {a,b,c/{p,q,r}/d/{x,y,z}//}
+-- {a,b,c/{p,q/{x,y,z}},d/{p,q,r}}
+-- {a,b,c/{p,q/{x,y,z},w}v,d/{p,q,r}}
+-- {$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,.local,}/web2c}
-function environment.texfile(filename)
- return resolvers.find_file(filename,'tex')
+local function f_first(a,b)
+ local t, n = { }, 0
+ for s in gmatch(b,"[^,]+") do
+ n = n + 1 ; t[n] = a .. s
+ end
+ return concat(t,",")
end
-function environment.luafile(filename)
- local resolved = resolvers.find_file(filename,'tex') or ""
- if resolved ~= "" then
- return resolved
+local function f_second(a,b)
+ local t, n = { }, 0
+ for s in gmatch(a,"[^,]+") do
+ n = n + 1 ; t[n] = s .. b
end
- resolved = resolvers.find_file(filename,'texmfscripts') or ""
- if resolved ~= "" then
- return resolved
+ return concat(t,",")
+end
+
+local function f_both(a,b)
+ local t, n = { }, 0
+ for sa in gmatch(a,"[^,]+") do
+ for sb in gmatch(b,"[^,]+") do
+ n = n + 1 ; t[n] = sa .. sb
+ end
end
- return resolvers.find_file(filename,'luatexlibs') or ""
+ return concat(t,",")
end
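-- A quick sketch of the three helpers above: they implement the comma products
-- that drive the brace expansion below; plain Lua, no resolver state involved.

print(f_first("pre/","a,b"))   -- pre/a,pre/b
print(f_second("a,b","/post")) -- a/post,b/post
print(f_both("a,b","x,y"))     -- ax,ay,bx,by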
-environment.loadedluacode = loadfile -- can be overloaded
+local left = P("{")
+local right = P("}")
+local var = P((1 - S("{}" ))^0)
+local set = P((1 - S("{},"))^0)
+local other = P(1)
---~ function environment.loadedluacode(name)
---~ if os.spawn("texluac -s -o texluac.luc " .. name) == 0 then
---~ local chunk = loadstring(io.loaddata("texluac.luc"))
---~ os.remove("texluac.luc")
---~ return chunk
---~ else
---~ environment.loadedluacode = loadfile -- can be overloaded
---~ return loadfile(name)
---~ end
---~ end
-
-function environment.luafilechunk(filename) -- used for loading lua bytecode in the format
- filename = file.replacesuffix(filename, "lua")
- local fullname = environment.luafile(filename)
- if fullname and fullname ~= "" then
- if trace_locating then
- logs.report("fileio","loading file %s", fullname)
+local l_first = Cs( ( Cc("{") * (C(set) * left * C(var) * right / f_first) * Cc("}") + other )^0 )
+local l_second = Cs( ( Cc("{") * (left * C(var) * right * C(set) / f_second) * Cc("}") + other )^0 )
+local l_both = Cs( ( Cc("{") * (left * C(var) * right * left * C(var) * right / f_both) * Cc("}") + other )^0 )
+local l_rest = Cs( ( left * var * (left/"") * var * (right/"") * var * right + other )^0 )
+
+local stripper_1 = lpeg.stripper ("{}@")
+local replacer_1 = lpeg.replacer { { ",}", ",@}" }, { "{,", "{@," }, }
+
+local function splitpathexpr(str, newlist, validate) -- I couldn't resist lpegging it (nice exercise).
+ if trace_expansions then
+ report_expansions("expanding variable '%s'",str)
+ end
+ local t, ok, done = newlist or { }, false, false
+ local n = #t
+ str = lpegmatch(replacer_1,str)
+ repeat local old = str
+ repeat local old = str ; str = lpegmatch(l_first, str) until old == str
+ repeat local old = str ; str = lpegmatch(l_second,str) until old == str
+ repeat local old = str ; str = lpegmatch(l_both, str) until old == str
+ repeat local old = str ; str = lpegmatch(l_rest, str) until old == str
+ until old == str -- or not find(str,"{")
+ str = lpegmatch(stripper_1,str)
+ if validate then
+ for s in gmatch(str,"[^,]+") do
+ s = validate(s)
+ if s then
+ n = n + 1 ; t[n] = s
+ end
end
- return environment.loadedluacode(fullname)
else
- if trace_locating then
- logs.report("fileio","unknown file %s", filename)
+ for s in gmatch(str,"[^,]+") do
+ n = n + 1 ; t[n] = s
+ end
+ end
+ if trace_expansions then
+ for k=1,#t do
+ report_expansions("% 4i: %s",k,t[k])
end
- return nil
end
+ return t
end
--- the next ones can use the previous ones / combine
+-- We could make the previous one public.
-function environment.loadluafile(filename, version)
- local lucname, luaname, chunk
- local basename = file.removesuffix(filename)
- if basename == filename then
- lucname, luaname = basename .. ".luc", basename .. ".lua"
- else
- lucname, luaname = nil, basename -- forced suffix
- end
- -- when not overloaded by explicit suffix we look for a luc file first
- local fullname = (lucname and environment.luafile(lucname)) or ""
- if fullname ~= "" then
- if trace_locating then
- logs.report("fileio","loading %s", fullname)
- end
- chunk = loadfile(fullname) -- this way we don't need a file exists check
+local function validate(s)
+ s = collapsepath(s) -- already keeps the //
+ return s ~= "" and not find(s,"^!*unset/*$") and s
+end
+
+resolvers.validatedpath = validate -- keeps the trailing //
+
+function resolvers.expandedpathfromlist(pathlist)
+ local newlist = { }
+ for k=1,#pathlist do
+ splitpathexpr(pathlist[k],newlist,validate)
end
- if chunk then
- assert(chunk)()
- if version then
- -- we check of the version number of this chunk matches
- local v = version -- can be nil
- if modules and modules[filename] then
- v = modules[filename].version -- new method
- elseif versions and versions[filename] then
- v = versions[filename] -- old method
+ return newlist
+end
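-- A minimal sketch of the expected expansion for one of the patterns listed
-- above; the concrete path is made up, and validation here only drops empty
-- and 'unset' entries.

local expanded = resolvers.expandedpathfromlist { "/opt/tex/texmf{-local,}" }
-- expanded == { "/opt/tex/texmf-local", "/opt/tex/texmf" }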
+
+local cleanup = lpeg.replacer {
+ { "!" , "" },
+ { "\\" , "/" },
+}
+
+local homedir
+
+function resolvers.cleanpath(str)
+ if not homedir then
+ homedir = lpegmatch(cleanup,environment.homedir or "")
+ if homedir == string.char(127) or homedir == "" or not lfs.isdir(homedir) then
+ if trace_expansions then
+ report_expansions("no home dir set, ignoring dependent paths")
end
- if v == version then
- return true
- else
- if trace_locating then
- logs.report("fileio","version mismatch for %s: lua=%s, luc=%s", filename, v, version)
+ function resolvers.cleanpath(str)
+ if find(str,"~") then
+ return "" -- special case
+ else
+ return str and lpegmatch(cleanup,str)
end
- environment.loadluafile(filename)
end
else
- return true
- end
- end
- fullname = (luaname and environment.luafile(luaname)) or ""
- if fullname ~= "" then
- if trace_locating then
- logs.report("fileio","loading %s", fullname)
- end
- chunk = loadfile(fullname) -- this way we don't need a file exists check
- if not chunk then
- if trace_locating then
- logs.report("fileio","unknown file %s", filename)
+ cleanup = lpeg.replacer {
+ { "!" , "" },
+ { "\\" , "/" },
+ { "~" , homedir },
+ }
+ function resolvers.cleanpath(str)
+ return str and lpegmatch(cleanup,str)
end
- else
- assert(chunk)()
- return true
end
end
- return false
+ return resolvers.cleanpath(str)
end
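-- A sketch with hypothetical paths: cleanpath drops the '!' markers, swaps
-- backslashes and, once a home directory is known, replaces the tilde; when no
-- home directory can be determined a path containing '~' collapses to "".

print(resolvers.cleanpath([[!!c:\tex\texmf-local]])) -- c:/tex/texmf-local
print(resolvers.cleanpath("~/texmf"))                -- for instance /home/hans/texmf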
+-- This one strips quotes and funny tokens.
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
+local expandhome = P("~") / "$HOME" -- environment.homedir
-if not modules then modules = { } end modules ['trac-inf'] = {
- version = 1.001,
- comment = "companion to trac-inf.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
+local dodouble = P('"')/"" * (expandhome + (1 - P('"')))^0 * P('"')/""
+local dosingle = P("'")/"" * (expandhome + (1 - P("'")))^0 * P("'")/""
+local dostring = (expandhome + 1 )^0
-local format = string.format
+local stripper = Cs(
+ lpegpatterns.unspacer * (dosingle + dodouble + dostring) * lpegpatterns.unspacer
+)
-local statusinfo, n, registered = { }, 0, { }
+function resolvers.checkedvariable(str) -- assumes str is a string
+ return lpegmatch(stripper,str) or str
+end
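-- A small sketch: checkedvariable strips one level of surrounding quotes and
-- maps a tilde onto $HOME; the sample values are illustrative.

print(resolvers.checkedvariable([["~/texmf"]]))  -- $HOME/texmf
print(resolvers.checkedvariable("texmf-local"))  -- texmf-local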
-statistics = statistics or { }
+-- The path splitter:
-statistics.enable = true
-statistics.threshold = 0.05
+-- A config (optionally) has the paths split in tables. Internally
+-- we join them and split them after the expansion has taken place. This
+-- is more convenient.
--- timing functions
+local cache = { }
-local clock = os.gettimeofday or os.clock
+---- splitter = Ct(lpeg.splitat(S(ostype == "windows" and ";" or ":;"))) -- maybe add ,
+local splitter = Ct(lpeg.splitat(";")) -- as we move towards urls, prefixes and use tables we no longer do :
-local notimer
+local backslashswapper = lpeg.replacer("\\","/")
-function statistics.hastimer(instance)
- return instance and instance.starttime
+local function splitconfigurationpath(str) -- beware, this can be either a path or a { specification }
+ if str then
+ local found = cache[str]
+ if not found then
+ if str == "" then
+ found = { }
+ else
+ local split = lpegmatch(splitter,lpegmatch(backslashswapper,str)) -- can be combined
+ found = { }
+ local noffound = 0
+ for i=1,#split do
+ local s = split[i]
+ if not find(s,"^{*unset}*") then
+ noffound = noffound + 1
+ found[noffound] = s
+ end
+ end
+ if trace_expansions then
+ report_expansions("splitting path specification '%s'",str)
+ for k=1,noffound do
+ report_expansions("% 4i: %s",k,found[k])
+ end
+ end
+ cache[str] = found
+ end
+ end
+ return found
+ end
end
-function statistics.resettiming(instance)
- if not instance then
- notimer = { timing = 0, loadtime = 0 }
+resolvers.splitconfigurationpath = splitconfigurationpath
+
+function resolvers.splitpath(str)
+ if type(str) == 'table' then
+ return str
else
- instance.timing, instance.loadtime = 0, 0
+ return splitconfigurationpath(str)
end
end
-function statistics.starttiming(instance)
- if not instance then
- notimer = { }
- instance = notimer
- end
- local it = instance.timing
- if not it then
- it = 0
- end
- if it == 0 then
- instance.starttime = clock()
- if not instance.loadtime then
- instance.loadtime = 0
- end
+function resolvers.joinpath(str)
+ if type(str) == 'table' then
+ return file.joinpath(str)
else
---~ logs.report("system","nested timing (%s)",tostring(instance))
+ return str
end
- instance.timing = it + 1
end
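-- A sketch with an illustrative specification: the splitter swaps backslashes,
-- splits at semicolons and silently drops 'unset' entries; results are cached.

local parts = resolvers.splitconfigurationpath([[c:\tex\texmf;{unset};/opt/texmf]])
-- parts == { "c:/tex/texmf", "/opt/texmf" }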
-function statistics.stoptiming(instance, report)
- if not instance then
- instance = notimer
- end
- if instance then
- local it = instance.timing
- if it > 1 then
- instance.timing = it - 1
- else
- local starttime = instance.starttime
- if starttime then
- local stoptime = clock()
- local loadtime = stoptime - starttime
- instance.stoptime = stoptime
- instance.loadtime = instance.loadtime + loadtime
- if report then
- statistics.report("load time %0.3f",loadtime)
+-- The next function scans directories and returns a hash where the
+-- entries are either strings or tables.
+
+-- skip names starting with . or .. etc, or containing funny characters
+
+
+
+
+local weird = P(".")^1 + lpeg.anywhere(S("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t"))
+
+local attributes, directory = lfs.attributes, lfs.dir
+
+local function scan(files,spec,path,n,m,r)
+ local full = (path == "" and spec) or (spec .. path .. '/')
+ local dirs, nofdirs = { }, 0
+ for name in directory(full) do
+ if not lpegmatch(weird,name) then
+ local mode = attributes(full..name,'mode')
+ if mode == 'file' then
+ n = n + 1
+ local f = files[name]
+ if f then
+ if type(f) == 'string' then
+ files[name] = { f, path }
+ else
+ f[#f+1] = path
+ end
+ else -- probably unique anyway
+ files[name] = path
+ local lower = lower(name)
+ if name ~= lower then
+ files["remap:"..lower] = name
+ r = r + 1
+ end
+ end
+ elseif mode == 'directory' then
+ m = m + 1
+ nofdirs = nofdirs + 1
+ if path ~= "" then
+ dirs[nofdirs] = path..'/'..name
+ else
+ dirs[nofdirs] = name
end
- instance.timing = 0
- return loadtime
end
end
end
- return 0
-end
-
-function statistics.elapsedtime(instance)
- if not instance then
- instance = notimer
+ if nofdirs > 0 then
+ sort(dirs)
+ for i=1,nofdirs do
+ files, n, m, r = scan(files,spec,dirs[i],n,m,r)
+ end
end
- return format("%0.3f",(instance and instance.loadtime) or 0)
+ return files, n, m, r
end
-function statistics.elapsedindeed(instance)
- if not instance then
- instance = notimer
+function resolvers.scanfiles(path,branch)
+ if trace_locating then
+ report_expansions("scanning path '%s', branch '%s'",path, branch or path)
+ end
+ local realpath = resolvers.resolve(path) -- no shortcut
+ local files, n, m, r = scan({ },realpath .. '/',"",0,0,0)
+ files.__path__ = path -- can be selfautoparent:texmf-whatever
+ files.__files__ = n
+ files.__directories__ = m
+ files.__remappings__ = r
+ if trace_locating then
+ report_expansions("%s files found on %s directories with %s uppercase remappings",n,m,r)
end
- local t = (instance and instance.loadtime) or 0
- return t > statistics.threshold
+ return files
end
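-- A sketch over a hypothetical tree: the scanner returns one flat hash where a
-- unique file maps onto its relative path, duplicates map onto a list of paths,
-- case remappings get a "remap:" prefix and the __*__ keys carry the totals.

--~ local files = resolvers.scanfiles("/opt/tex/texmf")
--~ print(files["context.mkiv"])     -- e.g. tex/context/base
--~ print(files["remap:readme.txt"]) -- e.g. README.TXT
--~ print(files.__files__, files.__directories__, files.__remappings__)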
-function statistics.elapsedseconds(instance,rest) -- returns nil if 0 seconds
- if statistics.elapsedindeed(instance) then
- return format("%s seconds %s", statistics.elapsedtime(instance),rest or "")
- end
-end
--- general function
-function statistics.register(tag,fnc)
- if statistics.enable and type(fnc) == "function" then
- local rt = registered[tag] or (#statusinfo + 1)
- statusinfo[rt] = { tag, fnc }
- registered[tag] = rt
- if #tag > n then n = #tag end
- end
-end
+end -- of closure
-function statistics.show(reporter)
- if statistics.enable then
- if not reporter then reporter = function(tag,data,n) texio.write_nl(tag .. " " .. data) end end
- -- this code will move
- local register = statistics.register
- register("luatex banner", function()
- return string.lower(status.banner)
- end)
- register("control sequences", function()
- return format("%s of %s", status.cs_count, status.hash_size+status.hash_extra)
- end)
- register("callbacks", function()
- local total, indirect = status.callbacks or 0, status.indirect_callbacks or 0
- return format("direct: %s, indirect: %s, total: %s", total-indirect, indirect, total)
- end)
- register("current memory usage", statistics.memused)
- register("runtime",statistics.runtime)
--- --
- for i=1,#statusinfo do
- local s = statusinfo[i]
- local r = s[2]()
- if r then
- reporter(s[1],r,n)
+do -- create closure to overcome 200 locals limit
+
+if not modules then modules = { } end modules ['data-env'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files",
+}
+
+local lower, gsub = string.lower, string.gsub
+
+local resolvers = resolvers
+
+local allocate = utilities.storage.allocate
+local setmetatableindex = table.setmetatableindex
+local fileextname = file.extname
+
+local formats = allocate()
+local suffixes = allocate()
+local dangerous = allocate()
+local suffixmap = allocate()
+
+resolvers.formats = formats
+resolvers.suffixes = suffixes
+resolvers.dangerous = dangerous
+resolvers.suffixmap = suffixmap
+
+local relations = allocate { -- todo: handlers also here
+ core = {
+ ofm = { -- will become obsolete
+ names = { "ofm", "omega font metric", "omega font metrics" },
+ variable = 'OFMFONTS',
+ suffixes = { 'ofm', 'tfm' },
+ },
+ ovf = { -- will become obsolete
+ names = { "ovf", "omega virtual font", "omega virtual fonts" },
+ variable = 'OVFFONTS',
+ suffixes = { 'ovf', 'vf' },
+ },
+ tfm = {
+ names = { "tfm", "tex font metric", "tex font metrics" },
+ variable = 'TFMFONTS',
+ suffixes = { 'tfm' },
+ },
+ vf = {
+ names = { "vf", "virtual font", "virtual fonts" },
+ variable = 'VFFONTS',
+ suffixes = { 'vf' },
+ },
+ otf = {
+ names = { "otf", "opentype", "opentype font", "opentype fonts"},
+ variable = 'OPENTYPEFONTS',
+ suffixes = { 'otf' },
+ },
+ ttf = {
+ names = { "ttf", "truetype", "truetype font", "truetype fonts", "truetype collection", "truetype collections", "truetype dictionary", "truetype dictionaries" },
+ variable = 'TTFONTS',
+ suffixes = { 'ttf', 'ttc', 'dfont' },
+ },
+ afm = {
+ names = { "afm", "adobe font metric", "adobe font metrics" },
+ variable = "AFMFONTS",
+ suffixes = { "afm" },
+ },
+ pfb = {
+ names = { "pfb", "type1", "type 1", "type1 font", "type 1 font", "type1 fonts", "type 1 fonts" },
+ variable = 'T1FONTS',
+ suffixes = { 'pfb', 'pfa' },
+ },
+ fea = {
+ names = { "fea", "font feature", "font features", "font feature file", "font feature files" },
+ variable = 'FONTFEATURES',
+ suffixes = { 'fea' },
+ },
+ cid = {
+ names = { "cid", "cid map", "cid maps", "cid file", "cid files" },
+ variable = 'FONTCIDMAPS',
+ suffixes = { 'cid', 'cidmap' },
+ },
+ fmt = {
+ names = { "fmt", "format", "tex format" },
+ variable = 'TEXFORMATS',
+ suffixes = { 'fmt' },
+ },
+ mem = { -- will become obsolete
+ names = { 'mem', "metapost format" },
+ variable = 'MPMEMS',
+ suffixes = { 'mem' },
+ },
+ mp = {
+ names = { "mp" },
+ variable = 'MPINPUTS',
+ suffixes = { 'mp' },
+ },
+ tex = {
+ names = { "tex" },
+ variable = 'TEXINPUTS',
+ suffixes = { 'tex', "mkiv", "mkiv", "mkii" },
+ },
+ icc = {
+ names = { "icc", "icc profile", "icc profiles" },
+ variable = 'ICCPROFILES',
+ suffixes = { 'icc' },
+ },
+ texmfscripts = {
+ names = { "texmfscript", "texmfscripts", "script", "scripts" },
+ variable = 'TEXMFSCRIPTS',
+ suffixes = { 'rb', 'pl', 'py' },
+ },
+ lua = {
+ names = { "lua" },
+ variable = 'LUAINPUTS',
+ suffixes = { 'lua', 'luc', 'tma', 'tmc' },
+ },
+ lib = {
+ names = { "lib" },
+ variable = 'CLUAINPUTS',
+ suffixes = os.libsuffix and { os.libsuffix } or { 'dll', 'so' },
+ },
+ bib = {
+ names = { 'bib' },
+ suffixes = { 'bib' },
+ },
+ bst = {
+ names = { 'bst' },
+ suffixes = { 'bst' },
+ },
+ fontconfig = {
+ names = { 'fontconfig', 'fontconfig file', 'fontconfig files' },
+ variable = 'FONTCONFIG_PATH',
+ },
+ },
+ obsolete = {
+ enc = {
+ names = { "enc", "enc files", "enc file", "encoding files", "encoding file" },
+ variable = 'ENCFONTS',
+ suffixes = { 'enc' },
+ },
+ map = {
+ names = { "map", "map files", "map file" },
+ variable = 'TEXFONTMAPS',
+ suffixes = { 'map' },
+ },
+ lig = {
+ names = { "lig files", "lig file", "ligature file", "ligature files" },
+ variable = 'LIGFONTS',
+ suffixes = { 'lig' },
+ },
+ opl = {
+ names = { "opl" },
+ variable = 'OPLFONTS',
+ suffixes = { 'opl' },
+ },
+ ovp = {
+ names = { "ovp" },
+ variable = 'OVPFONTS',
+ suffixes = { 'ovp' },
+ },
+ },
+ kpse = { -- subset
+ base = {
+ names = { 'base', "metafont format" },
+ variable = 'MFBASES',
+ suffixes = { 'base', 'bas' },
+ },
+ cmap = {
+ names = { 'cmap', 'cmap files', 'cmap file' },
+ variable = 'CMAPFONTS',
+ suffixes = { 'cmap' },
+ },
+ cnf = {
+ names = { 'cnf' },
+ suffixes = { 'cnf' },
+ },
+ web = {
+ names = { 'web' },
+ suffixes = { 'web', 'ch' }
+ },
+ cweb = {
+ names = { 'cweb' },
+ suffixes = { 'w', 'web', 'ch' },
+ },
+ gf = {
+ names = { 'gf' },
+ suffixes = { 'gf' },
+ },
+ mf = {
+ names = { 'mf' },
+ variable = 'MFINPUTS',
+ suffixes = { 'mf' },
+ },
+ mft = {
+ names = { 'mft' },
+ suffixes = { 'mft' },
+ },
+ pk = {
+ names = { 'pk' },
+ suffixes = { 'pk' },
+ },
+ },
+}
+
+resolvers.relations = relations
+
+-- formats: maps a format onto a variable
+
+function resolvers.updaterelations()
+ for category, categories in next, relations do
+ for name, relation in next, categories do
+ local rn = relation.names
+ local rv = relation.variable
+ local rs = relation.suffixes
+ if rn and rv then
+ for i=1,#rn do
+ local rni = lower(gsub(rn[i]," ",""))
+ formats[rni] = rv
+ if rs then
+ suffixes[rni] = rs
+ for i=1,#rs do
+ local rsi = rs[i]
+ suffixmap[rsi] = rni
+ end
+ end
+ end
+ end
+ if rs then
end
end
- texio.write_nl("") -- final newline
- statistics.enable = false
end
end
-function statistics.show_job_stat(tag,data,n)
- texio.write_nl(format("%-15s: %s - %s","mkiv lua stats",tag:rpadd(n," "),data))
+resolvers.updaterelations() -- push this in the metatable -> newindex
+
+local function simplified(t,k)
+ return rawget(t,lower(gsub(k," ","")))
end
-function statistics.memused() -- no math.round yet -)
- local round = math.round or math.floor
- return format("%s MB (ctx: %s MB)",round(collectgarbage("count")/1000), round(status.luastate_bytes/1000000))
+setmetatableindex(formats, simplified)
+setmetatableindex(suffixes, simplified)
+setmetatableindex(suffixmap, simplified)
+
+-- A few accessors, mostly for command line tool.
+
+function resolvers.suffixofformat(str)
+ local s = suffixes[str]
+ return s and s[1] or ""
end
-if statistics.runtime then
- -- already loaded and set
-elseif luatex and luatex.starttime then
- statistics.starttime = luatex.starttime
- statistics.loadtime = 0
- statistics.timing = 0
-else
- statistics.starttiming(statistics)
+function resolvers.suffixesofformat(str)
+ return suffixes[str] or { }
end
-function statistics.runtime()
- statistics.stoptiming(statistics)
- return statistics.formatruntime(statistics.elapsedtime(statistics))
+for name, format in next, formats do
+ dangerous[name] = true -- still needed ?
end
-function statistics.formatruntime(runtime)
- return format("%s seconds", statistics.elapsedtime(statistics))
-end
+-- because vf searching is somewhat dangerous, we want to prevent
+-- too liberal searching, especially because we do a lookup on the current
+-- path anyway; only tex (or any) is safe
+
+dangerous.tex = nil
-function statistics.timed(action,report)
- local timer = { }
- report = report or logs.simple
- statistics.starttiming(timer)
- action()
- statistics.stoptiming(timer)
- report("total runtime: %s",statistics.elapsedtime(timer))
-end
--- where, not really the best spot for this:
+-- more helpers
-commands = commands or { }
+function resolvers.formatofvariable(str)
+ return formats[str] or ''
+end
-local timer
+function resolvers.formatofsuffix(str) -- of file
+ return suffixmap[fileextname(str)] or 'tex' -- so many map onto tex (like mkiv, cld etc)
+end
-function commands.resettimer()
- statistics.resettiming(timer)
- statistics.starttiming(timer)
+function resolvers.variableofformat(str)
+ return formats[str] or ''
end
-function commands.elapsedtime()
- statistics.stoptiming(timer)
- tex.sprint(statistics.elapsedtime(timer))
+function resolvers.variableofformatorsuffix(str)
+ local v = formats[str]
+ if v then
+ return v
+ end
+ v = suffixmap[fileextname(str)]
+ if v then
+ return formats[v]
+ end
+ return ''
end
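-- A sketch of typical lookups given the relations table above; thanks to the
-- 'simplified' index metamethod they are case and space insensitive.

print(resolvers.formatofvariable("opentype fonts"))        -- OPENTYPEFONTS
print(resolvers.formatofsuffix("pagella-regular.otf"))     -- otf
print(resolvers.variableofformatorsuffix("whatever.mkiv")) -- TEXINPUTS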
-commands.resettimer()
end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['trac-log'] = {
- version = 1.001,
- comment = "companion to trac-log.mkiv",
+if not modules then modules = { } end modules ['data-tmp'] = {
+ version = 1.100,
+ comment = "companion to luat-lib.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
license = "see context related readme files"
}
--- this is old code that needs an overhaul
-
---~ io.stdout:setvbuf("no")
---~ io.stderr:setvbuf("no")
-
-local write_nl, write = texio.write_nl or print, texio.write or io.write
-local format, gmatch = string.format, string.gmatch
-local texcount = tex and tex.count
-
-if texlua then
- write_nl = print
- write = io.write
-end
-
--[[ldx--
-This is a prelude to a more extensive logging module. For the sake
-of parsing log files, in addition to the standard logging we will
-provide an structured file. Actually, any logging that
-is hooked into callbacks will be \XML\ by default.
---ldx]]--
+This module deals with caching data. It sets up the paths and
+implements loaders and savers for tables. It is best to set the
+following variable; when it is not set, the usual paths will be
+checked. Personally I prefer the (user's) temporary path.
-logs = logs or { }
-logs.xml = logs.xml or { }
-logs.tex = logs.tex or { }
+
+TEXMFCACHE=$TMP;$TEMP;$TMPDIR;$TEMPDIR;$HOME;$TEXMFVAR;$VARTEXMF;.
+
---[[ldx--
-This looks pretty ugly but we need to speed things up a bit.
+Currently we do no locking when we write files. This is no real
+problem because most caching involves fonts and the chance of them
+being written at the same time is small. We also need to extend
+luatools with a recache feature.
--ldx]]--
-logs.moreinfo = [[
-more information about ConTeXt and the tools that come with it can be found at:
+local format, lower, gsub, concat = string.format, string.lower, string.gsub, table.concat
+local mkdirs, isdir = dir.mkdirs, lfs.isdir
-maillist : ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
-webpage : http://www.pragma-ade.nl / http://tex.aanhet.net
-wiki : http://contextgarden.net
-]]
+local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
+local trace_cache = false trackers.register("resolvers.cache", function(v) trace_cache = v end)
-logs.levels = {
- ['error'] = 1,
- ['warning'] = 2,
- ['info'] = 3,
- ['debug'] = 4,
-}
+local report_caches = logs.reporter("resolvers","caches")
+local report_resolvers = logs.reporter("resolvers","caching")
-logs.functions = {
- 'report', 'start', 'stop', 'push', 'pop', 'line', 'direct',
- 'start_run', 'stop_run',
- 'start_page_number', 'stop_page_number',
- 'report_output_pages', 'report_output_log',
- 'report_tex_stat', 'report_job_stat',
- 'show_open', 'show_close', 'show_load',
-}
+local resolvers = resolvers
-logs.tracers = {
-}
+-- intermezzo
-logs.level = 0
-logs.mode = string.lower((os.getenv("MTX.LOG.MODE") or os.getenv("MTX_LOG_MODE") or "tex"))
+local directive_cleanup = false directives.register("system.compile.cleanup", function(v) directive_cleanup = v end)
+local directive_strip = true directives.register("system.compile.strip", function(v) directive_strip = v end)
-function logs.set_level(level)
- logs.level = logs.levels[level] or level
-end
+local compile = utilities.lua.compile
-function logs.set_method(method)
- for _, v in next, logs.functions do
- logs[v] = logs[method][v] or function() end
- end
+function utilities.lua.compile(luafile,lucfile,cleanup,strip)
+ if cleanup == nil then cleanup = directive_cleanup end
+ if strip == nil then strip = directive_strip end
+ return compile(luafile,lucfile,cleanup,strip)
end
--- tex logging
+-- end of intermezzo
-function logs.tex.report(category,fmt,...) -- new
- if fmt then
- write_nl(category .. " | " .. format(fmt,...))
- else
- write_nl(category .. " |")
- end
-end
+caches = caches or { }
+local caches = caches
-function logs.tex.line(fmt,...) -- new
- if fmt then
- write_nl(format(fmt,...))
+caches.base = caches.base or "luatex-cache"
+caches.more = caches.more or "context"
+caches.direct = false -- true is faster but may need huge amounts of memory
+caches.tree = false
+caches.force = true
+caches.ask = false
+caches.relocate = false
+caches.defaults = { "TMPDIR", "TEMPDIR", "TMP", "TEMP", "HOME", "HOMEPATH" }
+
+local writable, readables, usedreadables = nil, { }, { }
+
+-- we could use a metatable for writable and readable but not yet
+
+local function identify()
+ -- Combining the loops makes it messy. First we check the format cache path
+ -- and when the last component is not present we try to create it.
+ local texmfcaches = resolvers.cleanpathlist("TEXMFCACHE")
+ if texmfcaches then
+ for k=1,#texmfcaches do
+ local cachepath = texmfcaches[k]
+ if cachepath ~= "" then
+ cachepath = resolvers.resolve(cachepath)
+ cachepath = resolvers.cleanpath(cachepath)
+ cachepath = file.collapsepath(cachepath)
+ local valid = isdir(cachepath)
+ if valid then
+ if file.is_readable(cachepath) then
+ readables[#readables+1] = cachepath
+ if not writable and file.is_writable(cachepath) then
+ writable = cachepath
+ end
+ end
+ elseif not writable and caches.force then
+ local cacheparent = file.dirname(cachepath)
+ if file.is_writable(cacheparent) then
+ if not caches.ask or io.ask(format("\nShould I create the cache path %s?",cachepath), "no", { "yes", "no" }) == "yes" then
+ mkdirs(cachepath)
+ if isdir(cachepath) and file.is_writable(cachepath) then
+ report_caches("created: %s",cachepath)
+ writable = cachepath
+ readables[#readables+1] = cachepath
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ -- As a last resort we check some temporary paths but this time we don't
+ -- create them.
+ local texmfcaches = caches.defaults
+ if texmfcaches then
+ for k=1,#texmfcaches do
+ local cachepath = texmfcaches[k]
+ cachepath = resolvers.expansion(cachepath) -- was getenv
+ if cachepath ~= "" then
+ cachepath = resolvers.resolve(cachepath)
+ cachepath = resolvers.cleanpath(cachepath)
+ local valid = isdir(cachepath)
+ if valid and file.is_readable(cachepath) then
+ if not writable and file.is_writable(cachepath) then
+ readables[#readables+1] = cachepath
+ writable = cachepath
+ break
+ end
+ end
+ end
+ end
+ end
+ -- Some extra checking. If we have no writable or readable path then we simply
+ -- quit.
+ if not writable then
+ report_caches("fatal error: there is no valid writable cache path defined")
+ os.exit()
+ elseif #readables == 0 then
+ report_caches("fatal error: there is no valid readable cache path defined")
+ os.exit()
+ end
+ -- why here
+ writable = dir.expandname(resolvers.cleanpath(writable)) -- just in case
+ -- moved here
+ local base, more, tree = caches.base, caches.more, caches.tree or caches.treehash() -- we have only one writable tree
+ if tree then
+ caches.tree = tree
+ writable = mkdirs(writable,base,more,tree)
+ for i=1,#readables do
+ readables[i] = file.join(readables[i],base,more,tree)
+ end
else
- write_nl("")
+ writable = mkdirs(writable,base,more)
+ for i=1,#readables do
+ readables[i] = file.join(readables[i],base,more)
+ end
end
+ -- end
+ if trace_cache then
+ for i=1,#readables do
+ report_caches("using readable path '%s' (order %s)",readables[i],i)
+ end
+ report_caches("using writable path '%s'",writable)
+ end
+ identify = function()
+ return writable, readables
+ end
+ return writable, readables
end
---~ function logs.tex.start_page_number()
---~ local real, user, sub = texcount.realpageno, texcount.userpageno, texcount.subpageno
---~ if real > 0 then
---~ if user > 0 then
---~ if sub > 0 then
---~ write(format("[%s.%s.%s",real,user,sub))
---~ else
---~ write(format("[%s.%s",real,user))
---~ end
---~ else
---~ write(format("[%s",real))
---~ end
---~ else
---~ write("[-")
---~ end
---~ end
-
---~ function logs.tex.stop_page_number()
---~ write("]")
---~ end
-
-local real, user, sub
-
-function logs.tex.start_page_number()
- real, user, sub = texcount.realpageno, texcount.userpageno, texcount.subpageno
-end
-
-function logs.tex.stop_page_number()
- if real > 0 then
- if user > 0 then
- if sub > 0 then
- logs.report("pages", "flushing realpage %s, userpage %s, subpage %s",real,user,sub)
- else
- logs.report("pages", "flushing realpage %s, userpage %s",real,user)
+function caches.usedpaths()
+ local writable, readables = identify()
+ if #readables > 1 then
+ local result = { }
+ for i=1,#readables do
+ local readable = readables[i]
+ if usedreadables[i] or readable == writable then
+ result[#result+1] = format("readable: '%s' (order %s)",readable,i)
end
- else
- logs.report("pages", "flushing realpage %s",real)
end
+ result[#result+1] = format("writable: '%s'",writable)
+ return result
else
- logs.report("pages", "flushing page")
+ return writable
end
- io.flush()
end
-logs.tex.report_job_stat = statistics.show_job_stat
-
--- xml logging
+function caches.configfiles()
+ return table.concat(resolvers.instance.specification,";")
+end
-function logs.xml.report(category,fmt,...) -- new
- if fmt then
- write_nl(format("%s",category,format(fmt,...)))
- else
- write_nl(format("",category))
+function caches.hashed(tree)
+ tree = gsub(tree,"\\$","/")
+ tree = gsub(tree,"/+$","")
+ tree = lower(tree)
+ local hash = md5.hex(tree)
+ if trace_cache or trace_locating then
+ report_caches("hashing tree %s, hash %s",tree,hash)
end
+ return hash
end
-function logs.xml.line(fmt,...) -- new
- if fmt then
- write_nl(format("%s",format(fmt,...)))
+
+function caches.treehash()
+ local tree = caches.configfiles()
+ if not tree or tree == "" then
+ return false
else
- write_nl("")
+ return caches.hashed(tree)
end
end
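-- A small sketch with an illustrative path: the tree hash is an md5 of the
-- lowercased configuration path with trailing slashes removed, so equal setups
-- end up sharing one cache subdirectory.

print(caches.hashed("/OPT/TeX/texmf-config/")) -- md5.hex("/opt/tex/texmf-config"), 32 hex characters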
-function logs.xml.start() if logs.level > 0 then tw("<%s>" ) end end
-function logs.xml.stop () if logs.level > 0 then tw("%s>") end end
-function logs.xml.push () if logs.level > 0 then tw("" ) end end
-
-function logs.xml.start_run()
- write_nl("")
- write_nl("") -- xmlns='www.pragma-ade.com/luatex/schemas/context-job.rng'
- write_nl("")
-end
+local r_cache, w_cache = { }, { } -- normally w is in r but who cares
-function logs.xml.stop_run()
- write_nl("")
+local function getreadablepaths(...) -- we can optimize this as we have at most 2 tags
+ local tags = { ... }
+ local hash = concat(tags,"/")
+ local done = r_cache[hash]
+ if not done then
+ local writable, readables = identify() -- exit if not found
+ if #tags > 0 then
+ done = { }
+ for i=1,#readables do
+ done[i] = file.join(readables[i],...)
+ end
+ else
+ done = readables
+ end
+ r_cache[hash] = done
+ end
+ return done
end
-function logs.xml.start_page_number()
- write_nl(format("
+local function getwritablepath(...)
+ local tags = { ... }
+ local hash = concat(tags,"/")
+ local done = w_cache[hash]
+ if not done then
+ local writable, readables = identify() -- exit if not found
+ if #tags > 0 then
+ done = mkdirs(writable,...)
+ else
+ done = writable
+ end
+ w_cache[hash] = done
+ end
+ return done
end
-function logs.xml.stop_page_number()
- write("/>")
- write_nl("")
-end
+caches.getreadablepaths = getreadablepaths
+caches.getwritablepath = getwritablepath
-function logs.xml.report_output_pages(p,b)
- write_nl(format("", p))
- write_nl(format("", b))
- write_nl("")
+function caches.getfirstreadablefile(filename,...)
+ local rd = getreadablepaths(...)
+ for i=1,#rd do
+ local path = rd[i]
+ local fullname = file.join(path,filename)
+ if file.is_readable(fullname) then
+ usedreadables[i] = true
+ return fullname, path
+ end
+ end
+ return caches.setfirstwritablefile(filename,...)
end
-function logs.xml.report_output_log()
+function caches.setfirstwritablefile(filename,...)
+ local wr = getwritablepath(...)
+ local fullname = file.join(wr,filename)
+ return fullname, wr
end
-function logs.xml.report_tex_stat(k,v)
- texiowrite_nl("log",""..tostring(v).."")
+function caches.define(category,subcategory) -- for old times' sake
+ return function()
+ return getwritablepath(category,subcategory)
+ end
end
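-- A sketch with hypothetical names: a typical consumer first looks for an
-- existing file in all readable cache trees and falls back to the writable one;
-- both calls trigger the identify() step above on first use.

--~ local fullname, path = caches.getfirstreadablefile("fonts.lua","trees")
--~ local target, wpath  = caches.setfirstwritablefile("fonts.lua","trees")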
-local level = 0
-
-function logs.xml.show_open(name)
- level = level + 1
- texiowrite_nl(format("",level,name))
+function caches.setluanames(path,name)
+ return path .. "/" .. name .. ".tma", path .. "/" .. name .. ".tmc"
end
-function logs.xml.show_close(name)
- texiowrite(" ")
- level = level - 1
+function caches.loaddata(readables,name)
+ if type(readables) == "string" then
+ readables = { readables }
+ end
+ for i=1,#readables do
+ local path = readables[i]
+ local tmaname, tmcname = caches.setluanames(path,name)
+ local loader = loadfile(tmcname) or loadfile(tmaname)
+ if loader then
+ loader = loader()
+ collectgarbage("step")
+ return loader
+ end
+ end
+ return false
end
-function logs.xml.show_load(name)
- texiowrite_nl(format("",level+1,name))
+function caches.is_writable(filepath,filename)
+ local tmaname, tmcname = caches.setluanames(filepath,filename)
+ return file.is_writable(tmaname)
end
---
-
-local name, banner = 'report', 'context'
+local saveoptions = { compact = true }
-local function report(category,fmt,...)
- if fmt then
- write_nl(format("%s | %s: %s",name,category,format(fmt,...)))
- elseif category then
- write_nl(format("%s | %s",name,category))
- else
- write_nl(format("%s |",name))
+function caches.savedata(filepath,filename,data,raw)
+ local tmaname, tmcname = caches.setluanames(filepath,filename)
+ local reduce, simplify = true, true
+ if raw then
+ reduce, simplify = false, false
end
-end
-
-local function simple(fmt,...)
- if fmt then
- write_nl(format("%s | %s",name,format(fmt,...)))
+ data.cache_uuid = os.uuid()
+ if caches.direct then
+ file.savedata(tmaname,table.serialize(data,true,saveoptions))
else
- write_nl(format("%s |",name))
+ table.tofile(tmaname,data,true,saveoptions)
end
+ utilities.lua.compile(tmaname,tmcname)
end
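-- A sketch with a hypothetical category and data: saving writes a serialized
-- .tma file plus a compiled .tmc next to it, loading prefers the compiled one
-- and returns false when neither exists.

--~ local path = caches.getwritablepath("demo","data")
--~ caches.savedata(path,"demo",{ version = 1, entries = { } })
--~ print(caches.loaddata(path,"demo")) -- the stored table (or false)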
-function logs.setprogram(_name_,_banner_,_verbose_)
- name, banner = _name_, _banner_
- if _verbose_ then
- trackers.enable("resolvers.locating")
- end
- logs.set_method("tex")
- logs.report = report -- also used in libraries
- logs.simple = simple -- only used in scripts !
- if utils then
- utils.report = simple
- end
- logs.verbose = _verbose_
-end
+-- moved from data-res:
-function logs.setverbose(what)
- if what then
- trackers.enable("resolvers.locating")
- else
- trackers.disable("resolvers.locating")
- end
- logs.verbose = what or false
-end
+local content_state = { }
-function logs.extendbanner(_banner_,_verbose_)
- banner = banner .. " | ".. _banner_
- if _verbose_ ~= nil then
- logs.setverbose(what)
- end
+function caches.contentstate()
+ return content_state or { }
end
-logs.verbose = false
-logs.report = logs.tex.report
-logs.simple = logs.tex.report
-
-function logs.reportlines(str) -- todo:
- for line in gmatch(str,"(.-)[\n\r]") do
- logs.report(line)
+function caches.loadcontent(cachename,dataname)
+ local name = caches.hashed(cachename)
+ local full, path = caches.getfirstreadablefile(name ..".lua","trees")
+ local filename = file.join(path,name)
+ local blob = loadfile(filename .. ".luc") or loadfile(filename .. ".lua")
+ if blob then
+ local data = blob()
+ if data and data.content then
+ if data.type == dataname then
+ if data.version == resolvers.cacheversion then
+ content_state[#content_state+1] = data.uuid
+ if trace_locating then
+ report_resolvers("loading '%s' for '%s' from '%s'",dataname,cachename,filename)
+ end
+ return data.content
+ else
+ report_resolvers("skipping '%s' for '%s' from '%s' (version mismatch)",dataname,cachename,filename)
+ end
+ else
+ report_resolvers("skipping '%s' for '%s' from '%s' (datatype mismatch)",dataname,cachename,filename)
+ end
+ elseif trace_locating then
+ report_resolvers("skipping '%s' for '%s' from '%s' (no content)",dataname,cachename,filename)
+ end
+ elseif trace_locating then
+ report_resolvers("skipping '%s' for '%s' from '%s' (invalid file)",dataname,cachename,filename)
end
end
-function logs.reportline() -- for scripts too
- logs.report()
-end
-
-logs.simpleline = logs.reportline
-
-function logs.reportbanner() -- for scripts too
- logs.report(banner)
-end
-
-function logs.help(message,option)
- logs.reportbanner()
- logs.reportline()
- logs.reportlines(message)
- local moreinfo = logs.moreinfo or ""
- if moreinfo ~= "" and option ~= "nomoreinfo" then
- logs.reportline()
- logs.reportlines(moreinfo)
+function caches.collapsecontent(content)
+ for k, v in next, content do
+ if type(v) == "table" and #v == 1 then
+ content[k] = v[1]
+ end
end
end
-logs.set_level('error')
-logs.set_method('tex')
-
-function logs.system(whereto,process,jobname,category,...)
- for i=1,10 do
- local f = io.open(whereto,"a")
- if f then
- f:write(format("%s %s => %s => %s => %s\r",os.date("%d/%m/%y %H:%m:%S"),process,jobname,category,format(...)))
- f:close()
- break
+function caches.savecontent(cachename,dataname,content)
+ local name = caches.hashed(cachename)
+ local full, path = caches.setfirstwritablefile(name ..".lua","trees")
+ local filename = file.join(path,name) -- is full
+ local luaname, lucname = filename .. ".lua", filename .. ".luc"
+ if trace_locating then
+ report_resolvers("preparing '%s' for '%s'",dataname,cachename)
+ end
+ local data = {
+ type = dataname,
+ root = cachename,
+ version = resolvers.cacheversion,
+ date = os.date("%Y-%m-%d"),
+ time = os.date("%H:%M:%S"),
+ content = content,
+ uuid = os.uuid(),
+ }
+ local ok = io.savedata(luaname,table.serialize(data,true))
+ if ok then
+ if trace_locating then
+ report_resolvers("category '%s', cachename '%s' saved in '%s'",dataname,cachename,luaname)
+ end
+ if utilities.lua.compile(luaname,lucname) then
+ if trace_locating then
+ report_resolvers("'%s' compiled to '%s'",dataname,lucname)
+ end
+ return true
else
- sleep(0.1)
+ if trace_locating then
+ report_resolvers("compiling failed for '%s', deleting file '%s'",dataname,lucname)
+ end
+ os.remove(lucname)
end
+ elseif trace_locating then
+ report_resolvers("unable to save '%s' in '%s' (access error)",dataname,luaname)
end
end
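-- A sketch of the round trip these two helpers provide (tree name and entry are
-- made up; "files" is the dataname the resolvers actually use):
--
--~ local content = { ["context.mkiv"] = "tex/context/base" }   -- invented entry
--~ caches.savecontent("e:/tex/texmf-local","files",content)    -- writes name.lua plus a .luc companion
--~ local reloaded = caches.loadcontent("e:/tex/texmf-local","files")
--~ -- reloaded equals content when type, version and readability all match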
---~ local syslogname = "oeps.xxx"
---~
---~ for i=1,10 do
---~ logs.system(syslogname,"context","test","fonts","font %s recached due to newer version (%s)","blabla","123")
---~ end
-
-function logs.fatal(where,...)
- logs.report(where,"fatal error: %s, aborting now",format(...))
- os.exit()
-end
-
end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['data-inp'] = {
- version = 1.001,
+if not modules then modules = { } end modules ['data-met'] = {
+ version = 1.100,
comment = "companion to luat-lib.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files",
+ license = "see context related readme files"
}
--- After a few years using the code the large luat-inp.lua file
--- has been split up a bit. In the process some functionality was
--- dropped:
---
--- * support for reading lsr files
--- * selective scanning (subtrees)
--- * some public auxiliary functions were made private
---
--- TODO: os.getenv -> os.env[]
--- TODO: instances.[hashes,cnffiles,configurations,522]
--- TODO: check escaping in find etc, too much, too slow
+local find, format = string.find, string.format
+local sequenced = table.sequenced
+local addurlscheme, urlhashed = url.addscheme, url.hashed
--- This lib is multi-purpose and can be loaded again later on so that
--- additional functionality becomes available. We will split this
--- module in components once we're done with prototyping. This is the
--- first code I wrote for LuaTeX, so it needs some cleanup. Before changing
--- something in this module one can best check with Taco or Hans first; there
--- is some nasty trickery going on that relates to traditional kpse support.
+local trace_locating = false
+local trace_methods  = false
--- To be considered: hash key lowercase, first entry in table filename
--- (any case), rest paths (so no need for optimization). Or maybe a
--- separate table that matches lowercase names to mixed case when
--- present. In that case the lower() cases can go away. I will do that
--- only when we run into problems with names ... well ... Iwona-Regular.
+trackers.register("resolvers.locating", function(v) trace_methods = v end)
+trackers.register("resolvers.methods", function(v) trace_methods = v end)
--- Beware, loading and saving is overloaded in luat-tmp!
-local format, gsub, find, lower, upper, match, gmatch = string.format, string.gsub, string.find, string.lower, string.upper, string.match, string.gmatch
-local concat, insert, sortedkeys = table.concat, table.insert, table.sortedkeys
-local next, type = next, type
-local lpegmatch = lpeg.match
+local report_methods = logs.reporter("resolvers","methods")
+
+local allocate = utilities.storage.allocate
-local trace_locating, trace_detail, trace_expansions = false, false, false
+local resolvers = resolvers
-trackers.register("resolvers.locating", function(v) trace_locating = v end)
-trackers.register("resolvers.details", function(v) trace_detail = v end)
-trackers.register("resolvers.expansions", function(v) trace_expansions = v end) -- todo
+local registered = { }
-if not resolvers then
- resolvers = {
- suffixes = { },
- formats = { },
- dangerous = { },
- suffixmap = { },
- alternatives = { },
- locators = { }, -- locate databases
- hashers = { }, -- load databases
- generators = { }, -- generate databases
- }
+local function splitmethod(filename) -- todo: filetype in specification
+ if not filename then
+ return { scheme = "unknown", original = filename }
+ end
+ if type(filename) == "table" then
+ return filename -- already split
+ end
+ filename = file.collapsepath(filename)
+ if not find(filename,"://") then
+ return { scheme = "file", path = filename, original = filename, filename = filename }
+ end
+ local specification = url.hashed(filename)
+ if not specification.scheme or specification.scheme == "" then
+ return { scheme = "file", path = filename, original = filename, filename = filename }
+ else
+ return specification
+ end
end
-local resolvers = resolvers
+resolvers.splitmethod = splitmethod -- bad name but ok
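-- A small sketch of what the splitter returns (paths invented); plain paths fall
-- back to the "file" scheme, anything with "://" goes through url.hashed:
--
--~ local s = resolvers.splitmethod("texmf-local/tex/demo.tex")
--~ -- s.scheme == "file", s.path == s.filename == "texmf-local/tex/demo.tex"
--~ local z = resolvers.splitmethod("zip:///archive.zip?name=demo.tex")
--~ -- z.scheme == "zip" (plus whatever other fields url.hashed provides)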
-resolvers.locators .notfound = { nil }
-resolvers.hashers .notfound = { nil }
-resolvers.generators.notfound = { nil }
-
-resolvers.cacheversion = '1.0.1'
-resolvers.cnfname = 'texmf.cnf'
-resolvers.luaname = 'texmfcnf.lua'
-resolvers.homedir = os.env[os.type == "windows" and 'USERPROFILE'] or os.env['HOME'] or '~'
-resolvers.cnfdefault = '{$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,.local,}/web2c}'
-
-local dummy_path_expr = "^!*unset/*$"
-
-local formats = resolvers.formats
-local suffixes = resolvers.suffixes
-local dangerous = resolvers.dangerous
-local suffixmap = resolvers.suffixmap
-local alternatives = resolvers.alternatives
-
-formats['afm'] = 'AFMFONTS' suffixes['afm'] = { 'afm' }
-formats['enc'] = 'ENCFONTS' suffixes['enc'] = { 'enc' }
-formats['fmt'] = 'TEXFORMATS' suffixes['fmt'] = { 'fmt' }
-formats['map'] = 'TEXFONTMAPS' suffixes['map'] = { 'map' }
-formats['mp'] = 'MPINPUTS' suffixes['mp'] = { 'mp' }
-formats['ocp'] = 'OCPINPUTS' suffixes['ocp'] = { 'ocp' }
-formats['ofm'] = 'OFMFONTS' suffixes['ofm'] = { 'ofm', 'tfm' }
-formats['otf'] = 'OPENTYPEFONTS' suffixes['otf'] = { 'otf' } -- 'ttf'
-formats['opl'] = 'OPLFONTS' suffixes['opl'] = { 'opl' }
-formats['otp'] = 'OTPINPUTS' suffixes['otp'] = { 'otp' }
-formats['ovf'] = 'OVFFONTS' suffixes['ovf'] = { 'ovf', 'vf' }
-formats['ovp'] = 'OVPFONTS' suffixes['ovp'] = { 'ovp' }
-formats['tex'] = 'TEXINPUTS' suffixes['tex'] = { 'tex' }
-formats['tfm'] = 'TFMFONTS' suffixes['tfm'] = { 'tfm' }
-formats['ttf'] = 'TTFONTS' suffixes['ttf'] = { 'ttf', 'ttc', 'dfont' }
-formats['pfb'] = 'T1FONTS' suffixes['pfb'] = { 'pfb', 'pfa' }
-formats['vf'] = 'VFFONTS' suffixes['vf'] = { 'vf' }
-
-formats['fea'] = 'FONTFEATURES' suffixes['fea'] = { 'fea' }
-formats['cid'] = 'FONTCIDMAPS' suffixes['cid'] = { 'cid', 'cidmap' }
-
-formats ['texmfscripts'] = 'TEXMFSCRIPTS' -- new
-suffixes['texmfscripts'] = { 'rb', 'pl', 'py' } -- 'lua'
-
-formats ['lua'] = 'LUAINPUTS' -- new
-suffixes['lua'] = { 'lua', 'luc', 'tma', 'tmc' }
-
--- backward compatible ones
-
-alternatives['map files'] = 'map'
-alternatives['enc files'] = 'enc'
-alternatives['cid maps'] = 'cid' -- great, why no cid files
-alternatives['font feature files'] = 'fea' -- and fea files here
-alternatives['opentype fonts'] = 'otf'
-alternatives['truetype fonts'] = 'ttf'
-alternatives['truetype collections'] = 'ttc'
-alternatives['truetype dictionary'] = 'dfont'
-alternatives['type1 fonts'] = 'pfb'
-
--- obscure ones
-
-formats ['misc fonts'] = ''
-suffixes['misc fonts'] = { }
-
-formats ['sfd'] = 'SFDFONTS'
-suffixes ['sfd'] = { 'sfd' }
-alternatives['subfont definition files'] = 'sfd'
-
--- lib paths
-
-formats ['lib'] = 'CLUAINPUTS' -- new (needs checking)
-suffixes['lib'] = (os.libsuffix and { os.libsuffix }) or { 'dll', 'so' }
+-- the second argument is always analyzed (saves time later on) and the original
+-- gets passed as original but also as argument
--- In practice we will work within one tds tree, but i want to keep
--- the option open to build tools that look at multiple trees, which is
--- why we keep the tree specific data in a table. We used to pass the
--- instance but for practical purposes we now avoid this and use a
--- instance variable.
+local function methodhandler(what,first,...) -- filename can be nil or false
+ local method = registered[what]
+ if method then
+ local how, namespace = method.how, method.namespace
+ if how == "uri" or how == "url" then
+ local specification = splitmethod(first)
+ local scheme = specification.scheme
+ local resolver = namespace and namespace[scheme]
+ if resolver then
+ if trace_methods then
+ report_methods("resolver: method=%s, how=%s, scheme=%s, argument=%s",what,how,scheme,first)
+ end
+ return resolver(specification,...)
+ else
+ resolver = namespace.default or namespace.file
+ if resolver then
+ if trace_methods then
+ report_methods("resolver: method=%s, how=%s, default, argument=%s",what,how,first)
+ end
+ return resolver(specification,...)
+ elseif trace_methods then
+ report_methods("resolver: method=%s, how=%s, no handler",what,how)
+ end
+ end
+ elseif how == "tag" then
+ local resolver = namespace and namespace[first]
+ if resolver then
+ if trace_methods then
+ report_methods("resolver: method=%s, how=%s, tag=%s",what,how,first)
+ end
+ return resolver(...)
+ else
+ resolver = namespace.default or namespace.file
+ if resolver then
+ if trace_methods then
+ report_methods("resolver: method=%s, how=%s, default",what,how)
+ end
+ return resolver(...)
+ elseif trace_methods then
+ report_methods("resolver: method=%s, how=%s, unknown",what,how)
+ end
+ end
+ end
+ else
+ report_methods("resolver: method=%s, unknown",what)
+ end
+end
--- here we catch a few new thingies (todo: add these paths to context.tmf)
---
--- FONTFEATURES = .;$TEXMF/fonts/fea//
--- FONTCIDMAPS = .;$TEXMF/fonts/cid//
+resolvers.methodhandler = methodhandler
+
+function resolvers.registermethod(name,namespace,how)
+ registered[name] = { how = how or "tag", namespace = namespace }
+ namespace["byscheme"] = function(scheme,filename,...)
+ if scheme == "file" then
+ return methodhandler(name,filename,...)
+ else
+ return methodhandler(name,addurlscheme(filename,scheme),...)
+ end
+ end
+end
--- we always have one instance active
+local concatinators = allocate { notfound = file.join } -- concatinate paths
+local locators = allocate { notfound = function() end } -- locate databases
+local hashers = allocate { notfound = function() end } -- load databases
+local generators = allocate { notfound = function() end } -- generate databases
-resolvers.instance = resolvers.instance or nil -- the current one (slow access)
-local instance = resolvers.instance or nil -- the current one (fast access)
+resolvers.concatinators = concatinators
+resolvers.locators = locators
+resolvers.hashers = hashers
+resolvers.generators = generators
-function resolvers.newinstance()
+local registermethod = resolvers.registermethod
- -- store once, freeze and faster (once reset we can best use
- -- instance.environment) maybe better have a register suffix
- -- function
+registermethod("concatinators",concatinators,"tag")
+registermethod("locators", locators, "uri")
+registermethod("hashers", hashers, "uri")
+registermethod("generators", generators, "uri")
- for k, v in next, suffixes do
- for i=1,#v do
- local vi = v[i]
- if vi then
- suffixmap[vi] = k
- end
- end
- end
- -- because vf searching is somewhat dangerous, we want to prevent
- -- too liberal searching esp because we do a lookup on the current
- -- path anyway; only tex (or any) is safe
+end -- of closure
- for k, v in next, formats do
- dangerous[k] = true
- end
- dangerous.tex = nil
+do -- create closure to overcome 200 locals limit
+
+if not modules then modules = { } end modules ['data-res'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files",
+}
+
+-- In practice we will work within one tds tree, but i want to keep
+-- the option open to build tools that look at multiple trees, which is
+-- why we keep the tree specific data in a table. We used to pass the
+-- instance but for practical purposes we now avoid this and use a
+-- instance variable. We always have one instance active (sort of global).
+
+-- todo: cache:/// home:///
+
+local format, gsub, find, lower, upper, match, gmatch = string.format, string.gsub, string.find, string.lower, string.upper, string.match, string.gmatch
+local concat, insert, sortedkeys = table.concat, table.insert, table.sortedkeys
+local next, type, rawget = next, type, rawget
+local os = os
- -- the instance
+local P, S, R, C, Cc, Cs, Ct, Carg = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.Cc, lpeg.Cs, lpeg.Ct, lpeg.Carg
+local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
- local newinstance = {
- rootpath = '',
- treepath = '',
- progname = 'context',
- engine = 'luatex',
- format = '',
- environment = { },
- variables = { },
- expansions = { },
- files = { },
- remap = { },
- configuration = { },
- setup = { },
- order = { },
- found = { },
- foundintrees = { },
- kpsevars = { },
- hashes = { },
- cnffiles = { },
- luafiles = { },
- lists = { },
- remember = true,
- diskcache = true,
- renewcache = false,
- scandisk = true,
- cachepath = nil,
- loaderror = false,
- sortdata = false,
- savelists = true,
- cleanuppaths = true,
- allresults = false,
- pattern = nil, -- lists
- data = { }, -- only for loading
- force_suffixes = true,
- fakepaths = { },
- }
+local filedirname = file.dirname
+local filebasename = file.basename
+local fileextname = file.extname
+local filejoin = file.join
+local collapsepath = file.collapsepath
+local joinpath = file.joinpath
+local allocate = utilities.storage.allocate
+local setmetatableindex = table.setmetatableindex
- local ne = newinstance.environment
+local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
+local trace_detail = false trackers.register("resolvers.details", function(v) trace_detail = v end)
+local trace_expansions = false trackers.register("resolvers.expansions", function(v) trace_expansions = v end)
- for k,v in next, os.env do
- ne[k] = resolvers.bare_variable(v)
- end
+local report_resolving = logs.reporter("resolvers","resolving")
- return newinstance
+local resolvers = resolvers
-end
+local expandedpathfromlist = resolvers.expandedpathfromlist
+local checkedvariable = resolvers.checkedvariable
+local splitconfigurationpath = resolvers.splitconfigurationpath
+local methodhandler = resolvers.methodhandler
-function resolvers.setinstance(someinstance)
- instance = someinstance
- resolvers.instance = someinstance
- return someinstance
-end
+local initializesetter = utilities.setters.initialize
-function resolvers.reset()
- return resolvers.setinstance(resolvers.newinstance())
-end
+local ostype, osname, osenv, ossetenv, osgetenv = os.type, os.name, os.env, os.setenv, os.getenv
-local function reset_hashes()
- instance.lists = { }
- instance.found = { }
-end
+resolvers.cacheversion = '1.0.1'
+resolvers.configbanner = ''
+resolvers.homedir = environment.homedir
+resolvers.criticalvars = allocate { "SELFAUTOLOC", "SELFAUTODIR", "SELFAUTOPARENT", "TEXMFCNF", "TEXMF", "TEXOS" }
+resolvers.luacnfname = 'texmfcnf.lua'
+resolvers.luacnfstate = "unknown"
-local function check_configuration() -- not yet ok, no time for debugging now
- if os.env["OSFONTDIR"] then
- -- ok
- elseif os.type == "windows" then
- os.setenv("OSFONTDIR","c:/windows/fonts//")
- elseif os.type == "macosx" then
- os.setenv("OSFONTDIR","$HOME/Library/Fonts//;/Library/Fonts//;/System/Library/Fonts//")
- end
-end
+-- resolvers.luacnfspec = '{$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,}/web2c}' -- what a rubbish path
+resolvers.luacnfspec = 'selfautoparent:{/texmf{-local,}{,/web2c},}}'
-function resolvers.bare_variable(str) -- assumes str is a string
- return (gsub(str,"\s*([\"\']?)(.+)%1\s*", "%2"))
-end
-function resolvers.settrace(n) -- no longer number but: 'locating' or 'detail'
- if n then
- trackers.disable("resolvers.*")
- trackers.enable("resolvers."..n)
- end
-end
-resolvers.settrace(os.getenv("MTX_INPUT_TRACE"))
+local unset_variable = "unset"
-function resolvers.osenv(key)
- local ie = instance.environment
- local value = ie[key]
- if value == nil then
- -- local e = os.getenv(key)
- local e = os.env[key]
- if e == nil then
- -- value = "" -- false
- else
- value = resolvers.bare_variable(e)
- end
- ie[key] = value
- end
- return value or ""
-end
+local formats = resolvers.formats
+local suffixes = resolvers.suffixes
+local dangerous = resolvers.dangerous
+local suffixmap = resolvers.suffixmap
-function resolvers.env(key)
- return instance.environment[key] or resolvers.osenv(key)
-end
+resolvers.defaultsuffixes = { "tex" } -- "mkiv", "cld" -- too tricky
---
+resolvers.instance = resolvers.instance or nil -- the current one (slow access)
+local instance = resolvers.instance or nil -- the current one (fast access)
-local function expand_vars(lst) -- simple vars
- local variables, env = instance.variables, resolvers.env
- local function resolve(a)
- return variables[a] or env(a)
- end
- for k=1,#lst do
- lst[k] = gsub(lst[k],"%$([%a%d%_%-]+)",resolve)
- end
-end
+-- An instance has an environment (coming from the outside, kept raw), variables
+-- (coming from the configuration file), and expansions (variables with nested
+-- variables replaced). One can push something into the outer environment and
+-- its internal copy, but only the latter one will be the raw unprefixed variant.
-local function expanded_var(var) -- simple vars
- local function resolve(a)
- return instance.variables[a] or resolvers.env(a)
+function resolvers.setenv(key,value,raw)
+ if instance then
+ -- this one will be consulted first when we stay inside
+ -- the current environment
+ instance.environment[key] = value
+ -- we feed back into the environment, and as this is used
+ -- by other applications (via os.execute) we need to make
+ -- sure that prefixes are resolved
+ ossetenv(key,raw and value or resolvers.resolve(value))
end
- return (gsub(var,"%$([%a%d%_%-]+)",resolve))
end
-local function entry(entries,name)
- if name and (name ~= "") then
- name = gsub(name,'%$','')
- local result = entries[name..'.'..instance.progname] or entries[name]
- if result then
- return result
- else
- result = resolvers.env(name)
- if result then
- instance.variables[name] = result
- resolvers.expand_variables()
- return instance.expansions[name] or ""
- end
- end
- end
- return ""
-end
+-- Beware we don't want empty here as this one can be called early on
+-- and therefore we use rawget.
-local function is_entry(entries,name)
- if name and name ~= "" then
- name = gsub(name,'%$','')
- return (entries[name..'.'..instance.progname] or entries[name]) ~= nil
+local function getenv(key)
+ local value = rawget(instance.environment,key)
+ if value and value ~= "" then
+ return value
else
- return false
+ local e = osgetenv(key)
+ return e ~= nil and e ~= "" and checkedvariable(e) or ""
end
end
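-- A hedged sketch (variable and value invented): the instance keeps the raw
-- value while the process environment receives the resolved one:
--
--~ resolvers.setenv("OSFONTDIR","~/fonts")
--~ resolvers.getenv("OSFONTDIR")   -- "~/fonts", straight from the instance copy
--~ os.getenv("OSFONTDIR")          -- whatever resolvers.resolve made of it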
--- {a,b,c,d}
--- a,b,c/{p,q,r},d
--- a,b,c/{p,q,r}/d/{x,y,z}//
--- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
--- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
--- a{b,c}{d,e}f
--- {a,b,c,d}
--- {a,b,c/{p,q,r},d}
--- {a,b,c/{p,q,r}/d/{x,y,z}//}
--- {a,b,c/{p,q/{x,y,z}},d/{p,q,r}}
--- {a,b,c/{p,q/{x,y,z},w}v,d/{p,q,r}}
--- {$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,.local,}/web2c}
+resolvers.getenv = getenv
+resolvers.env = getenv
--- this one is better and faster, but it took me a while to realize
--- that this kind of replacement is cleaner than messy parsing and
--- fuzzy concatenating we can probably gain a bit with selectively
--- applying lpeg, but experiments with lpeg parsing this proved not to
--- work that well; the parsing is ok, but dealing with the resulting
--- table is a pain because we need to work inside-out recursively
+-- We are going to use some metatable trickery where we backtrack from
+-- expansion to variable to environment.
-local function do_first(a,b)
- local t = { }
- for s in gmatch(b,"[^,]+") do t[#t+1] = a .. s end
- return "{" .. concat(t,",") .. "}"
+local function resolve(k)
+ return instance.expansions[k]
end
-local function do_second(a,b)
- local t = { }
- for s in gmatch(a,"[^,]+") do t[#t+1] = s .. b end
- return "{" .. concat(t,",") .. "}"
-end
+local dollarstripper = lpeg.stripper("$")
+local inhibitstripper = P("!")^0 * Cs(P(1)^0)
+local backslashswapper = lpeg.replacer("\\","/")
-local function do_both(a,b)
- local t = { }
- for sa in gmatch(a,"[^,]+") do
- for sb in gmatch(b,"[^,]+") do
- t[#t+1] = sa .. sb
- end
- end
- return "{" .. concat(t,",") .. "}"
-end
+local somevariable = P("$") / ""
+local somekey = C(R("az","AZ","09","__","--")^1)
+local somethingelse = P(";") * ((1-S("!{}/\\"))^1 * P(";") / "")
+ + P(";") * (P(";") / "")
+ + P(1)
+local variableexpander = Cs( (somevariable * (somekey/resolve) + somethingelse)^1 )
-local function do_three(a,b,c)
- return a .. b.. c
-end
+local cleaner = P("\\") / "/" + P(";") * S("!{}/\\")^0 * P(";")^1 / ";"
+local variablecleaner = Cs((cleaner + P(1))^0)
-local function splitpathexpr(str, t, validate)
- -- no need for further optimization as it is only called a
- -- few times, we can use lpeg for the sub
- if trace_expansions then
- logs.report("fileio","expanding variable '%s'",str)
- end
- t = t or { }
- str = gsub(str,",}",",@}")
- str = gsub(str,"{,","{@,")
- -- str = "@" .. str .. "@"
- local ok, done
- while true do
- done = false
- while true do
- str, ok = gsub(str,"([^{},]+){([^{}]+)}",do_first)
- if ok > 0 then done = true else break end
- end
- while true do
- str, ok = gsub(str,"{([^{}]+)}([^{},]+)",do_second)
- if ok > 0 then done = true else break end
- end
- while true do
- str, ok = gsub(str,"{([^{}]+)}{([^{}]+)}",do_both)
- if ok > 0 then done = true else break end
- end
- str, ok = gsub(str,"({[^{}]*){([^{}]+)}([^{}]*})",do_three)
- if ok > 0 then done = true end
- if not done then break end
- end
- str = gsub(str,"[{}]", "")
- str = gsub(str,"@","")
- if validate then
- for s in gmatch(str,"[^,]+") do
- s = validate(s)
- if s then t[#t+1] = s end
- end
- else
- for s in gmatch(str,"[^,]+") do
- t[#t+1] = s
- end
- end
- if trace_expansions then
- for k=1,#t do
- logs.report("fileio","% 4i: %s",k,t[k])
- end
- end
- return t
-end
+local somevariable = R("az","AZ","09","__","--")^1 / resolve
+local variable = (P("$")/"") * (somevariable + (P("{")/"") * somevariable * (P("}")/""))
+local variableresolver = Cs((variable + P(1))^0)
-local function expanded_path_from_list(pathlist) -- maybe not a list, just a path
- -- a previous version fed back into pathlist
- local newlist, ok = { }, false
- for k=1,#pathlist do
- if find(pathlist[k],"[{}]") then
- ok = true
- break
- end
- end
- if ok then
- local function validate(s)
- s = file.collapse_path(s)
- return s ~= "" and not find(s,dummy_path_expr) and s
- end
- for k=1,#pathlist do
- splitpathexpr(pathlist[k],newlist,validate)
- end
- else
- for k=1,#pathlist do
- for p in gmatch(pathlist[k],"([^,]+)") do
- p = file.collapse_path(p)
- if p ~= "" then newlist[#newlist+1] = p end
- end
- end
- end
- return newlist
+local function expandedvariable(var)
+ return lpegmatch(variableexpander,var) or var
end
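-- A sketch of what these patterns do (values invented); resolve reads
-- instance.expansions, so this only makes sense once an instance is active:
--
--~ instance.expansions.TEXMF = "e:/tex/texmf"   -- pretend expansion
--~ lpegmatch(variableresolver,"$TEXMF/fonts")   -- "e:/tex/texmf/fonts"
--~ lpegmatch(variableresolver,"${TEXMF}/tex")   -- "e:/tex/texmf/tex"
--~ lpegmatch(dollarstripper,"$TEXMF")           -- "TEXMF"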
--- we follow a rather traditional approach:
---
--- (1) texmf.cnf given in TEXMFCNF
--- (2) texmf.cnf searched in default variable
---
--- also we now follow the stupid route: if not set then just assume *one*
--- cnf file under texmf (i.e. distribution)
+function resolvers.newinstance() -- todo: all vars will become lowercase and alphanum only
-local args = environment and environment.original_arguments or arg -- this needs a cleanup
+ if trace_locating then
+ report_resolving("creating instance")
+ end
-resolvers.ownbin = resolvers.ownbin or args[-2] or arg[-2] or args[-1] or arg[-1] or arg[0] or "luatex"
-resolvers.ownbin = gsub(resolvers.ownbin,"\\","/")
+ local environment, variables, expansions, order = allocate(), allocate(), allocate(), allocate()
-function resolvers.getownpath()
- local ownpath = resolvers.ownpath or os.selfdir
- if not ownpath or ownpath == "" or ownpath == "unset" then
- ownpath = args[-1] or arg[-1]
- ownpath = ownpath and file.dirname(gsub(ownpath,"\\","/"))
- if not ownpath or ownpath == "" then
- ownpath = args[-0] or arg[-0]
- ownpath = ownpath and file.dirname(gsub(ownpath,"\\","/"))
- end
- local binary = resolvers.ownbin
- if not ownpath or ownpath == "" then
- ownpath = ownpath and file.dirname(binary)
- end
- if not ownpath or ownpath == "" then
- if os.binsuffix ~= "" then
- binary = file.replacesuffix(binary,os.binsuffix)
- end
- for p in gmatch(os.getenv("PATH"),"[^"..io.pathseparator.."]+") do
- local b = file.join(p,binary)
- if lfs.isfile(b) then
- -- we assume that after changing to the path the currentdir function
- -- resolves to the real location and use this side effect here; this
- -- trick is needed because on the mac installations use symlinks in the
- -- path instead of real locations
- local olddir = lfs.currentdir()
- if lfs.chdir(p) then
- local pp = lfs.currentdir()
- if trace_locating and p ~= pp then
- logs.report("fileio","following symlink '%s' to '%s'",p,pp)
- end
- ownpath = pp
- lfs.chdir(olddir)
- else
- if trace_locating then
- logs.report("fileio","unable to check path '%s'",p)
- end
- ownpath = p
- end
- break
- end
+ local newinstance = {
+ environment = environment,
+ variables = variables,
+ expansions = expansions,
+ order = order,
+ files = allocate(),
+ setups = allocate(),
+ found = allocate(),
+ foundintrees = allocate(),
+ hashes = allocate(),
+ hashed = allocate(),
+ specification = allocate(),
+ lists = allocate(),
+ data = allocate(), -- only for loading
+ fakepaths = allocate(),
+ remember = true,
+ diskcache = true,
+ renewcache = false,
+ loaderror = false,
+ savelists = true,
+ pattern = nil, -- lists
+ force_suffixes = true,
+ }
+
+ setmetatableindex(variables,function(t,k)
+ local v
+ for i=1,#order do
+ v = order[i][k]
+ if v ~= nil then
+ t[k] = v
+ return v
end
end
- if not ownpath or ownpath == "" then
- ownpath = "."
- logs.report("fileio","forcing fallback ownpath .")
- elseif trace_locating then
- logs.report("fileio","using ownpath '%s'",ownpath)
+ if v == nil then
+ v = ""
end
- end
- resolvers.ownpath = ownpath
- function resolvers.getownpath()
- return resolvers.ownpath
- end
- return ownpath
-end
-
-local own_places = { "SELFAUTOLOC", "SELFAUTODIR", "SELFAUTOPARENT", "TEXMFCNF" }
+ t[k] = v
+ return v
+ end)
-local function identify_own()
- local ownpath = resolvers.getownpath() or dir.current()
- local ie = instance.environment
- if ownpath then
- if resolvers.env('SELFAUTOLOC') == "" then os.env['SELFAUTOLOC'] = file.collapse_path(ownpath) end
- if resolvers.env('SELFAUTODIR') == "" then os.env['SELFAUTODIR'] = file.collapse_path(ownpath .. "/..") end
- if resolvers.env('SELFAUTOPARENT') == "" then os.env['SELFAUTOPARENT'] = file.collapse_path(ownpath .. "/../..") end
- else
- logs.report("fileio","error: unable to locate ownpath")
- os.exit()
- end
- if resolvers.env('TEXMFCNF') == "" then os.env['TEXMFCNF'] = resolvers.cnfdefault end
- if resolvers.env('TEXOS') == "" then os.env['TEXOS'] = resolvers.env('SELFAUTODIR') end
- if resolvers.env('TEXROOT') == "" then os.env['TEXROOT'] = resolvers.env('SELFAUTOPARENT') end
- if trace_locating then
- for i=1,#own_places do
- local v = own_places[i]
- logs.report("fileio","variable '%s' set to '%s'",v,resolvers.env(v) or "unknown")
+ setmetatableindex(environment, function(t,k)
+ local v = osgetenv(k)
+ if v == nil then
+ v = variables[k]
end
- end
- identify_own = function() end
-end
-
-function resolvers.identify_cnf()
- if #instance.cnffiles == 0 then
- -- fallback
- identify_own()
- -- the real search
- resolvers.expand_variables()
- local t = resolvers.split_path(resolvers.env('TEXMFCNF'))
- t = expanded_path_from_list(t)
- expand_vars(t) -- redundant
- local function locate(filename,list)
- for i=1,#t do
- local ti = t[i]
- local texmfcnf = file.collapse_path(file.join(ti,filename))
- if lfs.isfile(texmfcnf) then
- list[#list+1] = texmfcnf
- end
- end
+ if v ~= nil then
+ v = checkedvariable(v) or ""
end
- locate(resolvers.luaname,instance.luafiles)
- locate(resolvers.cnfname,instance.cnffiles)
- end
-end
+ v = resolvers.repath(v) -- for taco who has a : separated osfontdir
+ t[k] = v
+ return v
+ end)
-local function load_cnf_file(fname)
- fname = resolvers.clean_path(fname)
- local lname = file.replacesuffix(fname,'lua')
- if lfs.isfile(lname) then
- local dname = file.dirname(fname) -- fname ?
- if not instance.configuration[dname] then
- resolvers.load_data(dname,'configuration',lname and file.basename(lname))
- instance.order[#instance.order+1] = instance.configuration[dname]
- end
- else
- f = io.open(fname)
- if f then
- if trace_locating then
- logs.report("fileio","loading configuration file %s", fname)
- end
- local line, data, n, k, v
- local dname = file.dirname(fname)
- if not instance.configuration[dname] then
- instance.configuration[dname] = { }
- instance.order[#instance.order+1] = instance.configuration[dname]
- end
- local data = instance.configuration[dname]
- while true do
- local line, n = f:read(), 0
- if line then
- while true do -- join lines
- line, n = gsub(line,"\\%s*$", "")
- if n > 0 then
- line = line .. f:read()
- else
- break
- end
- end
- if not find(line,"^[%%#]") then
- local l = gsub(line,"%s*%%.*$","")
- local k, v = match(l,"%s*(.-)%s*=%s*(.-)%s*$")
- if k and v and not data[k] then
- v = gsub(v,"[%%#].*",'')
- data[k] = gsub(v,"~","$HOME")
- instance.kpsevars[k] = true
- end
- end
- else
- break
- end
- end
- f:close()
- elseif trace_locating then
- logs.report("fileio","skipping configuration file '%s'", fname)
+ setmetatableindex(expansions, function(t,k)
+ local v = environment[k]
+ if type(v) == "string" then
+ v = lpegmatch(variableresolver,v)
+ v = lpegmatch(variablecleaner,v)
end
- end
+ t[k] = v
+ return v
+ end)
+
+ return newinstance
+
end
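-- A hedged sketch of the lookup chain set up above (values invented): a raw
-- assignment to variables shows up in environment (unless the real environment
-- wins) and, expanded and cleaned, in expansions:
--
--~ local i = resolvers.newinstance()
--~ i.variables.TEXMFCACHE = "~/cache"   -- as if a configuration file provided it
--~ print(i.environment.TEXMFCACHE)      -- the os environment wins, else "~/cache"
--~ print(i.expansions.TEXMFCACHE)       -- the same, with $VAR references expanded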
-local function collapse_cnf_data() -- potential optimization: pass start index (setup and configuration are shared)
- local order = instance.order
- for i=1,#order do
- local c = order[i]
- for k,v in next, c do
- if not instance.variables[k] then
- if instance.environment[k] then
- instance.variables[k] = instance.environment[k]
- else
- instance.kpsevars[k] = true
- instance.variables[k] = resolvers.bare_variable(v)
- end
- end
- end
- end
+function resolvers.setinstance(someinstance) -- only one instance is active
+ instance = someinstance
+ resolvers.instance = someinstance
+ return someinstance
end
-function resolvers.load_cnf()
- local function loadoldconfigdata()
- local cnffiles = instance.cnffiles
- for i=1,#cnffiles do
- load_cnf_file(cnffiles[i])
- end
- end
- -- instance.cnffiles contain complete names now !
- -- we still use a funny mix of cnf and new but soon
- -- we will switch to lua exclusively as we only use
- -- the file to collect the tree roots
- if #instance.cnffiles == 0 then
- if trace_locating then
- logs.report("fileio","no cnf files found (TEXMFCNF may not be set/known)")
- end
- else
- local cnffiles = instance.cnffiles
- instance.rootpath = cnffiles[1]
- for k=1,#cnffiles do
- instance.cnffiles[k] = file.collapse_path(cnffiles[k])
- end
- for i=1,3 do
- instance.rootpath = file.dirname(instance.rootpath)
- end
- instance.rootpath = file.collapse_path(instance.rootpath)
- if instance.diskcache and not instance.renewcache then
- resolvers.loadoldconfig(instance.cnffiles)
- if instance.loaderror then
- loadoldconfigdata()
- resolvers.saveoldconfig()
- end
- else
- loadoldconfigdata()
- if instance.renewcache then
- resolvers.saveoldconfig()
- end
- end
- collapse_cnf_data()
- end
- check_configuration()
+function resolvers.reset()
+ return resolvers.setinstance(resolvers.newinstance())
end
-function resolvers.load_lua()
- if #instance.luafiles == 0 then
- -- yet harmless
- else
- instance.rootpath = instance.luafiles[1]
- local luafiles = instance.luafiles
- for k=1,#luafiles do
- instance.luafiles[k] = file.collapse_path(luafiles[k])
- end
- for i=1,3 do
- instance.rootpath = file.dirname(instance.rootpath)
- end
- instance.rootpath = file.collapse_path(instance.rootpath)
- resolvers.loadnewconfig()
- collapse_cnf_data()
- end
- check_configuration()
+local function reset_hashes()
+ instance.lists = { }
+ instance.found = { }
end
--- database loading
+local slash = P("/")
-function resolvers.load_hash()
- resolvers.locatelists()
- if instance.diskcache and not instance.renewcache then
- resolvers.loadfiles()
- if instance.loaderror then
- resolvers.loadlists()
- resolvers.savefiles()
- end
+local pathexpressionpattern = Cs (
+ Cc("^") * (
+ Cc("%") * S(".-")
+ + slash^2 * P(-1) / "/.*"
+ + slash^2 / "/.-/"
+ + (1-slash) * P(-1) * Cc("/")
+ + P(1)
+ )^1 * Cc("$") -- yes or no $
+)
+
+local cache = { }
+
+local function makepathexpression(str)
+ if str == "." then
+ return "^%./$"
else
- resolvers.loadlists()
- if instance.renewcache then
- resolvers.savefiles()
+ local c = cache[str]
+ if not c then
+ c = lpegmatch(pathexpressionpattern,str)
+ cache[str] = c
end
+ return c
end
end
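-- What the expression maker produces (a sketch traced from the lpeg above):
--
--~ makepathexpression(".")               -- "^%./$"
--~ makepathexpression("texmf/fonts//")   -- "^texmf/fonts/.*$"  (trailing // scans deeper)
--~ makepathexpression("texmf//fonts")    -- "^texmf/.-/fonts/$" (inner // matches any level)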
-function resolvers.append_hash(type,tag,name)
- if trace_locating then
- logs.report("fileio","hash '%s' appended",tag)
- end
- insert(instance.hashes, { ['type']=type, ['tag']=tag, ['name']=name } )
-end
-
-function resolvers.prepend_hash(type,tag,name)
+local function reportcriticalvariables()
if trace_locating then
- logs.report("fileio","hash '%s' prepended",tag)
- end
- insert(instance.hashes, 1, { ['type']=type, ['tag']=tag, ['name']=name } )
-end
-
-function resolvers.extend_texmf_var(specification) -- crap, we could better prepend the hash
--- local t = resolvers.expanded_path_list('TEXMF') -- full expansion
- local t = resolvers.split_path(resolvers.env('TEXMF'))
- insert(t,1,specification)
- local newspec = concat(t,";")
- if instance.environment["TEXMF"] then
- instance.environment["TEXMF"] = newspec
- elseif instance.variables["TEXMF"] then
- instance.variables["TEXMF"] = newspec
- else
- -- weird
+ for i=1,#resolvers.criticalvars do
+ local k = resolvers.criticalvars[i]
+ local v = resolvers.getenv(k) or "unknown" -- this one will not resolve !
+ report_resolving("variable '%s' set to '%s'",k,v)
+ end
+ report_resolving()
end
- resolvers.expand_variables()
- reset_hashes()
+ reportcriticalvariables = function() end
end
--- locators
-
-function resolvers.locatelists()
- local texmfpaths = resolvers.clean_path_list('TEXMF')
- for i=1,#texmfpaths do
- local path = texmfpaths[i]
+local function identify_configuration_files()
+ local specification = instance.specification
+ if #specification == 0 then
+ local cnfspec = getenv('TEXMFCNF')
+ if cnfspec == "" then
+ cnfspec = resolvers.luacnfspec
+ resolvers.luacnfstate = "default"
+ else
+ resolvers.luacnfstate = "environment"
+ end
+ reportcriticalvariables()
+ local cnfpaths = expandedpathfromlist(resolvers.splitpath(cnfspec))
+ local luacnfname = resolvers.luacnfname
+ for i=1,#cnfpaths do
+ local filename = collapsepath(filejoin(cnfpaths[i],luacnfname))
+ local realname = resolvers.resolve(filename)
+ if lfs.isfile(realname) then
+ specification[#specification+1] = filename
+ if trace_locating then
+ report_resolving("found configuration file '%s'",realname)
+ end
+ elseif trace_locating then
+ report_resolving("unknown configuration file '%s'",realname)
+ end
+ end
if trace_locating then
- logs.report("fileio","locating list of '%s'",path)
+ report_resolving()
end
- resolvers.locatedatabase(file.collapse_path(path))
- end
-end
-
-function resolvers.locatedatabase(specification)
- return resolvers.methodhandler('locators', specification)
-end
+ elseif trace_locating then
+ report_resolving("configuration files already identified")
+ end
+end
+
+local function load_configuration_files()
+ local specification = instance.specification
+ if #specification > 0 then
+ local luacnfname = resolvers.luacnfname
+ for i=1,#specification do
+ local filename = specification[i]
+ local pathname = filedirname(filename)
+ local filename = filejoin(pathname,luacnfname)
+ local realname = resolvers.resolve(filename) -- no shortcut
+ local blob = loadfile(realname)
+ if blob then
+ local setups = instance.setups
+ local data = blob()
+ data = data and data.content
+ if data then
+ if trace_locating then
+ report_resolving("loading configuration file '%s'",filename)
+ report_resolving()
+ end
+ local variables = data.variables or { }
+ local warning = false
+ for k, v in next, data do
+ local variant = type(v)
+ if variant == "table" then
+ initializesetter(filename,k,v)
+ elseif variables[k] == nil then
+ if trace_locating and not warning then
+ report_resolving("variables like '%s' in configuration file '%s' should move to the 'variables' subtable",
+ k,resolvers.resolve(filename))
+ warning = true
+ end
+ variables[k] = v
+ end
+ end
+ setups[pathname] = variables
+ if resolvers.luacnfstate == "default" then
+ -- the following code is not tested
+ local cnfspec = variables["TEXMFCNF"]
+ if cnfspec then
+ if trace_locating then
+ report_resolving("reloading configuration due to TEXMF redefinition")
+ end
+ -- we push the value into the main environment (osenv) so
+ -- that it takes precedence over the default one and therefore
+ -- also over following definitions
+ resolvers.setenv('TEXMFCNF',cnfspec) -- resolves prefixes
+ -- we now identify and load the specified configuration files
+ instance.specification = { }
+ identify_configuration_files()
+ load_configuration_files()
+ -- we prevent further overload of the configuration variable
+ resolvers.luacnfstate = "configuration"
+ -- we quit the outer loop
+ break
+ end
+ end
-function resolvers.locators.tex(specification)
- if specification and specification ~= '' and lfs.isdir(specification) then
- if trace_locating then
- logs.report("fileio","tex locator '%s' found",specification)
+ else
+ if trace_locating then
+ report_resolving("skipping configuration file '%s' (no content)",filename)
+ end
+ setups[pathname] = { }
+ instance.loaderror = true
+ end
+ elseif trace_locating then
+ report_resolving("skipping configuration file '%s' (no file)",filename)
+ end
+ instance.order[#instance.order+1] = instance.setups[pathname]
+ if instance.loaderror then
+ break
+ end
end
- resolvers.append_hash('file',specification,filename)
elseif trace_locating then
- logs.report("fileio","tex locator '%s' not found",specification)
+ report_resolving("warning: no lua configuration files found")
end
end
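-- The shape such a configuration file is expected to have (a minimal, made-up
-- example; only the content field matters here, with plain settings living in
-- its variables subtable and table valued entries going to the setters):
--
--~ return {
--~     content = {
--~         variables = {
--~             TEXMFCACHE = "~/.cache/context",
--~             OSFONTDIR  = "/usr/share/fonts",
--~         },
--~     },
--~ }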
--- hashers
-
-function resolvers.hashdatabase(tag,name)
- return resolvers.methodhandler('hashers',tag,name)
-end
+-- scheme magic ... database loading
-function resolvers.loadfiles()
- instance.loaderror = false
- instance.files = { }
+local function load_file_databases()
+ instance.loaderror, instance.files = false, allocate()
if not instance.renewcache then
local hashes = instance.hashes
for k=1,#hashes do
local hash = hashes[k]
- resolvers.hashdatabase(hash.tag,hash.name)
+ resolvers.hashers.byscheme(hash.type,hash.name)
if instance.loaderror then break end
end
end
end
-function resolvers.hashers.tex(tag,name)
- resolvers.load_data(tag,'files')
-end
-
--- generators:
-
-function resolvers.loadlists()
- local hashes = instance.hashes
- for i=1,#hashes do
- resolvers.generatedatabase(hashes[i].tag)
- end
-end
-
-function resolvers.generatedatabase(specification)
- return resolvers.methodhandler('generators', specification)
-end
-
--- starting with . or .. etc or funny char
-
-local weird = lpeg.P(".")^1 + lpeg.anywhere(lpeg.S("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t"))
-
---~ local l_forbidden = lpeg.S("~`!#$%^&*()={}[]:;\"\'||\\/<>,?\n\r\t")
---~ local l_confusing = lpeg.P(" ")
---~ local l_character = lpeg.patterns.utf8
---~ local l_dangerous = lpeg.P(".")
-
---~ local l_normal = (l_character - l_forbidden - l_confusing - l_dangerous) * (l_character - l_forbidden - l_confusing^2)^0 * lpeg.P(-1)
---~ ----- l_normal = l_normal * lpeg.Cc(true) + lpeg.Cc(false)
-
---~ local function test(str)
---~ print(str,lpeg.match(l_normal,str))
---~ end
---~ test("ヒラギノ明朝 Pro W3")
---~ test("..ヒラギノ明朝 Pro W3")
---~ test(":ヒラギノ明朝 Pro W3;")
---~ test("ヒラギノ明朝 /Pro W3;")
---~ test("ヒラギノ明朝 Pro W3")
-
-function resolvers.generators.tex(specification)
- local tag = specification
- if trace_locating then
- logs.report("fileio","scanning path '%s'",specification)
- end
- instance.files[tag] = { }
- local files = instance.files[tag]
- local n, m, r = 0, 0, 0
- local spec = specification .. '/'
- local attributes = lfs.attributes
- local directory = lfs.dir
- local function action(path)
- local full
- if path then
- full = spec .. path .. '/'
- else
- full = spec
- end
- for name in directory(full) do
- if not lpegmatch(weird,name) then
- -- if lpegmatch(l_normal,name) then
- local mode = attributes(full..name,'mode')
- if mode == 'file' then
- if path then
- n = n + 1
- local f = files[name]
- if f then
- if type(f) == 'string' then
- files[name] = { f, path }
- else
- f[#f+1] = path
- end
- else -- probably unique anyway
- files[name] = path
- local lower = lower(name)
- if name ~= lower then
- files["remap:"..lower] = name
- r = r + 1
- end
- end
- end
- elseif mode == 'directory' then
- m = m + 1
- if path then
- action(path..'/'..name)
+local function locate_file_databases()
+ -- todo: cache:// and tree:// (runtime)
+ local texmfpaths = resolvers.expandedpathlist('TEXMF')
+ if #texmfpaths > 0 then
+ for i=1,#texmfpaths do
+ local path = collapsepath(texmfpaths[i])
+ local stripped = lpegmatch(inhibitstripper,path) -- the !! thing
+ if stripped ~= "" then
+ local runtime = stripped == path
+ path = resolvers.cleanpath(path)
+ local spec = resolvers.splitmethod(stripped)
+ if spec.scheme == "cache" or spec.scheme == "file" then
+ stripped = spec.path
+ elseif runtime and (spec.noscheme or spec.scheme == "file") then
+ stripped = "tree:///" .. stripped
+ end
+ if trace_locating then
+ if runtime then
+ report_resolving("locating list of '%s' (runtime)",path)
else
- action(name)
+ report_resolving("locating list of '%s' (cached)",path)
end
end
+ methodhandler('locators',stripped)
end
end
- end
- action()
- if trace_locating then
- logs.report("fileio","%s files found on %s directories with %s uppercase remappings",n,m,r)
+ if trace_locating then
+ report_resolving()
+ end
+ elseif trace_locating then
+ report_resolving("no texmf paths are defined (using TEXMF)")
end
end
--- savers, todo
-
-function resolvers.savefiles()
- resolvers.save_data('files')
+local function generate_file_databases()
+ local hashes = instance.hashes
+ for k=1,#hashes do
+ local hash = hashes[k]
+ methodhandler('generators',hash.name)
+ end
+ if trace_locating then
+ report_resolving()
+ end
end
--- A config (optionally) has the paths split in tables. Internally
--- we join them and split them after the expansion has taken place. This
--- is more convenient.
-
---~ local checkedsplit = string.checkedsplit
-
-local cache = { }
-
-local splitter = lpeg.Ct(lpeg.splitat(lpeg.S(os.type == "windows" and ";" or ":;")))
-
-local function split_kpse_path(str) -- beware, this can be either a path or a {specification}
- local found = cache[str]
- if not found then
- if str == "" then
- found = { }
- else
- str = gsub(str,"\\","/")
---~ local split = (find(str,";") and checkedsplit(str,";")) or checkedsplit(str,io.pathseparator)
-local split = lpegmatch(splitter,str)
- found = { }
- for i=1,#split do
- local s = split[i]
- if not find(s,"^{*unset}*") then
- found[#found+1] = s
- end
- end
- if trace_expansions then
- logs.report("fileio","splitting path specification '%s'",str)
- for k=1,#found do
- logs.report("fileio","% 4i: %s",k,found[k])
- end
+local function save_file_databases() -- will become cachers
+ for i=1,#instance.hashes do
+ local hash = instance.hashes[i]
+ local cachename = hash.name
+ if hash.cache then
+ local content = instance.files[cachename]
+ caches.collapsecontent(content)
+ if trace_locating then
+ report_resolving("saving tree '%s'",cachename)
end
- cache[str] = found
+ caches.savecontent(cachename,"files",content)
+ elseif trace_locating then
+ report_resolving("not saving runtime tree '%s'",cachename)
end
end
- return found
end
-resolvers.split_kpse_path = split_kpse_path
-
-function resolvers.splitconfig()
- for i=1,#instance do
- local c = instance[i]
- for k,v in next, c do
- if type(v) == 'string' then
- local t = split_kpse_path(v)
- if #t > 1 then
- c[k] = t
- end
- end
+local function load_databases()
+ locate_file_databases()
+ if instance.diskcache and not instance.renewcache then
+ load_file_databases()
+ if instance.loaderror then
+ generate_file_databases()
+ save_file_databases()
+ end
+ else
+ generate_file_databases()
+ if instance.renewcache then
+ save_file_databases()
end
end
end
-function resolvers.joinconfig()
- local order = instance.order
- for i=1,#order do
- local c = order[i]
- for k,v in next, c do -- indexed?
- if type(v) == 'table' then
- c[k] = file.join_path(v)
- end
+function resolvers.appendhash(type,name,cache)
+ -- safeguard ... tricky as it's actually a bug when seen twice
+ if not instance.hashed[name] then
+ if trace_locating then
+ report_resolving("hash '%s' appended",name)
end
+ insert(instance.hashes, { type = type, name = name, cache = cache } )
+ instance.hashed[name] = cache
end
end
-function resolvers.split_path(str)
- if type(str) == 'table' then
- return str
- else
- return split_kpse_path(str)
+function resolvers.prependhash(type,name,cache)
+ -- safeguard ... tricky as it's actually a bug when seen twice
+ if not instance.hashed[name] then
+ if trace_locating then
+ report_resolving("hash '%s' prepended",name)
+ end
+ insert(instance.hashes, 1, { type = type, name = name, cache = cache } )
+ instance.hashed[name] = cache
end
end
-function resolvers.join_path(str)
- if type(str) == 'table' then
- return file.join_path(str)
+function resolvers.extendtexmfvariable(specification) -- crap, we could better prepend the hash
+ local t = resolvers.splitpath(getenv('TEXMF'))
+ insert(t,1,specification)
+ local newspec = concat(t,";")
+ if instance.environment["TEXMF"] then
+ instance.environment["TEXMF"] = newspec
+ elseif instance.variables["TEXMF"] then
+ instance.variables["TEXMF"] = newspec
else
- return str
+ -- weird
end
+ reset_hashes()
end
function resolvers.splitexpansions()
local ie = instance.expansions
for k,v in next, ie do
- local t, h, p = { }, { }, split_kpse_path(v)
+ local t, tn, h, p = { }, 0, { }, splitconfigurationpath(v)
for kk=1,#p do
local vv = p[kk]
if vv ~= "" and not h[vv] then
- t[#t+1] = vv
+ tn = tn + 1
+ t[tn] = vv
h[vv] = true
end
end
@@ -9184,343 +11600,92 @@ end
-- end of split/join code
-function resolvers.saveoldconfig()
- resolvers.splitconfig()
- resolvers.save_data('configuration')
- resolvers.joinconfig()
-end
-
-resolvers.configbanner = [[
--- This is a Luatex configuration file created by 'luatools.lua' or
--- 'luatex.exe' directly. For comment, suggestions and questions you can
--- contact the ConTeXt Development Team. This configuration file is
--- not copyrighted. [HH & TH]
-]]
-
-function resolvers.serialize(files)
- -- This version is somewhat optimized for the kind of
- -- tables that we deal with, so it's much faster than
- -- the generic serializer. This makes sense because
- -- luatools and mtxtools are called frequently. Okay,
- -- we pay a small price for properly tabbed tables.
- local t = { }
- local function dump(k,v,m) -- could be moved inline
- if type(v) == 'string' then
- return m .. "['" .. k .. "']='" .. v .. "',"
- elseif #v == 1 then
- return m .. "['" .. k .. "']='" .. v[1] .. "',"
- else
- return m .. "['" .. k .. "']={'" .. concat(v,"','").. "'},"
- end
- end
- t[#t+1] = "return {"
- if instance.sortdata then
- local sortedfiles = sortedkeys(files)
- for i=1,#sortedfiles do
- local k = sortedfiles[i]
- local fk = files[k]
- if type(fk) == 'table' then
- t[#t+1] = "\t['" .. k .. "']={"
- local sortedfk = sortedkeys(fk)
- for j=1,#sortedfk do
- local kk = sortedfk[j]
- t[#t+1] = dump(kk,fk[kk],"\t\t")
- end
- t[#t+1] = "\t},"
- else
- t[#t+1] = dump(k,fk,"\t")
- end
- end
- else
- for k, v in next, files do
- if type(v) == 'table' then
- t[#t+1] = "\t['" .. k .. "']={"
- for kk,vv in next, v do
- t[#t+1] = dump(kk,vv,"\t\t")
- end
- t[#t+1] = "\t},"
- else
- t[#t+1] = dump(k,v,"\t")
- end
- end
- end
- t[#t+1] = "}"
- return concat(t,"\n")
-end
-
-local data_state = { }
-
-function resolvers.data_state()
- return data_state or { }
-end
-
-function resolvers.save_data(dataname, makename) -- untested without cache overload
- for cachename, files in next, instance[dataname] do
- local name = (makename or file.join)(cachename,dataname)
- local luaname, lucname = name .. ".lua", name .. ".luc"
- if trace_locating then
- logs.report("fileio","preparing '%s' for '%s'",dataname,cachename)
- end
- for k, v in next, files do
- if type(v) == "table" and #v == 1 then
- files[k] = v[1]
- end
- end
- local data = {
- type = dataname,
- root = cachename,
- version = resolvers.cacheversion,
- date = os.date("%Y-%m-%d"),
- time = os.date("%H:%M:%S"),
- content = files,
- uuid = os.uuid(),
- }
- local ok = io.savedata(luaname,resolvers.serialize(data))
- if ok then
- if trace_locating then
- logs.report("fileio","'%s' saved in '%s'",dataname,luaname)
- end
- if utils.lua.compile(luaname,lucname,false,true) then -- no cleanup but strip
- if trace_locating then
- logs.report("fileio","'%s' compiled to '%s'",dataname,lucname)
- end
- else
- if trace_locating then
- logs.report("fileio","compiling failed for '%s', deleting file '%s'",dataname,lucname)
- end
- os.remove(lucname)
- end
- elseif trace_locating then
- logs.report("fileio","unable to save '%s' in '%s' (access error)",dataname,luaname)
- end
- end
-end
-
-function resolvers.load_data(pathname,dataname,filename,makename) -- untested without cache overload
- filename = ((not filename or (filename == "")) and dataname) or filename
- filename = (makename and makename(dataname,filename)) or file.join(pathname,filename)
- local blob = loadfile(filename .. ".luc") or loadfile(filename .. ".lua")
- if blob then
- local data = blob()
- if data and data.content and data.type == dataname and data.version == resolvers.cacheversion then
- data_state[#data_state+1] = data.uuid
- if trace_locating then
- logs.report("fileio","loading '%s' for '%s' from '%s'",dataname,pathname,filename)
- end
- instance[dataname][pathname] = data.content
- else
- if trace_locating then
- logs.report("fileio","skipping '%s' for '%s' from '%s'",dataname,pathname,filename)
- end
- instance[dataname][pathname] = { }
- instance.loaderror = true
- end
- elseif trace_locating then
- logs.report("fileio","skipping '%s' for '%s' from '%s'",dataname,pathname,filename)
- end
-end
-
--- some day i'll use the nested approach, but not yet (actually we even drop
--- engine/progname support since we have only luatex now)
---
--- first texmfcnf.lua files are located, next the cached texmf.cnf files
---
--- return {
--- TEXMFBOGUS = 'effe checken of dit werkt',
--- }
-
-function resolvers.resetconfig()
- identify_own()
- instance.configuration, instance.setup, instance.order, instance.loaderror = { }, { }, { }, false
-end
-
-function resolvers.loadnewconfig()
- local luafiles = instance.luafiles
- for i=1,#luafiles do
- local cnf = luafiles[i]
- local pathname = file.dirname(cnf)
- local filename = file.join(pathname,resolvers.luaname)
- local blob = loadfile(filename)
- if blob then
- local data = blob()
- if data then
- if trace_locating then
- logs.report("fileio","loading configuration file '%s'",filename)
- end
- if true then
- -- flatten to variable.progname
- local t = { }
- for k, v in next, data do -- v = progname
- if type(v) == "string" then
- t[k] = v
- else
- for kk, vv in next, v do -- vv = variable
- if type(vv) == "string" then
- t[vv.."."..v] = kk
- end
- end
- end
- end
- instance['setup'][pathname] = t
- else
- instance['setup'][pathname] = data
- end
- else
- if trace_locating then
- logs.report("fileio","skipping configuration file '%s'",filename)
- end
- instance['setup'][pathname] = { }
- instance.loaderror = true
- end
- elseif trace_locating then
- logs.report("fileio","skipping configuration file '%s'",filename)
- end
- instance.order[#instance.order+1] = instance.setup[pathname]
- if instance.loaderror then break end
- end
-end
-
-function resolvers.loadoldconfig()
- if not instance.renewcache then
- local cnffiles = instance.cnffiles
- for i=1,#cnffiles do
- local cnf = cnffiles[i]
- local dname = file.dirname(cnf)
- resolvers.load_data(dname,'configuration')
- instance.order[#instance.order+1] = instance.configuration[dname]
- if instance.loaderror then break end
- end
- end
- resolvers.joinconfig()
-end
+-- we used to have 'files' and 'configurations' so therefore the following
+-- shared function
-function resolvers.expand_variables()
- local expansions, environment, variables = { }, instance.environment, instance.variables
- local env = resolvers.env
- instance.expansions = expansions
- local engine, progname = instance.engine, instance.progname
- if type(engine) ~= "string" then instance.engine, engine = "", "" end
- if type(progname) ~= "string" then instance.progname, progname = "", "" end
- if engine ~= "" then environment['engine'] = engine end
- if progname ~= "" then environment['progname'] = progname end
- for k,v in next, environment do
- local a, b = match(k,"^(%a+)%_(.*)%s*$")
- if a and b then
- expansions[a..'.'..b] = v
- else
- expansions[k] = v
- end
- end
- for k,v in next, environment do -- move environment to expansions
- if not expansions[k] then expansions[k] = v end
- end
- for k,v in next, variables do -- move variables to expansions
- if not expansions[k] then expansions[k] = v end
- end
- local busy = false
- local function resolve(a)
- busy = true
- return expansions[a] or env(a)
- end
- while true do
- busy = false
- for k,v in next, expansions do
- local s, n = gsub(v,"%$([%a%d%_%-]+)",resolve)
- local s, m = gsub(s,"%$%{([%a%d%_%-]+)%}",resolve)
- if n > 0 or m > 0 then
- expansions[k]= s
- end
- end
- if not busy then break end
- end
- for k,v in next, expansions do
- expansions[k] = gsub(v,"\\", '/')
- end
+function resolvers.datastate()
+ return caches.contentstate()
end
function resolvers.variable(name)
- return entry(instance.variables,name)
+ local name = name and lpegmatch(dollarstripper,name)
+ local result = name and instance.variables[name]
+ return result ~= nil and result or ""
end
function resolvers.expansion(name)
- return entry(instance.expansions,name)
-end
-
-function resolvers.is_variable(name)
- return is_entry(instance.variables,name)
-end
-
-function resolvers.is_expansion(name)
- return is_entry(instance.expansions,name)
+ local name = name and lpegmatch(dollarstripper,name)
+ local result = name and instance.expansions[name]
+ return result ~= nil and result or ""
end
-function resolvers.unexpanded_path_list(str)
+function resolvers.unexpandedpathlist(str)
local pth = resolvers.variable(str)
- local lst = resolvers.split_path(pth)
- return expanded_path_from_list(lst)
+ local lst = resolvers.splitpath(pth)
+ return expandedpathfromlist(lst)
end
-function resolvers.unexpanded_path(str)
- return file.join_path(resolvers.unexpanded_path_list(str))
+function resolvers.unexpandedpath(str)
+ return joinpath(resolvers.unexpandedpathlist(str))
end
-do -- no longer needed
-
- local done = { }
+local done = { }
- function resolvers.reset_extra_path()
- local ep = instance.extra_paths
- if not ep then
- ep, done = { }, { }
- instance.extra_paths = ep
- elseif #ep > 0 then
- instance.lists, done = { }, { }
- end
+function resolvers.resetextrapath()
+ local ep = instance.extra_paths
+ if not ep then
+ ep, done = { }, { }
+ instance.extra_paths = ep
+ elseif #ep > 0 then
+ instance.lists, done = { }, { }
end
+end
- function resolvers.register_extra_path(paths,subpaths)
- local ep = instance.extra_paths or { }
- local n = #ep
- if paths and paths ~= "" then
- if subpaths and subpaths ~= "" then
- for p in gmatch(paths,"[^,]+") do
- -- we gmatch each step again, not that fast, but used seldom
- for s in gmatch(subpaths,"[^,]+") do
- local ps = p .. "/" .. s
- if not done[ps] then
- ep[#ep+1] = resolvers.clean_path(ps)
- done[ps] = true
- end
- end
- end
- else
- for p in gmatch(paths,"[^,]+") do
- if not done[p] then
- ep[#ep+1] = resolvers.clean_path(p)
- done[p] = true
- end
- end
- end
- elseif subpaths and subpaths ~= "" then
- for i=1,n do
+function resolvers.registerextrapath(paths,subpaths)
+ local ep = instance.extra_paths or { }
+ local oldn = #ep
+ local newn = oldn
+ if paths and paths ~= "" then
+ if subpaths and subpaths ~= "" then
+ for p in gmatch(paths,"[^,]+") do
-- we gmatch each step again, not that fast, but used seldom
for s in gmatch(subpaths,"[^,]+") do
- local ps = ep[i] .. "/" .. s
+ local ps = p .. "/" .. s
if not done[ps] then
- ep[#ep+1] = resolvers.clean_path(ps)
+ newn = newn + 1
+ ep[newn] = resolvers.cleanpath(ps)
done[ps] = true
end
end
end
+ else
+ for p in gmatch(paths,"[^,]+") do
+ if not done[p] then
+ newn = newn + 1
+ ep[newn] = resolvers.cleanpath(p)
+ done[p] = true
+ end
+ end
end
- if #ep > 0 then
- instance.extra_paths = ep -- register paths
- end
- if #ep > n then
- instance.lists = { } -- erase the cache
+ elseif subpaths and subpaths ~= "" then
+ for i=1,oldn do
+ -- we gmatch each step again, not that fast, but used seldom
+ for s in gmatch(subpaths,"[^,]+") do
+ local ps = ep[i] .. "/" .. s
+ if not done[ps] then
+ newn = newn + 1
+ ep[newn] = resolvers.cleanpath(ps)
+ done[ps] = true
+ end
+ end
end
end
-
+ if newn > 0 then
+ instance.extra_paths = ep -- register paths
+ end
+ if newn > oldn then
+ instance.lists = { } -- erase the cache
+ end
end
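+
+-- a usage sketch: paths and subpaths are comma separated strings and each
+-- cleaned combination is registered only once (the paths are just examples)
+--
+--~ resolvers.registerextrapath("/data/project,/data/extra","tex,bib")
+--~ -- registers /data/project/tex, /data/project/bib, /data/extra/tex, /data/extra/bib
+--~ resolvers.resetextrapath() -- clears the bookkeeping and the list cache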
local function made_list(instance,list)
@@ -9528,14 +11693,15 @@ local function made_list(instance,list)
if not ep or #ep == 0 then
return list
else
- local done, new = { }, { }
+ local done, new, newn = { }, { }, 0
-- honour . .. ../.. but only when at the start
for k=1,#list do
local v = list[k]
if not done[v] then
if find(v,"^[%.%/]$") then
done[v] = true
- new[#new+1] = v
+ newn = newn + 1
+ new[newn] = v
else
break
end
@@ -9546,7 +11712,8 @@ local function made_list(instance,list)
local v = ep[k]
if not done[v] then
done[v] = true
- new[#new+1] = v
+ newn = newn + 1
+ new[newn] = v
end
end
-- next the formal paths
@@ -9554,118 +11721,95 @@ local function made_list(instance,list)
local v = list[k]
if not done[v] then
done[v] = true
- new[#new+1] = v
+ newn = newn + 1
+ new[newn] = v
end
end
return new
end
end
-function resolvers.clean_path_list(str)
- local t = resolvers.expanded_path_list(str)
+function resolvers.cleanpathlist(str)
+ local t = resolvers.expandedpathlist(str)
if t then
for i=1,#t do
- t[i] = file.collapse_path(resolvers.clean_path(t[i]))
+ t[i] = collapsepath(resolvers.cleanpath(t[i]))
end
end
return t
end
-function resolvers.expand_path(str)
- return file.join_path(resolvers.expanded_path_list(str))
+function resolvers.expandpath(str)
+ return joinpath(resolvers.expandedpathlist(str))
end
-function resolvers.expanded_path_list(str)
+function resolvers.expandedpathlist(str)
if not str then
- return ep or { } -- ep ?
+ return { }
elseif instance.savelists then
- -- engine+progname hash
- str = gsub(str,"%$","")
+ str = lpegmatch(dollarstripper,str)
if not instance.lists[str] then -- cached
- local lst = made_list(instance,resolvers.split_path(resolvers.expansion(str)))
- instance.lists[str] = expanded_path_from_list(lst)
+ local lst = made_list(instance,resolvers.splitpath(resolvers.expansion(str)))
+ instance.lists[str] = expandedpathfromlist(lst)
end
return instance.lists[str]
else
- local lst = resolvers.split_path(resolvers.expansion(str))
- return made_list(instance,expanded_path_from_list(lst))
- end
-end
-
-function resolvers.expanded_path_list_from_var(str) -- brrr
- local tmp = resolvers.var_of_format_or_suffix(gsub(str,"%$",""))
- if tmp ~= "" then
- return resolvers.expanded_path_list(tmp)
- else
- return resolvers.expanded_path_list(str)
+ local lst = resolvers.splitpath(resolvers.expansion(str))
+ return made_list(instance,expandedpathfromlist(lst))
end
end
-function resolvers.expand_path_from_var(str)
- return file.join_path(resolvers.expanded_path_list_from_var(str))
+function resolvers.expandedpathlistfromvariable(str) -- brrr
+ str = lpegmatch(dollarstripper,str)
+ local tmp = resolvers.variableofformatorsuffix(str)
+ return resolvers.expandedpathlist(tmp ~= "" and tmp or str)
end
-function resolvers.format_of_var(str)
- return formats[str] or formats[alternatives[str]] or ''
-end
-function resolvers.format_of_suffix(str)
- return suffixmap[file.extname(str)] or 'tex'
+function resolvers.expandpathfromvariable(str)
+ return joinpath(resolvers.expandedpathlistfromvariable(str))
end
-function resolvers.variable_of_format(str)
- return formats[str] or formats[alternatives[str]] or ''
+function resolvers.expandbraces(str) -- output variable and brace expansion of STRING
+ local ori = resolvers.variable(str)
+ local pth = expandedpathfromlist(resolvers.splitpath(ori))
+ return joinpath(pth)
end
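+
+-- a usage sketch: expandbraces returns the brace expanded value of a variable
+-- as one joined search path (the variable name is just an example)
+--
+--~ local path = resolvers.expandbraces("$TEXMF")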
-function resolvers.var_of_format_or_suffix(str)
- local v = formats[str]
- if v then
- return v
- end
- v = formats[alternatives[str]]
- if v then
- return v
- end
- v = suffixmap[file.extname(str)]
- if v then
- return formats[isf]
+function resolvers.registerfilehash(name,content,someerror)
+ if content then
+ instance.files[name] = content
+ else
+ instance.files[name] = { }
+ if someerror == true then -- can be unset
+ instance.loaderror = someerror
+ end
end
- return ''
-end
-
-function resolvers.expand_braces(str) -- output variable and brace expansion of STRING
- local ori = resolvers.variable(str)
- local pth = expanded_path_from_list(resolvers.split_path(ori))
- return file.join_path(pth)
end
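+
+-- a usage sketch: a tree scanner registers its file table under the hash name;
+-- passing nil content registers an empty table and can flag a load error
+-- (the path below is just an example)
+--
+--~ local path    = "/usr/local/texmf"
+--~ local content = resolvers.scanfiles(path)
+--~ resolvers.registerfilehash(path,content,true)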
-resolvers.isreadable = { }
-
-function resolvers.isreadable.file(name)
- local readable = lfs.isfile(name) -- brrr
+local function isreadable(name)
+ local readable = lfs.isfile(name) -- not file.is_readable(name) as it can be a dir
if trace_detail then
if readable then
- logs.report("fileio","file '%s' is readable",name)
+ report_resolving("file '%s' is readable",name)
else
- logs.report("fileio","file '%s' is not readable", name)
+ report_resolving("file '%s' is not readable", name)
end
end
return readable
end
-resolvers.isreadable.tex = resolvers.isreadable.file
-
-- name
-- name/name
local function collect_files(names)
- local filelist = { }
+ local filelist, noffiles = { }, 0
for k=1,#names do
local fname = names[k]
if trace_detail then
- logs.report("fileio","checking name '%s'",fname)
+ report_resolving("checking name '%s'",fname)
end
- local bname = file.basename(fname)
- local dname = file.dirname(fname)
+ local bname = filebasename(fname)
+ local dname = filedirname(fname)
if dname == "" or find(dname,"^%.") then
dname = false
else
@@ -9674,11 +11818,11 @@ local function collect_files(names)
local hashes = instance.hashes
for h=1,#hashes do
local hash = hashes[h]
- local blobpath = hash.tag
+ local blobpath = hash.name
local files = blobpath and instance.files[blobpath]
if files then
if trace_detail then
- logs.report("fileio","deep checking '%s' (%s)",blobpath,bname)
+ report_resolving("deep checking '%s' (%s)",blobpath,bname)
end
local blobfile = files[bname]
if not blobfile then
@@ -9690,62 +11834,51 @@ local function collect_files(names)
end
end
if blobfile then
+ local blobroot = files.__path__ or blobpath
if type(blobfile) == 'string' then
if not dname or find(blobfile,dname) then
- filelist[#filelist+1] = {
- hash.type,
- file.join(blobpath,blobfile,bname), -- search
- resolvers.concatinators[hash.type](blobpath,blobfile,bname) -- result
- }
+ local variant = hash.type
+ -- local search = filejoin(blobpath,blobfile,bname)
+ local search = filejoin(blobroot,blobfile,bname)
+ local result = methodhandler('concatinators',hash.type,blobroot,blobfile,bname)
+ if trace_detail then
+ report_resolving("match: variant '%s', search '%s', result '%s'",variant,search,result)
+ end
+ noffiles = noffiles + 1
+ filelist[noffiles] = { variant, search, result }
end
else
for kk=1,#blobfile do
local vv = blobfile[kk]
if not dname or find(vv,dname) then
- filelist[#filelist+1] = {
- hash.type,
- file.join(blobpath,vv,bname), -- search
- resolvers.concatinators[hash.type](blobpath,vv,bname) -- result
- }
+ local variant = hash.type
+ -- local search = filejoin(blobpath,vv,bname)
+ local search = filejoin(blobroot,vv,bname)
+ local result = methodhandler('concatinators',hash.type,blobroot,vv,bname)
+ if trace_detail then
+ report_resolving("match: variant '%s', search '%s', result '%s'",variant,search,result)
+ end
+ noffiles = noffiles + 1
+ filelist[noffiles] = { variant, search, result }
end
end
end
end
elseif trace_locating then
- logs.report("fileio","no match in '%s' (%s)",blobpath,bname)
+ report_resolving("no match in '%s' (%s)",blobpath,bname)
end
end
end
- if #filelist > 0 then
- return filelist
- else
- return nil
- end
-end
-
-function resolvers.suffix_of_format(str)
- if suffixes[str] then
- return suffixes[str][1]
- else
- return ""
- end
-end
-
-function resolvers.suffixes_of_format(str)
- if suffixes[str] then
- return suffixes[str]
- else
- return {}
- end
+ return noffiles > 0 and filelist or nil
end
-function resolvers.register_in_trees(name)
+function resolvers.registerintrees(name)
if not find(name,"^%.") then
instance.foundintrees[name] = (instance.foundintrees[name] or 0) + 1 -- maybe only one
end
end
--- split the next one up for readability (bu this module needs a cleanup anyway)
+-- split the next one up for readability (but this module needs a cleanup anyway)
local function can_be_dir(name) -- can become local
local fakepaths = instance.fakepaths
@@ -9756,61 +11889,62 @@ local function can_be_dir(name) -- can become local
fakepaths[name] = 2 -- no directory
end
end
- return (fakepaths[name] == 1)
+ return fakepaths[name] == 1
end
-local function collect_instance_files(filename,collected) -- todo : plugin (scanners, checkers etc)
- local result = collected or { }
+local preparetreepattern = Cs((P(".")/"%%." + P("-")/"%%-" + P(1))^0 * Cc("$"))
+
+-- this one is split in smaller functions but it needs testing
+
+local function collect_instance_files(filename,askedformat,allresults) -- todo : plugin (scanners, checkers etc)
+ local result = { }
local stamp = nil
- filename = file.collapse_path(filename)
+ askedformat = askedformat or ""
+ filename = collapsepath(filename)
-- speed up / beware: format problem
- if instance.remember then
- stamp = filename .. "--" .. instance.engine .. "--" .. instance.progname .. "--" .. instance.format
+ if instance.remember and not allresults then
+ stamp = filename .. "--" .. askedformat
if instance.found[stamp] then
if trace_locating then
- logs.report("fileio","remembering file '%s'",filename)
+ report_resolving("remembered file '%s'",filename)
end
+ resolvers.registerintrees(filename) -- for tracing used files
return instance.found[stamp]
end
end
- if not dangerous[instance.format or "?"] then
- if resolvers.isreadable.file(filename) then
+ if not dangerous[askedformat] then
+ if isreadable(filename) then
if trace_detail then
- logs.report("fileio","file '%s' found directly",filename)
+ report_resolving("file '%s' found directly",filename)
+ end
+ if stamp then
+ instance.found[stamp] = { filename }
end
- instance.found[stamp] = { filename }
return { filename }
end
end
if find(filename,'%*') then
if trace_locating then
- logs.report("fileio","checking wildcard '%s'", filename)
+ report_resolving("checking wildcard '%s'", filename)
end
- result = resolvers.find_wildcard_files(filename)
+ result = resolvers.findwildcardfiles(filename) -- we can use the local
elseif file.is_qualified_path(filename) then
- if resolvers.isreadable.file(filename) then
+ if isreadable(filename) then
if trace_locating then
- logs.report("fileio","qualified name '%s'", filename)
+ report_resolving("qualified name '%s'", filename)
end
result = { filename }
else
- local forcedname, ok, suffix = "", false, file.extname(filename)
+ local forcedname, ok, suffix = "", false, fileextname(filename)
if suffix == "" then -- why
- if instance.format == "" then
- forcedname = filename .. ".tex"
- if resolvers.isreadable.file(forcedname) then
- if trace_locating then
- logs.report("fileio","no suffix, forcing standard filetype 'tex'")
- end
- result, ok = { forcedname }, true
- end
- else
- local suffixes = resolvers.suffixes_of_format(instance.format)
- for _, s in next, suffixes do
+ local format_suffixes = askedformat == "" and resolvers.defaultsuffixes or suffixes[askedformat]
+ if format_suffixes then
+ for i=1,#format_suffixes do
+ local s = format_suffixes[i]
forcedname = filename .. "." .. s
- if resolvers.isreadable.file(forcedname) then
+ if isreadable(forcedname) then
if trace_locating then
- logs.report("fileio","no suffix, forcing format filetype '%s'", s)
+ report_resolving("no suffix, forcing format filetype '%s'", s)
end
result, ok = { forcedname }, true
break
@@ -9821,23 +11955,24 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan
if not ok and suffix ~= "" then
-- try to find in tree (no suffix manipulation), here we search for the
-- matching last part of the name
- local basename = file.basename(filename)
- local pattern = gsub(filename .. "$","([%.%-])","%%%1")
- local savedformat = instance.format
+ local basename = filebasename(filename)
+ local pattern = lpegmatch(preparetreepattern,filename)
+ -- messy .. to be sorted out
+ local savedformat = askedformat
local format = savedformat or ""
if format == "" then
- instance.format = resolvers.format_of_suffix(suffix)
+ askedformat = resolvers.formatofsuffix(suffix)
end
if not format then
- instance.format = "othertextfiles" -- kind of everything, maybe texinput is better
+ askedformat = "othertextfiles" -- kind of everything, maybe texinput is better
end
--
if basename ~= filename then
- local resolved = collect_instance_files(basename)
- if #result == 0 then
+ local resolved = collect_instance_files(basename,askedformat,allresults)
+ if #result == 0 then -- shouldn't this be resolved ?
local lowered = lower(basename)
if filename ~= lowered then
- resolved = collect_instance_files(lowered)
+ resolved = collect_instance_files(lowered,askedformat,allresults)
end
end
resolvers.format = savedformat
@@ -9862,57 +11997,60 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan
-- end
end
if not ok and trace_locating then
- logs.report("fileio","qualified name '%s'", filename)
+ report_resolving("qualified name '%s'", filename)
end
end
else
-- search spec
- local filetype, extra, done, wantedfiles, ext = '', nil, false, { }, file.extname(filename)
- if ext == "" then
- if not instance.force_suffixes then
- wantedfiles[#wantedfiles+1] = filename
- end
- else
- wantedfiles[#wantedfiles+1] = filename
- end
- if instance.format == "" then
- if ext == "" then
- local forcedname = filename .. '.tex'
- wantedfiles[#wantedfiles+1] = forcedname
- filetype = resolvers.format_of_suffix(forcedname)
- if trace_locating then
- logs.report("fileio","forcing filetype '%s'",filetype)
+ local filetype, done, wantedfiles, ext = '', false, { }, fileextname(filename)
+ -- -- tricky as filename can be bla.1.2.3
+ -- if not suffixmap[ext] then --- probably needs to be done elsewhere too
+ -- wantedfiles[#wantedfiles+1] = filename
+ -- end
+ wantedfiles[#wantedfiles+1] = filename
+ if askedformat == "" then
+ if ext == "" or not suffixmap[ext] then
+ local defaultsuffixes = resolvers.defaultsuffixes
+ for i=1,#defaultsuffixes do
+ local forcedname = filename .. '.' .. defaultsuffixes[i]
+ wantedfiles[#wantedfiles+1] = forcedname
+ filetype = resolvers.formatofsuffix(forcedname)
+ if trace_locating then
+ report_resolving("forcing filetype '%s'",filetype)
+ end
end
else
- filetype = resolvers.format_of_suffix(filename)
+ filetype = resolvers.formatofsuffix(filename)
if trace_locating then
- logs.report("fileio","using suffix based filetype '%s'",filetype)
+ report_resolving("using suffix based filetype '%s'",filetype)
end
end
else
- if ext == "" then
- local suffixes = resolvers.suffixes_of_format(instance.format)
- for _, s in next, suffixes do
- wantedfiles[#wantedfiles+1] = filename .. "." .. s
+ if ext == "" or not suffixmap[ext] then
+ local format_suffixes = suffixes[askedformat]
+ if format_suffixes then
+ for i=1,#format_suffixes do
+ wantedfiles[#wantedfiles+1] = filename .. "." .. format_suffixes[i]
+ end
end
end
- filetype = instance.format
+ filetype = askedformat
if trace_locating then
- logs.report("fileio","using given filetype '%s'",filetype)
+ report_resolving("using given filetype '%s'",filetype)
end
end
- local typespec = resolvers.variable_of_format(filetype)
- local pathlist = resolvers.expanded_path_list(typespec)
+ local typespec = resolvers.variableofformat(filetype)
+ local pathlist = resolvers.expandedpathlist(typespec)
if not pathlist or #pathlist == 0 then
-- no pathlist, access check only / todo == wildcard
if trace_detail then
- logs.report("fileio","checking filename '%s', filetype '%s', wanted files '%s'",filename, filetype or '?',concat(wantedfiles," | "))
+ report_resolving("checking filename '%s', filetype '%s', wanted files '%s'",filename, filetype or '?',concat(wantedfiles," | "))
end
for k=1,#wantedfiles do
local fname = wantedfiles[k]
- if fname and resolvers.isreadable.file(fname) then
+ if fname and isreadable(fname) then
filename, done = fname, true
- result[#result+1] = file.join('.',fname)
+ result[#result+1] = filejoin('.',fname)
break
end
end
@@ -9920,8 +12058,8 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan
local filelist = collect_files(wantedfiles)
local fl = filelist and filelist[1]
if fl then
- filename = fl[3]
- result[#result+1] = filename
+ filename = fl[3] -- not local?
+ result[#result+1] = resolvers.resolve(filename)
done = true
end
else
@@ -9930,76 +12068,65 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan
local dirlist = { }
if filelist then
for i=1,#filelist do
- dirlist[i] = file.dirname(filelist[i][2]) .. "/"
+ dirlist[i] = filedirname(filelist[i][3]) .. "/" -- was [2] .. gamble
end
end
if trace_detail then
- logs.report("fileio","checking filename '%s'",filename)
+ report_resolving("checking filename '%s'",filename)
end
- -- a bit messy ... esp the doscan setting here
- local doscan
for k=1,#pathlist do
local path = pathlist[k]
- if find(path,"^!!") then doscan = false else doscan = true end
- local pathname = gsub(path,"^!+", '')
+ local pathname = lpegmatch(inhibitstripper,path)
+ local doscan = path == pathname -- no ^!!
done = false
-- using file list
if filelist then
- local expression
-- compare list entries with permitted pattern -- /xx /xx//
- if not find(pathname,"/$") then
- expression = pathname .. "/"
- else
- expression = pathname
- end
- expression = gsub(expression,"([%-%.])","%%%1") -- this also influences
- expression = gsub(expression,"//+$", '/.*') -- later usage of pathname
- expression = gsub(expression,"//", '/.-/') -- not ok for /// but harmless
- expression = "^" .. expression .. "$"
+ local expression = makepathexpression(pathname)
if trace_detail then
- logs.report("fileio","using pattern '%s' for path '%s'",expression,pathname)
+ report_resolving("using pattern '%s' for path '%s'",expression,pathname)
end
for k=1,#filelist do
local fl = filelist[k]
local f = fl[2]
local d = dirlist[k]
if find(d,expression) then
- --- todo, test for readable
- result[#result+1] = fl[3]
- resolvers.register_in_trees(f) -- for tracing used files
+ -- todo, test for readable
+ result[#result+1] = resolvers.resolve(fl[3]) -- no shortcut
done = true
- if instance.allresults then
+ if allresults then
if trace_detail then
- logs.report("fileio","match in hash for file '%s' on path '%s', continue scanning",f,d)
+ report_resolving("match to '%s' in hash for file '%s' and path '%s', continue scanning",expression,f,d)
end
else
if trace_detail then
- logs.report("fileio","match in hash for file '%s' on path '%s', quit scanning",f,d)
+ report_resolving("match to '%s' in hash for file '%s' and path '%s', quit scanning",expression,f,d)
end
break
end
elseif trace_detail then
- logs.report("fileio","no match in hash for file '%s' on path '%s'",f,d)
+ report_resolving("no match to '%s' in hash for file '%s' and path '%s'",expression,f,d)
end
end
end
if not done and doscan then
-- check if on disk / unchecked / does not work at all / also zips
- if resolvers.splitmethod(pathname).scheme == 'file' then -- ?
+ local scheme = url.hasscheme(pathname)
+ if not scheme or scheme == "file" then
local pname = gsub(pathname,"%.%*$",'')
if not find(pname,"%*") then
local ppname = gsub(pname,"/+$","")
if can_be_dir(ppname) then
for k=1,#wantedfiles do
local w = wantedfiles[k]
- local fname = file.join(ppname,w)
- if resolvers.isreadable.file(fname) then
+ local fname = filejoin(ppname,w)
+ if isreadable(fname) then
if trace_detail then
- logs.report("fileio","found '%s' by scanning",fname)
+ report_resolving("found '%s' by scanning",fname)
end
result[#result+1] = fname
done = true
- if not instance.allresults then break end
+ if not allresults then break end
end
end
else
@@ -10009,56 +12136,66 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan
end
end
if not done and doscan then
- -- todo: slow path scanning
+ -- todo: slow path scanning ... although we now have tree:// supported in $TEXMF
end
- if done and not instance.allresults then break end
+ if done and not allresults then break end
end
end
end
for k=1,#result do
- result[k] = file.collapse_path(result[k])
+ local rk = collapsepath(result[k])
+ result[k] = rk
+ resolvers.registerintrees(rk) -- for tracing used files
end
- if instance.remember then
+ if stamp then
instance.found[stamp] = result
end
return result
end
-if not resolvers.concatinators then resolvers.concatinators = { } end
+-- -- -- begin of main file search routing -- -- --
-resolvers.concatinators.tex = file.join
-resolvers.concatinators.file = resolvers.concatinators.tex
-function resolvers.find_files(filename,filetype,mustexist)
- if type(mustexist) == boolean then
- -- all set
- elseif type(filetype) == 'boolean' then
- filetype, mustexist = nil, false
- elseif type(filetype) ~= 'string' then
- filetype, mustexist = nil, false
- end
- instance.format = filetype or ''
- local result = collect_instance_files(filename)
+
+
+
+
+
+
+
+
+-- -- -- end of main file search routing -- -- --
+
+local function findfiles(filename,filetype,allresults)
+ local result = collect_instance_files(filename,filetype or "",allresults)
if #result == 0 then
local lowered = lower(filename)
if filename ~= lowered then
- return collect_instance_files(lowered)
+ return collect_instance_files(lowered,filetype or "",allresults)
end
end
- instance.format = ''
return result
end
-function resolvers.find_file(filename,filetype,mustexist)
- return (resolvers.find_files(filename,filetype,mustexist)[1] or "")
+function resolvers.findfiles(filename,filetype)
+ return findfiles(filename,filetype,true)
+end
+
+function resolvers.findfile(filename,filetype)
+ return findfiles(filename,filetype,false)[1] or ""
+end
+
+function resolvers.findpath(filename,filetype)
+ return filedirname(findfiles(filename,filetype,false)[1] or "")
end
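+
+-- a usage sketch: findfile returns the first match (or ""), findfiles returns
+-- all matches, findpath only the directory part of the first match (the file
+-- names are just examples)
+--
+--~ local one  = resolvers.findfile ("context.mkiv")
+--~ local all  = resolvers.findfiles("texmf.cnf")
+--~ local path = resolvers.findpath ("context.mkiv","tex")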
-function resolvers.find_given_files(filename)
- local bname, result = file.basename(filename), { }
+local function findgivenfiles(filename,allresults)
+ local bname, result = filebasename(filename), { }
local hashes = instance.hashes
+ local noffound = 0
for k=1,#hashes do
local hash = hashes[k]
- local files = instance.files[hash.tag] or { }
+ local files = instance.files[hash.name] or { }
local blist = files[bname]
if not blist then
local rname = "remap:"..bname
@@ -10070,13 +12207,21 @@ function resolvers.find_given_files(filename)
end
if blist then
if type(blist) == 'string' then
- result[#result+1] = resolvers.concatinators[hash.type](hash.tag,blist,bname) or ""
- if not instance.allresults then break end
+ local found = methodhandler('concatinators',hash.type,hash.name,blist,bname) or ""
+ if found ~= "" then
+ noffound = noffound + 1
+ result[noffound] = resolvers.resolve(found)
+ if not allresults then break end
+ end
else
for kk=1,#blist do
local vv = blist[kk]
- result[#result+1] = resolvers.concatinators[hash.type](hash.tag,vv,bname) or ""
- if not instance.allresults then break end
+ local found = methodhandler('concatinators',hash.type,hash.name,vv,bname) or ""
+ if found ~= "" then
+ noffound = noffound + 1
+ result[noffound] = resolvers.resolve(found)
+ if not allresults then break end
+ end
end
end
end
@@ -10084,24 +12229,31 @@ function resolvers.find_given_files(filename)
return result
end
-function resolvers.find_given_file(filename)
- return (resolvers.find_given_files(filename)[1] or "")
+function resolvers.findgivenfiles(filename)
+ return findgivenfiles(filename,true)
end
-local function doit(path,blist,bname,tag,kind,result,allresults)
+function resolvers.findgivenfile(filename)
+ return findgivenfiles(filename,false)[1] or ""
+end
+
+local function doit(path,blist,bname,tag,variant,result,allresults)
local done = false
- if blist and kind then
+ if blist and variant then
+ local resolve = resolvers.resolve -- added
if type(blist) == 'string' then
-- make function and share code
if find(lower(blist),path) then
- result[#result+1] = resolvers.concatinators[kind](tag,blist,bname) or ""
+ local full = methodhandler('concatinators',variant,tag,blist,bname) or ""
+ result[#result+1] = resolve(full)
done = true
end
else
for kk=1,#blist do
local vv = blist[kk]
if find(lower(vv),path) then
- result[#result+1] = resolvers.concatinators[kind](tag,vv,bname) or ""
+ local full = methodhandler('concatinators',variant,tag,vv,bname) or ""
+ result[#result+1] = resolve(full)
done = true
if not allresults then break end
end
@@ -10111,30 +12263,32 @@ local function doit(path,blist,bname,tag,kind,result,allresults)
return done
end
-function resolvers.find_wildcard_files(filename) -- todo: remap:
- local result = { }
- local bname, dname = file.basename(filename), file.dirname(filename)
- local path = gsub(dname,"^*/","")
- path = gsub(path,"*",".*")
- path = gsub(path,"-","%%-")
- if dname == "" then
- path = ".*"
- end
- local name = bname
- name = gsub(name,"*",".*")
- name = gsub(name,"-","%%-")
- path = lower(path)
- name = lower(name)
- local files, allresults, done = instance.files, instance.allresults, false
+
+local makewildcard = Cs(
+ (P("^")^0 * P("/") * P(-1) + P(-1)) /".*"
+ + (P("^")^0 * P("/") / "")^0 * (P("*")/".*" + P("-")/"%%-" + P(".")/"%%." + P("?")/"."+ P("\\")/"/" + P(1))^0
+)
+
+function resolvers.wildcardpattern(pattern)
+ return lpegmatch(makewildcard,pattern) or pattern
+end
+
+local function findwildcardfiles(filename,allresults,result) -- todo: remap: and lpeg
+ result = result or { }
+ local base = filebasename(filename)
+ local dirn = filedirname(filename)
+ local path = lower(lpegmatch(makewildcard,dirn) or dirn)
+ local name = lower(lpegmatch(makewildcard,base) or base)
+ local files, done = instance.files, false
if find(name,"%*") then
local hashes = instance.hashes
for k=1,#hashes do
local hash = hashes[k]
- local tag, kind = hash.tag, hash.type
- for kk, hh in next, files[hash.tag] do
+ local hashname, hashtype = hash.name, hash.type
+ for kk, hh in next, files[hashname] do
if not find(kk,"^remap:") then
if find(lower(kk),name) then
- if doit(path,hh,kk,tag,kind,result,allresults) then done = true end
+ if doit(path,hh,kk,hashname,hashtype,result,allresults) then done = true end
if done and not allresults then break end
end
end
@@ -10144,8 +12298,8 @@ function resolvers.find_wildcard_files(filename) -- todo: remap:
local hashes = instance.hashes
for k=1,#hashes do
local hash = hashes[k]
- local tag, kind = hash.tag, hash.type
- if doit(path,files[tag][bname],bname,tag,kind,result,allresults) then done = true end
+ local hashname, hashtype = hash.name, hash.type
+ if doit(path,files[hashname][bname],bname,hashname,hashtype,result,allresults) then done = true end
if done and not allresults then break end
end
end
@@ -10154,8 +12308,12 @@ function resolvers.find_wildcard_files(filename) -- todo: remap:
return result
end
-function resolvers.find_wildcard_file(filename)
- return (resolvers.find_wildcard_files(filename)[1] or "")
+function resolvers.findwildcardfiles(filename,result)
+ return findwildcardfiles(filename,true,result)
+end
+
+function resolvers.findwildcardfile(filename)
+ return findwildcardfiles(filename,false)[1] or ""
end
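+
+-- a usage sketch: wildcard lookups accept * and ? in both the path and the
+-- name part (the patterns are just examples)
+--
+--~ local all   = resolvers.findwildcardfiles("*/context/base/*.mkiv")
+--~ local first = resolvers.findwildcardfile ("type-*.mkiv")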
-- main user functions
@@ -10166,34 +12324,33 @@ end
function resolvers.load(option)
statistics.starttiming(instance)
- resolvers.resetconfig()
- resolvers.identify_cnf()
- resolvers.load_lua() -- will become the new method
- resolvers.expand_variables()
- resolvers.load_cnf() -- will be skipped when we have a lua file
- resolvers.expand_variables()
+ identify_configuration_files()
+ load_configuration_files()
if option ~= "nofiles" then
- resolvers.load_hash()
+ load_databases()
resolvers.automount()
end
statistics.stoptiming(instance)
+ local files = instance.files
+ return files and next(files) and true
+end
+
+local function report(str)
+ if trace_locating then
+ report_resolving(str) -- already verbose
+ else
+ print(str)
+ end
end
-function resolvers.for_files(command, files, filetype, mustexist)
+function resolvers.dowithfilesandreport(command, files, ...) -- will move
if files and #files > 0 then
- local function report(str)
- if trace_locating then
- logs.report("fileio",str) -- has already verbose
- else
- print(str)
- end
- end
if trace_locating then
report('') -- ?
end
for f=1,#files do
local file = files[f]
- local result = command(file,filetype,mustexist)
+ local result = command(file,...)
if type(result) == 'string' then
report(result)
else
@@ -10205,21 +12362,16 @@ function resolvers.for_files(command, files, filetype, mustexist)
end
end
--- strtab
+-- obsolete
-resolvers.var_value = resolvers.variable -- output the value of variable $STRING.
-resolvers.expand_var = resolvers.expansion -- output variable expansion of STRING.
+-- resolvers.varvalue = resolvers.variable -- output the value of variable $STRING.
+-- resolvers.expandvar = resolvers.expansion -- output variable expansion of STRING.
-function resolvers.show_path(str) -- output search path for file type NAME
- return file.join_path(resolvers.expanded_path_list(resolvers.format_of_var(str)))
+function resolvers.showpath(str) -- output search path for file type NAME
+ return joinpath(resolvers.expandedpathlist(resolvers.formatofvariable(str)))
end
--- resolvers.find_file(filename)
--- resolvers.find_file(filename, filetype, mustexist)
--- resolvers.find_file(filename, mustexist)
--- resolvers.find_file(filename, filetype)
-
-function resolvers.register_file(files, name, path)
+function resolvers.registerfile(files, name, path)
if files[name] then
if type(files[name]) == 'string' then
files[name] = { files[name], path }
@@ -10231,101 +12383,24 @@ function resolvers.register_file(files, name, path)
end
end
-function resolvers.splitmethod(filename)
- if not filename then
- return { } -- safeguard
- elseif type(filename) == "table" then
- return filename -- already split
- elseif not find(filename,"://") then
- return { scheme="file", path = filename, original=filename } -- quick hack
- else
- return url.hashed(filename)
- end
-end
-
-function table.sequenced(t,sep) -- temp here
- local s = { }
- for k, v in next, t do -- indexed?
- s[#s+1] = k .. "=" .. tostring(v)
- end
- return concat(s, sep or " | ")
-end
-
-function resolvers.methodhandler(what, filename, filetype) -- ...
- filename = file.collapse_path(filename)
- local specification = (type(filename) == "string" and resolvers.splitmethod(filename)) or filename -- no or { }, let it bomb
- local scheme = specification.scheme
- if resolvers[what][scheme] then
- if trace_locating then
- logs.report("fileio","handler '%s' -> '%s' -> '%s'",specification.original,what,table.sequenced(specification))
- end
- return resolvers[what][scheme](filename,filetype) -- todo: specification
- else
- return resolvers[what].tex(filename,filetype) -- todo: specification
- end
-end
-
-function resolvers.clean_path(str)
- if str then
- str = gsub(str,"\\","/")
- str = gsub(str,"^!+","")
- str = gsub(str,"^~",resolvers.homedir)
- return str
- else
- return nil
- end
-end
-
-function resolvers.do_with_path(name,func)
- local pathlist = resolvers.expanded_path_list(name)
+function resolvers.dowithpath(name,func)
+ local pathlist = resolvers.expandedpathlist(name)
for i=1,#pathlist do
- func("^"..resolvers.clean_path(pathlist[i]))
+ func("^"..resolvers.cleanpath(pathlist[i]))
end
end
-function resolvers.do_with_var(name,func)
- func(expanded_var(name))
-end
-
-function resolvers.with_files(pattern,handle)
- local hashes = instance.hashes
- for i=1,#hashes do
- local hash = hashes[i]
- local blobpath = hash.tag
- local blobtype = hash.type
- if blobpath then
- local files = instance.files[blobpath]
- if files then
- for k,v in next, files do
- if find(k,"^remap:") then
- k = files[k]
- v = files[k] -- chained
- end
- if find(k,pattern) then
- if type(v) == "string" then
- handle(blobtype,blobpath,v,k)
- else
- for _,vv in next, v do -- indexed
- handle(blobtype,blobpath,vv,k)
- end
- end
- end
- end
- end
- end
- end
+function resolvers.dowithvariable(name,func)
+ func(expandedvariable(name))
end
-function resolvers.locate_format(name)
- local barename, fmtname = gsub(name,"%.%a+$",""), ""
- if resolvers.usecache then
- local path = file.join(caches.setpath("formats")) -- maybe platform
- fmtname = file.join(path,barename..".fmt") or ""
- end
+function resolvers.locateformat(name)
+ local barename = file.removesuffix(name) -- gsub(name,"%.%a+$","")
+ local fmtname = caches.getfirstreadablefile(barename..".fmt","formats") or ""
if fmtname == "" then
- fmtname = resolvers.find_files(barename..".fmt")[1] or ""
+ fmtname = resolvers.findfile(barename..".fmt")
+ fmtname = resolvers.cleanpath(fmtname)
end
- fmtname = resolvers.clean_path(fmtname)
if fmtname ~= "" then
local barename = file.removesuffix(fmtname)
local luaname, lucname, luiname = barename .. ".lua", barename .. ".luc", barename .. ".lui"
@@ -10340,7 +12415,7 @@ function resolvers.locate_format(name)
return nil, nil
end
-function resolvers.boolean_variable(str,default)
+function resolvers.booleanvariable(str,default)
local b = resolvers.expansion(str)
if b == "" then
return default
@@ -10350,204 +12425,62 @@ function resolvers.boolean_variable(str,default)
end
end
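+
+-- a usage sketch (the variable name is just an example):
+--
+--~ local purgecache = resolvers.booleanvariable("PURGECACHE",false)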
-texconfig.kpse_init = false
-
-kpse = { original = kpse } setmetatable(kpse, { __index = function(k,v) return resolvers[v] end } )
-
--- for a while
-
-input = resolvers
-
-
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
-if not modules then modules = { } end modules ['data-tmp'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
---[[ldx--
-
-This module deals with caching data. It sets up the paths and
-implements loaders and savers for tables. Best is to set the
-following variable. When not set, the usual paths will be
-checked. Personally I prefer the (users) temporary path.
-
-
-TEXMFCACHE=$TMP;$TEMP;$TMPDIR;$TEMPDIR;$HOME;$TEXMFVAR;$VARTEXMF;.
-
-
-Currently we do no locking when we write files. This is no real
-problem because most caching involves fonts and the chance of them
-being written at the same time is small. We also need to extend
-luatools with a recache feature.
---ldx]]--
-
-local format, lower, gsub = string.format, string.lower, string.gsub
-
-local trace_cache = false trackers.register("resolvers.cache", function(v) trace_cache = v end) -- not used yet
-
-caches = caches or { }
-
-caches.path = caches.path or nil
-caches.base = caches.base or "luatex-cache"
-caches.more = caches.more or "context"
-caches.direct = false -- true is faster but may need huge amounts of memory
-caches.tree = false
-caches.paths = caches.paths or nil
-caches.force = true -- changed for TEXLIVE by TH
-caches.defaults = { "TEXMFCACHE", "TMPDIR", "TEMPDIR", "TMP", "TEMP", "HOME", "HOMEPATH" }
-
-function caches.temp()
- local cachepath = nil
- local function check(list,isenv)
- if not cachepath then
- for k=1,#list do
- local v = list[k]
- cachepath = (isenv and (os.env[v] or "")) or v or ""
- if cachepath == "" then
- -- next
- else
- cachepath = resolvers.clean_path(cachepath)
- if lfs.isdir(cachepath) and file.iswritable(cachepath) then -- lfs.attributes(cachepath,"mode") == "directory"
- break
- elseif caches.force or io.ask(format("\nShould I create the cache path %s?",cachepath), "no", { "yes", "no" }) == "yes" then
- dir.mkdirs(cachepath)
- if lfs.isdir(cachepath) and file.iswritable(cachepath) then
- break
+function resolvers.dowithfilesintree(pattern,handle,before,after) -- will move, can be a nice iterator instead
+ local instance = resolvers.instance
+ local hashes = instance.hashes
+ for i=1,#hashes do
+ local hash = hashes[i]
+ local blobtype = hash.type
+ local blobpath = hash.name
+ if blobpath then
+ if before then
+ before(blobtype,blobpath,pattern)
+ end
+ local files = instance.files[blobpath]
+ local total, checked, done = 0, 0, 0
+ if files then
+ for k,v in next, files do
+ total = total + 1
+ if find(k,"^remap:") then
+ k = files[k]
+ v = k -- files[k] -- chained
+ end
+ if find(k,pattern) then
+ if type(v) == "string" then
+ checked = checked + 1
+ if handle(blobtype,blobpath,v,k) then
+ done = done + 1
+ end
+ else
+ checked = checked + #v
+ for i=1,#v do
+ if handle(blobtype,blobpath,v[i],k) then
+ done = done + 1
+ end
+ end
end
end
end
- cachepath = nil
+ end
+ if after then
+ after(blobtype,blobpath,pattern,total,checked,done)
end
end
end
- check(resolvers.clean_path_list("TEXMFCACHE") or { })
- check(caches.defaults,true)
- if not cachepath then
- print("\nfatal error: there is no valid (writable) cache path defined\n")
- os.exit()
- elseif not lfs.isdir(cachepath) then -- lfs.attributes(cachepath,"mode") ~= "directory"
- print(format("\nfatal error: cache path %s is not a directory\n",cachepath))
- os.exit()
- end
- cachepath = file.collapse_path(cachepath)
- function caches.temp()
- return cachepath
- end
- return cachepath
-end
-
-function caches.configpath()
- return table.concat(resolvers.instance.cnffiles,";")
-end
-
-function caches.hashed(tree)
- return md5.hex(gsub(lower(tree),"[\\\/]+","/"))
-end
-
-function caches.treehash()
- local tree = caches.configpath()
- if not tree or tree == "" then
- return false
- else
- return caches.hashed(tree)
- end
-end
-
-function caches.setpath(...)
- if not caches.path then
- if not caches.path then
- caches.path = caches.temp()
- end
- caches.path = resolvers.clean_path(caches.path) -- to be sure
- caches.tree = caches.tree or caches.treehash()
- if caches.tree then
- caches.path = dir.mkdirs(caches.path,caches.base,caches.more,caches.tree)
- else
- caches.path = dir.mkdirs(caches.path,caches.base,caches.more)
- end
- end
- if not caches.path then
- caches.path = '.'
- end
- caches.path = resolvers.clean_path(caches.path)
- local dirs = { ... }
- if #dirs > 0 then
- local pth = dir.mkdirs(caches.path,...)
- return pth
- end
- caches.path = dir.expand_name(caches.path)
- return caches.path
-end
-
-function caches.definepath(category,subcategory)
- return function()
- return caches.setpath(category,subcategory)
- end
-end
-
-function caches.setluanames(path,name)
- return path .. "/" .. name .. ".tma", path .. "/" .. name .. ".tmc"
-end
-
-function caches.loaddata(path,name)
- local tmaname, tmcname = caches.setluanames(path,name)
- local loader = loadfile(tmcname) or loadfile(tmaname)
- if loader then
- loader = loader()
- collectgarbage("step")
- return loader
- else
- return false
- end
-end
-
---~ function caches.loaddata(path,name)
---~ local tmaname, tmcname = caches.setluanames(path,name)
---~ return dofile(tmcname) or dofile(tmaname)
---~ end
-
-function caches.iswritable(filepath,filename)
- local tmaname, tmcname = caches.setluanames(filepath,filename)
- return file.iswritable(tmaname)
-end
-
-function caches.savedata(filepath,filename,data,raw)
- local tmaname, tmcname = caches.setluanames(filepath,filename)
- local reduce, simplify = true, true
- if raw then
- reduce, simplify = false, false
- end
- data.cache_uuid = os.uuid()
- if caches.direct then
- file.savedata(tmaname, table.serialize(data,'return',false,true,false)) -- no hex
- else
- table.tofile(tmaname, data,'return',false,true,false) -- maybe not the last true
- end
- local cleanup = resolvers.boolean_variable("PURGECACHE", false)
- local strip = resolvers.boolean_variable("LUACSTRIP", true)
- utils.lua.compile(tmaname, tmcname, cleanup, strip)
end
--- here we use the cache for format loading (texconfig.[formatname|jobname])
+resolvers.obsolete = resolvers.obsolete or { }
+local obsolete = resolvers.obsolete
---~ if tex and texconfig and texconfig.formatname and texconfig.formatname == "" then
-if tex and texconfig and (not texconfig.formatname or texconfig.formatname == "") and input and resolvers.instance then
- if not texconfig.luaname then texconfig.luaname = "cont-en.lua" end -- or luc
- texconfig.formatname = caches.setpath("formats") .. "/" .. gsub(texconfig.luaname,"%.lu.$",".fmt")
-end
+resolvers.find_file = resolvers.findfile obsolete.find_file = resolvers.findfile
+resolvers.find_files = resolvers.findfiles obsolete.find_files = resolvers.findfiles
end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['data-res'] = {
+if not modules then modules = { } end modules ['data-pre'] = {
version = 1.001,
comment = "companion to luat-lib.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
@@ -10555,17 +12488,28 @@ if not modules then modules = { } end modules ['data-res'] = {
license = "see context related readme files"
}
---~ print(resolvers.resolve("abc env:tmp file:cont-en.tex path:cont-en.tex full:cont-en.tex rel:zapf/one/p-chars.tex"))
+-- It could be interesting to hook the resolver in the file
+-- opener so that unresolved prefixes travel around and we
+-- get more abstraction.
-local upper, lower, gsub = string.upper, string.lower, string.gsub
+-- As we use this beforehand we will move this up in the chain
+-- of loading.
-local prefixes = { }
+
+local resolvers = resolvers
+local prefixes = utilities.storage.allocate()
+resolvers.prefixes = prefixes
+
+local gsub = string.gsub
+local cleanpath, findgivenfile, expansion = resolvers.cleanpath, resolvers.findgivenfile, resolvers.expansion
+local getenv = resolvers.getenv -- we can probably also use resolvers.expansion
+local P, Cs, lpegmatch = lpeg.P, lpeg.Cs, lpeg.match
prefixes.environment = function(str)
- return resolvers.clean_path(os.getenv(str) or os.getenv(upper(str)) or os.getenv(lower(str)) or "")
+ return cleanpath(expansion(str))
end
-prefixes.relative = function(str,n)
+prefixes.relative = function(str,n) -- lfs.isfile
if io.exists(str) then
-- nothing
elseif io.exists("./" .. str) then
@@ -10581,7 +12525,7 @@ prefixes.relative = function(str,n)
end
end
end
- return resolvers.clean_path(str)
+ return cleanpath(str)
end
prefixes.auto = function(str)
@@ -10593,18 +12537,34 @@ prefixes.auto = function(str)
end
prefixes.locate = function(str)
- local fullname = resolvers.find_given_file(str) or ""
- return resolvers.clean_path((fullname ~= "" and fullname) or str)
+ local fullname = findgivenfile(str) or ""
+ return cleanpath((fullname ~= "" and fullname) or str)
end
prefixes.filename = function(str)
- local fullname = resolvers.find_given_file(str) or ""
- return resolvers.clean_path(file.basename((fullname ~= "" and fullname) or str))
+ local fullname = findgivenfile(str) or ""
+ return cleanpath(file.basename((fullname ~= "" and fullname) or str))
end
prefixes.pathname = function(str)
- local fullname = resolvers.find_given_file(str) or ""
- return resolvers.clean_path(file.dirname((fullname ~= "" and fullname) or str))
+ local fullname = findgivenfile(str) or ""
+ return cleanpath(file.dirname((fullname ~= "" and fullname) or str))
+end
+
+prefixes.selfautoloc = function(str)
+ return cleanpath(file.join(getenv('SELFAUTOLOC'),str))
+end
+
+prefixes.selfautoparent = function(str)
+ return cleanpath(file.join(getenv('SELFAUTOPARENT'),str))
+end
+
+prefixes.selfautodir = function(str)
+ return cleanpath(file.join(getenv('SELFAUTODIR'),str))
+end
+
+prefixes.home = function(str)
+ return cleanpath(file.join(getenv('HOME'),str))
end
prefixes.env = prefixes.environment
@@ -10633,26 +12593,71 @@ local function _resolve_(method,target)
end
end
-local function resolve(str)
- if type(str) == "table" then
- for k=1,#str do
- local v = str[k]
- str[k] = resolve(v) or v
+local resolved, abstract = { }, { }
+
+function resolvers.resetresolve(str)
+ resolved, abstract = { }, { }
+end
+
+local function resolve(str) -- use schemes, this one is then for the commandline only
+ local res = resolved[str]
+ if not res then
+ res = gsub(str,"([a-z][a-z]+):([^ \"\']*)",_resolve_)
+ resolved[str] = res
+ abstract[res] = str
+ end
+ return res
+end
+
+local function unresolve(str)
+ return abstract[str] or str
+end
+
+resolvers.resolve = resolve
+resolvers.unresolve = unresolve
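+
+-- a usage sketch: resolve replaces prefix:value pairs using the registered
+-- prefixes and remembers the mapping so that unresolve can return the
+-- original string (the input is just an example)
+--
+--~ local r = resolvers.resolve  ("home:texmf/tex/context")
+--~ local o = resolvers.unresolve(r) -- "home:texmf/tex/context" again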
+
+if os.uname then
+
+ for k, v in next, os.uname() do
+ if not prefixes[k] then
+ prefixes[k] = function() return v end
+ end
+ end
+
+end
+
+if os.type == "unix" then
+
+ local pattern
+
+ local function makepattern(t,k,v)
+ local colon = P(":")
+ local p
+ for k, v in table.sortedpairs(prefixes) do
+ if p then
+ p = P(k) + p
+ else
+ p = P(k)
+ end
+ end
+ pattern = Cs((p * colon + colon/";" + P(1))^0)
+ if t then
+ t[k] = v
end
- elseif str and str ~= "" then
- str = gsub(str,"([a-z]+):([^ \"\']*)",_resolve_)
end
- return str
-end
-resolvers.resolve = resolve
+ makepattern()
-if os.uname then
+ getmetatable(prefixes).__newindex = makepattern
- for k, v in next, os.uname() do
- if not prefixes[k] then
- prefixes[k] = function() return v end
- end
+ function resolvers.repath(str)
+ return lpegmatch(pattern,str)
+ end
+
+else -- already the default:
+
+ function resolvers.repath(str)
+ return str
end
end
@@ -10670,13 +12675,23 @@ if not modules then modules = { } end modules ['data-inp'] = {
license = "see context related readme files"
}
-resolvers.finders = resolvers.finders or { }
-resolvers.openers = resolvers.openers or { }
-resolvers.loaders = resolvers.loaders or { }
+local allocate = utilities.storage.allocate
+local resolvers = resolvers
+
+local methodhandler = resolvers.methodhandler
+local registermethod = resolvers.registermethod
-resolvers.finders.notfound = { nil }
-resolvers.openers.notfound = { nil }
-resolvers.loaders.notfound = { false, nil, 0 }
+local finders = allocate { helpers = { }, notfound = function() end }
+local openers = allocate { helpers = { }, notfound = function() end }
+local loaders = allocate { helpers = { }, notfound = function() return false, nil, 0 end }
+
+registermethod("finders", finders, "uri")
+registermethod("openers", openers, "uri")
+registermethod("loaders", loaders, "uri")
+
+resolvers.finders = finders
+resolvers.openers = openers
+resolvers.loaders = loaders
end -- of closure
@@ -10691,15 +12706,23 @@ if not modules then modules = { } end modules ['data-out'] = {
license = "see context related readme files"
}
-outputs = outputs or { }
+local allocate = utilities.storage.allocate
+local resolvers = resolvers
+
+local registermethod = resolvers.registermethod
+local savers = allocate { helpers = { } }
+
+resolvers.savers = savers
+
+registermethod("savers", savers, "uri")
end -- of closure
do -- create closure to overcome 200 locals limit
-if not modules then modules = { } end modules ['data-con'] = {
+if not modules then modules = { } end modules ['data-fil'] = {
version = 1.001,
comment = "companion to luat-lib.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
@@ -10707,6 +12730,124 @@ if not modules then modules = { } end modules ['data-con'] = {
license = "see context related readme files"
}
+local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
+
+local report_files = logs.reporter("resolvers","files")
+
+local resolvers = resolvers
+
+local finders, openers, loaders, savers = resolvers.finders, resolvers.openers, resolvers.loaders, resolvers.savers
+local locators, hashers, generators, concatinators = resolvers.locators, resolvers.hashers, resolvers.generators, resolvers.concatinators
+
+local checkgarbage = utilities.garbagecollector and utilities.garbagecollector.check
+
+function locators.file(specification)
+ local name = specification.filename
+ local realname = resolvers.resolve(name) -- no shortcut
+ if realname and realname ~= '' and lfs.isdir(realname) then
+ if trace_locating then
+ report_files("file locator '%s' found as '%s'",name,realname)
+ end
+ resolvers.appendhash('file',name,true) -- cache
+ elseif trace_locating then
+ report_files("file locator '%s' not found",name)
+ end
+end
+
+function hashers.file(specification)
+ local name = specification.filename
+ local content = caches.loadcontent(name,'files')
+ resolvers.registerfilehash(name,content,content==nil)
+end
+
+function generators.file(specification)
+ local path = specification.filename
+ local content = resolvers.scanfiles(path)
+ resolvers.registerfilehash(path,content,true)
+end
+
+concatinators.file = file.join
+
+function finders.file(specification,filetype)
+ local filename = specification.filename
+ local foundname = resolvers.findfile(filename,filetype)
+ if foundname and foundname ~= "" then
+ if trace_locating then
+ report_files("file finder: '%s' found",filename)
+ end
+ return foundname
+ else
+ if trace_locating then
+ report_files("file finder: %s' not found",filename)
+ end
+ return finders.notfound()
+ end
+end
+
+-- The default textopener will be overloaded later on.
+
+function openers.helpers.textopener(tag,filename,f)
+ return {
+ reader = function() return f:read () end,
+ close = function() logs.show_close(filename) return f:close() end,
+ }
+end
+
+function openers.file(specification,filetype)
+ local filename = specification.filename
+ if filename and filename ~= "" then
+ local f = io.open(filename,"r")
+ if f then
+ if trace_locating then
+ report_files("file opener, '%s' opened",filename)
+ end
+ return openers.helpers.textopener("file",filename,f)
+ end
+ end
+ if trace_locating then
+ report_files("file opener, '%s' not found",filename)
+ end
+ return openers.notfound()
+end
+
+function loaders.file(specification,filetype)
+ local filename = specification.filename
+ if filename and filename ~= "" then
+ local f = io.open(filename,"rb")
+ if f then
+ logs.show_load(filename)
+ if trace_locating then
+ report_files("file loader, '%s' loaded",filename)
+ end
+ local s = f:read("*a")
+ if checkgarbage then
+ checkgarbage(#s)
+ end
+ f:close()
+ if s then
+ return true, s, #s
+ end
+ end
+ end
+ if trace_locating then
+ report_files("file loader, '%s' not found",filename)
+ end
+ return loaders.notfound()
+end
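+
+-- a usage sketch: the file methods take a specification table with (at least)
+-- a filename field (the name below is just an example)
+--
+--~ local foundname        = finders.file { filename = "context.mkiv" }
+--~ local ok, data, length = loaders.file { filename = foundname }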
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+if not modules then modules = { } end modules ['data-con'] = {
+ version = 1.100,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
local format, lower, gsub = string.format, string.lower, string.gsub
local trace_cache = false trackers.register("resolvers.cache", function(v) trace_cache = v end)
@@ -10726,50 +12867,63 @@ table structures without bothering about the disk cache.
Examples of usage can be found in the font related code.
--ldx]]--
-containers = containers or { }
-
+containers = containers or { }
+local containers = containers
containers.usecache = true
+local report_containers = logs.reporter("resolvers","containers")
+
local function report(container,tag,name)
if trace_cache or trace_containers then
- logs.report(format("%s cache",container.subcategory),"%s: %s",tag,name or 'invalid')
+ report_containers("container: %s, tag: %s, name: %s",container.subcategory,tag,name or 'invalid')
end
end
local allocated = { }
--- tracing
+local mt = {
+ __index = function(t,k)
+ if k == "writable" then
+ local writable = caches.getwritablepath(t.category,t.subcategory) or { "." }
+ t.writable = writable
+ return writable
+ elseif k == "readables" then
+ local readables = caches.getreadablepaths(t.category,t.subcategory) or { "." }
+ t.readables = readables
+ return readables
+ end
+ end,
+ __storage__ = true
+}
function containers.define(category, subcategory, version, enabled)
- return function()
- if category and subcategory then
- local c = allocated[category]
- if not c then
- c = { }
- allocated[category] = c
- end
- local s = c[subcategory]
- if not s then
- s = {
- category = category,
- subcategory = subcategory,
- storage = { },
- enabled = enabled,
- version = version or 1.000,
- trace = false,
- path = caches and caches.setpath and caches.setpath(category,subcategory),
- }
- c[subcategory] = s
- end
- return s
- else
- return nil
+ if category and subcategory then
+ local c = allocated[category]
+ if not c then
+ c = { }
+ allocated[category] = c
+ end
+ local s = c[subcategory]
+ if not s then
+ s = {
+ category = category,
+ subcategory = subcategory,
+ storage = { },
+ enabled = enabled,
+ version = version or math.pi, -- after all, this is TeX
+ trace = false,
+ -- writable = caches.getwritablepath and caches.getwritablepath (category,subcategory) or { "." },
+ -- readables = caches.getreadablepaths and caches.getreadablepaths(category,subcategory) or { "." },
+ }
+ setmetatable(s,mt)
+ c[subcategory] = s
end
+ return s
end
end
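+
+-- a usage sketch: a container couples a category/subcategory to the cache
+-- (the names, version and producer function are just examples)
+--
+--~ local cache = containers.define("fonts","demo",1.001,true)
+--~ local data  = containers.read(cache,"somehash")
+--~ if not data then
+--~     data = producedata() -- hypothetical producer
+--~     containers.write(cache,"somehash",data)
+--~ end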
function containers.is_usable(container, name)
- return container.enabled and caches and caches.iswritable(container.path, name)
+ return container.enabled and caches and caches.is_writable(container.writable, name)
end
function containers.is_valid(container, name)
@@ -10782,18 +12936,20 @@ function containers.is_valid(container, name)
end
function containers.read(container,name)
- if container.enabled and caches and not container.storage[name] and containers.usecache then
- container.storage[name] = caches.loaddata(container.path,name)
- if containers.is_valid(container,name) then
+ local storage = container.storage
+ local stored = storage[name]
+ if not stored and container.enabled and caches and containers.usecache then
+ stored = caches.loaddata(container.readables,name)
+ if stored and stored.cache_version == container.version then
report(container,"loaded",name)
else
- container.storage[name] = nil
+ stored = nil
end
- end
- if container.storage[name] then
+ storage[name] = stored
+ elseif stored then
report(container,"reusing",name)
end
- return container.storage[name]
+ return stored
end
function containers.write(container, name, data)
@@ -10802,7 +12958,7 @@ function containers.write(container, name, data)
if container.enabled and caches then
local unique, shared = data.unique, data.shared
data.unique, data.shared = nil, nil
- caches.savedata(container.path, name, data)
+ caches.savedata(container.writable, name, data)
report(container,"saved",name)
data.unique, data.shared = unique, shared
end
@@ -10837,50 +12993,18 @@ local format, lower, gsub, find = string.format, string.lower, string.gsub, stri
local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
--- since we want to use the cache instead of the tree, we will now
--- reimplement the saver.
-
-local save_data = resolvers.save_data
-local load_data = resolvers.load_data
-
-resolvers.cachepath = nil -- public, for tracing
-resolvers.usecache = true -- public, for tracing
+local report_mounts = logs.reporter("resolvers","mounts")
-function resolvers.save_data(dataname)
- save_data(dataname, function(cachename,dataname)
- resolvers.usecache = not toboolean(resolvers.expansion("CACHEINTDS") or "false",true)
- if resolvers.usecache then
- resolvers.cachepath = resolvers.cachepath or caches.definepath("trees")
- return file.join(resolvers.cachepath(),caches.hashed(cachename))
- else
- return file.join(cachename,dataname)
- end
- end)
-end
-
-function resolvers.load_data(pathname,dataname,filename)
- load_data(pathname,dataname,filename,function(dataname,filename)
- resolvers.usecache = not toboolean(resolvers.expansion("CACHEINTDS") or "false",true)
- if resolvers.usecache then
- resolvers.cachepath = resolvers.cachepath or caches.definepath("trees")
- return file.join(resolvers.cachepath(),caches.hashed(pathname))
- else
- if not filename or (filename == "") then
- filename = dataname
- end
- return file.join(pathname,filename)
- end
- end)
-end
+local resolvers = resolvers
-- we will make a better format, maybe something xml or just text or lua
resolvers.automounted = resolvers.automounted or { }
function resolvers.automount(usecache)
- local mountpaths = resolvers.clean_path_list(resolvers.expansion('TEXMFMOUNT'))
+ local mountpaths = resolvers.cleanpathlist(resolvers.expansion('TEXMFMOUNT'))
if (not mountpaths or #mountpaths == 0) and usecache then
- mountpaths = { caches.setpath("mount") }
+ mountpaths = caches.getreadablepaths("mount")
end
if mountpaths and #mountpaths > 0 then
statistics.starttiming(resolvers.instance)
@@ -10894,7 +13018,7 @@ function resolvers.automount(usecache)
-- skip
elseif find(line,"^zip://") then
if trace_locating then
- logs.report("fileio","mounting %s",line)
+ report_mounts("mounting %s",line)
end
table.insert(resolvers.automounted,line)
resolvers.usezipfile(line)
@@ -10910,40 +13034,40 @@ end
-- status info
-statistics.register("used config path", function() return caches.configpath() end)
-statistics.register("used cache path", function() return caches.temp() or "?" end)
+statistics.register("used config file", function() return caches.configfiles() end)
+statistics.register("used cache path", function() return caches.usedpaths() end)
-- experiment (code will move)
-function statistics.save_fmt_status(texname,formatbanner,sourcefile) -- texname == formatname
+function statistics.savefmtstatus(texname,formatbanner,sourcefile) -- texname == formatname
local enginebanner = status.list().banner
if formatbanner and enginebanner and sourcefile then
local luvname = file.replacesuffix(texname,"luv")
local luvdata = {
enginebanner = enginebanner,
formatbanner = formatbanner,
- sourcehash = md5.hex(io.loaddata(resolvers.find_file(sourcefile)) or "unknown"),
+ sourcehash = md5.hex(io.loaddata(resolvers.findfile(sourcefile)) or "unknown"),
sourcefile = sourcefile,
}
io.savedata(luvname,table.serialize(luvdata,true))
end
end
-function statistics.check_fmt_status(texname)
+function statistics.checkfmtstatus(texname)
local enginebanner = status.list().banner
if enginebanner and texname then
local luvname = file.replacesuffix(texname,"luv")
if lfs.isfile(luvname) then
local luv = dofile(luvname)
if luv and luv.sourcefile then
- local sourcehash = md5.hex(io.loaddata(resolvers.find_file(luv.sourcefile)) or "unknown")
+ local sourcehash = md5.hex(io.loaddata(resolvers.findfile(luv.sourcefile)) or "unknown")
local luvbanner = luv.enginebanner or "?"
if luvbanner ~= enginebanner then
- return string.format("engine mismatch (luv:%s <> bin:%s)",luvbanner,enginebanner)
+ return format("engine mismatch (luv: %s <> bin: %s)",luvbanner,enginebanner)
end
local luvhash = luv.sourcehash or "?"
if luvhash ~= sourcehash then
- return string.format("source mismatch (luv:%s <> bin:%s)",luvhash,sourcehash)
+ return format("source mismatch (luv: %s <> bin: %s)",luvhash,sourcehash)
end
else
return "invalid status file"
@@ -10968,25 +13092,30 @@ if not modules then modules = { } end modules ['data-zip'] = {
license = "see context related readme files"
}
+-- partly redone .. needs testing
+
local format, find, match = string.format, string.find, string.match
-local unpack = unpack or table.unpack
local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
+local report_zip = logs.reporter("resolvers","zip")
+
-- zip:///oeps.zip?name=bla/bla.tex
-- zip:///oeps.zip?tree=tex/texmf-local
-- zip:///texmf.zip?tree=/tex/texmf
-- zip:///texmf.zip?tree=/tex/texmf-local
-- zip:///texmf-mine.zip?tree=/tex/texmf-projects
-zip = zip or { }
-zip.archives = zip.archives or { }
-zip.registeredfiles = zip.registeredfiles or { }
+local resolvers = resolvers
-local finders, openers, loaders = resolvers.finders, resolvers.openers, resolvers.loaders
-local locators, hashers, concatinators = resolvers.locators, resolvers.hashers, resolvers.concatinators
+zip = zip or { }
+local zip = zip
-local archives = zip.archives
+zip.archives = zip.archives or { }
+local archives = zip.archives
+
+zip.registeredfiles = zip.registeredfiles or { }
+local registeredfiles = zip.registeredfiles
local function validzip(str) -- todo: use url splitter
if not find(str,"^zip://") then
@@ -11002,7 +13131,7 @@ function zip.openarchive(name)
else
local arch = archives[name]
if not arch then
- local full = resolvers.find_file(name) or ""
+ local full = resolvers.findfile(name) or ""
arch = (full ~= "" and zip.open(full)) or false
archives[name] = arch
end
@@ -11017,163 +13146,162 @@ function zip.closearchive(name)
end
end
-function locators.zip(specification) -- where is this used? startup zips (untested)
- specification = resolvers.splitmethod(specification)
- local zipfile = specification.path
- local zfile = zip.openarchive(name) -- tricky, could be in to be initialized tree
+function resolvers.locators.zip(specification)
+ local archive = specification.filename
+ local zipfile = archive and archive ~= "" and zip.openarchive(archive) -- tricky, could be in a not yet initialized tree
if trace_locating then
- if zfile then
- logs.report("fileio","zip locator, archive '%s' found",specification.original)
+ if zipfile then
+ report_zip("locator, archive '%s' found",archive)
else
- logs.report("fileio","zip locator, archive '%s' not found",specification.original)
+ report_zip("locator, archive '%s' not found",archive)
end
end
end
-function hashers.zip(tag,name)
+function resolvers.hashers.zip(specification)
+ local archive = specification.filename
if trace_locating then
- logs.report("fileio","loading zip file '%s' as '%s'",name,tag)
+ report_zip("loading file '%s'",archive)
end
- resolvers.usezipfile(format("%s?tree=%s",tag,name))
+ resolvers.usezipfile(specification.original)
end
-function concatinators.zip(tag,path,name)
+function resolvers.concatinators.zip(zipfile,path,name) -- ok ?
if not path or path == "" then
- return format('%s?name=%s',tag,name)
+ return format('%s?name=%s',zipfile,name)
else
- return format('%s?name=%s/%s',tag,path,name)
+ return format('%s?name=%s/%s',zipfile,path,name)
end
end
-function resolvers.isreadable.zip(name)
- return true
-end
-
-function finders.zip(specification,filetype)
- specification = resolvers.splitmethod(specification)
- if specification.path then
- local q = url.query(specification.query)
- if q.name then
- local zfile = zip.openarchive(specification.path)
+function resolvers.finders.zip(specification)
+ local original = specification.original
+ local archive = specification.filename
+ if archive then
+ local query = url.query(specification.query)
+ local queryname = query.name
+ if queryname then
+ local zfile = zip.openarchive(archive)
if zfile then
if trace_locating then
- logs.report("fileio","zip finder, archive '%s' found",specification.path)
+ report_zip("finder, archive '%s' found",archive)
end
- local dfile = zfile:open(q.name)
+ local dfile = zfile:open(queryname)
if dfile then
dfile = zfile:close()
if trace_locating then
- logs.report("fileio","zip finder, file '%s' found",q.name)
+ report_zip("finder, file '%s' found",queryname)
end
return specification.original
elseif trace_locating then
- logs.report("fileio","zip finder, file '%s' not found",q.name)
+ report_zip("finder, file '%s' not found",queryname)
end
elseif trace_locating then
- logs.report("fileio","zip finder, unknown archive '%s'",specification.path)
+ report_zip("finder, unknown archive '%s'",archive)
end
end
end
if trace_locating then
- logs.report("fileio","zip finder, '%s' not found",filename)
+ report_zip("finder, '%s' not found",original)
end
- return unpack(finders.notfound)
+ return resolvers.finders.notfound()
end
-function openers.zip(specification)
- local zipspecification = resolvers.splitmethod(specification)
- if zipspecification.path then
- local q = url.query(zipspecification.query)
- if q.name then
- local zfile = zip.openarchive(zipspecification.path)
+function resolvers.openers.zip(specification)
+ local original = specification.original
+ local archive = specification.filename
+ if archive then
+ local query = url.query(specification.query)
+ local queryname = query.name
+ if queryname then
+ local zfile = zip.openarchive(archive)
if zfile then
if trace_locating then
- logs.report("fileio","zip opener, archive '%s' opened",zipspecification.path)
+ report_zip("opener, archive '%s' opened",archive)
end
- local dfile = zfile:open(q.name)
+ local dfile = zfile:open(queryname)
if dfile then
- logs.show_open(specification)
if trace_locating then
- logs.report("fileio","zip opener, file '%s' found",q.name)
+ report_zip("opener, file '%s' found",queryname)
end
- return openers.text_opener(specification,dfile,'zip')
+ return resolvers.openers.helpers.textopener('zip',original,dfile)
elseif trace_locating then
- logs.report("fileio","zip opener, file '%s' not found",q.name)
+ report_zip("opener, file '%s' not found",queryname)
end
elseif trace_locating then
- logs.report("fileio","zip opener, unknown archive '%s'",zipspecification.path)
+ report_zip("opener, unknown archive '%s'",archive)
end
end
end
if trace_locating then
- logs.report("fileio","zip opener, '%s' not found",filename)
+ report_zip("opener, '%s' not found",original)
end
- return unpack(openers.notfound)
+ return resolvers.openers.notfound()
end
-function loaders.zip(specification)
- specification = resolvers.splitmethod(specification)
- if specification.path then
- local q = url.query(specification.query)
- if q.name then
- local zfile = zip.openarchive(specification.path)
+function resolvers.loaders.zip(specification)
+ local original = specification.original
+ local archive = specification.filename
+ if archive then
+ local query = url.query(specification.query)
+ local queryname = query.name
+ if queryname then
+ local zfile = zip.openarchive(archive)
if zfile then
if trace_locating then
- logs.report("fileio","zip loader, archive '%s' opened",specification.path)
+ report_zip("loader, archive '%s' opened",archive)
end
- local dfile = zfile:open(q.name)
+ local dfile = zfile:open(queryname)
if dfile then
- logs.show_load(filename)
+ logs.show_load(original)
if trace_locating then
- logs.report("fileio","zip loader, file '%s' loaded",filename)
+ report_zip("loader, file '%s' loaded",original)
end
local s = dfile:read("*all")
dfile:close()
return true, s, #s
elseif trace_locating then
- logs.report("fileio","zip loader, file '%s' not found",q.name)
+ report_zip("loader, file '%s' not found",queryname)
end
elseif trace_locating then
- logs.report("fileio","zip loader, unknown archive '%s'",specification.path)
+ report_zip("loader, unknown archive '%s'",archive)
end
end
end
if trace_locating then
- logs.report("fileio","zip loader, '%s' not found",filename)
+ report_zip("loader, '%s' not found",original)
end
- return unpack(openers.notfound)
+ return resolvers.openers.notfound()
end
-- zip:///somefile.zip
-- zip:///somefile.zip?tree=texmf-local -> mount
-function resolvers.usezipfile(zipname)
- zipname = validzip(zipname)
- local specification = resolvers.splitmethod(zipname)
- local zipfile = specification.path
- if zipfile and not zip.registeredfiles[zipname] then
- local tree = url.query(specification.query).tree or ""
- local z = zip.openarchive(zipfile)
+function resolvers.usezipfile(archive)
+ local specification = resolvers.splitmethod(archive) -- to be sure
+ local archive = specification.filename
+ if archive and not registeredfiles[archive] then
+ local z = zip.openarchive(archive)
if z then
- local instance = resolvers.instance
+ local tree = url.query(specification.query).tree or ""
if trace_locating then
- logs.report("fileio","zip registering, registering archive '%s'",zipname)
- end
- statistics.starttiming(instance)
- resolvers.prepend_hash('zip',zipname,zipfile)
- resolvers.extend_texmf_var(zipname) -- resets hashes too
- zip.registeredfiles[zipname] = z
- instance.files[zipname] = resolvers.register_zip_file(z,tree or "")
- statistics.stoptiming(instance)
+ report_zip("registering, registering archive '%s'",archive)
+ end
+ statistics.starttiming(resolvers.instance)
+ resolvers.prependhash('zip',archive)
+ resolvers.extendtexmfvariable(archive) -- resets hashes too
+ registeredfiles[archive] = z
+ resolvers.instance.files[archive] = resolvers.registerzipfile(z,tree)
+ statistics.stoptiming(resolvers.instance)
elseif trace_locating then
- logs.report("fileio","zip registering, unknown archive '%s'",zipname)
+ report_zip("registering, unknown archive '%s'",archive)
end
elseif trace_locating then
- logs.report("fileio","zip registering, '%s' not found",zipname)
+ report_zip("registering, '%s' not found",archive)
end
end
-function resolvers.register_zip_file(z,tree)
+function resolvers.registerzipfile(z,tree)
local files, filter = { }, ""
if tree == "" then
filter = "^(.+)/(.-)$"
@@ -11181,9 +13309,9 @@ function resolvers.register_zip_file(z,tree)
filter = format("^%s/(.+)/(.-)$",tree)
end
if trace_locating then
- logs.report("fileio","zip registering, using filter '%s'",filter)
+ report_zip("registering, using filter '%s'",filter)
end
- local register, n = resolvers.register_file, 0
+ local register, n = resolvers.registerfile, 0
for i in z:files() do
local path, name = match(i.filename,filter)
if path then
@@ -11198,11 +13326,89 @@ function resolvers.register_zip_file(z,tree)
n = n + 1
end
end
- logs.report("fileio","zip registering, %s files registered",n)
+ report_zip("registering, %s files registered",n)
return files
end
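A hedged illustration of how the zip resolver above is driven; the archive name and tree are hypothetical and follow the url forms listed in the comments at the top of this module:

    -- mount a subtree packed in a zip file: usezipfile registers the archive,
    -- prepends a 'zip' hash and extends the TEXMF variable
    resolvers.usezipfile("zip:///texmf-project.zip?tree=/tex/texmf-project")
    -- individual members can also be addressed directly, for instance from TeX:
    --   \input zip:///oeps.zip?name=bla/bla.tex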
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+if not modules then modules = { } end modules ['data-tre'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- \input tree://oeps1/**/oeps.tex
+
+local find, gsub, format = string.find, string.gsub, string.format
+
+local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
+
+local report_trees = logs.reporter("resolvers","trees")
+
+local resolvers = resolvers
+
+local done, found, notfound = { }, { }, resolvers.finders.notfound
+
+function resolvers.finders.tree(specification)
+ local spec = specification.filename
+ local fnd = found[spec]
+ if fnd == nil then
+ if spec ~= "" then
+ local path, name = file.dirname(spec), file.basename(spec)
+ if path == "" then path = "." end
+ local hash = done[path]
+ if not hash then
+ local pattern = path .. "/*" -- we will use the proper splitter
+ hash = dir.glob(pattern)
+ done[path] = hash
+ end
+ local pattern = "/" .. gsub(name,"([%.%-%+])", "%%%1") .. "$"
+ for k=1,#hash do
+ local v = hash[k]
+ if find(v,pattern) then
+ found[spec] = v
+ return v
+ end
+ end
+ end
+ fnd = notfound() -- false
+ found[spec] = fnd
+ end
+ return fnd
+end
+
+function resolvers.locators.tree(specification)
+ local name = specification.filename
+ if name ~= '' and lfs.isdir(name) then
+ if trace_locating then
+ report_trees("locator '%s' found",name)
+ end
+ resolvers.appendhash('tree',name,false) -- don't cache
+ elseif trace_locating then
+ report_trees("locator '%s' not found",name)
+ end
+end
+
+function resolvers.hashers.tree(specification)
+ local name = specification.filename
+ if trace_locating then
+ report_trees("analysing '%s'",name)
+ end
+ resolvers.methodhandler("hashers",name)
+end
+
+resolvers.concatinators.tree = resolvers.concatinators.file
+resolvers.generators.tree = resolvers.generators.file
+resolvers.openers.tree = resolvers.openers.file
+resolvers.loaders.tree = resolvers.loaders.file
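A small sketch of the tree finder defined above (the paths are hypothetical): the directory part of the name is globbed once, memoized in 'done', and later lookups only match the escaped basename pattern:

    -- normally reached via a url, as in the comment near the top of this module:
    --   \input tree://projects/**/chapter-one.tex
    -- calling the finder directly with a minimal specification table:
    local hit = resolvers.finders.tree { filename = "projects/chapter-one.tex" }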
+
+
end -- of closure
do -- create closure to overcome 200 locals limit
@@ -11215,58 +13421,59 @@ if not modules then modules = { } end modules ['data-crl'] = {
license = "see context related readme files"
}
-local gsub = string.gsub
+-- this one is replaced by data-sch.lua --
-curl = curl or { }
+local gsub = string.gsub
-curl.cached = { }
-curl.cachepath = caches.definepath("curl")
+local resolvers = resolvers
local finders, openers, loaders = resolvers.finders, resolvers.openers, resolvers.loaders
-function curl.fetch(protocol, name)
- local cachename = curl.cachepath() .. "/" .. gsub(name,"[^%a%d%.]+","-")
--- cachename = gsub(cachename,"[\\/]", io.fileseparator)
- cachename = gsub(cachename,"[\\]", "/") -- cleanup
- if not curl.cached[name] then
+resolvers.curl = resolvers.curl or { }
+local curl = resolvers.curl
+
+local cached = { }
+
+local function runcurl(specification)
+ local original = specification.original
+ -- local scheme = specification.scheme
+ local cleanname = gsub(original,"[^%a%d%.]+","-")
+ local cachename = caches.setfirstwritablefile(cleanname,"curl")
+ if not cached[original] then
if not io.exists(cachename) then
- curl.cached[name] = cachename
- local command = "curl --silent --create-dirs --output " .. cachename .. " " .. name -- no protocol .. "://"
+ cached[original] = cachename
+ local command = "curl --silent --create-dirs --output " .. cachename .. " " .. original
os.spawn(command)
end
if io.exists(cachename) then
- curl.cached[name] = cachename
+ cached[original] = cachename
else
- curl.cached[name] = ""
+ cached[original] = ""
end
end
- return curl.cached[name]
-end
-
-function finders.curl(protocol,filename)
- local foundname = curl.fetch(protocol, filename)
- return finders.generic(protocol,foundname,filetype)
+ return cached[original]
end
-function openers.curl(protocol,filename)
- return openers.generic(protocol,filename)
-end
+-- old code: we could be cleaner using specification (see schemes)
-function loaders.curl(protocol,filename)
- return loaders.generic(protocol,filename)
+local function finder(specification,filetype)
+ return resolvers.methodhandler("finders",runcurl(specification),filetype)
end
--- todo: metamethod
+local opener = openers.file
+local loader = loaders.file
-function curl.install(protocol)
- finders[protocol] = function (filename,filetype) return finders.curl(protocol,filename) end
- openers[protocol] = function (filename) return openers.curl(protocol,filename) end
- loaders[protocol] = function (filename) return loaders.curl(protocol,filename) end
+local function install(scheme)
+ finders[scheme] = finder
+ openers[scheme] = opener
+ loaders[scheme] = loader
end
-curl.install('http')
-curl.install('https')
-curl.install('ftp')
+resolvers.curl.install = install
+
+install('http')
+install('https')
+install('ftp')
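For orientation: with the three installs above, any url using one of these schemes is first fetched by curl into the cache (roughly "curl --silent --create-dirs --output <cachename> <url>") and then handled as a plain file. Registering a further scheme, should curl support it, would be a one-liner (the scheme here is hypothetical):

    resolvers.curl.install('sftp') -- hypothetical extra scheme, handled like the three above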
end -- of closure
@@ -11287,9 +13494,13 @@ if not modules then modules = { } end modules ['data-lua'] = {
local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
+local report_libraries = logs.reporter("resolvers","libraries")
+
local gsub, insert = string.gsub, table.insert
local unpack = unpack or table.unpack
+local resolvers, package = resolvers, package
+
local libformats = { 'luatexlibs', 'tex', 'texmfscripts', 'othertextfiles' } -- 'luainputs'
local clibformats = { 'lib' }
@@ -11298,7 +13509,7 @@ local _path_, libpaths, _cpath_, clibpaths
function package.libpaths()
if not _path_ or package.path ~= _path_ then
_path_ = package.path
- libpaths = file.split_path(_path_,";")
+ libpaths = file.splitpath(_path_,";")
end
return libpaths
end
@@ -11306,7 +13517,7 @@ end
function package.clibpaths()
if not _cpath_ or package.cpath ~= _cpath_ then
_cpath_ = package.cpath
- clibpaths = file.split_path(_cpath_,";")
+ clibpaths = file.splitpath(_cpath_,";")
end
return clibpaths
end
@@ -11315,18 +13526,18 @@ local function thepath(...)
local t = { ... } t[#t+1] = "?.lua"
local path = file.join(unpack(t))
if trace_locating then
- logs.report("fileio","! appending '%s' to 'package.path'",path)
+ report_libraries("! appending '%s' to 'package.path'",path)
end
return path
end
local p_libpaths, a_libpaths = { }, { }
-function package.append_libpath(...)
+function package.appendtolibpath(...)
insert(a_libpath,thepath(...))
end
-function package.prepend_libpath(...)
+function package.prependtolibpath(...)
insert(p_libpaths,1,thepath(...))
end
@@ -11337,31 +13548,30 @@ local function loaded(libpaths,name,simple)
local libpath = libpaths[i]
local resolved = gsub(libpath,"%?",simple)
if trace_locating then -- more detail
- logs.report("fileio","! checking for '%s' on 'package.path': '%s' => '%s'",simple,libpath,resolved)
+ report_libraries("! checking for '%s' on 'package.path': '%s' => '%s'",simple,libpath,resolved)
end
- if resolvers.isreadable.file(resolved) then
+ if file.is_readable(resolved) then
if trace_locating then
- logs.report("fileio","! lib '%s' located via 'package.path': '%s'",name,resolved)
+ report_libraries("! lib '%s' located via 'package.path': '%s'",name,resolved)
end
return loadfile(resolved)
end
end
end
-
package.loaders[2] = function(name) -- was [#package.loaders+1]
if trace_locating then -- mode detail
- logs.report("fileio","! locating '%s'",name)
+ report_libraries("! locating '%s'",name)
end
for i=1,#libformats do
local format = libformats[i]
- local resolved = resolvers.find_file(name,format) or ""
+ local resolved = resolvers.findfile(name,format) or ""
if trace_locating then -- mode detail
- logs.report("fileio","! checking for '%s' using 'libformat path': '%s'",name,format)
+ report_libraries("! checking for '%s' using 'libformat path': '%s'",name,format)
end
if resolved ~= "" then
if trace_locating then
- logs.report("fileio","! lib '%s' located via environment: '%s'",name,resolved)
+ report_libraries("! lib '%s' located via environment: '%s'",name,resolved)
end
return loadfile(resolved)
end
@@ -11379,16 +13589,16 @@ package.loaders[2] = function(name) -- was [#package.loaders+1]
for i=1,#clibformats do
-- better have a dedicated loop
local format = clibformats[i]
- local paths = resolvers.expanded_path_list_from_var(format)
+ local paths = resolvers.expandedpathlistfromvariable(format)
for p=1,#paths do
local path = paths[p]
local resolved = file.join(path,libname)
if trace_locating then -- mode detail
- logs.report("fileio","! checking for '%s' using 'clibformat path': '%s'",libname,path)
+ report_libraries("! checking for '%s' using 'clibformat path': '%s'",libname,path)
end
- if resolvers.isreadable.file(resolved) then
+ if file.is_readable(resolved) then
if trace_locating then
- logs.report("fileio","! lib '%s' located via 'clibformat': '%s'",libname,resolved)
+ report_libraries("! lib '%s' located via 'clibformat': '%s'",libname,resolved)
end
return package.loadlib(resolved,name)
end
@@ -11398,140 +13608,43 @@ package.loaders[2] = function(name) -- was [#package.loaders+1]
local libpath = clibpaths[i]
local resolved = gsub(libpath,"?",simple)
if trace_locating then -- more detail
- logs.report("fileio","! checking for '%s' on 'package.cpath': '%s'",simple,libpath)
+ report_libraries("! checking for '%s' on 'package.cpath': '%s'",simple,libpath)
end
- if resolvers.isreadable.file(resolved) then
+ if file.is_readable(resolved) then
if trace_locating then
- logs.report("fileio","! lib '%s' located via 'package.cpath': '%s'",name,resolved)
+ report_libraries("! lib '%s' located via 'package.cpath': '%s'",name,resolved)
end
return package.loadlib(resolved,name)
end
end
-- just in case the distribution is messed up
 if trace_locating then -- more detail
- logs.report("fileio","! checking for '%s' using 'luatexlibs': '%s'",name)
+ report_libraries("! checking for '%s' using 'luatexlibs': '%s'",name)
end
- local resolved = resolvers.find_file(file.basename(name),'luatexlibs') or ""
+ local resolved = resolvers.findfile(file.basename(name),'luatexlibs') or ""
if resolved ~= "" then
if trace_locating then
- logs.report("fileio","! lib '%s' located by basename via environment: '%s'",name,resolved)
+ report_libraries("! lib '%s' located by basename via environment: '%s'",name,resolved)
end
return loadfile(resolved)
end
if trace_locating then
- logs.report("fileio",'? unable to locate lib: %s',name)
+ report_libraries('? unable to locate lib: %s',name)
end
-- return "unable to locate " .. name
end
resolvers.loadlualib = require
+-- -- -- --
-end -- of closure
-
-do -- create closure to overcome 200 locals limit
-
-if not modules then modules = { } end modules ['luat-kps'] = {
- version = 1.001,
- comment = "companion to luatools.lua",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
---[[ldx--
-This file is used when we want the input handlers to behave like
-kpsewhich. What to do with the following:
-
-
-{$SELFAUTOLOC,$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,}/web2c}
-$SELFAUTOLOC : /usr/tex/bin/platform
-$SELFAUTODIR : /usr/tex/bin
-$SELFAUTOPARENT : /usr/tex
-
-
-How about just forgetting about them?
---ldx]]--
-
-local suffixes = resolvers.suffixes
-local formats = resolvers.formats
-
-suffixes['gf'] = { 'gf' }
-suffixes['pk'] = { 'pk' }
-suffixes['base'] = { 'base' }
-suffixes['bib'] = { 'bib' }
-suffixes['bst'] = { 'bst' }
-suffixes['cnf'] = { 'cnf' }
-suffixes['mem'] = { 'mem' }
-suffixes['mf'] = { 'mf' }
-suffixes['mfpool'] = { 'pool' }
-suffixes['mft'] = { 'mft' }
-suffixes['mppool'] = { 'pool' }
-suffixes['graphic/figure'] = { 'eps', 'epsi' }
-suffixes['texpool'] = { 'pool' }
-suffixes['PostScript header'] = { 'pro' }
-suffixes['ist'] = { 'ist' }
-suffixes['web'] = { 'web', 'ch' }
-suffixes['cweb'] = { 'w', 'web', 'ch' }
-suffixes['cmap files'] = { 'cmap' }
-suffixes['lig files'] = { 'lig' }
-suffixes['bitmap font'] = { }
-suffixes['MetaPost support'] = { }
-suffixes['TeX system documentation'] = { }
-suffixes['TeX system sources'] = { }
-suffixes['dvips config'] = { }
-suffixes['type42 fonts'] = { }
-suffixes['web2c files'] = { }
-suffixes['other text files'] = { }
-suffixes['other binary files'] = { }
-suffixes['opentype fonts'] = { 'otf' }
-
-suffixes['fmt'] = { 'fmt' }
-suffixes['texmfscripts'] = { 'rb','lua','py','pl' }
-
-suffixes['pdftex config'] = { }
-suffixes['Troff fonts'] = { }
-
-suffixes['ls-R'] = { }
+package.obsolete = package.obsolete or { }
---[[ldx--
-If you wondered abou tsome of the previous mappings, how about
-the next bunch:
---ldx]]--
+package.append_libpath = package.appendtolibpath -- will become obsolete
+package.prepend_libpath = package.prependtolibpath -- will become obsolete
-formats['bib'] = ''
-formats['bst'] = ''
-formats['mft'] = ''
-formats['ist'] = ''
-formats['web'] = ''
-formats['cweb'] = ''
-formats['MetaPost support'] = ''
-formats['TeX system documentation'] = ''
-formats['TeX system sources'] = ''
-formats['Troff fonts'] = ''
-formats['dvips config'] = ''
-formats['graphic/figure'] = ''
-formats['ls-R'] = ''
-formats['other text files'] = ''
-formats['other binary files'] = ''
-
-formats['gf'] = ''
-formats['pk'] = ''
-formats['base'] = 'MFBASES'
-formats['cnf'] = ''
-formats['mem'] = 'MPMEMS'
-formats['mf'] = 'MFINPUTS'
-formats['mfpool'] = 'MFPOOL'
-formats['mppool'] = 'MPPOOL'
-formats['texpool'] = 'TEXPOOL'
-formats['PostScript header'] = 'TEXPSHEADERS'
-formats['cmap files'] = 'CMAPFONTS'
-formats['type42 fonts'] = 'T42FONTS'
-formats['web2c files'] = 'WEB2C'
-formats['pdftex config'] = 'PDFTEXCONFIG'
-formats['texmfscripts'] = 'TEXMFSCRIPTS'
-formats['bitmap font'] = ''
-formats['lig files'] = 'LIGFONTS'
+package.obsolete.append_libpath = package.appendtolibpath -- will become obsolete
+package.obsolete.prepend_libpath = package.prependtolibpath -- will become obsolete
end -- of closure
@@ -11547,49 +13660,54 @@ if not modules then modules = { } end modules ['data-aux'] = {
}
local find = string.find
+local type, next = type, next
local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
-function resolvers.update_script(oldname,newname) -- oldname -> own.name, not per se a suffix
+local resolvers = resolvers
+
+local report_scripts = logs.reporter("resolvers","scripts")
+
+function resolvers.updatescript(oldname,newname) -- oldname -> own.name, not per se a suffix
local scriptpath = "scripts/context/lua"
newname = file.addsuffix(newname,"lua")
- local oldscript = resolvers.clean_path(oldname)
+ local oldscript = resolvers.cleanpath(oldname)
if trace_locating then
- logs.report("fileio","to be replaced old script %s", oldscript)
+ report_scripts("to be replaced old script %s", oldscript)
end
- local newscripts = resolvers.find_files(newname) or { }
+ local newscripts = resolvers.findfiles(newname) or { }
if #newscripts == 0 then
if trace_locating then
- logs.report("fileio","unable to locate new script")
+ report_scripts("unable to locate new script")
end
else
for i=1,#newscripts do
- local newscript = resolvers.clean_path(newscripts[i])
+ local newscript = resolvers.cleanpath(newscripts[i])
if trace_locating then
- logs.report("fileio","checking new script %s", newscript)
+ report_scripts("checking new script %s", newscript)
end
if oldscript == newscript then
if trace_locating then
- logs.report("fileio","old and new script are the same")
+ report_scripts("old and new script are the same")
end
elseif not find(newscript,scriptpath) then
if trace_locating then
- logs.report("fileio","new script should come from %s",scriptpath)
+ report_scripts("new script should come from %s",scriptpath)
end
elseif not (find(oldscript,file.removesuffix(newname).."$") or find(oldscript,newname.."$")) then
if trace_locating then
- logs.report("fileio","invalid new script name")
+ report_scripts("invalid new script name")
end
else
local newdata = io.loaddata(newscript)
if newdata then
if trace_locating then
- logs.report("fileio","old script content replaced by new content")
+ report_scripts("old script content replaced by new content")
end
io.savedata(oldscript,newdata)
break
elseif trace_locating then
- logs.report("fileio","unable to load new script")
+ report_scripts("unable to load new script")
end
end
end
@@ -11609,72 +13727,133 @@ if not modules then modules = { } end modules ['data-tmf'] = {
license = "see context related readme files"
}
-local find, gsub, match = string.find, string.gsub, string.match
-local getenv, setenv = os.getenv, os.setenv
+local resolvers = resolvers
--- loads *.tmf files in minimal tree roots (to be optimized and documented)
+local report_tds = logs.reporter("resolvers","tds")
-function resolvers.check_environment(tree)
- logs.simpleline()
- setenv('TMP', getenv('TMP') or getenv('TEMP') or getenv('TMPDIR') or getenv('HOME'))
- setenv('TEXOS', getenv('TEXOS') or ("texmf-" .. os.platform))
- setenv('TEXPATH', gsub(tree or "tex","\/+$",''))
- setenv('TEXMFOS', getenv('TEXPATH') .. "/" .. getenv('TEXOS'))
- logs.simpleline()
- logs.simple("preset : TEXPATH => %s", getenv('TEXPATH'))
- logs.simple("preset : TEXOS => %s", getenv('TEXOS'))
- logs.simple("preset : TEXMFOS => %s", getenv('TEXMFOS'))
- logs.simple("preset : TMP => %s", getenv('TMP'))
- logs.simple('')
-end
+-- operators accepted in tmf files (left and right forms are synonyms):
+--   =  <<   set
+--   ?  ??   set only when currently unset
+--   <  +=   append to the current value
+--   >  =+   prepend to the current value
-function resolvers.load_environment(name) -- todo: key=value as well as lua
- local f = io.open(name)
- if f then
- for line in f:lines() do
- if find(line,"^[%%%#]") then
- -- skip comment
- else
- local key, how, value = match(line,"^(.-)%s*([<=>%?]+)%s*(.*)%s*$")
- if how then
- value = gsub(value,"%%(.-)%%", function(v) return getenv(v) or "" end)
- if how == "=" or how == "<<" then
- setenv(key,value)
- elseif how == "?" or how == "??" then
- setenv(key,getenv(key) or value)
- elseif how == "<" or how == "+=" then
- if getenv(key) then
- setenv(key,getenv(key) .. io.fileseparator .. value)
- else
- setenv(key,value)
- end
- elseif how == ">" or how == "=+" then
- if getenv(key) then
- setenv(key,value .. io.pathseparator .. getenv(key))
- else
- setenv(key,value)
- end
- end
- end
- end
+function resolvers.load_tree(tree,resolve)
+ if type(tree) == "string" and tree ~= "" then
+
+ local getenv, setenv = resolvers.getenv, resolvers.setenv
+
+ -- later might listen to the raw osenv var as well
+ local texos = "texmf-" .. os.platform
+
+ local oldroot = environment.texroot
+ local newroot = file.collapsepath(tree)
+
+ local newtree = file.join(newroot,texos)
+ local newpath = file.join(newtree,"bin")
+
+ if not lfs.isdir(newtree) then
+ report_tds("no '%s' under tree %s",texos,tree)
+ os.exit()
end
- f:close()
+ if not lfs.isdir(newpath) then
+ report_tds("no '%s/bin' under tree %s",texos,tree)
+ os.exit()
+ end
+
+ local texmfos = newtree
+
+ environment.texroot = newroot
+ environment.texos = texos
+ environment.texmfos = texmfos
+
+ -- Beware, we need to obey the relocatable autoparent so we
+ -- set TEXMFCNF to its raw value. This is somewhat tricky when
+ -- we run a mkii job from within. Therefore, in mtxrun, there
+ -- is a resolve applied when we're in mkii/kpse mode.
+
+ setenv('SELFAUTOPARENT', newroot)
+ setenv('SELFAUTODIR', newtree)
+ setenv('SELFAUTOLOC', newpath)
+ setenv('TEXROOT', newroot)
+ setenv('TEXOS', texos)
+ setenv('TEXMFOS', texmfos)
+ setenv('TEXMFCNF', resolvers.luacnfspec, not resolve)
+ setenv("PATH", newpath .. io.pathseparator .. getenv("PATH"))
+
+ report_tds("changing from root '%s' to '%s'",oldroot,newroot)
+ report_tds("prepending '%s' to binary path",newpath)
+ report_tds()
end
end
-function resolvers.load_tree(tree)
- if tree and tree ~= "" then
- local setuptex = 'setuptex.tmf'
- if lfs.attributes(tree, "mode") == "directory" then -- check if not nil
- setuptex = tree .. "/" .. setuptex
- else
- setuptex = tree
+
+end -- of closure
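A minimal sketch of the new resolvers.load_tree; the tree root is hypothetical and is expected to contain a texmf-<platform>/bin directory, otherwise the function reports the problem and exits:

    -- switches TEXROOT, TEXOS, TEXMFOS, the SELFAUTO* variables and PATH to the given tree
    resolvers.load_tree("/data/tex-trees/minimals")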
+
+do -- create closure to overcome 200 locals limit
+
+if not modules then modules = { } end modules ['data-lst'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- used in mtxrun, can be loaded later .. todo
+
+local find, concat, upper, format = string.find, table.concat, string.upper, string.format
+
+resolvers.listers = resolvers.listers or { }
+
+local resolvers = resolvers
+
+local report_lists = logs.reporter("resolvers","lists")
+
+local function tabstr(str)
+ if type(str) == 'table' then
+ return concat(str," | ")
+ else
+ return str
+ end
+end
+
+function resolvers.listers.variables(pattern)
+ local instance = resolvers.instance
+ local environment = instance.environment
+ local variables = instance.variables
+ local expansions = instance.expansions
+ local pattern = upper(pattern or "")
+ local configured = { }
+ local order = instance.order
+ for i=1,#order do
+ for k, v in next, order[i] do
+ if v ~= nil and configured[k] == nil then
+ configured[k] = v
+ end
end
- if io.exists(setuptex) then
- resolvers.check_environment(tree)
- resolvers.load_environment(setuptex)
+ end
+ local env = table.fastcopy(environment)
+ local var = table.fastcopy(variables)
+ local exp = table.fastcopy(expansions)
+ for key, value in table.sortedpairs(configured) do
+ if key ~= "" and (pattern == "" or find(upper(key),pattern)) then
+ report_lists(key)
+ report_lists(" env: %s",tabstr(rawget(environment,key)) or "unset")
+ report_lists(" var: %s",tabstr(configured[key]) or "unset")
+ report_lists(" exp: %s",tabstr(expansions[key]) or "unset")
+ report_lists(" res: %s",resolvers.resolve(expansions[key]) or "unset")
end
end
+ instance.environment = table.fastcopy(env)
+ instance.variables = table.fastcopy(var)
+ instance.expansions = table.fastcopy(exp)
+end
+
+function resolvers.listers.configurations(report)
+ local configurations = resolvers.instance.specification
+ local report = report or texio.write_nl
+ for i=1,#configurations do
+ report(resolvers.resolve(configurations[i]))
+ end
end
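Illustrative calls to the two listers defined above (the pattern is arbitrary): variables() reports the configured, environment and expanded values of matching variables, configurations() the resolved configuration order:

    resolvers.listers.variables("texmf")  -- matched against the upper cased variable names
    resolvers.listers.configurations()    -- falls back to texio.write_nl as reporter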
@@ -11691,12 +13870,18 @@ if not modules then modules = { } end modules ['luat-sta'] = {
-- this code is used in the updater
-local gmatch, match = string.gmatch, string.match
-local type = type
+local gmatch, match = string.gmatch, string.match
+local type = type
+
+states = states or { }
+local states = states
+
+states.data = states.data or { }
+local data = states.data
+
+states.hash = states.hash or { }
+local hash = states.hash
-states = states or { }
-states.data = states.data or { }
-states.hash = states.hash or { }
states.tag = states.tag or ""
states.filename = states.filename or ""
@@ -11706,7 +13891,7 @@ function states.save(filename,tag)
io.savedata(filename,
"-- generator : luat-sta.lua\n" ..
"-- state tag : " .. tag .. "\n\n" ..
- table.serialize(states.data[tag or states.tag] or {},true)
+ table.serialize(data[tag or states.tag] or {},true)
)
end
@@ -11714,11 +13899,11 @@ function states.load(filename,tag)
states.filename = filename
states.tag = tag or "whatever"
states.filename = file.addsuffix(states.filename,'lus')
- states.data[states.tag], states.hash[states.tag] = (io.exists(filename) and dofile(filename)) or { }, { }
+ data[states.tag], hash[states.tag] = (io.exists(filename) and dofile(filename)) or { }, { }
end
-function states.set_by_tag(tag,key,value,default,persistent)
- local d, h = states.data[tag], states.hash[tag]
+local function set_by_tag(tag,key,value,default,persistent)
+ local d, h = data[tag], hash[tag]
if d then
if type(d) == "table" then
local dkey, hkey = key, key
@@ -11738,8 +13923,10 @@ function states.set_by_tag(tag,key,value,default,persistent)
end
dkey, hkey = post, key
end
- if type(value) == nil then
- value = value or default
+ if value == nil then
+ value = default
+ elseif value == false then
+ -- special case
elseif persistent then
value = value or d[dkey] or default
else
@@ -11748,144 +13935,202 @@ function states.set_by_tag(tag,key,value,default,persistent)
d[dkey], h[hkey] = value, value
elseif type(d) == "string" then
-- weird
- states.data[tag], states.hash[tag] = value, value
+ data[tag], hash[tag] = value, value
end
end
end
-function states.get_by_tag(tag,key,default)
- local h = states.hash[tag]
+local function get_by_tag(tag,key,default)
+ local h = hash[tag]
if h and h[key] then
return h[key]
else
- local d = states.data[tag]
+ local d = data[tag]
if d then
for k in gmatch(key,"[^%.]+") do
local dk = d[k]
- if dk then
+ if dk ~= nil then
d = dk
else
return default
end
end
- return d or default
+ if d == false then
+ return false
+ else
+ return d or default
+ end
end
end
end
+states.set_by_tag = set_by_tag
+states.get_by_tag = get_by_tag
+
function states.set(key,value,default,persistent)
- states.set_by_tag(states.tag,key,value,default,persistent)
+ set_by_tag(states.tag,key,value,default,persistent)
end
function states.get(key,default)
- return states.get_by_tag(states.tag,key,default)
-end
-
---~ states.data.update = {
---~ ["version"] = {
---~ ["major"] = 0,
---~ ["minor"] = 1,
---~ },
---~ ["rsync"] = {
---~ ["server"] = "contextgarden.net",
---~ ["module"] = "minimals",
---~ ["repository"] = "current",
---~ ["flags"] = "-rpztlv --stats",
---~ },
---~ ["tasks"] = {
---~ ["update"] = true,
---~ ["make"] = true,
---~ ["delete"] = false,
---~ },
---~ ["platform"] = {
---~ ["host"] = true,
---~ ["other"] = {
---~ ["mswin"] = false,
---~ ["linux"] = false,
---~ ["linux-64"] = false,
---~ ["osx-intel"] = false,
---~ ["osx-ppc"] = false,
---~ ["sun"] = false,
---~ },
---~ },
---~ ["context"] = {
---~ ["available"] = {"current", "beta", "alpha", "experimental"},
---~ ["selected"] = "current",
---~ },
---~ ["formats"] = {
---~ ["cont-en"] = true,
---~ ["cont-nl"] = true,
---~ ["cont-de"] = false,
---~ ["cont-cz"] = false,
---~ ["cont-fr"] = false,
---~ ["cont-ro"] = false,
---~ },
---~ ["engine"] = {
---~ ["pdftex"] = {
---~ ["install"] = true,
---~ ["formats"] = {
---~ ["pdftex"] = true,
---~ },
---~ },
---~ ["luatex"] = {
---~ ["install"] = true,
---~ ["formats"] = {
---~ },
---~ },
---~ ["xetex"] = {
---~ ["install"] = true,
---~ ["formats"] = {
---~ ["xetex"] = false,
---~ },
---~ },
---~ ["metapost"] = {
---~ ["install"] = true,
---~ ["formats"] = {
---~ ["mpost"] = true,
---~ ["metafun"] = true,
---~ },
---~ },
---~ },
---~ ["fonts"] = {
---~ },
---~ ["doc"] = {
---~ },
---~ ["modules"] = {
---~ ["f-urwgaramond"] = false,
---~ ["f-urwgothic"] = false,
---~ ["t-bnf"] = false,
---~ ["t-chromato"] = false,
---~ ["t-cmscbf"] = false,
---~ ["t-cmttbf"] = false,
---~ ["t-construction-plan"] = false,
---~ ["t-degrade"] = false,
---~ ["t-french"] = false,
---~ ["t-lettrine"] = false,
---~ ["t-lilypond"] = false,
---~ ["t-mathsets"] = false,
---~ ["t-tikz"] = false,
---~ ["t-typearea"] = false,
---~ ["t-vim"] = false,
---~ },
---~ }
-
---~ states.save("teststate", "update")
---~ states.load("teststate", "update")
-
---~ print(states.get_by_tag("update","rsync.server","unknown"))
---~ states.set_by_tag("update","rsync.server","oeps")
---~ print(states.get_by_tag("update","rsync.server","unknown"))
---~ states.save("teststate", "update")
---~ states.load("teststate", "update")
---~ print(states.get_by_tag("update","rsync.server","unknown"))
+ return get_by_tag(states.tag,key,default)
+end
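A short usage sketch, mirroring the commented example that was removed above; the file name, tag and keys are only illustrative:

    states.load("teststate","update")
    states.set_by_tag("update","rsync.server","contextgarden.net")
    print(states.get_by_tag("update","rsync.server","unknown"))
    states.save("teststate","update")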
+
+
+
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+if not modules then modules = { } end modules ['luat-fmt'] = {
+ version = 1.001,
+ comment = "companion to mtxrun",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+
+local format = string.format
+
+local report_format = logs.reporter("resolvers","formats")
+
+-- helper for mtxrun
+
+local quoted = string.quoted
+
+local function primaryflags() -- not yet ok
+ local trackers = environment.argument("trackers")
+ local directives = environment.argument("directives")
+ local flags = ""
+ if trackers and trackers ~= "" then
+ flags = flags .. "--trackers=" .. quoted(trackers)
+ end
+ if directives and directives ~= "" then
+ flags = flags .. "--directives=" .. quoted(directives)
+ end
+ return flags
+end
+
+function environment.make_format(name)
+ -- change to format path (early as we need expanded paths)
+ local olddir = lfs.currentdir()
+ local path = caches.getwritablepath("formats") or "" -- maybe platform
+ if path ~= "" then
+ lfs.chdir(path)
+ end
+ report_format("format path: %s",lfs.currentdir())
+ -- check source file
+ local texsourcename = file.addsuffix(name,"mkiv")
+ local fulltexsourcename = resolvers.findfile(texsourcename,"tex") or ""
+ if fulltexsourcename == "" then
+ texsourcename = file.addsuffix(name,"tex")
+ fulltexsourcename = resolvers.findfile(texsourcename,"tex") or ""
+ end
+ if fulltexsourcename == "" then
+ report_format("no tex source file with name: %s (mkiv or tex)",name)
+ lfs.chdir(olddir)
+ return
+ else
+ report_format("using tex source file: %s",fulltexsourcename)
+ end
+ local texsourcepath = dir.expandname(file.dirname(fulltexsourcename)) -- really needed
+ -- check specification
+ local specificationname = file.replacesuffix(fulltexsourcename,"lus")
+ local fullspecificationname = resolvers.findfile(specificationname,"tex") or ""
+ if fullspecificationname == "" then
+ specificationname = file.join(texsourcepath,"context.lus")
+ fullspecificationname = resolvers.findfile(specificationname,"tex") or ""
+ end
+ if fullspecificationname == "" then
+ report_format("unknown stub specification: %s",specificationname)
+ lfs.chdir(olddir)
+ return
+ end
+ local specificationpath = file.dirname(fullspecificationname)
+ -- load specification
+ local usedluastub = nil
+ local usedlualibs = dofile(fullspecificationname)
+ if type(usedlualibs) == "string" then
+ usedluastub = file.join(file.dirname(fullspecificationname),usedlualibs)
+ elseif type(usedlualibs) == "table" then
+ report_format("using stub specification: %s",fullspecificationname)
+ local texbasename = file.basename(name)
+ local luastubname = file.addsuffix(texbasename,"lua")
+ local lucstubname = file.addsuffix(texbasename,"luc")
+ -- pack libraries in stub
+ report_format("creating initialization file: %s",luastubname)
+ utilities.merger.selfcreate(usedlualibs,specificationpath,luastubname)
+ -- compile stub file (does not save that much as we don't use this stub at startup any more)
+ local strip = resolvers.booleanvariable("LUACSTRIP", true)
+ if utilities.lua.compile(luastubname,lucstubname) and lfs.isfile(lucstubname) then
+ report_format("using compiled initialization file: %s",lucstubname)
+ usedluastub = lucstubname
+ else
+ report_format("using uncompiled initialization file: %s",luastubname)
+ usedluastub = luastubname
+ end
+ else
+ report_format("invalid stub specification: %s",fullspecificationname)
+ lfs.chdir(olddir)
+ return
+ end
+ -- generate format
+ local command = format("luatex --ini %s --lua=%s %s %sdump",primaryflags(),quoted(usedluastub),quoted(fulltexsourcename),os.platform == "unix" and "\\\\" or "\\")
+ report_format("running command: %s\n",command)
+ os.spawn(command)
+ -- remove related mem files
+ local pattern = file.removesuffix(file.basename(usedluastub)).."-*.mem"
+ -- report_format("removing related mplib format with pattern '%s'", pattern)
+ local mp = dir.glob(pattern)
+ if mp then
+ for i=1,#mp do
+ local name = mp[i]
+ report_format("removing related mplib format %s", file.basename(name))
+ os.remove(name)
+ end
+ end
+ lfs.chdir(olddir)
+end
+
+function environment.run_format(name,data,more)
+ -- hm, rather old code here; we can now use the file.whatever functions
+ if name and name ~= "" then
+ local barename = file.removesuffix(name)
+ local fmtname = caches.getfirstreadablefile(file.addsuffix(barename,"fmt"),"formats")
+ if fmtname == "" then
+ fmtname = resolvers.findfile(file.addsuffix(barename,"fmt")) or ""
+ end
+ fmtname = resolvers.cleanpath(fmtname)
+ if fmtname == "" then
+ report_format("no format with name: %s",name)
+ else
+ local barename = file.removesuffix(name) -- expanded name
+ local luaname = file.addsuffix(barename,"luc")
+ if not lfs.isfile(luaname) then
+ luaname = file.addsuffix(barename,"lua")
+ end
+ if not lfs.isfile(luaname) then
+ report_format("using format name: %s",fmtname)
+ report_format("no luc/lua with name: %s",barename)
+ else
+ local command = format("luatex %s --fmt=%s --lua=%s %s %s",primaryflags(),quoted(barename),quoted(luaname),quoted(data),more ~= "" and quoted(more) or "")
+ report_format("running command: %s",command)
+ os.spawn(command)
+ end
+ end
+ end
+end
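A hedged example of the two helpers above, much as mtxrun itself uses them when asked to (re)make or run a format; the format name is the usual english ConTeXt format and the input file is hypothetical:

    environment.make_format("cont-en")                  -- builds cont-en.fmt in the writable formats cache
    environment.run_format("cont-en","article.tex","")  -- runs luatex with that format on a file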
end -- of closure
-- end library merge
-own = { } -- not local
+own = { } -- not local, might change
+
+own.libs = { -- order can be made better
-own.libs = { -- todo: check which ones are really needed
'l-string.lua',
'l-lpeg.lua',
'l-table.lua',
@@ -11898,39 +14143,55 @@ own.libs = { -- todo: check which ones are really needed
'l-url.lua',
'l-dir.lua',
'l-boolean.lua',
+ 'l-unicode.lua',
'l-math.lua',
--- 'l-unicode.lua',
--- 'l-tex.lua',
- 'l-utils.lua',
- 'l-aux.lua',
--- 'l-xml.lua',
- 'trac-tra.lua',
+
+ 'util-tab.lua',
+ 'util-sto.lua',
+ 'util-mrg.lua',
+ 'util-lua.lua',
+ 'util-prs.lua',
+ 'util-fmt.lua',
+ 'util-deb.lua',
+
+ 'trac-inf.lua',
+ 'trac-set.lua',
+ 'trac-log.lua',
+ 'trac-pro.lua',
+
+ 'luat-env.lua', -- can come before inf (as in mkiv)
+
'lxml-tab.lua',
'lxml-lpt.lua',
--- 'lxml-ent.lua',
+ -- 'lxml-ent.lua',
'lxml-mis.lua',
'lxml-aux.lua',
'lxml-xml.lua',
- 'luat-env.lua',
- 'trac-inf.lua',
- 'trac-log.lua',
- 'data-res.lua',
+
+ 'data-ini.lua',
+ 'data-exp.lua',
+ 'data-env.lua',
'data-tmp.lua',
+ 'data-met.lua',
+ 'data-res.lua',
'data-pre.lua',
'data-inp.lua',
'data-out.lua',
+ 'data-fil.lua',
'data-con.lua',
'data-use.lua',
-- 'data-tex.lua',
-- 'data-bin.lua',
'data-zip.lua',
+ 'data-tre.lua',
'data-crl.lua',
'data-lua.lua',
- 'data-kps.lua', -- so that we can replace kpsewhich
'data-aux.lua', -- updater
- 'data-tmf.lua', -- tree files
- -- needed ?
- 'luat-sta.lua', -- states
+ 'data-tmf.lua',
+ 'data-lst.lua',
+
+ 'luat-sta.lua',
+ 'luat-fmt.lua',
}
-- We need this hack till luatex is fixed.
@@ -11943,35 +14204,62 @@ end
-- End of hack.
-own.name = (environment and environment.ownname) or arg[0] or 'luatools.lua'
+local format, gsub, gmatch, match = string.format, string.gsub, string.gmatch, string.match
+local concat = table.concat
+
+own.name = (environment and environment.ownname) or arg[0] or 'mtxrun.lua'
+own.path = gsub(match(own.name,"^(.+)[\\/].-$") or ".","\\","/")
+
+local ownpath, owntree = own.path, environment and environment.ownpath or own.path
+
+own.list = {
+ '.',
+ ownpath ,
+ ownpath .. "/../sources", -- HH's development path
+ owntree .. "/../../texmf-local/tex/context/base",
+ owntree .. "/../../texmf-context/tex/context/base",
+ owntree .. "/../../texmf-dist/tex/context/base",
+ owntree .. "/../../texmf/tex/context/base",
+ owntree .. "/../../../texmf-local/tex/context/base",
+ owntree .. "/../../../texmf-context/tex/context/base",
+ owntree .. "/../../../texmf-dist/tex/context/base",
+ owntree .. "/../../../texmf/tex/context/base",
+}
+if own.path == "." then table.remove(own.list,1) end
-own.path = string.match(own.name,"^(.+)[\\/].-$") or "."
-own.list = { '.' }
-if own.path ~= '.' then
- table.insert(own.list,own.path)
+local function locate_libs()
+ for l=1,#own.libs do
+ local lib = own.libs[l]
+ for p =1,#own.list do
+ local pth = own.list[p]
+ local filename = pth .. "/" .. lib
+ local found = lfs.isfile(filename)
+ if found then
+ package.path = package.path .. ";" .. pth .. "/?.lua" -- in case l-* does a require
+ return pth
+ end
+ end
+ end
end
-table.insert(own.list,own.path.."/../../texmf-dist/tex/context/base")
-table.insert(own.list,own.path.."/../../../tex/context/base")
-table.insert(own.list,own.path.."/mtx")
-table.insert(own.list,own.path.."/../sources")
-local function locate_libs()
- for _, lib in pairs(own.libs) do
- for _, pth in pairs(own.list) do
- local filename = string.gsub(pth .. "/" .. lib,"\\","/")
+local function load_libs()
+ local found = locate_libs()
+ if found then
+ for l=1,#own.libs do
+ local filename = found .. "/" .. own.libs[l]
local codeblob = loadfile(filename)
if codeblob then
codeblob()
- own.list = { pth } -- speed up te search
- break
end
end
+ else
+ resolvers = nil
end
end
if not resolvers then
- locate_libs()
+ load_libs()
end
if not resolvers then
@@ -11983,20 +14271,56 @@ if not resolvers then
os.exit()
end
-logs.setprogram('MTXrun',"TDS Runner Tool 1.24",environment.arguments["verbose"] or false)
+-- verbosity
-local instance = resolvers.reset()
+local e_verbose = environment.arguments["verbose"]
+
+if e_verbose then
+ trackers.enable("resolvers.locating")
+end
+
+-- some common flags (also passed through environment)
+
+local e_silent = environment.argument("silent")
+local e_noconsole = environment.argument("noconsole")
+
+local e_trackers = environment.argument("trackers")
+local e_directives = environment.argument("directives")
+local e_experiments = environment.argument("experiments")
+
+if e_silent == true then
+ e_silent = "*"
+end
-local trackspec = environment.argument("trackers") or environment.argument("track")
+if type(e_silent) == "string" then
+ if type(e_directives) == "string" then
+ e_directives = format("%s,logs.blocked={%s}",e_directives,e_silent)
+ else
+ e_directives = format("logs.blocked={%s}",e_silent)
+ end
+end
-if trackspec then
- trackers.enable(trackspec)
+if e_noconsole then
+ if type(e_directives) == "string" then
+ e_directives = format("%s,logs.target=file",e_directives)
+ else
+ e_directives = format("logs.target=file")
+ end
end
-runners = runners or { } -- global
-messages = messages or { }
+if e_trackers then trackers .enable(e_trackers) end
+if e_directives then directives .enable(e_directives) end
+if e_experiments then experiments.enable(e_experiments) end
+
+if not environment.trackers then environment.trackers = e_trackers end
+if not environment.directives then environment.directives = e_directives end
+if not environment.experiments then environment.experiments = e_experiments end
+
+--
+
+local instance = resolvers.reset()
-messages.help = [[
+local helpinfo = [[
--script run an mtx script (lua preferred method) (--noquotes), no script gives list
--execute run a script or program (texmfstart method) (--noquotes)
--resolve resolve prefixed arguments
@@ -12011,15 +14335,14 @@ messages.help = [[
--ifchanged=filename only execute when given file has changed (md5 checksum)
--iftouched=old,new only execute when given file has changed (time stamp)
---make create stubs for (context related) scripts
---remove remove stubs (context related) scripts
+--makestubs create stubs for (context related) scripts
+--removestubs remove stubs (context related) scripts
--stubpath=binpath paths where stubs will be written
--windows create windows (mswin) stubs
--unix create unix (linux) stubs
--verbose give a bit more info
--trackers=list enable given trackers
---engine=str target engine
--progname=str format or backend
--edit launch editor with found file
@@ -12032,8 +14355,35 @@ messages.help = [[
--forcekpse force using kpse (handy when mkiv and its cache are not installed, but less functionality)
--prefixes show supported prefixes
+
+--generate generate file database
+
+--variables show configuration variables
+--configurations show configuration order
+
+--expand-braces expand complex variable
+--expand-path expand variable (resolve paths)
+--expand-var expand variable (resolve references)
+--show-path show path expansion of ...
+--var-value report value of variable
+--find-file report file location
+--find-path report path of file
+
+--pattern=str filter variables
]]
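Typical invocations corresponding to a few of the flags listed above (illustrative only):

    -- mtxrun --generate                    (re)build the file database
    -- mtxrun --variables --pattern=texmf   show matching configuration variables
    -- mtxrun --find-file context.mkiv      report where a file is located
    -- mtxrun --script                      list the known mtx scripts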
+local application = logs.application {
+ name = "mtxrun",
+ banner = "ConTeXt TDS Runner Tool 1.30",
+ helpinfo = helpinfo,
+}
+
+local report = application.report
+
+messages = messages or { } -- for the moment
+
+runners = runners or { } -- global (might become local)
+
runners.applications = {
["lua"] = "luatex --luaonly",
["luc"] = "luatex --luaonly",
@@ -12056,7 +14406,7 @@ runners.registered = {
makempy = { 'makempy.pl', true },
mptopdf = { 'mptopdf.pl', true },
pstopdf = { 'pstopdf.rb', true }, -- converts ps (and some more) images, does some cleaning (replaced)
--- examplex = { 'examplex.rb', false },
+ -- examplex = { 'examplex.rb', false },
concheck = { 'concheck.rb', false },
runtools = { 'runtools.rb', true },
textools = { 'textools.rb', true },
@@ -12065,9 +14415,9 @@ runners.registered = {
rlxtools = { 'rlxtools.rb', true },
pdftools = { 'pdftools.rb', true },
mpstools = { 'mpstools.rb', true },
--- exatools = { 'exatools.rb', true },
+ -- exatools = { 'exatools.rb', true },
xmltools = { 'xmltools.rb', true },
--- luatools = { 'luatools.lua', true },
+ -- luatools = { 'luatools.lua', true },
mtxtools = { 'mtxtools.rb', true },
pdftrimwhite = { 'pdftrimwhite.pl', false }
}
@@ -12086,45 +14436,39 @@ end
function runners.prepare()
local checkname = environment.argument("ifchanged")
- if checkname and checkname ~= "" then
+ if type(checkname) == "string" and checkname ~= "" then
local oldchecksum = file.loadchecksum(checkname)
local newchecksum = file.checksum(checkname)
if oldchecksum == newchecksum then
- logs.simple("file '%s' is unchanged",checkname)
+ if e_verbose then
+ report("file '%s' is unchanged",checkname)
+ end
return "skip"
- else
- logs.simple("file '%s' is changed, processing started",checkname)
+ elseif e_verbose then
+ report("file '%s' is changed, processing started",checkname)
end
file.savechecksum(checkname)
end
- local oldname, newname = string.split(environment.argument("iftouched") or "", ",")
- if oldname and newname and oldname ~= "" and newname ~= "" then
- if not file.needs_updating(oldname,newname) then
- logs.simple("file '%s' and '%s' have same age",oldname,newname)
- return "skip"
- else
- logs.simple("file '%s' is older than '%s'",oldname,newname)
- end
- end
- local tree = environment.argument('tree') or ""
- if environment.argument('autotree') then
- tree = os.getenv('TEXMFSTART_TREE') or os.getenv('TEXMFSTARTTREE') or tree
- end
- if tree and tree ~= "" then
- resolvers.load_tree(tree)
- end
- local env = environment.argument('environment') or ""
- if env and env ~= "" then
- for _,e in pairs(string.split(env)) do
- -- maybe force suffix when not given
- resolvers.load_tree(e)
+ local touchname = environment.argument("iftouched")
+ if type(touchname) == "string" and touchname ~= "" then
+ local oldname, newname = string.split(touchname, ",")
+ if oldname and newname and oldname ~= "" and newname ~= "" then
+ if not file.needs_updating(oldname,newname) then
+ if e_verbose then
+ report("file '%s' and '%s' have same age",oldname,newname)
+ end
+ return "skip"
+ elseif e_verbose then
+ report("file '%s' is older than '%s'",oldname,newname)
+ end
end
end
local runpath = environment.argument("path")
- if runpath and not lfs.chdir(runpath) then
- logs.simple("unable to change to path '%s'",runpath)
+ if type(runpath) == "string" and not lfs.chdir(runpath) then
+ report("unable to change to path '%s'",runpath)
return "error"
end
+ runners.prepare = function() end
return "run"
end
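+
+-- in short (illustrative): --ifchanged=name skips the run when the stored checksum
+-- of 'name' is unchanged, --iftouched=old,new skips it when both files have the
+-- same age, and --path=dir changes to 'dir' before running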
@@ -12137,8 +14481,6 @@ function runners.execute_script(fullname,internal,nosplit)
elseif state == 'skip' then
return true
elseif state == "run" then
- instance.progname = environment.argument("progname") or instance.progname
- instance.format = environment.argument("format") or instance.format
local path, name, suffix, result = file.dirname(fullname), file.basename(fullname), file.extname(fullname), ""
if path ~= "" then
result = fullname
@@ -12155,21 +14497,21 @@ function runners.execute_script(fullname,internal,nosplit)
if suffix == "" then
-- loop over known suffixes
for _,s in pairs(runners.suffixes) do
- result = resolvers.find_file(name .. "." .. s, 'texmfscripts')
+ result = resolvers.findfile(name .. "." .. s, 'texmfscripts')
if result ~= "" then
break
end
end
elseif runners.applications[suffix] then
- result = resolvers.find_file(name, 'texmfscripts')
+ result = resolvers.findfile(name, 'texmfscripts')
else
-- maybe look on path
- result = resolvers.find_file(name, 'other text files')
+ result = resolvers.findfile(name, 'other text files')
end
end
if result and result ~= "" then
if not no_split then
- local before, after = environment.split_arguments(fullname) -- already done
+ local before, after = environment.splitarguments(fullname) -- already done
environment.arguments_before, environment.arguments_after = before, after
end
if internal then
@@ -12181,12 +14523,12 @@ function runners.execute_script(fullname,internal,nosplit)
if binary and binary ~= "" then
result = binary .. " " .. result
end
- local command = result .. " " .. environment.reconstruct_commandline(environment.arguments_after,noquote)
- if logs.verbose then
- logs.simpleline()
- logs.simple("executing: %s",command)
- logs.simpleline()
- logs.simpleline()
+ local command = result .. " " .. environment.reconstructcommandline(environment.arguments_after,noquote)
+ if e_verbose then
+ report()
+ report("executing: %s",command)
+ report()
+ report()
io.flush()
end
-- no os.exec because otherwise we get the wrong return value
@@ -12196,14 +14538,14 @@ function runners.execute_script(fullname,internal,nosplit)
else
if binary then
binary = file.addsuffix(binary,os.binsuffix)
- for p in string.gmatch(os.getenv("PATH"),"[^"..io.pathseparator.."]+") do
+ for p in gmatch(os.getenv("PATH"),"[^"..io.pathseparator.."]+") do
if lfs.isfile(file.join(p,binary)) then
return false
end
end
- logs.simpleline()
- logs.simple("This script needs '%s' which seems not to be installed.",binary)
- logs.simpleline()
+ report()
+ report("This script needs '%s' which seems not to be installed.",binary)
+ report()
end
return false
end
@@ -12223,14 +14565,15 @@ function runners.execute_program(fullname)
elseif state == 'skip' then
return true
elseif state == "run" then
- local before, after = environment.split_arguments(fullname)
- environment.initialize_arguments(after)
+ local before, after = environment.splitarguments(fullname)
+ for k=1,#after do after[k] = resolvers.resolve(after[k]) end
+ environment.initializearguments(after)
fullname = fullname:gsub("^bin:","")
- local command = fullname .. " " .. (environment.reconstruct_commandline(after or "",noquote) or "")
- logs.simpleline()
- logs.simple("executing: %s",command)
- logs.simpleline()
- logs.simpleline()
+ local command = fullname .. " " .. (environment.reconstructcommandline(after or "",noquote) or "")
+ report()
+ report("executing: %s",command)
+ report()
+ report()
io.flush()
local code = os.exec(command) -- (fullname,unpack(after)) does not work / maybe spawn
return code == 0
@@ -12239,7 +14582,7 @@ function runners.execute_program(fullname)
return false
end
--- the --usekpse flag will fallback on kpse (hm, we can better update mtx-stubs)
+-- the --usekpse flag will fall back (not by default) on kpse (hm, we had better update the mtx-stubs)
local windows_stub = '@echo off\013\010setlocal\013\010set ownpath=%%~dp0%%\013\010texlua "%%ownpath%%mtxrun.lua" --usekpse --execute %s %%*\013\010endlocal\013\010'
local unix_stub = '#!/bin/sh\010mtxrun --usekpse --execute %s \"$@\"\010'
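+
+-- so for a registered script such as 'mptopdf.pl' the generated unix stub would
+-- read (illustrative):
+--
+--   #!/bin/sh
+--   mtxrun --usekpse --execute mptopdf.pl "$@"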
@@ -12258,22 +14601,22 @@ function runners.handle_stubs(create)
for _,v in pairs(runners.registered) do
local name, doit = v[1], v[2]
if doit then
- local base = string.gsub(file.basename(name), "%.(.-)$", "")
+ local base = gsub(file.basename(name), "%.(.-)$", "")
if create then
if windows then
- io.savedata(file.join(stubpath,base..".bat"),string.format(windows_stub,name))
- logs.simple("windows stub for '%s' created",base)
+ io.savedata(file.join(stubpath,base..".bat"),format(windows_stub,name))
+ report("windows stub for '%s' created",base)
end
if unix then
- io.savedata(file.join(stubpath,base),string.format(unix_stub,name))
- logs.simple("unix stub for '%s' created",base)
+ io.savedata(file.join(stubpath,base),format(unix_stub,name))
+ report("unix stub for '%s' created",base)
end
else
if windows and (os.remove(file.join(stubpath,base..'.bat')) or os.remove(file.join(stubpath,base..'.cmd'))) then
- logs.simple("windows stub for '%s' removed", base)
+ report("windows stub for '%s' removed", base)
end
if unix and (os.remove(file.join(stubpath,base)) or os.remove(file.join(stubpath,base..'.sh'))) then
- logs.simple("unix stub for '%s' removed",base)
+ report("unix stub for '%s' removed",base)
end
end
end
@@ -12289,7 +14632,7 @@ end
function runners.locate_file(filename)
-- differs from texmfstart where locate appends .com .exe .bat ... todo
if filename and filename ~= "" then
- runners.report_location(resolvers.find_given_file(filename))
+ runners.report_location(resolvers.findgivenfile(filename))
end
end
@@ -12298,28 +14641,28 @@ function runners.locate_platform()
end
function runners.report_location(result)
- if logs.verbose then
- logs.simpleline()
+ if e_verbose then
+        report()
if result and result ~= "" then
- logs.simple(result)
+ report(result)
else
- logs.simple("not found")
+ report("not found")
end
else
io.write(result)
end
end
-function runners.edit_script(filename) -- we assume that vim is present on most systems
- local editor = os.getenv("MTXRUN_EDITOR") or os.getenv("TEXMFSTART_EDITOR") or os.getenv("EDITOR") or 'vim'
+function runners.edit_script(filename) -- we assume that gvim is present on most systems (todo: also in cnf file)
+ local editor = os.getenv("MTXRUN_EDITOR") or os.getenv("TEXMFSTART_EDITOR") or os.getenv("EDITOR") or 'gvim'
local rest = resolvers.resolve(filename)
if rest ~= "" then
local command = editor .. " " .. rest
- if logs.verbose then
- logs.simpleline()
- logs.simple("starting editor: %s",command)
- logs.simple_line()
- logs.simple_line()
+ if e_verbose then
+ report()
+ report("starting editor: %s",command)
+ report()
+ report()
end
os.launch(command)
end
@@ -12362,33 +14705,33 @@ end
function runners.launch_file(filename)
instance.allresults = true
- logs.setverbose(true)
+ trackers.enable("resolvers.locating")
local pattern = environment.arguments["pattern"]
if not pattern or pattern == "" then
pattern = filename
end
if not pattern or pattern == "" then
- logs.simple("provide name or --pattern=")
+ report("provide name or --pattern=")
else
- local t = resolvers.find_files(pattern)
+ local t = resolvers.findfiles(pattern)
if not t or #t == 0 then
- t = resolvers.find_files("*/" .. pattern)
+ t = resolvers.findfiles("*/" .. pattern)
end
if not t or #t == 0 then
- t = resolvers.find_files("*/" .. pattern .. "*")
+ t = resolvers.findfiles("*/" .. pattern .. "*")
end
if t and #t > 0 then
if environment.arguments["all"] then
for _, v in pairs(t) do
- logs.simple("launching %s", v)
+ report("launching %s", v)
resolvers.launch(v)
end
else
- logs.simple("launching %s", t[1])
+ report("launching %s", t[1])
resolvers.launch(t[1])
end
else
- logs.simple("no match for %s", pattern)
+ report("no match for %s", pattern)
end
end
end
@@ -12421,33 +14764,45 @@ function runners.find_mtx_script(filename)
local mtxprefix = (filename:find("^mtx%-") and "") or "mtx-"
-- context namespace, mtx-
fullname = mtxprefix .. filename
- fullname = found(fullname) or resolvers.find_file(fullname)
+ fullname = found(fullname) or resolvers.findfile(fullname)
if fullname and fullname ~= "" then
return fullname
end
-- context namespace, mtx-s
fullname = mtxprefix .. basename .. "s" .. "." .. suffix
- fullname = found(fullname) or resolvers.find_file(fullname)
+ fullname = found(fullname) or resolvers.findfile(fullname)
if fullname and fullname ~= "" then
return fullname
end
-- context namespace, mtx-
fullname = mtxprefix .. basename:gsub("s$","") .. "." .. suffix
- fullname = found(fullname) or resolvers.find_file(fullname)
+ fullname = found(fullname) or resolvers.findfile(fullname)
if fullname and fullname ~= "" then
return fullname
end
-- context namespace, just
- fullname = resolvers.find_file(filename)
+ fullname = resolvers.findfile(filename)
return fullname
end
-function runners.execute_ctx_script(filename)
+function runners.register_arguments(...)
+ local arguments = environment.arguments_after
+ local passedon = { ... }
+ for i=#passedon,1,-1 do
+ local pi = passedon[i]
+ if pi then
+ table.insert(arguments,1,pi)
+ end
+ end
+end
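+
+-- e.g. (illustrative) runners.register_arguments("--run") pushes "--run" in front of
+-- the pending arguments so that the mtx script sees it, as used in the dispatch below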
+
+function runners.execute_ctx_script(filename,...)
+ runners.register_arguments(...)
local arguments = environment.arguments_after
local fullname = runners.find_mtx_script(filename) or ""
if file.extname(fullname) == "cld" then
-- handy in editors where we force --autopdf
- logs.simple("running cld script: %s",filename)
+ report("running cld script: %s",filename)
table.insert(arguments,1,fullname)
table.insert(arguments,"--autopdf")
fullname = runners.find_mtx_script("context") or ""
@@ -12455,7 +14810,7 @@ function runners.execute_ctx_script(filename)
-- retry after generate but only if --autogenerate
if fullname == "" and environment.argument("autogenerate") then -- might become the default
instance.renewcache = true
- logs.setverbose(true)
+ trackers.enable("resolvers.locating")
resolvers.load()
--
fullname = runners.find_mtx_script(filename) or ""
@@ -12470,7 +14825,7 @@ function runners.execute_ctx_script(filename)
elseif state == "run" then
-- load and save ... kind of undocumented
arg = { } for _,v in pairs(arguments) do arg[#arg+1] = resolvers.resolve(v) end
- environment.initialize_arguments(arg)
+ environment.initializearguments(arg)
local loadname = environment.arguments['load']
if loadname then
if type(loadname) ~= "string" then loadname = file.basename(fullname) end
@@ -12478,8 +14833,8 @@ function runners.execute_ctx_script(filename)
runners.load_script_session(loadname)
end
filename = environment.files[1]
- if logs.verbose then
- logs.simple("using script: %s\n",fullname)
+ if e_verbose then
+ report("using script: %s\n",fullname)
end
environment.ownscript = fullname
dofile(fullname)
@@ -12495,44 +14850,42 @@ function runners.execute_ctx_script(filename)
return true
end
else
- -- logs.setverbose(true)
if filename == "" or filename == "help" then
- local context = resolvers.find_file("mtx-context.lua")
- logs.setverbose(true)
+ local context = resolvers.findfile("mtx-context.lua")
+ trackers.enable("resolvers.locating")
if context ~= "" then
- local result = dir.glob((string.gsub(context,"mtx%-context","mtx-*"))) -- () needed
+ local result = dir.glob((gsub(context,"mtx%-context","mtx-*"))) -- () needed
local valid = { }
table.sort(result)
for i=1,#result do
local scriptname = result[i]
- local scriptbase = string.match(scriptname,".*mtx%-([^%-]-)%.lua")
+ local scriptbase = match(scriptname,".*mtx%-([^%-]-)%.lua")
if scriptbase then
local data = io.loaddata(scriptname)
- local banner, version = string.match(data,"[\n\r]logs%.extendbanner%s*%(%s*[\"\']([^\n\r]+)%s*(%d+%.%d+)")
+ local banner, version = match(data,"[\n\r]logs%.extendbanner%s*%(%s*[\"\']([^\n\r]+)%s*(%d+%.%d+)")
if banner then
valid[#valid+1] = { scriptbase, version, banner }
end
end
end
if #valid > 0 then
- logs.reportbanner()
- logs.reportline()
- logs.simple("no script name given, known scripts:")
- logs.simple()
+ application.identify()
+ report("no script name given, known scripts:")
+ report()
for k=1,#valid do
local v = valid[k]
- logs.simple("%-12s %4s %s",v[1],v[2],v[3])
+ report("%-12s %4s %s",v[1],v[2],v[3])
end
end
else
- logs.simple("no script name given")
+ report("no script name given")
end
else
filename = file.addsuffix(filename,"lua")
if file.is_qualified_path(filename) then
- logs.simple("unknown script '%s'",filename)
+ report("unknown script '%s'",filename)
else
- logs.simple("unknown script '%s' or 'mtx-%s'",filename,filename)
+ report("unknown script '%s' or 'mtx-%s'",filename,filename)
end
end
return false
@@ -12540,9 +14893,9 @@ function runners.execute_ctx_script(filename)
end
function runners.prefixes()
- logs.reportbanner()
- logs.reportline()
- logs.simple(table.concat(resolvers.allprefixes(true)," "))
+ application.identify()
+ report()
+ report(concat(resolvers.allprefixes(true)," "))
end
function runners.timedrun(filename) -- just for me
@@ -12562,13 +14915,11 @@ end
local filename = environment.files[1] or ""
local ok = true
-local before, after = environment.split_arguments(filename)
+local before, after = environment.splitarguments(filename)
environment.arguments_before, environment.arguments_after = before, after
-environment.initialize_arguments(before)
+environment.initializearguments(before)
-instance.engine = environment.argument("engine") or 'luatex'
-instance.progname = environment.argument("progname") or 'context'
-instance.lsrmode = environment.argument("lsr") or false
+instance.lsrmode = environment.argument("lsr") or false
-- maybe the unset has to go to this level
@@ -12576,36 +14927,40 @@ local is_mkii_stub = runners.registered[file.removesuffix(file.basename(filename
if environment.argument("usekpse") or environment.argument("forcekpse") or is_mkii_stub then
+ resolvers.load_tree(environment.argument('tree'),true) -- force resolve of TEXMFCNF
+
os.setenv("engine","")
os.setenv("progname","")
local remapper = {
- otf = "opentype fonts",
- ttf = "truetype fonts",
- ttc = "truetype fonts",
- pfb = "type1 fonts",
+ otf = "opentype fonts",
+ ttf = "truetype fonts",
+ ttc = "truetype fonts",
+ pfb = "type1 fonts",
other = "other text files",
}
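+
+    -- the remapper maps file suffixes onto the file-type names that kpse expects, so
+    -- e.g. an 'otf' lookup is passed on as "opentype fonts", while an empty kind
+    -- falls back to "tex" in the overrides below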
+ local progname = environment.argument("progname") or 'context'
+
local function kpse_initialized()
texconfig.kpse_init = true
local t = os.clock()
- local k = kpse.original.new("luatex",instance.progname)
+ local k = kpse.original.new("luatex",progname)
local dummy = k:find_file("mtxrun.lua") -- so that we're initialized
- logs.simple("kpse fallback with progname '%s' initialized in %s seconds",instance.progname,os.clock()-t)
+ report("kpse fallback with progname '%s' initialized in %s seconds",progname,os.clock()-t)
kpse_initialized = function() return k end
return k
end
- local find_file = resolvers.find_file
- local show_path = resolvers.show_path
+ local findfile = resolvers.findfile
+ local showpath = resolvers.showpath
if environment.argument("forcekpse") then
- function resolvers.find_file(name,kind)
- return (kpse_initialized():find_file(resolvers.clean_path(name),(kind ~= "" and (remapper[kind] or kind)) or "tex") or "") or ""
+ function resolvers.findfile(name,kind)
+ return (kpse_initialized():find_file(resolvers.cleanpath(name),(kind ~= "" and (remapper[kind] or kind)) or "tex") or "") or ""
end
- function resolvers.show_path(name)
+ function resolvers.showpath(name)
return (kpse_initialized():show_path(name)) or ""
end
@@ -12613,16 +14968,16 @@ if environment.argument("usekpse") or environment.argument("forcekpse") or is_mk
resolvers.load()
- function resolvers.find_file(name,kind)
- local found = find_file(name,kind) or ""
+ function resolvers.findfile(name,kind)
+ local found = findfile(name,kind) or ""
if found ~= "" then
return found
else
- return (kpse_initialized():find_file(resolvers.clean_path(name),(kind ~= "" and (remapper[kind] or kind)) or "tex") or "") or ""
+ return (kpse_initialized():find_file(resolvers.cleanpath(name),(kind ~= "" and (remapper[kind] or kind)) or "tex") or "") or ""
end
end
- function resolvers.show_path(name)
- local found = show_path(name) or ""
+ function resolvers.showpath(name)
+ local found = showpath(name) or ""
if found ~= "" then
return found
else
@@ -12632,80 +14987,317 @@ if environment.argument("usekpse") or environment.argument("forcekpse") or is_mk
end
+ function runners.loadbase()
+ end
+
else
- resolvers.load()
+ function runners.loadbase(...)
+ if not resolvers.load(...) then
+ report("forcing cache reload")
+ instance.renewcache = true
+ trackers.enable("resolvers.locating")
+ if not resolvers.load(...) then
+ report("the resolver databases are not present or outdated")
+ end
+ end
+ end
+
+ resolvers.load_tree(environment.argument('tree'))
end
+
if environment.argument("selfmerge") then
+
-- embed used libraries
- utils.merger.selfmerge(own.name,own.libs,own.list)
+
+ runners.loadbase()
+ local found = locate_libs()
+ if found then
+ utilities.merger.selfmerge(own.name,own.libs,{ found })
+ end
+
elseif environment.argument("selfclean") then
+
-- remove embedded libraries
- utils.merger.selfclean(own.name)
+
+ runners.loadbase()
+ utilities.merger.selfclean(own.name)
+
elseif environment.argument("selfupdate") then
- logs.setverbose(true)
- resolvers.update_script(own.name,"mtxrun")
+
+ runners.loadbase()
+ trackers.enable("resolvers.locating")
+ resolvers.updatescript(own.name,"mtxrun")
+
elseif environment.argument("ctxlua") or environment.argument("internal") then
+
-- run a script by loading it (using libs)
+
+ runners.loadbase()
ok = runners.execute_script(filename,true)
+
elseif environment.argument("script") or environment.argument("scripts") then
+
-- run a script by loading it (using libs), pass args
+
+ runners.loadbase()
if is_mkii_stub then
- -- execute mkii script
ok = runners.execute_script(filename,false,true)
else
ok = runners.execute_ctx_script(filename)
end
+
elseif environment.argument("execute") then
+
-- execute script
+
+ runners.loadbase()
ok = runners.execute_script(filename)
+
elseif environment.argument("direct") then
+
-- equals bin:
+
+ runners.loadbase()
ok = runners.execute_program(filename)
+
elseif environment.argument("edit") then
+
-- edit file
+
+ runners.loadbase()
runners.edit_script(filename)
+
elseif environment.argument("launch") then
+
+ runners.loadbase()
runners.launch_file(filename)
-elseif environment.argument("make") then
- -- make stubs
+
+elseif environment.argument("makestubs") then
+
+   -- make stubs (deprecated)
+
runners.handle_stubs(true)
-elseif environment.argument("remove") then
- -- remove stub
+
+elseif environment.argument("removestubs") then
+
+   -- remove stubs (deprecated)
+
+ runners.loadbase()
runners.handle_stubs(false)
+
elseif environment.argument("resolve") then
+
-- resolve string
+
+ runners.loadbase()
runners.resolve_string(filename)
+
elseif environment.argument("locate") then
+
-- locate file
+
+ runners.loadbase()
runners.locate_file(filename)
-elseif environment.argument("platform")then
+
+elseif environment.argument("platform") or environment.argument("show-platform") then
+
-- locate platform
+
+ runners.loadbase()
runners.locate_platform()
+
elseif environment.argument("prefixes") then
+
+ runners.loadbase()
runners.prefixes()
+
elseif environment.argument("timedrun") then
+
-- locate platform
+
+ runners.loadbase()
runners.timedrun(filename)
+
+elseif environment.argument("variables") or environment.argument("show-variables") or environment.argument("expansions") or environment.argument("show-expansions") then
+
+ -- luatools: runners.execute_ctx_script("mtx-base","--expansions",filename)
+
+ resolvers.load("nofiles")
+ resolvers.listers.variables(environment.argument("pattern"))
+
+elseif environment.argument("configurations") or environment.argument("show-configurations") then
+
+ -- luatools: runners.execute_ctx_script("mtx-base","--configurations",filename)
+
+ resolvers.load("nofiles")
+ resolvers.listers.configurations()
+
+elseif environment.argument("find-file") then
+
+ -- luatools: runners.execute_ctx_script("mtx-base","--find-file",filename)
+
+ resolvers.load()
+ local e_pattern = environment.argument("pattern")
+   local e_format = environment.argument("format")
+ if not e_pattern then
+ runners.register_arguments(filename)
+ environment.initializearguments(environment.arguments_after)
+ resolvers.dowithfilesandreport(resolvers.findfiles,environment.files,e_format)
+ elseif type(e_pattern) == "string" then
+ instance.allresults = true -- brrrr
+ resolvers.dowithfilesandreport(resolvers.findfiles,{ e_pattern }, e_format)
+ end
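+
+  -- illustrative calls (file names are just examples): 'mtxrun --find-file context.mkiv'
+  -- reports the location of the given file(s), while 'mtxrun --find-file --pattern="*.mkvi"'
+  -- reports all matches of that pattern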
+
+elseif environment.argument("find-path") then
+
+ -- luatools: runners.execute_ctx_script("mtx-base","--find-path",filename)
+
+ resolvers.load()
+ local path = resolvers.findpath(filename, instance.my_format)
+ if e_verbose then
+ report(path)
+ else
+ print(path)
+ end
+
+elseif environment.argument("expand-braces") then
+
+ -- luatools: runners.execute_ctx_script("mtx-base","--expand-braces",filename)
+
+ resolvers.load("nofiles")
+ runners.register_arguments(filename)
+ environment.initializearguments(environment.arguments_after)
+ resolvers.dowithfilesandreport(resolvers.expandbraces, environment.files)
+
+elseif environment.argument("expand-path") then
+
+ -- luatools: runners.execute_ctx_script("mtx-base","--expand-path",filename)
+
+ resolvers.load("nofiles")
+ runners.register_arguments(filename)
+ environment.initializearguments(environment.arguments_after)
+ resolvers.dowithfilesandreport(resolvers.expandpath, environment.files)
+
+elseif environment.argument("expand-var") or environment.argument("expand-variable") then
+
+ -- luatools: runners.execute_ctx_script("mtx-base","--expand-var",filename)
+
+ resolvers.load("nofiles")
+ runners.register_arguments(filename)
+ environment.initializearguments(environment.arguments_after)
+ resolvers.dowithfilesandreport(resolvers.expansion, environment.files)
+
+elseif environment.argument("show-path") or environment.argument("path-value") then
+
+ -- luatools: runners.execute_ctx_script("mtx-base","--show-path",filename)
+
+ resolvers.load("nofiles")
+ runners.register_arguments(filename)
+ environment.initializearguments(environment.arguments_after)
+ resolvers.dowithfilesandreport(resolvers.showpath, environment.files)
+
+elseif environment.argument("var-value") or environment.argument("show-value") then
+
+ -- luatools: runners.execute_ctx_script("mtx-base","--show-value",filename)
+
+ resolvers.load("nofiles")
+ runners.register_arguments(filename)
+ environment.initializearguments(environment.arguments_after)
+ resolvers.dowithfilesandreport(resolvers.variable,environment.files)
+
+elseif environment.argument("format-path") then
+
+ -- luatools: runners.execute_ctx_script("mtx-base","--format-path",filename)
+
+ resolvers.load()
+ report(caches.getwritablepath("format"))
+
+elseif environment.argument("pattern") then
+
+ -- luatools
+
+ runners.execute_ctx_script("mtx-base","--pattern='" .. environment.argument("pattern") .. "'",filename)
+
+elseif environment.argument("generate") then
+
+ -- luatools
+
+ instance.renewcache = true
+ trackers.enable("resolvers.locating")
+ resolvers.load()
+
+elseif environment.argument("make") or environment.argument("ini") or environment.argument("compile") then
+
+ -- luatools: runners.execute_ctx_script("mtx-base","--make",filename)
+
+ resolvers.load()
+ trackers.enable("resolvers.locating")
+ environment.make_format(filename)
+
+elseif environment.argument("run") then
+
+ -- luatools
+
+ runners.execute_ctx_script("mtx-base","--run",filename)
+
+elseif environment.argument("fmt") then
+
+ -- luatools
+
+ runners.execute_ctx_script("mtx-base","--fmt",filename)
+
+elseif environment.argument("help") and filename=='base' then
+
+ -- luatools
+
+ runners.execute_ctx_script("mtx-base","--help")
+
elseif environment.argument("help") or filename=='help' or filename == "" then
- logs.help(messages.help)
- -- execute script
+
+ application.help()
+
elseif filename:find("^bin:") then
+
+ runners.loadbase()
ok = runners.execute_program(filename)
+
elseif is_mkii_stub then
+
-- execute mkii script
+
+ runners.loadbase()
ok = runners.execute_script(filename,false,true)
-else
+
+elseif false then
+
+ runners.loadbase()
ok = runners.execute_ctx_script(filename)
if not ok then
ok = runners.execute_script(filename)
end
+
+elseif environment.files[1] == 'texmfcnf.lua' then -- so that we don't need to load mtx-base
+
+ resolvers.load("nofiles")
+ resolvers.listers.configurations()
+
+else
+
+ runners.loadbase()
+ runners.execute_ctx_script("mtx-base",filename)
+
+end
+
+if e_verbose then
+ report()
+ report("runtime: %0.3f seconds",os.runtime())
end
-if os.platform == "unix" then
- io.write("\n")
+if os.type ~= "windows" then
+ texio.write("\n") -- is this still valid?
end
if ok == false then ok = 1 elseif ok == true then ok = 0 end