path: root/Master/texmf-dist/tex/context/base/data-res.lua
author    Taco Hoekwater <taco@elvenkind.com>    2011-06-01 08:54:21 +0000
committer Taco Hoekwater <taco@elvenkind.com>    2011-06-01 08:54:21 +0000
commit    d7ccb42582f85acf30568913610ccf4d602023fb (patch)
tree      7292e3545a420676878e7451b68892d360c62cb6    /Master/texmf-dist/tex/context/base/data-res.lua
parent    2d62a6fe9b80def59c392268022f1f9a2d6e358f (diff)
commit context 2011.05.18
git-svn-id: svn://tug.org/texlive/trunk@22719 c570f23f-e606-0410-a88d-b1316a301751
Diffstat (limited to 'Master/texmf-dist/tex/context/base/data-res.lua')
-rw-r--r--  Master/texmf-dist/tex/context/base/data-res.lua | 2595
1 file changed, 1065 insertions(+), 1530 deletions(-)
diff --git a/Master/texmf-dist/tex/context/base/data-res.lua b/Master/texmf-dist/tex/context/base/data-res.lua
index ac5177f4a24..842eed16940 100644
--- a/Master/texmf-dist/tex/context/base/data-res.lua
+++ b/Master/texmf-dist/tex/context/base/data-res.lua
@@ -1,4 +1,4 @@
-if not modules then modules = { } end modules ['data-inp'] = {
+if not modules then modules = { } end modules ['data-res'] = {
version = 1.001,
comment = "companion to luat-lib.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
@@ -6,220 +6,218 @@ if not modules then modules = { } end modules ['data-inp'] = {
license = "see context related readme files",
}
--- After a few years using the code the large luat-inp.lua file
--- has been split up a bit. In the process some functionality was
--- dropped:
---
--- * support for reading lsr files
--- * selective scanning (subtrees)
--- * some public auxiliary functions were made private
---
--- TODO: os.getenv -> os.env[]
--- TODO: instances.[hashes,cnffiles,configurations,522]
--- TODO: check escaping in find etc, too much, too slow
-
--- This lib is multi-purpose and can be loaded again later on so that
--- additional functionality becomes available. We will split this
--- module in components once we're done with prototyping. This is the
--- first code I wrote for LuaTeX, so it needs some cleanup. Before changing
--- something in this module one can best check with Taco or Hans first; there
--- is some nasty trickery going on that relates to traditional kpse support.
-
--- To be considered: hash key lowercase, first entry in table filename
--- (any case), rest paths (so no need for optimization). Or maybe a
--- separate table that matches lowercase names to mixed case when
--- present. In that case the lower() cases can go away. I will do that
--- only when we run into problems with names ... well ... Iwona-Regular.
-
--- Beware, loading and saving is overloaded in luat-tmp!
+-- In practice we will work within one tds tree, but i want to keep
+-- the option open to build tools that look at multiple trees, which is
+-- why we keep the tree specific data in a table. We used to pass the
+-- instance but for practical purposes we now avoid this and use a
+-- instance variable. We always have one instance active (sort of global).
+
+-- todo: cache:/// home:///
local format, gsub, find, lower, upper, match, gmatch = string.format, string.gsub, string.find, string.lower, string.upper, string.match, string.gmatch
local concat, insert, sortedkeys = table.concat, table.insert, table.sortedkeys
-local next, type = next, type
-local lpegmatch = lpeg.match
-
-local trace_locating, trace_detail, trace_expansions = false, false, false
-
-trackers.register("resolvers.locating", function(v) trace_locating = v end)
-trackers.register("resolvers.details", function(v) trace_detail = v end)
-trackers.register("resolvers.expansions", function(v) trace_expansions = v end) -- todo
-
-if not resolvers then
- resolvers = {
- suffixes = { },
- formats = { },
- dangerous = { },
- suffixmap = { },
- alternatives = { },
- locators = { }, -- locate databases
- hashers = { }, -- load databases
- generators = { }, -- generate databases
- }
-end
+local next, type, rawget = next, type, rawget
+local os = os
+
+local P, S, R, C, Cc, Cs, Ct, Carg = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.Cc, lpeg.Cs, lpeg.Ct, lpeg.Carg
+local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
+
+local filedirname = file.dirname
+local filebasename = file.basename
+local fileextname = file.extname
+local filejoin = file.join
+local collapsepath = file.collapsepath
+local joinpath = file.joinpath
+local allocate = utilities.storage.allocate
+local setmetatableindex = table.setmetatableindex
+
+local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
+local trace_detail = false trackers.register("resolvers.details", function(v) trace_detail = v end)
+local trace_expansions = false trackers.register("resolvers.expansions", function(v) trace_expansions = v end)
+
+local report_resolving = logs.reporter("resolvers","resolving")
local resolvers = resolvers
-resolvers.locators .notfound = { nil }
-resolvers.hashers .notfound = { nil }
-resolvers.generators.notfound = { nil }
-
-resolvers.cacheversion = '1.0.1'
-resolvers.cnfname = 'texmf.cnf'
-resolvers.luaname = 'texmfcnf.lua'
-resolvers.homedir = os.env[os.type == "windows" and 'USERPROFILE'] or os.env['HOME'] or '~'
-resolvers.cnfdefault = '{$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,.local,}/web2c}'
-
-local dummy_path_expr = "^!*unset/*$"
-
-local formats = resolvers.formats
-local suffixes = resolvers.suffixes
-local dangerous = resolvers.dangerous
-local suffixmap = resolvers.suffixmap
-local alternatives = resolvers.alternatives
-
-formats['afm'] = 'AFMFONTS' suffixes['afm'] = { 'afm' }
-formats['enc'] = 'ENCFONTS' suffixes['enc'] = { 'enc' }
-formats['fmt'] = 'TEXFORMATS' suffixes['fmt'] = { 'fmt' }
-formats['map'] = 'TEXFONTMAPS' suffixes['map'] = { 'map' }
-formats['mp'] = 'MPINPUTS' suffixes['mp'] = { 'mp' }
-formats['ocp'] = 'OCPINPUTS' suffixes['ocp'] = { 'ocp' }
-formats['ofm'] = 'OFMFONTS' suffixes['ofm'] = { 'ofm', 'tfm' }
-formats['otf'] = 'OPENTYPEFONTS' suffixes['otf'] = { 'otf' } -- 'ttf'
-formats['opl'] = 'OPLFONTS' suffixes['opl'] = { 'opl' }
-formats['otp'] = 'OTPINPUTS' suffixes['otp'] = { 'otp' }
-formats['ovf'] = 'OVFFONTS' suffixes['ovf'] = { 'ovf', 'vf' }
-formats['ovp'] = 'OVPFONTS' suffixes['ovp'] = { 'ovp' }
-formats['tex'] = 'TEXINPUTS' suffixes['tex'] = { 'tex' }
-formats['tfm'] = 'TFMFONTS' suffixes['tfm'] = { 'tfm' }
-formats['ttf'] = 'TTFONTS' suffixes['ttf'] = { 'ttf', 'ttc', 'dfont' }
-formats['pfb'] = 'T1FONTS' suffixes['pfb'] = { 'pfb', 'pfa' }
-formats['vf'] = 'VFFONTS' suffixes['vf'] = { 'vf' }
-
-formats['fea'] = 'FONTFEATURES' suffixes['fea'] = { 'fea' }
-formats['cid'] = 'FONTCIDMAPS' suffixes['cid'] = { 'cid', 'cidmap' }
-
-formats ['texmfscripts'] = 'TEXMFSCRIPTS' -- new
-suffixes['texmfscripts'] = { 'rb', 'pl', 'py' } -- 'lua'
-
-formats ['lua'] = 'LUAINPUTS' -- new
-suffixes['lua'] = { 'lua', 'luc', 'tma', 'tmc' }
-
--- backward compatible ones
-
-alternatives['map files'] = 'map'
-alternatives['enc files'] = 'enc'
-alternatives['cid maps'] = 'cid' -- great, why no cid files
-alternatives['font feature files'] = 'fea' -- and fea files here
-alternatives['opentype fonts'] = 'otf'
-alternatives['truetype fonts'] = 'ttf'
-alternatives['truetype collections'] = 'ttc'
-alternatives['truetype dictionary'] = 'dfont'
-alternatives['type1 fonts'] = 'pfb'
-
--- obscure ones
-
-formats ['misc fonts'] = ''
-suffixes['misc fonts'] = { }
-
-formats ['sfd'] = 'SFDFONTS'
-suffixes ['sfd'] = { 'sfd' }
-alternatives['subfont definition files'] = 'sfd'
-
--- lib paths
-
-formats ['lib'] = 'CLUAINPUTS' -- new (needs checking)
-suffixes['lib'] = (os.libsuffix and { os.libsuffix }) or { 'dll', 'so' }
+local expandedpathfromlist = resolvers.expandedpathfromlist
+local checkedvariable = resolvers.checkedvariable
+local splitconfigurationpath = resolvers.splitconfigurationpath
+local methodhandler = resolvers.methodhandler
--- In practice we will work within one tds tree, but i want to keep
--- the option open to build tools that look at multiple trees, which is
--- why we keep the tree specific data in a table. We used to pass the
--- instance but for practical pusposes we now avoid this and use a
--- instance variable.
+local initializesetter = utilities.setters.initialize
--- here we catch a few new thingies (todo: add these paths to context.tmf)
---
--- FONTFEATURES = .;$TEXMF/fonts/fea//
--- FONTCIDMAPS = .;$TEXMF/fonts/cid//
+local ostype, osname, osenv, ossetenv, osgetenv = os.type, os.name, os.env, os.setenv, os.getenv
--- we always have one instance active
+resolvers.cacheversion = '1.0.1'
+resolvers.configbanner = ''
+resolvers.homedir = environment.homedir
+resolvers.criticalvars = allocate { "SELFAUTOLOC", "SELFAUTODIR", "SELFAUTOPARENT", "TEXMFCNF", "TEXMF", "TEXOS" }
+resolvers.luacnfname = 'texmfcnf.lua'
+resolvers.luacnfstate = "unknown"
-resolvers.instance = resolvers.instance or nil -- the current one (slow access)
-local instance = resolvers.instance or nil -- the current one (fast access)
+-- resolvers.luacnfspec = '{$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,}/web2c}' -- what a rubish path
+resolvers.luacnfspec = 'selfautoparent:{/texmf{-local,}{,/web2c},}}'
-function resolvers.newinstance()
+--~ -- not yet, some reporters expect strings
- -- store once, freeze and faster (once reset we can best use
- -- instance.environment) maybe better have a register suffix
- -- function
+--~ resolvers.luacnfspec = {
+--~ "selfautoparent:/texmf-local",
+--~ "selfautoparent:/texmf-local/web2c",
+--~ "selfautoparent:/texmf",
+--~ "selfautoparent:/texmf/web2c",
+--~ "selfautoparent:",
+--~ }
- for k, v in next, suffixes do
- for i=1,#v do
- local vi = v[i]
- if vi then
- suffixmap[vi] = k
- end
- end
+local unset_variable = "unset"
+
+local formats = resolvers.formats
+local suffixes = resolvers.suffixes
+local dangerous = resolvers.dangerous
+local suffixmap = resolvers.suffixmap
+
+resolvers.defaultsuffixes = { "tex" } -- "mkiv", "cld" -- too tricky
+
+resolvers.instance = resolvers.instance or nil -- the current one (slow access)
+local instance = resolvers.instance or nil -- the current one (fast access)
+
+-- An instance has an environment (coming from the outside, kept raw), variables
+-- (coming from the configuration file), and expansions (variables with nested
+-- variables replaced). One can push something into the outer environment and
+-- its internal copy, but only the later one will be the raw unprefixed variant.
+
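
The comment above describes the three data layers an instance carries. A minimal standalone sketch of that layout, with made-up values (OSFONTDIR and TEXMF here are only illustrations, not taken from the patch):

    local instance = {
        environment = { OSFONTDIR = "c:/windows/fonts//" },    -- raw, pushed in from outside
        variables   = { TEXMF     = "$SELFAUTOPARENT/texmf" }, -- as read from texmfcnf.lua
        expansions  = { },                                      -- filled on demand, nested $VARs resolved
    }
    -- resolvers.setenv below writes to instance.environment and, via os.setenv,
    -- to the real process environment so that child programs see the value too.
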
+function resolvers.setenv(key,value,raw)
+ if instance then
+ -- this one will be consulted first when we stay inside
+ -- the current environment
+ instance.environment[key] = value
+ -- we feed back into the environment, and as this is used
+ -- by other applications (via os.execute) we need to make
+ -- sure that prefixes are resolve
+ ossetenv(key,raw and value or resolvers.resolve(value))
end
+end
- -- because vf searching is somewhat dangerous, we want to prevent
- -- too liberal searching esp because we do a lookup on the current
- -- path anyway; only tex (or any) is safe
+-- Beware we don't want empty here as this one can be called early on
+-- and therefore we use rawget.
- for k, v in next, formats do
- dangerous[k] = true
+local function getenv(key)
+ local value = rawget(instance.environment,key)
+ if value and value ~= "" then
+ return value
+ else
+ local e = osgetenv(key)
+ return e ~= nil and e ~= "" and checkedvariable(e) or ""
end
- dangerous.tex = nil
+end
+
+resolvers.getenv = getenv
+resolvers.env = getenv
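
A small standalone illustration of why getenv uses rawget: instance.environment later gets an __index metamethod (set in newinstance), and rawget bypasses it, so an early call cannot trigger the lazy lookup chain. This snippet is only a sketch, not part of the patch:

    local t = setmetatable({ }, { __index = function(_,k) return "computed:" .. k end })
    print(rawget(t,"HOME")) --> nil            (metamethod is skipped)
    print(t.HOME)           --> computed:HOME  (metamethod kicks in)
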
- -- the instance
+-- We are going to use some metatable trickery where we backtrack from
+-- expansion to variable to environment.
+
+local function resolve(k)
+ return instance.expansions[k]
+end
+
+local dollarstripper = lpeg.stripper("$")
+local inhibitstripper = P("!")^0 * Cs(P(1)^0)
+local backslashswapper = lpeg.replacer("\\","/")
+
+local somevariable = P("$") / ""
+local somekey = C(R("az","AZ","09","__","--")^1)
+local somethingelse = P(";") * ((1-S("!{}/\\"))^1 * P(";") / "")
+ + P(";") * (P(";") / "")
+ + P(1)
+local variableexpander = Cs( (somevariable * (somekey/resolve) + somethingelse)^1 )
+
+local cleaner = P("\\") / "/" + P(";") * S("!{}/\\")^0 * P(";")^1 / ";"
+local variablecleaner = Cs((cleaner + P(1))^0)
+
+local somevariable = R("az","AZ","09","__","--")^1 / resolve
+local variable = (P("$")/"") * (somevariable + (P("{")/"") * somevariable * (P("}")/""))
+local variableresolver = Cs((variable + P(1))^0)
+
+local function expandedvariable(var)
+ return lpegmatch(variableexpander,var) or var
+end
+
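
The expander above substitutes $NAME and ${NAME} through instance.expansions. A self-contained sketch of the same idea, with a plain table standing in for the instance (values are made up):

    local lpeg = require("lpeg")
    local P, R, Cs = lpeg.P, lpeg.R, lpeg.Cs
    local map  = { TEXMF = "/opt/texlive/texmf" }  -- stand-in for instance.expansions
    local name = R("az","AZ","09","__","--")^1 / function(k) return map[k] or "" end
    local var  = (P("$")/"") * (name + (P("{")/"") * name * (P("}")/""))
    local expander = Cs((var + P(1))^0)
    print(lpeg.match(expander,"$TEXMF/web2c"))   --> /opt/texlive/texmf/web2c
    print(lpeg.match(expander,"${TEXMF}/web2c")) --> /opt/texlive/texmf/web2c
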
+function resolvers.newinstance() -- todo: all vars will become lowercase and alphanum only
+
+ if trace_locating then
+ report_resolving("creating instance")
+ end
+
+ local environment, variables, expansions, order = allocate(), allocate(), allocate(), allocate()
local newinstance = {
- rootpath = '',
- treepath = '',
- progname = 'context',
- engine = 'luatex',
- format = '',
- environment = { },
- variables = { },
- expansions = { },
- files = { },
- remap = { },
- configuration = { },
- setup = { },
- order = { },
- found = { },
- foundintrees = { },
- kpsevars = { },
- hashes = { },
- cnffiles = { },
- luafiles = { },
- lists = { },
+ environment = environment,
+ variables = variables,
+ expansions = expansions,
+ order = order,
+ files = allocate(),
+ setups = allocate(),
+ found = allocate(),
+ foundintrees = allocate(),
+ hashes = allocate(),
+ hashed = allocate(),
+ specification = allocate(),
+ lists = allocate(),
+ data = allocate(), -- only for loading
+ fakepaths = allocate(),
remember = true,
diskcache = true,
renewcache = false,
- scandisk = true,
- cachepath = nil,
loaderror = false,
- sortdata = false,
savelists = true,
- cleanuppaths = true,
- allresults = false,
pattern = nil, -- lists
- data = { }, -- only for loading
force_suffixes = true,
- fakepaths = { },
}
- local ne = newinstance.environment
+ setmetatableindex(variables,function(t,k)
+ local v
+ for i=1,#order do
+ v = order[i][k]
+ if v ~= nil then
+ t[k] = v
+ return v
+ end
+ end
+ if v == nil then
+ v = ""
+ end
+ t[k] = v
+ return v
+ end)
- for k,v in next, os.env do
- ne[k] = resolvers.bare_variable(v)
- end
+ setmetatableindex(environment, function(t,k)
+ local v = osgetenv(k)
+ if v == nil then
+ v = variables[k]
+ end
+ if v ~= nil then
+ v = checkedvariable(v) or ""
+ end
+ v = resolvers.repath(v) -- for taco who has a : separated osfontdir
+ t[k] = v
+ return v
+ end)
+
+ setmetatableindex(expansions, function(t,k)
+ local v = environment[k]
+ if type(v) == "string" then
+ v = lpegmatch(variableresolver,v)
+ v = lpegmatch(variablecleaner,v)
+ end
+ t[k] = v
+ return v
+ end)
return newinstance
end
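
The metatables installed above give the backtracking chain announced earlier: an unknown expansion falls through to the environment, which falls through to the configuration variables. A standalone sketch of that chain using plain setmetatable instead of table.setmetatableindex (values are illustrative):

    local variables   = { TEXMF = "/opt/texmf" }   -- from the configuration files
    local environment = setmetatable({ }, { __index = function(t,k)
        local v = os.getenv(k) or variables[k] or ""
        t[k] = v
        return v
    end })
    local expansions  = setmetatable({ }, { __index = function(t,k)
        local v = environment[k]                   -- here nested $VARs would be resolved
        t[k] = v
        return v
    end })
    print(expansions.TEXMF) --> /opt/texmf (unless TEXMF is set in the real environment)
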
-function resolvers.setinstance(someinstance)
+function resolvers.setinstance(someinstance) -- only one instance is active
instance = someinstance
resolvers.instance = someinstance
return someinstance
@@ -234,501 +232,271 @@ local function reset_hashes()
instance.found = { }
end
-local function check_configuration() -- not yet ok, no time for debugging now
- if os.env["OSFONTDIR"] then
- -- ok
- elseif os.type == "windows" then
- os.setenv("OSFONTDIR","c:/windows/fonts//")
- elseif os.type == "macosx" then
- os.setenv("OSFONTDIR","$HOME/Library/Fonts//;/Library/Fonts//;/System/Library/Fonts//")
- end
-end
-
-function resolvers.bare_variable(str) -- assumes str is a string
- return (gsub(str,"\s*([\"\']?)(.+)%1\s*", "%2"))
-end
-
-function resolvers.settrace(n) -- no longer number but: 'locating' or 'detail'
- if n then
- trackers.disable("resolvers.*")
- trackers.enable("resolvers."..n)
- end
-end
-
-resolvers.settrace(os.getenv("MTX_INPUT_TRACE"))
-
-function resolvers.osenv(key)
- local ie = instance.environment
- local value = ie[key]
- if value == nil then
- -- local e = os.getenv(key)
- local e = os.env[key]
- if e == nil then
- -- value = "" -- false
- else
- value = resolvers.bare_variable(e)
- end
- ie[key] = value
- end
- return value or ""
-end
-
-function resolvers.env(key)
- return instance.environment[key] or resolvers.osenv(key)
-end
-
---
-
-local function expand_vars(lst) -- simple vars
- local variables, env = instance.variables, resolvers.env
- local function resolve(a)
- return variables[a] or env(a)
- end
- for k=1,#lst do
- lst[k] = gsub(lst[k],"%$([%a%d%_%-]+)",resolve)
- end
-end
+local slash = P("/")
-local function expanded_var(var) -- simple vars
- local function resolve(a)
- return instance.variables[a] or resolvers.env(a)
- end
- return (gsub(var,"%$([%a%d%_%-]+)",resolve))
-end
+local pathexpressionpattern = Cs (
+ Cc("^") * (
+ Cc("%") * S(".-")
+ + slash^2 * P(-1) / "/.*"
+ + slash^2 / "/.-/"
+ + (1-slash) * P(-1) * Cc("/")
+ + P(1)
+ )^1 * Cc("$") -- yes or no $
+)
-local function entry(entries,name)
- if name and (name ~= "") then
- name = gsub(name,'%$','')
- local result = entries[name..'.'..instance.progname] or entries[name]
- if result then
- return result
- else
- result = resolvers.env(name)
- if result then
- instance.variables[name] = result
- resolvers.expand_variables()
- return instance.expansions[name] or ""
- end
- end
- end
- return ""
-end
+local cache = { }
-local function is_entry(entries,name)
- if name and name ~= "" then
- name = gsub(name,'%$','')
- return (entries[name..'.'..instance.progname] or entries[name]) ~= nil
+local function makepathexpression(str)
+ if str == "." then
+ return "^%./$"
else
- return false
- end
-end
-
--- {a,b,c,d}
--- a,b,c/{p,q,r},d
--- a,b,c/{p,q,r}/d/{x,y,z}//
--- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
--- a,b,c/{p,q/{x,y,z},r},d/{p,q,r}
--- a{b,c}{d,e}f
--- {a,b,c,d}
--- {a,b,c/{p,q,r},d}
--- {a,b,c/{p,q,r}/d/{x,y,z}//}
--- {a,b,c/{p,q/{x,y,z}},d/{p,q,r}}
--- {a,b,c/{p,q/{x,y,z},w}v,d/{p,q,r}}
--- {$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,.local,}/web2c}
-
--- this one is better and faster, but it took me a while to realize
--- that this kind of replacement is cleaner than messy parsing and
--- fuzzy concatenating we can probably gain a bit with selectively
--- applying lpeg, but experiments with lpeg parsing this proved not to
--- work that well; the parsing is ok, but dealing with the resulting
--- table is a pain because we need to work inside-out recursively
-
-local function do_first(a,b)
- local t = { }
- for s in gmatch(b,"[^,]+") do t[#t+1] = a .. s end
- return "{" .. concat(t,",") .. "}"
-end
-
-local function do_second(a,b)
- local t = { }
- for s in gmatch(a,"[^,]+") do t[#t+1] = s .. b end
- return "{" .. concat(t,",") .. "}"
-end
-
-local function do_both(a,b)
- local t = { }
- for sa in gmatch(a,"[^,]+") do
- for sb in gmatch(b,"[^,]+") do
- t[#t+1] = sa .. sb
+ local c = cache[str]
+ if not c then
+ c = lpegmatch(pathexpressionpattern,str)
+ cache[str] = c
end
+ return c
end
- return "{" .. concat(t,",") .. "}"
-end
-
-local function do_three(a,b,c)
- return a .. b.. c
end
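
makepathexpression, together with the pathexpressionpattern above, turns a TDS path specification into a Lua string pattern: a trailing // means "anything below this directory", an inner // means "one or more intermediate directories". A standalone sketch reproducing the idea (illustrative paths):

    local lpeg = require("lpeg")
    local P, S, Cc, Cs = lpeg.P, lpeg.S, lpeg.Cc, lpeg.Cs
    local slash = P("/")
    local topattern = Cs( Cc("^") * (
          Cc("%") * S(".-")         -- escape magic pattern characters
        + slash^2 * P(-1) / "/.*"   -- trailing // : whole subtree
        + slash^2 / "/.-/"          -- inner //    : any intermediate directories
        + (1 - slash) * P(-1) * Cc("/")
        + P(1)
    )^1 * Cc("$") )
    print(lpeg.match(topattern,"/opt/tex//fonts"))  --> ^/opt/tex/.-/fonts/$
    print(lpeg.match(topattern,"/opt/tex/fonts//")) --> ^/opt/tex/fonts/.*$
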
-local function splitpathexpr(str, t, validate)
- -- no need for further optimization as it is only called a
- -- few times, we can use lpeg for the sub
- if trace_expansions then
- logs.report("fileio","expanding variable '%s'",str)
- end
- t = t or { }
- str = gsub(str,",}",",@}")
- str = gsub(str,"{,","{@,")
- -- str = "@" .. str .. "@"
- local ok, done
- while true do
- done = false
- while true do
- str, ok = gsub(str,"([^{},]+){([^{}]+)}",do_first)
- if ok > 0 then done = true else break end
- end
- while true do
- str, ok = gsub(str,"{([^{}]+)}([^{},]+)",do_second)
- if ok > 0 then done = true else break end
- end
- while true do
- str, ok = gsub(str,"{([^{}]+)}{([^{}]+)}",do_both)
- if ok > 0 then done = true else break end
- end
- str, ok = gsub(str,"({[^{}]*){([^{}]+)}([^{}]*})",do_three)
- if ok > 0 then done = true end
- if not done then break end
- end
- str = gsub(str,"[{}]", "")
- str = gsub(str,"@","")
- if validate then
- for s in gmatch(str,"[^,]+") do
- s = validate(s)
- if s then t[#t+1] = s end
- end
- else
- for s in gmatch(str,"[^,]+") do
- t[#t+1] = s
- end
- end
- if trace_expansions then
- for k=1,#t do
- logs.report("fileio","% 4i: %s",k,t[k])
+local function reportcriticalvariables()
+ if trace_locating then
+ for i=1,#resolvers.criticalvars do
+ local k = resolvers.criticalvars[i]
+ local v = resolvers.getenv(k) or "unknown" -- this one will not resolve !
+ report_resolving("variable '%s' set to '%s'",k,v)
end
+ report_resolving()
end
- return t
+ reportcriticalvariables = function() end
end
-local function expanded_path_from_list(pathlist) -- maybe not a list, just a path
- -- a previous version fed back into pathlist
- local newlist, ok = { }, false
- for k=1,#pathlist do
- if find(pathlist[k],"[{}]") then
- ok = true
- break
- end
- end
- if ok then
- local function validate(s)
- s = file.collapse_path(s)
- return s ~= "" and not find(s,dummy_path_expr) and s
- end
- for k=1,#pathlist do
- splitpathexpr(pathlist[k],newlist,validate)
- end
- else
- for k=1,#pathlist do
- for p in gmatch(pathlist[k],"([^,]+)") do
- p = file.collapse_path(p)
- if p ~= "" then newlist[#newlist+1] = p end
+local function identify_configuration_files()
+ local specification = instance.specification
+ if #specification == 0 then
+ local cnfspec = getenv('TEXMFCNF')
+ if cnfspec == "" then
+ cnfspec = resolvers.luacnfspec
+ resolvers.luacnfstate = "default"
+ else
+ resolvers.luacnfstate = "environment"
+ end
+ reportcriticalvariables()
+ local cnfpaths = expandedpathfromlist(resolvers.splitpath(cnfspec))
+ local luacnfname = resolvers.luacnfname
+ for i=1,#cnfpaths do
+ local filename = collapsepath(filejoin(cnfpaths[i],luacnfname))
+ local realname = resolvers.resolve(filename)
+ if lfs.isfile(realname) then
+ specification[#specification+1] = filename
+ if trace_locating then
+ report_resolving("found configuration file '%s'",realname)
+ end
+ elseif trace_locating then
+ report_resolving("unknown configuration file '%s'",realname)
end
end
- end
- return newlist
-end
-
--- we follow a rather traditional approach:
---
--- (1) texmf.cnf given in TEXMFCNF
--- (2) texmf.cnf searched in default variable
---
--- also we now follow the stupid route: if not set then just assume *one*
--- cnf file under texmf (i.e. distribution)
-
-local args = environment and environment.original_arguments or arg -- this needs a cleanup
-
-resolvers.ownbin = resolvers.ownbin or args[-2] or arg[-2] or args[-1] or arg[-1] or arg[0] or "luatex"
-resolvers.ownbin = gsub(resolvers.ownbin,"\\","/")
-
-function resolvers.getownpath()
- local ownpath = resolvers.ownpath or os.selfdir
- if not ownpath or ownpath == "" or ownpath == "unset" then
- ownpath = args[-1] or arg[-1]
- ownpath = ownpath and file.dirname(gsub(ownpath,"\\","/"))
- if not ownpath or ownpath == "" then
- ownpath = args[-0] or arg[-0]
- ownpath = ownpath and file.dirname(gsub(ownpath,"\\","/"))
- end
- local binary = resolvers.ownbin
- if not ownpath or ownpath == "" then
- ownpath = ownpath and file.dirname(binary)
+ if trace_locating then
+ report_resolving()
end
- if not ownpath or ownpath == "" then
- if os.binsuffix ~= "" then
- binary = file.replacesuffix(binary,os.binsuffix)
- end
- for p in gmatch(os.getenv("PATH"),"[^"..io.pathseparator.."]+") do
- local b = file.join(p,binary)
- if lfs.isfile(b) then
- -- we assume that after changing to the path the currentdir function
- -- resolves to the real location and use this side effect here; this
- -- trick is needed because on the mac installations use symlinks in the
- -- path instead of real locations
- local olddir = lfs.currentdir()
- if lfs.chdir(p) then
- local pp = lfs.currentdir()
- if trace_locating and p ~= pp then
- logs.report("fileio","following symlink '%s' to '%s'",p,pp)
+ elseif trace_locating then
+ report_resolving("configuration files already identified")
+ end
+end
+
+local function load_configuration_files()
+ local specification = instance.specification
+ if #specification > 0 then
+ local luacnfname = resolvers.luacnfname
+ for i=1,#specification do
+ local filename = specification[i]
+ local pathname = filedirname(filename)
+ local filename = filejoin(pathname,luacnfname)
+ local realname = resolvers.resolve(filename) -- no shortcut
+ local blob = loadfile(realname)
+ if blob then
+ local setups = instance.setups
+ local data = blob()
+ data = data and data.content
+ if data then
+ if trace_locating then
+ report_resolving("loading configuration file '%s'",filename)
+ report_resolving()
+ end
+ local variables = data.variables or { }
+ local warning = false
+ for k, v in next, data do
+ local variant = type(v)
+ if variant == "table" then
+ initializesetter(filename,k,v)
+ elseif variables[k] == nil then
+ if trace_locating and not warning then
+ report_resolving("variables like '%s' in configuration file '%s' should move to the 'variables' subtable",
+ k,resolvers.resolve(filename))
+ warning = true
+ end
+ variables[k] = v
end
- ownpath = pp
- lfs.chdir(olddir)
- else
- if trace_locating then
- logs.report("fileio","unable to check path '%s'",p)
+ end
+ setups[pathname] = variables
+ if resolvers.luacnfstate == "default" then
+ -- the following code is not tested
+ local cnfspec = variables["TEXMFCNF"]
+ if cnfspec then
+ if trace_locating then
+ report_resolving("reloading configuration due to TEXMF redefinition")
+ end
+ -- we push the value into the main environment (osenv) so
+ -- that it takes precedence over the default one and therefore
+ -- also over following definitions
+ resolvers.setenv('TEXMFCNF',cnfspec) -- resolves prefixes
+ -- we now identify and load the specified configuration files
+ instance.specification = { }
+ identify_configuration_files()
+ load_configuration_files()
+ -- we prevent further overload of the configuration variable
+ resolvers.luacnfstate = "configuration"
+ -- we quit the outer loop
+ break
end
- ownpath = p
end
- break
+
+ else
+ if trace_locating then
+ report_resolving("skipping configuration file '%s' (no content)",filename)
+ end
+ setups[pathname] = { }
+ instance.loaderror = true
end
+ elseif trace_locating then
+ report_resolving("skipping configuration file '%s' (no file)",filename)
+ end
+ instance.order[#instance.order+1] = instance.setups[pathname]
+ if instance.loaderror then
+ break
end
end
- if not ownpath or ownpath == "" then
- ownpath = "."
- logs.report("fileio","forcing fallback ownpath .")
- elseif trace_locating then
- logs.report("fileio","using ownpath '%s'",ownpath)
- end
- end
- resolvers.ownpath = ownpath
- function resolvers.getownpath()
- return resolvers.ownpath
+ elseif trace_locating then
+ report_resolving("warning: no lua configuration files found")
end
- return ownpath
end
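
For reference, a hypothetical minimal texmfcnf.lua matching what the loader above expects: the chunk returns a table whose content field holds a variables subtable; all names and paths here are made up:

    return {
        content = {
            variables = {
                TEXMFLOCAL = "selfautoparent:texmf-local",
                TEXMF      = "{$TEXMFLOCAL,$TEXMFMAIN}",
            },
            -- any other table-valued entry is handed to initializesetter;
            -- non-table entries outside 'variables' trigger the warning above
        },
    }
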
-local own_places = { "SELFAUTOLOC", "SELFAUTODIR", "SELFAUTOPARENT", "TEXMFCNF" }
+-- scheme magic ... database loading
-local function identify_own()
- local ownpath = resolvers.getownpath() or dir.current()
- local ie = instance.environment
- if ownpath then
- if resolvers.env('SELFAUTOLOC') == "" then os.env['SELFAUTOLOC'] = file.collapse_path(ownpath) end
- if resolvers.env('SELFAUTODIR') == "" then os.env['SELFAUTODIR'] = file.collapse_path(ownpath .. "/..") end
- if resolvers.env('SELFAUTOPARENT') == "" then os.env['SELFAUTOPARENT'] = file.collapse_path(ownpath .. "/../..") end
- else
- logs.report("fileio","error: unable to locate ownpath")
- os.exit()
- end
- if resolvers.env('TEXMFCNF') == "" then os.env['TEXMFCNF'] = resolvers.cnfdefault end
- if resolvers.env('TEXOS') == "" then os.env['TEXOS'] = resolvers.env('SELFAUTODIR') end
- if resolvers.env('TEXROOT') == "" then os.env['TEXROOT'] = resolvers.env('SELFAUTOPARENT') end
- if trace_locating then
- for i=1,#own_places do
- local v = own_places[i]
- logs.report("fileio","variable '%s' set to '%s'",v,resolvers.env(v) or "unknown")
- end
- end
- identify_own = function() end
-end
-
-function resolvers.identify_cnf()
- if #instance.cnffiles == 0 then
- -- fallback
- identify_own()
- -- the real search
- resolvers.expand_variables()
- local t = resolvers.split_path(resolvers.env('TEXMFCNF'))
- t = expanded_path_from_list(t)
- expand_vars(t) -- redundant
- local function locate(filename,list)
- for i=1,#t do
- local ti = t[i]
- local texmfcnf = file.collapse_path(file.join(ti,filename))
- if lfs.isfile(texmfcnf) then
- list[#list+1] = texmfcnf
- end
- end
+local function load_file_databases()
+ instance.loaderror, instance.files = false, allocate()
+ if not instance.renewcache then
+ local hashes = instance.hashes
+ for k=1,#hashes do
+ local hash = hashes[k]
+ resolvers.hashers.byscheme(hash.type,hash.name)
+ if instance.loaderror then break end
end
- locate(resolvers.luaname,instance.luafiles)
- locate(resolvers.cnfname,instance.cnffiles)
end
end
-local function load_cnf_file(fname)
- fname = resolvers.clean_path(fname)
- local lname = file.replacesuffix(fname,'lua')
- if lfs.isfile(lname) then
- local dname = file.dirname(fname) -- fname ?
- if not instance.configuration[dname] then
- resolvers.load_data(dname,'configuration',lname and file.basename(lname))
- instance.order[#instance.order+1] = instance.configuration[dname]
- end
- else
- f = io.open(fname)
- if f then
- if trace_locating then
- logs.report("fileio","loading configuration file %s", fname)
- end
- local line, data, n, k, v
- local dname = file.dirname(fname)
- if not instance.configuration[dname] then
- instance.configuration[dname] = { }
- instance.order[#instance.order+1] = instance.configuration[dname]
- end
- local data = instance.configuration[dname]
- while true do
- local line, n = f:read(), 0
- if line then
- while true do -- join lines
- line, n = gsub(line,"\\%s*$", "")
- if n > 0 then
- line = line .. f:read()
- else
- break
- end
- end
- if not find(line,"^[%%#]") then
- local l = gsub(line,"%s*%%.*$","")
- local k, v = match(l,"%s*(.-)%s*=%s*(.-)%s*$")
- if k and v and not data[k] then
- v = gsub(v,"[%%#].*",'')
- data[k] = gsub(v,"~","$HOME")
- instance.kpsevars[k] = true
- end
+local function locate_file_databases()
+ -- todo: cache:// and tree:// (runtime)
+ local texmfpaths = resolvers.expandedpathlist('TEXMF')
+ if #texmfpaths > 0 then
+ for i=1,#texmfpaths do
+ local path = collapsepath(texmfpaths[i])
+ local stripped = lpegmatch(inhibitstripper,path) -- the !! thing
+ if stripped ~= "" then
+ local runtime = stripped == path
+ path = resolvers.cleanpath(path)
+ local spec = resolvers.splitmethod(stripped)
+ if spec.scheme == "cache" or spec.scheme == "file" then
+ stripped = spec.path
+ elseif runtime and (spec.noscheme or spec.scheme == "file") then
+ stripped = "tree:///" .. stripped
+ end
+ if trace_locating then
+ if runtime then
+ report_resolving("locating list of '%s' (runtime)",path)
+ else
+ report_resolving("locating list of '%s' (cached)",path)
end
- else
- break
end
+ methodhandler('locators',stripped)
end
- f:close()
- elseif trace_locating then
- logs.report("fileio","skipping configuration file '%s'", fname)
end
- end
-end
-
-local function collapse_cnf_data() -- potential optimization: pass start index (setup and configuration are shared)
- local order = instance.order
- for i=1,#order do
- local c = order[i]
- for k,v in next, c do
- if not instance.variables[k] then
- if instance.environment[k] then
- instance.variables[k] = instance.environment[k]
- else
- instance.kpsevars[k] = true
- instance.variables[k] = resolvers.bare_variable(v)
- end
- end
+ if trace_locating then
+ report_resolving()
end
+ elseif trace_locating then
+ report_resolving("no texmf paths are defined (using TEXMF)")
end
end
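
The inhibitstripper applied above handles the kpathsea-style "!!" prefix: "!!" in front of a TEXMF tree means "trust the saved file database, do not scan the disk at run time", and comparing the stripped path with the original tells the two cases apart. A standalone sketch (path is illustrative):

    local lpeg = require("lpeg")
    local P, Cs = lpeg.P, lpeg.Cs
    local inhibitstripper = P("!")^0 * Cs(P(1)^0)
    local path     = "!!/opt/texlive/texmf"
    local stripped = lpeg.match(inhibitstripper,path)
    print(stripped, stripped == path) --> /opt/texlive/texmf   false   (not equal: cached tree, not runtime)
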
-function resolvers.load_cnf()
- local function loadoldconfigdata()
- local cnffiles = instance.cnffiles
- for i=1,#cnffiles do
- load_cnf_file(cnffiles[i])
- end
+local function generate_file_databases()
+ local hashes = instance.hashes
+ for k=1,#hashes do
+ local hash = hashes[k]
+ methodhandler('generators',hash.name)
end
- -- instance.cnffiles contain complete names now !
- -- we still use a funny mix of cnf and new but soon
- -- we will switch to lua exclusively as we only use
- -- the file to collect the tree roots
- if #instance.cnffiles == 0 then
- if trace_locating then
- logs.report("fileio","no cnf files found (TEXMFCNF may not be set/known)")
- end
- else
- local cnffiles = instance.cnffiles
- instance.rootpath = cnffiles[1]
- for k=1,#cnffiles do
- instance.cnffiles[k] = file.collapse_path(cnffiles[k])
- end
- for i=1,3 do
- instance.rootpath = file.dirname(instance.rootpath)
- end
- instance.rootpath = file.collapse_path(instance.rootpath)
- if instance.diskcache and not instance.renewcache then
- resolvers.loadoldconfig(instance.cnffiles)
- if instance.loaderror then
- loadoldconfigdata()
- resolvers.saveoldconfig()
- end
- else
- loadoldconfigdata()
- if instance.renewcache then
- resolvers.saveoldconfig()
- end
- end
- collapse_cnf_data()
+ if trace_locating then
+ report_resolving()
end
- check_configuration()
end
-function resolvers.load_lua()
- if #instance.luafiles == 0 then
- -- yet harmless
- else
- instance.rootpath = instance.luafiles[1]
- local luafiles = instance.luafiles
- for k=1,#luafiles do
- instance.luafiles[k] = file.collapse_path(luafiles[k])
- end
- for i=1,3 do
- instance.rootpath = file.dirname(instance.rootpath)
+local function save_file_databases() -- will become cachers
+ for i=1,#instance.hashes do
+ local hash = instance.hashes[i]
+ local cachename = hash.name
+ if hash.cache then
+ local content = instance.files[cachename]
+ caches.collapsecontent(content)
+ if trace_locating then
+ report_resolving("saving tree '%s'",cachename)
+ end
+ caches.savecontent(cachename,"files",content)
+ elseif trace_locating then
+ report_resolving("not saving runtime tree '%s'",cachename)
end
- instance.rootpath = file.collapse_path(instance.rootpath)
- resolvers.loadnewconfig()
- collapse_cnf_data()
end
- check_configuration()
end
--- database loading
-
-function resolvers.load_hash()
- resolvers.locatelists()
+local function load_databases()
+ locate_file_databases()
if instance.diskcache and not instance.renewcache then
- resolvers.loadfiles()
+ load_file_databases()
if instance.loaderror then
- resolvers.loadlists()
- resolvers.savefiles()
+ generate_file_databases()
+ save_file_databases()
end
else
- resolvers.loadlists()
+ generate_file_databases()
if instance.renewcache then
- resolvers.savefiles()
+ save_file_databases()
end
end
end
-function resolvers.append_hash(type,tag,name)
- if trace_locating then
- logs.report("fileio","hash '%s' appended",tag)
+function resolvers.appendhash(type,name,cache)
+ -- safeguard ... tricky as it's actually a bug when seen twice
+ if not instance.hashed[name] then
+ if trace_locating then
+ report_resolving("hash '%s' appended",name)
+ end
+ insert(instance.hashes, { type = type, name = name, cache = cache } )
+ instance.hashed[name] = cache
end
- insert(instance.hashes, { ['type']=type, ['tag']=tag, ['name']=name } )
end
-function resolvers.prepend_hash(type,tag,name)
- if trace_locating then
- logs.report("fileio","hash '%s' prepended",tag)
+function resolvers.prependhash(type,name,cache)
+ -- safeguard ... tricky as it's actually a bug when seen twice
+ if not instance.hashed[name] then
+ if trace_locating then
+ report_resolving("hash '%s' prepended",name)
+ end
+ insert(instance.hashes, 1, { type = type, name = name, cache = cache } )
+ instance.hashed[name] = cache
end
- insert(instance.hashes, 1, { ['type']=type, ['tag']=tag, ['name']=name } )
end
-function resolvers.extend_texmf_var(specification) -- crap, we could better prepend the hash
--- local t = resolvers.expanded_path_list('TEXMF') -- full expansion
- local t = resolvers.split_path(resolvers.env('TEXMF'))
+function resolvers.extendtexmfvariable(specification) -- crap, we could better prepend the hash
+ local t = resolvers.splitpath(getenv('TEXMF'))
insert(t,1,specification)
local newspec = concat(t,";")
if instance.environment["TEXMF"] then
@@ -738,249 +506,18 @@ function resolvers.extend_texmf_var(specification) -- crap, we could better prep
else
-- weird
end
- resolvers.expand_variables()
reset_hashes()
end
--- locators
-
-function resolvers.locatelists()
- local texmfpaths = resolvers.clean_path_list('TEXMF')
- for i=1,#texmfpaths do
- local path = texmfpaths[i]
- if trace_locating then
- logs.report("fileio","locating list of '%s'",path)
- end
- resolvers.locatedatabase(file.collapse_path(path))
- end
-end
-
-function resolvers.locatedatabase(specification)
- return resolvers.methodhandler('locators', specification)
-end
-
-function resolvers.locators.tex(specification)
- if specification and specification ~= '' and lfs.isdir(specification) then
- if trace_locating then
- logs.report("fileio","tex locator '%s' found",specification)
- end
- resolvers.append_hash('file',specification,filename)
- elseif trace_locating then
- logs.report("fileio","tex locator '%s' not found",specification)
- end
-end
-
--- hashers
-
-function resolvers.hashdatabase(tag,name)
- return resolvers.methodhandler('hashers',tag,name)
-end
-
-function resolvers.loadfiles()
- instance.loaderror = false
- instance.files = { }
- if not instance.renewcache then
- local hashes = instance.hashes
- for k=1,#hashes do
- local hash = hashes[k]
- resolvers.hashdatabase(hash.tag,hash.name)
- if instance.loaderror then break end
- end
- end
-end
-
-function resolvers.hashers.tex(tag,name)
- resolvers.load_data(tag,'files')
-end
-
--- generators:
-
-function resolvers.loadlists()
- local hashes = instance.hashes
- for i=1,#hashes do
- resolvers.generatedatabase(hashes[i].tag)
- end
-end
-
-function resolvers.generatedatabase(specification)
- return resolvers.methodhandler('generators', specification)
-end
-
--- starting with . or .. etc or funny char
-
-local weird = lpeg.P(".")^1 + lpeg.anywhere(lpeg.S("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t"))
-
---~ local l_forbidden = lpeg.S("~`!#$%^&*()={}[]:;\"\'||\\/<>,?\n\r\t")
---~ local l_confusing = lpeg.P(" ")
---~ local l_character = lpeg.patterns.utf8
---~ local l_dangerous = lpeg.P(".")
-
---~ local l_normal = (l_character - l_forbidden - l_confusing - l_dangerous) * (l_character - l_forbidden - l_confusing^2)^0 * lpeg.P(-1)
---~ ----- l_normal = l_normal * lpeg.Cc(true) + lpeg.Cc(false)
-
---~ local function test(str)
---~ print(str,lpeg.match(l_normal,str))
---~ end
---~ test("ヒラギノ明朝 Pro W3")
---~ test("..ヒラギノ明朝 Pro W3")
---~ test(":ヒラギノ明朝 Pro W3;")
---~ test("ヒラギノ明朝 /Pro W3;")
---~ test("ヒラギノ明朝 Pro W3")
-
-function resolvers.generators.tex(specification)
- local tag = specification
- if trace_locating then
- logs.report("fileio","scanning path '%s'",specification)
- end
- instance.files[tag] = { }
- local files = instance.files[tag]
- local n, m, r = 0, 0, 0
- local spec = specification .. '/'
- local attributes = lfs.attributes
- local directory = lfs.dir
- local function action(path)
- local full
- if path then
- full = spec .. path .. '/'
- else
- full = spec
- end
- for name in directory(full) do
- if not lpegmatch(weird,name) then
- -- if lpegmatch(l_normal,name) then
- local mode = attributes(full..name,'mode')
- if mode == 'file' then
- if path then
- n = n + 1
- local f = files[name]
- if f then
- if type(f) == 'string' then
- files[name] = { f, path }
- else
- f[#f+1] = path
- end
- else -- probably unique anyway
- files[name] = path
- local lower = lower(name)
- if name ~= lower then
- files["remap:"..lower] = name
- r = r + 1
- end
- end
- end
- elseif mode == 'directory' then
- m = m + 1
- if path then
- action(path..'/'..name)
- else
- action(name)
- end
- end
- end
- end
- end
- action()
- if trace_locating then
- logs.report("fileio","%s files found on %s directories with %s uppercase remappings",n,m,r)
- end
-end
-
--- savers, todo
-
-function resolvers.savefiles()
- resolvers.save_data('files')
-end
-
--- A config (optionally) has the paths split in tables. Internally
--- we join them and split them after the expansion has taken place. This
--- is more convenient.
-
---~ local checkedsplit = string.checkedsplit
-
-local cache = { }
-
-local splitter = lpeg.Ct(lpeg.splitat(lpeg.S(os.type == "windows" and ";" or ":;")))
-
-local function split_kpse_path(str) -- beware, this can be either a path or a {specification}
- local found = cache[str]
- if not found then
- if str == "" then
- found = { }
- else
- str = gsub(str,"\\","/")
---~ local split = (find(str,";") and checkedsplit(str,";")) or checkedsplit(str,io.pathseparator)
-local split = lpegmatch(splitter,str)
- found = { }
- for i=1,#split do
- local s = split[i]
- if not find(s,"^{*unset}*") then
- found[#found+1] = s
- end
- end
- if trace_expansions then
- logs.report("fileio","splitting path specification '%s'",str)
- for k=1,#found do
- logs.report("fileio","% 4i: %s",k,found[k])
- end
- end
- cache[str] = found
- end
- end
- return found
-end
-
-resolvers.split_kpse_path = split_kpse_path
-
-function resolvers.splitconfig()
- for i=1,#instance do
- local c = instance[i]
- for k,v in next, c do
- if type(v) == 'string' then
- local t = split_kpse_path(v)
- if #t > 1 then
- c[k] = t
- end
- end
- end
- end
-end
-
-function resolvers.joinconfig()
- local order = instance.order
- for i=1,#order do
- local c = order[i]
- for k,v in next, c do -- indexed?
- if type(v) == 'table' then
- c[k] = file.join_path(v)
- end
- end
- end
-end
-
-function resolvers.split_path(str)
- if type(str) == 'table' then
- return str
- else
- return split_kpse_path(str)
- end
-end
-
-function resolvers.join_path(str)
- if type(str) == 'table' then
- return file.join_path(str)
- else
- return str
- end
-end
-
function resolvers.splitexpansions()
local ie = instance.expansions
for k,v in next, ie do
- local t, h, p = { }, { }, split_kpse_path(v)
+ local t, tn, h, p = { }, 0, { }, splitconfigurationpath(v)
for kk=1,#p do
local vv = p[kk]
if vv ~= "" and not h[vv] then
- t[#t+1] = vv
+ tn = tn + 1
+ t[tn] = vv
h[vv] = true
end
end
@@ -994,343 +531,92 @@ end
-- end of split/join code
-function resolvers.saveoldconfig()
- resolvers.splitconfig()
- resolvers.save_data('configuration')
- resolvers.joinconfig()
-end
-
-resolvers.configbanner = [[
--- This is a Luatex configuration file created by 'luatools.lua' or
--- 'luatex.exe' directly. For comment, suggestions and questions you can
--- contact the ConTeXt Development Team. This configuration file is
--- not copyrighted. [HH & TH]
-]]
-
-function resolvers.serialize(files)
- -- This version is somewhat optimized for the kind of
- -- tables that we deal with, so it's much faster than
- -- the generic serializer. This makes sense because
- -- luatools and mtxtools are called frequently. Okay,
- -- we pay a small price for properly tabbed tables.
- local t = { }
- local function dump(k,v,m) -- could be moved inline
- if type(v) == 'string' then
- return m .. "['" .. k .. "']='" .. v .. "',"
- elseif #v == 1 then
- return m .. "['" .. k .. "']='" .. v[1] .. "',"
- else
- return m .. "['" .. k .. "']={'" .. concat(v,"','").. "'},"
- end
- end
- t[#t+1] = "return {"
- if instance.sortdata then
- local sortedfiles = sortedkeys(files)
- for i=1,#sortedfiles do
- local k = sortedfiles[i]
- local fk = files[k]
- if type(fk) == 'table' then
- t[#t+1] = "\t['" .. k .. "']={"
- local sortedfk = sortedkeys(fk)
- for j=1,#sortedfk do
- local kk = sortedfk[j]
- t[#t+1] = dump(kk,fk[kk],"\t\t")
- end
- t[#t+1] = "\t},"
- else
- t[#t+1] = dump(k,fk,"\t")
- end
- end
- else
- for k, v in next, files do
- if type(v) == 'table' then
- t[#t+1] = "\t['" .. k .. "']={"
- for kk,vv in next, v do
- t[#t+1] = dump(kk,vv,"\t\t")
- end
- t[#t+1] = "\t},"
- else
- t[#t+1] = dump(k,v,"\t")
- end
- end
- end
- t[#t+1] = "}"
- return concat(t,"\n")
-end
-
-local data_state = { }
-
-function resolvers.data_state()
- return data_state or { }
-end
-
-function resolvers.save_data(dataname, makename) -- untested without cache overload
- for cachename, files in next, instance[dataname] do
- local name = (makename or file.join)(cachename,dataname)
- local luaname, lucname = name .. ".lua", name .. ".luc"
- if trace_locating then
- logs.report("fileio","preparing '%s' for '%s'",dataname,cachename)
- end
- for k, v in next, files do
- if type(v) == "table" and #v == 1 then
- files[k] = v[1]
- end
- end
- local data = {
- type = dataname,
- root = cachename,
- version = resolvers.cacheversion,
- date = os.date("%Y-%m-%d"),
- time = os.date("%H:%M:%S"),
- content = files,
- uuid = os.uuid(),
- }
- local ok = io.savedata(luaname,resolvers.serialize(data))
- if ok then
- if trace_locating then
- logs.report("fileio","'%s' saved in '%s'",dataname,luaname)
- end
- if utils.lua.compile(luaname,lucname,false,true) then -- no cleanup but strip
- if trace_locating then
- logs.report("fileio","'%s' compiled to '%s'",dataname,lucname)
- end
- else
- if trace_locating then
- logs.report("fileio","compiling failed for '%s', deleting file '%s'",dataname,lucname)
- end
- os.remove(lucname)
- end
- elseif trace_locating then
- logs.report("fileio","unable to save '%s' in '%s' (access error)",dataname,luaname)
- end
- end
-end
-
-function resolvers.load_data(pathname,dataname,filename,makename) -- untested without cache overload
- filename = ((not filename or (filename == "")) and dataname) or filename
- filename = (makename and makename(dataname,filename)) or file.join(pathname,filename)
- local blob = loadfile(filename .. ".luc") or loadfile(filename .. ".lua")
- if blob then
- local data = blob()
- if data and data.content and data.type == dataname and data.version == resolvers.cacheversion then
- data_state[#data_state+1] = data.uuid
- if trace_locating then
- logs.report("fileio","loading '%s' for '%s' from '%s'",dataname,pathname,filename)
- end
- instance[dataname][pathname] = data.content
- else
- if trace_locating then
- logs.report("fileio","skipping '%s' for '%s' from '%s'",dataname,pathname,filename)
- end
- instance[dataname][pathname] = { }
- instance.loaderror = true
- end
- elseif trace_locating then
- logs.report("fileio","skipping '%s' for '%s' from '%s'",dataname,pathname,filename)
- end
-end
-
--- some day i'll use the nested approach, but not yet (actually we even drop
--- engine/progname support since we have only luatex now)
---
--- first texmfcnf.lua files are located, next the cached texmf.cnf files
---
--- return {
--- TEXMFBOGUS = 'effe checken of dit werkt',
--- }
-
-function resolvers.resetconfig()
- identify_own()
- instance.configuration, instance.setup, instance.order, instance.loaderror = { }, { }, { }, false
-end
-
-function resolvers.loadnewconfig()
- local luafiles = instance.luafiles
- for i=1,#luafiles do
- local cnf = luafiles[i]
- local pathname = file.dirname(cnf)
- local filename = file.join(pathname,resolvers.luaname)
- local blob = loadfile(filename)
- if blob then
- local data = blob()
- if data then
- if trace_locating then
- logs.report("fileio","loading configuration file '%s'",filename)
- end
- if true then
- -- flatten to variable.progname
- local t = { }
- for k, v in next, data do -- v = progname
- if type(v) == "string" then
- t[k] = v
- else
- for kk, vv in next, v do -- vv = variable
- if type(vv) == "string" then
- t[vv.."."..v] = kk
- end
- end
- end
- end
- instance['setup'][pathname] = t
- else
- instance['setup'][pathname] = data
- end
- else
- if trace_locating then
- logs.report("fileio","skipping configuration file '%s'",filename)
- end
- instance['setup'][pathname] = { }
- instance.loaderror = true
- end
- elseif trace_locating then
- logs.report("fileio","skipping configuration file '%s'",filename)
- end
- instance.order[#instance.order+1] = instance.setup[pathname]
- if instance.loaderror then break end
- end
-end
+-- we used to have 'files' and 'configurations' so therefore the following
+-- shared function
-function resolvers.loadoldconfig()
- if not instance.renewcache then
- local cnffiles = instance.cnffiles
- for i=1,#cnffiles do
- local cnf = cnffiles[i]
- local dname = file.dirname(cnf)
- resolvers.load_data(dname,'configuration')
- instance.order[#instance.order+1] = instance.configuration[dname]
- if instance.loaderror then break end
- end
- end
- resolvers.joinconfig()
-end
-
-function resolvers.expand_variables()
- local expansions, environment, variables = { }, instance.environment, instance.variables
- local env = resolvers.env
- instance.expansions = expansions
- local engine, progname = instance.engine, instance.progname
- if type(engine) ~= "string" then instance.engine, engine = "", "" end
- if type(progname) ~= "string" then instance.progname, progname = "", "" end
- if engine ~= "" then environment['engine'] = engine end
- if progname ~= "" then environment['progname'] = progname end
- for k,v in next, environment do
- local a, b = match(k,"^(%a+)%_(.*)%s*$")
- if a and b then
- expansions[a..'.'..b] = v
- else
- expansions[k] = v
- end
- end
- for k,v in next, environment do -- move environment to expansions
- if not expansions[k] then expansions[k] = v end
- end
- for k,v in next, variables do -- move variables to expansions
- if not expansions[k] then expansions[k] = v end
- end
- local busy = false
- local function resolve(a)
- busy = true
- return expansions[a] or env(a)
- end
- while true do
- busy = false
- for k,v in next, expansions do
- local s, n = gsub(v,"%$([%a%d%_%-]+)",resolve)
- local s, m = gsub(s,"%$%{([%a%d%_%-]+)%}",resolve)
- if n > 0 or m > 0 then
- expansions[k]= s
- end
- end
- if not busy then break end
- end
- for k,v in next, expansions do
- expansions[k] = gsub(v,"\\", '/')
- end
+function resolvers.datastate()
+ return caches.contentstate()
end
function resolvers.variable(name)
- return entry(instance.variables,name)
+ local name = name and lpegmatch(dollarstripper,name)
+ local result = name and instance.variables[name]
+ return result ~= nil and result or ""
end
function resolvers.expansion(name)
- return entry(instance.expansions,name)
+ local name = name and lpegmatch(dollarstripper,name)
+ local result = name and instance.expansions[name]
+ return result ~= nil and result or ""
end
-function resolvers.is_variable(name)
- return is_entry(instance.variables,name)
-end
-
-function resolvers.is_expansion(name)
- return is_entry(instance.expansions,name)
-end
-
-function resolvers.unexpanded_path_list(str)
+function resolvers.unexpandedpathlist(str)
local pth = resolvers.variable(str)
- local lst = resolvers.split_path(pth)
- return expanded_path_from_list(lst)
+ local lst = resolvers.splitpath(pth)
+ return expandedpathfromlist(lst)
end
-function resolvers.unexpanded_path(str)
- return file.join_path(resolvers.unexpanded_path_list(str))
+function resolvers.unexpandedpath(str)
+ return joinpath(resolvers.unexpandedpathlist(str))
end
-do -- no longer needed
+local done = { }
- local done = { }
-
- function resolvers.reset_extra_path()
- local ep = instance.extra_paths
- if not ep then
- ep, done = { }, { }
- instance.extra_paths = ep
- elseif #ep > 0 then
- instance.lists, done = { }, { }
- end
+function resolvers.resetextrapath()
+ local ep = instance.extra_paths
+ if not ep then
+ ep, done = { }, { }
+ instance.extra_paths = ep
+ elseif #ep > 0 then
+ instance.lists, done = { }, { }
end
+end
- function resolvers.register_extra_path(paths,subpaths)
- local ep = instance.extra_paths or { }
- local n = #ep
- if paths and paths ~= "" then
- if subpaths and subpaths ~= "" then
- for p in gmatch(paths,"[^,]+") do
- -- we gmatch each step again, not that fast, but used seldom
- for s in gmatch(subpaths,"[^,]+") do
- local ps = p .. "/" .. s
- if not done[ps] then
- ep[#ep+1] = resolvers.clean_path(ps)
- done[ps] = true
- end
- end
- end
- else
- for p in gmatch(paths,"[^,]+") do
- if not done[p] then
- ep[#ep+1] = resolvers.clean_path(p)
- done[p] = true
- end
- end
- end
- elseif subpaths and subpaths ~= "" then
- for i=1,n do
+function resolvers.registerextrapath(paths,subpaths)
+ local ep = instance.extra_paths or { }
+ local oldn = #ep
+ local newn = oldn
+ if paths and paths ~= "" then
+ if subpaths and subpaths ~= "" then
+ for p in gmatch(paths,"[^,]+") do
-- we gmatch each step again, not that fast, but used seldom
for s in gmatch(subpaths,"[^,]+") do
- local ps = ep[i] .. "/" .. s
+ local ps = p .. "/" .. s
if not done[ps] then
- ep[#ep+1] = resolvers.clean_path(ps)
+ newn = newn + 1
+ ep[newn] = resolvers.cleanpath(ps)
done[ps] = true
end
end
end
+ else
+ for p in gmatch(paths,"[^,]+") do
+ if not done[p] then
+ newn = newn + 1
+ ep[newn] = resolvers.cleanpath(p)
+ done[p] = true
+ end
+ end
end
- if #ep > 0 then
- instance.extra_paths = ep -- register paths
- end
- if #ep > n then
- instance.lists = { } -- erase the cache
+ elseif subpaths and subpaths ~= "" then
+ for i=1,oldn do
+ -- we gmatch each step again, not that fast, but used seldom
+ for s in gmatch(subpaths,"[^,]+") do
+ local ps = ep[i] .. "/" .. s
+ if not done[ps] then
+ newn = newn + 1
+ ep[newn] = resolvers.cleanpath(ps)
+ done[ps] = true
+ end
+ end
end
end
-
+ if newn > 0 then
+ instance.extra_paths = ep -- register paths
+ end
+ if newn > oldn then
+ instance.lists = { } -- erase the cache
+ end
end
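
registerextrapath builds the cross product of comma-separated paths and subpaths, filtering duplicates through done and flushing the list cache only when something new appeared. A standalone sketch of that cross product (directories are made up):

    local extra = { }
    for p in ("/data/projectA,/data/projectB"):gmatch("[^,]+") do
        for s in ("tex,doc"):gmatch("[^,]+") do
            extra[#extra+1] = p .. "/" .. s
        end
    end
    -- extra: /data/projectA/tex, /data/projectA/doc, /data/projectB/tex, /data/projectB/doc
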
local function made_list(instance,list)
@@ -1338,14 +624,15 @@ local function made_list(instance,list)
if not ep or #ep == 0 then
return list
else
- local done, new = { }, { }
+ local done, new, newn = { }, { }, 0
-- honour . .. ../.. but only when at the start
for k=1,#list do
local v = list[k]
if not done[v] then
if find(v,"^[%.%/]$") then
done[v] = true
- new[#new+1] = v
+ newn = newn + 1
+ new[newn] = v
else
break
end
@@ -1356,7 +643,8 @@ local function made_list(instance,list)
local v = ep[k]
if not done[v] then
done[v] = true
- new[#new+1] = v
+ newn = newn + 1
+ new[newn] = v
end
end
-- next the formal paths
@@ -1364,118 +652,95 @@ local function made_list(instance,list)
local v = list[k]
if not done[v] then
done[v] = true
- new[#new+1] = v
+ newn = newn + 1
+ new[newn] = v
end
end
return new
end
end
-function resolvers.clean_path_list(str)
- local t = resolvers.expanded_path_list(str)
+function resolvers.cleanpathlist(str)
+ local t = resolvers.expandedpathlist(str)
if t then
for i=1,#t do
- t[i] = file.collapse_path(resolvers.clean_path(t[i]))
+ t[i] = collapsepath(resolvers.cleanpath(t[i]))
end
end
return t
end
-function resolvers.expand_path(str)
- return file.join_path(resolvers.expanded_path_list(str))
+function resolvers.expandpath(str)
+ return joinpath(resolvers.expandedpathlist(str))
end
-function resolvers.expanded_path_list(str)
+function resolvers.expandedpathlist(str)
if not str then
- return ep or { } -- ep ?
+ return { }
elseif instance.savelists then
- -- engine+progname hash
- str = gsub(str,"%$","")
+ str = lpegmatch(dollarstripper,str)
if not instance.lists[str] then -- cached
- local lst = made_list(instance,resolvers.split_path(resolvers.expansion(str)))
- instance.lists[str] = expanded_path_from_list(lst)
+ local lst = made_list(instance,resolvers.splitpath(resolvers.expansion(str)))
+ instance.lists[str] = expandedpathfromlist(lst)
end
return instance.lists[str]
else
- local lst = resolvers.split_path(resolvers.expansion(str))
- return made_list(instance,expanded_path_from_list(lst))
+ local lst = resolvers.splitpath(resolvers.expansion(str))
+ return made_list(instance,expandedpathfromlist(lst))
end
end
-function resolvers.expanded_path_list_from_var(str) -- brrr
- local tmp = resolvers.var_of_format_or_suffix(gsub(str,"%$",""))
- if tmp ~= "" then
- return resolvers.expanded_path_list(tmp)
- else
- return resolvers.expanded_path_list(str)
- end
-end
-
-function resolvers.expand_path_from_var(str)
- return file.join_path(resolvers.expanded_path_list_from_var(str))
+function resolvers.expandedpathlistfromvariable(str) -- brrr
+ str = lpegmatch(dollarstripper,str)
+ local tmp = resolvers.variableofformatorsuffix(str)
+ return resolvers.expandedpathlist(tmp ~= "" and tmp or str)
end
-function resolvers.format_of_var(str)
- return formats[str] or formats[alternatives[str]] or ''
-end
-function resolvers.format_of_suffix(str)
- return suffixmap[file.extname(str)] or 'tex'
+function resolvers.expandpathfromvariable(str)
+ return joinpath(resolvers.expandedpathlistfromvariable(str))
end
-function resolvers.variable_of_format(str)
- return formats[str] or formats[alternatives[str]] or ''
+function resolvers.expandbraces(str) -- output variable and brace expansion of STRING
+ local ori = resolvers.variable(str)
+ local pth = expandedpathfromlist(resolvers.splitpath(ori))
+ return joinpath(pth)
end
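
An illustration of expandbraces (the TEXMF value here is made up): with TEXMF set to "{/a,/b}/texmf", the raw variable is brace-expanded and joined back into one specification string:

    -- resolvers.expandbraces("$TEXMF")
    --   raw variable : "{/a,/b}/texmf"
    --   expanded list: { "/a/texmf", "/b/texmf" }
    --   returned     : the two paths joined by joinpath into a single string
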
-function resolvers.var_of_format_or_suffix(str)
- local v = formats[str]
- if v then
- return v
- end
- v = formats[alternatives[str]]
- if v then
- return v
- end
- v = suffixmap[file.extname(str)]
- if v then
- return formats[isf]
+function resolvers.registerfilehash(name,content,someerror)
+ if content then
+ instance.files[name] = content
+ else
+ instance.files[name] = { }
+ if somerror == true then -- can be unset
+ instance.loaderror = someerror
+ end
end
- return ''
-end
-
-function resolvers.expand_braces(str) -- output variable and brace expansion of STRING
- local ori = resolvers.variable(str)
- local pth = expanded_path_from_list(resolvers.split_path(ori))
- return file.join_path(pth)
end
-resolvers.isreadable = { }
-
-function resolvers.isreadable.file(name)
- local readable = lfs.isfile(name) -- brrr
+local function isreadable(name)
+    local readable = lfs.isfile(name) -- not file.is_readable(name) as it can be a dir
if trace_detail then
if readable then
- logs.report("fileio","file '%s' is readable",name)
+ report_resolving("file '%s' is readable",name)
else
- logs.report("fileio","file '%s' is not readable", name)
+ report_resolving("file '%s' is not readable", name)
end
end
return readable
end
-resolvers.isreadable.tex = resolvers.isreadable.file
-
-- name
-- name/name
local function collect_files(names)
- local filelist = { }
+ local filelist, noffiles = { }, 0
for k=1,#names do
local fname = names[k]
if trace_detail then
- logs.report("fileio","checking name '%s'",fname)
+ report_resolving("checking name '%s'",fname)
end
- local bname = file.basename(fname)
- local dname = file.dirname(fname)
+ local bname = filebasename(fname)
+ local dname = filedirname(fname)
if dname == "" or find(dname,"^%.") then
dname = false
else
@@ -1484,11 +749,11 @@ local function collect_files(names)
local hashes = instance.hashes
for h=1,#hashes do
local hash = hashes[h]
- local blobpath = hash.tag
+ local blobpath = hash.name
local files = blobpath and instance.files[blobpath]
if files then
if trace_detail then
- logs.report("fileio","deep checking '%s' (%s)",blobpath,bname)
+ report_resolving("deep checking '%s' (%s)",blobpath,bname)
end
local blobfile = files[bname]
if not blobfile then
@@ -1500,62 +765,51 @@ local function collect_files(names)
end
end
if blobfile then
+ local blobroot = files.__path__ or blobpath
if type(blobfile) == 'string' then
if not dname or find(blobfile,dname) then
- filelist[#filelist+1] = {
- hash.type,
- file.join(blobpath,blobfile,bname), -- search
- resolvers.concatinators[hash.type](blobpath,blobfile,bname) -- result
- }
+ local variant = hash.type
+ -- local search = filejoin(blobpath,blobfile,bname)
+ local search = filejoin(blobroot,blobfile,bname)
+ local result = methodhandler('concatinators',hash.type,blobroot,blobfile,bname)
+ if trace_detail then
+ report_resolving("match: variant '%s', search '%s', result '%s'",variant,search,result)
+ end
+ noffiles = noffiles + 1
+ filelist[noffiles] = { variant, search, result }
end
else
for kk=1,#blobfile do
local vv = blobfile[kk]
if not dname or find(vv,dname) then
- filelist[#filelist+1] = {
- hash.type,
- file.join(blobpath,vv,bname), -- search
- resolvers.concatinators[hash.type](blobpath,vv,bname) -- result
- }
+ local variant = hash.type
+ -- local search = filejoin(blobpath,vv,bname)
+ local search = filejoin(blobroot,vv,bname)
+ local result = methodhandler('concatinators',hash.type,blobroot,vv,bname)
+ if trace_detail then
+ report_resolving("match: variant '%s', search '%s', result '%s'",variant,search,result)
+ end
+ noffiles = noffiles + 1
+ filelist[noffiles] = { variant, search, result }
end
end
end
end
elseif trace_locating then
- logs.report("fileio","no match in '%s' (%s)",blobpath,bname)
+ report_resolving("no match in '%s' (%s)",blobpath,bname)
end
end
end
- if #filelist > 0 then
- return filelist
- else
- return nil
- end
-end
-
-function resolvers.suffix_of_format(str)
- if suffixes[str] then
- return suffixes[str][1]
- else
- return ""
- end
+ return noffiles > 0 and filelist or nil
end
-function resolvers.suffixes_of_format(str)
- if suffixes[str] then
- return suffixes[str]
- else
- return {}
- end
-end
-
-function resolvers.register_in_trees(name)
+function resolvers.registerintrees(name)
if not find(name,"^%.") then
instance.foundintrees[name] = (instance.foundintrees[name] or 0) + 1 -- maybe only one
end
end
--- split the next one up for readability (bu this module needs a cleanup anyway)
+-- split the next one up for readability (but this module needs a cleanup anyway)
local function can_be_dir(name) -- can become local
local fakepaths = instance.fakepaths
@@ -1566,61 +820,62 @@ local function can_be_dir(name) -- can become local
fakepaths[name] = 2 -- no directory
end
end
- return (fakepaths[name] == 1)
+ return fakepaths[name] == 1
end
-local function collect_instance_files(filename,collected) -- todo : plugin (scanners, checkers etc)
- local result = collected or { }
+local preparetreepattern = Cs((P(".")/"%%." + P("-")/"%%-" + P(1))^0 * Cc("$"))
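-- A quick sketch of what preparetreepattern yields (using the lpeg shortcuts
-- P/Cs/Cc and lpegmatch already bound in this file): dots and dashes are
-- escaped and the result is anchored at the end, so it can be matched against
-- the tail of a candidate path. The filename below is only an example.
local treepattern = lpegmatch(preparetreepattern,"iwona-regular.tfm")
-- treepattern is now "iwona%-regular%.tfm$"
if find("tex/fonts/iwona/iwona-regular.tfm",treepattern) then
    -- the candidate path ends in the wanted name
end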
+
+-- this one is split in smaller functions but it needs testing
+
+local function collect_instance_files(filename,askedformat,allresults) -- todo : plugin (scanners, checkers etc)
+ local result = { }
local stamp = nil
- filename = file.collapse_path(filename)
+ askedformat = askedformat or ""
+ filename = collapsepath(filename)
-- speed up / beware: format problem
- if instance.remember then
- stamp = filename .. "--" .. instance.engine .. "--" .. instance.progname .. "--" .. instance.format
+ if instance.remember and not allresults then
+ stamp = filename .. "--" .. askedformat
if instance.found[stamp] then
if trace_locating then
- logs.report("fileio","remembering file '%s'",filename)
+ report_resolving("remembered file '%s'",filename)
end
+ resolvers.registerintrees(filename) -- for tracing used files
return instance.found[stamp]
end
end
- if not dangerous[instance.format or "?"] then
- if resolvers.isreadable.file(filename) then
+ if not dangerous[askedformat] then
+ if isreadable(filename) then
if trace_detail then
- logs.report("fileio","file '%s' found directly",filename)
+ report_resolving("file '%s' found directly",filename)
+ end
+ if stamp then
+ instance.found[stamp] = { filename }
end
- instance.found[stamp] = { filename }
return { filename }
end
end
if find(filename,'%*') then
if trace_locating then
- logs.report("fileio","checking wildcard '%s'", filename)
+ report_resolving("checking wildcard '%s'", filename)
end
- result = resolvers.find_wildcard_files(filename)
+        result = resolvers.findwildcardfiles(filename) -- we can use the local
elseif file.is_qualified_path(filename) then
- if resolvers.isreadable.file(filename) then
+ if isreadable(filename) then
if trace_locating then
- logs.report("fileio","qualified name '%s'", filename)
+ report_resolving("qualified name '%s'", filename)
end
result = { filename }
else
- local forcedname, ok, suffix = "", false, file.extname(filename)
+ local forcedname, ok, suffix = "", false, fileextname(filename)
if suffix == "" then -- why
- if instance.format == "" then
- forcedname = filename .. ".tex"
- if resolvers.isreadable.file(forcedname) then
- if trace_locating then
- logs.report("fileio","no suffix, forcing standard filetype 'tex'")
- end
- result, ok = { forcedname }, true
- end
- else
- local suffixes = resolvers.suffixes_of_format(instance.format)
- for _, s in next, suffixes do
+ local format_suffixes = askedformat == "" and resolvers.defaultsuffixes or suffixes[askedformat]
+ if format_suffixes then
+ for i=1,#format_suffixes do
+ local s = format_suffixes[i]
forcedname = filename .. "." .. s
- if resolvers.isreadable.file(forcedname) then
+ if isreadable(forcedname) then
if trace_locating then
- logs.report("fileio","no suffix, forcing format filetype '%s'", s)
+ report_resolving("no suffix, forcing format filetype '%s'", s)
end
result, ok = { forcedname }, true
break
@@ -1631,23 +886,24 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan
if not ok and suffix ~= "" then
-- try to find in tree (no suffix manipulation), here we search for the
-- matching last part of the name
- local basename = file.basename(filename)
- local pattern = gsub(filename .. "$","([%.%-])","%%%1")
- local savedformat = instance.format
+ local basename = filebasename(filename)
+ local pattern = lpegmatch(preparetreepattern,filename)
+ -- messy .. to be sorted out
+ local savedformat = askedformat
local format = savedformat or ""
if format == "" then
- instance.format = resolvers.format_of_suffix(suffix)
+ askedformat = resolvers.formatofsuffix(suffix)
end
if not format then
- instance.format = "othertextfiles" -- kind of everything, maybe texinput is better
+ askedformat = "othertextfiles" -- kind of everything, maybe texinput is better
end
--
if basename ~= filename then
- local resolved = collect_instance_files(basename)
- if #result == 0 then
+ local resolved = collect_instance_files(basename,askedformat,allresults)
+            if #resolved == 0 then -- was #result (always empty here); cf. the disabled rewrite below
local lowered = lower(basename)
if filename ~= lowered then
- resolved = collect_instance_files(lowered)
+ resolved = collect_instance_files(lowered,askedformat,allresults)
end
end
resolvers.format = savedformat
@@ -1672,57 +928,60 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan
-- end
end
if not ok and trace_locating then
- logs.report("fileio","qualified name '%s'", filename)
+ report_resolving("qualified name '%s'", filename)
end
end
else
-- search spec
- local filetype, extra, done, wantedfiles, ext = '', nil, false, { }, file.extname(filename)
- if ext == "" then
- if not instance.force_suffixes then
- wantedfiles[#wantedfiles+1] = filename
- end
- else
- wantedfiles[#wantedfiles+1] = filename
- end
- if instance.format == "" then
- if ext == "" then
- local forcedname = filename .. '.tex'
- wantedfiles[#wantedfiles+1] = forcedname
- filetype = resolvers.format_of_suffix(forcedname)
- if trace_locating then
- logs.report("fileio","forcing filetype '%s'",filetype)
+ local filetype, done, wantedfiles, ext = '', false, { }, fileextname(filename)
+ -- -- tricky as filename can be bla.1.2.3
+ -- if not suffixmap[ext] then --- probably needs to be done elsewhere too
+ -- wantedfiles[#wantedfiles+1] = filename
+ -- end
+ wantedfiles[#wantedfiles+1] = filename
+ if askedformat == "" then
+ if ext == "" or not suffixmap[ext] then
+ local defaultsuffixes = resolvers.defaultsuffixes
+ for i=1,#defaultsuffixes do
+ local forcedname = filename .. '.' .. defaultsuffixes[i]
+ wantedfiles[#wantedfiles+1] = forcedname
+ filetype = resolvers.formatofsuffix(forcedname)
+ if trace_locating then
+ report_resolving("forcing filetype '%s'",filetype)
+ end
end
else
- filetype = resolvers.format_of_suffix(filename)
+ filetype = resolvers.formatofsuffix(filename)
if trace_locating then
- logs.report("fileio","using suffix based filetype '%s'",filetype)
+ report_resolving("using suffix based filetype '%s'",filetype)
end
end
else
- if ext == "" then
- local suffixes = resolvers.suffixes_of_format(instance.format)
- for _, s in next, suffixes do
- wantedfiles[#wantedfiles+1] = filename .. "." .. s
+ if ext == "" or not suffixmap[ext] then
+ local format_suffixes = suffixes[askedformat]
+ if format_suffixes then
+ for i=1,#format_suffixes do
+ wantedfiles[#wantedfiles+1] = filename .. "." .. format_suffixes[i]
+ end
end
end
- filetype = instance.format
+ filetype = askedformat
if trace_locating then
- logs.report("fileio","using given filetype '%s'",filetype)
+ report_resolving("using given filetype '%s'",filetype)
end
end
- local typespec = resolvers.variable_of_format(filetype)
- local pathlist = resolvers.expanded_path_list(typespec)
+ local typespec = resolvers.variableofformat(filetype)
+ local pathlist = resolvers.expandedpathlist(typespec)
if not pathlist or #pathlist == 0 then
-- no pathlist, access check only / todo == wildcard
if trace_detail then
- logs.report("fileio","checking filename '%s', filetype '%s', wanted files '%s'",filename, filetype or '?',concat(wantedfiles," | "))
+ report_resolving("checking filename '%s', filetype '%s', wanted files '%s'",filename, filetype or '?',concat(wantedfiles," | "))
end
for k=1,#wantedfiles do
local fname = wantedfiles[k]
- if fname and resolvers.isreadable.file(fname) then
+ if fname and isreadable(fname) then
filename, done = fname, true
- result[#result+1] = file.join('.',fname)
+ result[#result+1] = filejoin('.',fname)
break
end
end
@@ -1730,8 +989,8 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan
local filelist = collect_files(wantedfiles)
local fl = filelist and filelist[1]
if fl then
- filename = fl[3]
- result[#result+1] = filename
+ filename = fl[3] -- not local?
+ result[#result+1] = resolvers.resolve(filename)
done = true
end
else
@@ -1740,76 +999,65 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan
local dirlist = { }
if filelist then
for i=1,#filelist do
- dirlist[i] = file.dirname(filelist[i][2]) .. "/"
+ dirlist[i] = filedirname(filelist[i][3]) .. "/" -- was [2] .. gamble
end
end
if trace_detail then
- logs.report("fileio","checking filename '%s'",filename)
+ report_resolving("checking filename '%s'",filename)
end
- -- a bit messy ... esp the doscan setting here
- local doscan
for k=1,#pathlist do
local path = pathlist[k]
- if find(path,"^!!") then doscan = false else doscan = true end
- local pathname = gsub(path,"^!+", '')
+ local pathname = lpegmatch(inhibitstripper,path)
+ local doscan = path == pathname -- no ^!!
done = false
-- using file list
if filelist then
- local expression
-- compare list entries with permitted pattern -- /xx /xx//
- if not find(pathname,"/$") then
- expression = pathname .. "/"
- else
- expression = pathname
- end
- expression = gsub(expression,"([%-%.])","%%%1") -- this also influences
- expression = gsub(expression,"//+$", '/.*') -- later usage of pathname
- expression = gsub(expression,"//", '/.-/') -- not ok for /// but harmless
- expression = "^" .. expression .. "$"
+ local expression = makepathexpression(pathname)
if trace_detail then
- logs.report("fileio","using pattern '%s' for path '%s'",expression,pathname)
+ report_resolving("using pattern '%s' for path '%s'",expression,pathname)
end
for k=1,#filelist do
local fl = filelist[k]
local f = fl[2]
local d = dirlist[k]
if find(d,expression) then
- --- todo, test for readable
- result[#result+1] = fl[3]
- resolvers.register_in_trees(f) -- for tracing used files
+ -- todo, test for readable
+ result[#result+1] = resolvers.resolve(fl[3]) -- no shortcut
done = true
- if instance.allresults then
+ if allresults then
if trace_detail then
- logs.report("fileio","match in hash for file '%s' on path '%s', continue scanning",f,d)
+ report_resolving("match to '%s' in hash for file '%s' and path '%s', continue scanning",expression,f,d)
end
else
if trace_detail then
- logs.report("fileio","match in hash for file '%s' on path '%s', quit scanning",f,d)
+ report_resolving("match to '%s' in hash for file '%s' and path '%s', quit scanning",expression,f,d)
end
break
end
elseif trace_detail then
- logs.report("fileio","no match in hash for file '%s' on path '%s'",f,d)
+ report_resolving("no match to '%s' in hash for file '%s' and path '%s'",expression,f,d)
end
end
end
if not done and doscan then
-- check if on disk / unchecked / does not work at all / also zips
- if resolvers.splitmethod(pathname).scheme == 'file' then -- ?
+ local scheme = url.hasscheme(pathname)
+ if not scheme or scheme == "file" then
local pname = gsub(pathname,"%.%*$",'')
if not find(pname,"%*") then
local ppname = gsub(pname,"/+$","")
if can_be_dir(ppname) then
for k=1,#wantedfiles do
local w = wantedfiles[k]
- local fname = file.join(ppname,w)
- if resolvers.isreadable.file(fname) then
+ local fname = filejoin(ppname,w)
+ if isreadable(fname) then
if trace_detail then
- logs.report("fileio","found '%s' by scanning",fname)
+ report_resolving("found '%s' by scanning",fname)
end
result[#result+1] = fname
done = true
- if not instance.allresults then break end
+ if not allresults then break end
end
end
else
@@ -1819,56 +1067,357 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan
end
end
if not done and doscan then
- -- todo: slow path scanning
+ -- todo: slow path scanning ... although we now have tree:// supported in $TEXMF
end
- if done and not instance.allresults then break end
+ if done and not allresults then break end
end
end
end
for k=1,#result do
- result[k] = file.collapse_path(result[k])
+ local rk = collapsepath(result[k])
+ result[k] = rk
+ resolvers.registerintrees(rk) -- for tracing used files
end
- if instance.remember then
+ if stamp then
instance.found[stamp] = result
end
return result
end
-if not resolvers.concatinators then resolvers.concatinators = { } end
+-- -- -- begin of main file search routing -- -- --
-resolvers.concatinators.tex = file.join
-resolvers.concatinators.file = resolvers.concatinators.tex
+--~ local collect_instance_files
-function resolvers.find_files(filename,filetype,mustexist)
- if type(mustexist) == boolean then
- -- all set
- elseif type(filetype) == 'boolean' then
- filetype, mustexist = nil, false
- elseif type(filetype) ~= 'string' then
- filetype, mustexist = nil, false
- end
- instance.format = filetype or ''
- local result = collect_instance_files(filename)
+--~ local function find_direct(filename)
+--~ if not dangerous[askedformat] and isreadable(filename) then
+--~ if trace_detail then
+--~ report_resolving("file '%s' found directly",filename)
+--~ end
+--~ return { filename }
+--~ end
+--~ end
+
+--~ local function find_wildcard(filename)
+--~ if find(filename,'%*') then
+--~ if trace_locating then
+--~ report_resolving("checking wildcard '%s'", filename)
+--~ end
+--~ return resolvers.findwildcardfiles(filename) -- we can use the local
+--~ end
+--~ end
+
+--~ local function find_qualified(filename) -- this one will be split too
+--~ if not file.is_qualified_path(filename) then
+--~ return
+--~ end
+--~ if trace_locating then
+--~ report_resolving("checking qualified name '%s'", filename)
+--~ end
+--~ if isreadable(filename) then
+--~ if trace_detail then
+--~ report_resolving("qualified file '%s' found", filename)
+--~ end
+--~ return { filename }
+--~ else
+--~ if trace_detail then
+--~ report_resolving("locating qualified file '%s'", filename)
+--~ end
+--~ local forcedname, suffix = "", fileextname(filename)
+--~ if suffix == "" then -- why
+--~ local format_suffixes = askedformat == "" and resolvers.defaultsuffixes or suffixes[askedformat]
+--~ if format_suffixes then
+--~ for i=1,#format_suffixes do
+--~ local s = format_suffixes[i]
+--~ forcedname = filename .. "." .. s
+--~ if isreadable(forcedname) then
+--~ if trace_locating then
+--~ report_resolving("no suffix, forcing format filetype '%s'", s)
+--~ end
+--~ return { forcedname }
+--~ end
+--~ end
+--~ end
+--~ end
+--~ if suffix ~= "" then
+--~ -- try to find in tree (no suffix manipulation), here we search for the
+--~ -- matching last part of the name
+--~ local basename = filebasename(filename)
+--~ local pattern = lpegmatch(preparetreepattern,filename)
+--~ -- messy .. to be sorted out
+--~ local savedformat = askedformat
+--~ local format = savedformat or ""
+--~ if format == "" then
+--~ askedformat = resolvers.formatofsuffix(suffix)
+--~ end
+--~ if not format then
+--~ askedformat = "othertextfiles" -- kind of everything, maybe all
+--~ end
+--~ --
+--~ if basename ~= filename then
+--~ local resolved = collect_instance_files(basename,askedformat,allresults)
+--~ if #resolved == 0 then
+--~ local lowered = lower(basename)
+--~ if filename ~= lowered then
+--~ resolved = collect_instance_files(lowered,askedformat,allresults)
+--~ end
+--~ end
+--~ resolvers.format = savedformat
+--~ --
+--~ if #resolved > 0 then
+--~ local result = { }
+--~ for r=1,#resolved do
+--~ local rr = resolved[r]
+--~ if find(rr,pattern) then
+--~ result[#result+1] = rr
+--~ end
+--~ end
+--~ if #result > 0 then
+--~ return result
+--~ end
+--~ end
+--~ end
+--~ -- a real wildcard:
+--~ --
+--~ -- local filelist = collect_files({basename})
+--~ -- result = { }
+--~ -- for f=1,#filelist do
+--~ -- local ff = filelist[f][3] or ""
+--~ -- if find(ff,pattern) then
+--~ -- result[#result+1], ok = ff, true
+--~ -- end
+--~ -- end
+--~ -- if #result > 0 then
+--~ -- return result
+--~ -- end
+--~ end
+--~ end
+--~ end
+
+--~ local function find_analyze(filename,askedformat)
+--~ local filetype, wantedfiles, ext = '', { }, fileextname(filename)
+--~ -- too tricky as filename can be bla.1.2.3:
+--~ --
+--~ -- if not suffixmap[ext] then
+--~ -- wantedfiles[#wantedfiles+1] = filename
+--~ -- end
+--~ wantedfiles[#wantedfiles+1] = filename
+--~ if askedformat == "" then
+--~ if ext == "" or not suffixmap[ext] then
+--~ local defaultsuffixes = resolvers.defaultsuffixes
+--~ for i=1,#defaultsuffixes do
+--~ local forcedname = filename .. '.' .. defaultsuffixes[i]
+--~ wantedfiles[#wantedfiles+1] = forcedname
+--~ filetype = resolvers.formatofsuffix(forcedname)
+--~ if trace_locating then
+--~ report_resolving("forcing filetype '%s'",filetype)
+--~ end
+--~ end
+--~ else
+--~ filetype = resolvers.formatofsuffix(filename)
+--~ if trace_locating then
+--~ report_resolving("using suffix based filetype '%s'",filetype)
+--~ end
+--~ end
+--~ else
+--~ if ext == "" or not suffixmap[ext] then
+--~ local format_suffixes = suffixes[askedformat]
+--~ if format_suffixes then
+--~ for i=1,#format_suffixes do
+--~ wantedfiles[#wantedfiles+1] = filename .. "." .. format_suffixes[i]
+--~ end
+--~ end
+--~ end
+--~ filetype = askedformat
+--~ if trace_locating then
+--~ report_resolving("using given filetype '%s'",filetype)
+--~ end
+--~ end
+--~ return filetype, wantedfiles
+--~ end
+
+--~ local function find_intree(filename,filetype,wantedfiles)
+--~ local typespec = resolvers.variableofformat(filetype)
+--~ local pathlist = resolvers.expandedpathlist(typespec)
+--~ if pathlist and #pathlist > 0 then
+--~ -- list search
+--~ local filelist = collect_files(wantedfiles)
+--~ local dirlist = { }
+--~ if filelist then
+--~ for i=1,#filelist do
+--~ dirlist[i] = filedirname(filelist[i][3]) .. "/" -- was [2] .. gamble
+--~ end
+--~ end
+--~ if trace_detail then
+--~ report_resolving("checking filename '%s'",filename)
+--~ end
+--~ local result = { }
+--~ for k=1,#pathlist do
+--~ local path = pathlist[k]
+--~ local pathname = lpegmatch(inhibitstripper,path)
+--~ local doscan = path == pathname -- no ^!!
+--~ local done = false
+--~ -- using file list
+--~ if filelist then
+--~ -- compare list entries with permitted pattern -- /xx /xx//
+--~ local expression = makepathexpression(pathname)
+--~ if trace_detail then
+--~ report_resolving("using pattern '%s' for path '%s'",expression,pathname)
+--~ end
+--~ for k=1,#filelist do
+--~ local fl = filelist[k]
+--~ local f = fl[2]
+--~ local d = dirlist[k]
+--~ if find(d,expression) then
+--~ -- todo, test for readable
+--~ result[#result+1] = resolvers.resolve(fl[3]) -- no shortcut
+--~ done = true
+--~ if allresults then
+--~ if trace_detail then
+--~ report_resolving("match to '%s' in hash for file '%s' and path '%s', continue scanning",expression,f,d)
+--~ end
+--~ else
+--~ if trace_detail then
+--~ report_resolving("match to '%s' in hash for file '%s' and path '%s', quit scanning",expression,f,d)
+--~ end
+--~ break
+--~ end
+--~ elseif trace_detail then
+--~ report_resolving("no match to '%s' in hash for file '%s' and path '%s'",expression,f,d)
+--~ end
+--~ end
+--~ end
+--~ if not done and doscan then
+--~ -- check if on disk / unchecked / does not work at all / also zips
+--~ local scheme = url.hasscheme(pathname)
+--~ if not scheme or scheme == "file" then
+--~ local pname = gsub(pathname,"%.%*$",'')
+--~ if not find(pname,"%*") then
+--~ local ppname = gsub(pname,"/+$","")
+--~ if can_be_dir(ppname) then
+--~ for k=1,#wantedfiles do
+--~ local w = wantedfiles[k]
+--~ local fname = filejoin(ppname,w)
+--~ if isreadable(fname) then
+--~ if trace_detail then
+--~ report_resolving("found '%s' by scanning",fname)
+--~ end
+--~ result[#result+1] = fname
+--~ done = true
+--~ if not allresults then break end
+--~ end
+--~ end
+--~ else
+--~ -- no access needed for non existing path, speedup (esp in large tree with lots of fake)
+--~ end
+--~ end
+--~ end
+--~ end
+--~ if not done and doscan then
+--~ -- todo: slow path scanning ... although we now have tree:// supported in $TEXMF
+--~ end
+--~ if done and not allresults then
+--~ return #result > 0 and result
+--~ end
+--~ end
+--~ end
+--~ end
+
+--~ local function find_onpath(filename,filetype,wantedfiles)
+--~ local done = nil
+--~ if trace_detail then
+--~ report_resolving("checking filename '%s', filetype '%s', wanted files '%s'",filename, filetype or '?',concat(wantedfiles," | "))
+--~ end
+--~ for k=1,#wantedfiles do
+--~ local fname = wantedfiles[k]
+--~ if fname and isreadable(fname) then
+--~ filename, done = fname, true
+--~ result[#result+1] = filejoin('.',fname)
+--~ break
+--~ end
+--~ end
+--~ end
+
+--~ local function find_otherwise(filename,filetype,wantedfiles) -- other text files | any | whatever
+--~ local filelist = collect_files(wantedfiles)
+--~ local fl = filelist and filelist[1]
+--~ if fl then
+--~ return { resolvers.resolve(fl[3]) } -- filename
+--~ end
+--~ end
+
+--~ collect_instance_files = function(filename,askedformat,allresults) -- uses nested
+--~ local result, stamp, filetype, wantedfiles
+--~ askedformat = askedformat or ""
+--~ filename = collapsepath(filename)
+--~ if instance.remember and not allresults then
+--~ stamp = format("%s--%s", filename, askedformat)
+--~ result = stamp and instance.found[stamp]
+--~ if result then
+--~ if trace_locating then
+--~ report_resolving("remembered file '%s'",filename)
+--~ end
+--~ return result
+--~ end
+--~ end
+--~ result = find_direct (filename,stamp) or
+--~ find_wildcard (filename) or
+--~ find_qualified(filename)
+--~ if not result then
+--~ filetype, wantedfiles = find_analyze(filename,askedformat)
+--~ result = find_intree (filename,filetype,wantedfiles) or
+--~ find_onpath (filename,filetype,wantedfiles) or
+--~ find_otherwise(filename,filetype,wantedfiles)
+--~ end
+--~ if result then
+--~ for k=1,#result do
+--~ local rk = collapsepath(result[k])
+--~ result[k] = rk
+--~ resolvers.registerintrees(rk) -- for tracing used files
+--~ end
+--~ else
+--~ result = { } -- maybe false
+--~ end
+--~ if stamp then
+--~ if trace_locating then
+--~ report_resolving("remembering file '%s'",filename)
+--~ end
+--~ instance.found[stamp] = result
+--~ end
+--~ return result
+--~ end
+
+-- -- -- end of main file search routing -- -- --
+
+local function findfiles(filename,filetype,allresults)
+ local result = collect_instance_files(filename,filetype or "",allresults)
if #result == 0 then
local lowered = lower(filename)
if filename ~= lowered then
- return collect_instance_files(lowered)
+ return collect_instance_files(lowered,filetype or "",allresults)
end
end
- instance.format = ''
return result
end
-function resolvers.find_file(filename,filetype,mustexist)
- return (resolvers.find_files(filename,filetype,mustexist)[1] or "")
+function resolvers.findfiles(filename,filetype)
+ return findfiles(filename,filetype,true)
+end
+
+function resolvers.findfile(filename,filetype)
+ return findfiles(filename,filetype,false)[1] or ""
+end
+
+function resolvers.findpath(filename,filetype)
+ return filedirname(findfiles(filename,filetype,false)[1] or "")
end
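-- A minimal usage sketch of the three public finders above; the filenames and
-- the "tex" filetype are only examples, and findfile returns "" when nothing
-- matches.
local one   = resolvers.findfile ("context.mkiv")        -- first match or ""
local all   = resolvers.findfiles("texmf.cnf")           -- all matches (allresults)
local where = resolvers.findpath ("context.mkiv","tex")  -- directory of the first match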
-function resolvers.find_given_files(filename)
- local bname, result = file.basename(filename), { }
+local function findgivenfiles(filename,allresults)
+ local bname, result = filebasename(filename), { }
local hashes = instance.hashes
+ local noffound = 0
for k=1,#hashes do
local hash = hashes[k]
- local files = instance.files[hash.tag] or { }
+ local files = instance.files[hash.name] or { }
local blist = files[bname]
if not blist then
local rname = "remap:"..bname
@@ -1880,13 +1429,21 @@ function resolvers.find_given_files(filename)
end
if blist then
if type(blist) == 'string' then
- result[#result+1] = resolvers.concatinators[hash.type](hash.tag,blist,bname) or ""
- if not instance.allresults then break end
+ local found = methodhandler('concatinators',hash.type,hash.name,blist,bname) or ""
+ if found ~= "" then
+ noffound = noffound + 1
+ result[noffound] = resolvers.resolve(found)
+ if not allresults then break end
+ end
else
for kk=1,#blist do
local vv = blist[kk]
- result[#result+1] = resolvers.concatinators[hash.type](hash.tag,vv,bname) or ""
- if not instance.allresults then break end
+ local found = methodhandler('concatinators',hash.type,hash.name,vv,bname) or ""
+ if found ~= "" then
+ noffound = noffound + 1
+ result[noffound] = resolvers.resolve(found)
+ if not allresults then break end
+ end
end
end
end
@@ -1894,24 +1451,31 @@ function resolvers.find_given_files(filename)
return result
end
-function resolvers.find_given_file(filename)
- return (resolvers.find_given_files(filename)[1] or "")
+function resolvers.findgivenfiles(filename)
+ return findgivenfiles(filename,true)
+end
+
+function resolvers.findgivenfile(filename)
+ return findgivenfiles(filename,false)[1] or ""
end
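-- Usage sketch: the "given" finders only look up the basename in the hashed
-- file databases (plus the remap: entries), without suffix guessing or path
-- scanning; the name below is just an example.
local first = resolvers.findgivenfile ("lmroman10-regular.otf")  -- "" when unknown
local every = resolvers.findgivenfiles("lmroman10-regular.otf")  -- one entry per tree match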
-local function doit(path,blist,bname,tag,kind,result,allresults)
+local function doit(path,blist,bname,tag,variant,result,allresults)
local done = false
- if blist and kind then
+ if blist and variant then
+ local resolve = resolvers.resolve -- added
if type(blist) == 'string' then
-- make function and share code
if find(lower(blist),path) then
- result[#result+1] = resolvers.concatinators[kind](tag,blist,bname) or ""
+ local full = methodhandler('concatinators',variant,tag,blist,bname) or ""
+ result[#result+1] = resolve(full)
done = true
end
else
for kk=1,#blist do
local vv = blist[kk]
if find(lower(vv),path) then
- result[#result+1] = resolvers.concatinators[kind](tag,vv,bname) or ""
+ local full = methodhandler('concatinators',variant,tag,vv,bname) or ""
+ result[#result+1] = resolve(full)
done = true
if not allresults then break end
end
@@ -1921,30 +1485,38 @@ local function doit(path,blist,bname,tag,kind,result,allresults)
return done
end
-function resolvers.find_wildcard_files(filename) -- todo: remap:
- local result = { }
- local bname, dname = file.basename(filename), file.dirname(filename)
- local path = gsub(dname,"^*/","")
- path = gsub(path,"*",".*")
- path = gsub(path,"-","%%-")
- if dname == "" then
- path = ".*"
- end
- local name = bname
- name = gsub(name,"*",".*")
- name = gsub(name,"-","%%-")
- path = lower(path)
- name = lower(name)
- local files, allresults, done = instance.files, instance.allresults, false
+--~ local makewildcard = Cs(
+--~ (P("^")^0 * P("/") * P(-1) + P(-1)) /".*"
+--~ + (P("^")^0 * P("/") / "") * (P("*")/".*" + P("-")/"%%-" + P("?")/"."+ P("\\")/"/" + P(1))^0
+--~ )
+
+local makewildcard = Cs(
+ (P("^")^0 * P("/") * P(-1) + P(-1)) /".*"
+ + (P("^")^0 * P("/") / "")^0 * (P("*")/".*" + P("-")/"%%-" + P(".")/"%%." + P("?")/"."+ P("\\")/"/" + P(1))^0
+)
+
+function resolvers.wildcardpattern(pattern)
+ return lpegmatch(makewildcard,pattern) or pattern
+end
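-- Sketch of the wildcard conversion: '*' becomes '.*', '?' becomes '.', and
-- '.' as well as '-' are escaped, so the result is a lua pattern usable with
-- string.find; the input is only an example.
local pattern = resolvers.wildcardpattern("iwona-*.tfm")
-- pattern is now "iwona%-.*%.tfm"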
+
+local function findwildcardfiles(filename,allresults,result) -- todo: remap: and lpeg
+ result = result or { }
+--~ local path = lower(lpegmatch(makewildcard,filedirname (filename)))
+--~ local name = lower(lpegmatch(makewildcard,filebasename(filename)))
+ local base = filebasename(filename)
+ local dirn = filedirname(filename)
+ local path = lower(lpegmatch(makewildcard,dirn) or dirn)
+ local name = lower(lpegmatch(makewildcard,base) or base)
+ local files, done = instance.files, false
if find(name,"%*") then
local hashes = instance.hashes
for k=1,#hashes do
local hash = hashes[k]
- local tag, kind = hash.tag, hash.type
- for kk, hh in next, files[hash.tag] do
+ local hashname, hashtype = hash.name, hash.type
+ for kk, hh in next, files[hashname] do
if not find(kk,"^remap:") then
if find(lower(kk),name) then
- if doit(path,hh,kk,tag,kind,result,allresults) then done = true end
+ if doit(path,hh,kk,hashname,hashtype,result,allresults) then done = true end
if done and not allresults then break end
end
end
@@ -1954,8 +1526,8 @@ function resolvers.find_wildcard_files(filename) -- todo: remap:
local hashes = instance.hashes
for k=1,#hashes do
local hash = hashes[k]
- local tag, kind = hash.tag, hash.type
- if doit(path,files[tag][bname],bname,tag,kind,result,allresults) then done = true end
+ local hashname, hashtype = hash.name, hash.type
+ if doit(path,files[hashname][bname],bname,hashname,hashtype,result,allresults) then done = true end
if done and not allresults then break end
end
end
@@ -1964,8 +1536,12 @@ function resolvers.find_wildcard_files(filename) -- todo: remap:
return result
end
-function resolvers.find_wildcard_file(filename)
- return (resolvers.find_wildcard_files(filename)[1] or "")
+function resolvers.findwildcardfiles(filename,result)
+ return findwildcardfiles(filename,true,result)
+end
+
+function resolvers.findwildcardfile(filename)
+ return findwildcardfiles(filename,false)[1] or ""
end
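-- Usage sketch for the wildcard finders defined above; patterns are lowercased
-- and matched against the hashed databases, and the names here are examples only.
local first = resolvers.findwildcardfile ("*.mkiv")       -- first match or ""
local every = resolvers.findwildcardfiles("colo-*.mkiv")  -- all matches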
-- main user functions
@@ -1976,34 +1552,33 @@ end
function resolvers.load(option)
statistics.starttiming(instance)
- resolvers.resetconfig()
- resolvers.identify_cnf()
- resolvers.load_lua() -- will become the new method
- resolvers.expand_variables()
- resolvers.load_cnf() -- will be skipped when we have a lua file
- resolvers.expand_variables()
+ identify_configuration_files()
+ load_configuration_files()
if option ~= "nofiles" then
- resolvers.load_hash()
+ load_databases()
resolvers.automount()
end
statistics.stoptiming(instance)
+ local files = instance.files
+ return files and next(files) and true
end
-function resolvers.for_files(command, files, filetype, mustexist)
+local function report(str)
+ if trace_locating then
+ report_resolving(str) -- has already verbose
+ else
+ print(str)
+ end
+end
+
+function resolvers.dowithfilesandreport(command, files, ...) -- will move
if files and #files > 0 then
- local function report(str)
- if trace_locating then
- logs.report("fileio",str) -- has already verbose
- else
- print(str)
- end
- end
if trace_locating then
report('') -- ?
end
for f=1,#files do
local file = files[f]
- local result = command(file,filetype,mustexist)
+ local result = command(file,...)
if type(result) == 'string' then
report(result)
else
@@ -2015,21 +1590,16 @@ function resolvers.for_files(command, files, filetype, mustexist)
end
end
--- strtab
+-- obsolete
-resolvers.var_value = resolvers.variable -- output the value of variable $STRING.
-resolvers.expand_var = resolvers.expansion -- output variable expansion of STRING.
+-- resolvers.varvalue = resolvers.variable -- output the value of variable $STRING.
+-- resolvers.expandvar = resolvers.expansion -- output variable expansion of STRING.
-function resolvers.show_path(str) -- output search path for file type NAME
- return file.join_path(resolvers.expanded_path_list(resolvers.format_of_var(str)))
+function resolvers.showpath(str) -- output search path for file type NAME
+ return joinpath(resolvers.expandedpathlist(resolvers.formatofvariable(str)))
end
--- resolvers.find_file(filename)
--- resolvers.find_file(filename, filetype, mustexist)
--- resolvers.find_file(filename, mustexist)
--- resolvers.find_file(filename, filetype)
-
-function resolvers.register_file(files, name, path)
+function resolvers.registerfile(files, name, path)
if files[name] then
if type(files[name]) == 'string' then
files[name] = { files[name], path }
@@ -2041,129 +1611,94 @@ function resolvers.register_file(files, name, path)
end
end
-function resolvers.splitmethod(filename)
- if not filename then
- return { } -- safeguard
- elseif type(filename) == "table" then
- return filename -- already split
- elseif not find(filename,"://") then
- return { scheme="file", path = filename, original=filename } -- quick hack
- else
- return url.hashed(filename)
+function resolvers.dowithpath(name,func)
+ local pathlist = resolvers.expandedpathlist(name)
+ for i=1,#pathlist do
+ func("^"..resolvers.cleanpath(pathlist[i]))
end
end
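-- Usage sketch: dowithpath expands the given variable into its path list and
-- calls func once per cleaned path, each prefixed with "^"; TEXINPUTS is just
-- an example variable.
resolvers.dowithpath("TEXINPUTS", function(path)
    report_resolving("path entry: %s",path)
end)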
-function table.sequenced(t,sep) -- temp here
- local s = { }
- for k, v in next, t do -- indexed?
- s[#s+1] = k .. "=" .. tostring(v)
- end
- return concat(s, sep or " | ")
+function resolvers.dowithvariable(name,func)
+ func(expandedvariable(name))
end
-function resolvers.methodhandler(what, filename, filetype) -- ...
- filename = file.collapse_path(filename)
- local specification = (type(filename) == "string" and resolvers.splitmethod(filename)) or filename -- no or { }, let it bomb
- local scheme = specification.scheme
- if resolvers[what][scheme] then
- if trace_locating then
- logs.report("fileio","handler '%s' -> '%s' -> '%s'",specification.original,what,table.sequenced(specification))
+function resolvers.locateformat(name)
+ local barename = file.removesuffix(name) -- gsub(name,"%.%a+$","")
+ local fmtname = caches.getfirstreadablefile(barename..".fmt","formats") or ""
+ if fmtname == "" then
+ fmtname = resolvers.findfile(barename..".fmt")
+ fmtname = resolvers.cleanpath(fmtname)
+ end
+ if fmtname ~= "" then
+ local barename = file.removesuffix(fmtname)
+ local luaname, lucname, luiname = barename .. ".lua", barename .. ".luc", barename .. ".lui"
+ if lfs.isfile(luiname) then
+ return barename, luiname
+ elseif lfs.isfile(lucname) then
+ return barename, lucname
+ elseif lfs.isfile(luaname) then
+ return barename, luaname
end
- return resolvers[what][scheme](filename,filetype) -- todo: specification
- else
- return resolvers[what].tex(filename,filetype) -- todo: specification
end
+ return nil, nil
end
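-- Usage sketch: locateformat looks for a readable format file (cache first,
-- then the tree) and returns its barename plus the companion lua file,
-- preferring .lui over .luc over .lua; "cont-en" is only an example.
local barename, scriptname = resolvers.locateformat("cont-en")
if barename then
    -- barename .. ".fmt" and scriptname can be handed to the engine startup
end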
-function resolvers.clean_path(str)
- if str then
- str = gsub(str,"\\","/")
- str = gsub(str,"^!+","")
- str = gsub(str,"^~",resolvers.homedir)
- return str
+function resolvers.booleanvariable(str,default)
+ local b = resolvers.expansion(str)
+ if b == "" then
+ return default
else
- return nil
- end
-end
-
-function resolvers.do_with_path(name,func)
- local pathlist = resolvers.expanded_path_list(name)
- for i=1,#pathlist do
- func("^"..resolvers.clean_path(pathlist[i]))
+ b = toboolean(b)
+ return (b == nil and default) or b
end
end
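-- Usage sketch: booleanvariable expands a variable and coerces the result with
-- toboolean, falling back to the given default when the variable is unset or
-- not recognizable; the variable name is an example.
local purgecache = resolvers.booleanvariable("PURGECACHE",false)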
-function resolvers.do_with_var(name,func)
- func(expanded_var(name))
-end
-
-function resolvers.with_files(pattern,handle)
+function resolvers.dowithfilesintree(pattern,handle,before,after) -- will move, can be a nice iterator instead
+ local instance = resolvers.instance
local hashes = instance.hashes
for i=1,#hashes do
local hash = hashes[i]
- local blobpath = hash.tag
local blobtype = hash.type
+ local blobpath = hash.name
if blobpath then
+ if before then
+ before(blobtype,blobpath,pattern)
+ end
local files = instance.files[blobpath]
+ local total, checked, done = 0, 0, 0
if files then
for k,v in next, files do
+ total = total + 1
if find(k,"^remap:") then
k = files[k]
- v = files[k] -- chained
+ v = k -- files[k] -- chained
end
if find(k,pattern) then
if type(v) == "string" then
- handle(blobtype,blobpath,v,k)
+ checked = checked + 1
+ if handle(blobtype,blobpath,v,k) then
+ done = done + 1
+ end
else
- for _,vv in next, v do -- indexed
- handle(blobtype,blobpath,vv,k)
+ checked = checked + #v
+ for i=1,#v do
+ if handle(blobtype,blobpath,v[i],k) then
+ done = done + 1
+ end
end
end
end
end
end
+ if after then
+ after(blobtype,blobpath,pattern,total,checked,done)
+ end
end
end
end
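-- Usage sketch: dowithfilesintree walks all hashed trees, calls handle for
-- every filename that matches the (lua) pattern, and hands per-tree totals to
-- the optional before/after callbacks; the pattern and callbacks below are examples.
resolvers.dowithfilesintree("%.mkiv$",
    function(blobtype,blobpath,filepath,filename)
        report_resolving("%s : %s",blobpath,filename)
        return true -- counted as done
    end,
    nil,
    function(blobtype,blobpath,pattern,total,checked,done)
        report_resolving("%s : %s files, %s checked, %s done",blobpath,total,checked,done)
    end
)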
-function resolvers.locate_format(name)
- local barename, fmtname = gsub(name,"%.%a+$",""), ""
- if resolvers.usecache then
- local path = file.join(caches.setpath("formats")) -- maybe platform
- fmtname = file.join(path,barename..".fmt") or ""
- end
- if fmtname == "" then
- fmtname = resolvers.find_files(barename..".fmt")[1] or ""
- end
- fmtname = resolvers.clean_path(fmtname)
- if fmtname ~= "" then
- local barename = file.removesuffix(fmtname)
- local luaname, lucname, luiname = barename .. ".lua", barename .. ".luc", barename .. ".lui"
- if lfs.isfile(luiname) then
- return barename, luiname
- elseif lfs.isfile(lucname) then
- return barename, lucname
- elseif lfs.isfile(luaname) then
- return barename, luaname
- end
- end
- return nil, nil
-end
-
-function resolvers.boolean_variable(str,default)
- local b = resolvers.expansion(str)
- if b == "" then
- return default
- else
- b = toboolean(b)
- return (b == nil and default) or b
- end
-end
-
-texconfig.kpse_init = false
-
-kpse = { original = kpse } setmetatable(kpse, { __index = function(k,v) return resolvers[v] end } )
-
--- for a while
+resolvers.obsolete = resolvers.obsolete or { }
+local obsolete = resolvers.obsolete
-input = resolvers
+resolvers.find_file = resolvers.findfile obsolete.find_file = resolvers.findfile
+resolvers.find_files = resolvers.findfiles obsolete.find_files = resolvers.findfiles