This is a prelude to a more extensive logging module. We no longer
provide
The parser used here is inspired by the variant discussed in the lua book, but
handles comment and processing instructions, has a different structure, provides
parent access; a first version used different trickery but was less optimized, so we
went this route. First we had a find based parser, now we have an
Beware, the interface may change. For instance at, ns, tg, dt may get more verbose names. Once the code is stable we will also remove some tracing and optimize the code.
I might even decide to reimplement the parser using the latest
First a hack to enable namespace resolving. A namespace is characterized by
a
The next function associates a namespace prefix with an
The next function also registers a namespace, but this time we map a
given namespace prefix onto a registered one, using the given
Next we provide a way to turn an
A namespace in an element can be remapped onto the registered
one efficiently by using the
This version uses
Next comes the parser. The rather messy doctype definition comes in many
disguises, so it is no surprise that later on we have to dedicate quite some
The code may look a bit complex but this is mostly due to the fact that we resolve namespaces and attach metatables. There is only one public function:
An optional second boolean argument tells this function not to create a root element.
Valid entities are:
Packaging data in an xml like table is done with the following function. Maybe it will go away (when not used).
--ldx]]-- function xml.is_valid(root) return root and root.dt and root.dt[1] and type(root.dt[1]) == "table" and not root.dt[1].er end function xml.package(tag,attributes,data) local ns, tg = match(tag,"^(.-):?([^:]+)$") local t = { ns = ns, tg = tg, dt = data or "", at = attributes or {} } setmetatable(t, mt) return t end function xml.is_valid(root) return root and not root.error end xml.errorhandler = report --[[ldx--We cannot load an
When we inject new elements, we need to convert strings to valid trees, which is what the next function does.
--ldx]]-- local no_root = { no_root = true } function xml.toxml(data) if type(data) == "string" then local root = { xmlconvert(data,no_root) } return (#root > 1 and root) or root[1] else return data end end --[[ldx--For copying a tree we use a dedicated function instead of the generic table copier. Since we know what we're dealing with we can speed up things a bit. The second argument is not to be used!
--ldx]]-- local function copy(old,tables) if old then tables = tables or { } local new = { } if not tables[old] then tables[old] = new end for k,v in next, old do new[k] = (type(v) == "table" and (tables[v] or copy(v, tables))) or v end local mt = getmetatable(old) if mt then setmetatable(new,mt) end return new else return { } end end xml.copy = copy --[[ldx--In
At the cost of some 25% runtime overhead you can first convert the tree to a string and then handle the lot.
--ldx]]-- -- new experimental reorganized serialize local function verbose_element(e,handlers) local handle = handlers.handle local serialize = handlers.serialize local ens, etg, eat, edt, ern = e.ns, e.tg, e.at, e.dt, e.rn local ats = eat and next(eat) and { } if ats then for k,v in next, eat do ats[#ats+1] = format('%s=%q',k,v) end end if ern and trace_entities and ern ~= ens then ens = ern end if ens ~= "" then if edt and #edt > 0 then if ats then handle("<",ens,":",etg," ",concat(ats," "),">") else handle("<",ens,":",etg,">") end for i=1,#edt do local e = edt[i] if type(e) == "string" then handle(e) else serialize(e,handlers) end end handle("",ens,":",etg,">") else if ats then handle("<",ens,":",etg," ",concat(ats," "),"/>") else handle("<",ens,":",etg,"/>") end end else if edt and #edt > 0 then if ats then handle("<",etg," ",concat(ats," "),">") else handle("<",etg,">") end for i=1,#edt do local ei = edt[i] if type(ei) == "string" then handle(ei) else serialize(ei,handlers) end end handle("",etg,">") else if ats then handle("<",etg," ",concat(ats," "),"/>") else handle("<",etg,"/>") end end end end local function verbose_pi(e,handlers) handlers.handle("",e.dt[1],"?>") end local function verbose_comment(e,handlers) handlers.handle("") end local function verbose_cdata(e,handlers) handlers.handle("") end local function verbose_doctype(e,handlers) handlers.handle("") end local function verbose_root(e,handlers) handlers.serialize(e.dt,handlers) end local function verbose_text(e,handlers) handlers.handle(e) end local function verbose_document(e,handlers) local serialize = handlers.serialize local functions = handlers.functions for i=1,#e do local ei = e[i] if type(ei) == "string" then functions["@tx@"](ei,handlers) else serialize(ei,handlers) end end end local function serialize(e,handlers,...) local initialize = handlers.initialize local finalize = handlers.finalize local functions = handlers.functions if initialize then local state = initialize(...) 
if not state == true then return state end end local etg = e.tg if etg then (functions[etg] or functions["@el@"])(e,handlers) -- elseif type(e) == "string" then -- functions["@tx@"](e,handlers) else functions["@dc@"](e,handlers) -- dc ? end if finalize then return finalize() end end local function xserialize(e,handlers) local functions = handlers.functions local etg = e.tg if etg then (functions[etg] or functions["@el@"])(e,handlers) -- elseif type(e) == "string" then -- functions["@tx@"](e,handlers) else functions["@dc@"](e,handlers) end end local handlers = { } local function newhandlers(settings) local t = table.copy(handlers.verbose or { }) -- merge if settings then for k,v in next, settings do if type(v) == "table" then local tk = t[k] if not tk then tk = { } t[k] = tk end for kk,vv in next, v do tk[kk] = vv end else t[k] = v end end if settings.name then handlers[settings.name] = t end end utilities.storage.mark(t) return t end local nofunction = function() end function xml.sethandlersfunction(handler,name,fnc) handler.functions[name] = fnc or nofunction end function xml.gethandlersfunction(handler,name) return handler.functions[name] end function xml.gethandlers(name) return handlers[name] end newhandlers { name = "verbose", initialize = false, -- faster than nil and mt lookup finalize = false, -- faster than nil and mt lookup serialize = xserialize, handle = print, functions = { ["@dc@"] = verbose_document, ["@dt@"] = verbose_doctype, ["@rt@"] = verbose_root, ["@el@"] = verbose_element, ["@pi@"] = verbose_pi, ["@cm@"] = verbose_comment, ["@cd@"] = verbose_cdata, ["@tx@"] = verbose_text, } } --[[ldx--How you deal with saving data depends on your preferences. For a 40 MB database file the timing on a 2.3 Core Duo are as follows (time in seconds):
Beware, these were timings with the old routine, but measurements will not be that much different I guess.
--ldx]]-- -- maybe this will move to lxml-xml local result local xmlfilehandler = newhandlers { name = "file", initialize = function(name) result = io.open(name,"wb") return result end, finalize = function() result:close() return true end, handle = function(...) result:write(...) end, } -- no checking on writeability here but not faster either -- -- local xmlfilehandler = newhandlers { -- initialize = function(name) io.output(name,"wb") return true end, -- finalize = function() io.close() return true end, -- handle = io.write, -- } function xml.save(root,name) serialize(root,xmlfilehandler,name) end local result local xmlstringhandler = newhandlers { name = "string", initialize = function() result = { } return result end, finalize = function() return concat(result) end, handle = function(...) result[#result+1] = concat { ... } end } local function xmltostring(root) -- 25% overhead due to collecting if root then if type(root) == 'string' then return root else -- if next(root) then -- next is faster than type (and >0 test) return serialize(root,xmlstringhandler) or "" end end return "" end local function xmltext(root) -- inline return (root and xmltostring(root)) or "" end initialize_mt = function(root) -- redefinition mt = { __tostring = xmltext, __index = root } end xml.defaulthandlers = handlers xml.newhandlers = newhandlers xml.serialize = serialize xml.tostring = xmltostring --[[ldx--The next function operated on the content only and needs a handle function that accepts a string.
--ldx]]-- local function xmlstring(e,handle) if not handle or (e.special and e.tg ~= "@rt@") then -- nothing elseif e.tg then local edt = e.dt if edt then for i=1,#edt do xmlstring(edt[i],handle) end end else handle(e) end end xml.string = xmlstring --[[ldx--A few helpers:
--ldx]]-- function xml.settings(e) while e do local s = e.settings if s then return s else e = e.__p__ end end return nil end function xml.root(e) local r = e while e do e = e.__p__ if e then r = e end end return r end function xml.parent(root) return root.__p__ end function xml.body(root) return (root.ri and root.dt[root.ri]) or root -- not ok yet end function xml.name(root) if not root then return "" elseif root.ns == "" then return root.tg else return root.ns .. ":" .. root.tg end end --[[ldx--The next helper erases an element but keeps the table as it is, and since empty strings are not serialized (effectively) it does not harm. Copying the table would take more time. Usage:
--ldx]]--

-- Erase element content in place. When a key k is given only dt[k] is
-- blanked; otherwise every slot is blanked. Empty strings are effectively
-- not serialized, so this removes content without rebuilding the table.
function xml.erase(dt,k)
    if dt then
        if k then
            dt[k] = ""
        else
            for k=1,#dt do
                -- was: dt[1] = { "" } -- only slot 1 was overwritten (repeatedly,
                -- and with a table instead of the empty string used above)
                dt[k] = ""
            end
        end
    end
end

--[[ldx--The next helper assigns a tree (or string). Usage:
The next helper assigns a tree (or string). Usage:
This module can be used stand alone but also inside
If I can get in the mood I will make a variant that is XSLT compliant but I wonder if it makes sense.
--ldx]]-- --[[ldx--Especially the lpath code is experimental; we will support some of xpath, but
only things that make sense for us; as compensation it is possible to hook in your
own functions. Apart from preprocessing content for
We've now arrived at an interesting part: accessing the tree using a subset
of
This is the main filter function. It returns whatever is asked for.
--ldx]]-- function xml.filter(root,pattern) -- no longer funny attribute handling here return applylpath(root,pattern) end -- internal (parsed) expressions.child = function(e,pattern) return applylpath(e,pattern) -- todo: cache end expressions.count = function(e,pattern) local collected = applylpath(e,pattern) -- todo: cache return (collected and #collected) or 0 end -- external expressions.oneof = function(s,...) -- slow local t = {...} for i=1,#t do if s == t[i] then return true end end return false end expressions.error = function(str) xml.errorhandler("unknown function in lpath expression",tostring(str or "?")) return false end expressions.undefined = function(s) return s == nil end expressions.quit = function(s) if s or s == nil then quit_expression = true end return true end expressions.print = function(...) print(...) return true end expressions.contains = find expressions.find = find expressions.upper = upper expressions.lower = lower expressions.number = tonumber expressions.boolean = toboolean -- user interface local function traverse(root,pattern,handle) report_lpath("use 'xml.selection' instead for '%s'",pattern) local collected = applylpath(root,pattern) if collected then for c=1,#collected do local e = collected[c] local r = e.__p__ handle(r,r.dt,e.ni) end end end local function selection(root,pattern,handle) local collected = applylpath(root,pattern) if collected then if handle then for c=1,#collected do handle(collected[c]) end else return collected end end end xml.traverse = traverse -- old method, r, d, k xml.selection = selection -- new method, simple handle -- generic function finalizer (independant namespace) local function dofunction(collected,fnc) if collected then local f = functions[fnc] if f then for c=1,#collected do f(collected[c]) end else report_lpath("unknown function '%s'",fnc) end end end finalizers.xml["function"] = dofunction finalizers.tex["function"] = dofunction -- functions expressions.text = function(e,n) local rdt = 
e.__p__.dt return (rdt and rdt[n]) or "" end expressions.name = function(e,n) -- ns + tg local found = false n = tonumber(n) or 0 if n == 0 then found = type(e) == "table" and e elseif n < 0 then local d, k = e.__p__.dt, e.ni for i=k-1,1,-1 do local di = d[i] if type(di) == "table" then if n == -1 then found = di break else n = n + 1 end end end else local d, k = e.__p__.dt, e.ni for i=k+1,#d,1 do local di = d[i] if type(di) == "table" then if n == 1 then found = di break else n = n - 1 end end end end if found then local ns, tg = found.rn or found.ns or "", found.tg if ns ~= "" then return ns .. ":" .. tg else return tg end else return "" end end expressions.tag = function(e,n) -- only tg if not e then return "" else local found = false n = tonumber(n) or 0 if n == 0 then found = (type(e) == "table") and e -- seems to fail elseif n < 0 then local d, k = e.__p__.dt, e.ni for i=k-1,1,-1 do local di = d[i] if type(di) == "table" then if n == -1 then found = di break else n = n + 1 end end end else local d, k = e.__p__.dt, e.ni for i=k+1,#d,1 do local di = d[i] if type(di) == "table" then if n == 1 then found = di break else n = n - 1 end end end end return (found and found.tg) or "" end end --[[ldx--Often using an iterators looks nicer in the code than passing handler
functions. The
The following helper functions best belong to the
The following functions collect elements and texts.
--ldx]]-- -- are these still needed -> lxml-cmp.lua function xml.collect(root, pattern) return xmlapplylpath(root,pattern) end function xml.collecttexts(root, pattern, flatten) -- todo: variant with handle local collected = xmlapplylpath(root,pattern) if collected and flatten then local xmltostring = xml.tostring for c=1,#collected do collected[c] = xmltostring(collected[c].dt) end end return collected or { } end function xml.collect_tags(root, pattern, nonamespace) local collected = xmlapplylpath(root,pattern) if collected then local t, n = { }, 0 for c=1,#collected do local e = collected[c] local ns, tg = e.ns, e.tg n = n + 1 if nonamespace then t[n] = tg elseif ns == "" then t[n] = tg else t[n] = ns .. ":" .. tg end end return t end end --[[ldx--We've now arrived at the functions that manipulate the tree.
--ldx]]-- local no_root = { no_root = true } local function redo_ni(d) for k=1,#d do local dk = d[k] if type(dk) == "table" then dk.ni = k end end end local function xmltoelement(whatever,root) if not whatever then return nil end local element if type(whatever) == "string" then element = xmlinheritedconvert(whatever,root) else element = whatever -- we assume a table end if element.error then return whatever -- string end if element then end return element end xml.toelement = xmltoelement local function copiedelement(element,newparent) if type(element) == "string" then return element else element = xmlcopy(element).dt if newparent and type(element) == "table" then element.__p__ = newparent end return element end end function xml.delete(root,pattern) local collected = xmlapplylpath(root,pattern) if collected then for c=1,#collected do local e = collected[c] local p = e.__p__ if p then if trace_manipulations then report('deleting',pattern,c,e) end local d = p.dt remove(d,e.ni) redo_ni(d) -- can be made faster and inlined end end end end function xml.replace(root,pattern,whatever) local element = root and xmltoelement(whatever,root) local collected = element and xmlapplylpath(root,pattern) if collected then for c=1,#collected do local e = collected[c] local p = e.__p__ if p then if trace_manipulations then report('replacing',pattern,c,e) end local d = p.dt d[e.ni] = copiedelement(element,p) redo_ni(d) -- probably not needed end end end end local function wrap(e,wrapper) local t = { rn = e.rn, tg = e.tg, ns = e.ns, at = e.at, dt = e.dt, __p__ = e, } setmetatable(t,getmetatable(e)) e.rn = wrapper.rn or e.rn or "" e.tg = wrapper.tg or e.tg or "" e.ns = wrapper.ns or e.ns or "" e.at = fastcopy(wrapper.at) e.dt = { t } end function xml.wrap(root,pattern,whatever) if whatever then local wrapper = xmltoelement(whatever,root) local collected = xmlapplylpath(root,pattern) if collected then for c=1,#collected do local e = collected[c] if trace_manipulations then 
report('wrapping',pattern,c,e) end wrap(e,wrapper) end end else wrap(root,xmltoelement(pattern)) end end local function inject_element(root,pattern,whatever,prepend) local element = root and xmltoelement(whatever,root) local collected = element and xmlapplylpath(root,pattern) if collected then for c=1,#collected do local e = collected[c] local r = e.__p__ local d, k, rri = r.dt, e.ni, r.ri local edt = (rri and d[rri].dt) or (d and d[k] and d[k].dt) if edt then local be, af local cp = copiedelement(element,e) if prepend then be, af = cp, edt else be, af = edt, cp end local bn = #be for i=1,#af do bn = bn + 1 be[bn] = af[i] end if rri then r.dt[rri].dt = be else d[k].dt = be end redo_ni(d) end end end end local function insert_element(root,pattern,whatever,before) -- todo: element als functie local element = root and xmltoelement(whatever,root) local collected = element and xmlapplylpath(root,pattern) if collected then for c=1,#collected do local e = collected[c] local r = e.__p__ local d, k = r.dt, e.ni if not before then k = k + 1 end insert(d,k,copiedelement(element,r)) redo_ni(d) end end end xml.insert_element = insert_element xml.insertafter = insert_element xml.insertbefore = function(r,p,e) insert_element(r,p,e,true) end xml.injectafter = inject_element xml.injectbefore = function(r,p,e) inject_element(r,p,e,true) end local function include(xmldata,pattern,attribute,recursive,loaddata) -- parse="text" (default: xml), encoding="" (todo) -- attribute = attribute or 'href' pattern = pattern or 'include' loaddata = loaddata or io.loaddata local collected = xmlapplylpath(xmldata,pattern) if collected then for c=1,#collected do local ek = collected[c] local name = nil local ekdt = ek.dt local ekat = ek.at local epdt = ek.__p__.dt if not attribute or attribute == "" then name = (type(ekdt) == "table" and ekdt[1]) or ekdt -- check, probably always tab or str end if not name then for a in gmatch(attribute or "href","([^|]+)") do name = ekat[a] if name then break end 
end end local data = (name and name ~= "" and loaddata(name)) or "" if data == "" then epdt[ek.ni] = "" -- xml.empty(d,k) elseif ekat["parse"] == "text" then -- for the moment hard coded epdt[ek.ni] = xml.escaped(data) -- d[k] = xml.escaped(data) else local xi = xmlinheritedconvert(data,xmldata) if not xi then epdt[ek.ni] = "" -- xml.empty(d,k) else if recursive then include(xi,pattern,attribute,recursive,loaddata) end epdt[ek.ni] = xml.body(xi) -- xml.assign(d,k,xi) end end end end end xml.include = include local function stripelement(e,nolines,anywhere) local edt = e.dt if edt then if anywhere then local t, n = { }, 0 for e=1,#edt do local str = edt[e] if type(str) ~= "string" then n = n + 1 t[n] = str elseif str ~= "" then -- todo: lpeg for each case if nolines then str = gsub(str,"%s+"," ") end str = gsub(str,"^%s*(.-)%s*$","%1") if str ~= "" then n = n + 1 t[n] = str end end end e.dt = t else -- we can assume a regular sparse xml table with no successive strings -- otherwise we should use a while loop if #edt > 0 then -- strip front local str = edt[1] if type(str) ~= "string" then -- nothing elseif str == "" then remove(edt,1) else if nolines then str = gsub(str,"%s+"," ") end str = gsub(str,"^%s+","") if str == "" then remove(edt,1) else edt[1] = str end end end local nedt = #edt if nedt > 0 then -- strip end local str = edt[nedt] if type(str) ~= "string" then -- nothing elseif str == "" then remove(edt) else if nolines then str = gsub(str,"%s+"," ") end str = gsub(str,"%s+$","") if str == "" then remove(edt) else edt[nedt] = str end end end end end return e -- convenient end xml.stripelement = stripelement function xml.strip(root,pattern,nolines,anywhere) -- strips all leading and trailing spacing local collected = xmlapplylpath(root,pattern) -- beware, indices no longer are valid now if collected then for i=1,#collected do stripelement(collected[i],nolines,anywhere) end end end local function renamespace(root, oldspace, newspace) -- fast variant local ndt = 
#root.dt for i=1,ndt or 0 do local e = root[i] if type(e) == "table" then if e.ns == oldspace then e.ns = newspace if e.rn then e.rn = newspace end end local edt = e.dt if edt then renamespace(edt, oldspace, newspace) end end end end xml.renamespace = renamespace function xml.remaptag(root, pattern, newtg) local collected = xmlapplylpath(root,pattern) if collected then for c=1,#collected do collected[c].tg = newtg end end end function xml.remapnamespace(root, pattern, newns) local collected = xmlapplylpath(root,pattern) if collected then for c=1,#collected do collected[c].ns = newns end end end function xml.checknamespace(root, pattern, newns) local collected = xmlapplylpath(root,pattern) if collected then for c=1,#collected do local e = collected[c] if (not e.rn or e.rn == "") and e.ns == "" then e.rn = newns end end end end function xml.remapname(root, pattern, newtg, newns, newrn) local collected = xmlapplylpath(root,pattern) if collected then for c=1,#collected do local e = collected[c] e.tg, e.ns, e.rn = newtg, newns, newrn end end end --[[ldx--Here are a few synonyms.
--ldx]]--

-- A few synonyms; the long names are kept (and recorded in xml.obsolete)
-- for backward compatibility with older interfaces.

xml.all     = xml.each
xml.insert  = xml.insertafter
xml.inject  = xml.injectafter
xml.after   = xml.insertafter
xml.before  = xml.insertbefore
xml.process = xml.each

-- obsolete

xml.obsolete = xml.obsolete or { }
local obsolete = xml.obsolete

xml.strip_whitespace        = xml.strip             obsolete.strip_whitespace        = xml.strip
xml.collect_elements        = xml.collect           obsolete.collect_elements        = xml.collect
xml.delete_element          = xml.delete            obsolete.delete_element          = xml.delete
-- was: obsolete.replace_element = xml.replacet (typo, assigned nil)
xml.replace_element         = xml.replace           obsolete.replace_element         = xml.replace
xml.each_element            = xml.each              obsolete.each_element            = xml.each
xml.process_elements        = xml.process           obsolete.process_elements        = xml.process
xml.insert_element_after    = xml.insertafter       obsolete.insert_element_after    = xml.insertafter
xml.insert_element_before   = xml.insertbefore      obsolete.insert_element_before   = xml.insertbefore
xml.inject_element_after    = xml.injectafter       obsolete.inject_element_after    = xml.injectafter
xml.inject_element_before   = xml.injectbefore      obsolete.inject_element_before   = xml.injectbefore
xml.process_attributes      = xml.processattributes obsolete.process_attributes      = xml.processattributes
xml.collect_texts           = xml.collecttexts      obsolete.collect_texts           = xml.collecttexts
xml.inject_element          = xml.inject            obsolete.inject_element          = xml.inject
xml.remap_tag               = xml.remaptag          obsolete.remap_tag               = xml.remaptag
xml.remap_name              = xml.remapname         obsolete.remap_name              = xml.remapname
xml.remap_namespace         = xml.remapnamespace    obsolete.remap_namespace         = xml.remapnamespace

end -- of closure

do -- create closure to overcome 200 locals limit

if not modules then modules = { } end modules ['lxml-xml'] = {
    version   = 1.001,
    comment   = "this module is the basis for the lxml-* ones",
    author    = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
    copyright = "PRAGMA ADE / ConTeXt Development Team",
    license   = "see context related readme files"
}

local xml = xml

local finalizers  = xml.finalizers.xml
local xmlfilter   = xml.filter -- we could inline this one for speed
local xmltostring =
xml.tostring
local xmlserialize = xml.serialize
local xmlcollected = xml.collected

-- Finalizer helpers for lpath results: each takes the collected table
-- (possibly nil) produced by a filter and reduces it to a value.

local function first(collected) -- wrong ?
    return collected and collected[1]
end

local function last(collected)
    return collected and collected[#collected]
end

local function all(collected)
    return collected
end

local reverse = table.reversed

-- attribute value of the first hit (or nil)
local function attribute(collected,name)
    if collected and #collected > 0 then
        local at = collected[1].at
        return at and at[name]
    end
end

local function att(id,name)
    local at = id.at
    return at and at[name]
end

local function count(collected)
    return (collected and #collected) or 0
end

-- n > 0 counts from the front, n < 0 from the back, n == 0 returns the
-- match index of the first hit
local function position(collected,n)
    if collected then
        n = tonumber(n) or 0
        if n < 0 then
            return collected[#collected + n + 1]
        elseif n > 0 then
            return collected[n]
        else
            return collected[1].mi or 0
        end
    end
end

local function match(collected)
    return (collected and collected[1].mi) or 0 -- match
end

local function index(collected)
    if collected then
        return collected[1].ni
    end
end

local function attributes(collected,arguments)
    if collected then
        local at = collected[1].at
        if arguments then
            return at[arguments]
        elseif next(at) then
            return at -- all of them
        end
    end
end

-- look up an attribute on the first hit, walking up the parent chain
local function chainattribute(collected,arguments) -- todo: optional levels
    if collected then
        local e = collected[1]
        while e do
            local at = e.at
            if at then
                local a = at[arguments]
                if a then
                    return a
                end
            else
                break -- error
            end
            e = e.__p__
        end
    end
    return ""
end

local function raw(collected) -- hybrid
    if collected then
        local e = collected[1] or collected
        return (e and xmlserialize(e)) or "" -- only first as we cannot concat function
    else
        return ""
    end
end

local function text(collected) -- hybrid
    if collected then
        local e = collected[1] or collected
        return (e and xmltostring(e.dt)) or ""
    else
        return ""
    end
end

-- collect the dt of every hit
local function texts(collected)
    if collected then
        local t, n = { }, 0
        for c=1,#collected do
            -- was: local e = collection[c] -- undefined global, raised at runtime
            local e = collected[c]
            if e and e.dt then
                n = n + 1
                t[n] = e.dt
            end
        end
        return t
    end
end

local function
tag(collected,n) if collected then local c if n == 0 or not n then c = collected[1] elseif n > 1 then c = collected[n] else c = collected[#collected-n+1] end return c and c.tg end end local function name(collected,n) if collected then local c if n == 0 or not n then c = collected[1] elseif n > 1 then c = collected[n] else c = collected[#collected-n+1] end if c then if c.ns == "" then return c.tg else return c.ns .. ":" .. c.tg end end end end local function tags(collected,nonamespace) if collected then local t, n = { }, 0 for c=1,#collected do local e = collected[c] local ns, tg = e.ns, e.tg n = n + 1 if nonamespace or ns == "" then t[n] = tg else t[n] = ns .. ":" .. tg end end return t end end local function empty(collected) if collected then for c=1,#collected do local e = collected[c] if e then local edt = e.dt if edt then local n = #edt if n == 1 then local edk = edt[1] local typ = type(edk) if typ == "table" then return false elseif edk ~= "" then -- maybe an extra tester for spacing only return false end elseif n > 1 then return false end end end end end return true end finalizers.first = first finalizers.last = last finalizers.all = all finalizers.reverse = reverse finalizers.elements = all finalizers.default = all finalizers.attribute = attribute finalizers.att = att finalizers.count = count finalizers.position = position finalizers.match = match finalizers.index = index finalizers.attributes = attributes finalizers.chainattribute = chainattribute finalizers.text = text finalizers.texts = texts finalizers.tag = tag finalizers.name = name finalizers.tags = tags finalizers.empty = empty -- shortcuts -- we could support xmlfilter(id,pattern,first) function xml.first(id,pattern) return first(xmlfilter(id,pattern)) end function xml.last(id,pattern) return last(xmlfilter(id,pattern)) end function xml.count(id,pattern) return count(xmlfilter(id,pattern)) end function xml.attribute(id,pattern,a,default) return attribute(xmlfilter(id,pattern),a,default) end function 
xml.raw(id,pattern) if pattern then return raw(xmlfilter(id,pattern)) else return raw(id) end end function xml.text(id,pattern) if pattern then -- return text(xmlfilter(id,pattern)) local collected = xmlfilter(id,pattern) return (collected and xmltostring(collected[1].dt)) or "" elseif id then -- return text(id) return xmltostring(id.dt) or "" else return "" end end xml.content = text function xml.position(id,pattern,n) -- element return position(xmlfilter(id,pattern),n) end function xml.match(id,pattern) -- number return match(xmlfilter(id,pattern)) end function xml.empty(id,pattern) return empty(xmlfilter(id,pattern)) end xml.all = xml.filter xml.index = xml.position xml.found = xml.filter -- a nice one: local function totable(x) local t = { } for e in xmlcollected(x[1] or x,"/*") do t[e.tg] = xmltostring(e.dt) or "" end return next(t) and t or nil end xml.table = totable finalizers.table = totable end -- of closure do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['data-ini'] = { version = 1.001, comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files", } local gsub, find, gmatch = string.gsub, string.find, string.gmatch local concat = table.concat local next, type = next, type local filedirname, filebasename, fileextname, filejoin = file.dirname, file.basename, file.extname, file.join local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end) local trace_detail = false trackers.register("resolvers.details", function(v) trace_detail = v end) local trace_expansions = false trackers.register("resolvers.expansions", function(v) trace_expansions = v end) local report_initialization = logs.reporter("resolvers","initialization") local ostype, osname, ossetenv, osgetenv = os.type, os.name, os.setenv, os.getenv -- The code here used to be part of a 
data-res but for convenience -- we now split it over multiple files. As this file is now the -- starting point we introduce resolvers here. resolvers = resolvers or { } local resolvers = resolvers -- We don't want the kpse library to kick in. Also, we want to be able to -- execute programs. Control over execution is implemented later. texconfig.kpse_init = false texconfig.shell_escape = 't' kpse = { original = kpse } setmetatable(kpse, { __index = function(kp,name) report_initialization("fatal error: kpse library is accessed (key: %s)",name) os.exit() end } ) -- First we check a couple of environment variables. Some might be -- set already but we need then later on. We start with the system -- font path. do local osfontdir = osgetenv("OSFONTDIR") if osfontdir and osfontdir ~= "" then -- ok elseif osname == "windows" then ossetenv("OSFONTDIR","c:/windows/fonts//") elseif osname == "macosx" then ossetenv("OSFONTDIR","$HOME/Library/Fonts//;/Library/Fonts//;/System/Library/Fonts//") end end -- Next comes the user's home path. We need this as later on we have -- to replace ~ with its value. do local homedir = osgetenv(ostype == "windows" and 'USERPROFILE' or 'HOME') or '' if not homedir or homedir == "" then homedir = string.char(127) -- we need a value, later we wil trigger on it end homedir = file.collapsepath(homedir) ossetenv("HOME", homedir) -- can be used in unix cnf files ossetenv("USERPROFILE",homedir) -- can be used in windows cnf files environment.homedir = homedir end -- The following code sets the name of the own binary and its -- path. This is fallback code as we have os.selfdir now. 
do local args = environment.originalarguments or arg -- this needs a cleanup local ownbin = environment.ownbin or args[-2] or arg[-2] or args[-1] or arg[-1] or arg[0] or "luatex" local ownpath = environment.ownpath or os.selfdir ownbin = file.collapsepath(ownbin) ownpath = file.collapsepath(ownpath) if not ownpath or ownpath == "" or ownpath == "unset" then ownpath = args[-1] or arg[-1] ownpath = ownpath and filedirname(gsub(ownpath,"\\","/")) if not ownpath or ownpath == "" then ownpath = args[-0] or arg[-0] ownpath = ownpath and filedirname(gsub(ownpath,"\\","/")) end local binary = ownbin if not ownpath or ownpath == "" then ownpath = ownpath and filedirname(binary) end if not ownpath or ownpath == "" then if os.binsuffix ~= "" then binary = file.replacesuffix(binary,os.binsuffix) end local path = osgetenv("PATH") if path then for p in gmatch(path,"[^"..io.pathseparator.."]+") do local b = filejoin(p,binary) if lfs.isfile(b) then -- we assume that after changing to the path the currentdir function -- resolves to the real location and use this side effect here; this -- trick is needed because on the mac installations use symlinks in the -- path instead of real locations local olddir = lfs.currentdir() if lfs.chdir(p) then local pp = lfs.currentdir() if trace_locating and p ~= pp then report_initialization("following symlink '%s' to '%s'",p,pp) end ownpath = pp lfs.chdir(olddir) else if trace_locating then report_initialization("unable to check path '%s'",p) end ownpath = p end break end end end end if not ownpath or ownpath == "" then ownpath = "." report_initialization("forcing fallback ownpath .") elseif trace_locating then report_initialization("using ownpath '%s'",ownpath) end end environment.ownbin = ownbin environment.ownpath = ownpath end resolvers.ownpath = environment.ownpath function resolvers.getownpath() return environment.ownpath end -- The self variables permit us to use only a few (or even no) -- environment variables. 
do local ownpath = environment.ownpath or dir.current() if ownpath then ossetenv('SELFAUTOLOC', file.collapsepath(ownpath)) ossetenv('SELFAUTODIR', file.collapsepath(ownpath .. "/..")) ossetenv('SELFAUTOPARENT', file.collapsepath(ownpath .. "/../..")) else report_initialization("error: unable to locate ownpath") os.exit() end end -- The running os: -- todo: check is context sits here os.platform is more trustworthy -- that the bin check as mtx-update runs from another path local texos = environment.texos or osgetenv("TEXOS") local texmfos = environment.texmfos or osgetenv('SELFAUTODIR') if not texos or texos == "" then texos = file.basename(texmfos) end ossetenv('TEXMFOS', texmfos) -- full bin path ossetenv('TEXOS', texos) -- partial bin parent ossetenv('SELFAUTOSYSTEM',os.platform) -- bonus environment.texos = texos environment.texmfos = texmfos -- The current root: local texroot = environment.texroot or osgetenv("TEXROOT") if not texroot or texroot == "" then texroot = osgetenv('SELFAUTOPARENT') ossetenv('TEXROOT',texroot) end environment.texroot = file.collapsepath(texroot) -- Tracing. Todo ... 
function resolvers.settrace(n) -- no longer number but: 'locating' or 'detail' if n then trackers.disable("resolvers.*") trackers.enable("resolvers."..n) end end resolvers.settrace(osgetenv("MTX_INPUT_TRACE")) -- todo: -- if profiler and osgetenv("MTX_PROFILE_RUN") == "YES" then -- profiler.start("luatex-profile.log") -- end -- a forward definition if not resolvers.resolve then function resolvers.resolve (s) return s end function resolvers.unresolve(s) return s end function resolvers.repath (s) return s end end end -- of closure do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['data-exp'] = { version = 1.001, comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files", } local format, find, gmatch, lower = string.format, string.find, string.gmatch, string.lower local concat, sort = table.concat, table.sort local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns local Ct, Cs, Cc, P, C, S = lpeg.Ct, lpeg.Cs, lpeg.Cc, lpeg.P, lpeg.C, lpeg.S local type, next = type, next local ostype = os.type local collapsepath = file.collapsepath local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end) local trace_expansions = false trackers.register("resolvers.expansions", function(v) trace_expansions = v end) local report_expansions = logs.reporter("resolvers","expansions") local resolvers = resolvers -- As this bit of code is somewhat special it gets its own module. After -- all, when working on the main resolver code, I don't want to scroll -- past this every time. See data-obs.lua for the gsub variant. 
-- {a,b,c,d} -- a,b,c/{p,q,r},d -- a,b,c/{p,q,r}/d/{x,y,z}// -- a,b,c/{p,q/{x,y,z},r},d/{p,q,r} -- a,b,c/{p,q/{x,y,z},r},d/{p,q,r} -- a{b,c}{d,e}f -- {a,b,c,d} -- {a,b,c/{p,q,r},d} -- {a,b,c/{p,q,r}/d/{x,y,z}//} -- {a,b,c/{p,q/{x,y,z}},d/{p,q,r}} -- {a,b,c/{p,q/{x,y,z},w}v,d/{p,q,r}} -- {$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,.local,}/web2c} local function f_first(a,b) local t, n = { }, 0 for s in gmatch(b,"[^,]+") do n = n + 1 ; t[n] = a .. s end return concat(t,",") end local function f_second(a,b) local t, n = { }, 0 for s in gmatch(a,"[^,]+") do n = n + 1 ; t[n] = s .. b end return concat(t,",") end local function f_both(a,b) local t, n = { }, 0 for sa in gmatch(a,"[^,]+") do for sb in gmatch(b,"[^,]+") do n = n + 1 ; t[n] = sa .. sb end end return concat(t,",") end local left = P("{") local right = P("}") local var = P((1 - S("{}" ))^0) local set = P((1 - S("{},"))^0) local other = P(1) local l_first = Cs( ( Cc("{") * (C(set) * left * C(var) * right / f_first) * Cc("}") + other )^0 ) local l_second = Cs( ( Cc("{") * (left * C(var) * right * C(set) / f_second) * Cc("}") + other )^0 ) local l_both = Cs( ( Cc("{") * (left * C(var) * right * left * C(var) * right / f_both) * Cc("}") + other )^0 ) local l_rest = Cs( ( left * var * (left/"") * var * (right/"") * var * right + other )^0 ) local stripper_1 = lpeg.stripper ("{}@") local replacer_1 = lpeg.replacer { { ",}", ",@}" }, { "{,", "{@," }, } local function splitpathexpr(str, newlist, validate) -- I couldn't resist lpegging it (nice exercise). 
if trace_expansions then report_expansions("expanding variable '%s'",str) end local t, ok, done = newlist or { }, false, false local n = #t str = lpegmatch(replacer_1,str) repeat local old = str repeat local old = str ; str = lpegmatch(l_first, str) until old == str repeat local old = str ; str = lpegmatch(l_second,str) until old == str repeat local old = str ; str = lpegmatch(l_both, str) until old == str repeat local old = str ; str = lpegmatch(l_rest, str) until old == str until old == str -- or not find(str,"{") str = lpegmatch(stripper_1,str) if validate then for s in gmatch(str,"[^,]+") do s = validate(s) if s then n = n + 1 ; t[n] = s end end else for s in gmatch(str,"[^,]+") do n = n + 1 ; t[n] = s end end if trace_expansions then for k=1,#t do report_expansions("% 4i: %s",k,t[k]) end end return t end -- We could make the previous one public. local function validate(s) s = collapsepath(s) -- already keeps the // return s ~= "" and not find(s,"^!*unset/*$") and s end resolvers.validatedpath = validate -- keeps the trailing // function resolvers.expandedpathfromlist(pathlist) local newlist = { } for k=1,#pathlist do splitpathexpr(pathlist[k],newlist,validate) end return newlist end local cleanup = lpeg.replacer { { "!" , "" }, { "\\" , "/" }, } local homedir function resolvers.cleanpath(str) if not homedir then homedir = lpegmatch(cleanup,environment.homedir or "") if homedir == string.char(127) or homedir == "" or not lfs.isdir(homedir) then if trace_expansions then report_expansions("no home dir set, ignoring dependent paths") end function resolvers.cleanpath(str) if find(str,"~") then return "" -- special case else return str and lpegmatch(cleanup,str) end end else cleanup = lpeg.replacer { { "!" , "" }, { "\\" , "/" }, { "~" , homedir }, } function resolvers.cleanpath(str) return str and lpegmatch(cleanup,str) end end end return resolvers.cleanpath(str) end -- This one strips quotes and funny tokens. 
local expandhome = P("~") / "$HOME" -- environment.homedir local dodouble = P('"')/"" * (expandhome + (1 - P('"')))^0 * P('"')/"" local dosingle = P("'")/"" * (expandhome + (1 - P("'")))^0 * P("'")/"" local dostring = (expandhome + 1 )^0 local stripper = Cs( lpegpatterns.unspacer * (dosingle + dodouble + dostring) * lpegpatterns.unspacer ) function resolvers.checkedvariable(str) -- assumes str is a string return lpegmatch(stripper,str) or str end -- The path splitter: -- A config (optionally) has the paths split in tables. Internally -- we join them and split them after the expansion has taken place. This -- is more convenient. local cache = { } ---- splitter = Ct(lpeg.splitat(S(ostype == "windows" and ";" or ":;"))) -- maybe add , local splitter = Ct(lpeg.splitat(";")) -- as we move towards urls, prefixes and use tables we no longer do : local backslashswapper = lpeg.replacer("\\","/") local function splitconfigurationpath(str) -- beware, this can be either a path or a { specification } if str then local found = cache[str] if not found then if str == "" then found = { } else local split = lpegmatch(splitter,lpegmatch(backslashswapper,str)) -- can be combined found = { } local noffound = 0 for i=1,#split do local s = split[i] if not find(s,"^{*unset}*") then noffound = noffound + 1 found[noffound] = s end end if trace_expansions then report_expansions("splitting path specification '%s'",str) for k=1,noffound do report_expansions("% 4i: %s",k,found[k]) end end cache[str] = found end end return found end end resolvers.splitconfigurationpath = splitconfigurationpath function resolvers.splitpath(str) if type(str) == 'table' then return str else return splitconfigurationpath(str) end end function resolvers.joinpath(str) if type(str) == 'table' then return file.joinpath(str) else return str end end -- The next function scans directories and returns a hash where the -- entries are either strings or tables. -- starting with . or .. 
etc or funny char local weird = P(".")^1 + lpeg.anywhere(S("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t")) local attributes, directory = lfs.attributes, lfs.dir local function scan(files,spec,path,n,m,r) local full = (path == "" and spec) or (spec .. path .. '/') local dirs, nofdirs = { }, 0 for name in directory(full) do if not lpegmatch(weird,name) then local mode = attributes(full..name,'mode') if mode == 'file' then n = n + 1 local f = files[name] if f then if type(f) == 'string' then files[name] = { f, path } else f[#f+1] = path end else -- probably unique anyway files[name] = path local lower = lower(name) if name ~= lower then files["remap:"..lower] = name r = r + 1 end end elseif mode == 'directory' then m = m + 1 nofdirs = nofdirs + 1 if path ~= "" then dirs[nofdirs] = path..'/'..name else dirs[nofdirs] = name end end end end if nofdirs > 0 then sort(dirs) for i=1,nofdirs do files, n, m, r = scan(files,spec,dirs[i],n,m,r) end end return files, n, m, r end function resolvers.scanfiles(path,branch) if trace_locating then report_expansions("scanning path '%s', branch '%s'",path, branch or path) end local realpath = resolvers.resolve(path) -- no shortcut local files, n, m, r = scan({ },realpath .. 
'/',"",0,0,0) files.__path__ = path -- can be selfautoparent:texmf-whatever files.__files__ = n files.__directories__ = m files.__remappings__ = r if trace_locating then report_expansions("%s files found on %s directories with %s uppercase remappings",n,m,r) end return files end end -- of closure do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['data-env'] = { version = 1.001, comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files", } local lower, gsub = string.lower, string.gsub local resolvers = resolvers local allocate = utilities.storage.allocate local setmetatableindex = table.setmetatableindex local fileextname = file.extname local formats = allocate() local suffixes = allocate() local dangerous = allocate() local suffixmap = allocate() resolvers.formats = formats resolvers.suffixes = suffixes resolvers.dangerous = dangerous resolvers.suffixmap = suffixmap local relations = allocate { -- todo: handlers also here core = { ofm = { -- will become obsolete names = { "ofm", "omega font metric", "omega font metrics" }, variable = 'OFMFONTS', suffixes = { 'ofm', 'tfm' }, }, ovf = { -- will become obsolete names = { "ovf", "omega virtual font", "omega virtual fonts" }, variable = 'OVFFONTS', suffixes = { 'ovf', 'vf' }, }, tfm = { names = { "tfm", "tex font metric", "tex font metrics" }, variable = 'TFMFONTS', suffixes = { 'tfm' }, }, vf = { names = { "vf", "virtual font", "virtual fonts" }, variable = 'VFFONTS', suffixes = { 'vf' }, }, otf = { names = { "otf", "opentype", "opentype font", "opentype fonts"}, variable = 'OPENTYPEFONTS', suffixes = { 'otf' }, }, ttf = { names = { "ttf", "truetype", "truetype font", "truetype fonts", "truetype collection", "truetype collections", "truetype dictionary", "truetype dictionaries" }, variable = 'TTFONTS', suffixes = { 'ttf', 'ttc', 'dfont' }, }, afm = 
{ names = { "afm", "adobe font metric", "adobe font metrics" }, variable = "AFMFONTS", suffixes = { "afm" }, }, pfb = { names = { "pfb", "type1", "type 1", "type1 font", "type 1 font", "type1 fonts", "type 1 fonts" }, variable = 'T1FONTS', suffixes = { 'pfb', 'pfa' }, }, fea = { names = { "fea", "font feature", "font features", "font feature file", "font feature files" }, variable = 'FONTFEATURES', suffixes = { 'fea' }, }, cid = { names = { "cid", "cid map", "cid maps", "cid file", "cid files" }, variable = 'FONTCIDMAPS', suffixes = { 'cid', 'cidmap' }, }, fmt = { names = { "fmt", "format", "tex format" }, variable = 'TEXFORMATS', suffixes = { 'fmt' }, }, mem = { -- will become obsolete names = { 'mem', "metapost format" }, variable = 'MPMEMS', suffixes = { 'mem' }, }, mp = { names = { "mp" }, variable = 'MPINPUTS', suffixes = { 'mp' }, }, tex = { names = { "tex" }, variable = 'TEXINPUTS', suffixes = { 'tex', "mkiv", "mkiv", "mkii" }, }, icc = { names = { "icc", "icc profile", "icc profiles" }, variable = 'ICCPROFILES', suffixes = { 'icc' }, }, texmfscripts = { names = { "texmfscript", "texmfscripts", "script", "scripts" }, variable = 'TEXMFSCRIPTS', suffixes = { 'rb', 'pl', 'py' }, }, lua = { names = { "lua" }, variable = 'LUAINPUTS', suffixes = { 'lua', 'luc', 'tma', 'tmc' }, }, lib = { names = { "lib" }, variable = 'CLUAINPUTS', suffixes = os.libsuffix and { os.libsuffix } or { 'dll', 'so' }, }, bib = { names = { 'bib' }, suffixes = { 'bib' }, }, bst = { names = { 'bst' }, suffixes = { 'bst' }, }, fontconfig = { names = { 'fontconfig', 'fontconfig file', 'fontconfig files' }, variable = 'FONTCONFIG_PATH', }, }, obsolete = { enc = { names = { "enc", "enc files", "enc file", "encoding files", "encoding file" }, variable = 'ENCFONTS', suffixes = { 'enc' }, }, map = { names = { "map", "map files", "map file" }, variable = 'TEXFONTMAPS', suffixes = { 'map' }, }, lig = { names = { "lig files", "lig file", "ligature file", "ligature files" }, variable = 'LIGFONTS', 
suffixes = { 'lig' }, }, opl = { names = { "opl" }, variable = 'OPLFONTS', suffixes = { 'opl' }, }, ovp = { names = { "ovp" }, variable = 'OVPFONTS', suffixes = { 'ovp' }, }, }, kpse = { -- subset base = { names = { 'base', "metafont format" }, variable = 'MFBASES', suffixes = { 'base', 'bas' }, }, cmap = { names = { 'cmap', 'cmap files', 'cmap file' }, variable = 'CMAPFONTS', suffixes = { 'cmap' }, }, cnf = { names = { 'cnf' }, suffixes = { 'cnf' }, }, web = { names = { 'web' }, suffixes = { 'web', 'ch' } }, cweb = { names = { 'cweb' }, suffixes = { 'w', 'web', 'ch' }, }, gf = { names = { 'gf' }, suffixes = { 'This module deals with caching data. It sets up the paths and implements loaders and savers for tables. Best is to set the following variable. When not set, the usual paths will be checked. Personally I prefer the (users) temporary path.
TEXMFCACHE=$TMP;$TEMP;$TMPDIR;$TEMPDIR;$HOME;$TEXMFVAR;$VARTEXMF;.

Currently we do no locking when we write files. This is not a real problem because most caching involves fonts, and the chance of the same file being written at the same time is small. We also need to extend luatools with a recache feature.
--ldx]]-- local format, lower, gsub, concat = string.format, string.lower, string.gsub, table.concat local mkdirs, isdir = dir.mkdirs, lfs.isdir local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end) local trace_cache = false trackers.register("resolvers.cache", function(v) trace_cache = v end) local report_caches = logs.reporter("resolvers","caches") local report_resolvers = logs.reporter("resolvers","caching") local resolvers = resolvers -- intermezzo local directive_cleanup = false directives.register("system.compile.cleanup", function(v) directive_cleanup = v end) local directive_strip = true directives.register("system.compile.strip", function(v) directive_strip = v end) local compile = utilities.lua.compile function utilities.lua.compile(luafile,lucfile,cleanup,strip) if cleanup == nil then cleanup = directive_cleanup end if strip == nil then strip = directive_strip end return compile(luafile,lucfile,cleanup,strip) end -- end of intermezzo caches = caches or { } local caches = caches caches.base = caches.base or "luatex-cache" caches.more = caches.more or "context" caches.direct = false -- true is faster but may need huge amounts of memory caches.tree = false caches.force = true caches.ask = false caches.relocate = false caches.defaults = { "TMPDIR", "TEMPDIR", "TMP", "TEMP", "HOME", "HOMEPATH" } local writable, readables, usedreadables = nil, { }, { } -- we could use a metatable for writable and readable but not yet local function identify() -- Combining the loops makes it messy. First we check the format cache path -- and when the last component is not present we try to create it. 
local texmfcaches = resolvers.cleanpathlist("TEXMFCACHE") if texmfcaches then for k=1,#texmfcaches do local cachepath = texmfcaches[k] if cachepath ~= "" then cachepath = resolvers.resolve(cachepath) cachepath = resolvers.cleanpath(cachepath) cachepath = file.collapsepath(cachepath) local valid = isdir(cachepath) if valid then if file.is_readable(cachepath) then readables[#readables+1] = cachepath if not writable and file.is_writable(cachepath) then writable = cachepath end end elseif not writable and caches.force then local cacheparent = file.dirname(cachepath) if file.is_writable(cacheparent) then if not caches.ask or io.ask(format("\nShould I create the cache path %s?",cachepath), "no", { "yes", "no" }) == "yes" then mkdirs(cachepath) if isdir(cachepath) and file.is_writable(cachepath) then report_caches("created: %s",cachepath) writable = cachepath readables[#readables+1] = cachepath end end end end end end end -- As a last resort we check some temporary paths but this time we don't -- create them. local texmfcaches = caches.defaults if texmfcaches then for k=1,#texmfcaches do local cachepath = texmfcaches[k] cachepath = resolvers.expansion(cachepath) -- was getenv if cachepath ~= "" then cachepath = resolvers.resolve(cachepath) cachepath = resolvers.cleanpath(cachepath) local valid = isdir(cachepath) if valid and file.is_readable(cachepath) then if not writable and file.is_writable(cachepath) then readables[#readables+1] = cachepath writable = cachepath break end end end end end -- Some extra checking. If we have no writable or readable path then we simply -- quit. 
if not writable then report_caches("fatal error: there is no valid writable cache path defined") os.exit() elseif #readables == 0 then report_caches("fatal error: there is no valid readable cache path defined") os.exit() end -- why here writable = dir.expandname(resolvers.cleanpath(writable)) -- just in case -- moved here local base, more, tree = caches.base, caches.more, caches.tree or caches.treehash() -- we have only one writable tree if tree then caches.tree = tree writable = mkdirs(writable,base,more,tree) for i=1,#readables do readables[i] = file.join(readables[i],base,more,tree) end else writable = mkdirs(writable,base,more) for i=1,#readables do readables[i] = file.join(readables[i],base,more) end end -- end if trace_cache then for i=1,#readables do report_caches("using readable path '%s' (order %s)",readables[i],i) end report_caches("using writable path '%s'",writable) end identify = function() return writable, readables end return writable, readables end function caches.usedpaths() local writable, readables = identify() if #readables > 1 then local result = { } for i=1,#readables do local readable = readables[i] if usedreadables[i] or readable == writable then result[#result+1] = format("readable: '%s' (order %s)",readable,i) end end result[#result+1] = format("writable: '%s'",writable) return result else return writable end end function caches.configfiles() return table.concat(resolvers.instance.specification,";") end function caches.hashed(tree) tree = gsub(tree,"\\$","/") tree = gsub(tree,"/+$","") tree = lower(tree) local hash = md5.hex(tree) if trace_cache or trace_locating then report_caches("hashing tree %s, hash %s",tree,hash) end return hash end function caches.treehash() local tree = caches.configfiles() if not tree or tree == "" then return false else return caches.hashed(tree) end end local r_cache, w_cache = { }, { } -- normally w in in r but who cares local function getreadablepaths(...) 
-- we can optimize this as we have at most 2 tags local tags = { ... } local hash = concat(tags,"/") local done = r_cache[hash] if not done then local writable, readables = identify() -- exit if not found if #tags > 0 then done = { } for i=1,#readables do done[i] = file.join(readables[i],...) end else done = readables end r_cache[hash] = done end return done end local function getwritablepath(...) local tags = { ... } local hash = concat(tags,"/") local done = w_cache[hash] if not done then local writable, readables = identify() -- exit if not found if #tags > 0 then done = mkdirs(writable,...) else done = writable end w_cache[hash] = done end return done end caches.getreadablepaths = getreadablepaths caches.getwritablepath = getwritablepath function caches.getfirstreadablefile(filename,...) local rd = getreadablepaths(...) for i=1,#rd do local path = rd[i] local fullname = file.join(path,filename) if file.is_readable(fullname) then usedreadables[i] = true return fullname, path end end return caches.setfirstwritablefile(filename,...) end function caches.setfirstwritablefile(filename,...) local wr = getwritablepath(...) local fullname = file.join(wr,filename) return fullname, wr end function caches.define(category,subcategory) -- for old times sake return function() return getwritablepath(category,subcategory) end end function caches.setluanames(path,name) return path .. "/" .. name .. ".tma", path .. "/" .. name .. 
".tmc" end function caches.loaddata(readables,name) if type(readables) == "string" then readables = { readables } end for i=1,#readables do local path = readables[i] local tmaname, tmcname = caches.setluanames(path,name) local loader = loadfile(tmcname) or loadfile(tmaname) if loader then loader = loader() collectgarbage("step") return loader end end return false end function caches.is_writable(filepath,filename) local tmaname, tmcname = caches.setluanames(filepath,filename) return file.is_writable(tmaname) end local saveoptions = { compact = true } function caches.savedata(filepath,filename,data,raw) local tmaname, tmcname = caches.setluanames(filepath,filename) local reduce, simplify = true, true if raw then reduce, simplify = false, false end data.cache_uuid = os.uuid() if caches.direct then file.savedata(tmaname,table.serialize(data,true,saveoptions)) else table.tofile(tmaname,data,true,saveoptions) end utilities.lua.compile(tmaname,tmcname) end -- moved from data-res: local content_state = { } function caches.contentstate() return content_state or { } end function caches.loadcontent(cachename,dataname) local name = caches.hashed(cachename) local full, path = caches.getfirstreadablefile(name ..".lua","trees") local filename = file.join(path,name) local blob = loadfile(filename .. ".luc") or loadfile(filename .. 
".lua") if blob then local data = blob() if data and data.content then if data.type == dataname then if data.version == resolvers.cacheversion then content_state[#content_state+1] = data.uuid if trace_locating then report_resolvers("loading '%s' for '%s' from '%s'",dataname,cachename,filename) end return data.content else report_resolvers("skipping '%s' for '%s' from '%s' (version mismatch)",dataname,cachename,filename) end else report_resolvers("skipping '%s' for '%s' from '%s' (datatype mismatch)",dataname,cachename,filename) end elseif trace_locating then report_resolvers("skipping '%s' for '%s' from '%s' (no content)",dataname,cachename,filename) end elseif trace_locating then report_resolvers("skipping '%s' for '%s' from '%s' (invalid file)",dataname,cachename,filename) end end function caches.collapsecontent(content) for k, v in next, content do if type(v) == "table" and #v == 1 then content[k] = v[1] end end end function caches.savecontent(cachename,dataname,content) local name = caches.hashed(cachename) local full, path = caches.setfirstwritablefile(name ..".lua","trees") local filename = file.join(path,name) -- is full local luaname, lucname = filename .. ".lua", filename .. 
".luc" if trace_locating then report_resolvers("preparing '%s' for '%s'",dataname,cachename) end local data = { type = dataname, root = cachename, version = resolvers.cacheversion, date = os.date("%Y-%m-%d"), time = os.date("%H:%M:%S"), content = content, uuid = os.uuid(), } local ok = io.savedata(luaname,table.serialize(data,true)) if ok then if trace_locating then report_resolvers("category '%s', cachename '%s' saved in '%s'",dataname,cachename,luaname) end if utilities.lua.compile(luaname,lucname) then if trace_locating then report_resolvers("'%s' compiled to '%s'",dataname,lucname) end return true else if trace_locating then report_resolvers("compiling failed for '%s', deleting file '%s'",dataname,lucname) end os.remove(lucname) end elseif trace_locating then report_resolvers("unable to save '%s' in '%s' (access error)",dataname,luaname) end end end -- of closure do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['data-met'] = { version = 1.100, comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" } local find, format = string.find, string.format local sequenced = table.sequenced local addurlscheme, urlhashed = url.addscheme, url.hashed local trace_locating = false trackers.register("resolvers.locating", function(v) trace_methods = v end) trackers.register("resolvers.methods", function(v) trace_methods = v end) local report_methods = logs.reporter("resolvers","methods") local allocate = utilities.storage.allocate local resolvers = resolvers local registered = { } local function splitmethod(filename) -- todo: filetype in specification if not filename then return { scheme = "unknown", original = filename } end if type(filename) == "table" then return filename -- already split end filename = file.collapsepath(filename) if not find(filename,"://") then return { scheme = "file", path = 
filename, original = filename, filename = filename } end local specification = url.hashed(filename) if not specification.scheme or specification.scheme == "" then return { scheme = "file", path = filename, original = filename, filename = filename } else return specification end end resolvers.splitmethod = splitmethod -- bad name but ok -- the second argument is always analyzed (saves time later on) and the original -- gets passed as original but also as argument local function methodhandler(what,first,...) -- filename can be nil or false local method = registered[what] if method then local how, namespace = method.how, method.namespace if how == "uri" or how == "url" then local specification = splitmethod(first) local scheme = specification.scheme local resolver = namespace and namespace[scheme] if resolver then if trace_methods then report_methods("resolver: method=%s, how=%s, scheme=%s, argument=%s",what,how,scheme,first) end return resolver(specification,...) else resolver = namespace.default or namespace.file if resolver then if trace_methods then report_methods("resolver: method=%s, how=%s, default, argument=%s",what,how,first) end return resolver(specification,...) elseif trace_methods then report_methods("resolver: method=%s, how=%s, no handler",what,how) end end elseif how == "tag" then local resolver = namespace and namespace[first] if resolver then if trace_methods then report_methods("resolver: method=%s, how=%s, tag=%s",what,how,first) end return resolver(...) else resolver = namespace.default or namespace.file if resolver then if trace_methods then report_methods("resolver: method=%s, how=%s, default",what,how) end return resolver(...) 
elseif trace_methods then report_methods("resolver: method=%s, how=%s, unknown",what,how) end end end else report_methods("resolver: method=%s, unknown",what) end end resolvers.methodhandler = methodhandler function resolvers.registermethod(name,namespace,how) registered[name] = { how = how or "tag", namespace = namespace } namespace["byscheme"] = function(scheme,filename,...) if scheme == "file" then return methodhandler(name,filename,...) else return methodhandler(name,addurlscheme(filename,scheme),...) end end end local concatinators = allocate { notfound = file.join } -- concatinate paths local locators = allocate { notfound = function() end } -- locate databases local hashers = allocate { notfound = function() end } -- load databases local generators = allocate { notfound = function() end } -- generate databases resolvers.concatinators = concatinators resolvers.locators = locators resolvers.hashers = hashers resolvers.generators = generators local registermethod = resolvers.registermethod registermethod("concatinators",concatinators,"tag") registermethod("locators", locators, "uri") registermethod("hashers", hashers, "uri") registermethod("generators", generators, "uri") end -- of closure do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['data-res'] = { version = 1.001, comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files", } -- In practice we will work within one tds tree, but i want to keep -- the option open to build tools that look at multiple trees, which is -- why we keep the tree specific data in a table. We used to pass the -- instance but for practical purposes we now avoid this and use a -- instance variable. We always have one instance active (sort of global). 
-- todo: cache:/// home:/// local format, gsub, find, lower, upper, match, gmatch = string.format, string.gsub, string.find, string.lower, string.upper, string.match, string.gmatch local concat, insert, sortedkeys = table.concat, table.insert, table.sortedkeys local next, type, rawget = next, type, rawget local os = os local P, S, R, C, Cc, Cs, Ct, Carg = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.Cc, lpeg.Cs, lpeg.Ct, lpeg.Carg local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns local filedirname = file.dirname local filebasename = file.basename local fileextname = file.extname local filejoin = file.join local collapsepath = file.collapsepath local joinpath = file.joinpath local allocate = utilities.storage.allocate local setmetatableindex = table.setmetatableindex local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end) local trace_detail = false trackers.register("resolvers.details", function(v) trace_detail = v end) local trace_expansions = false trackers.register("resolvers.expansions", function(v) trace_expansions = v end) local report_resolving = logs.reporter("resolvers","resolving") local resolvers = resolvers local expandedpathfromlist = resolvers.expandedpathfromlist local checkedvariable = resolvers.checkedvariable local splitconfigurationpath = resolvers.splitconfigurationpath local methodhandler = resolvers.methodhandler local initializesetter = utilities.setters.initialize local ostype, osname, osenv, ossetenv, osgetenv = os.type, os.name, os.env, os.setenv, os.getenv resolvers.cacheversion = '1.0.1' resolvers.configbanner = '' resolvers.homedir = environment.homedir resolvers.criticalvars = allocate { "SELFAUTOLOC", "SELFAUTODIR", "SELFAUTOPARENT", "TEXMFCNF", "TEXMF", "TEXOS" } resolvers.luacnfname = 'texmfcnf.lua' resolvers.luacnfstate = "unknown" -- resolvers.luacnfspec = '{$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,}/web2c}' -- what a rubish path resolvers.luacnfspec = 
'selfautoparent:{/texmf{-local,}{,/web2c},}}' local unset_variable = "unset" local formats = resolvers.formats local suffixes = resolvers.suffixes local dangerous = resolvers.dangerous local suffixmap = resolvers.suffixmap resolvers.defaultsuffixes = { "tex" } -- "mkiv", "cld" -- too tricky resolvers.instance = resolvers.instance or nil -- the current one (slow access) local instance = resolvers.instance or nil -- the current one (fast access) -- An instance has an environment (coming from the outside, kept raw), variables -- (coming from the configuration file), and expansions (variables with nested -- variables replaced). One can push something into the outer environment and -- its internal copy, but only the later one will be the raw unprefixed variant. function resolvers.setenv(key,value,raw) if instance then -- this one will be consulted first when we stay inside -- the current environment instance.environment[key] = value -- we feed back into the environment, and as this is used -- by other applications (via os.execute) we need to make -- sure that prefixes are resolve ossetenv(key,raw and value or resolvers.resolve(value)) end end -- Beware we don't want empty here as this one can be called early on -- and therefore we use rawget. local function getenv(key) local value = rawget(instance.environment,key) if value and value ~= "" then return value else local e = osgetenv(key) return e ~= nil and e ~= "" and checkedvariable(e) or "" end end resolvers.getenv = getenv resolvers.env = getenv -- We are going to use some metatable trickery where we backtrack from -- expansion to variable to environment. 
-- Variable expansion: resolve "$VAR" / "${VAR}" references against the current
-- instance's expansions. NOTE(review): 'instance', 'lpegmatch', 'P/S/R/C/Cs/Cc',
-- 'allocate', 'setmetatableindex', 'osgetenv', 'checkedvariable', 'trace_locating',
-- 'trace_detail', 'report_resolving' etc. are file-level locals defined before
-- this chunk — not visible here.

local function resolve(k)
    return instance.expansions[k]
end

local dollarstripper   = lpeg.stripper("$")
local inhibitstripper  = P("!")^0 * Cs(P(1)^0)          -- strips the leading "!!" disk-only marker
local backslashswapper = lpeg.replacer("\\","/")

local somevariable   = P("$") / ""
local somekey        = C(R("az","AZ","09","__","--")^1)
local somethingelse  = P(";") * ((1-S("!{}/\\"))^1 * P(";") / "")
                     + P(";") * (P(";") / "")
                     + P(1)

local variableexpander = Cs( (somevariable * (somekey/resolve) + somethingelse)^1 )

local cleaner         = P("\\") / "/" + P(";") * S("!{}/\\")^0 * P(";")^1 / ";"
local variablecleaner = Cs((cleaner  + P(1))^0)

local somevariable = R("az","AZ","09","__","--")^1 / resolve  -- shadows the earlier one on purpose
local variable     = (P("$")/"") * (somevariable + (P("{")/"") * somevariable * (P("}")/""))

local variableresolver = Cs((variable + P(1))^0)

local function expandedvariable(var)
    return lpegmatch(variableexpander,var) or var
end

-- Create a fresh (not yet active) resolver instance. Environment, variables and
-- expansions are lazy tables: lookups fall back from expansion -> environment ->
-- configuration variables (in registration order) via metatable trickery.

function resolvers.newinstance() -- todo: all vars will become lowercase and alphanum only
    if trace_locating then
        report_resolving("creating instance")
    end
    local environment, variables, expansions, order = allocate(), allocate(), allocate(), allocate()
    local newinstance = {
        environment   = environment,
        variables     = variables,
        expansions    = expansions,
        order         = order,
        files         = allocate(),
        setups        = allocate(),
        found         = allocate(),
        foundintrees  = allocate(),
        hashes        = allocate(),
        hashed        = allocate(),
        specification = allocate(),
        lists         = allocate(),
        data          = allocate(), -- only for loading
        fakepaths     = allocate(),
        remember      = true,
        diskcache     = true,
        renewcache    = false,
        loaderror     = false,
        savelists     = true,
        pattern       = nil, -- lists
        force_suffixes = true,
    }
    -- a variable not set in any loaded configuration resolves to ""
    setmetatableindex(variables,function(t,k)
        local v
        for i=1,#order do
            v = order[i][k]
            if v ~= nil then
                t[k] = v
                return v
            end
        end
        if v == nil then
            v = ""
        end
        t[k] = v
        return v
    end)
    -- the real environment wins over configuration variables
    setmetatableindex(environment, function(t,k)
        local v = osgetenv(k)
        if v == nil then
            v = variables[k]
        end
        if v ~= nil then
            v = checkedvariable(v) or ""
        end
        v = resolvers.repath(v) -- for taco who has a : separated osfontdir
        t[k] = v
        return v
    end)
    -- expansions are environment values with $VARs substituted and cleaned up
    setmetatableindex(expansions, function(t,k)
        local v = environment[k]
        if type(v) == "string" then
            v = lpegmatch(variableresolver,v)
            v = lpegmatch(variablecleaner,v)
        end
        t[k] = v
        return v
    end)
    return newinstance
end

function resolvers.setinstance(someinstance) -- only one instance is active
    instance = someinstance
    resolvers.instance = someinstance
    return someinstance
end

function resolvers.reset()
    return resolvers.setinstance(resolvers.newinstance())
end

local function reset_hashes()
    instance.lists = { }
    instance.found = { }
end

local slash = P("/")

-- turns a path spec (with // globbing) into a lua find-pattern, cached below

local pathexpressionpattern = Cs (
    Cc("^") * (
        Cc("%") * S(".-")          -- escape magic pattern characters
      + slash^2 * P(-1) / "/.*"    -- trailing // : match any tail
      + slash^2 / "/.-/"           -- inner // : match any intermediate dirs
      + (1-slash) * P(-1) * Cc("/") -- ensure a trailing slash
      + P(1)
    )^1 * Cc("$") -- yes or no $
)

local cache = { }

local function makepathexpression(str)
    if str == "." then
        return "^%./$"
    else
        local c = cache[str]
        if not c then
            c = lpegmatch(pathexpressionpattern,str)
            cache[str] = c
        end
        return c
    end
end

-- report critical variables once, then turn itself into a no-op

local function reportcriticalvariables()
    if trace_locating then
        for i=1,#resolvers.criticalvars do
            local k = resolvers.criticalvars[i]
            local v = resolvers.getenv(k) or "unknown" -- this one will not resolve !
            report_resolving("variable '%s' set to '%s'",k,v)
        end
        report_resolving()
    end
    reportcriticalvariables = function() end
end

-- Locate the texmfcnf.lua files along TEXMFCNF (or the built-in default spec)
-- and record them in instance.specification.

local function identify_configuration_files()
    local specification = instance.specification
    if #specification == 0 then
        local cnfspec = getenv('TEXMFCNF')
        if cnfspec == "" then
            cnfspec = resolvers.luacnfspec
            resolvers.luacnfstate = "default"
        else
            resolvers.luacnfstate = "environment"
        end
        reportcriticalvariables()
        local cnfpaths = expandedpathfromlist(resolvers.splitpath(cnfspec))
        local luacnfname = resolvers.luacnfname
        for i=1,#cnfpaths do
            local filename = collapsepath(filejoin(cnfpaths[i],luacnfname))
            local realname = resolvers.resolve(filename)
            if lfs.isfile(realname) then
                specification[#specification+1] = filename
                if trace_locating then
                    report_resolving("found configuration file '%s'",realname)
                end
            elseif trace_locating then
                report_resolving("unknown configuration file '%s'",realname)
            end
        end
        if trace_locating then
            report_resolving()
        end
    elseif trace_locating then
        report_resolving("configuration files already identified")
    end
end

-- Load the identified configuration files; fills instance.setups and the
-- variable lookup order. A TEXMFCNF redefinition inside a default-located
-- configuration triggers a reload.

local function load_configuration_files()
    local specification = instance.specification
    if #specification > 0 then
        local luacnfname = resolvers.luacnfname
        for i=1,#specification do
            local filename = specification[i]
            local pathname = filedirname(filename)
            local filename = filejoin(pathname,luacnfname)
            local realname = resolvers.resolve(filename) -- no shortcut
            local blob = loadfile(realname)
            if blob then
                local setups = instance.setups
                local data = blob()
                data = data and data.content
                if data then
                    if trace_locating then
                        report_resolving("loading configuration file '%s'",filename)
                        report_resolving()
                    end
                    local variables = data.variables or { }
                    local warning = false
                    for k, v in next, data do
                        local variant = type(v)
                        if variant == "table" then
                            initializesetter(filename,k,v)
                        elseif variables[k] == nil then
                            if trace_locating and not warning then
                                report_resolving("variables like '%s' in configuration file '%s' should move to the 'variables' subtable",
                                    k,resolvers.resolve(filename))
                                warning = true
                            end
                            variables[k] = v
                        end
                    end
                    setups[pathname] = variables
                    if resolvers.luacnfstate == "default" then
                        -- the following code is not tested
                        local cnfspec = variables["TEXMFCNF"]
                        if cnfspec then
                            if trace_locating then
                                report_resolving("reloading configuration due to TEXMF redefinition")
                            end
                            -- we push the value into the main environment (osenv) so
                            -- that it takes precedence over the default one and therefore
                            -- also over following definitions
                            resolvers.setenv('TEXMFCNF',cnfspec) -- resolves prefixes
                            -- we now identify and load the specified configuration files
                            instance.specification = { }
                            identify_configuration_files()
                            load_configuration_files()
                            -- we prevent further overload of the configuration variable
                            resolvers.luacnfstate = "configuration"
                            -- we quit the outer loop
                            break
                        end
                    end
                else
                    if trace_locating then
                        report_resolving("skipping configuration file '%s' (no content)",filename)
                    end
                    setups[pathname] = { }
                    instance.loaderror = true
                end
            elseif trace_locating then
                report_resolving("skipping configuration file '%s' (no file)",filename)
            end
            instance.order[#instance.order+1] = instance.setups[pathname]
            if instance.loaderror then
                break
            end
        end
    elseif trace_locating then
        report_resolving("warning: no lua configuration files found")
    end
end

-- scheme magic ... database loading

local function load_file_databases()
    instance.loaderror, instance.files = false, allocate()
    if not instance.renewcache then
        local hashes = instance.hashes
        for k=1,#hashes do
            local hash = hashes[k]
            resolvers.hashers.byscheme(hash.type,hash.name)
            if instance.loaderror then break end
        end
    end
end

local function locate_file_databases()
    -- todo: cache:// and tree:// (runtime)
    local texmfpaths = resolvers.expandedpathlist('TEXMF')
    if #texmfpaths > 0 then
        for i=1,#texmfpaths do
            local path = collapsepath(texmfpaths[i])
            local stripped = lpegmatch(inhibitstripper,path) -- the !! thing
            if stripped ~= "" then
                local runtime = stripped == path
                path = resolvers.cleanpath(path)
                local spec = resolvers.splitmethod(stripped)
                if spec.scheme == "cache" or spec.scheme == "file" then
                    stripped = spec.path
                elseif runtime and (spec.noscheme or spec.scheme == "file") then
                    stripped = "tree:///" .. stripped
                end
                if trace_locating then
                    if runtime then
                        report_resolving("locating list of '%s' (runtime)",path)
                    else
                        report_resolving("locating list of '%s' (cached)",path)
                    end
                end
                methodhandler('locators',stripped)
            end
        end
        if trace_locating then
            report_resolving()
        end
    elseif trace_locating then
        report_resolving("no texmf paths are defined (using TEXMF)")
    end
end

local function generate_file_databases()
    local hashes = instance.hashes
    for k=1,#hashes do
        local hash = hashes[k]
        methodhandler('generators',hash.name)
    end
    if trace_locating then
        report_resolving()
    end
end

local function save_file_databases() -- will become cachers
    for i=1,#instance.hashes do
        local hash = instance.hashes[i]
        local cachename = hash.name
        if hash.cache then
            local content = instance.files[cachename]
            caches.collapsecontent(content)
            if trace_locating then
                report_resolving("saving tree '%s'",cachename)
            end
            caches.savecontent(cachename,"files",content)
        elseif trace_locating then
            report_resolving("not saving runtime tree '%s'",cachename)
        end
    end
end

local function load_databases()
    locate_file_databases()
    if instance.diskcache and not instance.renewcache then
        load_file_databases()
        if instance.loaderror then
            generate_file_databases()
            save_file_databases()
        end
    else
        generate_file_databases()
        if instance.renewcache then
            save_file_databases()
        end
    end
end

function resolvers.appendhash(type,name,cache)
    -- safeguard ... tricky as it's actually a bug when seen twice
    if not instance.hashed[name] then
        if trace_locating then
            report_resolving("hash '%s' appended",name)
        end
        insert(instance.hashes, { type = type, name = name, cache = cache } )
        instance.hashed[name] = cache
    end
end

function resolvers.prependhash(type,name,cache)
    -- safeguard ... tricky as it's actually a bug when seen twice
    if not instance.hashed[name] then
        if trace_locating then
            report_resolving("hash '%s' prepended",name)
        end
        insert(instance.hashes, 1, { type = type, name = name, cache = cache } )
        instance.hashed[name] = cache
    end
end

function resolvers.extendtexmfvariable(specification) -- crap, we could better prepend the hash
    local t = resolvers.splitpath(getenv('TEXMF'))
    insert(t,1,specification)
    local newspec = concat(t,";")
    if instance.environment["TEXMF"] then
        instance.environment["TEXMF"] = newspec
    elseif instance.variables["TEXMF"] then
        instance.variables["TEXMF"] = newspec
    else
        -- weird
    end
    reset_hashes()
end

-- split each expansion on the configuration separator, removing duplicates;
-- a single-entry split collapses back to a plain string

function resolvers.splitexpansions()
    local ie = instance.expansions
    for k,v in next, ie do
        local t, tn, h, p = { }, 0, { }, splitconfigurationpath(v)
        for kk=1,#p do
            local vv = p[kk]
            if vv ~= "" and not h[vv] then
                tn = tn + 1
                t[tn] = vv
                h[vv] = true
            end
        end
        if #t > 1 then
            ie[k] = t
        else
            ie[k] = t[1]
        end
    end
end

-- end of split/join code

-- we used to have 'files' and 'configurations' so therefore the following
-- shared function

function resolvers.datastate()
    return caches.contentstate()
end

function resolvers.variable(name)
    local name = name and lpegmatch(dollarstripper,name)
    local result = name and instance.variables[name]
    return result ~= nil and result or ""
end

function resolvers.expansion(name)
    local name = name and lpegmatch(dollarstripper,name)
    local result = name and instance.expansions[name]
    return result ~= nil and result or ""
end

function resolvers.unexpandedpathlist(str)
    local pth = resolvers.variable(str)
    local lst = resolvers.splitpath(pth)
    return expandedpathfromlist(lst)
end

function resolvers.unexpandedpath(str)
    return joinpath(resolvers.unexpandedpathlist(str))
end

local done = { }

function resolvers.resetextrapath()
    local ep = instance.extra_paths
    if not ep then
        ep, done = { }, { }
        instance.extra_paths = ep
    elseif #ep > 0 then
        instance.lists, done = { }, { }
    end
end

-- Register extra search paths: the cross product of comma separated 'paths'
-- and 'subpaths'; duplicates (tracked in 'done') are skipped, the list cache
-- is invalidated when something was added.

function resolvers.registerextrapath(paths,subpaths)
    local ep = instance.extra_paths or { }
    local oldn = #ep
    local newn = oldn
    if paths and paths ~= "" then
        if subpaths and subpaths ~= "" then
            for p in gmatch(paths,"[^,]+") do
                -- we gmatch each step again, not that fast, but used seldom
                for s in gmatch(subpaths,"[^,]+") do
                    local ps = p .. "/" .. s
                    if not done[ps] then
                        newn = newn + 1
                        ep[newn] = resolvers.cleanpath(ps)
                        done[ps] = true
                    end
                end
            end
        else
            for p in gmatch(paths,"[^,]+") do
                if not done[p] then
                    newn = newn + 1
                    ep[newn] = resolvers.cleanpath(p)
                    done[p] = true
                end
            end
        end
    elseif subpaths and subpaths ~= "" then
        for i=1,oldn do
            -- we gmatch each step again, not that fast, but used seldom
            for s in gmatch(subpaths,"[^,]+") do
                local ps = ep[i] .. "/" .. s
                if not done[ps] then
                    newn = newn + 1
                    ep[newn] = resolvers.cleanpath(ps)
                    done[ps] = true
                end
            end
        end
    end
    if newn > 0 then
        instance.extra_paths = ep -- register paths
    end
    if newn > oldn then
        instance.lists = { } -- erase the cache
    end
end

-- merge extra paths into a path list, deduplicated; leading "." / "/" style
-- entries keep their position at the front

local function made_list(instance,list)
    local ep = instance.extra_paths
    if not ep or #ep == 0 then
        return list
    else
        local done, new, newn = { }, { }, 0
        -- honour . .. ../.. but only when at the start
        for k=1,#list do
            local v = list[k]
            if not done[v] then
                if find(v,"^[%.%/]$") then
                    done[v] = true
                    newn = newn + 1
                    new[newn] = v
                else
                    break
                end
            end
        end
        -- first the extra paths
        for k=1,#ep do
            local v = ep[k]
            if not done[v] then
                done[v] = true
                newn = newn + 1
                new[newn] = v
            end
        end
        -- next the formal paths
        for k=1,#list do
            local v = list[k]
            if not done[v] then
                done[v] = true
                newn = newn + 1
                new[newn] = v
            end
        end
        return new
    end
end

function resolvers.cleanpathlist(str)
    local t = resolvers.expandedpathlist(str)
    if t then
        for i=1,#t do
            t[i] = collapsepath(resolvers.cleanpath(t[i]))
        end
    end
    return t
end

function resolvers.expandpath(str)
    return joinpath(resolvers.expandedpathlist(str))
end

function resolvers.expandedpathlist(str)
    if not str then
        return { }
    elseif instance.savelists then
        str = lpegmatch(dollarstripper,str)
        if not instance.lists[str] then -- cached
            local lst = made_list(instance,resolvers.splitpath(resolvers.expansion(str)))
            instance.lists[str] = expandedpathfromlist(lst)
        end
        return instance.lists[str]
    else
        local lst = resolvers.splitpath(resolvers.expansion(str))
        return made_list(instance,expandedpathfromlist(lst))
    end
end

function resolvers.expandedpathlistfromvariable(str) -- brrr
    str = lpegmatch(dollarstripper,str)
    local tmp = resolvers.variableofformatorsuffix(str)
    return resolvers.expandedpathlist(tmp ~= "" and tmp or str)
end

function resolvers.expandpathfromvariable(str)
    return joinpath(resolvers.expandedpathlistfromvariable(str))
end

function resolvers.expandbraces(str) -- output variable and brace expansion of STRING
    local ori = resolvers.variable(str)
    local pth = expandedpathfromlist(resolvers.splitpath(ori))
    return joinpath(pth)
end

function resolvers.registerfilehash(name,content,someerror)
    if content then
        instance.files[name] = content
    else
        instance.files[name] = { }
        -- fixed: this used to test the undefined global 'somerror', so the
        -- loaderror flag was never propagated
        if someerror == true then -- can be unset
            instance.loaderror = someerror
        end
    end
end

local function isreadable(name)
    local readable = lfs.isfile(name) -- not file.is_readable(name) as it can be a dir
    if trace_detail then
        if readable then
            report_resolving("file '%s' is readable",name)
        else
            report_resolving("file '%s' is not readable", name)
        end
    end
    return readable
end

-- Collect { variant, search, result } triplets for the given basenames from
-- all registered hashes; a "name/name" request filters on the directory part.
--
-- name
-- name/name

local function collect_files(names)
    local filelist, noffiles = { }, 0
    for k=1,#names do
        local fname = names[k]
        if trace_detail then
            report_resolving("checking name '%s'",fname)
        end
        local bname = filebasename(fname)
        local dname = filedirname(fname)
        if dname == "" or find(dname,"^%.") then
            dname = false
        else
            dname = "/" .. dname .. "$"
        end
        local hashes = instance.hashes
        for h=1,#hashes do
            local hash = hashes[h]
            local blobpath = hash.name
            local files = blobpath and instance.files[blobpath]
            if files then
                if trace_detail then
                    report_resolving("deep checking '%s' (%s)",blobpath,bname)
                end
                local blobfile = files[bname]
                if not blobfile then
                    -- follow a "remap:" indirection to the real basename
                    local rname = "remap:"..bname
                    blobfile = files[rname]
                    if blobfile then
                        bname = files[rname]
                        blobfile = files[bname]
                    end
                end
                if blobfile then
                    local blobroot = files.__path__ or blobpath
                    if type(blobfile) == 'string' then
                        if not dname or find(blobfile,dname) then
                            local variant = hash.type
                            -- local search = filejoin(blobpath,blobfile,bname)
                            local search = filejoin(blobroot,blobfile,bname)
                            local result = methodhandler('concatinators',hash.type,blobroot,blobfile,bname)
                            if trace_detail then
                                report_resolving("match: variant '%s', search '%s', result '%s'",variant,search,result)
                            end
                            noffiles = noffiles + 1
                            filelist[noffiles] = { variant, search, result }
                        end
                    else
                        for kk=1,#blobfile do
                            local vv = blobfile[kk]
                            if not dname or find(vv,dname) then
                                local variant = hash.type
                                -- local search = filejoin(blobpath,vv,bname)
                                local search = filejoin(blobroot,vv,bname)
                                local result = methodhandler('concatinators',hash.type,blobroot,vv,bname)
                                if trace_detail then
                                    report_resolving("match: variant '%s', search '%s', result '%s'",variant,search,result)
                                end
                                noffiles = noffiles + 1
                                filelist[noffiles] = { variant, search, result }
                            end
                        end
                    end
                end
            elseif trace_locating then
                report_resolving("no match in '%s' (%s)",blobpath,bname)
            end
        end
    end
    return noffiles > 0 and filelist or nil
end

function resolvers.registerintrees(name)
    if not find(name,"^%.") then
        instance.foundintrees[name] = (instance.foundintrees[name] or 0) + 1 -- maybe only one
    end
end

-- split the next one up for readability (but this module needs a cleanup anyway)

local function can_be_dir(name) -- can become local
    local fakepaths = instance.fakepaths
    if not fakepaths[name] then
        if lfs.isdir(name) then
            fakepaths[name] = 1 -- directory
        else
            fakepaths[name] = 2 -- no directory
        end
    end
    return fakepaths[name] == 1
end

local preparetreepattern = Cs((P(".")/"%%." + P("-")/"%%-" + P(1))^0 * Cc("$"))

-- this one is split in smaller functions but it needs testing

-- The main lookup: tries (in order) the remembered cache, a direct readable
-- file, wildcard expansion, qualified-path lookup (with suffix forcing and a
-- tree search on the basename), and finally the regular path-list search with
-- optional disk scanning.

local function collect_instance_files(filename,askedformat,allresults) -- todo : plugin (scanners, checkers etc)
    local result = { }
    local stamp = nil
    askedformat = askedformat or ""
    filename = collapsepath(filename)
    -- speed up / beware: format problem
    if instance.remember and not allresults then
        stamp = filename .. "--" .. askedformat
        if instance.found[stamp] then
            if trace_locating then
                report_resolving("remembered file '%s'",filename)
            end
            resolvers.registerintrees(filename) -- for tracing used files
            return instance.found[stamp]
        end
    end
    if not dangerous[askedformat] then
        if isreadable(filename) then
            if trace_detail then
                report_resolving("file '%s' found directly",filename)
            end
            if stamp then
                instance.found[stamp] = { filename }
            end
            return { filename }
        end
    end
    if find(filename,'%*') then
        if trace_locating then
            report_resolving("checking wildcard '%s'", filename)
        end
        result = resolvers.findwildcardfiles(filename) -- we can use the local one
    elseif file.is_qualified_path(filename) then
        if isreadable(filename) then
            if trace_locating then
                report_resolving("qualified name '%s'", filename)
            end
            result = { filename }
        else
            local forcedname, ok, suffix = "", false, fileextname(filename)
            if suffix == "" then -- why
                local format_suffixes = askedformat == "" and resolvers.defaultsuffixes or suffixes[askedformat]
                if format_suffixes then
                    for i=1,#format_suffixes do
                        local s = format_suffixes[i]
                        forcedname = filename .. "." .. s
                        if isreadable(forcedname) then
                            if trace_locating then
                                report_resolving("no suffix, forcing format filetype '%s'", s)
                            end
                            result, ok = { forcedname }, true
                            break
                        end
                    end
                end
            end
            if not ok and suffix ~= "" then
                -- try to find in tree (no suffix manipulation), here we search for the
                -- matching last part of the name
                local basename = filebasename(filename)
                local pattern = lpegmatch(preparetreepattern,filename)
                -- messy .. to be sorted out
                local savedformat = askedformat
                local format = savedformat or ""
                if format == "" then
                    askedformat = resolvers.formatofsuffix(suffix)
                end
                if not format then
                    askedformat = "othertextfiles" -- kind of everything, maybe texinput is better
                end
                --
                if basename ~= filename then
                    local resolved = collect_instance_files(basename,askedformat,allresults)
                    if #result == 0 then -- shouldn't this be resolved ?
                        local lowered = lower(basename)
                        -- NOTE(review): comparing 'filename' (not 'basename') with the
                        -- lowered basename looks suspicious but is kept as-is
                        if filename ~= lowered then
                            resolved = collect_instance_files(lowered,askedformat,allresults)
                        end
                    end
                    -- fixed: restore the caller's format; this used to write the
                    -- stray field 'resolvers.format' instead
                    askedformat = savedformat
                    --
                    for r=1,#resolved do
                        local rr = resolved[r]
                        if find(rr,pattern) then
                            result[#result+1], ok = rr, true
                        end
                    end
                end
                -- a real wildcard:
                --
                -- if not ok then
                --     local filelist = collect_files({basename})
                --     for f=1,#filelist do
                --         local ff = filelist[f][3] or ""
                --         if find(ff,pattern) then
                --             result[#result+1], ok = ff, true
                --         end
                --     end
                -- end
            end
            if not ok and trace_locating then
                report_resolving("qualified name '%s'", filename)
            end
        end
    else -- search spec
        local filetype, done, wantedfiles, ext = '', false, { }, fileextname(filename)
        -- tricky as filename can be bla.1.2.3
        -- if not suffixmap[ext] then --- probably needs to be done elsewhere too
        --     wantedfiles[#wantedfiles+1] = filename
        -- end
        wantedfiles[#wantedfiles+1] = filename
        if askedformat == "" then
            if ext == "" or not suffixmap[ext] then
                local defaultsuffixes = resolvers.defaultsuffixes
                for i=1,#defaultsuffixes do
                    local forcedname = filename .. '.' .. defaultsuffixes[i]
                    wantedfiles[#wantedfiles+1] = forcedname
                    filetype = resolvers.formatofsuffix(forcedname)
                    if trace_locating then
                        report_resolving("forcing filetype '%s'",filetype)
                    end
                end
            else
                filetype = resolvers.formatofsuffix(filename)
                if trace_locating then
                    report_resolving("using suffix based filetype '%s'",filetype)
                end
            end
        else
            if ext == "" or not suffixmap[ext] then
                local format_suffixes = suffixes[askedformat]
                if format_suffixes then
                    for i=1,#format_suffixes do
                        wantedfiles[#wantedfiles+1] = filename .. "." .. format_suffixes[i]
                    end
                end
            end
            filetype = askedformat
            if trace_locating then
                report_resolving("using given filetype '%s'",filetype)
            end
        end
        local typespec = resolvers.variableofformat(filetype)
        local pathlist = resolvers.expandedpathlist(typespec)
        if not pathlist or #pathlist == 0 then
            -- no pathlist, access check only / todo == wildcard
            if trace_detail then
                report_resolving("checking filename '%s', filetype '%s', wanted files '%s'",filename, filetype or '?',concat(wantedfiles," | "))
            end
            for k=1,#wantedfiles do
                local fname = wantedfiles[k]
                if fname and isreadable(fname) then
                    filename, done = fname, true
                    result[#result+1] = filejoin('.',fname)
                    break
                end
            end
            -- this is actually 'other text files' or 'any' or 'whatever'
            local filelist = collect_files(wantedfiles)
            local fl = filelist and filelist[1]
            if fl then
                filename = fl[3] -- not local?
                result[#result+1] = resolvers.resolve(filename)
                done = true
            end
        else
            -- list search
            local filelist = collect_files(wantedfiles)
            local dirlist = { }
            if filelist then
                for i=1,#filelist do
                    dirlist[i] = filedirname(filelist[i][3]) .. "/" -- was [2] .. gamble
                end
            end
            if trace_detail then
                report_resolving("checking filename '%s'",filename)
            end
            for k=1,#pathlist do
                local path = pathlist[k]
                local pathname = lpegmatch(inhibitstripper,path)
                local doscan = path == pathname -- no ^!!
                done = false
                -- using file list
                if filelist then
                    -- compare list entries with permitted pattern -- /xx /xx//
                    local expression = makepathexpression(pathname)
                    if trace_detail then
                        report_resolving("using pattern '%s' for path '%s'",expression,pathname)
                    end
                    for k=1,#filelist do
                        local fl = filelist[k]
                        local f = fl[2]
                        local d = dirlist[k]
                        if find(d,expression) then
                            -- todo, test for readable
                            result[#result+1] = resolvers.resolve(fl[3]) -- no shortcut
                            done = true
                            if allresults then
                                if trace_detail then
                                    report_resolving("match to '%s' in hash for file '%s' and path '%s', continue scanning",expression,f,d)
                                end
                            else
                                if trace_detail then
                                    report_resolving("match to '%s' in hash for file '%s' and path '%s', quit scanning",expression,f,d)
                                end
                                break
                            end
                        elseif trace_detail then
                            report_resolving("no match to '%s' in hash for file '%s' and path '%s'",expression,f,d)
                        end
                    end
                end
                if not done and doscan then
                    -- check if on disk / unchecked / does not work at all / also zips
                    local scheme = url.hasscheme(pathname)
                    if not scheme or scheme == "file" then
                        local pname = gsub(pathname,"%.%*$",'')
                        if not find(pname,"%*") then
                            local ppname = gsub(pname,"/+$","")
                            if can_be_dir(ppname) then
                                for k=1,#wantedfiles do
                                    local w = wantedfiles[k]
                                    local fname = filejoin(ppname,w)
                                    if isreadable(fname) then
                                        if trace_detail then
                                            report_resolving("found '%s' by scanning",fname)
                                        end
                                        result[#result+1] = fname
                                        done = true
                                        if not allresults then
                                            break
                                        end
                                    end
                                end
                            else
                                -- no access needed for non existing path, speedup (esp in large tree with lots of fake)
                            end
                        end
                    end
                end
                if not done and doscan then
                    -- todo: slow path scanning ... although we now have tree:// supported in $TEXMF
                end
                if done and not allresults then
                    break
                end
            end
        end
    end
    for k=1,#result do
        local rk = collapsepath(result[k])
        result[k] = rk
        resolvers.registerintrees(rk) -- for tracing used files
    end
    if stamp then
        instance.found[stamp] = result
    end
    return result
end

-- -- -- begin of main file search routing -- -- --

-- -- -- end of main file search routing -- -- --

local function findfiles(filename,filetype,allresults)
    local result = collect_instance_files(filename,filetype or "",allresults)
    if #result == 0 then
        local lowered = lower(filename)
        if filename ~= lowered then
            return collect_instance_files(lowered,filetype or "",allresults)
        end
    end
    return result
end

function resolvers.findfiles(filename,filetype)
    return findfiles(filename,filetype,true)
end

function resolvers.findfile(filename,filetype)
    return findfiles(filename,filetype,false)[1] or ""
end

function resolvers.findpath(filename,filetype)
    return filedirname(findfiles(filename,filetype,false)[1] or "")
end

-- lookup by exact basename in the registered hashes, honouring "remap:" entries

local function findgivenfiles(filename,allresults)
    local bname, result = filebasename(filename), { }
    local hashes = instance.hashes
    local noffound = 0
    for k=1,#hashes do
        local hash = hashes[k]
        local files = instance.files[hash.name] or { }
        local blist = files[bname]
        if not blist then
            local rname = "remap:"..bname
            blist = files[rname]
            if blist then
                bname = files[rname]
                blist = files[bname]
            end
        end
        if blist then
            if type(blist) == 'string' then
                local found = methodhandler('concatinators',hash.type,hash.name,blist,bname) or ""
                if found ~= "" then
                    noffound = noffound + 1
                    result[noffound] = resolvers.resolve(found)
                    if not allresults then break end
                end
            else
                for kk=1,#blist do
                    local vv = blist[kk]
                    local found = methodhandler('concatinators',hash.type,hash.name,vv,bname) or ""
                    if found ~= "" then
                        noffound = noffound + 1
                        result[noffound] = resolvers.resolve(found)
                        if not allresults then break end
                    end
                end
            end
        end
    end
    return result
end

function resolvers.findgivenfiles(filename)
    return findgivenfiles(filename,true)
end

function resolvers.findgivenfile(filename)
    return findgivenfiles(filename,false)[1] or ""
end

-- helper for the wildcard search: match path against the directory list(s)
-- and append resolved full names to 'result'

local function doit(path,blist,bname,tag,variant,result,allresults)
    local done = false
    if blist and variant then
        local resolve = resolvers.resolve -- added
        if type(blist) == 'string' then
            -- make function and share code
            if find(lower(blist),path) then
                local full = methodhandler('concatinators',variant,tag,blist,bname) or ""
                result[#result+1] = resolve(full)
                done = true
            end
        else
            for kk=1,#blist do
                local vv = blist[kk]
                if find(lower(vv),path) then
                    local full = methodhandler('concatinators',variant,tag,vv,bname) or ""
                    result[#result+1] = resolve(full)
                    done = true
                    if not allresults then break end
                end
            end
        end
    end
    return done
end

local makewildcard = Cs(
    (P("^")^0 * P("/") * P(-1) + P(-1)) /".*"
  + (P("^")^0 * P("/") / "")^0 * (P("*")/".*" + P("-")/"%%-" + P(".")/"%%." + P("?")/"."+ P("\\")/"/" + P(1))^0
)

function resolvers.wildcardpattern(pattern)
    return lpegmatch(makewildcard,pattern) or pattern
end

local function findwildcardfiles(filename,allresults,result) -- todo: remap: and lpeg
    result = result or { }
    local base = filebasename(filename)
    local dirn = filedirname(filename)
    local path = lower(lpegmatch(makewildcard,dirn) or dirn)
    local name = lower(lpegmatch(makewildcard,base) or base)
    local files, done = instance.files, false
    if find(name,"%*") then
        local hashes = instance.hashes
        for k=1,#hashes do
            local hash = hashes[k]
            local hashname, hashtype = hash.name, hash.type
            for kk, hh in next, files[hashname] do
                if not find(kk,"^remap:") then
                    if find(lower(kk),name) then
                        if doit(path,hh,kk,hashname,hashtype,result,allresults) then done = true end
                        if done and not allresults then break end
                    end
                end
            end
        end
    else
        local hashes = instance.hashes
        for k=1,#hashes do
            local hash = hashes[k]
            local hashname, hashtype = hash.name, hash.type
            -- fixed: this used to index with the undeclared global 'bname'
            -- (always nil), so the non-wildcard branch never found anything
            if doit(path,files[hashname][base],base,hashname,hashtype,result,allresults) then done = true end
            if done and not allresults then break end
        end
    end
    -- we can consider also searching the paths not in the database, but then
    -- we end up with a messy search (all // in all path specs)
    return result
end

function resolvers.findwildcardfiles(filename,result)
    return findwildcardfiles(filename,true,result)
end

function resolvers.findwildcardfile(filename)
    return findwildcardfiles(filename,false)[1] or ""
end

-- main user functions

function resolvers.automount()
    -- implemented later
end

function resolvers.load(option)
    statistics.starttiming(instance)
    identify_configuration_files()
    load_configuration_files()
    if option ~= "nofiles" then
        load_databases()
        resolvers.automount()
    end
    statistics.stoptiming(instance)
    local files = instance.files
    return files and next(files) and true
end

local function report(str)
    if trace_locating then
        report_resolving(str) -- has already verbose
    else
        print(str)
    end
end

function resolvers.dowithfilesandreport(command, files, ...) -- will move
    if files and #files > 0 then
        if trace_locating then
            report('') -- ?
        end
        for f=1,#files do
            local file = files[f]
            local result = command(file,...)
            if type(result) == 'string' then
                report(result)
            else
                for i=1,#result do
                    report(result[i]) -- could be unpack
                end
            end
        end
    end
end

-- obsolete

-- resolvers.varvalue = resolvers.variable -- output the value of variable $STRING.
-- resolvers.expandvar = resolvers.expansion -- output variable expansion of STRING.
-- Report the expanded search path for the file type named by 'str'.
function resolvers.showpath(str) -- output search path for file type NAME
    return joinpath(resolvers.expandedpathlist(resolvers.formatofvariable(str)))
end

-- Register 'path' for 'name' in the 'files' hash. A single hit is stored as a
-- plain string; a second hit upgrades the entry to a list of paths.
--
-- FIX: the original code, when the entry was already a list, overwrote it with
-- the latest path (a string), silently dropping all previously registered
-- paths for that name. We now append to the list instead, so that every
-- registered path is preserved (callers such as the zip registrar register
-- the same name several times).
function resolvers.registerfile(files, name, path)
    local known = files[name]
    if not known then
        files[name] = path
    elseif type(known) == 'string' then
        files[name] = { known, path }
    else
        known[#known+1] = path
    end
end

-- Call 'func' once per entry of the expanded path list of variable 'name',
-- each path anchored with "^" (the callers use them as patterns).
function resolvers.dowithpath(name,func)
    local pathlist = resolvers.expandedpathlist(name)
    for i=1,#pathlist do
        func("^"..resolvers.cleanpath(pathlist[i]))
    end
end

-- Call 'func' with the expansion of variable 'name'.
-- NOTE(review): 'expandedvariable' is a local defined earlier in this closure
-- (outside this view).
function resolvers.dowithvariable(name,func)
    func(expandedvariable(name))
end

-- Locate a format file for 'name' and return the barename plus the best
-- matching companion file (.lui preferred over .luc over .lua), or nil, nil
-- when nothing usable is found.
function resolvers.locateformat(name)
    local barename = file.removesuffix(name) -- gsub(name,"%.%a+$","")
    local fmtname = caches.getfirstreadablefile(barename..".fmt","formats") or ""
    if fmtname == "" then
        fmtname = resolvers.findfile(barename..".fmt")
        fmtname = resolvers.cleanpath(fmtname)
    end
    if fmtname ~= "" then
        local barename = file.removesuffix(fmtname)
        local luaname, lucname, luiname = barename .. ".lua", barename .. ".luc", barename .. ".lui"
        if lfs.isfile(luiname) then
            return barename, luiname
        elseif lfs.isfile(lucname) then
            return barename, lucname
        elseif lfs.isfile(luaname) then
            return barename, luaname
        end
    end
    return nil, nil
end

-- Expand variable 'str' and interpret it as a boolean; an empty expansion or
-- an unparsable value yields 'default'.
function resolvers.booleanvariable(str,default)
    local b = resolvers.expansion(str)
    if b == "" then
        return default
    else
        b = toboolean(b)
        return (b == nil and default) or b
    end
end

-- Walk all hashed trees and call 'handle(blobtype,blobpath,value,key)' for
-- every file entry whose key matches 'pattern'. Optional 'before'/'after'
-- callbacks bracket each tree; 'after' also receives total/checked/done
-- counters. Remapped keys ("remap:...") are chased first.
function resolvers.dowithfilesintree(pattern,handle,before,after) -- will move, can be a nice iterator instead
    local instance = resolvers.instance
    local hashes = instance.hashes
    for i=1,#hashes do
        local hash = hashes[i]
        local blobtype = hash.type
        local blobpath = hash.name
        if blobpath then
            if before then
                before(blobtype,blobpath,pattern)
            end
            local files = instance.files[blobpath]
            local total, checked, done = 0, 0, 0
            if files then
                for k,v in next, files do
                    total = total + 1
                    if find(k,"^remap:") then
                        k = files[k]
                        v = k -- files[k] -- chained
                    end
                    if find(k,pattern) then
                        if type(v) == "string" then
                            checked = checked + 1
                            if handle(blobtype,blobpath,v,k) then
                                done = done + 1
                            end
                        else
                            checked = checked + #v
                            -- renamed inner index (was 'i', shadowing the hash loop index)
                            for j=1,#v do
                                if handle(blobtype,blobpath,v[j],k) then
                                    done = done + 1
                                end
                            end
                        end
                    end
                end
            end
            if after then
                after(blobtype,blobpath,pattern,total,checked,done)
            end
        end
    end
end

-- Backward-compatible aliases kept for old callers.
resolvers.obsolete = resolvers.obsolete or { }
local obsolete = resolvers.obsolete

resolvers.find_file  = resolvers.findfile    obsolete.find_file  = resolvers.findfile
resolvers.find_files = resolvers.findfiles   obsolete.find_files = resolvers.findfiles

end -- of closure

do -- create closure to overcome 200 locals limit

if not modules then modules = { } end modules ['data-pre'] = {
    version   = 1.001,
    comment   = "companion to luat-lib.mkiv",
    author    = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
    copyright = "PRAGMA ADE / ConTeXt Development Team",
    license   = "see context related readme files"
}

-- It could be interesting to hook the resolver in the file
-- opener so that unresolved prefixes travel around and we
-- get more abstraction.
-- As we use this beforehand we will move this up in the chain
-- of loading.

local resolvers = resolvers
local prefixes  = utilities.storage.allocate()
resolvers.prefixes = prefixes

local gsub = string.gsub
local cleanpath, findgivenfile, expansion = resolvers.cleanpath, resolvers.findgivenfile, resolvers.expansion
local getenv = resolvers.getenv -- we can probably also use resolvers.expansion
local P, Cs, lpegmatch = lpeg.P, lpeg.Cs, lpeg.match

-- Prefix handlers: each maps the payload of a "prefix:payload" specification
-- onto a clean path.

prefixes.environment = function(str) return cleanpath(expansion(str)) end

-- Look for 'str' relative to the current directory, then up to 'n' (default 2)
-- parent directories.
prefixes.relative = function(str,n) -- lfs.isfile
    if io.exists(str) then
        -- nothing
    elseif io.exists("./" .. str) then
        str = "./" .. str
    else
        local p = "../"
        for i=1,n or 2 do
            if io.exists(p .. str) then
                str = p .. str
                break
            else
                p = p .. "../"
            end
        end
    end
    return cleanpath(str)
end

-- First try a relative lookup, fall back to the resolver database.
prefixes.auto = function(str)
    local fullname = prefixes.relative(str)
    if not lfs.isfile(fullname) then
        fullname = prefixes.locate(str)
    end
    return fullname
end

prefixes.locate = function(str)
    local fullname = findgivenfile(str) or ""
    return cleanpath((fullname ~= "" and fullname) or str)
end

prefixes.filename = function(str)
    local fullname = findgivenfile(str) or ""
    return cleanpath(file.basename((fullname ~= "" and fullname) or str))
end

prefixes.pathname = function(str)
    local fullname = findgivenfile(str) or ""
    return cleanpath(file.dirname((fullname ~= "" and fullname) or str))
end

prefixes.selfautoloc    = function(str) return cleanpath(file.join(getenv('SELFAUTOLOC'),str)) end
prefixes.selfautoparent = function(str) return cleanpath(file.join(getenv('SELFAUTOPARENT'),str)) end
prefixes.selfautodir    = function(str) return cleanpath(file.join(getenv('SELFAUTODIR'),str)) end
prefixes.home           = function(str) return cleanpath(file.join(getenv('HOME'),str)) end

-- Short and alternative names for the handlers above.
prefixes.env  = prefixes.environment
prefixes.rel  = prefixes.relative
prefixes.loc  = prefixes.locate
prefixes.kpse = prefixes.locate
prefixes.full = prefixes.locate
prefixes.file = prefixes.filename
prefixes.path = prefixes.pathname

-- Return the sorted list of known prefixes, optionally with a trailing ":".
function resolvers.allprefixes(separator)
    local all = table.sortedkeys(prefixes)
    if separator then
        for i=1,#all do
            all[i] = all[i] .. ":"
        end
    end
    return all
end

-- Dispatch one "method:target" pair; unknown prefixes pass through unchanged.
local function _resolve_(method,target)
    if prefixes[method] then
        return prefixes[method](target)
    else
        return method .. ":" .. target
    end
end

local resolved, abstract = { }, { }

function resolvers.resetresolve(str)
    resolved, abstract = { }, { }
end

-- Resolve all "prefix:payload" occurrences in 'str'; results are cached in
-- both directions so unresolve can map a result back to its abstract form.
local function resolve(str) -- use schemes, this one is then for the commandline only
    local res = resolved[str]
    if not res then
        res = gsub(str,"([a-z][a-z]+):([^ \"\']*)",_resolve_)
        resolved[str] = res
        abstract[res] = str
    end
    return res
end

local function unresolve(str)
    return abstract[str] or str
end

resolvers.resolve   = resolve
resolvers.unresolve = unresolve

-- Expose os.uname fields (when available) as constant prefixes.
if os.uname then
    for k, v in next, os.uname() do
        if not prefixes[k] then
            prefixes[k] = function() return v end
        end
    end
end

if os.type == "unix" then

    local pattern

    -- Rebuild the lpeg that rewrites "prefix:" path separators; installed as
    -- the __newindex metamethod so the pattern tracks newly added prefixes.
    local function makepattern(t,k,v)
        -- FIX: the original ended with "t[k] = v", which fires this very
        -- __newindex handler again and recurses without bound. rawset stores
        -- the new prefix without invoking the metamethod; doing it before the
        -- rebuild also lets the fresh pattern include the new prefix.
        if t then
            rawset(t,k,v)
        end
        local colon = P(":")
        local p
        for k, v in table.sortedpairs(prefixes) do
            if p then
                p = P(k) + p
            else
                p = P(k)
            end
        end
        pattern = Cs((p * colon + colon/";" + P(1))^0)
    end

    makepattern()

    getmetatable(prefixes).__newindex = makepattern

    function resolvers.repath(str)
        return lpegmatch(pattern,str)
    end

else -- already the default:

    function resolvers.repath(str)
        return str
    end

end

end -- of closure

do -- create closure to overcome 200 locals limit

if not modules then modules = { } end modules ['data-inp'] = {
    version   = 1.001,
    comment   = "companion to luat-lib.mkiv",
    author    = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
    copyright = "PRAGMA ADE / ConTeXt Development Team",
    license   = "see context related readme files"
}

local allocate = utilities.storage.allocate

local resolvers = resolvers

local methodhandler  = resolvers.methodhandler
local registermethod = resolvers.registermethod

local finders = allocate { helpers = { }, notfound = function() end }
local openers = allocate { helpers = { },
notfound = function() end }
local loaders = allocate { helpers = { }, notfound = function() return false, nil, 0 end }

registermethod("finders", finders, "uri")
registermethod("openers", openers, "uri")
registermethod("loaders", loaders, "uri")

resolvers.finders = finders
resolvers.openers = openers
resolvers.loaders = loaders

end -- of closure

do -- create closure to overcome 200 locals limit

if not modules then modules = { } end modules ['data-out'] = {
    version   = 1.001,
    comment   = "companion to luat-lib.mkiv",
    author    = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
    copyright = "PRAGMA ADE / ConTeXt Development Team",
    license   = "see context related readme files"
}

local allocate = utilities.storage.allocate

local resolvers = resolvers

local registermethod = resolvers.registermethod

local savers = allocate { helpers = { } }

resolvers.savers = savers

registermethod("savers", savers, "uri")

end -- of closure

do -- create closure to overcome 200 locals limit

if not modules then modules = { } end modules ['data-fil'] = {
    version   = 1.001,
    comment   = "companion to luat-lib.mkiv",
    author    = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
    copyright = "PRAGMA ADE / ConTeXt Development Team",
    license   = "see context related readme files"
}

local trace_locating = false  trackers.register("resolvers.locating", function(v) trace_locating = v end)

local report_files = logs.reporter("resolvers","files")

local resolvers = resolvers

local finders, openers, loaders, savers = resolvers.finders, resolvers.openers, resolvers.loaders, resolvers.savers
local locators, hashers, generators, concatinators = resolvers.locators, resolvers.hashers, resolvers.generators, resolvers.concatinators

local checkgarbage = utilities.garbagecollector and utilities.garbagecollector.check

-- Locate a plain directory tree and register it as a cached 'file' hash.
function locators.file(specification)
    local name = specification.filename
    local realname = resolvers.resolve(name) -- no shortcut
    if realname and realname ~= '' and lfs.isdir(realname) then
        if trace_locating then
            report_files("file locator '%s' found as '%s'",name,realname)
        end
        resolvers.appendhash('file',name,true) -- cache
    elseif trace_locating then
        report_files("file locator '%s' not found",name)
    end
end

-- Load the saved file database for a tree; a nil result flags it for rescan.
function hashers.file(specification)
    local name = specification.filename
    local content = caches.loadcontent(name,'files')
    resolvers.registerfilehash(name,content,content==nil)
end

-- Scan a tree on disk and register the fresh file database.
function generators.file(specification)
    local path = specification.filename
    local content = resolvers.scanfiles(path)
    resolvers.registerfilehash(path,content,true)
end

concatinators.file = file.join

-- Find a file through the database; falls back to finders.notfound().
function finders.file(specification,filetype)
    local filename = specification.filename
    local foundname = resolvers.findfile(filename,filetype)
    if foundname and foundname ~= "" then
        if trace_locating then
            report_files("file finder: '%s' found",filename)
        end
        return foundname
    else
        if trace_locating then
            -- FIX: the message was missing the opening quote around %s
            report_files("file finder: '%s' not found",filename)
        end
        return finders.notfound()
    end
end

-- The default textopener will be overloaded later on.

-- Wrap an open file handle in the reader/close interface used by openers.
function openers.helpers.textopener(tag,filename,f)
    return {
        reader = function()                           return f:read () end,
        close  = function() logs.show_close(filename) return f:close() end,
    }
end

function openers.file(specification,filetype)
    local filename = specification.filename
    if filename and filename ~= "" then
        local f = io.open(filename,"r")
        if f then
            if trace_locating then
                report_files("file opener, '%s' opened",filename)
            end
            return openers.helpers.textopener("file",filename,f)
        end
    end
    if trace_locating then
        report_files("file opener, '%s' not found",filename)
    end
    return openers.notfound()
end

-- Load a file as binary and return success, content and size.
function loaders.file(specification,filetype)
    local filename = specification.filename
    if filename and filename ~= "" then
        local f = io.open(filename,"rb")
        if f then
            logs.show_load(filename)
            if trace_locating then
                report_files("file loader, '%s' loaded",filename)
            end
            local s = f:read("*a")
            if checkgarbage then
                checkgarbage(#s)
            end
            f:close()
            if s then
                return true, s, #s
            end
        end
    end
    if trace_locating then
report_files("file loader, '%s' not found",filename) end return loaders.notfound() end end -- of closure do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['data-con'] = { version = 1.100, comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" } local format, lower, gsub = string.format, string.lower, string.gsub local trace_cache = false trackers.register("resolvers.cache", function(v) trace_cache = v end) local trace_containers = false trackers.register("resolvers.containers", function(v) trace_containers = v end) local trace_storage = false trackers.register("resolvers.storage", function(v) trace_storage = v end) --[[ldx--Once we found ourselves defining similar cache constructs several times, containers were introduced. Containers are used to collect tables in memory and reuse them when possible based on (unique) hashes (to be provided by the calling function).
Caching to disk is disabled by default. Version numbers are stored in the saved table, which makes it possible to change the table structures without having to worry about stale disk caches.
Examples of usage can be found in the font-related code.
--ldx]]-- containers = containers or { } local containers = containers containers.usecache = true local report_containers = logs.reporter("resolvers","containers") local function report(container,tag,name) if trace_cache or trace_containers then report_containers("container: %s, tag: %s, name: %s",container.subcategory,tag,name or 'invalid') end end local allocated = { } local mt = { __index = function(t,k) if k == "writable" then local writable = caches.getwritablepath(t.category,t.subcategory) or { "." } t.writable = writable return writable elseif k == "readables" then local readables = caches.getreadablepaths(t.category,t.subcategory) or { "." } t.readables = readables return readables end end, __storage__ = true } function containers.define(category, subcategory, version, enabled) if category and subcategory then local c = allocated[category] if not c then c = { } allocated[category] = c end local s = c[subcategory] if not s then s = { category = category, subcategory = subcategory, storage = { }, enabled = enabled, version = version or math.pi, -- after all, this is TeX trace = false, -- writable = caches.getwritablepath and caches.getwritablepath (category,subcategory) or { "." }, -- readables = caches.getreadablepaths and caches.getreadablepaths(category,subcategory) or { "." 
}, } setmetatable(s,mt) c[subcategory] = s end return s end end function containers.is_usable(container, name) return container.enabled and caches and caches.is_writable(container.writable, name) end function containers.is_valid(container, name) if name and name ~= "" then local storage = container.storage[name] return storage and storage.cache_version == container.version else return false end end function containers.read(container,name) local storage = container.storage local stored = storage[name] if not stored and container.enabled and caches and containers.usecache then stored = caches.loaddata(container.readables,name) if stored and stored.cache_version == container.version then report(container,"loaded",name) else stored = nil end storage[name] = stored elseif stored then report(container,"reusing",name) end return stored end function containers.write(container, name, data) if data then data.cache_version = container.version if container.enabled and caches then local unique, shared = data.unique, data.shared data.unique, data.shared = nil, nil caches.savedata(container.writable, name, data) report(container,"saved",name) data.unique, data.shared = unique, shared end report(container,"stored",name) container.storage[name] = data end return data end function containers.content(container,name) return container.storage[name] end function containers.cleanname(name) return (gsub(lower(name),"[^%w%d]+","-")) end end -- of closure do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['data-use'] = { version = 1.001, comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" } local format, lower, gsub, find = string.format, string.lower, string.gsub, string.find local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end) local report_mounts = 
logs.reporter("resolvers","mounts") local resolvers = resolvers -- we will make a better format, maybe something xml or just text or lua resolvers.automounted = resolvers.automounted or { } function resolvers.automount(usecache) local mountpaths = resolvers.cleanpathlist(resolvers.expansion('TEXMFMOUNT')) if (not mountpaths or #mountpaths == 0) and usecache then mountpaths = caches.getreadablepaths("mount") end if mountpaths and #mountpaths > 0 then statistics.starttiming(resolvers.instance) for k=1,#mountpaths do local root = mountpaths[k] local f = io.open(root.."/url.tmi") if f then for line in f:lines() do if line then if find(line,"^[%%#%-]") then -- or %W -- skip elseif find(line,"^zip://") then if trace_locating then report_mounts("mounting %s",line) end table.insert(resolvers.automounted,line) resolvers.usezipfile(line) end end end f:close() end end statistics.stoptiming(resolvers.instance) end end -- status info statistics.register("used config file", function() return caches.configfiles() end) statistics.register("used cache path", function() return caches.usedpaths() end) -- experiment (code will move) function statistics.savefmtstatus(texname,formatbanner,sourcefile) -- texname == formatname local enginebanner = status.list().banner if formatbanner and enginebanner and sourcefile then local luvname = file.replacesuffix(texname,"luv") local luvdata = { enginebanner = enginebanner, formatbanner = formatbanner, sourcehash = md5.hex(io.loaddata(resolvers.findfile(sourcefile)) or "unknown"), sourcefile = sourcefile, } io.savedata(luvname,table.serialize(luvdata,true)) end end function statistics.checkfmtstatus(texname) local enginebanner = status.list().banner if enginebanner and texname then local luvname = file.replacesuffix(texname,"luv") if lfs.isfile(luvname) then local luv = dofile(luvname) if luv and luv.sourcefile then local sourcehash = md5.hex(io.loaddata(resolvers.findfile(luv.sourcefile)) or "unknown") local luvbanner = luv.enginebanner or "?" 
if luvbanner ~= enginebanner then return format("engine mismatch (luv: %s <> bin: %s)",luvbanner,enginebanner) end local luvhash = luv.sourcehash or "?" if luvhash ~= sourcehash then return format("source mismatch (luv: %s <> bin: %s)",luvhash,sourcehash) end else return "invalid status file" end else return "missing status file" end end return true end end -- of closure do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['data-zip'] = { version = 1.001, comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" } -- partly redone .. needs testing local format, find, match = string.format, string.find, string.match local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end) local report_zip = logs.reporter("resolvers","zip") -- zip:///oeps.zip?name=bla/bla.tex -- zip:///oeps.zip?tree=tex/texmf-local -- zip:///texmf.zip?tree=/tex/texmf -- zip:///texmf.zip?tree=/tex/texmf-local -- zip:///texmf-mine.zip?tree=/tex/texmf-projects local resolvers = resolvers zip = zip or { } local zip = zip zip.archives = zip.archives or { } local archives = zip.archives zip.registeredfiles = zip.registeredfiles or { } local registeredfiles = zip.registeredfiles local function validzip(str) -- todo: use url splitter if not find(str,"^zip://") then return "zip:///" .. 
str else return str end end function zip.openarchive(name) if not name or name == "" then return nil else local arch = archives[name] if not arch then local full = resolvers.findfile(name) or "" arch = (full ~= "" and zip.open(full)) or false archives[name] = arch end return arch end end function zip.closearchive(name) if not name or (name == "" and archives[name]) then zip.close(archives[name]) archives[name] = nil end end function resolvers.locators.zip(specification) local archive = specification.filename local zipfile = archive and archive ~= "" and zip.openarchive(archive) -- tricky, could be in to be initialized tree if trace_locating then if zipfile then report_zip("locator, archive '%s' found",archive) else report_zip("locator, archive '%s' not found",archive) end end end function resolvers.hashers.zip(specification) local archive = specification.filename if trace_locating then report_zip("loading file '%s'",archive) end resolvers.usezipfile(specification.original) end function resolvers.concatinators.zip(zipfile,path,name) -- ok ? 
if not path or path == "" then return format('%s?name=%s',zipfile,name) else return format('%s?name=%s/%s',zipfile,path,name) end end function resolvers.finders.zip(specification) local original = specification.original local archive = specification.filename if archive then local query = url.query(specification.query) local queryname = query.name if queryname then local zfile = zip.openarchive(archive) if zfile then if trace_locating then report_zip("finder, archive '%s' found",archive) end local dfile = zfile:open(queryname) if dfile then dfile = zfile:close() if trace_locating then report_zip("finder, file '%s' found",queryname) end return specification.original elseif trace_locating then report_zip("finder, file '%s' not found",queryname) end elseif trace_locating then report_zip("finder, unknown archive '%s'",archive) end end end if trace_locating then report_zip("finder, '%s' not found",original) end return resolvers.finders.notfound() end function resolvers.openers.zip(specification) local original = specification.original local archive = specification.filename if archive then local query = url.query(specification.query) local queryname = query.name if queryname then local zfile = zip.openarchive(archive) if zfile then if trace_locating then report_zip("opener, archive '%s' opened",archive) end local dfile = zfile:open(queryname) if dfile then if trace_locating then report_zip("opener, file '%s' found",queryname) end return resolvers.openers.helpers.textopener('zip',original,dfile) elseif trace_locating then report_zip("opener, file '%s' not found",queryname) end elseif trace_locating then report_zip("opener, unknown archive '%s'",archive) end end end if trace_locating then report_zip("opener, '%s' not found",original) end return resolvers.openers.notfound() end function resolvers.loaders.zip(specification) local original = specification.original local archive = specification.filename if archive then local query = url.query(specification.query) local 
queryname = query.name if queryname then local zfile = zip.openarchive(archive) if zfile then if trace_locating then report_zip("loader, archive '%s' opened",archive) end local dfile = zfile:open(queryname) if dfile then logs.show_load(original) if trace_locating then report_zip("loader, file '%s' loaded",original) end local s = dfile:read("*all") dfile:close() return true, s, #s elseif trace_locating then report_zip("loader, file '%s' not found",queryname) end elseif trace_locating then report_zip("loader, unknown archive '%s'",archive) end end end if trace_locating then report_zip("loader, '%s' not found",original) end return resolvers.openers.notfound() end -- zip:///somefile.zip -- zip:///somefile.zip?tree=texmf-local -> mount function resolvers.usezipfile(archive) local specification = resolvers.splitmethod(archive) -- to be sure local archive = specification.filename if archive and not registeredfiles[archive] then local z = zip.openarchive(archive) if z then local tree = url.query(specification.query).tree or "" if trace_locating then report_zip("registering, registering archive '%s'",archive) end statistics.starttiming(resolvers.instance) resolvers.prependhash('zip',archive) resolvers.extendtexmfvariable(archive) -- resets hashes too registeredfiles[archive] = z instance.files[archive] = resolvers.registerzipfile(z,tree) statistics.stoptiming(resolvers.instance) elseif trace_locating then report_zip("registering, unknown archive '%s'",archive) end elseif trace_locating then report_zip("registering, '%s' not found",archive) end end function resolvers.registerzipfile(z,tree) local files, filter = { }, "" if tree == "" then filter = "^(.+)/(.-)$" else filter = format("^%s/(.+)/(.-)$",tree) end if trace_locating then report_zip("registering, using filter '%s'",filter) end local register, n = resolvers.registerfile, 0 for i in z:files() do local path, name = match(i.filename,filter) if path then if name and name ~= '' then register(files, name, path) n = n + 1 
else -- directory end else register(files, i.filename, '') n = n + 1 end end report_zip("registering, %s files registered",n) return files end end -- of closure do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['data-tre'] = { version = 1.001, comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" } -- \input tree://oeps1/**/oeps.tex local find, gsub, format = string.find, string.gsub, string.format local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end) local report_trees = logs.reporter("resolvers","trees") local resolvers = resolvers local done, found, notfound = { }, { }, resolvers.finders.notfound function resolvers.finders.tree(specification) local spec = specification.filename local fnd = found[spec] if fnd == nil then if spec ~= "" then local path, name = file.dirname(spec), file.basename(spec) if path == "" then path = "." end local hash = done[path] if not hash then local pattern = path .. "/*" -- we will use the proper splitter hash = dir.glob(pattern) done[path] = hash end local pattern = "/" .. gsub(name,"([%.%-%+])", "%%%1") .. 
"$" for k=1,#hash do local v = hash[k] if find(v,pattern) then found[spec] = v return v end end end fnd = notfound() -- false found[spec] = fnd end return fnd end function resolvers.locators.tree(specification) local name = specification.filename if name ~= '' and lfs.isdir(name) then if trace_locating then report_trees("locator '%s' found",name) end resolvers.appendhash('tree',name,false) -- don't cache elseif trace_locating then report_trees("locator '%s' not found",name) end end function resolvers.hashers.tree(specification) local name = specification.filename if trace_locating then report_trees("analysing '%s'",name) end resolvers.methodhandler("hashers",name) end resolvers.concatinators.tree = resolvers.concatinators.file resolvers.generators.tree = resolvers.generators.file resolvers.openers.tree = resolvers.openers.file resolvers.loaders.tree = resolvers.loaders.file end -- of closure do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['data-crl'] = { version = 1.001, comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" } -- this one is replaced by data-sch.lua -- local gsub = string.gsub local resolvers = resolvers local finders, openers, loaders = resolvers.finders, resolvers.openers, resolvers.loaders resolvers.curl = resolvers.curl or { } local curl = resolvers.curl local cached = { } local function runcurl(specification) local original = specification.original -- local scheme = specification.scheme local cleanname = gsub(original,"[^%a%d%.]+","-") local cachename = caches.setfirstwritablefile(cleanname,"curl") if not cached[original] then if not io.exists(cachename) then cached[original] = cachename local command = "curl --silent --create-dirs --output " .. cachename .. " " .. 
original os.spawn(command) end if io.exists(cachename) then cached[original] = cachename else cached[original] = "" end end return cached[original] end -- old code: we could be cleaner using specification (see schemes) local function finder(specification,filetype) return resolvers.methodhandler("finders",runcurl(specification),filetype) end local opener = openers.file local loader = loaders.file local function install(scheme) finders[scheme] = finder openers[scheme] = opener loaders[scheme] = loader end resolvers.curl.install = install install('http') install('https') install('ftp') end -- of closure do -- create closure to overcome 200 locals limit if not modules then modules = { } end modules ['data-lua'] = { version = 1.001, comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", copyright = "PRAGMA ADE / ConTeXt Development Team", license = "see context related readme files" } -- some loading stuff ... we might move this one to slot 2 depending -- on the developments (the loaders must not trigger kpse); we could -- of course use a more extensive lib path spec local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end) local report_libraries = logs.reporter("resolvers","libraries") local gsub, insert = string.gsub, table.insert local unpack = unpack or table.unpack local resolvers, package = resolvers, package local libformats = { 'luatexlibs', 'tex', 'texmfscripts', 'othertextfiles' } -- 'luainputs' local clibformats = { 'lib' } local _path_, libpaths, _cpath_, clibpaths function package.libpaths() if not _path_ or package.path ~= _path_ then _path_ = package.path libpaths = file.splitpath(_path_,";") end return libpaths end function package.clibpaths() if not _cpath_ or package.cpath ~= _cpath_ then _cpath_ = package.cpath clibpaths = file.splitpath(_cpath_,";") end return clibpaths end local function thepath(...) local t = { ... 
-- NOTE(review): continuation of thepath(); the opening of that function lies
-- before this chunk.
}
    t[#t+1] = "?.lua"
    local path = file.join(unpack(t))
    if trace_locating then
        report_libraries("! appending '%s' to 'package.path'",path)
    end
    return path
end

-- extra (prepended/appended) search paths for the lua lib loader below
local p_libpaths, a_libpaths = { }, { }

function package.appendtolibpath(...)
    insert(a_libpaths,thepath(...)) -- fixed: was 'a_libpath' (undefined global)
end

function package.prependtolibpath(...)
    insert(p_libpaths,1,thepath(...))
end

-- beware, we need to return a loadfile result !

-- Try 'simple' against each template in 'libpaths' (entries contain a '?'
-- placeholder) and return a loadfile result for the first readable hit.
local function loaded(libpaths,name,simple)
    for i=1,#libpaths do -- package.path, might become option
        local libpath  = libpaths[i]
        local resolved = gsub(libpath,"%?",simple)
        if trace_locating then -- more detail
            report_libraries("! checking for '%s' on 'package.path': '%s' => '%s'",simple,libpath,resolved)
        end
        if file.is_readable(resolved) then
            if trace_locating then
                report_libraries("! lib '%s' located via 'package.path': '%s'",name,resolved)
            end
            return loadfile(resolved)
        end
    end
end

-- Custom loader hooked into the standard loader chain: tds lib formats
-- first, then the extra lua lib paths, then c lib formats/paths, and
-- finally a 'luatexlibs' basename fallback.
package.loaders[2] = function(name) -- was [#package.loaders+1]
    if trace_locating then -- mode detail
        report_libraries("! locating '%s'",name)
    end
    for i=1,#libformats do
        local format = libformats[i]
        local resolved = resolvers.findfile(name,format) or ""
        if trace_locating then -- mode detail
            report_libraries("! checking for '%s' using 'libformat path': '%s'",name,format)
        end
        if resolved ~= "" then
            if trace_locating then
                report_libraries("! lib '%s' located via environment: '%s'",name,resolved)
            end
            return loadfile(resolved)
        end
    end
    -- libpaths
    local libpaths, clibpaths = package.libpaths(), package.clibpaths()
    local simple = gsub(name,"%.lua$","")
    local simple = gsub(simple,"%.","/")
    local resolved = loaded(p_libpaths,name,simple) or loaded(libpaths,name,simple) or loaded(a_libpaths,name,simple)
    if resolved then
        return resolved
    end
    --
    local libname = file.addsuffix(simple,os.libsuffix)
    for i=1,#clibformats do
        -- better have a dedicated loop
        local format = clibformats[i]
        local paths = resolvers.expandedpathlistfromvariable(format)
        for p=1,#paths do
            local path = paths[p]
            local resolved = file.join(path,libname)
            if trace_locating then -- mode detail
                report_libraries("! checking for '%s' using 'clibformat path': '%s'",libname,path)
            end
            if file.is_readable(resolved) then
                if trace_locating then
                    report_libraries("! lib '%s' located via 'clibformat': '%s'",libname,resolved)
                end
                return package.loadlib(resolved,name)
            end
        end
    end
    for i=1,#clibpaths do -- package.path, might become option
        local libpath = clibpaths[i]
        local resolved = gsub(libpath,"?",simple)
        if trace_locating then -- more detail
            report_libraries("! checking for '%s' on 'package.cpath': '%s'",simple,libpath)
        end
        if file.is_readable(resolved) then
            if trace_locating then
                report_libraries("! lib '%s' located via 'package.cpath': '%s'",name,resolved)
            end
            return package.loadlib(resolved,name)
        end
    end
    -- just in case the distribution is messed up
    if trace_locating then -- more detail; fixed: was 'trace_loading' (undefined global)
        report_libraries("! checking for '%s' using 'luatexlibs'",name) -- fixed: dangling '%s' had no argument
    end
    local resolved = resolvers.findfile(file.basename(name),'luatexlibs') or ""
    if resolved ~= "" then
        if trace_locating then
            report_libraries("! lib '%s' located by basename via environment: '%s'",name,resolved)
        end
        return loadfile(resolved)
    end
    if trace_locating then
        report_libraries('? unable to locate lib: %s',name)
    end
 -- return "unable to locate " .. name
end

resolvers.loadlualib = require

-- -- -- --

package.obsolete = package.obsolete or { }

package.append_libpath  = appendtolibpath   -- will become obsolete
package.prepend_libpath = prependtolibpath  -- will become obsolete

package.obsolete.append_libpath  = appendtolibpath   -- will become obsolete
package.obsolete.prepend_libpath = prependtolibpath  -- will become obsolete

end -- of closure

do -- create closure to overcome 200 locals limit

if not modules then modules = { } end modules ['data-aux'] = {
    version   = 1.001,
    comment   = "companion to luat-lib.mkiv",
    author    = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
    copyright = "PRAGMA ADE / ConTeXt Development Team",
    license   = "see context related readme files"
}

local find = string.find
local type, next = type, next

local trace_locating = false  trackers.register("resolvers.locating", function(v) trace_locating = v end)

local resolvers = resolvers

local report_scripts = logs.reporter("resolvers","scripts")

-- Replace an old (own) script by a freshly located one, after sanity checks
-- on its location and name.
function resolvers.updatescript(oldname,newname) -- oldname -> own.name, not per se a suffix
    local scriptpath = "scripts/context/lua"
    newname = file.addsuffix(newname,"lua")
    local oldscript = resolvers.cleanpath(oldname)
    if trace_locating then
        report_scripts("to be replaced old script %s", oldscript)
    end
    local newscripts = resolvers.findfiles(newname) or { }
    if #newscripts == 0 then
        if trace_locating then
            report_scripts("unable to locate new script")
        end
    else
        for i=1,#newscripts do
            local newscript = resolvers.cleanpath(newscripts[i])
            if trace_locating then
                report_scripts("checking new script %s", newscript)
            end
            if oldscript == newscript then
                if trace_locating then
                    report_scripts("old and new script are the same")
                end
            elseif not find(newscript,scriptpath) then
                if trace_locating then
                    report_scripts("new script should come from %s",scriptpath)
                end
            elseif not (find(oldscript,file.removesuffix(newname).."$") or find(oldscript,newname.."$")) then
                if trace_locating then
                    report_scripts("invalid new script name")
                end
            else
                local
newdata = io.loaddata(newscript)
                if newdata then
                    if trace_locating then
                        report_scripts("old script content replaced by new content")
                    end
                    io.savedata(oldscript,newdata)
                    break
                elseif trace_locating then
                    report_scripts("unable to load new script")
                end
            end
        end
    end
end

end -- of closure

do -- create closure to overcome 200 locals limit

if not modules then modules = { } end modules ['data-tmf'] = {
    version   = 1.001,
    comment   = "companion to luat-lib.mkiv",
    author    = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
    copyright = "PRAGMA ADE / ConTeXt Development Team",
    license   = "see context related readme files"
}

local resolvers = resolvers

local report_tds = logs.reporter("resolvers","tds")

-- =  <<
-- ?  ??
-- <  +=
-- >  =+

-- Switch to the texmf tree at 'tree': checks that the platform subtree and
-- its bin directory exist, then points the SELFAUTO*/TEX* variables and
-- PATH at the new root so later lookups happen in that tree.
function resolvers.load_tree(tree,resolve)
    if type(tree) == "string" and tree ~= "" then
        local getenv, setenv = resolvers.getenv, resolvers.setenv
        -- later might listen to the raw osenv var as well
        local texos = "texmf-" .. os.platform
        local oldroot = environment.texroot
        local newroot = file.collapsepath(tree)
        local newtree = file.join(newroot,texos)
        local newpath = file.join(newtree,"bin")
        if not lfs.isdir(newtree) then
            report_tds("no '%s' under tree %s",texos,tree)
            os.exit()
        end
        if not lfs.isdir(newpath) then
            report_tds("no '%s/bin' under tree %s",texos,tree)
            os.exit()
        end
        local texmfos = newtree
        environment.texroot = newroot
        environment.texos   = texos
        environment.texmfos = texmfos
        -- Beware, we need to obey the relocatable autoparent so we
        -- set TEXMFCNF to its raw value. This is somewhat tricky when
        -- we run a mkii job from within. Therefore, in mtxrun, there
        -- is a resolve applied when we're in mkii/kpse mode.
        setenv('SELFAUTOPARENT', newroot)
        setenv('SELFAUTODIR',    newtree)
        setenv('SELFAUTOLOC',    newpath)
        setenv('TEXROOT',        newroot)
        setenv('TEXOS',          texos)
        setenv('TEXMFOS',        texmfos)
        setenv('TEXMFCNF',       resolvers.luacnfspec, not resolve)
        setenv("PATH",           newpath .. io.pathseparator .. getenv("PATH"))
        report_tds("changing from root '%s' to '%s'",oldroot,newroot)
        report_tds("prepending '%s' to binary path",newpath)
        report_tds()
    end
end

end -- of closure

do -- create closure to overcome 200 locals limit

if not modules then modules = { } end modules ['data-lst'] = {
    version   = 1.001,
    comment   = "companion to luat-lib.mkiv",
    author    = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
    copyright = "PRAGMA ADE / ConTeXt Development Team",
    license   = "see context related readme files"
}

-- used in mtxrun, can be loaded later .. todo

local find, concat, upper, format = string.find, table.concat, string.upper, string.format

resolvers.listers = resolvers.listers or { }

local resolvers = resolvers

local report_lists = logs.reporter("resolvers","lists")

-- Render a (possibly table valued) variable as a single string.
local function tabstr(str)
    if type(str) == 'table' then
        return concat(str," | ")
    else
        return str
    end
end

-- Report every configuration variable whose (uppercased) name matches
-- 'pattern', showing environment, configured, expanded and resolved values;
-- the instance tables are copied up front and restored afterwards.
function resolvers.listers.variables(pattern)
    local instance    = resolvers.instance
    local environment = instance.environment
    local variables   = instance.variables
    local expansions  = instance.expansions
    local pattern     = upper(pattern or "")
    local configured  = { }
    local order       = instance.order
    for i=1,#order do
        for k, v in next, order[i] do
            if v ~= nil and configured[k] == nil then
                configured[k] = v
            end
        end
    end
    local env = table.fastcopy(environment)
    local var = table.fastcopy(variables)
    local exp = table.fastcopy(expansions)
    for key, value in table.sortedpairs(configured) do
        if key ~= "" and (pattern == "" or find(upper(key),pattern)) then
            report_lists(key)
            report_lists(" env: %s",tabstr(rawget(environment,key)) or "unset")
            report_lists(" var: %s",tabstr(configured[key]) or "unset")
            report_lists(" exp: %s",tabstr(expansions[key]) or "unset")
            report_lists(" res: %s",resolvers.resolve(expansions[key]) or "unset")
        end
    end
    instance.environment = table.fastcopy(env)
    instance.variables   = table.fastcopy(var)
    instance.expansions  = table.fastcopy(exp)
end

-- Report the configuration specifications in the order they are used.
function resolvers.listers.configurations(report)
    local configurations =
resolvers.instance.specification
    local report = report or texio.write_nl
    for i=1,#configurations do
        report(resolvers.resolve(configurations[i]))
    end
end

end -- of closure

do -- create closure to overcome 200 locals limit

if not modules then modules = { } end modules ['luat-sta'] = {
    version   = 1.001,
    author    = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
    copyright = "PRAGMA ADE / ConTeXt Development Team",
    license   = "see context related readme files"
}

-- this code is used in the updater

local gmatch, match = string.gmatch, string.match
local type = type

states          = states or { }
local states    = states
states.data     = states.data or { }
local data      = states.data
states.hash     = states.hash or { }
local hash      = states.hash
states.tag      = states.tag or ""
states.filename = states.filename or ""

-- Serialize the state table for 'tag' to 'filename' (suffix forced to 'lus').
function states.save(filename,tag)
    tag = tag or states.tag
    filename = file.addsuffix(filename or states.filename,'lus')
    io.savedata(filename,
        "-- generator : luat-sta.lua\n" ..
        "-- state tag : " .. tag .. "\n\n" ..
        table.serialize(data[tag or states.tag] or {},true)
    )
end

-- Load a state file; a missing file yields empty data and hash tables.
function states.load(filename,tag)
    states.filename = filename
    states.tag      = tag or "whatever"
    states.filename = file.addsuffix(states.filename,'lus')
    data[states.tag], hash[states.tag] = (io.exists(filename) and dofile(filename)) or { }, { }
end

-- Store 'value' under a dotted 'key' in the state data for 'tag', creating
-- intermediate tables along the way; the flat hash keeps the original key
-- for fast lookup. NOTE(review): order sensitive, kept verbatim.
local function set_by_tag(tag,key,value,default,persistent)
    local d, h = data[tag], hash[tag]
    if d then
        if type(d) == "table" then
            local dkey, hkey = key, key
            local pre, post = match(key,"(.+)%.([^%.]+)$")
            if pre and post then
                for k in gmatch(pre,"[^%.]+") do
                    local dk = d[k]
                    if not dk then
                        dk = { }
                        d[k] = dk
                    elseif type(dk) == "string" then
                        -- invalid table, unable to upgrade structure
                        -- hope for the best or delete the state file
                        break
                    end
                    d = dk
                end
                dkey, hkey = post, key
            end
            if value == nil then
                value = default
            elseif value == false then
                -- special case
            elseif persistent then
                value = value or d[dkey] or default
            else
                value = value or default
            end
            d[dkey], h[hkey] = value, value
        elseif type(d) == "string" then
            -- weird
            data[tag], hash[tag] = value, value
        end
    end
end

-- Fetch the value for a dotted 'key' from the state data for 'tag': first
-- via the flat hash, otherwise by walking the nested tables.
local function get_by_tag(tag,key,default)
    local h = hash[tag]
    if h and h[key] then
        return h[key]
    else
        local d = data[tag]
        if d then
            for k in gmatch(key,"[^%.]+") do
                local dk = d[k]
                if dk ~= nil then
                    d = dk
                else
                    return default
                end
            end
            if d == false then
                return false
            else
                return d or default
            end
        end
    end
end

states.set_by_tag = set_by_tag
states.get_by_tag = get_by_tag

-- Convenience wrappers using the current state tag.
function states.set(key,value,default,persistent)
    set_by_tag(states.tag,key,value,default,persistent)
end

function states.get(key,default)
    return get_by_tag(states.tag,key,default)
end

end -- of closure

do -- create closure to overcome 200 locals limit

if not modules then modules = { } end modules ['luat-fmt'] = {
    version   = 1.001,
    comment   = "companion to mtxrun",
    author    = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
    copyright = "PRAGMA ADE / ConTeXt Development Team",
    license   = "see context related readme files"
}

local format = string.format

local report_format = logs.reporter("resolvers","formats")

-- helper for mtxrun

local quoted = string.quoted

-- Collect the --trackers / --directives command line flags for the format
-- generation run.
local function primaryflags() -- not yet ok
    local trackers   = environment.argument("trackers")
    local directives = environment.argument("directives")
    local flags = ""
    if trackers and trackers ~= "" then
        flags = flags .. "--trackers=" .. quoted(trackers)
    end
    if directives and directives ~= "" then
        flags = flags .. "--directives=" ..
quoted(directives)
    end
    return flags
end

-- Generate a tex format (and its lua stub) for 'name': locate the tex
-- source and the stub specification, pack the lua libraries into a stub
-- file and run 'luatex --ini' in the writable format cache path.
function environment.make_format(name)
    -- change to format path (early as we need expanded paths)
    local olddir = lfs.currentdir()
    local path = caches.getwritablepath("formats") or "" -- maybe platform
    if path ~= "" then
        lfs.chdir(path)
    end
    report_format("format path: %s",lfs.currentdir())
    -- check source file
    local texsourcename = file.addsuffix(name,"mkiv")
    local fulltexsourcename = resolvers.findfile(texsourcename,"tex") or ""
    if fulltexsourcename == "" then
        texsourcename = file.addsuffix(name,"tex")
        fulltexsourcename = resolvers.findfile(texsourcename,"tex") or ""
    end
    if fulltexsourcename == "" then
        report_format("no tex source file with name: %s (mkiv or tex)",name)
        lfs.chdir(olddir)
        return
    else
        report_format("using tex source file: %s",fulltexsourcename)
    end
    local texsourcepath = dir.expandname(file.dirname(fulltexsourcename)) -- really needed
    -- check specification
    local specificationname = file.replacesuffix(fulltexsourcename,"lus")
    local fullspecificationname = resolvers.findfile(specificationname,"tex") or ""
    if fullspecificationname == "" then
        specificationname = file.join(texsourcepath,"context.lus")
        fullspecificationname = resolvers.findfile(specificationname,"tex") or ""
    end
    if fullspecificationname == "" then
        report_format("unknown stub specification: %s",specificationname)
        lfs.chdir(olddir)
        return
    end
    local specificationpath = file.dirname(fullspecificationname)
    -- load specification
    local usedluastub = nil
    local usedlualibs = dofile(fullspecificationname)
    if type(usedlualibs) == "string" then
        usedluastub = file.join(file.dirname(fullspecificationname),usedlualibs)
    elseif type(usedlualibs) == "table" then
        report_format("using stub specification: %s",fullspecificationname)
        local texbasename = file.basename(name)
        local luastubname = file.addsuffix(texbasename,"lua")
        local lucstubname = file.addsuffix(texbasename,"luc")
        -- pack libraries in stub
        report_format("creating initialization file: %s",luastubname)
        utilities.merger.selfcreate(usedlualibs,specificationpath,luastubname)
        -- compile stub file (does not save that much as we don't use this stub at startup any more)
        local strip = resolvers.booleanvariable("LUACSTRIP", true)
        if utilities.lua.compile(luastubname,lucstubname) and lfs.isfile(lucstubname) then
            report_format("using compiled initialization file: %s",lucstubname)
            usedluastub = lucstubname
        else
            report_format("using uncompiled initialization file: %s",luastubname)
            usedluastub = luastubname
        end
    else
        report_format("invalid stub specification: %s",fullspecificationname)
        lfs.chdir(olddir)
        return
    end
    -- generate format
    local command = format("luatex --ini %s --lua=%s %s %sdump",primaryflags(),quoted(usedluastub),quoted(fulltexsourcename),os.platform == "unix" and "\\\\" or "\\")
    report_format("running command: %s\n",command)
    os.spawn(command)
    -- remove related mem files
    local pattern = file.removesuffix(file.basename(usedluastub)).."-*.mem"
 -- report_format("removing related mplib format with pattern '%s'", pattern)
    local mp = dir.glob(pattern)
    if mp then
        for i=1,#mp do
            local name = mp[i]
            report_format("removing related mplib format %s", file.basename(name))
            os.remove(name)
        end
    end
    lfs.chdir(olddir)
end

-- Run a previously generated format: locate the fmt file and its luc/lua
-- companion and spawn luatex with the given data file and extra options.
function environment.run_format(name,data,more)
    -- hm, rather old code here; we can now use the file.whatever functions
    if name and name ~= "" then
        local barename = file.removesuffix(name)
        local fmtname = caches.getfirstreadablefile(file.addsuffix(barename,"fmt"),"formats")
        if fmtname == "" then
            fmtname = resolvers.findfile(file.addsuffix(barename,"fmt")) or ""
        end
        fmtname = resolvers.cleanpath(fmtname)
        if fmtname == "" then
            report_format("no format with name: %s",name)
        else
            local barename = file.removesuffix(name) -- expanded name
            local luaname = file.addsuffix(barename,"luc")
            if not lfs.isfile(luaname) then
                luaname = file.addsuffix(barename,"lua")
            end
            if not lfs.isfile(luaname) then
                report_format("using format name: %s",fmtname)
                report_format("no luc/lua with name: %s",barename)
            else
                local command = format("luatex %s --fmt=%s --lua=%s %s %s",primaryflags(),quoted(barename),quoted(luaname),quoted(data),more ~= "" and quoted(more) or "")
                report_format("running command: %s",command)
                os.spawn(command)
            end
        end
    end
end

end -- of closure

-- end library merge

own = { } -- not local, might change

own.libs = { -- order can be made better
    'l-string.lua',
    'l-lpeg.lua',
    'l-table.lua',
    'l-io.lua',
    'l-number.lua',
    'l-set.lua',
    'l-os.lua',
    'l-file.lua',
    'l-md5.lua',
    'l-url.lua',
    'l-dir.lua',
    'l-boolean.lua',
    'l-unicode.lua',
    'l-math.lua',
    'util-tab.lua',
    'util-sto.lua',
    'util-mrg.lua',
    'util-lua.lua',
    'util-prs.lua',
    'util-fmt.lua',
    'util-deb.lua',
    'trac-inf.lua',
    'trac-set.lua',
    'trac-log.lua',
    'trac-pro.lua',
    'luat-env.lua', -- can come before inf (as in mkiv)
    'lxml-tab.lua',
    'lxml-lpt.lua',
 -- 'lxml-ent.lua',
    'lxml-mis.lua',
    'lxml-aux.lua',
    'lxml-xml.lua',
    'data-ini.lua',
    'data-exp.lua',
    'data-env.lua',
    'data-tmp.lua',
    'data-met.lua',
    'data-res.lua',
    'data-pre.lua',
    'data-inp.lua',
    'data-out.lua',
    'data-fil.lua',
    'data-con.lua',
    'data-use.lua',
 -- 'data-tex.lua',
 -- 'data-bin.lua',
    'data-zip.lua',
    'data-tre.lua',
    'data-crl.lua',
    'data-lua.lua',
    'data-aux.lua', -- updater
    'data-tmf.lua',
    'data-lst.lua',
    'luat-sta.lua',
    'luat-fmt.lua',
}

-- We need this hack till luatex is fixed.
--
-- for k,v in pairs(arg) do print(k,v) end

-- Shift the argument vector when invoked as 'luatex --luaonly mtxrun.lua ...'
-- so the script sees the arguments it expects.
if arg and (arg[0] == 'luatex' or arg[0] == 'luatex.exe') and arg[1] == "--luaonly" then
    arg[-1]=arg[0]
    arg[0]=arg[2]
    for k=3,#arg do
        arg[k-2]=arg[k]
    end
    arg[#arg]=nil
    arg[#arg]=nil
end

-- End of hack.

local format, gsub, gmatch, match = string.format, string.gsub, string.gmatch, string.match
local concat = table.concat

own.name = (environment and environment.ownname) or arg[0] or 'mtxrun.lua'
own.path = gsub(match(own.name,"^(.+)[\\/].-$") or ".","\\","/")

local ownpath, owntree = own.path, environment and environment.ownpath or own.path

own.list = {
    '.',
    ownpath ,
    ownpath ..
"/../sources", -- HH's development path
    owntree .. "/../../texmf-local/tex/context/base",
    owntree .. "/../../texmf-context/tex/context/base",
    owntree .. "/../../texmf-dist/tex/context/base",
    owntree .. "/../../texmf/tex/context/base",
    owntree .. "/../../../texmf-local/tex/context/base",
    owntree .. "/../../../texmf-context/tex/context/base",
    owntree .. "/../../../texmf-dist/tex/context/base",
    owntree .. "/../../../texmf/tex/context/base",
}

if own.path == "." then table.remove(own.list,1) end

-- Locate the directory that holds the (unmerged) libraries by probing each
-- candidate path; also extends package.path so l-* modules can require
-- their companions.
local function locate_libs()
    for l=1,#own.libs do
        local lib = own.libs[l]
        for p =1,#own.list do
            local pth = own.list[p]
            local filename = pth .. "/" .. lib
            local found = lfs.isfile(filename)
            if found then
                package.path = package.path .. ";" .. pth .. "/?.lua" -- in case l-* does a require
                return pth
            end
        end
    end
end

-- Load all libraries from the located path; when none is found 'resolvers'
-- is wiped so the startup check below triggers.
local function load_libs()
    local found = locate_libs()
    if found then
        for l=1,#own.libs do
            local filename = found .. "/" .. own.libs[l]
            local codeblob = loadfile(filename)
            if codeblob then
                codeblob()
            end
        end
    else
        resolvers = nil
    end
end

if not resolvers then
    load_libs()
end

if not resolvers then
    print("")
    print("Mtxrun is unable to start up due to lack of libraries. You may")
    print("try to run 'lua mtxrun.lua --selfmerge' in the path where this")
    print("script is located (normally under ..../scripts/context/lua) which")
    print("will make this script library independent.")
    os.exit()
end

-- verbosity

local e_verbose = environment.arguments["verbose"]

if e_verbose then
    trackers.enable("resolvers.locating")
end

-- some common flags (also passed through environment)

local e_silent      = environment.argument("silent")
local e_noconsole   = environment.argument("noconsole")

local e_trackers    = environment.argument("trackers")
local e_directives  = environment.argument("directives")
local e_experiments = environment.argument("experiments")

if e_silent == true then
    e_silent = "*"
end

if type(e_silent) == "string" then
    if type(e_directives) == "string" then
        e_directives = format("%s,logs.blocked={%s}",e_directives,e_silent)
    else
        e_directives = format("logs.blocked={%s}",e_silent)
    end
end

if e_noconsole then
    if type(e_directives) == "string" then
        e_directives = format("%s,logs.target=file",e_directives)
    else
        e_directives = format("logs.target=file")
    end
end

if e_trackers    then trackers   .enable(e_trackers)    end
if e_directives  then directives .enable(e_directives)  end
if e_experiments then experiments.enable(e_experiments) end

if not environment.trackers    then environment.trackers    = e_trackers    end
if not environment.directives  then environment.directives  = e_directives  end
if not environment.experiments then environment.experiments = e_experiments end

--

local instance = resolvers.reset()

-- NOTE(review): the original line layout of this long string was lost in
-- wrapping; column alignment below is reconstructed — verify against upstream.
local helpinfo = [[
--script              run an mtx script (lua prefered method) (--noquotes), no script gives list
--execute             run a script or program (texmfstart method) (--noquotes)
--resolve             resolve prefixed arguments
--ctxlua              run internally (using preloaded libs)
--internal            run script using built in libraries (same as --ctxlua)
--locate              locate given filename

--autotree            use texmf tree cf. env 'texmfstart_tree' or 'texmfstarttree'
--tree=pathtotree     use given texmf tree (default file: 'setuptex.tmf')
--environment=name    use given (tmf) environment file
--path=runpath        go to given path before execution
--ifchanged=filename  only execute when given file has changed (md checksum)
--iftouched=old,new   only execute when given file has changed (time stamp)

--makestubs           create stubs for (context related) scripts
--removestubs         remove stubs (context related) scripts
--stubpath=binpath    paths where stubs wil be written
--windows             create windows (mswin) stubs
--unix                create unix (linux) stubs

--verbose             give a bit more info
--trackers=list       enable given trackers
--progname=str        format or backend

--edit                launch editor with found file
--launch (--all)      launch files like manuals, assumes os support

--timedrun            run a script an time its run
--autogenerate        regenerate databases if needed (handy when used to run context in an editor)

--usekpse             use kpse as fallback (when no mkiv and cache installed, often slower)
--forcekpse           force using kpse (handy when no mkiv and cache installed but less functionality)

--prefixes            show supported prefixes

--generate            generate file database

--variables           show configuration variables
--configurations      show configuration order

--expand-braces       expand complex variable
--expand-path         expand variable (resolve paths)
--expand-var          expand variable (resolve references)
--show-path           show path expansion of ...
--var-value           report value of variable
--find-file           report file location
--find-path           report path of file

--pattern=str         filter variables
]]

local application = logs.application {
    name     = "mtxrun",
    banner   = "ConTeXt TDS Runner Tool 1.30",
    helpinfo = helpinfo,
}

local report = application.report

messages = messages or { } -- for the moment

runners = runners or { } -- global (might become local)

-- per-suffix interpreter used to spawn located scripts
runners.applications = {
    ["lua"] = "luatex --luaonly",
    ["luc"] = "luatex --luaonly",
    ["pl"]  = "perl",
    ["py"]  = "python",
    ["rb"]  = "ruby",
}

runners.suffixes = {
    'rb', 'lua', 'py', 'pl'
}

-- name -> { scriptfile, stub-worthy } for known (mostly mkii era) tools
runners.registered = {
    texexec      = { 'texexec.rb',      false },  -- context mkii runner (only tool not to be luafied)
    texutil      = { 'texutil.rb',      true  },  -- old perl based index sorter for mkii (old versions need it)
    texfont      = { 'texfont.pl',      true  },  -- perl script that makes mkii font metric files
    texfind      = { 'texfind.pl',      false },  -- perltk based tex searching tool, mostly used at pragma
    texshow      = { 'texshow.pl',      false },  -- perltk based context help system, will be luafied
 -- texwork      = { 'texwork.pl',      false },  -- perltk based editing environment, only used at pragma
    makempy      = { 'makempy.pl',      true  },
    mptopdf      = { 'mptopdf.pl',      true  },
    pstopdf      = { 'pstopdf.rb',      true  },  -- converts ps (and some more) images, does some cleaning (replaced)
 -- examplex     = { 'examplex.rb',     false },
    concheck     = { 'concheck.rb',     false },
    runtools     = { 'runtools.rb',     true  },
    textools     = { 'textools.rb',     true  },
    tmftools     = { 'tmftools.rb',     true  },
    ctxtools     = { 'ctxtools.rb',     true  },
    rlxtools     = { 'rlxtools.rb',     true  },
    pdftools     = { 'pdftools.rb',     true  },
    mpstools     = { 'mpstools.rb',     true  },
 -- exatools     = { 'exatools.rb',     true  },
    xmltools     = { 'xmltools.rb',     true  },
 -- luatools     = { 'luatools.lua',    true  },
    mtxtools     = { 'mtxtools.rb',     true  },
    pdftrimwhite = { 'pdftrimwhite.pl', false }
}

runners.launchers = {
    windows = { },
    unix    = { }
}

-- like runners.libpath("framework"): looks on script's subpath

function runners.libpath(...)
package.prepend_libpath(file.dirname(environment.ownscript),...)
    package.prepend_libpath(file.dirname(environment.ownname) ,...)
end

-- Shared precondition check for the run_* entry points: honours the
-- --ifchanged / --iftouched / --path flags and returns "run", "skip" or
-- "error"; replaces itself with a no-op after the first call.
function runners.prepare()
    local checkname = environment.argument("ifchanged")
    if type(checkname) == "string" and checkname ~= "" then
        local oldchecksum = file.loadchecksum(checkname)
        local newchecksum = file.checksum(checkname)
        if oldchecksum == newchecksum then
            if e_verbose then
                report("file '%s' is unchanged",checkname)
            end
            return "skip"
        elseif e_verbose then
            report("file '%s' is changed, processing started",checkname)
        end
        file.savechecksum(checkname)
    end
    local touchname = environment.argument("iftouched")
    if type(touchname) == "string" and touchname ~= "" then
        local oldname, newname = string.split(touchname, ",")
        if oldname and newname and oldname ~= "" and newname ~= "" then
            if not file.needs_updating(oldname,newname) then
                if e_verbose then
                    report("file '%s' and '%s' have same age",oldname,newname)
                end
                return "skip"
            elseif e_verbose then
                report("file '%s' is older than '%s'",oldname,newname)
            end
        end
    end
    local runpath = environment.argument("path")
    if type(runpath) == "string" and not lfs.chdir(runpath) then
        report("unable to change to path '%s'",runpath)
        return "error"
    end
    runners.prepare = function() end
    return "run"
end

-- Resolve and execute a (context) script; 'internal' runs it in-process via
-- dofile, otherwise the matching interpreter is spawned; 'nosplit' skips
-- the re-splitting of the command line. Returns true on success.
function runners.execute_script(fullname,internal,nosplit)
    local noquote = environment.argument("noquotes")
    if fullname and fullname ~= "" then
        local state = runners.prepare()
        if state == 'error' then
            return false
        elseif state == 'skip' then
            return true
        elseif state == "run" then
            local path, name, suffix, result = file.dirname(fullname), file.basename(fullname), file.extname(fullname), ""
            if path ~= "" then
                result = fullname
            elseif name then
                name = name:gsub("^int[%a]*:",function()
                    internal = true
                    return ""
                end )
                name = name:gsub("^script:","")
                if suffix == "" and runners.registered[name] and runners.registered[name][1] then
                    name = runners.registered[name][1]
                    suffix = file.extname(name)
                end
                if suffix == "" then
                    -- loop over known suffixes
                    for _,s in pairs(runners.suffixes) do
                        result = resolvers.findfile(name .. "." .. s, 'texmfscripts')
                        if result ~= "" then
                            break
                        end
                    end
                elseif runners.applications[suffix] then
                    result = resolvers.findfile(name, 'texmfscripts')
                else
                    -- maybe look on path
                    result = resolvers.findfile(name, 'other text files')
                end
            end
            if result and result ~= "" then
                if not nosplit then -- fixed: was the undefined global 'no_split', which made the flag dead
                    local before, after = environment.splitarguments(fullname) -- already done
                    environment.arguments_before, environment.arguments_after = before, after
                end
                if internal then
                    arg = { }
                    for _,v in pairs(environment.arguments_after) do
                        arg[#arg+1] = v
                    end
                    environment.ownscript = result
                    dofile(result)
                else
                    local binary = runners.applications[file.extname(result)]
                    if binary and binary ~= "" then
                        result = binary .. " " .. result
                    end
                    local command = result .. " " .. environment.reconstructcommandline(environment.arguments_after,noquote)
                    if e_verbose then
                        report()
                        report("executing: %s",command)
                        report()
                        report()
                        io.flush()
                    end
                    -- no os.exec because otherwise we get the wrong return value
                    local code = os.execute(command) -- maybe spawn
                    if code == 0 then
                        return true
                    else
                        if binary then
                            binary = file.addsuffix(binary,os.binsuffix)
                            for p in gmatch(os.getenv("PATH"),"[^"..io.pathseparator.."]+") do
                                if lfs.isfile(file.join(p,binary)) then
                                    return false
                                end
                            end
                            report()
                            report("This script needs '%s' which seems not to be installed.",binary)
                            report()
                        end
                        return false
                    end
                end
            end
        end
    end
    return false
end

-- Execute an external program (bin: prefix stripped), passing the remaining
-- command line arguments through. Returns true when the exit code is 0.
function runners.execute_program(fullname)
    local noquote = environment.argument("noquotes")
    if fullname and fullname ~= "" then
        local state = runners.prepare()
        if state == 'error' then
            return false
        elseif state == 'skip' then
            return true
        elseif state == "run" then
            local before, after = environment.splitarguments(fullname)
            for k=1,#after do after[k] = resolvers.resolve(after[k]) end
            environment.initializearguments(after)
            fullname = fullname:gsub("^bin:","")
            local command = fullname .. " " ..
(environment.reconstructcommandline(after or "",noquote) or "")
            report()
            report("executing: %s",command)
            report()
            report()
            io.flush()
            local code = os.exec(command) -- (fullname,unpack(after)) does not work / maybe spawn
            return code == 0
        end
    end
    return false
end

-- the --usekpse flag will fallback (not default) on kpse (hm, we can better update mtx-stubs)

local windows_stub = '@echo off\013\010setlocal\013\010set ownpath=%%~dp0%%\013\010texlua "%%ownpath%%mtxrun.lua" --usekpse --execute %s %%*\013\010endlocal\013\010'
local unix_stub    = '#!/bin/sh\010mtxrun --usekpse --execute %s \"$@\"\010'

-- Create or remove the platform stub files for all registered helper
-- scripts, in the path given by --stubpath (default: current directory).
function runners.handle_stubs(create)
    local stubpath = environment.argument('stubpath') or '.' -- 'auto' no longer supported (subpaths)
    local windows  = environment.argument('windows') or environment.argument('mswin') or false
    local unix     = environment.argument('unix') or environment.argument('linux') or false
    if not windows and not unix then
        -- no explicit choice: derive from the current platform
        if os.platform == "unix" then
            unix = true
        else
            windows = true
        end
    end
    for _,v in pairs(runners.registered) do
        local name, doit = v[1], v[2]
        if doit then
            local base = gsub(file.basename(name), "%.(.-)$", "")
            if create then
                if windows then
                    io.savedata(file.join(stubpath,base..".bat"),format(windows_stub,name))
                    report("windows stub for '%s' created",base)
                end
                if unix then
                    io.savedata(file.join(stubpath,base),format(unix_stub,name))
                    report("unix stub for '%s' created",base)
                end
            else
                if windows and (os.remove(file.join(stubpath,base..'.bat')) or os.remove(file.join(stubpath,base..'.cmd'))) then
                    report("windows stub for '%s' removed", base)
                end
                if unix and (os.remove(file.join(stubpath,base)) or os.remove(file.join(stubpath,base..'.sh'))) then
                    report("unix stub for '%s' removed",base)
                end
            end
        end
    end
end

-- Report the resolved (prefixed) filename.
function runners.resolve_string(filename)
    if filename and filename ~= "" then
        runners.report_location(resolvers.resolve(filename))
    end
end

-- Report the tds location of the given file.
function runners.locate_file(filename) -- differs from texmfstart where locate appends .com .exe .bat ... todo
    if filename and filename ~= "" then
        runners.report_location(resolvers.findgivenfile(filename))
    end
end

function runners.locate_platform()
    runners.report_location(os.platform)
end

-- Print a located result: via the reporter in verbose mode, raw otherwise.
function runners.report_location(result)
    if e_verbose then
        reportline() -- NOTE(review): 'reportline' is not defined anywhere in this chunk; verify (perhaps report() was meant)
        if result and result ~= "" then
            report(result)
        else
            report("not found")
        end
    else
        io.write(result)
    end
end

-- Open the resolved file in an editor (MTXRUN_EDITOR / TEXMFSTART_EDITOR /
-- EDITOR, falling back to gvim).
function runners.edit_script(filename) -- we assume that gvim is present on most systems (todo: also in cnf file)
    local editor = os.getenv("MTXRUN_EDITOR") or os.getenv("TEXMFSTART_EDITOR") or os.getenv("EDITOR") or 'gvim'
    local rest = resolvers.resolve(filename)
    if rest ~= "" then
        local command = editor .. " " .. rest
        if e_verbose then
            report()
            report("starting editor: %s",command)
            report()
            report()
        end
        os.launch(command)
    end
end

-- Save the listed argument values to a session file (serialized table).
function runners.save_script_session(filename, list)
    local t = { }
    for i=1,#list do
        local key = list[i]
        t[key] = environment.arguments[key]
    end
    io.savedata(filename,table.serialize(t,true))
end

-- Restore argument values from a previously saved session file.
function runners.load_script_session(filename)
    if lfs.isfile(filename) then
        local t = io.loaddata(filename)
        if t then
            t = loadstring(t)
            if t then t = t() end
            for key, value in pairs(t) do
                environment.arguments[key] = value
            end
        end
    end
end

-- Launch 'str' with the os default application, or with a per-suffix
-- runner registered in runners.launchers.
function resolvers.launch(str)
    -- maybe we also need to test on mtxrun.launcher.suffix environment
    -- variable or on windows consult the assoc and ftype vars and such
    local launchers = runners.launchers[os.platform]
    if launchers then
        local suffix = file.extname(str)
        if suffix then
            local runner = launchers[suffix]
            if runner then
                str = runner .. " " ..
str
            end
        end
    end
    os.launch(str)
end

-- Locate files matching --pattern (or the given name, with widening
-- wildcard fallbacks) and launch the first match, or all with --all.
function runners.launch_file(filename)
    instance.allresults = true -- NOTE(review): relies on the file level 'instance'; verify it is set at startup
    trackers.enable("resolvers.locating")
    local pattern = environment.arguments["pattern"]
    if not pattern or pattern == "" then
        pattern = filename
    end
    if not pattern or pattern == "" then
        report("provide name or --pattern=")
    else
        local t = resolvers.findfiles(pattern)
        if not t or #t == 0 then
            t = resolvers.findfiles("*/" .. pattern)
        end
        if not t or #t == 0 then
            t = resolvers.findfiles("*/" .. pattern .. "*")
        end
        if t and #t > 0 then
            if environment.arguments["all"] then
                for _, v in pairs(t) do
                    report("launching %s", v)
                    resolvers.launch(v)
                end
            else
                report("launching %s", t[1])
                resolvers.launch(t[1])
            end
        else
            report("no match for %s", pattern)
        end
    end
end

-- Resolve the full path of an mtx script by trying qualified, current-path
-- and prefixed names in turn. NOTE(review): the body continues past this
-- chunk; it is incomplete here.
function runners.find_mtx_script(filename)
    local function found(name)
        local path = file.dirname(name)
        if path and path ~= "" then
            return false
        else
            local fullname = own and own.path and file.join(own.path,name)
            return io.exists(fullname) and fullname
        end
    end
    filename = file.addsuffix(filename,"lua")
    local basename = file.removesuffix(file.basename(filename))
    local suffix = file.extname(filename)
    -- qualified path, raw name
    local fullname = file.is_qualified_path(filename) and io.exists(filename) and filename
    if fullname and fullname ~= "" then
        return fullname
    end
    -- current path, raw name
    fullname = "./" .. filename
    fullname = io.exists(fullname) and fullname
    if fullname and fullname ~= "" then
        return fullname
    end
    -- mtx- prefix checking
    local mtxprefix = (filename:find("^mtx%-") and "") or "mtx-"
    -- context namespace, mtx-