author     Marius <mariausol@gmail.com>  2010-12-01 17:00:20 +0200
committer  Marius <mariausol@gmail.com>  2010-12-01 17:00:20 +0200
commit     7ddd9fa6ca5b953fe385fa7ad464a408006bb993 (patch)
tree       51f21910916292bfdc460ac076ef618ad79cb6b9
parent     43b3007b67e55a65c5853402b973b82352a795b0 (diff)
download   context-7ddd9fa6ca5b953fe385fa7ad464a408006bb993.tar.gz
beta 2010.12.01 15:28
-rw-r--r--  scripts/context/lua/mtxrun.lua | 1345
-rw-r--r--  scripts/context/stubs/mswin/mtxrun.lua | 1345
-rw-r--r--  scripts/context/stubs/unix/mtxrun | 1345
-rw-r--r--  tex/context/base/attr-col.lua | 9
-rw-r--r--  tex/context/base/buff-ini.lua | 448
-rw-r--r--  tex/context/base/buff-ini.mkiv | 23
-rw-r--r--  tex/context/base/buff-ver.lua | 234
-rw-r--r--  tex/context/base/buff-ver.mkiv | 77
-rw-r--r--  tex/context/base/char-utf.lua | 6
-rw-r--r--  tex/context/base/cldf-com.lua | 14
-rw-r--r--  tex/context/base/colo-ext.mkiv | 15
-rw-r--r--  tex/context/base/colo-icc.lua | 4
-rw-r--r--  tex/context/base/colo-ini.lua | 86
-rw-r--r--  tex/context/base/cont-new.mkiv | 8
-rw-r--r--  tex/context/base/cont-new.tex | 9
-rw-r--r--  tex/context/base/context.tex | 2
-rw-r--r--  tex/context/base/data-bin.lua | 17
-rw-r--r--  tex/context/base/data-crl.lua | 54
-rw-r--r--  tex/context/base/data-exp.lua | 274
-rw-r--r--  tex/context/base/data-fil.lua | 112
-rw-r--r--  tex/context/base/data-inp.lua | 20
-rw-r--r--  tex/context/base/data-lua.lua | 7
-rw-r--r--  tex/context/base/data-met.lua | 123
-rw-r--r--  tex/context/base/data-out.lua | 10
-rw-r--r--  tex/context/base/data-res.lua | 378
-rw-r--r--  tex/context/base/data-sch.lua | 87
-rw-r--r--  tex/context/base/data-tex.lua | 372
-rw-r--r--  tex/context/base/data-tre.lua | 55
-rw-r--r--  tex/context/base/data-vir.lua | 81
-rw-r--r--  tex/context/base/data-zip.lua | 158
-rw-r--r--  tex/context/base/font-ini.mkiv | 6
-rw-r--r--  tex/context/base/grph-fig.mkiv | 12
-rw-r--r--  tex/context/base/l-dir.lua | 49
-rw-r--r--  tex/context/base/l-file.lua | 3
-rw-r--r--  tex/context/base/l-lpeg.lua | 36
-rw-r--r--  tex/context/base/l-string.lua | 2
-rw-r--r--  tex/context/base/l-url.lua | 159
-rw-r--r--  tex/context/base/lpdf-fmt.lua | 2
-rw-r--r--  tex/context/base/luat-lib.mkiv | 6
-rw-r--r--  tex/context/base/luat-mac.lua | 67
-rw-r--r--  tex/context/base/lxml-ctx.lua | 2
-rw-r--r--  tex/context/base/lxml-tex.lua | 6
-rw-r--r--  tex/context/base/m-database.lua | 6
-rw-r--r--  tex/context/base/m-pstricks.lua | 2
-rw-r--r--  tex/context/base/math-def.mkiv | 2
-rw-r--r--  tex/context/base/math-noa.lua | 14
-rw-r--r--  tex/context/base/meta-ini.mkiv | 2
-rw-r--r--  tex/context/base/mult-cld.lua | 7
-rw-r--r--  tex/context/base/mult-ini.lua | 5
-rw-r--r--  tex/context/base/node-pag.lua | 2
-rw-r--r--  tex/context/base/node-par.lua | 2
-rw-r--r--  tex/context/base/node-pro.lua | 4
-rw-r--r--  tex/context/base/node-shp.lua | 2
-rw-r--r--  tex/context/base/node-tsk.lua | 157
-rw-r--r--  tex/context/base/regi-ini.lua | 33
-rw-r--r--  tex/context/base/scrn-int.mkiv | 2
-rw-r--r--  tex/context/base/spac-ver.lua | 4
-rw-r--r--  tex/context/base/strc-blk.lua | 4
-rw-r--r--  tex/context/base/strc-ini.lua | 2
-rw-r--r--  tex/context/base/strc-ini.mkiv | 2
-rw-r--r--  tex/context/base/strc-mat.mkiv | 4
-rw-r--r--  tex/context/base/strc-not.mkiv | 15
-rw-r--r--  tex/context/base/strc-sec.mkiv | 2
-rw-r--r--  tex/context/base/supp-fil.lua | 75
-rw-r--r--  tex/context/base/tabl-tab.mkiv | 1171
-rw-r--r--  tex/context/base/tabl-tbl.lua | 3
-rw-r--r--  tex/context/base/tabl-tbl.mkiv | 409
-rw-r--r--  tex/context/base/task-ini.lua | 24
-rw-r--r--  tex/context/base/util-seq.lua | 116
-rw-r--r--  tex/generic/context/luatex-fonts-merged.lua | 43
70 files changed, 5015 insertions, 4167 deletions
diff --git a/scripts/context/lua/mtxrun.lua b/scripts/context/lua/mtxrun.lua
index 3be305bed..6b74022ae 100644
--- a/scripts/context/lua/mtxrun.lua
+++ b/scripts/context/lua/mtxrun.lua
@@ -119,7 +119,7 @@ local patterns_escapes = {
["."] = "%.",
["+"] = "%+", ["-"] = "%-", ["*"] = "%*",
["["] = "%[", ["]"] = "%]",
- ["("] = "%)", [")"] = "%)",
+ ["("] = "%(", [")"] = "%)",
-- ["{"] = "%{", ["}"] = "%}"
-- ["^"] = "%^", ["$"] = "%$",
}
@@ -185,6 +185,7 @@ local patterns = lpeg.patterns
local P, R, S, V, match = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.match
local Ct, C, Cs, Cc, Cf, Cg = lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc, lpeg.Cf, lpeg.Cg
+local lpegtype = lpeg.type
local utfcharacters = string.utfcharacters
local utfgmatch = unicode and unicode.utf8.gmatch
@@ -201,7 +202,6 @@ patterns.alwaysmatched = alwaysmatched
local digit, sign = R('09'), S('+-')
local cr, lf, crlf = P("\r"), P("\n"), P("\r\n")
local newline = crlf + cr + lf
-local utf8next = R("\128\191")
local escaped = P("\\") * anything
local squote = P("'")
local dquote = P('"')
@@ -222,6 +222,8 @@ local utftype = utfbom_32_be / "utf-32-be" + utfbom_32_le / "utf-32-le
+ utfbom_16_be / "utf-16-be" + utfbom_16_le / "utf-16-le"
+ utfbom_8 / "utf-8" + alwaysmatched / "unknown"
+local utf8next = R("\128\191")
+
patterns.utf8one = R("\000\127")
patterns.utf8two = R("\194\223") * utf8next
patterns.utf8three = R("\224\239") * utf8next * utf8next
@@ -432,19 +434,25 @@ end
-- Just for fun I looked at the used bytecode and
-- p = (p and p + pp) or pp gets one more (testset).
-function lpeg.replacer(t)
- if #t > 0 then
- local p
- for i=1,#t do
- local ti= t[i]
- local pp = P(ti[1]) / ti[2]
- if p then
- p = p + pp
- else
- p = pp
+function lpeg.replacer(one,two)
+ if type(one) == "table" then
+ local no = #one
+ if no > 0 then
+ local p
+ for i=1,no do
+ local o = one[i]
+ local pp = P(o[1]) / o[2]
+ if p then
+ p = p + pp
+ else
+ p = pp
+ end
end
+ return Cs((p + 1)^0)
end
- return Cs((p + 1)^0)
+ else
+ two = two or ""
+ return Cs((P(one)/two + 1)^0)
end
end
@@ -646,6 +654,10 @@ function lpeg.oneof(list,...) -- lpeg.oneof("elseif","else","if","then")
return p
end
+function lpeg.is_lpeg(p)
+ return p and lpegtype(p) == "pattern"
+end
+
end -- of closure
@@ -2558,6 +2570,9 @@ local separator = P("://")
local qualified = P(".")^0 * P("/") + letter*P(":") + letter^1*separator + letter^1 * P("/")
local rootbased = P("/") + letter*P(":")
+lpeg.patterns.qualified = qualified
+lpeg.patterns.rootbased = rootbased
+
-- ./name ../name /name c: :// name/name
function file.is_qualified_path(filename)
@@ -2678,72 +2693,95 @@ if not modules then modules = { } end modules ['l-url'] = {
license = "see context related readme files"
}
-local char, gmatch, gsub, format, byte = string.char, string.gmatch, string.gsub, string.format, string.byte
+local char, gmatch, gsub, format, byte, find = string.char, string.gmatch, string.gsub, string.format, string.byte, string.find
local concat = table.concat
local tonumber, type = tonumber, type
-local lpegmatch, lpegP, lpegC, lpegR, lpegS, lpegCs, lpegCc = lpeg.match, lpeg.P, lpeg.C, lpeg.R, lpeg.S, lpeg.Cs, lpeg.Cc
+local P, C, R, S, Cs, Cc, Ct = lpeg.P, lpeg.C, lpeg.R, lpeg.S, lpeg.Cs, lpeg.Cc, lpeg.Ct
+local lpegmatch, lpegpatterns, replacer = lpeg.match, lpeg.patterns, lpeg.replacer
--- from the spec (on the web):
+-- from wikipedia:
--
--- foo://example.com:8042/over/there?name=ferret#nose
--- \_/ \______________/\_________/ \_________/ \__/
--- | | | | |
--- scheme authority path query fragment
--- | _____________________|__
--- / \ / \
--- urn:example:animal:ferret:nose
+-- foo://username:password@example.com:8042/over/there/index.dtb?type=animal;name=narwhal#nose
+-- \_/ \_______________/ \_________/ \__/ \___/ \_/ \______________________/ \__/
+-- | | | | | | | |
+-- | userinfo hostname port | | query fragment
+-- | \________________________________/\_____________|____|/
+-- scheme | | | |
+-- | authority path | |
+-- | | |
+-- | path interpretable as filename
+-- | ___________|____________ |
+-- / \ / \ |
+-- urn:example:animal:ferret:nose interpretable as extension
url = url or { }
local url = url
-local function tochar(s)
- return char(tonumber(s,16))
-end
+local tochar = function(s) return char(tonumber(s,16)) end
-local colon, qmark, hash, slash, percent, endofstring = lpegP(":"), lpegP("?"), lpegP("#"), lpegP("/"), lpegP("%"), lpegP(-1)
+local colon = P(":")
+local qmark = P("?")
+local hash = P("#")
+local slash = P("/")
+local percent = P("%")
+local endofstring = P(-1)
-local hexdigit = lpegR("09","AF","af")
-local plus = lpegP("+")
-local nothing = lpegCc("")
-local escaped = (plus / " ") + (percent * lpegC(hexdigit * hexdigit) / tochar)
+local hexdigit = R("09","AF","af")
+local plus = P("+")
+local nothing = Cc("")
+local escaped = (plus / " ") + (percent * C(hexdigit * hexdigit) / tochar)
-- we assume schemes with more than 1 character (in order to avoid problems with windows disks)
-local scheme = lpegCs((escaped+(1-colon-slash-qmark-hash))^2) * colon + nothing
-local authority = slash * slash * lpegCs((escaped+(1- slash-qmark-hash))^0) + nothing
-local path = slash * lpegCs((escaped+(1- qmark-hash))^0) + nothing
-local query = qmark * lpegCs((escaped+(1- hash))^0) + nothing
-local fragment = hash * lpegCs((escaped+(1- endofstring))^0) + nothing
-
-local parser = lpeg.Ct(scheme * authority * path * query * fragment)
+local scheme = Cs((escaped+(1-colon-slash-qmark-hash))^2) * colon + nothing
+local authority = slash * slash * Cs((escaped+(1- slash-qmark-hash))^0) + nothing
+local path = slash * Cs((escaped+(1- qmark-hash))^0) + nothing
+local query = qmark * Cs((escaped+(1- hash))^0) + nothing
+local fragment = hash * Cs((escaped+(1- endofstring))^0) + nothing
-lpeg.patterns.urlsplitter = parser
+local parser = Ct(scheme * authority * path * query * fragment)
-local escapes = { }
+lpegpatterns.urlsplitter = parser
-for i=0,255 do
- escapes[i] = format("%%%02X",i)
-end
+local escapes = { } ; for i=0,255 do escapes[i] = format("%%%02X",i) end
-local escaper = lpeg.Cs((lpegR("09","AZ","az") + lpegS("-./_") + lpegP(1) / escapes)^0)
+local escaper = Cs((R("09","AZ","az") + S("-./_") + P(1) / escapes)^0)
-lpeg.patterns.urlescaper = escaper
+lpegpatterns.urlescaper = escaper
-- todo: reconsider Ct as we can as well have five return values (saves a table)
-- so we can have two parsers, one with and one without
-function url.split(str)
+local function split(str)
return (type(str) == "string" and lpegmatch(parser,str)) or str
end
+local function hasscheme(str)
+ local scheme = lpegmatch(scheme,str) -- at least one character
+ return scheme and scheme ~= ""
+end
+
-- todo: cache them
-function url.hashed(str) -- not yet ok (/test?test)
- local s = url.split(str)
+local rootletter = R("az","AZ")
+ + S("_-+")
+local separator = P("://")
+local qualified = P(".")^0 * P("/")
+ + rootletter * P(":")
+ + rootletter^1 * separator
+ + rootletter^1 * P("/")
+local rootbased = P("/")
+ + rootletter * P(":")
+
+local barswapper = replacer("|",":")
+local backslashswapper = replacer("\\","/")
+
+local function hashed(str) -- not yet ok (/test?test)
+ local s = split(str)
local somescheme = s[1] ~= ""
local somequery = s[4] ~= ""
if not somescheme and not somequery then
- return {
+ s = {
scheme = "file",
authority = "",
path = str,
@@ -2751,52 +2789,73 @@ function url.hashed(str) -- not yet ok (/test?test)
fragment = "",
original = str,
noscheme = true,
+ filename = str,
}
- else
- return {
+ else -- not always a filename but handy anyway
+ local authority, path, filename = s[2], s[3]
+ if authority == "" then
+ filename = path
+ else
+ filename = authority .. "/" .. path
+ end
+ s = {
scheme = s[1],
- authority = s[2],
- path = s[3],
+ authority = authority,
+ path = path,
query = s[4],
fragment = s[5],
original = str,
noscheme = false,
+ filename = filename,
}
end
+ return s
end
+-- Here we assume:
+--
+-- files: /// = relative
+-- files: //// = absolute (!)
+
-function url.hasscheme(str)
- return url.split(str)[1] ~= ""
-end
-function url.addscheme(str,scheme)
- return (url.hasscheme(str) and str) or ((scheme or "file:///") .. str)
+url.split = split
+url.hasscheme = hasscheme
+url.hashed = hashed
+
+function url.addscheme(str,scheme) -- no authority
+ if hasscheme(str) then
+ return str
+ elseif not scheme then
+ return "file:///" .. str
+ else
+ return scheme .. ":///" .. str
+ end
end
function url.construct(hash) -- dodo: we need to escape !
- local fullurl = { }
+ local fullurl, f = { }, 0
local scheme, authority, path, query, fragment = hash.scheme, hash.authority, hash.path, hash.query, hash.fragment
if scheme and scheme ~= "" then
- fullurl[#fullurl+1] = scheme .. "://"
+ f = f + 1 ; fullurl[f] = scheme .. "://"
end
if authority and authority ~= "" then
- fullurl[#fullurl+1] = authority
+ f = f + 1 ; fullurl[f] = authority
end
if path and path ~= "" then
- fullurl[#fullurl+1] = "/" .. path
+ f = f + 1 ; fullurl[f] = "/" .. path
end
if query and query ~= "" then
- fullurl[#fullurl+1] = "?".. query
+ f = f + 1 ; fullurl[f] = "?".. query
end
if fragment and fragment ~= "" then
- fullurl[#fullurl+1] = "#".. fragment
+ f = f + 1 ; fullurl[f] = "#".. fragment
end
return lpegmatch(escaper,concat(fullurl))
end
function url.filename(filename)
- local t = url.hashed(filename)
+ local t = hashed(filename)
return (t.scheme == "file" and (gsub(t.path,"^/([a-zA-Z])([:|])/)","%1:"))) or filename
end
@@ -2820,6 +2879,7 @@ end
+
end -- of closure
do -- create closure to overcome 200 locals limit
@@ -2861,25 +2921,22 @@ end
-- optimizing for no find (*) does not save time
+
local function globpattern(path,patt,recurse,action)
- local ok, scanner
if path == "/" then
- ok, scanner = xpcall(function() return walkdir(path..".") end, function() end) -- kepler safe
- else
- ok, scanner = xpcall(function() return walkdir(path) end, function() end) -- kepler safe
+ path = path .. "."
+ elseif not find(path,"/$") then
+ path = path .. '/'
end
- if ok and type(scanner) == "function" then
- if not find(path,"/$") then path = path .. '/' end
- for name in scanner do
- local full = path .. name
- local mode = attributes(full,'mode')
- if mode == 'file' then
- if find(full,patt) then
- action(full)
- end
- elseif recurse and (mode == "directory") and (name ~= '.') and (name ~= "..") then
- globpattern(full,patt,recurse,action)
+ for name in walkdir(path) do
+ local full = path .. name
+ local mode = attributes(full,'mode')
+ if mode == 'file' then
+ if find(full,patt) then
+ action(full)
end
+ elseif recurse and (mode == "directory") and (name ~= '.') and (name ~= "..") then
+ globpattern(full,patt,recurse,action)
end
end
end
@@ -9363,10 +9420,10 @@ if not modules then modules = { } end modules ['data-exp'] = {
license = "see context related readme files",
}
-local format, gsub, find, gmatch, lower = string.format, string.gsub, string.find, string.gmatch, string.lower
+local format, find, gmatch, lower = string.format, string.find, string.gmatch, string.lower
local concat, sort = table.concat, table.sort
local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
-local lpegCt, lpegCs, lpegP, lpegC, lpegS = lpeg.Ct, lpeg.Cs, lpeg.P, lpeg.C, lpeg.S
+local Ct, Cs, Cc, P, C, S = lpeg.Ct, lpeg.Cs, lpeg.Cc, lpeg.P, lpeg.C, lpeg.S
local type, next = type, next
local ostype = os.type
@@ -9381,7 +9438,7 @@ local resolvers = resolvers
-- As this bit of code is somewhat special it gets its own module. After
-- all, when working on the main resolver code, I don't want to scroll
--- past this every time.
+-- past this every time. See data-obs.lua for the gsub variant.
-- {a,b,c,d}
-- a,b,c/{p,q,r},d
@@ -9396,95 +9453,70 @@ local resolvers = resolvers
-- {a,b,c/{p,q/{x,y,z},w}v,d/{p,q,r}}
-- {$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,.local,}/web2c}
--- this one is better and faster, but it took me a while to realize
--- that this kind of replacement is cleaner than messy parsing and
--- fuzzy concatenating we can probably gain a bit with selectively
--- applying lpeg, but experiments with lpeg parsing this proved not to
--- work that well; the parsing is ok, but dealing with the resulting
--- table is a pain because we need to work inside-out recursively
-
-local dummy_path_expr = "^!*unset/*$"
-
-local function do_first(a,b)
+local function f_first(a,b)
local t, n = { }, 0
for s in gmatch(b,"[^,]+") do
- n = n + 1
- t[n] = a .. s
+ n = n + 1 ; t[n] = a .. s
end
- return "{" .. concat(t,",") .. "}"
+ return concat(t,",")
end
-local function do_second(a,b)
+local function f_second(a,b)
local t, n = { }, 0
for s in gmatch(a,"[^,]+") do
- n = n + 1
- t[n] = s .. b
+ n = n + 1 ; t[n] = s .. b
end
- return "{" .. concat(t,",") .. "}"
+ return concat(t,",")
end
-local function do_both(a,b)
+local function f_both(a,b)
local t, n = { }, 0
for sa in gmatch(a,"[^,]+") do
for sb in gmatch(b,"[^,]+") do
- n = n + 1
- t[n] = sa .. sb
+ n = n + 1 ; t[n] = sa .. sb
end
end
- return "{" .. concat(t,",") .. "}"
+ return concat(t,",")
end
-local function do_three(a,b,c)
- return a .. b.. c
-end
+local left = P("{")
+local right = P("}")
+local var = P((1 - S("{}" ))^0)
+local set = P((1 - S("{},"))^0)
+local other = P(1)
-local stripper_1 = lpeg.stripper("{}@")
+local l_first = Cs( ( Cc("{") * (C(set) * left * C(var) * right / f_first) * Cc("}") + other )^0 )
+local l_second = Cs( ( Cc("{") * (left * C(var) * right * C(set) / f_second) * Cc("}") + other )^0 )
+local l_both = Cs( ( Cc("{") * (left * C(var) * right * left * C(var) * right / f_both) * Cc("}") + other )^0 )
+local l_rest = Cs( ( left * var * (left/"") * var * (right/"") * var * right + other )^0 )
-local replacer_1 = lpeg.replacer {
- { ",}", ",@}" },
- { "{,", "{@," },
-}
+local stripper_1 = lpeg.stripper ("{}@")
+local replacer_1 = lpeg.replacer { { ",}", ",@}" }, { "{,", "{@," }, }
-local function splitpathexpr(str, newlist, validate)
- -- no need for further optimization as it is only called a
- -- few times, we can use lpeg for the sub
+local function splitpathexpr(str, newlist, validate) -- I couldn't resist lpegging it (nice exercise).
if trace_expansions then
report_resolvers("expanding variable '%s'",str)
end
local t, ok, done = newlist or { }, false, false
local n = #t
str = lpegmatch(replacer_1,str)
- while true do
- done = false
- while true do
- str, ok = gsub(str,"([^{},]+){([^{}]+)}",do_first)
- if ok > 0 then done = true else break end
- end
- while true do
- str, ok = gsub(str,"{([^{}]+)}([^{},]+)",do_second)
- if ok > 0 then done = true else break end
- end
- while true do
- str, ok = gsub(str,"{([^{}]+)}{([^{}]+)}",do_both)
- if ok > 0 then done = true else break end
- end
- str, ok = gsub(str,"({[^{}]*){([^{}]+)}([^{}]*})",do_three)
- if ok > 0 then done = true end
- if not done then break end
- end
+ repeat local old = str
+ repeat local old = str ; str = lpegmatch(l_first, str) until old == str
+ repeat local old = str ; str = lpegmatch(l_second,str) until old == str
+ repeat local old = str ; str = lpegmatch(l_both, str) until old == str
+ repeat local old = str ; str = lpegmatch(l_rest, str) until old == str
+ until old == str -- or not find(str,"{")
str = lpegmatch(stripper_1,str)
if validate then
for s in gmatch(str,"[^,]+") do
s = validate(s)
if s then
- n = n + 1
- t[n] = s
+ n = n + 1 ; t[n] = s
end
end
else
for s in gmatch(str,"[^,]+") do
- n = n + 1
- t[n] = s
+ n = n + 1 ; t[n] = s
end
end
if trace_expansions then
@@ -9495,50 +9527,23 @@ local function splitpathexpr(str, newlist, validate)
return t
end
+-- We could make the previous one public.
+
local function validate(s)
- local isrecursive = find(s,"//$")
- s = collapsepath(s)
- if isrecursive then
- s = s .. "//"
- end
- return s ~= "" and not find(s,dummy_path_expr) and s
+ s = collapsepath(s) -- already keeps the //
+ return s ~= "" and not find(s,"^!*unset/*$") and s
end
resolvers.validatedpath = validate -- keeps the trailing //
-function resolvers.expandedpathfromlist(pathlist) -- maybe not a list, just a path
- -- a previous version fed back into pathlist
- local newlist, ok = { }, false
+function resolvers.expandedpathfromlist(pathlist)
+ local newlist = { }
for k=1,#pathlist do
- if find(pathlist[k],"[{}]") then
- ok = true
- break
- end
- end
- if ok then
- for k=1,#pathlist do
- splitpathexpr(pathlist[k],newlist,validate)
- end
- else
- local n = 0
- for k=1,#pathlist do
- for p in gmatch(pathlist[k],"([^,]+)") do
- p = validate(p)
- if p ~= "" then
- n = n + 1
- newlist[n] = p
- end
- end
- end
+ splitpathexpr(pathlist[k],newlist,validate)
end
return newlist
end
--- We also put some cleanup code here.
-
-
-
-
local cleanup = lpeg.replacer {
{ "!" , "" },
{ "\\" , "/" },
@@ -9576,14 +9581,13 @@ end
-- This one strips quotes and funny tokens.
+local expandhome = P("~") / "$HOME" -- environment.homedir
-local expandhome = lpegP("~") / "$HOME" -- environment.homedir
+local dodouble = P('"')/"" * (expandhome + (1 - P('"')))^0 * P('"')/""
+local dosingle = P("'")/"" * (expandhome + (1 - P("'")))^0 * P("'")/""
+local dostring = (expandhome + 1 )^0
-local dodouble = lpegP('"')/"" * (expandhome + (1 - lpegP('"')))^0 * lpegP('"')/""
-local dosingle = lpegP("'")/"" * (expandhome + (1 - lpegP("'")))^0 * lpegP("'")/""
-local dostring = (expandhome + 1 )^0
-
-local stripper = lpegCs(
+local stripper = Cs(
lpegpatterns.unspacer * (dosingle + dodouble + dostring) * lpegpatterns.unspacer
)
@@ -9599,7 +9603,9 @@ end
local cache = { }
-local splitter = lpegCt(lpeg.splitat(lpegS(ostype == "windows" and ";" or ":;"))) -- maybe add ,
+local splitter = Ct(lpeg.splitat(S(ostype == "windows" and ";" or ":;"))) -- maybe add ,
+
+local backslashswapper = lpeg.replacer("\\","/")
local function splitconfigurationpath(str) -- beware, this can be either a path or a { specification }
if str then
@@ -9608,8 +9614,7 @@ local function splitconfigurationpath(str) -- beware, this can be either a path
if str == "" then
found = { }
else
- str = gsub(str,"\\","/")
- local split = lpegmatch(splitter,str)
+ local split = lpegmatch(splitter,lpegmatch(backslashswapper,str)) -- can be combined
found = { }
local noffound = 0
for i=1,#split do
@@ -9658,57 +9663,62 @@ end
-local weird = lpegP(".")^1 + lpeg.anywhere(lpegS("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t"))
+local weird = P(".")^1 + lpeg.anywhere(S("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t"))
-function resolvers.scanfiles(specification)
- if trace_locating then
- report_resolvers("scanning path '%s'",specification)
- end
- local attributes, directory = lfs.attributes, lfs.dir
- local files = { __path__ = specification }
- local n, m, r = 0, 0, 0
- local function scan(spec,path)
- local full = (path == "" and spec) or (spec .. path .. '/')
- local dirs = { }
- for name in directory(full) do
- if not lpegmatch(weird,name) then
- local mode = attributes(full..name,'mode')
- if mode == 'file' then
- n = n + 1
- local f = files[name]
- if f then
- if type(f) == 'string' then
- files[name] = { f, path }
- else
- f[#f+1] = path
- end
- else -- probably unique anyway
- files[name] = path
- local lower = lower(name)
- if name ~= lower then
- files["remap:"..lower] = name
- r = r + 1
- end
- end
- elseif mode == 'directory' then
- m = m + 1
- if path ~= "" then
- dirs[#dirs+1] = path..'/'..name
+local attributes, directory = lfs.attributes, lfs.dir
+
+local function scan(files,spec,path,n,m,r)
+ local full = (path == "" and spec) or (spec .. path .. '/')
+ local dirs, nofdirs = { }, 0
+ for name in directory(full) do
+ if not lpegmatch(weird,name) then
+ local mode = attributes(full..name,'mode')
+ if mode == 'file' then
+ n = n + 1
+ local f = files[name]
+ if f then
+ if type(f) == 'string' then
+ files[name] = { f, path }
else
- dirs[#dirs+1] = name
+ f[#f+1] = path
+ end
+ else -- probably unique anyway
+ files[name] = path
+ local lower = lower(name)
+ if name ~= lower then
+ files["remap:"..lower] = name
+ r = r + 1
end
end
+ elseif mode == 'directory' then
+ m = m + 1
+ nofdirs = nofdirs + 1
+ if path ~= "" then
+ dirs[nofdirs] = path..'/'..name
+ else
+ dirs[nofdirs] = name
+ end
end
end
- if #dirs > 0 then
- sort(dirs)
- for i=1,#dirs do
- scan(spec,dirs[i])
- end
+ end
+ if nofdirs > 0 then
+ sort(dirs)
+ for i=1,nofdirs do
+ files, n, m, r = scan(files,spec,dirs[i],n,m,r)
end
end
- scan(specification .. '/',"")
- files.__files__, files.__directories__, files.__remappings__ = n, m, r
+ return files, n, m, r
+end
+
+function resolvers.scanfiles(path)
+ if trace_locating then
+ report_resolvers("scanning path '%s'",path)
+ end
+ local files, n, m, r = scan({ },path .. '/',"",0,0,0)
+ files.__path__ = path
+ files.__files__ = n
+ files.__directories__ = m
+ files.__remappings__ = r
if trace_locating then
report_resolvers("%s files found on %s directories with %s uppercase remappings",n,m,r)
end
@@ -10399,9 +10409,15 @@ if not modules then modules = { } end modules ['data-met'] = {
license = "see context related readme files"
}
-local find = string.find
+local find, format = string.find, string.format
+local sequenced = table.sequenced
+local addurlscheme, urlhashed = url.addscheme, url.hashed
+
+local trace_locating = false
+
+trackers.register("resolvers.locating", function(v) trace_methods = v end)
+trackers.register("resolvers.methods", function(v) trace_methods = v end)
-local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
local report_resolvers = logs.new("resolvers")
@@ -10409,41 +10425,109 @@ local allocate = utilities.storage.allocate
local resolvers = resolvers
-resolvers.concatinators = allocate ()
-resolvers.locators = allocate { notfound = { nil } } -- locate databases
-resolvers.hashers = allocate { notfound = { nil } } -- load databases
-resolvers.generators = allocate { notfound = { nil } } -- generate databases
+local registered = { }
-function resolvers.splitmethod(filename) -- todo: trigger by suffix
+local function splitmethod(filename) -- todo: filetype in specification
if not filename then
- return { } -- safeguard
- elseif type(filename) == "table" then
+ return { scheme = "unknown", original = filename }
+ end
+ if type(filename) == "table" then
return filename -- already split
- elseif not find(filename,"://") then
- return { scheme="file", path = filename, original = filename } -- quick hack
+ end
+ filename = file.collapsepath(filename)
+ if not find(filename,"://") then
+ return { scheme = "file", path = filename, original = filename, filename = filename }
+ end
+ local specification = url.hashed(filename)
+ if not specification.scheme or specification.scheme == "" then
+ return { scheme = "file", path = filename, original = filename, filename = filename }
else
- return url.hashed(filename)
+ return specification
end
end
-function resolvers.methodhandler(what, filename, filetype) -- ...
- filename = file.collapsepath(filename)
- local specification = (type(filename) == "string" and resolvers.splitmethod(filename)) or filename -- no or { }, let it bomb
- local scheme = specification.scheme
- local resolver = resolvers[what]
- if resolver[scheme] then
- if trace_locating then
- report_resolvers("using special handler for '%s' -> '%s' -> '%s'",specification.original,what,table.sequenced(specification))
+resolvers.splitmethod = splitmethod -- bad name but ok
+
+-- the second argument is always analyzed (saves time later on) and the original
+-- gets passed as original but also as argument
+
+local function methodhandler(what,first,...) -- filename can be nil or false
+ local method = registered[what]
+ if method then
+ local how, namespace = method.how, method.namespace
+ if how == "uri" or how == "url" then
+ local specification = splitmethod(first)
+ local scheme = specification.scheme
+ local resolver = namespace and namespace[scheme]
+ if resolver then
+ if trace_methods then
+ report_resolvers("resolver: method=%s, how=%s, scheme=%s, argument=%s",what,how,scheme,first)
+ end
+ return resolver(specification,...)
+ else
+ resolver = namespace.default or namespace.file
+ if resolver then
+ if trace_methods then
+ report_resolvers("resolver: method=%s, how=%s, default, argument=%s",what,how,first)
+ end
+ return resolver(specification,...)
+ elseif trace_methods then
+ report_resolvers("resolver: method=%s, how=%s, no handler",what,how)
+ end
+ end
+ elseif how == "tag" then
+ local resolver = namespace and namespace[first]
+ if resolver then
+ if trace_methods then
+ report_resolvers("resolver: method=%s, how=%s, tag=%s",what,how,first)
+ end
+ return resolver(...)
+ else
+ resolver = namespace.default or namespace.file
+ if resolver then
+ if trace_methods then
+ report_resolvers("resolver: method=%s, how=%s, default",what,how)
+ end
+ return resolver(...)
+ elseif trace_methods then
+ report_resolvers("resolver: method=%s, how=%s, unknown",what,how)
+ end
+ end
end
- return resolver[scheme](filename,filetype,specification) -- todo: query
else
- if trace_locating then
- report_resolvers("no handler for '%s' -> '%s' -> '%s'",specification.original,what,table.sequenced(specification))
+ report_resolvers("resolver: method=%s, unknown",what)
+ end
+end
+
+resolvers.methodhandler = methodhandler
+
+function resolvers.registermethod(name,namespace,how)
+ registered[name] = { how = how or "tag", namespace = namespace }
+ namespace["byscheme"] = function(scheme,filename,...)
+ if scheme == "file" then
+ return methodhandler(name,filename,...)
+ else
+ return methodhandler(name,addurlscheme(filename,scheme),...)
end
- return resolver.tex(filename,filetype) -- todo: specification
end
end
+local concatinators = allocate { notfound = file.join } -- concatinate paths
+local locators = allocate { notfound = function() end } -- locate databases
+local hashers = allocate { notfound = function() end } -- load databases
+local generators = allocate { notfound = function() end } -- generate databases
+
+resolvers.concatinators = concatinators
+resolvers.locators = locators
+resolvers.hashers = hashers
+resolvers.generators = generators
+
+local registermethod = resolvers.registermethod
+
+registermethod("concatinators",concatinators,"tag")
+registermethod("locators", locators, "uri")
+registermethod("hashers", hashers, "uri")
+registermethod("generators", generators, "uri")
end -- of closure
@@ -10471,11 +10555,11 @@ local concat, insert, sortedkeys = table.concat, table.insert, table.sortedkeys
local next, type = next, type
local os = os
-local lpegP, lpegS, lpegR, lpegC, lpegCc, lpegCs, lpegCt = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.Cc, lpeg.Cs, lpeg.Ct
+local P, S, R, C, Cc, Cs, Ct, Carg = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.Cc, lpeg.Cs, lpeg.Ct, lpeg.Carg
local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
local filedirname, filebasename, fileextname, filejoin = file.dirname, file.basename, file.extname, file.join
-local collapsepath = file.collapsepath
+local collapsepath, joinpath = file.collapsepath, file.joinpath
local allocate = utilities.storage.allocate
local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
@@ -10489,6 +10573,7 @@ local resolvers = resolvers
local expandedpathfromlist = resolvers.expandedpathfromlist
local checkedvariable = resolvers.checkedvariable
local splitconfigurationpath = resolvers.splitconfigurationpath
+local methodhandler = resolvers.methodhandler
local initializesetter = utilities.setters.initialize
@@ -10502,12 +10587,12 @@ resolvers.luacnfspec = '{$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local
resolvers.luacnfname = 'texmfcnf.lua'
resolvers.luacnfstate = "unknown"
-local unset_variable = "unset"
+local unset_variable = "unset"
-local formats = resolvers.formats
-local suffixes = resolvers.suffixes
-local dangerous = resolvers.dangerous
-local suffixmap = resolvers.suffixmap
+local formats = resolvers.formats
+local suffixes = resolvers.suffixes
+local dangerous = resolvers.dangerous
+local suffixmap = resolvers.suffixmap
resolvers.defaultsuffixes = { "tex" } -- "mkiv", "cld" -- too tricky
@@ -10552,7 +10637,7 @@ function resolvers.newinstance()
end
-function resolvers.setinstance(someinstance)
+function resolvers.setinstance(someinstance) -- only one instance is active
instance = someinstance
resolvers.instance = someinstance
return someinstance
@@ -10574,7 +10659,7 @@ function resolvers.setenv(key,value)
end
end
-function resolvers.getenv(key)
+local function getenv(key)
local value = instance.environment[key]
if value and value ~= "" then
return value
@@ -10584,23 +10669,55 @@ function resolvers.getenv(key)
end
end
-resolvers.env = resolvers.getenv
+resolvers.getenv = getenv
+resolvers.env = getenv
+
+local function resolve(key)
+ local value = instance.variables[key] or ""
+ return (value ~= "" and value) or getenv(key) or ""
+end
+
+local dollarstripper = lpeg.stripper("$")
+local inhibitstripper = P("!")^0 * Cs(P(1)^0)
+local backslashswapper = lpeg.replacer("\\","/")
+
+local somevariable = P("$") / ""
+local somekey = C(R("az","AZ","09","__","--")^1)
+local somethingelse = P(";") * ((1-S("!{}/\\"))^1 * P(";") / "")
+ + P(";") * (P(";") / "")
+ + P(1)
+
+local pattern = Cs( (somevariable * (somekey/resolve) + somethingelse)^1 )
local function expandvars(lst) -- simple vars
- local variables, getenv = instance.variables, resolvers.getenv
- local function resolve(a)
- local va = variables[a] or ""
- return (va ~= "" and va) or getenv(a) or ""
- end
for k=1,#lst do
- local var = lst[k]
- var = gsub(var,"%$([%a%d%_%-]+)",resolve)
- var = gsub(var,";+",";")
- var = gsub(var,";[!{}/\\]+;",";")
- lst[k] = var
+ local lk = lst[k]
+ lst[k] = lpegmatch(pattern,lk) or lk
+ end
+end
+
+
+local slash = P("/")
+
+local pattern = Cs (
+ Cc("^") * (
+ Cc("%") * S(".-")
+ + slash^2 * P(-1) / "/.*"
+ + slash^2 / "/.-/"
+ + (1-slash) * P(-1) * Cc("/")
+ + P(1)
+ )^1 * Cc("$")
+)
+
+local function makepathexpression(str)
+ if str == "." then
+ return "^%./$"
+ else
+ return lpegmatch(pattern,str)
end
end
+
local function resolve(key)
local value = instance.variables[key]
if value and value ~= "" then
@@ -10614,22 +10731,21 @@ local function resolve(key)
return e ~= nil and e ~= "" and checkedvariable(e) or ""
end
+local pattern = Cs( (somevariable * (somekey/resolve) + somethingelse)^1 )
+
local function expandedvariable(var) -- simple vars
- var = gsub(var,"%$([%a%d%_%-]+)",resolve)
- var = gsub(var,";+",";")
- var = gsub(var,";[!{}/\\]+;",";")
- return var
+ return lpegmatch(pattern,var) or var
end
+
local function entry(entries,name)
if name and name ~= "" then
- name = gsub(name,'%$','')
- -- local result = entries[name..'.'..instance.progname] or entries[name]
+ name = lpegmatch(dollarstripper,name)
local result = entries[instance.progname .. '.' .. name] or entries[name]
if result then
return result
else
- result = resolvers.getenv(name)
+ result = getenv(name)
if result then
instance.variables[name] = result
resolvers.expandvariables()
@@ -10642,8 +10758,7 @@ end
local function is_entry(entries,name)
if name and name ~= "" then
- name = gsub(name,'%$','')
- -- return (entries[name..'.'..instance.progname] or entries[name]) ~= nil
+ name = lpegmatch(dollarstripper,name)
return (entries[instance.progname .. '.' .. name] or entries[name]) ~= nil
else
return false
@@ -10654,7 +10769,7 @@ local function reportcriticalvariables()
if trace_locating then
for i=1,#resolvers.criticalvars do
local v = resolvers.criticalvars[i]
- report_resolvers("variable '%s' set to '%s'",v,resolvers.getenv(v) or "unknown")
+ report_resolvers("variable '%s' set to '%s'",v,getenv(v) or "unknown")
end
report_resolvers()
end
@@ -10664,7 +10779,7 @@ end
local function identify_configuration_files()
local specification = instance.specification
if #specification == 0 then
- local cnfspec = resolvers.getenv('TEXMFCNF')
+ local cnfspec = getenv('TEXMFCNF')
if cnfspec == "" then
cnfspec = resolvers.luacnfspec
resolvers.luacnfstate = "default"
@@ -10736,7 +10851,6 @@ local function load_configuration_files()
end
end
setups[pathname] = t
-
if resolvers.luacnfstate == "default" then
-- the following code is not tested
local cnfspec = t["TEXMFCNF"]
@@ -10798,63 +10912,30 @@ end
-- database loading
--- locators
-
-function resolvers.locatedatabase(specification)
- return resolvers.methodhandler('locators', specification)
-end
-
-function resolvers.locators.tex(specification)
- if specification and specification ~= '' and lfs.isdir(specification) then
- if trace_locating then
- report_resolvers("tex locator '%s' found",specification)
- end
- resolvers.appendhash('file',specification,filename,true) -- cache
- elseif trace_locating then
- report_resolvers("tex locator '%s' not found",specification)
- end
-end
-
--- hashers
-
-function resolvers.hashdatabase(tag,name)
- return resolvers.methodhandler('hashers',tag,name)
-end
-
local function load_file_databases()
instance.loaderror, instance.files = false, allocate()
if not instance.renewcache then
local hashes = instance.hashes
for k=1,#hashes do
local hash = hashes[k]
- resolvers.hashdatabase(hash.tag,hash.name)
+ resolvers.hashers.byscheme(hash.type,hash.name)
if instance.loaderror then break end
end
end
end
-function resolvers.hashers.tex(tag,name) -- used where?
- local content = caches.loadcontent(tag,'files')
- if content then
- instance.files[tag] = content
- else
- instance.files[tag] = { }
- instance.loaderror = true
- end
-end
-
local function locate_file_databases()
-- todo: cache:// and tree:// (runtime)
local texmfpaths = resolvers.expandedpathlist('TEXMF')
for i=1,#texmfpaths do
local path = collapsepath(texmfpaths[i])
- local stripped = gsub(path,"^!!","")
- local runtime = stripped == path
- path = resolvers.cleanpath(path)
+ local stripped = lpegmatch(inhibitstripper,path)
if stripped ~= "" then
+ local runtime = stripped == path
+ path = resolvers.cleanpath(path)
if lfs.isdir(path) then
local spec = resolvers.splitmethod(stripped)
- if spec.scheme == "cache" then
+ if spec.scheme == "cache" or spec.scheme == "file" then
stripped = spec.path
elseif runtime and (spec.noscheme or spec.scheme == "file") then
stripped = "tree:///" .. stripped
@@ -10866,7 +10947,7 @@ local function locate_file_databases()
report_resolvers("locating list of '%s' (cached)",path)
end
end
- resolvers.locatedatabase(stripped) -- nothing done with result
+ methodhandler('locators',stripped) -- nothing done with result
else
if trace_locating then
if runtime then
@@ -10885,8 +10966,9 @@ end
local function generate_file_databases()
local hashes = instance.hashes
- for i=1,#hashes do
- resolvers.methodhandler('generators',hashes[i].tag)
+ for k=1,#hashes do
+ local hash = hashes[k]
+ methodhandler('generators',hash.name)
end
if trace_locating then
report_resolvers()
@@ -10896,10 +10978,13 @@ end
local function save_file_databases() -- will become cachers
for i=1,#instance.hashes do
local hash = instance.hashes[i]
- local cachename = hash.tag
+ local cachename = hash.name
if hash.cache then
local content = instance.files[cachename]
caches.collapsecontent(content)
+ if trace_locating then
+ report_resolvers("saving tree '%s'",cachename)
+ end
caches.savecontent(cachename,"files",content)
elseif trace_locating then
report_resolvers("not saving runtime tree '%s'",cachename)
@@ -10923,23 +11008,22 @@ local function load_databases()
end
end
-function resolvers.appendhash(type,tag,name,cache)
+function resolvers.appendhash(type,name,cache)
if trace_locating then
- report_resolvers("hash '%s' appended",tag)
+ report_resolvers("hash '%s' appended",name)
end
- insert(instance.hashes, { type = type, tag = tag, name = name, cache = cache } )
+ insert(instance.hashes, { type = type, name = name, cache = cache } )
end
-function resolvers.prependhash(type,tag,name,cache)
+function resolvers.prependhash(type,name,cache)
if trace_locating then
- report_resolvers("hash '%s' prepended",tag)
+ report_resolvers("hash '%s' prepended",name)
end
- insert(instance.hashes, 1, { type = type, tag = tag, name = name, cache = cache } )
+ insert(instance.hashes, 1, { type = type, name = name, cache = cache } )
end
function resolvers.extendtexmfvariable(specification) -- crap, we could better prepend the hash
--- local t = resolvers.expandedpathlist('TEXMF') -- full expansion
- local t = resolvers.splitpath(resolvers.getenv('TEXMF'))
+ local t = resolvers.splitpath(getenv('TEXMF'))
insert(t,1,specification)
local newspec = concat(t,";")
if instance.environment["TEXMF"] then
@@ -10953,10 +11037,6 @@ function resolvers.extendtexmfvariable(specification) -- crap, we could better p
reset_hashes()
end
-function resolvers.generators.tex(specification,tag)
- instance.files[tag or specification] = resolvers.scanfiles(specification)
-end
-
function resolvers.splitexpansions()
local ie = instance.expansions
for k,v in next, ie do
@@ -10986,9 +11066,20 @@ function resolvers.datastate()
return caches.contentstate()
end
+local function resolve(a)
+ return instance.expansions[a] or getenv(a)
+end
+
+local cleaner = P("\\") / "/" + P(";") * S("!{}/\\")^0 * P(";")^1 / ";"
+
+local variable = R("az","AZ","09","__","--")^1 / resolve
+ variable = (P("$")/"") * (variable + (P("{")/"") * variable * (P("}")/""))
+
+ cleaner = Cs((cleaner + P(1))^0)
+ variable = Cs((variable + P(1))^0)
+
function resolvers.expandvariables()
local expansions, environment, variables = allocate(), instance.environment, instance.variables
- local getenv = resolvers.getenv
instance.expansions = expansions
local engine, progname = instance.engine, instance.progname
if type(engine) ~= "string" then instance.engine, engine = "", "" end
@@ -10996,12 +11087,7 @@ function resolvers.expandvariables()
if engine ~= "" then environment['engine'] = engine end
if progname ~= "" then environment['progname'] = progname end
for k,v in next, environment do
- -- local a, b = match(k,"^(%a+)%_(.*)%s*$") -- too many vars have an _ in the name
- -- if a and b then -- so let's forget about it; it was a
- -- expansions[a..'.'..b] = v -- hack anyway for linux and not needed
- -- else -- anymore as we now have directives
- expansions[k] = v
- -- end
+ expansions[k] = v
end
for k,v in next, environment do -- move environment to expansions (variables are already in there)
if not expansions[k] then expansions[k] = v end
@@ -11009,26 +11095,19 @@ function resolvers.expandvariables()
for k,v in next, variables do -- move variables to expansions
if not expansions[k] then expansions[k] = v end
end
- local busy = false
- local function resolve(a)
- busy = true
- return expansions[a] or getenv(a)
- end
- while true do
- busy = false
+ repeat
+ local busy = false
for k,v in next, expansions do
- local s, n = gsub(v,"%$([%a%d%_%-]+)",resolve)
- local s, m = gsub(s,"%$%{([%a%d%_%-]+)%}",resolve)
- if n > 0 or m > 0 then
- s = gsub(s,";+",";")
- s = gsub(s,";[!{}/\\]+;",";")
- expansions[k]= s
+ local s = lpegmatch(variable,v)
+ if s ~= v then
+ busy = true
+ expansions[k] = s
end
end
- if not busy then break end
- end
+ until not busy
+
for k,v in next, expansions do
- expansions[k] = gsub(v,"\\", '/')
+ expansions[k] = lpegmatch(cleaner,v)
end
end
@@ -11055,7 +11134,7 @@ function resolvers.unexpandedpathlist(str)
end
function resolvers.unexpandedpath(str)
- return file.joinpath(resolvers.unexpandedpathlist(str))
+ return joinpath(resolvers.unexpandedpathlist(str))
end
local done = { }
@@ -11169,7 +11248,7 @@ function resolvers.cleanpathlist(str)
end
function resolvers.expandpath(str)
- return file.joinpath(resolvers.expandedpathlist(str))
+ return joinpath(resolvers.expandedpathlist(str))
end
function resolvers.expandedpathlist(str)
@@ -11177,7 +11256,7 @@ function resolvers.expandedpathlist(str)
return ep or { } -- ep ?
elseif instance.savelists then
-- engine+progname hash
- str = gsub(str,"%$","")
+ str = lpegmatch(dollarstripper,str)
if not instance.lists[str] then -- cached
local lst = made_list(instance,resolvers.splitpath(resolvers.expansion(str)))
instance.lists[str] = expandedpathfromlist(lst)
@@ -11190,28 +11269,34 @@ function resolvers.expandedpathlist(str)
end
function resolvers.expandedpathlistfromvariable(str) -- brrr
- local tmp = resolvers.variableofformatorsuffix(gsub(str,"%$",""))
- if tmp ~= "" then
- return resolvers.expandedpathlist(tmp)
- else
- return resolvers.expandedpathlist(str)
- end
+ str = lpegmatch(dollarstripper,str)
+ local tmp = resolvers.variableofformatorsuffix(str)
+ return resolvers.expandedpathlist(tmp ~= "" and tmp or str)
end
function resolvers.expandpathfromvariable(str)
- return file.joinpath(resolvers.expandedpathlistfromvariable(str))
+ return joinpath(resolvers.expandedpathlistfromvariable(str))
end
function resolvers.expandbraces(str) -- output variable and brace expansion of STRING
local ori = resolvers.variable(str)
local pth = expandedpathfromlist(resolvers.splitpath(ori))
- return file.joinpath(pth)
+ return joinpath(pth)
end
-resolvers.isreadable = { }
+function resolvers.registerfilehash(name,content,someerror)
+ if content then
+ instance.files[name] = content
+ else
+ instance.files[name] = { }
+ if somerror == true then -- can be unset
+ instance.loaderror = someerror
+ end
+ end
+end
-function resolvers.isreadable.file(name)
- local readable = lfs.isfile(name) -- brrr
+function isreadable(name)
+ local readable = file.is_readable(name)
if trace_detail then
if readable then
report_resolvers("file '%s' is readable",name)
@@ -11222,8 +11307,6 @@ function resolvers.isreadable.file(name)
return readable
end
-resolvers.isreadable.tex = resolvers.isreadable.file
-
-- name
-- name/name
@@ -11244,7 +11327,7 @@ local function collect_files(names)
local hashes = instance.hashes
for h=1,#hashes do
local hash = hashes[h]
- local blobpath = hash.tag
+ local blobpath = hash.name
local files = blobpath and instance.files[blobpath]
if files then
if trace_detail then
@@ -11265,7 +11348,7 @@ local function collect_files(names)
if not dname or find(blobfile,dname) then
local kind = hash.type
local search = filejoin(blobpath,blobfile,bname)
- local result = resolvers.concatinators[hash.type](blobroot,blobfile,bname)
+ local result = methodhandler('concatinators',hash.type,blobroot,blobfile,bname)
if trace_detail then
report_resolvers("match: kind '%s', search '%s', result '%s'",kind,search,result)
end
@@ -11278,7 +11361,7 @@ local function collect_files(names)
if not dname or find(vv,dname) then
local kind = hash.type
local search = filejoin(blobpath,vv,bname)
- local result = resolvers.concatinators[hash.type](blobroot,vv,bname)
+ local result = methodhandler('concatinators',hash.type,blobroot,vv,bname)
if trace_detail then
report_resolvers("match: kind '%s', search '%s', result '%s'",kind,search,result)
end
@@ -11316,6 +11399,8 @@ local function can_be_dir(name) -- can become local
return fakepaths[name] == 1
end
+local preparetreepattern = Cs((P(".")/"%%." + P("-")/"%%-" + P(1))^0 * Cc("$"))
+
local function collect_instance_files(filename,askedformat,allresults) -- todo : plugin (scanners, checkers etc)
local result = { }
local stamp = nil
@@ -11333,7 +11418,7 @@ local function collect_instance_files(filename,askedformat,allresults) -- todo :
end
end
if not dangerous[askedformat] then
- if resolvers.isreadable.file(filename) then
+ if isreadable(filename) then
if trace_detail then
report_resolvers("file '%s' found directly",filename)
end
@@ -11349,7 +11434,7 @@ local function collect_instance_files(filename,askedformat,allresults) -- todo :
end
result = resolvers.findwildcardfiles(filename) -- we can use th elocal
elseif file.is_qualified_path(filename) then
- if resolvers.isreadable.file(filename) then
+ if isreadable(filename) then
if trace_locating then
report_resolvers("qualified name '%s'", filename)
end
@@ -11362,7 +11447,7 @@ local function collect_instance_files(filename,askedformat,allresults) -- todo :
for i=1,#format_suffixes do
local s = format_suffixes[i]
forcedname = filename .. "." .. s
- if resolvers.isreadable.file(forcedname) then
+ if isreadable(forcedname) then
if trace_locating then
report_resolvers("no suffix, forcing format filetype '%s'", s)
end
@@ -11376,7 +11461,7 @@ local function collect_instance_files(filename,askedformat,allresults) -- todo :
-- try to find in tree (no suffix manipulation), here we search for the
-- matching last part of the name
local basename = filebasename(filename)
- local pattern = gsub(filename .. "$","([%.%-])","%%%1")
+ local pattern = lpegmatch(preparetreepattern,filename)
-- messy .. to be sorted out
local savedformat = askedformat
local format = savedformat or ""
@@ -11471,7 +11556,7 @@ local function collect_instance_files(filename,askedformat,allresults) -- todo :
end
for k=1,#wantedfiles do
local fname = wantedfiles[k]
- if fname and resolvers.isreadable.file(fname) then
+ if fname and isreadable(fname) then
filename, done = fname, true
result[#result+1] = filejoin('.',fname)
break
@@ -11497,26 +11582,15 @@ local function collect_instance_files(filename,askedformat,allresults) -- todo :
if trace_detail then
report_resolvers("checking filename '%s'",filename)
end
- -- a bit messy ... esp the doscan setting here
- local doscan
for k=1,#pathlist do
local path = pathlist[k]
- if find(path,"^!!") then doscan = false else doscan = true end
- local pathname = gsub(path,"^!+", '')
+ local pathname = lpegmatch(inhibitstripper,path)
+ local doscan = path == pathname -- no ^!!
done = false
-- using file list
if filelist then
- local expression
-- compare list entries with permitted pattern -- /xx /xx//
- if not find(pathname,"/$") then
- expression = pathname .. "/"
- else
- expression = pathname
- end
- expression = gsub(expression,"([%-%.])","%%%1") -- this also influences
- expression = gsub(expression,"//+$", '/.*') -- later usage of pathname
- expression = gsub(expression,"//", '/.-/') -- not ok for /// but harmless
- expression = "^" .. expression .. "$"
+ local expression = makepathexpression(pathname)
if trace_detail then
report_resolvers("using pattern '%s' for path '%s'",expression,pathname)
end
@@ -11545,7 +11619,8 @@ local function collect_instance_files(filename,askedformat,allresults) -- todo :
end
if not done and doscan then
-- check if on disk / unchecked / does not work at all / also zips
- if resolvers.splitmethod(pathname).scheme == 'file' then -- ?
+ local scheme = url.hasscheme(pathname)
+ if not scheme or scheme == "file" then
local pname = gsub(pathname,"%.%*$",'')
if not find(pname,"%*") then
local ppname = gsub(pname,"/+$","")
@@ -11553,7 +11628,7 @@ local function collect_instance_files(filename,askedformat,allresults) -- todo :
for k=1,#wantedfiles do
local w = wantedfiles[k]
local fname = filejoin(ppname,w)
- if resolvers.isreadable.file(fname) then
+ if isreadable(fname) then
if trace_detail then
report_resolvers("found '%s' by scanning",fname)
end
@@ -11586,9 +11661,6 @@ local function collect_instance_files(filename,askedformat,allresults) -- todo :
return result
end
-resolvers.concatinators.tex = filejoin
-resolvers.concatinators.file = resolvers.concatinators.tex
-
local function findfiles(filename,filetype,allresults)
local result = collect_instance_files(filename,filetype or "",allresults)
if #result == 0 then
@@ -11609,7 +11681,7 @@ function resolvers.findfile(filename,filetype)
end
function resolvers.findpath(filename,filetype)
- return file.dirname(findfiles(filename,filetype,false)[1] or "")
+ return filedirname(findfiles(filename,filetype,false)[1] or "")
end
local function findgivenfiles(filename,allresults)
@@ -11617,7 +11689,7 @@ local function findgivenfiles(filename,allresults)
local hashes = instance.hashes
for k=1,#hashes do
local hash = hashes[k]
- local files = instance.files[hash.tag] or { }
+ local files = instance.files[hash.name] or { }
local blist = files[bname]
if not blist then
local rname = "remap:"..bname
@@ -11629,12 +11701,12 @@ local function findgivenfiles(filename,allresults)
end
if blist then
if type(blist) == 'string' then
- result[#result+1] = resolvers.concatinators[hash.type](hash.tag,blist,bname) or ""
+ result[#result+1] = methodhandler('concatinators',hash.type,hash.name,blist,bname) or ""
if not allresults then break end
else
for kk=1,#blist do
local vv = blist[kk]
- result[#result+1] = resolvers.concatinators[hash.type](hash.tag,vv,bname) or ""
+ result[#result+1] = methodhandler('concatinators',hash.type,hash.name,vv,bname) or ""
if not allresults then break end
end
end
@@ -11657,14 +11729,14 @@ local function doit(path,blist,bname,tag,kind,result,allresults)
if type(blist) == 'string' then
-- make function and share code
if find(lower(blist),path) then
- result[#result+1] = resolvers.concatinators[kind](tag,blist,bname) or ""
+ result[#result+1] = methodhandler('concatinators',kind,tag,blist,bname) or ""
done = true
end
else
for kk=1,#blist do
local vv = blist[kk]
if find(lower(vv),path) then
- result[#result+1] = resolvers.concatinators[kind](tag,vv,bname) or ""
+ result[#result+1] = methodhandler('concatinators',kind,tag,vv,bname) or ""
done = true
if not allresults then break end
end
@@ -11674,30 +11746,25 @@ local function doit(path,blist,bname,tag,kind,result,allresults)
return done
end
+local makewildcard = Cs(
+ (P("^")^0 * P("/") * P(-1) + P(-1)) /".*"
+ + (P("^")^0 * P("/") / "") * (P("*")/".*" + P("-")/"%%-" + P("?")/"."+ P("\\")/"/" + P(1))^0
+)
+
local function findwildcardfiles(filename,allresults) -- todo: remap: and lpeg
local result = { }
- local bname, dname = filebasename(filename), filedirname(filename)
- local path = gsub(dname,"^*/","")
- path = gsub(path,"*",".*")
- path = gsub(path,"-","%%-")
- if dname == "" then
- path = ".*"
- end
- local name = bname
- name = gsub(name,"*",".*")
- name = gsub(name,"-","%%-")
- path = lower(path)
- name = lower(name)
+ local path = lower(lpegmatch(makewildcard,filedirname (filename)))
+ local name = lower(lpegmatch(makewildcard,filebasename(filename)))
local files, done = instance.files, false
if find(name,"%*") then
local hashes = instance.hashes
for k=1,#hashes do
local hash = hashes[k]
- local tag, kind = hash.tag, hash.type
- for kk, hh in next, files[hash.tag] do
+ local hashname, hashtype = hash.name, hash.type
+ for kk, hh in next, files[hashname] do
if not find(kk,"^remap:") then
if find(lower(kk),name) then
- if doit(path,hh,kk,tag,kind,result,allresults) then done = true end
+ if doit(path,hh,kk,hashname,hashtype,result,allresults) then done = true end
if done and not allresults then break end
end
end
@@ -11707,8 +11774,8 @@ local function findwildcardfiles(filename,allresults) -- todo: remap: and lpeg
local hashes = instance.hashes
for k=1,#hashes do
local hash = hashes[k]
- local tag, kind = hash.tag, hash.type
- if doit(path,files[tag][bname],bname,tag,kind,result,allresults) then done = true end
+ local hashname, hashtype = hash.name, hash.type
+ if doit(path,files[hashname][bname],bname,hashname,hashtype,result,allresults) then done = true end
if done and not allresults then break end
end
end
@@ -11779,12 +11846,9 @@ end
-- resolvers.expandvar = resolvers.expansion -- output variable expansion of STRING.
function resolvers.showpath(str) -- output search path for file type NAME
- return file.joinpath(resolvers.expandedpathlist(resolvers.formatofvariable(str)))
+ return joinpath(resolvers.expandedpathlist(resolvers.formatofvariable(str)))
end
--- resolvers.findfile(filename)
--- resolvers.findfile(filename, f.iletype)
-
function resolvers.registerfile(files, name, path)
if files[name] then
if type(files[name]) == 'string' then
@@ -11809,7 +11873,7 @@ function resolvers.dowithvariable(name,func)
end
function resolvers.locateformat(name)
- local barename = gsub(name,"%.%a+$","")
+ local barename = file.removesuffix(name) -- gsub(name,"%.%a+$","")
local fmtname = caches.getfirstreadablefile(barename..".fmt","formats") or ""
if fmtname == "" then
fmtname = resolvers.findfile(barename..".fmt")
@@ -11845,7 +11909,7 @@ function resolvers.dowithfilesintree(pattern,handle,before,after) -- can be a ni
for i=1,#hashes do
local hash = hashes[i]
local blobtype = hash.type
- local blobpath = hash.tag
+ local blobpath = hash.name
if blobpath then
if before then
before(blobtype,blobpath,pattern)
@@ -12020,13 +12084,23 @@ if not modules then modules = { } end modules ['data-inp'] = {
license = "see context related readme files"
}
-local allocate = utilities.storage.allocate
-
+local allocate = utilities.storage.allocate
local resolvers = resolvers
-resolvers.finders = allocate { notfound = { nil } }
-resolvers.openers = allocate { notfound = { nil } }
-resolvers.loaders = allocate { notfound = { false, nil, 0 } }
+local methodhandler = resolvers.methodhandler
+local registermethod = resolvers.registermethod
+
+local finders = allocate { helpers = { }, notfound = function() end }
+local openers = allocate { helpers = { }, notfound = function() end }
+local loaders = allocate { helpers = { }, notfound = function() return false, nil, 0 end }
+
+registermethod("finders", finders, "uri")
+registermethod("openers", openers, "uri")
+registermethod("loaders", loaders, "uri")
+
+resolvers.finders = finders
+resolvers.openers = openers
+resolvers.loaders = loaders
end -- of closure
@@ -12041,8 +12115,134 @@ if not modules then modules = { } end modules ['data-out'] = {
license = "see context related readme files"
}
-resolvers.savers = utilities.storage.allocate { }
+local allocate = utilities.storage.allocate
+local resolvers = resolvers
+
+local registermethod = resolvers.registermethod
+
+local savers = allocate { helpers = { } }
+resolvers.savers = savers
+
+registermethod("savers", savers, "uri")
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+if not modules then modules = { } end modules ['data-fil'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
+
+local report_resolvers = logs.new("resolvers")
+
+local resolvers = resolvers
+
+local finders, openers, loaders, savers = resolvers.finders, resolvers.openers, resolvers.loaders, resolvers.savers
+local locators, hashers, generators, concatinators = resolvers.locators, resolvers.hashers, resolvers.generators, resolvers.concatinators
+
+local checkgarbage = utilities.garbagecollector and utilities.garbagecollector.check
+
+function locators.file(specification)
+ local name = specification.filename
+ if name and name ~= '' and lfs.isdir(name) then
+ if trace_locating then
+ report_resolvers("file locator '%s' found",name)
+ end
+ resolvers.appendhash('file',name,true) -- cache
+ elseif trace_locating then
+ report_resolvers("file locator '%s' not found",name)
+ end
+end
+
+function hashers.file(specification)
+ local name = specification.filename
+ local content = caches.loadcontent(name,'files')
+ resolvers.registerfilehash(name,content,content==nil)
+end
+
+function generators.file(specification)
+ local name = specification.filename
+ local content = resolvers.scanfiles(name)
+ resolvers.registerfilehash(name,content,true)
+end
+
+concatinators.file = file.join
+
+function finders.file(specification,filetype)
+ local filename = specification.filename
+ local foundname = resolvers.findfile(filename,filetype)
+ if foundname and foundname ~= "" then
+ if trace_locating then
+ report_resolvers("file finder: '%s' found",filename)
+ end
+ return foundname
+ else
+ if trace_locating then
+            report_resolvers("file finder: '%s' not found",filename)
+ end
+ return finders.notfound()
+ end
+end
+
+-- The default textopener will be overloaded later on.
+
+function openers.helpers.textopener(tag,filename,f)
+ return {
+ reader = function() return f:read () end,
+ close = function() return f:close() end,
+ }
+end
+
+function openers.file(specification,filetype)
+ local filename = specification.filename
+ if filename and filename ~= "" then
+ local f = io.open(filename,"r")
+ if f then
+ logs.show_open(filename) -- todo
+ if trace_locating then
+ report_resolvers("file opener, '%s' opened",filename)
+ end
+ return openers.helpers.textopener("file",filename,f)
+ end
+ end
+ if trace_locating then
+ report_resolvers("file opener, '%s' not found",filename)
+ end
+ return openers.notfound()
+end
+
+function loaders.file(specification,filetype)
+ local filename = specification.filename
+ if filename and filename ~= "" then
+ local f = io.open(filename,"rb")
+ if f then
+ logs.show_load(filename)
+ if trace_locating then
+ report_resolvers("file loader, '%s' loaded",filename)
+ end
+ local s = f:read("*a")
+ if checkgarbage then
+ checkgarbage(#s)
+ end
+ f:close()
+ if s then
+ return true, s, #s
+ end
+ end
+ end
+ if trace_locating then
+ report_resolvers("file loader, '%s' not found",filename)
+ end
+ return loaders.notfound()
+end
end -- of closure
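-- Illustrative sketch, not part of the patch: the table returned by the
-- textopener above is consumed line by line via reader() and released with
-- close(). The file name is an assumption.

local handle = resolvers.openers.file { filename = "somefile.tex" }
if handle then
    for line in handle.reader do
        -- each reader() call yields the next line, nil at end of file
    end
    handle.close()
end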
@@ -12301,10 +12501,9 @@ if not modules then modules = { } end modules ['data-zip'] = {
license = "see context related readme files"
}
--- to be redone using the more recent schemes mechanism
+-- partly redone .. needs testing
local format, find, match = string.format, string.find, string.match
-local unpack = unpack or table.unpack
local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
@@ -12327,9 +12526,6 @@ local archives = zip.archives
zip.registeredfiles = zip.registeredfiles or { }
local registeredfiles = zip.registeredfiles
-local finders, openers, loaders = resolvers.finders, resolvers.openers, resolvers.loaders
-local locators, hashers, concatinators = resolvers.locators, resolvers.hashers, resolvers.concatinators
-
local function validzip(str) -- todo: use url splitter
if not find(str,"^zip://") then
return "zip:///" .. str
@@ -12359,159 +12555,159 @@ function zip.closearchive(name)
end
end
-function locators.zip(specification) -- where is this used? startup zips (untested)
- specification = resolvers.splitmethod(specification)
- local zipfile = specification.path
- local zfile = zip.openarchive(name) -- tricky, could be in to be initialized tree
+function resolvers.locators.zip(specification)
+ local archive = specification.filename
+ local zipfile = archive and archive ~= "" and zip.openarchive(archive) -- tricky, could be in to be initialized tree
if trace_locating then
- if zfile then
- report_resolvers("zip locator, archive '%s' found",specification.original)
+ if zipfile then
+ report_resolvers("zip locator, archive '%s' found",archive)
else
- report_resolvers("zip locator, archive '%s' not found",specification.original)
+ report_resolvers("zip locator, archive '%s' not found",archive)
end
end
end
-function hashers.zip(tag,name)
+function resolvers.hashers.zip(specification)
+ local archive = specification.filename
if trace_locating then
- report_resolvers("loading zip file '%s' as '%s'",name,tag)
+ report_resolvers("loading zip file '%s'",archive)
end
- resolvers.usezipfile(format("%s?tree=%s",tag,name))
+ resolvers.usezipfile(specification.original)
end
-function concatinators.zip(tag,path,name)
+function resolvers.concatinators.zip(zipfile,path,name) -- ok ?
if not path or path == "" then
- return format('%s?name=%s',tag,name)
+ return format('%s?name=%s',zipfile,name)
else
- return format('%s?name=%s/%s',tag,path,name)
+ return format('%s?name=%s/%s',zipfile,path,name)
end
end
-function resolvers.isreadable.zip(name)
- return true
-end
-
-function finders.zip(specification,filetype)
- specification = resolvers.splitmethod(specification)
- if specification.path then
- local q = url.query(specification.query)
- if q.name then
- local zfile = zip.openarchive(specification.path)
+function resolvers.finders.zip(specification)
+ local original = specification.original
+ local archive = specification.filename
+ if archive then
+ local query = url.query(specification.query)
+ local queryname = query.name
+ if queryname then
+ local zfile = zip.openarchive(archive)
if zfile then
if trace_locating then
- report_resolvers("zip finder, archive '%s' found",specification.path)
+ report_resolvers("zip finder, archive '%s' found",archive)
end
- local dfile = zfile:open(q.name)
+ local dfile = zfile:open(queryname)
if dfile then
dfile = zfile:close()
if trace_locating then
- report_resolvers("zip finder, file '%s' found",q.name)
+ report_resolvers("zip finder, file '%s' found",queryname)
end
return specification.original
elseif trace_locating then
- report_resolvers("zip finder, file '%s' not found",q.name)
+ report_resolvers("zip finder, file '%s' not found",queryname)
end
elseif trace_locating then
- report_resolvers("zip finder, unknown archive '%s'",specification.path)
+ report_resolvers("zip finder, unknown archive '%s'",archive)
end
end
end
if trace_locating then
- report_resolvers("zip finder, '%s' not found",filename)
+ report_resolvers("zip finder, '%s' not found",original)
end
- return unpack(finders.notfound)
+ return resolvers.finders.notfound()
end
-function openers.zip(specification)
- local zipspecification = resolvers.splitmethod(specification)
- if zipspecification.path then
- local q = url.query(zipspecification.query)
- if q.name then
- local zfile = zip.openarchive(zipspecification.path)
+function resolvers.openers.zip(specification)
+ local original = specification.original
+ local archive = specification.filename
+ if archive then
+ local query = url.query(specification.query)
+ local queryname = query.name
+ if queryname then
+ local zfile = zip.openarchive(archive)
if zfile then
if trace_locating then
- report_resolvers("zip opener, archive '%s' opened",zipspecification.path)
+ report_resolvers("zip opener, archive '%s' opened",archive)
end
- local dfile = zfile:open(q.name)
+ local dfile = zfile:open(queryname)
if dfile then
- logs.show_open(specification)
+ logs.show_open(original)
if trace_locating then
- report_resolvers("zip opener, file '%s' found",q.name)
+ report_resolvers("zip opener, file '%s' found",queryname)
end
- return openers.textopener('zip',specification,dfile)
+ return resolvers.openers.helpers.textopener('zip',original,dfile)
elseif trace_locating then
- report_resolvers("zip opener, file '%s' not found",q.name)
+ report_resolvers("zip opener, file '%s' not found",queryname)
end
elseif trace_locating then
- report_resolvers("zip opener, unknown archive '%s'",zipspecification.path)
+ report_resolvers("zip opener, unknown archive '%s'",archive)
end
end
end
if trace_locating then
- report_resolvers("zip opener, '%s' not found",filename)
+ report_resolvers("zip opener, '%s' not found",original)
end
- return unpack(openers.notfound)
+ return resolvers.openers.notfound()
end
-function loaders.zip(specification)
- specification = resolvers.splitmethod(specification)
- if specification.path then
- local q = url.query(specification.query)
- if q.name then
- local zfile = zip.openarchive(specification.path)
+function resolvers.loaders.zip(specification)
+ local original = specification.original
+ local archive = specification.filename
+ if archive then
+ local query = url.query(specification.query)
+ local queryname = query.name
+ if queryname then
+ local zfile = zip.openarchive(archive)
if zfile then
if trace_locating then
- report_resolvers("zip loader, archive '%s' opened",specification.path)
+ report_resolvers("zip loader, archive '%s' opened",archive)
end
- local dfile = zfile:open(q.name)
+ local dfile = zfile:open(queryname)
if dfile then
- logs.show_load(filename)
+ logs.show_load(original)
if trace_locating then
- report_resolvers("zip loader, file '%s' loaded",filename)
+ report_resolvers("zip loader, file '%s' loaded",original)
end
local s = dfile:read("*all")
dfile:close()
return true, s, #s
elseif trace_locating then
- report_resolvers("zip loader, file '%s' not found",q.name)
+ report_resolvers("zip loader, file '%s' not found",queryname)
end
elseif trace_locating then
- report_resolvers("zip loader, unknown archive '%s'",specification.path)
+ report_resolvers("zip loader, unknown archive '%s'",archive)
end
end
end
if trace_locating then
- report_resolvers("zip loader, '%s' not found",filename)
+ report_resolvers("zip loader, '%s' not found",original)
end
- return unpack(openers.notfound)
+    return resolvers.loaders.notfound()
end
-- zip:///somefile.zip
-- zip:///somefile.zip?tree=texmf-local -> mount
-function resolvers.usezipfile(zipname)
- zipname = validzip(zipname)
- local specification = resolvers.splitmethod(zipname)
- local zipfile = specification.path
- if zipfile and not registeredfiles[zipname] then
- local tree = url.query(specification.query).tree or ""
- local z = zip.openarchive(zipfile)
+function resolvers.usezipfile(archive)
+ local specification = resolvers.splitmethod(archive) -- to be sure
+ local archive = specification.filename
+ if archive and not registeredfiles[archive] then
+ local z = zip.openarchive(archive)
if z then
- local instance = resolvers.instance
+ local tree = url.query(specification.query).tree or ""
if trace_locating then
- report_resolvers("zip registering, registering archive '%s'",zipname)
- end
- statistics.starttiming(instance)
- resolvers.prependhash('zip',zipname,zipfile)
- resolvers.extendtexmfvariable(zipname) -- resets hashes too
- registeredfiles[zipname] = z
- instance.files[zipname] = resolvers.registerzipfile(z,tree or "")
- statistics.stoptiming(instance)
+ report_resolvers("zip registering, registering archive '%s'",archive)
+ end
+ statistics.starttiming(resolvers.instance)
+ resolvers.prependhash('zip',archive)
+ resolvers.extendtexmfvariable(archive) -- resets hashes too
+ registeredfiles[archive] = z
+        resolvers.instance.files[archive] = resolvers.registerzipfile(z,tree)
+ statistics.stoptiming(resolvers.instance)
elseif trace_locating then
- report_resolvers("zip registering, unknown archive '%s'",zipname)
+ report_resolvers("zip registering, unknown archive '%s'",archive)
end
elseif trace_locating then
- report_resolvers("zip registering, '%s' not found",zipname)
+ report_resolvers("zip registering, '%s' not found",archive)
end
end
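-- Illustrative sketch, not part of the patch: the zip handlers above expect
-- urls of roughly this shape (archive and file names are assumptions):
--
--   zip:///texmf.zip?name=tex/plain/base/plain.tex  -- a file inside an archive
--   zip:///texmf.zip?tree=texmf                     -- mount an archive as a tree

local specification = resolvers.splitmethod("zip:///texmf.zip?name=oeps.tex")
local query         = url.query(specification.query)
-- specification.filename is the archive fed to zip.openarchive, while
-- query.name (or query.tree) says what to do with it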
@@ -12560,7 +12756,8 @@ if not modules then modules = { } end modules ['data-tre'] = {
-- \input tree://oeps1/**/oeps.tex
local find, gsub, format = string.find, string.gsub, string.format
-local unpack = unpack or table.unpack
+
+local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
local report_resolvers = logs.new("resolvers")
@@ -12568,10 +12765,10 @@ local resolvers = resolvers
local done, found, notfound = { }, { }, resolvers.finders.notfound
-function resolvers.finders.tree(specification,filetype)
- local fnd = found[specification]
- if not fnd then
- local spec = resolvers.splitmethod(specification).path or ""
+function resolvers.finders.tree(specification)
+ local spec = specification.filename
+ local fnd = found[spec]
+ if fnd == nil then
if spec ~= "" then
local path, name = file.dirname(spec), file.basename(spec)
if path == "" then path = "." end
@@ -12585,53 +12782,41 @@ function resolvers.finders.tree(specification,filetype)
for k=1,#hash do
local v = hash[k]
if find(v,pattern) then
- found[specification] = v
+ found[spec] = v
return v
end
end
end
- fnd = unpack(notfound) -- unpack ? why not just notfound[1]
- found[specification] = fnd
+ fnd = notfound() -- false
+ found[spec] = fnd
end
return fnd
end
function resolvers.locators.tree(specification)
- local spec = resolvers.splitmethod(specification)
- local path = spec.path
- if path ~= '' and lfs.isdir(path) then
+ local name = specification.filename
+ if name ~= '' and lfs.isdir(name) then
if trace_locating then
- report_resolvers("tree locator '%s' found (%s)",path,specification)
+ report_resolvers("tree locator '%s' found",name)
end
- resolvers.appendhash('tree',specification,path,false) -- don't cache
+ resolvers.appendhash('tree',name,false) -- don't cache
elseif trace_locating then
- report_resolvers("tree locator '%s' not found",path)
+ report_resolvers("tree locator '%s' not found",name)
end
end
-function resolvers.hashers.tree(tag,name)
+function resolvers.hashers.tree(specification)
+ local name = specification.filename
if trace_locating then
- report_resolvers("analysing tree '%s' as '%s'",name,tag)
+ report_resolvers("analysing tree '%s'",name)
end
- -- todo: maybe share with done above
- local spec = resolvers.splitmethod(tag)
- local path = spec.path
- resolvers.generators.tex(path,tag) -- we share this with the normal tree analyzer
+ resolvers.methodhandler("hashers",name)
end
-function resolvers.generators.tree(tag)
- local spec = resolvers.splitmethod(tag)
- local path = spec.path
- resolvers.generators.tex(path,tag) -- we share this with the normal tree analyzer
-end
-
-function resolvers.concatinators.tree(tag,path,name)
- return file.join(tag,path,name)
-end
-
-resolvers.isreadable.tree = file.isreadable
-resolvers.openers.tree = resolvers.openers.generic
-resolvers.loaders.tree = resolvers.loaders.generic
+resolvers.concatinators.tree = resolvers.concatinators.file
+resolvers.generators.tree = resolvers.generators.file
+resolvers.openers.tree = resolvers.openers.file
+resolvers.loaders.tree = resolvers.loaders.file
end -- of closure
@@ -12654,53 +12839,51 @@ local resolvers = resolvers
local finders, openers, loaders = resolvers.finders, resolvers.openers, resolvers.loaders
-curl = curl or { }
-local curl = curl
+resolvers.curl = resolvers.curl or { }
+local curl = resolvers.curl
local cached = { }
-function curl.fetch(protocol, name) -- todo: use socket library
- local cleanname = gsub(name,"[^%a%d%.]+","-")
+local function runcurl(specification)
+ local original = specification.original
+ -- local scheme = specification.scheme
+ local cleanname = gsub(original,"[^%a%d%.]+","-")
local cachename = caches.setfirstwritablefile(cleanname,"curl")
- if not cached[name] then
+ if not cached[original] then
if not io.exists(cachename) then
- cached[name] = cachename
- local command = "curl --silent --create-dirs --output " .. cachename .. " " .. name -- no protocol .. "://"
+ cached[original] = cachename
+ local command = "curl --silent --create-dirs --output " .. cachename .. " " .. original
os.spawn(command)
end
if io.exists(cachename) then
- cached[name] = cachename
+ cached[original] = cachename
else
- cached[name] = ""
+ cached[original] = ""
end
end
- return cached[name]
+ return cached[original]
end
-function finders.curl(protocol,filename)
- local foundname = curl.fetch(protocol, filename)
- return finders.generic(protocol,foundname,filetype)
-end
+-- old code: we could be cleaner using specification (see schemes)
-function openers.curl(protocol,filename)
- return openers.generic(protocol,filename)
+local function finder(specification,filetype)
+ return resolvers.methodhandler("finders",runcurl(specification),filetype)
end
-function loaders.curl(protocol,filename)
- return loaders.generic(protocol,filename)
-end
-
--- todo: metamethod
+local opener = openers.file
+local loader = loaders.file
-function curl.install(protocol)
- finders[protocol] = function (filename,filetype) return finders.curl(protocol,filename) end
- openers[protocol] = function (filename) return openers.curl(protocol,filename) end
- loaders[protocol] = function (filename) return loaders.curl(protocol,filename) end
+local function install(scheme)
+ finders[scheme] = finder
+ openers[scheme] = opener
+ loaders[scheme] = loader
end
-curl.install('http')
-curl.install('https')
-curl.install('ftp')
+resolvers.curl.install = install
+
+install('http')
+install('https')
+install('ftp')
end -- of closure
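-- Illustrative sketch, not part of the patch: after the install calls above an
-- http, https or ftp url is fetched once by shelling out to curl, cached in
-- the "curl" cache tree, and from then on treated as a plain file. The url
-- below is an assumption.

local foundname = resolvers.methodhandler("finders","http://example.com/somestyle.tex")
-- first call : curl --silent --create-dirs --output <cachename> <url>
-- later calls: the cached copy is used directly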
@@ -12777,7 +12960,7 @@ local function loaded(libpaths,name,simple)
if trace_locating then -- more detail
report_resolvers("! checking for '%s' on 'package.path': '%s' => '%s'",simple,libpath,resolved)
end
- if resolvers.isreadable.file(resolved) then
+ if file.is_readable(resolved) then
if trace_locating then
report_resolvers("! lib '%s' located via 'package.path': '%s'",name,resolved)
end
@@ -12786,7 +12969,6 @@ local function loaded(libpaths,name,simple)
end
end
-
package.loaders[2] = function(name) -- was [#package.loaders+1]
     if trace_locating then -- more detail
report_resolvers("! locating '%s'",name)
@@ -12824,7 +13006,7 @@ package.loaders[2] = function(name) -- was [#package.loaders+1]
       if trace_locating then -- more detail
report_resolvers("! checking for '%s' using 'clibformat path': '%s'",libname,path)
end
- if resolvers.isreadable.file(resolved) then
+ if file.is_readable(resolved) then
if trace_locating then
report_resolvers("! lib '%s' located via 'clibformat': '%s'",libname,resolved)
end
@@ -12838,7 +13020,7 @@ package.loaders[2] = function(name) -- was [#package.loaders+1]
if trace_locating then -- more detail
report_resolvers("! checking for '%s' on 'package.cpath': '%s'",simple,libpath)
end
- if resolvers.isreadable.file(resolved) then
+ if file.is_readable(resolved) then
if trace_locating then
report_resolvers("! lib '%s' located via 'package.cpath': '%s'",name,resolved)
end
@@ -13375,6 +13557,7 @@ own.libs = { -- order can be made better
'data-pre.lua',
'data-inp.lua',
'data-out.lua',
+ 'data-fil.lua',
'data-con.lua',
'data-use.lua',
-- 'data-tex.lua',
diff --git a/scripts/context/stubs/mswin/mtxrun.lua b/scripts/context/stubs/mswin/mtxrun.lua
index 3be305bed..6b74022ae 100644
--- a/scripts/context/stubs/mswin/mtxrun.lua
+++ b/scripts/context/stubs/mswin/mtxrun.lua
@@ -119,7 +119,7 @@ local patterns_escapes = {
["."] = "%.",
["+"] = "%+", ["-"] = "%-", ["*"] = "%*",
["["] = "%[", ["]"] = "%]",
- ["("] = "%)", [")"] = "%)",
+ ["("] = "%(", [")"] = "%)",
-- ["{"] = "%{", ["}"] = "%}"
-- ["^"] = "%^", ["$"] = "%$",
}
@@ -185,6 +185,7 @@ local patterns = lpeg.patterns
local P, R, S, V, match = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.match
local Ct, C, Cs, Cc, Cf, Cg = lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc, lpeg.Cf, lpeg.Cg
+local lpegtype = lpeg.type
local utfcharacters = string.utfcharacters
local utfgmatch = unicode and unicode.utf8.gmatch
@@ -201,7 +202,6 @@ patterns.alwaysmatched = alwaysmatched
local digit, sign = R('09'), S('+-')
local cr, lf, crlf = P("\r"), P("\n"), P("\r\n")
local newline = crlf + cr + lf
-local utf8next = R("\128\191")
local escaped = P("\\") * anything
local squote = P("'")
local dquote = P('"')
@@ -222,6 +222,8 @@ local utftype = utfbom_32_be / "utf-32-be" + utfbom_32_le / "utf-32-le
+ utfbom_16_be / "utf-16-be" + utfbom_16_le / "utf-16-le"
+ utfbom_8 / "utf-8" + alwaysmatched / "unknown"
+local utf8next = R("\128\191")
+
patterns.utf8one = R("\000\127")
patterns.utf8two = R("\194\223") * utf8next
patterns.utf8three = R("\224\239") * utf8next * utf8next
@@ -432,19 +434,25 @@ end
-- Just for fun I looked at the used bytecode and
-- p = (p and p + pp) or pp gets one more (testset).
-function lpeg.replacer(t)
- if #t > 0 then
- local p
- for i=1,#t do
- local ti= t[i]
- local pp = P(ti[1]) / ti[2]
- if p then
- p = p + pp
- else
- p = pp
+function lpeg.replacer(one,two)
+ if type(one) == "table" then
+ local no = #one
+ if no > 0 then
+ local p
+ for i=1,no do
+ local o = one[i]
+ local pp = P(o[1]) / o[2]
+ if p then
+ p = p + pp
+ else
+ p = pp
+ end
end
+ return Cs((p + 1)^0)
end
- return Cs((p + 1)^0)
+ else
+ two = two or ""
+ return Cs((P(one)/two + 1)^0)
end
end
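-- Illustrative sketch, not part of the patch: the extended replacer accepts
-- either a list of { pattern, replacement } pairs or a single pair as two
-- arguments.

local swapper = lpeg.replacer { { "\\", "/" }, { ";;", ";" } }
local remover = lpeg.replacer("\r","")

-- lpeg.match(swapper,[[a\b;;c]]) -- "a/b;c"
-- lpeg.match(remover,"1\r\n2")   -- "1\n2"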
@@ -646,6 +654,10 @@ function lpeg.oneof(list,...) -- lpeg.oneof("elseif","else","if","then")
return p
end
+function lpeg.is_lpeg(p)
+ return p and lpegtype(p) == "pattern"
+end
+
end -- of closure
@@ -2558,6 +2570,9 @@ local separator = P("://")
local qualified = P(".")^0 * P("/") + letter*P(":") + letter^1*separator + letter^1 * P("/")
local rootbased = P("/") + letter*P(":")
+lpeg.patterns.qualified = qualified
+lpeg.patterns.rootbased = rootbased
+
-- ./name ../name /name c: :// name/name
function file.is_qualified_path(filename)
@@ -2678,72 +2693,95 @@ if not modules then modules = { } end modules ['l-url'] = {
license = "see context related readme files"
}
-local char, gmatch, gsub, format, byte = string.char, string.gmatch, string.gsub, string.format, string.byte
+local char, gmatch, gsub, format, byte, find = string.char, string.gmatch, string.gsub, string.format, string.byte, string.find
local concat = table.concat
local tonumber, type = tonumber, type
-local lpegmatch, lpegP, lpegC, lpegR, lpegS, lpegCs, lpegCc = lpeg.match, lpeg.P, lpeg.C, lpeg.R, lpeg.S, lpeg.Cs, lpeg.Cc
+local P, C, R, S, Cs, Cc, Ct = lpeg.P, lpeg.C, lpeg.R, lpeg.S, lpeg.Cs, lpeg.Cc, lpeg.Ct
+local lpegmatch, lpegpatterns, replacer = lpeg.match, lpeg.patterns, lpeg.replacer
--- from the spec (on the web):
+-- from wikipedia:
--
--- foo://example.com:8042/over/there?name=ferret#nose
--- \_/ \______________/\_________/ \_________/ \__/
--- | | | | |
--- scheme authority path query fragment
--- | _____________________|__
--- / \ / \
--- urn:example:animal:ferret:nose
+-- foo://username:password@example.com:8042/over/there/index.dtb?type=animal;name=narwhal#nose
+-- \_/ \_______________/ \_________/ \__/ \___/ \_/ \______________________/ \__/
+-- | | | | | | | |
+-- | userinfo hostname port | | query fragment
+-- | \________________________________/\_____________|____|/
+-- scheme | | | |
+-- | authority path | |
+-- | | |
+-- | path interpretable as filename
+-- | ___________|____________ |
+-- / \ / \ |
+-- urn:example:animal:ferret:nose interpretable as extension
url = url or { }
local url = url
-local function tochar(s)
- return char(tonumber(s,16))
-end
+local tochar = function(s) return char(tonumber(s,16)) end
-local colon, qmark, hash, slash, percent, endofstring = lpegP(":"), lpegP("?"), lpegP("#"), lpegP("/"), lpegP("%"), lpegP(-1)
+local colon = P(":")
+local qmark = P("?")
+local hash = P("#")
+local slash = P("/")
+local percent = P("%")
+local endofstring = P(-1)
-local hexdigit = lpegR("09","AF","af")
-local plus = lpegP("+")
-local nothing = lpegCc("")
-local escaped = (plus / " ") + (percent * lpegC(hexdigit * hexdigit) / tochar)
+local hexdigit = R("09","AF","af")
+local plus = P("+")
+local nothing = Cc("")
+local escaped = (plus / " ") + (percent * C(hexdigit * hexdigit) / tochar)
-- we assume schemes with more than 1 character (in order to avoid problems with windows disks)
-local scheme = lpegCs((escaped+(1-colon-slash-qmark-hash))^2) * colon + nothing
-local authority = slash * slash * lpegCs((escaped+(1- slash-qmark-hash))^0) + nothing
-local path = slash * lpegCs((escaped+(1- qmark-hash))^0) + nothing
-local query = qmark * lpegCs((escaped+(1- hash))^0) + nothing
-local fragment = hash * lpegCs((escaped+(1- endofstring))^0) + nothing
-
-local parser = lpeg.Ct(scheme * authority * path * query * fragment)
+local scheme = Cs((escaped+(1-colon-slash-qmark-hash))^2) * colon + nothing
+local authority = slash * slash * Cs((escaped+(1- slash-qmark-hash))^0) + nothing
+local path = slash * Cs((escaped+(1- qmark-hash))^0) + nothing
+local query = qmark * Cs((escaped+(1- hash))^0) + nothing
+local fragment = hash * Cs((escaped+(1- endofstring))^0) + nothing
-lpeg.patterns.urlsplitter = parser
+local parser = Ct(scheme * authority * path * query * fragment)
-local escapes = { }
+lpegpatterns.urlsplitter = parser
-for i=0,255 do
- escapes[i] = format("%%%02X",i)
-end
+local escapes = { } ; for i=0,255 do escapes[i] = format("%%%02X",i) end
-local escaper = lpeg.Cs((lpegR("09","AZ","az") + lpegS("-./_") + lpegP(1) / escapes)^0)
+local escaper = Cs((R("09","AZ","az") + S("-./_") + P(1) / escapes)^0)
-lpeg.patterns.urlescaper = escaper
+lpegpatterns.urlescaper = escaper
-- todo: reconsider Ct as we can as well have five return values (saves a table)
-- so we can have two parsers, one with and one without
-function url.split(str)
+local function split(str)
return (type(str) == "string" and lpegmatch(parser,str)) or str
end
+local function hasscheme(str)
+ local scheme = lpegmatch(scheme,str) -- at least one character
+ return scheme and scheme ~= ""
+end
+
-- todo: cache them
-function url.hashed(str) -- not yet ok (/test?test)
- local s = url.split(str)
+local rootletter = R("az","AZ")
+ + S("_-+")
+local separator = P("://")
+local qualified = P(".")^0 * P("/")
+ + rootletter * P(":")
+ + rootletter^1 * separator
+ + rootletter^1 * P("/")
+local rootbased = P("/")
+ + rootletter * P(":")
+
+local barswapper = replacer("|",":")
+local backslashswapper = replacer("\\","/")
+
+local function hashed(str) -- not yet ok (/test?test)
+ local s = split(str)
local somescheme = s[1] ~= ""
local somequery = s[4] ~= ""
if not somescheme and not somequery then
- return {
+ s = {
scheme = "file",
authority = "",
path = str,
@@ -2751,52 +2789,73 @@ function url.hashed(str) -- not yet ok (/test?test)
fragment = "",
original = str,
noscheme = true,
+ filename = str,
}
- else
- return {
+ else -- not always a filename but handy anyway
+ local authority, path, filename = s[2], s[3]
+ if authority == "" then
+ filename = path
+ else
+ filename = authority .. "/" .. path
+ end
+ s = {
scheme = s[1],
- authority = s[2],
- path = s[3],
+ authority = authority,
+ path = path,
query = s[4],
fragment = s[5],
original = str,
noscheme = false,
+ filename = filename,
}
end
+ return s
end
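-- Illustrative sketch, not part of the patch: the added filename field glues
-- authority and path together so that schemes can treat it as a path. The url
-- and the commented values below are for illustration.

local t = hashed("zip:///archive.zip?name=foo/bar.tex")
-- t.scheme    : "zip"
-- t.authority : ""
-- t.path      : "archive.zip"
-- t.query     : "name=foo/bar.tex"
-- t.filename  : "archive.zip" (authority is empty, so just the path)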
+-- Here we assume:
+--
+-- files: /// = relative
+-- files: //// = absolute (!)
+
-function url.hasscheme(str)
- return url.split(str)[1] ~= ""
-end
-function url.addscheme(str,scheme)
- return (url.hasscheme(str) and str) or ((scheme or "file:///") .. str)
+url.split = split
+url.hasscheme = hasscheme
+url.hashed = hashed
+
+function url.addscheme(str,scheme) -- no authority
+ if hasscheme(str) then
+ return str
+ elseif not scheme then
+ return "file:///" .. str
+ else
+ return scheme .. ":///" .. str
+ end
end
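-- Illustrative sketch, not part of the patch; the commented results are what
-- the branches above produce for these (assumed) inputs.

local a = url.addscheme("oeps.zip")        -- "file:///oeps.zip"
local b = url.addscheme("oeps.zip","zip")  -- "zip:///oeps.zip"
local c = url.addscheme("zip:///oeps.zip") -- unchanged, a scheme is already present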
 function url.construct(hash) -- todo: we need to escape !
- local fullurl = { }
+ local fullurl, f = { }, 0
local scheme, authority, path, query, fragment = hash.scheme, hash.authority, hash.path, hash.query, hash.fragment
if scheme and scheme ~= "" then
- fullurl[#fullurl+1] = scheme .. "://"
+ f = f + 1 ; fullurl[f] = scheme .. "://"
end
if authority and authority ~= "" then
- fullurl[#fullurl+1] = authority
+ f = f + 1 ; fullurl[f] = authority
end
if path and path ~= "" then
- fullurl[#fullurl+1] = "/" .. path
+ f = f + 1 ; fullurl[f] = "/" .. path
end
if query and query ~= "" then
- fullurl[#fullurl+1] = "?".. query
+ f = f + 1 ; fullurl[f] = "?".. query
end
if fragment and fragment ~= "" then
- fullurl[#fullurl+1] = "#".. fragment
+ f = f + 1 ; fullurl[f] = "#".. fragment
end
return lpegmatch(escaper,concat(fullurl))
end
function url.filename(filename)
- local t = url.hashed(filename)
+ local t = hashed(filename)
     return (t.scheme == "file" and (gsub(t.path,"^/([a-zA-Z])([:|])/","%1:"))) or filename
end
@@ -2820,6 +2879,7 @@ end
+
end -- of closure
do -- create closure to overcome 200 locals limit
@@ -2861,25 +2921,22 @@ end
-- optimizing for no find (*) does not save time
+
local function globpattern(path,patt,recurse,action)
- local ok, scanner
if path == "/" then
- ok, scanner = xpcall(function() return walkdir(path..".") end, function() end) -- kepler safe
- else
- ok, scanner = xpcall(function() return walkdir(path) end, function() end) -- kepler safe
+ path = path .. "."
+ elseif not find(path,"/$") then
+ path = path .. '/'
end
- if ok and type(scanner) == "function" then
- if not find(path,"/$") then path = path .. '/' end
- for name in scanner do
- local full = path .. name
- local mode = attributes(full,'mode')
- if mode == 'file' then
- if find(full,patt) then
- action(full)
- end
- elseif recurse and (mode == "directory") and (name ~= '.') and (name ~= "..") then
- globpattern(full,patt,recurse,action)
+ for name in walkdir(path) do
+ local full = path .. name
+ local mode = attributes(full,'mode')
+ if mode == 'file' then
+ if find(full,patt) then
+ action(full)
end
+ elseif recurse and (mode == "directory") and (name ~= '.') and (name ~= "..") then
+ globpattern(full,patt,recurse,action)
end
end
end
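-- Illustrative sketch, not part of the patch: globpattern walks the given
-- directory (recursively when asked) and calls action for every file whose
-- full name matches the Lua pattern; path and pattern are assumptions.

globpattern("./texmf/","%.tex$",true,function(fullname)
    print(fullname)
end)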
@@ -9363,10 +9420,10 @@ if not modules then modules = { } end modules ['data-exp'] = {
license = "see context related readme files",
}
-local format, gsub, find, gmatch, lower = string.format, string.gsub, string.find, string.gmatch, string.lower
+local format, find, gmatch, lower = string.format, string.find, string.gmatch, string.lower
local concat, sort = table.concat, table.sort
local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
-local lpegCt, lpegCs, lpegP, lpegC, lpegS = lpeg.Ct, lpeg.Cs, lpeg.P, lpeg.C, lpeg.S
+local Ct, Cs, Cc, P, C, S = lpeg.Ct, lpeg.Cs, lpeg.Cc, lpeg.P, lpeg.C, lpeg.S
local type, next = type, next
local ostype = os.type
@@ -9381,7 +9438,7 @@ local resolvers = resolvers
-- As this bit of code is somewhat special it gets its own module. After
-- all, when working on the main resolver code, I don't want to scroll
--- past this every time.
+-- past this every time. See data-obs.lua for the gsub variant.
-- {a,b,c,d}
-- a,b,c/{p,q,r},d
@@ -9396,95 +9453,70 @@ local resolvers = resolvers
-- {a,b,c/{p,q/{x,y,z},w}v,d/{p,q,r}}
-- {$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,.local,}/web2c}
--- this one is better and faster, but it took me a while to realize
--- that this kind of replacement is cleaner than messy parsing and
--- fuzzy concatenating we can probably gain a bit with selectively
--- applying lpeg, but experiments with lpeg parsing this proved not to
--- work that well; the parsing is ok, but dealing with the resulting
--- table is a pain because we need to work inside-out recursively
-
-local dummy_path_expr = "^!*unset/*$"
-
-local function do_first(a,b)
+local function f_first(a,b)
local t, n = { }, 0
for s in gmatch(b,"[^,]+") do
- n = n + 1
- t[n] = a .. s
+ n = n + 1 ; t[n] = a .. s
end
- return "{" .. concat(t,",") .. "}"
+ return concat(t,",")
end
-local function do_second(a,b)
+local function f_second(a,b)
local t, n = { }, 0
for s in gmatch(a,"[^,]+") do
- n = n + 1
- t[n] = s .. b
+ n = n + 1 ; t[n] = s .. b
end
- return "{" .. concat(t,",") .. "}"
+ return concat(t,",")
end
-local function do_both(a,b)
+local function f_both(a,b)
local t, n = { }, 0
for sa in gmatch(a,"[^,]+") do
for sb in gmatch(b,"[^,]+") do
- n = n + 1
- t[n] = sa .. sb
+ n = n + 1 ; t[n] = sa .. sb
end
end
- return "{" .. concat(t,",") .. "}"
+ return concat(t,",")
end
-local function do_three(a,b,c)
- return a .. b.. c
-end
+local left = P("{")
+local right = P("}")
+local var = P((1 - S("{}" ))^0)
+local set = P((1 - S("{},"))^0)
+local other = P(1)
-local stripper_1 = lpeg.stripper("{}@")
+local l_first = Cs( ( Cc("{") * (C(set) * left * C(var) * right / f_first) * Cc("}") + other )^0 )
+local l_second = Cs( ( Cc("{") * (left * C(var) * right * C(set) / f_second) * Cc("}") + other )^0 )
+local l_both = Cs( ( Cc("{") * (left * C(var) * right * left * C(var) * right / f_both) * Cc("}") + other )^0 )
+local l_rest = Cs( ( left * var * (left/"") * var * (right/"") * var * right + other )^0 )
-local replacer_1 = lpeg.replacer {
- { ",}", ",@}" },
- { "{,", "{@," },
-}
+local stripper_1 = lpeg.stripper ("{}@")
+local replacer_1 = lpeg.replacer { { ",}", ",@}" }, { "{,", "{@," }, }
-local function splitpathexpr(str, newlist, validate)
- -- no need for further optimization as it is only called a
- -- few times, we can use lpeg for the sub
+local function splitpathexpr(str, newlist, validate) -- I couldn't resist lpegging it (nice exercise).
if trace_expansions then
report_resolvers("expanding variable '%s'",str)
end
local t, ok, done = newlist or { }, false, false
local n = #t
str = lpegmatch(replacer_1,str)
- while true do
- done = false
- while true do
- str, ok = gsub(str,"([^{},]+){([^{}]+)}",do_first)
- if ok > 0 then done = true else break end
- end
- while true do
- str, ok = gsub(str,"{([^{}]+)}([^{},]+)",do_second)
- if ok > 0 then done = true else break end
- end
- while true do
- str, ok = gsub(str,"{([^{}]+)}{([^{}]+)}",do_both)
- if ok > 0 then done = true else break end
- end
- str, ok = gsub(str,"({[^{}]*){([^{}]+)}([^{}]*})",do_three)
- if ok > 0 then done = true end
- if not done then break end
- end
+ repeat local old = str
+ repeat local old = str ; str = lpegmatch(l_first, str) until old == str
+ repeat local old = str ; str = lpegmatch(l_second,str) until old == str
+ repeat local old = str ; str = lpegmatch(l_both, str) until old == str
+ repeat local old = str ; str = lpegmatch(l_rest, str) until old == str
+ until old == str -- or not find(str,"{")
str = lpegmatch(stripper_1,str)
if validate then
for s in gmatch(str,"[^,]+") do
s = validate(s)
if s then
- n = n + 1
- t[n] = s
+ n = n + 1 ; t[n] = s
end
end
else
for s in gmatch(str,"[^,]+") do
- n = n + 1
- t[n] = s
+ n = n + 1 ; t[n] = s
end
end
if trace_expansions then
@@ -9495,50 +9527,23 @@ local function splitpathexpr(str, newlist, validate)
return t
end
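-- Illustrative sketch, not part of the patch: the lpeg passes above keep
-- distributing prefixes and suffixes over brace groups until the string no
-- longer changes, so an (assumed) specification like

local t = splitpathexpr("/opt/tex/{texmf-local,texmf}{,/web2c}",{ })

-- ends up as the flat list
--
--   { "/opt/tex/texmf-local", "/opt/tex/texmf-local/web2c",
--     "/opt/tex/texmf",       "/opt/tex/texmf/web2c" }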
+-- We could make the previous one public.
+
local function validate(s)
- local isrecursive = find(s,"//$")
- s = collapsepath(s)
- if isrecursive then
- s = s .. "//"
- end
- return s ~= "" and not find(s,dummy_path_expr) and s
+ s = collapsepath(s) -- already keeps the //
+ return s ~= "" and not find(s,"^!*unset/*$") and s
end
resolvers.validatedpath = validate -- keeps the trailing //
-function resolvers.expandedpathfromlist(pathlist) -- maybe not a list, just a path
- -- a previous version fed back into pathlist
- local newlist, ok = { }, false
+function resolvers.expandedpathfromlist(pathlist)
+ local newlist = { }
for k=1,#pathlist do
- if find(pathlist[k],"[{}]") then
- ok = true
- break
- end
- end
- if ok then
- for k=1,#pathlist do
- splitpathexpr(pathlist[k],newlist,validate)
- end
- else
- local n = 0
- for k=1,#pathlist do
- for p in gmatch(pathlist[k],"([^,]+)") do
- p = validate(p)
- if p ~= "" then
- n = n + 1
- newlist[n] = p
- end
- end
- end
+ splitpathexpr(pathlist[k],newlist,validate)
end
return newlist
end
--- We also put some cleanup code here.
-
-
-
-
local cleanup = lpeg.replacer {
{ "!" , "" },
{ "\\" , "/" },
@@ -9576,14 +9581,13 @@ end
-- This one strips quotes and funny tokens.
+local expandhome = P("~") / "$HOME" -- environment.homedir
-local expandhome = lpegP("~") / "$HOME" -- environment.homedir
+local dodouble = P('"')/"" * (expandhome + (1 - P('"')))^0 * P('"')/""
+local dosingle = P("'")/"" * (expandhome + (1 - P("'")))^0 * P("'")/""
+local dostring = (expandhome + 1 )^0
-local dodouble = lpegP('"')/"" * (expandhome + (1 - lpegP('"')))^0 * lpegP('"')/""
-local dosingle = lpegP("'")/"" * (expandhome + (1 - lpegP("'")))^0 * lpegP("'")/""
-local dostring = (expandhome + 1 )^0
-
-local stripper = lpegCs(
+local stripper = Cs(
lpegpatterns.unspacer * (dosingle + dodouble + dostring) * lpegpatterns.unspacer
)
@@ -9599,7 +9603,9 @@ end
local cache = { }
-local splitter = lpegCt(lpeg.splitat(lpegS(ostype == "windows" and ";" or ":;"))) -- maybe add ,
+local splitter = Ct(lpeg.splitat(S(ostype == "windows" and ";" or ":;"))) -- maybe add ,
+
+local backslashswapper = lpeg.replacer("\\","/")
local function splitconfigurationpath(str) -- beware, this can be either a path or a { specification }
if str then
@@ -9608,8 +9614,7 @@ local function splitconfigurationpath(str) -- beware, this can be either a path
if str == "" then
found = { }
else
- str = gsub(str,"\\","/")
- local split = lpegmatch(splitter,str)
+ local split = lpegmatch(splitter,lpegmatch(backslashswapper,str)) -- can be combined
found = { }
local noffound = 0
for i=1,#split do
@@ -9658,57 +9663,62 @@ end
-local weird = lpegP(".")^1 + lpeg.anywhere(lpegS("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t"))
+local weird = P(".")^1 + lpeg.anywhere(S("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t"))
-function resolvers.scanfiles(specification)
- if trace_locating then
- report_resolvers("scanning path '%s'",specification)
- end
- local attributes, directory = lfs.attributes, lfs.dir
- local files = { __path__ = specification }
- local n, m, r = 0, 0, 0
- local function scan(spec,path)
- local full = (path == "" and spec) or (spec .. path .. '/')
- local dirs = { }
- for name in directory(full) do
- if not lpegmatch(weird,name) then
- local mode = attributes(full..name,'mode')
- if mode == 'file' then
- n = n + 1
- local f = files[name]
- if f then
- if type(f) == 'string' then
- files[name] = { f, path }
- else
- f[#f+1] = path
- end
- else -- probably unique anyway
- files[name] = path
- local lower = lower(name)
- if name ~= lower then
- files["remap:"..lower] = name
- r = r + 1
- end
- end
- elseif mode == 'directory' then
- m = m + 1
- if path ~= "" then
- dirs[#dirs+1] = path..'/'..name
+local attributes, directory = lfs.attributes, lfs.dir
+
+local function scan(files,spec,path,n,m,r)
+ local full = (path == "" and spec) or (spec .. path .. '/')
+ local dirs, nofdirs = { }, 0
+ for name in directory(full) do
+ if not lpegmatch(weird,name) then
+ local mode = attributes(full..name,'mode')
+ if mode == 'file' then
+ n = n + 1
+ local f = files[name]
+ if f then
+ if type(f) == 'string' then
+ files[name] = { f, path }
else
- dirs[#dirs+1] = name
+ f[#f+1] = path
+ end
+ else -- probably unique anyway
+ files[name] = path
+ local lower = lower(name)
+ if name ~= lower then
+ files["remap:"..lower] = name
+ r = r + 1
end
end
+ elseif mode == 'directory' then
+ m = m + 1
+ nofdirs = nofdirs + 1
+ if path ~= "" then
+ dirs[nofdirs] = path..'/'..name
+ else
+ dirs[nofdirs] = name
+ end
end
end
- if #dirs > 0 then
- sort(dirs)
- for i=1,#dirs do
- scan(spec,dirs[i])
- end
+ end
+ if nofdirs > 0 then
+ sort(dirs)
+ for i=1,nofdirs do
+ files, n, m, r = scan(files,spec,dirs[i],n,m,r)
end
end
- scan(specification .. '/',"")
- files.__files__, files.__directories__, files.__remappings__ = n, m, r
+ return files, n, m, r
+end
+
+function resolvers.scanfiles(path)
+ if trace_locating then
+ report_resolvers("scanning path '%s'",path)
+ end
+ local files, n, m, r = scan({ },path .. '/',"",0,0,0)
+ files.__path__ = path
+ files.__files__ = n
+ files.__directories__ = m
+ files.__remappings__ = r
if trace_locating then
report_resolvers("%s files found on %s directories with %s uppercase remappings",n,m,r)
end
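-- Illustrative sketch, not part of the patch: the table built above maps each
-- file name to its path relative to the scanned root (or to a list of paths
-- when the name occurs more than once), with lowercased "remap:" keys pointing
-- back at the original mixed case names. The names below are assumptions.

local files = resolvers.scanfiles("/opt/texmf")
-- files["context.mkiv"]     -- e.g. "tex/context/base"
-- files["remap:readme.txt"] -- e.g. "README.txt" (the original spelling)
-- files.__files__, files.__directories__, files.__remappings__ -- the counters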
@@ -10399,9 +10409,15 @@ if not modules then modules = { } end modules ['data-met'] = {
license = "see context related readme files"
}
-local find = string.find
+local find, format = string.find, string.format
+local sequenced = table.sequenced
+local addurlscheme, urlhashed = url.addscheme, url.hashed
+
+local trace_locating = false
+local trace_methods  = false
+
+trackers.register("resolvers.locating", function(v) trace_locating = v end)
+trackers.register("resolvers.methods",  function(v) trace_methods  = v end)
-local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
local report_resolvers = logs.new("resolvers")
@@ -10409,41 +10425,109 @@ local allocate = utilities.storage.allocate
local resolvers = resolvers
-resolvers.concatinators = allocate ()
-resolvers.locators = allocate { notfound = { nil } } -- locate databases
-resolvers.hashers = allocate { notfound = { nil } } -- load databases
-resolvers.generators = allocate { notfound = { nil } } -- generate databases
+local registered = { }
-function resolvers.splitmethod(filename) -- todo: trigger by suffix
+local function splitmethod(filename) -- todo: filetype in specification
if not filename then
- return { } -- safeguard
- elseif type(filename) == "table" then
+ return { scheme = "unknown", original = filename }
+ end
+ if type(filename) == "table" then
return filename -- already split
- elseif not find(filename,"://") then
- return { scheme="file", path = filename, original = filename } -- quick hack
+ end
+ filename = file.collapsepath(filename)
+ if not find(filename,"://") then
+ return { scheme = "file", path = filename, original = filename, filename = filename }
+ end
+ local specification = url.hashed(filename)
+ if not specification.scheme or specification.scheme == "" then
+ return { scheme = "file", path = filename, original = filename, filename = filename }
else
- return url.hashed(filename)
+ return specification
end
end
-function resolvers.methodhandler(what, filename, filetype) -- ...
- filename = file.collapsepath(filename)
- local specification = (type(filename) == "string" and resolvers.splitmethod(filename)) or filename -- no or { }, let it bomb
- local scheme = specification.scheme
- local resolver = resolvers[what]
- if resolver[scheme] then
- if trace_locating then
- report_resolvers("using special handler for '%s' -> '%s' -> '%s'",specification.original,what,table.sequenced(specification))
+resolvers.splitmethod = splitmethod -- bad name but ok
+
+-- the second argument is always analyzed (saves time later on) and the original
+-- gets passed as original but also as argument
+
+local function methodhandler(what,first,...) -- filename can be nil or false
+ local method = registered[what]
+ if method then
+ local how, namespace = method.how, method.namespace
+ if how == "uri" or how == "url" then
+ local specification = splitmethod(first)
+ local scheme = specification.scheme
+ local resolver = namespace and namespace[scheme]
+ if resolver then
+ if trace_methods then
+ report_resolvers("resolver: method=%s, how=%s, scheme=%s, argument=%s",what,how,scheme,first)
+ end
+ return resolver(specification,...)
+ else
+ resolver = namespace.default or namespace.file
+ if resolver then
+ if trace_methods then
+ report_resolvers("resolver: method=%s, how=%s, default, argument=%s",what,how,first)
+ end
+ return resolver(specification,...)
+ elseif trace_methods then
+ report_resolvers("resolver: method=%s, how=%s, no handler",what,how)
+ end
+ end
+ elseif how == "tag" then
+ local resolver = namespace and namespace[first]
+ if resolver then
+ if trace_methods then
+ report_resolvers("resolver: method=%s, how=%s, tag=%s",what,how,first)
+ end
+ return resolver(...)
+ else
+ resolver = namespace.default or namespace.file
+ if resolver then
+ if trace_methods then
+ report_resolvers("resolver: method=%s, how=%s, default",what,how)
+ end
+ return resolver(...)
+ elseif trace_methods then
+ report_resolvers("resolver: method=%s, how=%s, unknown",what,how)
+ end
+ end
end
- return resolver[scheme](filename,filetype,specification) -- todo: query
else
- if trace_locating then
- report_resolvers("no handler for '%s' -> '%s' -> '%s'",specification.original,what,table.sequenced(specification))
+ report_resolvers("resolver: method=%s, unknown",what)
+ end
+end
+
+resolvers.methodhandler = methodhandler
+
+function resolvers.registermethod(name,namespace,how)
+ registered[name] = { how = how or "tag", namespace = namespace }
+ namespace["byscheme"] = function(scheme,filename,...)
+ if scheme == "file" then
+ return methodhandler(name,filename,...)
+ else
+ return methodhandler(name,addurlscheme(filename,scheme),...)
end
- return resolver.tex(filename,filetype) -- todo: specification
end
end
+local concatinators = allocate { notfound = file.join } -- concatinate paths
+local locators = allocate { notfound = function() end } -- locate databases
+local hashers = allocate { notfound = function() end } -- load databases
+local generators = allocate { notfound = function() end } -- generate databases
+
+resolvers.concatinators = concatinators
+resolvers.locators = locators
+resolvers.hashers = hashers
+resolvers.generators = generators
+
+local registermethod = resolvers.registermethod
+
+registermethod("concatinators",concatinators,"tag")
+registermethod("locators", locators, "uri")
+registermethod("hashers", hashers, "uri")
+registermethod("generators", generators, "uri")
end -- of closure
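-- Illustrative sketch, not part of the patch: "uri" style methods are chosen
-- by the scheme of the (split) first argument, while "tag" style methods use
-- the first argument itself as the key. The file names are assumptions.

local spec = resolvers.splitmethod("oeps.tex")
-- no scheme present, so spec.scheme is "file" and spec.filename is "oeps.tex";
-- methodhandler("finders",...) would therefore end up in resolvers.finders.file

local joined = resolvers.methodhandler("concatinators","file","texmf","tex/context/base","context.mkiv")
-- "tag" dispatch: this reaches resolvers.concatinators.file, i.e. file.join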
@@ -10471,11 +10555,11 @@ local concat, insert, sortedkeys = table.concat, table.insert, table.sortedkeys
local next, type = next, type
local os = os
-local lpegP, lpegS, lpegR, lpegC, lpegCc, lpegCs, lpegCt = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.Cc, lpeg.Cs, lpeg.Ct
+local P, S, R, C, Cc, Cs, Ct, Carg = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.Cc, lpeg.Cs, lpeg.Ct, lpeg.Carg
local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
local filedirname, filebasename, fileextname, filejoin = file.dirname, file.basename, file.extname, file.join
-local collapsepath = file.collapsepath
+local collapsepath, joinpath = file.collapsepath, file.joinpath
local allocate = utilities.storage.allocate
local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
@@ -10489,6 +10573,7 @@ local resolvers = resolvers
local expandedpathfromlist = resolvers.expandedpathfromlist
local checkedvariable = resolvers.checkedvariable
local splitconfigurationpath = resolvers.splitconfigurationpath
+local methodhandler = resolvers.methodhandler
local initializesetter = utilities.setters.initialize
@@ -10502,12 +10587,12 @@ resolvers.luacnfspec = '{$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local
resolvers.luacnfname = 'texmfcnf.lua'
resolvers.luacnfstate = "unknown"
-local unset_variable = "unset"
+local unset_variable = "unset"
-local formats = resolvers.formats
-local suffixes = resolvers.suffixes
-local dangerous = resolvers.dangerous
-local suffixmap = resolvers.suffixmap
+local formats = resolvers.formats
+local suffixes = resolvers.suffixes
+local dangerous = resolvers.dangerous
+local suffixmap = resolvers.suffixmap
resolvers.defaultsuffixes = { "tex" } -- "mkiv", "cld" -- too tricky
@@ -10552,7 +10637,7 @@ function resolvers.newinstance()
end
-function resolvers.setinstance(someinstance)
+function resolvers.setinstance(someinstance) -- only one instance is active
instance = someinstance
resolvers.instance = someinstance
return someinstance
@@ -10574,7 +10659,7 @@ function resolvers.setenv(key,value)
end
end
-function resolvers.getenv(key)
+local function getenv(key)
local value = instance.environment[key]
if value and value ~= "" then
return value
@@ -10584,23 +10669,55 @@ function resolvers.getenv(key)
end
end
-resolvers.env = resolvers.getenv
+resolvers.getenv = getenv
+resolvers.env = getenv
+
+local function resolve(key)
+ local value = instance.variables[key] or ""
+ return (value ~= "" and value) or getenv(key) or ""
+end
+
+local dollarstripper = lpeg.stripper("$")
+local inhibitstripper = P("!")^0 * Cs(P(1)^0)
+local backslashswapper = lpeg.replacer("\\","/")
+
+local somevariable = P("$") / ""
+local somekey = C(R("az","AZ","09","__","--")^1)
+local somethingelse = P(";") * ((1-S("!{}/\\"))^1 * P(";") / "")
+ + P(";") * (P(";") / "")
+ + P(1)
+
+local pattern = Cs( (somevariable * (somekey/resolve) + somethingelse)^1 )
local function expandvars(lst) -- simple vars
- local variables, getenv = instance.variables, resolvers.getenv
- local function resolve(a)
- local va = variables[a] or ""
- return (va ~= "" and va) or getenv(a) or ""
- end
for k=1,#lst do
- local var = lst[k]
- var = gsub(var,"%$([%a%d%_%-]+)",resolve)
- var = gsub(var,";+",";")
- var = gsub(var,";[!{}/\\]+;",";")
- lst[k] = var
+ local lk = lst[k]
+ lst[k] = lpegmatch(pattern,lk) or lk
+ end
+end
+
+
+local slash = P("/")
+
+local pattern = Cs (
+ Cc("^") * (
+ Cc("%") * S(".-")
+ + slash^2 * P(-1) / "/.*"
+ + slash^2 / "/.-/"
+ + (1-slash) * P(-1) * Cc("/")
+ + P(1)
+ )^1 * Cc("$")
+)
+
+local function makepathexpression(str)
+ if str == "." then
+ return "^%./$"
+ else
+ return lpegmatch(pattern,str)
end
end
+
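-- Illustrative sketch, not part of the patch: makepathexpression turns a texmf
-- style path into an anchored Lua find pattern, a trailing "//" meaning
-- "anything below this directory". Inputs are assumptions, the commented
-- outputs follow from the lpeg above.

local p1 = makepathexpression(".")                -- "^%./$"
local p2 = makepathexpression("/opt/texmf/tex")   -- "^/opt/texmf/tex/$"
local p3 = makepathexpression("/opt/texmf/tex//") -- "^/opt/texmf/tex/.*$"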
local function resolve(key)
local value = instance.variables[key]
if value and value ~= "" then
@@ -10614,22 +10731,21 @@ local function resolve(key)
return e ~= nil and e ~= "" and checkedvariable(e) or ""
end
+local pattern = Cs( (somevariable * (somekey/resolve) + somethingelse)^1 )
+
local function expandedvariable(var) -- simple vars
- var = gsub(var,"%$([%a%d%_%-]+)",resolve)
- var = gsub(var,";+",";")
- var = gsub(var,";[!{}/\\]+;",";")
- return var
+ return lpegmatch(pattern,var) or var
end
+
local function entry(entries,name)
if name and name ~= "" then
- name = gsub(name,'%$','')
- -- local result = entries[name..'.'..instance.progname] or entries[name]
+ name = lpegmatch(dollarstripper,name)
local result = entries[instance.progname .. '.' .. name] or entries[name]
if result then
return result
else
- result = resolvers.getenv(name)
+ result = getenv(name)
if result then
instance.variables[name] = result
resolvers.expandvariables()
@@ -10642,8 +10758,7 @@ end
local function is_entry(entries,name)
if name and name ~= "" then
- name = gsub(name,'%$','')
- -- return (entries[name..'.'..instance.progname] or entries[name]) ~= nil
+ name = lpegmatch(dollarstripper,name)
return (entries[instance.progname .. '.' .. name] or entries[name]) ~= nil
else
return false
@@ -10654,7 +10769,7 @@ local function reportcriticalvariables()
if trace_locating then
for i=1,#resolvers.criticalvars do
local v = resolvers.criticalvars[i]
- report_resolvers("variable '%s' set to '%s'",v,resolvers.getenv(v) or "unknown")
+ report_resolvers("variable '%s' set to '%s'",v,getenv(v) or "unknown")
end
report_resolvers()
end
@@ -10664,7 +10779,7 @@ end
local function identify_configuration_files()
local specification = instance.specification
if #specification == 0 then
- local cnfspec = resolvers.getenv('TEXMFCNF')
+ local cnfspec = getenv('TEXMFCNF')
if cnfspec == "" then
cnfspec = resolvers.luacnfspec
resolvers.luacnfstate = "default"
@@ -10736,7 +10851,6 @@ local function load_configuration_files()
end
end
setups[pathname] = t
-
if resolvers.luacnfstate == "default" then
-- the following code is not tested
local cnfspec = t["TEXMFCNF"]
@@ -10798,63 +10912,30 @@ end
-- database loading
--- locators
-
-function resolvers.locatedatabase(specification)
- return resolvers.methodhandler('locators', specification)
-end
-
-function resolvers.locators.tex(specification)
- if specification and specification ~= '' and lfs.isdir(specification) then
- if trace_locating then
- report_resolvers("tex locator '%s' found",specification)
- end
- resolvers.appendhash('file',specification,filename,true) -- cache
- elseif trace_locating then
- report_resolvers("tex locator '%s' not found",specification)
- end
-end
-
--- hashers
-
-function resolvers.hashdatabase(tag,name)
- return resolvers.methodhandler('hashers',tag,name)
-end
-
local function load_file_databases()
instance.loaderror, instance.files = false, allocate()
if not instance.renewcache then
local hashes = instance.hashes
for k=1,#hashes do
local hash = hashes[k]
- resolvers.hashdatabase(hash.tag,hash.name)
+ resolvers.hashers.byscheme(hash.type,hash.name)
if instance.loaderror then break end
end
end
end
-function resolvers.hashers.tex(tag,name) -- used where?
- local content = caches.loadcontent(tag,'files')
- if content then
- instance.files[tag] = content
- else
- instance.files[tag] = { }
- instance.loaderror = true
- end
-end
-
local function locate_file_databases()
-- todo: cache:// and tree:// (runtime)
local texmfpaths = resolvers.expandedpathlist('TEXMF')
for i=1,#texmfpaths do
local path = collapsepath(texmfpaths[i])
- local stripped = gsub(path,"^!!","")
- local runtime = stripped == path
- path = resolvers.cleanpath(path)
+ local stripped = lpegmatch(inhibitstripper,path)
if stripped ~= "" then
+ local runtime = stripped == path
+ path = resolvers.cleanpath(path)
if lfs.isdir(path) then
local spec = resolvers.splitmethod(stripped)
- if spec.scheme == "cache" then
+ if spec.scheme == "cache" or spec.scheme == "file" then
stripped = spec.path
elseif runtime and (spec.noscheme or spec.scheme == "file") then
stripped = "tree:///" .. stripped
@@ -10866,7 +10947,7 @@ local function locate_file_databases()
report_resolvers("locating list of '%s' (cached)",path)
end
end
- resolvers.locatedatabase(stripped) -- nothing done with result
+ methodhandler('locators',stripped) -- nothing done with result
else
if trace_locating then
if runtime then
@@ -10885,8 +10966,9 @@ end
local function generate_file_databases()
local hashes = instance.hashes
- for i=1,#hashes do
- resolvers.methodhandler('generators',hashes[i].tag)
+ for k=1,#hashes do
+ local hash = hashes[k]
+ methodhandler('generators',hash.name)
end
if trace_locating then
report_resolvers()
@@ -10896,10 +10978,13 @@ end
local function save_file_databases() -- will become cachers
for i=1,#instance.hashes do
local hash = instance.hashes[i]
- local cachename = hash.tag
+ local cachename = hash.name
if hash.cache then
local content = instance.files[cachename]
caches.collapsecontent(content)
+ if trace_locating then
+ report_resolvers("saving tree '%s'",cachename)
+ end
caches.savecontent(cachename,"files",content)
elseif trace_locating then
report_resolvers("not saving runtime tree '%s'",cachename)
@@ -10923,23 +11008,22 @@ local function load_databases()
end
end
-function resolvers.appendhash(type,tag,name,cache)
+function resolvers.appendhash(type,name,cache)
if trace_locating then
- report_resolvers("hash '%s' appended",tag)
+ report_resolvers("hash '%s' appended",name)
end
- insert(instance.hashes, { type = type, tag = tag, name = name, cache = cache } )
+ insert(instance.hashes, { type = type, name = name, cache = cache } )
end
-function resolvers.prependhash(type,tag,name,cache)
+function resolvers.prependhash(type,name,cache)
if trace_locating then
- report_resolvers("hash '%s' prepended",tag)
+ report_resolvers("hash '%s' prepended",name)
end
- insert(instance.hashes, 1, { type = type, tag = tag, name = name, cache = cache } )
+ insert(instance.hashes, 1, { type = type, name = name, cache = cache } )
end
function resolvers.extendtexmfvariable(specification) -- crap, we could better prepend the hash
--- local t = resolvers.expandedpathlist('TEXMF') -- full expansion
- local t = resolvers.splitpath(resolvers.getenv('TEXMF'))
+ local t = resolvers.splitpath(getenv('TEXMF'))
insert(t,1,specification)
local newspec = concat(t,";")
if instance.environment["TEXMF"] then
@@ -10953,10 +11037,6 @@ function resolvers.extendtexmfvariable(specification) -- crap, we could better p
reset_hashes()
end
-function resolvers.generators.tex(specification,tag)
- instance.files[tag or specification] = resolvers.scanfiles(specification)
-end
-
function resolvers.splitexpansions()
local ie = instance.expansions
for k,v in next, ie do
@@ -10986,9 +11066,20 @@ function resolvers.datastate()
return caches.contentstate()
end
+local function resolve(a)
+ return instance.expansions[a] or getenv(a)
+end
+
+local cleaner = P("\\") / "/" + P(";") * S("!{}/\\")^0 * P(";")^1 / ";"
+
+local variable = R("az","AZ","09","__","--")^1 / resolve
+ variable = (P("$")/"") * (variable + (P("{")/"") * variable * (P("}")/""))
+
+ cleaner = Cs((cleaner + P(1))^0)
+ variable = Cs((variable + P(1))^0)
+
function resolvers.expandvariables()
local expansions, environment, variables = allocate(), instance.environment, instance.variables
- local getenv = resolvers.getenv
instance.expansions = expansions
local engine, progname = instance.engine, instance.progname
if type(engine) ~= "string" then instance.engine, engine = "", "" end
@@ -10996,12 +11087,7 @@ function resolvers.expandvariables()
if engine ~= "" then environment['engine'] = engine end
if progname ~= "" then environment['progname'] = progname end
for k,v in next, environment do
- -- local a, b = match(k,"^(%a+)%_(.*)%s*$") -- too many vars have an _ in the name
- -- if a and b then -- so let's forget about it; it was a
- -- expansions[a..'.'..b] = v -- hack anyway for linux and not needed
- -- else -- anymore as we now have directives
- expansions[k] = v
- -- end
+ expansions[k] = v
end
for k,v in next, environment do -- move environment to expansions (variables are already in there)
if not expansions[k] then expansions[k] = v end
@@ -11009,26 +11095,19 @@ function resolvers.expandvariables()
for k,v in next, variables do -- move variables to expansions
if not expansions[k] then expansions[k] = v end
end
- local busy = false
- local function resolve(a)
- busy = true
- return expansions[a] or getenv(a)
- end
- while true do
- busy = false
+ repeat
+ local busy = false
for k,v in next, expansions do
- local s, n = gsub(v,"%$([%a%d%_%-]+)",resolve)
- local s, m = gsub(s,"%$%{([%a%d%_%-]+)%}",resolve)
- if n > 0 or m > 0 then
- s = gsub(s,";+",";")
- s = gsub(s,";[!{}/\\]+;",";")
- expansions[k]= s
+ local s = lpegmatch(variable,v)
+ if s ~= v then
+ busy = true
+ expansions[k] = s
end
end
- if not busy then break end
- end
+ until not busy
+
for k,v in next, expansions do
- expansions[k] = gsub(v,"\\", '/')
+ expansions[k] = lpegmatch(cleaner,v)
end
end
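-- [editorial sketch, not part of this commit] The gsub loop is replaced by lpeg
-- substitutions: 'variable' resolves $VAR and ${VAR}, 'cleaner' normalises
-- backslashes and stray separators. A self-contained version of the resolving
-- part against a plain table (assumes the lpeg module is available):

local lpeg = require("lpeg")
local P, R, S, Cs, lpegmatch = lpeg.P, lpeg.R, lpeg.S, lpeg.Cs, lpeg.match

local vars = { TEXMF = "$HOME/texmf", HOME = "/home/user" }

local function resolve(a) return vars[a] or "" end

local name     = (R("az","AZ","09") + S("_-"))^1 / resolve
local variable = (P("$")/"") * (name + (P("{")/"") * name * (P("}")/""))
      variable = Cs((variable + P(1))^0)

local s = "$TEXMF/fonts/map"
repeat
    local old = s
    s = lpegmatch(variable,s)
until s == old
print(s) -- /home/user/texmf/fonts/map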
@@ -11055,7 +11134,7 @@ function resolvers.unexpandedpathlist(str)
end
function resolvers.unexpandedpath(str)
- return file.joinpath(resolvers.unexpandedpathlist(str))
+ return joinpath(resolvers.unexpandedpathlist(str))
end
local done = { }
@@ -11169,7 +11248,7 @@ function resolvers.cleanpathlist(str)
end
function resolvers.expandpath(str)
- return file.joinpath(resolvers.expandedpathlist(str))
+ return joinpath(resolvers.expandedpathlist(str))
end
function resolvers.expandedpathlist(str)
@@ -11177,7 +11256,7 @@ function resolvers.expandedpathlist(str)
return ep or { } -- ep ?
elseif instance.savelists then
-- engine+progname hash
- str = gsub(str,"%$","")
+ str = lpegmatch(dollarstripper,str)
if not instance.lists[str] then -- cached
local lst = made_list(instance,resolvers.splitpath(resolvers.expansion(str)))
instance.lists[str] = expandedpathfromlist(lst)
@@ -11190,28 +11269,34 @@ function resolvers.expandedpathlist(str)
end
function resolvers.expandedpathlistfromvariable(str) -- brrr
- local tmp = resolvers.variableofformatorsuffix(gsub(str,"%$",""))
- if tmp ~= "" then
- return resolvers.expandedpathlist(tmp)
- else
- return resolvers.expandedpathlist(str)
- end
+ str = lpegmatch(dollarstripper,str)
+ local tmp = resolvers.variableofformatorsuffix(str)
+ return resolvers.expandedpathlist(tmp ~= "" and tmp or str)
end
function resolvers.expandpathfromvariable(str)
- return file.joinpath(resolvers.expandedpathlistfromvariable(str))
+ return joinpath(resolvers.expandedpathlistfromvariable(str))
end
function resolvers.expandbraces(str) -- output variable and brace expansion of STRING
local ori = resolvers.variable(str)
local pth = expandedpathfromlist(resolvers.splitpath(ori))
- return file.joinpath(pth)
+ return joinpath(pth)
end
-resolvers.isreadable = { }
+function resolvers.registerfilehash(name,content,someerror)
+ if content then
+ instance.files[name] = content
+ else
+ instance.files[name] = { }
+ if someerror == true then -- can be unset
+ instance.loaderror = someerror
+ end
+ end
+end
-function resolvers.isreadable.file(name)
- local readable = lfs.isfile(name) -- brrr
+function isreadable(name)
+ local readable = file.is_readable(name)
if trace_detail then
if readable then
report_resolvers("file '%s' is readable",name)
@@ -11222,8 +11307,6 @@ function resolvers.isreadable.file(name)
return readable
end
-resolvers.isreadable.tex = resolvers.isreadable.file
-
-- name
-- name/name
@@ -11244,7 +11327,7 @@ local function collect_files(names)
local hashes = instance.hashes
for h=1,#hashes do
local hash = hashes[h]
- local blobpath = hash.tag
+ local blobpath = hash.name
local files = blobpath and instance.files[blobpath]
if files then
if trace_detail then
@@ -11265,7 +11348,7 @@ local function collect_files(names)
if not dname or find(blobfile,dname) then
local kind = hash.type
local search = filejoin(blobpath,blobfile,bname)
- local result = resolvers.concatinators[hash.type](blobroot,blobfile,bname)
+ local result = methodhandler('concatinators',hash.type,blobroot,blobfile,bname)
if trace_detail then
report_resolvers("match: kind '%s', search '%s', result '%s'",kind,search,result)
end
@@ -11278,7 +11361,7 @@ local function collect_files(names)
if not dname or find(vv,dname) then
local kind = hash.type
local search = filejoin(blobpath,vv,bname)
- local result = resolvers.concatinators[hash.type](blobroot,vv,bname)
+ local result = methodhandler('concatinators',hash.type,blobroot,vv,bname)
if trace_detail then
report_resolvers("match: kind '%s', search '%s', result '%s'",kind,search,result)
end
@@ -11316,6 +11399,8 @@ local function can_be_dir(name) -- can become local
return fakepaths[name] == 1
end
+local preparetreepattern = Cs((P(".")/"%%." + P("-")/"%%-" + P(1))^0 * Cc("$"))
+
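-- [editorial sketch, not part of this commit] preparetreepattern escapes the
-- characters that are magic in Lua find patterns and anchors the result, so a
-- filename can be matched against the tail of full tree entries:

local lpeg = require("lpeg")
local P, Cs, Cc = lpeg.P, lpeg.Cs, lpeg.Cc

local preparetreepattern = Cs((P(".")/"%%." + P("-")/"%%-" + P(1))^0 * Cc("$"))

print(lpeg.match(preparetreepattern,"koeien-boek.tex")) -- koeien%-boek%.tex$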
local function collect_instance_files(filename,askedformat,allresults) -- todo : plugin (scanners, checkers etc)
local result = { }
local stamp = nil
@@ -11333,7 +11418,7 @@ local function collect_instance_files(filename,askedformat,allresults) -- todo :
end
end
if not dangerous[askedformat] then
- if resolvers.isreadable.file(filename) then
+ if isreadable(filename) then
if trace_detail then
report_resolvers("file '%s' found directly",filename)
end
@@ -11349,7 +11434,7 @@ local function collect_instance_files(filename,askedformat,allresults) -- todo :
end
result = resolvers.findwildcardfiles(filename) -- we can use th elocal
elseif file.is_qualified_path(filename) then
- if resolvers.isreadable.file(filename) then
+ if isreadable(filename) then
if trace_locating then
report_resolvers("qualified name '%s'", filename)
end
@@ -11362,7 +11447,7 @@ local function collect_instance_files(filename,askedformat,allresults) -- todo :
for i=1,#format_suffixes do
local s = format_suffixes[i]
forcedname = filename .. "." .. s
- if resolvers.isreadable.file(forcedname) then
+ if isreadable(forcedname) then
if trace_locating then
report_resolvers("no suffix, forcing format filetype '%s'", s)
end
@@ -11376,7 +11461,7 @@ local function collect_instance_files(filename,askedformat,allresults) -- todo :
-- try to find in tree (no suffix manipulation), here we search for the
-- matching last part of the name
local basename = filebasename(filename)
- local pattern = gsub(filename .. "$","([%.%-])","%%%1")
+ local pattern = lpegmatch(preparetreepattern,filename)
-- messy .. to be sorted out
local savedformat = askedformat
local format = savedformat or ""
@@ -11471,7 +11556,7 @@ local function collect_instance_files(filename,askedformat,allresults) -- todo :
end
for k=1,#wantedfiles do
local fname = wantedfiles[k]
- if fname and resolvers.isreadable.file(fname) then
+ if fname and isreadable(fname) then
filename, done = fname, true
result[#result+1] = filejoin('.',fname)
break
@@ -11497,26 +11582,15 @@ local function collect_instance_files(filename,askedformat,allresults) -- todo :
if trace_detail then
report_resolvers("checking filename '%s'",filename)
end
- -- a bit messy ... esp the doscan setting here
- local doscan
for k=1,#pathlist do
local path = pathlist[k]
- if find(path,"^!!") then doscan = false else doscan = true end
- local pathname = gsub(path,"^!+", '')
+ local pathname = lpegmatch(inhibitstripper,path)
+ local doscan = path == pathname -- no ^!!
done = false
-- using file list
if filelist then
- local expression
-- compare list entries with permitted pattern -- /xx /xx//
- if not find(pathname,"/$") then
- expression = pathname .. "/"
- else
- expression = pathname
- end
- expression = gsub(expression,"([%-%.])","%%%1") -- this also influences
- expression = gsub(expression,"//+$", '/.*') -- later usage of pathname
- expression = gsub(expression,"//", '/.-/') -- not ok for /// but harmless
- expression = "^" .. expression .. "$"
+ local expression = makepathexpression(pathname)
if trace_detail then
report_resolvers("using pattern '%s' for path '%s'",expression,pathname)
end
@@ -11545,7 +11619,8 @@ local function collect_instance_files(filename,askedformat,allresults) -- todo :
end
if not done and doscan then
-- check if on disk / unchecked / does not work at all / also zips
- if resolvers.splitmethod(pathname).scheme == 'file' then -- ?
+ local scheme = url.hasscheme(pathname)
+ if not scheme or scheme == "file" then
local pname = gsub(pathname,"%.%*$",'')
if not find(pname,"%*") then
local ppname = gsub(pname,"/+$","")
@@ -11553,7 +11628,7 @@ local function collect_instance_files(filename,askedformat,allresults) -- todo :
for k=1,#wantedfiles do
local w = wantedfiles[k]
local fname = filejoin(ppname,w)
- if resolvers.isreadable.file(fname) then
+ if isreadable(fname) then
if trace_detail then
report_resolvers("found '%s' by scanning",fname)
end
@@ -11586,9 +11661,6 @@ local function collect_instance_files(filename,askedformat,allresults) -- todo :
return result
end
-resolvers.concatinators.tex = filejoin
-resolvers.concatinators.file = resolvers.concatinators.tex
-
local function findfiles(filename,filetype,allresults)
local result = collect_instance_files(filename,filetype or "",allresults)
if #result == 0 then
@@ -11609,7 +11681,7 @@ function resolvers.findfile(filename,filetype)
end
function resolvers.findpath(filename,filetype)
- return file.dirname(findfiles(filename,filetype,false)[1] or "")
+ return filedirname(findfiles(filename,filetype,false)[1] or "")
end
local function findgivenfiles(filename,allresults)
@@ -11617,7 +11689,7 @@ local function findgivenfiles(filename,allresults)
local hashes = instance.hashes
for k=1,#hashes do
local hash = hashes[k]
- local files = instance.files[hash.tag] or { }
+ local files = instance.files[hash.name] or { }
local blist = files[bname]
if not blist then
local rname = "remap:"..bname
@@ -11629,12 +11701,12 @@ local function findgivenfiles(filename,allresults)
end
if blist then
if type(blist) == 'string' then
- result[#result+1] = resolvers.concatinators[hash.type](hash.tag,blist,bname) or ""
+ result[#result+1] = methodhandler('concatinators',hash.type,hash.name,blist,bname) or ""
if not allresults then break end
else
for kk=1,#blist do
local vv = blist[kk]
- result[#result+1] = resolvers.concatinators[hash.type](hash.tag,vv,bname) or ""
+ result[#result+1] = methodhandler('concatinators',hash.type,hash.name,vv,bname) or ""
if not allresults then break end
end
end
@@ -11657,14 +11729,14 @@ local function doit(path,blist,bname,tag,kind,result,allresults)
if type(blist) == 'string' then
-- make function and share code
if find(lower(blist),path) then
- result[#result+1] = resolvers.concatinators[kind](tag,blist,bname) or ""
+ result[#result+1] = methodhandler('concatinators',kind,tag,blist,bname) or ""
done = true
end
else
for kk=1,#blist do
local vv = blist[kk]
if find(lower(vv),path) then
- result[#result+1] = resolvers.concatinators[kind](tag,vv,bname) or ""
+ result[#result+1] = methodhandler('concatinators',kind,tag,vv,bname) or ""
done = true
if not allresults then break end
end
@@ -11674,30 +11746,25 @@ local function doit(path,blist,bname,tag,kind,result,allresults)
return done
end
+local makewildcard = Cs(
+ (P("^")^0 * P("/") * P(-1) + P(-1)) /".*"
+ + (P("^")^0 * P("/") / "")^0 * (P("*")/".*" + P("-")/"%%-" + P("?")/"."+ P("\\")/"/" + P(1))^0
+)
+
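-- [editorial sketch, not part of this commit] The idea behind makewildcard is
-- mapping shell-style wildcards onto Lua find patterns. A simplified,
-- self-contained variant (not the committed pattern, which also strips a
-- leading slash) handling only '*', '?' and '-':

local lpeg = require("lpeg")
local P, Cs, lpegmatch = lpeg.P, lpeg.Cs, lpeg.match

local towildcard = Cs((P("*")/".*" + P("?")/"." + P("-")/"%%-" + P(1))^0)

print(lpegmatch(towildcard,"c*-test?.mp")) -- c.*%-test..mp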
local function findwildcardfiles(filename,allresults) -- todo: remap: and lpeg
local result = { }
- local bname, dname = filebasename(filename), filedirname(filename)
- local path = gsub(dname,"^*/","")
- path = gsub(path,"*",".*")
- path = gsub(path,"-","%%-")
- if dname == "" then
- path = ".*"
- end
- local name = bname
- name = gsub(name,"*",".*")
- name = gsub(name,"-","%%-")
- path = lower(path)
- name = lower(name)
+ local path = lower(lpegmatch(makewildcard,filedirname (filename)))
+ local name = lower(lpegmatch(makewildcard,filebasename(filename)))
local files, done = instance.files, false
if find(name,"%*") then
local hashes = instance.hashes
for k=1,#hashes do
local hash = hashes[k]
- local tag, kind = hash.tag, hash.type
- for kk, hh in next, files[hash.tag] do
+ local hashname, hashtype = hash.name, hash.type
+ for kk, hh in next, files[hashname] do
if not find(kk,"^remap:") then
if find(lower(kk),name) then
- if doit(path,hh,kk,tag,kind,result,allresults) then done = true end
+ if doit(path,hh,kk,hashname,hashtype,result,allresults) then done = true end
if done and not allresults then break end
end
end
@@ -11707,8 +11774,8 @@ local function findwildcardfiles(filename,allresults) -- todo: remap: and lpeg
local hashes = instance.hashes
for k=1,#hashes do
local hash = hashes[k]
- local tag, kind = hash.tag, hash.type
- if doit(path,files[tag][bname],bname,tag,kind,result,allresults) then done = true end
+ local hashname, hashtype = hash.name, hash.type
+ if doit(path,files[hashname][bname],bname,hashname,hashtype,result,allresults) then done = true end
if done and not allresults then break end
end
end
@@ -11779,12 +11846,9 @@ end
-- resolvers.expandvar = resolvers.expansion -- output variable expansion of STRING.
function resolvers.showpath(str) -- output search path for file type NAME
- return file.joinpath(resolvers.expandedpathlist(resolvers.formatofvariable(str)))
+ return joinpath(resolvers.expandedpathlist(resolvers.formatofvariable(str)))
end
--- resolvers.findfile(filename)
--- resolvers.findfile(filename, f.iletype)
-
function resolvers.registerfile(files, name, path)
if files[name] then
if type(files[name]) == 'string' then
@@ -11809,7 +11873,7 @@ function resolvers.dowithvariable(name,func)
end
function resolvers.locateformat(name)
- local barename = gsub(name,"%.%a+$","")
+ local barename = file.removesuffix(name) -- gsub(name,"%.%a+$","")
local fmtname = caches.getfirstreadablefile(barename..".fmt","formats") or ""
if fmtname == "" then
fmtname = resolvers.findfile(barename..".fmt")
@@ -11845,7 +11909,7 @@ function resolvers.dowithfilesintree(pattern,handle,before,after) -- can be a ni
for i=1,#hashes do
local hash = hashes[i]
local blobtype = hash.type
- local blobpath = hash.tag
+ local blobpath = hash.name
if blobpath then
if before then
before(blobtype,blobpath,pattern)
@@ -12020,13 +12084,23 @@ if not modules then modules = { } end modules ['data-inp'] = {
license = "see context related readme files"
}
-local allocate = utilities.storage.allocate
-
+local allocate = utilities.storage.allocate
local resolvers = resolvers
-resolvers.finders = allocate { notfound = { nil } }
-resolvers.openers = allocate { notfound = { nil } }
-resolvers.loaders = allocate { notfound = { false, nil, 0 } }
+local methodhandler = resolvers.methodhandler
+local registermethod = resolvers.registermethod
+
+local finders = allocate { helpers = { }, notfound = function() end }
+local openers = allocate { helpers = { }, notfound = function() end }
+local loaders = allocate { helpers = { }, notfound = function() return false, nil, 0 end }
+
+registermethod("finders", finders, "uri")
+registermethod("openers", openers, "uri")
+registermethod("loaders", loaders, "uri")
+
+resolvers.finders = finders
+resolvers.openers = openers
+resolvers.loaders = loaders
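-- [editorial sketch, not part of this commit] Once the three namespaces are
-- registered as "uri" methods, supporting a new scheme is just a matter of
-- adding entries to these tables; methodhandler dispatches on the parsed
-- specification.scheme. A hypothetical 'null' scheme that always fails
-- (assumes the tables above are in scope):

function resolvers.finders.null(specification) return resolvers.finders.notfound() end
function resolvers.openers.null(specification) return resolvers.openers.notfound() end
function resolvers.loaders.null(specification) return resolvers.loaders.notfound() end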
end -- of closure
@@ -12041,8 +12115,134 @@ if not modules then modules = { } end modules ['data-out'] = {
license = "see context related readme files"
}
-resolvers.savers = utilities.storage.allocate { }
+local allocate = utilities.storage.allocate
+local resolvers = resolvers
+
+local registermethod = resolvers.registermethod
+
+local savers = allocate { helpers = { } }
+resolvers.savers = savers
+
+registermethod("savers", savers, "uri")
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+if not modules then modules = { } end modules ['data-fil'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
+
+local report_resolvers = logs.new("resolvers")
+
+local resolvers = resolvers
+
+local finders, openers, loaders, savers = resolvers.finders, resolvers.openers, resolvers.loaders, resolvers.savers
+local locators, hashers, generators, concatinators = resolvers.locators, resolvers.hashers, resolvers.generators, resolvers.concatinators
+
+local checkgarbage = utilities.garbagecollector and utilities.garbagecollector.check
+
+function locators.file(specification)
+ local name = specification.filename
+ if name and name ~= '' and lfs.isdir(name) then
+ if trace_locating then
+ report_resolvers("file locator '%s' found",name)
+ end
+ resolvers.appendhash('file',name,true) -- cache
+ elseif trace_locating then
+ report_resolvers("file locator '%s' not found",name)
+ end
+end
+
+function hashers.file(specification)
+ local name = specification.filename
+ local content = caches.loadcontent(name,'files')
+ resolvers.registerfilehash(name,content,content==nil)
+end
+
+function generators.file(specification)
+ local name = specification.filename
+ local content = resolvers.scanfiles(name)
+ resolvers.registerfilehash(name,content,true)
+end
+
+concatinators.file = file.join
+
+function finders.file(specification,filetype)
+ local filename = specification.filename
+ local foundname = resolvers.findfile(filename,filetype)
+ if foundname and foundname ~= "" then
+ if trace_locating then
+ report_resolvers("file finder: '%s' found",filename)
+ end
+ return foundname
+ else
+ if trace_locating then
+ report_resolvers("file finder: '%s' not found",filename)
+ end
+ return finders.notfound()
+ end
+end
+
+-- The default textopener will be overloaded later on.
+
+function openers.helpers.textopener(tag,filename,f)
+ return {
+ reader = function() return f:read () end,
+ close = function() return f:close() end,
+ }
+end
+
+function openers.file(specification,filetype)
+ local filename = specification.filename
+ if filename and filename ~= "" then
+ local f = io.open(filename,"r")
+ if f then
+ logs.show_open(filename) -- todo
+ if trace_locating then
+ report_resolvers("file opener, '%s' opened",filename)
+ end
+ return openers.helpers.textopener("file",filename,f)
+ end
+ end
+ if trace_locating then
+ report_resolvers("file opener, '%s' not found",filename)
+ end
+ return openers.notfound()
+end
+
+function loaders.file(specification,filetype)
+ local filename = specification.filename
+ if filename and filename ~= "" then
+ local f = io.open(filename,"rb")
+ if f then
+ logs.show_load(filename)
+ if trace_locating then
+ report_resolvers("file loader, '%s' loaded",filename)
+ end
+ local s = f:read("*a")
+ if checkgarbage then
+ checkgarbage(#s)
+ end
+ f:close()
+ if s then
+ return true, s, #s
+ end
+ end
+ end
+ if trace_locating then
+ report_resolvers("file loader, '%s' not found",filename)
+ end
+ return loaders.notfound()
+end
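-- [editorial sketch, not part of this commit] The handlers follow the calling
-- conventions the rest of the resolver expects: a finder returns a (possibly
-- empty) name, an opener returns a table with reader/close, a loader returns
-- ok, data, size. A hypothetical caller, assuming a readable 'somefile.tex':

local specification = { filename = "somefile.tex" }

local ok, data, size = resolvers.loaders.file(specification)
if ok then
    print("loaded",size,"bytes")
end

local handle = resolvers.openers.file(specification)
if handle then
    for line in handle.reader do -- reader() returns one line per call, nil at eof
        print(line)
    end
    handle.close()
end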
end -- of closure
@@ -12301,10 +12501,9 @@ if not modules then modules = { } end modules ['data-zip'] = {
license = "see context related readme files"
}
--- to be redone using the more recent schemes mechanism
+-- partly redone .. needs testing
local format, find, match = string.format, string.find, string.match
-local unpack = unpack or table.unpack
local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
@@ -12327,9 +12526,6 @@ local archives = zip.archives
zip.registeredfiles = zip.registeredfiles or { }
local registeredfiles = zip.registeredfiles
-local finders, openers, loaders = resolvers.finders, resolvers.openers, resolvers.loaders
-local locators, hashers, concatinators = resolvers.locators, resolvers.hashers, resolvers.concatinators
-
local function validzip(str) -- todo: use url splitter
if not find(str,"^zip://") then
return "zip:///" .. str
@@ -12359,159 +12555,159 @@ function zip.closearchive(name)
end
end
-function locators.zip(specification) -- where is this used? startup zips (untested)
- specification = resolvers.splitmethod(specification)
- local zipfile = specification.path
- local zfile = zip.openarchive(name) -- tricky, could be in to be initialized tree
+function resolvers.locators.zip(specification)
+ local archive = specification.filename
+ local zipfile = archive and archive ~= "" and zip.openarchive(archive) -- tricky, could be in to be initialized tree
if trace_locating then
- if zfile then
- report_resolvers("zip locator, archive '%s' found",specification.original)
+ if zipfile then
+ report_resolvers("zip locator, archive '%s' found",archive)
else
- report_resolvers("zip locator, archive '%s' not found",specification.original)
+ report_resolvers("zip locator, archive '%s' not found",archive)
end
end
end
-function hashers.zip(tag,name)
+function resolvers.hashers.zip(specification)
+ local archive = specification.filename
if trace_locating then
- report_resolvers("loading zip file '%s' as '%s'",name,tag)
+ report_resolvers("loading zip file '%s'",archive)
end
- resolvers.usezipfile(format("%s?tree=%s",tag,name))
+ resolvers.usezipfile(specification.original)
end
-function concatinators.zip(tag,path,name)
+function resolvers.concatinators.zip(zipfile,path,name) -- ok ?
if not path or path == "" then
- return format('%s?name=%s',tag,name)
+ return format('%s?name=%s',zipfile,name)
else
- return format('%s?name=%s/%s',tag,path,name)
+ return format('%s?name=%s/%s',zipfile,path,name)
end
end
-function resolvers.isreadable.zip(name)
- return true
-end
-
-function finders.zip(specification,filetype)
- specification = resolvers.splitmethod(specification)
- if specification.path then
- local q = url.query(specification.query)
- if q.name then
- local zfile = zip.openarchive(specification.path)
+function resolvers.finders.zip(specification)
+ local original = specification.original
+ local archive = specification.filename
+ if archive then
+ local query = url.query(specification.query)
+ local queryname = query.name
+ if queryname then
+ local zfile = zip.openarchive(archive)
if zfile then
if trace_locating then
- report_resolvers("zip finder, archive '%s' found",specification.path)
+ report_resolvers("zip finder, archive '%s' found",archive)
end
- local dfile = zfile:open(q.name)
+ local dfile = zfile:open(queryname)
if dfile then
dfile = zfile:close()
if trace_locating then
- report_resolvers("zip finder, file '%s' found",q.name)
+ report_resolvers("zip finder, file '%s' found",queryname)
end
return specification.original
elseif trace_locating then
- report_resolvers("zip finder, file '%s' not found",q.name)
+ report_resolvers("zip finder, file '%s' not found",queryname)
end
elseif trace_locating then
- report_resolvers("zip finder, unknown archive '%s'",specification.path)
+ report_resolvers("zip finder, unknown archive '%s'",archive)
end
end
end
if trace_locating then
- report_resolvers("zip finder, '%s' not found",filename)
+ report_resolvers("zip finder, '%s' not found",original)
end
- return unpack(finders.notfound)
+ return resolvers.finders.notfound()
end
-function openers.zip(specification)
- local zipspecification = resolvers.splitmethod(specification)
- if zipspecification.path then
- local q = url.query(zipspecification.query)
- if q.name then
- local zfile = zip.openarchive(zipspecification.path)
+function resolvers.openers.zip(specification)
+ local original = specification.original
+ local archive = specification.filename
+ if archive then
+ local query = url.query(specification.query)
+ local queryname = query.name
+ if queryname then
+ local zfile = zip.openarchive(archive)
if zfile then
if trace_locating then
- report_resolvers("zip opener, archive '%s' opened",zipspecification.path)
+ report_resolvers("zip opener, archive '%s' opened",archive)
end
- local dfile = zfile:open(q.name)
+ local dfile = zfile:open(queryname)
if dfile then
- logs.show_open(specification)
+ logs.show_open(original)
if trace_locating then
- report_resolvers("zip opener, file '%s' found",q.name)
+ report_resolvers("zip opener, file '%s' found",queryname)
end
- return openers.textopener('zip',specification,dfile)
+ return resolvers.openers.helpers.textopener('zip',original,dfile)
elseif trace_locating then
- report_resolvers("zip opener, file '%s' not found",q.name)
+ report_resolvers("zip opener, file '%s' not found",queryname)
end
elseif trace_locating then
- report_resolvers("zip opener, unknown archive '%s'",zipspecification.path)
+ report_resolvers("zip opener, unknown archive '%s'",archive)
end
end
end
if trace_locating then
- report_resolvers("zip opener, '%s' not found",filename)
+ report_resolvers("zip opener, '%s' not found",original)
end
- return unpack(openers.notfound)
+ return resolvers.openers.notfound()
end
-function loaders.zip(specification)
- specification = resolvers.splitmethod(specification)
- if specification.path then
- local q = url.query(specification.query)
- if q.name then
- local zfile = zip.openarchive(specification.path)
+function resolvers.loaders.zip(specification)
+ local original = specification.original
+ local archive = specification.filename
+ if archive then
+ local query = url.query(specification.query)
+ local queryname = query.name
+ if queryname then
+ local zfile = zip.openarchive(archive)
if zfile then
if trace_locating then
- report_resolvers("zip loader, archive '%s' opened",specification.path)
+ report_resolvers("zip loader, archive '%s' opened",archive)
end
- local dfile = zfile:open(q.name)
+ local dfile = zfile:open(queryname)
if dfile then
- logs.show_load(filename)
+ logs.show_load(original)
if trace_locating then
- report_resolvers("zip loader, file '%s' loaded",filename)
+ report_resolvers("zip loader, file '%s' loaded",original)
end
local s = dfile:read("*all")
dfile:close()
return true, s, #s
elseif trace_locating then
- report_resolvers("zip loader, file '%s' not found",q.name)
+ report_resolvers("zip loader, file '%s' not found",queryname)
end
elseif trace_locating then
- report_resolvers("zip loader, unknown archive '%s'",specification.path)
+ report_resolvers("zip loader, unknown archive '%s'",archive)
end
end
end
if trace_locating then
- report_resolvers("zip loader, '%s' not found",filename)
+ report_resolvers("zip loader, '%s' not found",original)
end
- return unpack(openers.notfound)
+ return resolvers.loaders.notfound()
end
-- zip:///somefile.zip
-- zip:///somefile.zip?tree=texmf-local -> mount
-function resolvers.usezipfile(zipname)
- zipname = validzip(zipname)
- local specification = resolvers.splitmethod(zipname)
- local zipfile = specification.path
- if zipfile and not registeredfiles[zipname] then
- local tree = url.query(specification.query).tree or ""
- local z = zip.openarchive(zipfile)
+function resolvers.usezipfile(archive)
+ local specification = resolvers.splitmethod(archive) -- to be sure
+ local archive = specification.filename
+ if archive and not registeredfiles[archive] then
+ local z = zip.openarchive(archive)
if z then
- local instance = resolvers.instance
+ local tree = url.query(specification.query).tree or ""
if trace_locating then
- report_resolvers("zip registering, registering archive '%s'",zipname)
- end
- statistics.starttiming(instance)
- resolvers.prependhash('zip',zipname,zipfile)
- resolvers.extendtexmfvariable(zipname) -- resets hashes too
- registeredfiles[zipname] = z
- instance.files[zipname] = resolvers.registerzipfile(z,tree or "")
- statistics.stoptiming(instance)
+ report_resolvers("zip registering, registering archive '%s'",archive)
+ end
+ statistics.starttiming(resolvers.instance)
+ resolvers.prependhash('zip',archive)
+ resolvers.extendtexmfvariable(archive) -- resets hashes too
+ registeredfiles[archive] = z
+ resolvers.instance.files[archive] = resolvers.registerzipfile(z,tree)
+ statistics.stoptiming(resolvers.instance)
elseif trace_locating then
- report_resolvers("zip registering, unknown archive '%s'",zipname)
+ report_resolvers("zip registering, unknown archive '%s'",archive)
end
elseif trace_locating then
- report_resolvers("zip registering, '%s' not found",zipname)
+ report_resolvers("zip registering, '%s' not found",archive)
end
end
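-- [editorial sketch, not part of this commit] The two url forms above behave as
-- before: '?name=...' addresses a single member through the zip finder, opener
-- and loader, while '?tree=...' mounts the archive as a searchable tree whose
-- hash is now keyed by the parsed filename. Mounting is still one call
-- (assumes the archive exists and this module is loaded):

resolvers.usezipfile("zip:///texmf-project.zip?tree=texmf-project")

-- Files found in a mounted archive come back in the 'zipfile?name=path/file'
-- form produced by resolvers.concatinators.zip above.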
@@ -12560,7 +12756,8 @@ if not modules then modules = { } end modules ['data-tre'] = {
-- \input tree://oeps1/**/oeps.tex
local find, gsub, format = string.find, string.gsub, string.format
-local unpack = unpack or table.unpack
+
+local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
local report_resolvers = logs.new("resolvers")
@@ -12568,10 +12765,10 @@ local resolvers = resolvers
local done, found, notfound = { }, { }, resolvers.finders.notfound
-function resolvers.finders.tree(specification,filetype)
- local fnd = found[specification]
- if not fnd then
- local spec = resolvers.splitmethod(specification).path or ""
+function resolvers.finders.tree(specification)
+ local spec = specification.filename
+ local fnd = found[spec]
+ if fnd == nil then
if spec ~= "" then
local path, name = file.dirname(spec), file.basename(spec)
if path == "" then path = "." end
@@ -12585,53 +12782,41 @@ function resolvers.finders.tree(specification,filetype)
for k=1,#hash do
local v = hash[k]
if find(v,pattern) then
- found[specification] = v
+ found[spec] = v
return v
end
end
end
- fnd = unpack(notfound) -- unpack ? why not just notfound[1]
- found[specification] = fnd
+ fnd = notfound() -- false
+ found[spec] = fnd
end
return fnd
end
function resolvers.locators.tree(specification)
- local spec = resolvers.splitmethod(specification)
- local path = spec.path
- if path ~= '' and lfs.isdir(path) then
+ local name = specification.filename
+ if name ~= '' and lfs.isdir(name) then
if trace_locating then
- report_resolvers("tree locator '%s' found (%s)",path,specification)
+ report_resolvers("tree locator '%s' found",name)
end
- resolvers.appendhash('tree',specification,path,false) -- don't cache
+ resolvers.appendhash('tree',name,false) -- don't cache
elseif trace_locating then
- report_resolvers("tree locator '%s' not found",path)
+ report_resolvers("tree locator '%s' not found",name)
end
end
-function resolvers.hashers.tree(tag,name)
+function resolvers.hashers.tree(specification)
+ local name = specification.filename
if trace_locating then
- report_resolvers("analysing tree '%s' as '%s'",name,tag)
+ report_resolvers("analysing tree '%s'",name)
end
- -- todo: maybe share with done above
- local spec = resolvers.splitmethod(tag)
- local path = spec.path
- resolvers.generators.tex(path,tag) -- we share this with the normal tree analyzer
+ resolvers.methodhandler("hashers",name)
end
-function resolvers.generators.tree(tag)
- local spec = resolvers.splitmethod(tag)
- local path = spec.path
- resolvers.generators.tex(path,tag) -- we share this with the normal tree analyzer
-end
-
-function resolvers.concatinators.tree(tag,path,name)
- return file.join(tag,path,name)
-end
-
-resolvers.isreadable.tree = file.isreadable
-resolvers.openers.tree = resolvers.openers.generic
-resolvers.loaders.tree = resolvers.loaders.generic
+resolvers.concatinators.tree = resolvers.concatinators.file
+resolvers.generators.tree = resolvers.generators.file
+resolvers.openers.tree = resolvers.openers.file
+resolvers.loaders.tree = resolvers.loaders.file
end -- of closure
@@ -12654,53 +12839,51 @@ local resolvers = resolvers
local finders, openers, loaders = resolvers.finders, resolvers.openers, resolvers.loaders
-curl = curl or { }
-local curl = curl
+resolvers.curl = resolvers.curl or { }
+local curl = resolvers.curl
local cached = { }
-function curl.fetch(protocol, name) -- todo: use socket library
- local cleanname = gsub(name,"[^%a%d%.]+","-")
+local function runcurl(specification)
+ local original = specification.original
+ -- local scheme = specification.scheme
+ local cleanname = gsub(original,"[^%a%d%.]+","-")
local cachename = caches.setfirstwritablefile(cleanname,"curl")
- if not cached[name] then
+ if not cached[original] then
if not io.exists(cachename) then
- cached[name] = cachename
- local command = "curl --silent --create-dirs --output " .. cachename .. " " .. name -- no protocol .. "://"
+ cached[original] = cachename
+ local command = "curl --silent --create-dirs --output " .. cachename .. " " .. original
os.spawn(command)
end
if io.exists(cachename) then
- cached[name] = cachename
+ cached[original] = cachename
else
- cached[name] = ""
+ cached[original] = ""
end
end
- return cached[name]
+ return cached[original]
end
-function finders.curl(protocol,filename)
- local foundname = curl.fetch(protocol, filename)
- return finders.generic(protocol,foundname,filetype)
-end
+-- old code: we could be cleaner using specification (see schemes)
-function openers.curl(protocol,filename)
- return openers.generic(protocol,filename)
+local function finder(specification,filetype)
+ return resolvers.methodhandler("finders",runcurl(specification),filetype)
end
-function loaders.curl(protocol,filename)
- return loaders.generic(protocol,filename)
-end
-
--- todo: metamethod
+local opener = openers.file
+local loader = loaders.file
-function curl.install(protocol)
- finders[protocol] = function (filename,filetype) return finders.curl(protocol,filename) end
- openers[protocol] = function (filename) return openers.curl(protocol,filename) end
- loaders[protocol] = function (filename) return loaders.curl(protocol,filename) end
+local function install(scheme)
+ finders[scheme] = finder
+ openers[scheme] = opener
+ loaders[scheme] = loader
end
-curl.install('http')
-curl.install('https')
-curl.install('ftp')
+resolvers.curl.install = install
+
+install('http')
+install('https')
+install('ftp')
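-- [editorial sketch, not part of this commit] install() just wires a scheme to
-- the shared curl-backed finder plus the plain file opener and loader, so
-- another curl-supported protocol could be hooked in the same way
-- (hypothetical example):

resolvers.curl.install('sftp')

-- After that, a reference like "sftp://host/path/file.tex" is fetched once into
-- the "curl" cache directory and then treated as a local file.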
end -- of closure
@@ -12777,7 +12960,7 @@ local function loaded(libpaths,name,simple)
if trace_locating then -- more detail
report_resolvers("! checking for '%s' on 'package.path': '%s' => '%s'",simple,libpath,resolved)
end
- if resolvers.isreadable.file(resolved) then
+ if file.is_readable(resolved) then
if trace_locating then
report_resolvers("! lib '%s' located via 'package.path': '%s'",name,resolved)
end
@@ -12786,7 +12969,6 @@ local function loaded(libpaths,name,simple)
end
end
-
package.loaders[2] = function(name) -- was [#package.loaders+1]
if trace_locating then -- mode detail
report_resolvers("! locating '%s'",name)
@@ -12824,7 +13006,7 @@ package.loaders[2] = function(name) -- was [#package.loaders+1]
if trace_locating then -- mode detail
report_resolvers("! checking for '%s' using 'clibformat path': '%s'",libname,path)
end
- if resolvers.isreadable.file(resolved) then
+ if file.is_readable(resolved) then
if trace_locating then
report_resolvers("! lib '%s' located via 'clibformat': '%s'",libname,resolved)
end
@@ -12838,7 +13020,7 @@ package.loaders[2] = function(name) -- was [#package.loaders+1]
if trace_locating then -- more detail
report_resolvers("! checking for '%s' on 'package.cpath': '%s'",simple,libpath)
end
- if resolvers.isreadable.file(resolved) then
+ if file.is_readable(resolved) then
if trace_locating then
report_resolvers("! lib '%s' located via 'package.cpath': '%s'",name,resolved)
end
@@ -13375,6 +13557,7 @@ own.libs = { -- order can be made better
'data-pre.lua',
'data-inp.lua',
'data-out.lua',
+ 'data-fil.lua',
'data-con.lua',
'data-use.lua',
-- 'data-tex.lua',
diff --git a/scripts/context/stubs/unix/mtxrun b/scripts/context/stubs/unix/mtxrun
index 3be305bed..6b74022ae 100644
--- a/scripts/context/stubs/unix/mtxrun
+++ b/scripts/context/stubs/unix/mtxrun
@@ -119,7 +119,7 @@ local patterns_escapes = {
["."] = "%.",
["+"] = "%+", ["-"] = "%-", ["*"] = "%*",
["["] = "%[", ["]"] = "%]",
- ["("] = "%)", [")"] = "%)",
+ ["("] = "%(", [")"] = "%)",
-- ["{"] = "%{", ["}"] = "%}"
-- ["^"] = "%^", ["$"] = "%$",
}
@@ -185,6 +185,7 @@ local patterns = lpeg.patterns
local P, R, S, V, match = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.match
local Ct, C, Cs, Cc, Cf, Cg = lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc, lpeg.Cf, lpeg.Cg
+local lpegtype = lpeg.type
local utfcharacters = string.utfcharacters
local utfgmatch = unicode and unicode.utf8.gmatch
@@ -201,7 +202,6 @@ patterns.alwaysmatched = alwaysmatched
local digit, sign = R('09'), S('+-')
local cr, lf, crlf = P("\r"), P("\n"), P("\r\n")
local newline = crlf + cr + lf
-local utf8next = R("\128\191")
local escaped = P("\\") * anything
local squote = P("'")
local dquote = P('"')
@@ -222,6 +222,8 @@ local utftype = utfbom_32_be / "utf-32-be" + utfbom_32_le / "utf-32-le
+ utfbom_16_be / "utf-16-be" + utfbom_16_le / "utf-16-le"
+ utfbom_8 / "utf-8" + alwaysmatched / "unknown"
+local utf8next = R("\128\191")
+
patterns.utf8one = R("\000\127")
patterns.utf8two = R("\194\223") * utf8next
patterns.utf8three = R("\224\239") * utf8next * utf8next
@@ -432,19 +434,25 @@ end
-- Just for fun I looked at the used bytecode and
-- p = (p and p + pp) or pp gets one more (testset).
-function lpeg.replacer(t)
- if #t > 0 then
- local p
- for i=1,#t do
- local ti= t[i]
- local pp = P(ti[1]) / ti[2]
- if p then
- p = p + pp
- else
- p = pp
+function lpeg.replacer(one,two)
+ if type(one) == "table" then
+ local no = #one
+ if no > 0 then
+ local p
+ for i=1,no do
+ local o = one[i]
+ local pp = P(o[1]) / o[2]
+ if p then
+ p = p + pp
+ else
+ p = pp
+ end
end
+ return Cs((p + 1)^0)
end
- return Cs((p + 1)^0)
+ else
+ two = two or ""
+ return Cs((P(one)/two + 1)^0)
end
end
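-- [editorial sketch, not part of this commit] The extended replacer keeps the
-- old table form and adds a simple (from,to) form; both return a substitution
-- pattern (assumes the lpeg.replacer defined above is loaded):

local swapslashes = lpeg.replacer("\\","/")
print(lpeg.match(swapslashes,[[fonts\map\pdftex]])) -- fonts/map/pdftex

local tonumbers = lpeg.replacer { { "a", "1" }, { "b", "2" } }
print(lpeg.match(tonumbers,"aabba")) -- 11221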
@@ -646,6 +654,10 @@ function lpeg.oneof(list,...) -- lpeg.oneof("elseif","else","if","then")
return p
end
+function lpeg.is_lpeg(p)
+ return p and lpegtype(p) == "pattern"
+end
+
end -- of closure
@@ -2558,6 +2570,9 @@ local separator = P("://")
local qualified = P(".")^0 * P("/") + letter*P(":") + letter^1*separator + letter^1 * P("/")
local rootbased = P("/") + letter*P(":")
+lpeg.patterns.qualified = qualified
+lpeg.patterns.rootbased = rootbased
+
-- ./name ../name /name c: :// name/name
function file.is_qualified_path(filename)
@@ -2678,72 +2693,95 @@ if not modules then modules = { } end modules ['l-url'] = {
license = "see context related readme files"
}
-local char, gmatch, gsub, format, byte = string.char, string.gmatch, string.gsub, string.format, string.byte
+local char, gmatch, gsub, format, byte, find = string.char, string.gmatch, string.gsub, string.format, string.byte, string.find
local concat = table.concat
local tonumber, type = tonumber, type
-local lpegmatch, lpegP, lpegC, lpegR, lpegS, lpegCs, lpegCc = lpeg.match, lpeg.P, lpeg.C, lpeg.R, lpeg.S, lpeg.Cs, lpeg.Cc
+local P, C, R, S, Cs, Cc, Ct = lpeg.P, lpeg.C, lpeg.R, lpeg.S, lpeg.Cs, lpeg.Cc, lpeg.Ct
+local lpegmatch, lpegpatterns, replacer = lpeg.match, lpeg.patterns, lpeg.replacer
--- from the spec (on the web):
+-- from wikipedia:
--
--- foo://example.com:8042/over/there?name=ferret#nose
--- \_/ \______________/\_________/ \_________/ \__/
--- | | | | |
--- scheme authority path query fragment
--- | _____________________|__
--- / \ / \
--- urn:example:animal:ferret:nose
+-- foo://username:password@example.com:8042/over/there/index.dtb?type=animal;name=narwhal#nose
+-- \_/ \_______________/ \_________/ \__/ \___/ \_/ \______________________/ \__/
+-- | | | | | | | |
+-- | userinfo hostname port | | query fragment
+-- | \________________________________/\_____________|____|/
+-- scheme | | | |
+-- | authority path | |
+-- | | |
+-- | path interpretable as filename
+-- | ___________|____________ |
+-- / \ / \ |
+-- urn:example:animal:ferret:nose interpretable as extension
url = url or { }
local url = url
-local function tochar(s)
- return char(tonumber(s,16))
-end
+local tochar = function(s) return char(tonumber(s,16)) end
-local colon, qmark, hash, slash, percent, endofstring = lpegP(":"), lpegP("?"), lpegP("#"), lpegP("/"), lpegP("%"), lpegP(-1)
+local colon = P(":")
+local qmark = P("?")
+local hash = P("#")
+local slash = P("/")
+local percent = P("%")
+local endofstring = P(-1)
-local hexdigit = lpegR("09","AF","af")
-local plus = lpegP("+")
-local nothing = lpegCc("")
-local escaped = (plus / " ") + (percent * lpegC(hexdigit * hexdigit) / tochar)
+local hexdigit = R("09","AF","af")
+local plus = P("+")
+local nothing = Cc("")
+local escaped = (plus / " ") + (percent * C(hexdigit * hexdigit) / tochar)
-- we assume schemes with more than 1 character (in order to avoid problems with windows disks)
-local scheme = lpegCs((escaped+(1-colon-slash-qmark-hash))^2) * colon + nothing
-local authority = slash * slash * lpegCs((escaped+(1- slash-qmark-hash))^0) + nothing
-local path = slash * lpegCs((escaped+(1- qmark-hash))^0) + nothing
-local query = qmark * lpegCs((escaped+(1- hash))^0) + nothing
-local fragment = hash * lpegCs((escaped+(1- endofstring))^0) + nothing
-
-local parser = lpeg.Ct(scheme * authority * path * query * fragment)
+local scheme = Cs((escaped+(1-colon-slash-qmark-hash))^2) * colon + nothing
+local authority = slash * slash * Cs((escaped+(1- slash-qmark-hash))^0) + nothing
+local path = slash * Cs((escaped+(1- qmark-hash))^0) + nothing
+local query = qmark * Cs((escaped+(1- hash))^0) + nothing
+local fragment = hash * Cs((escaped+(1- endofstring))^0) + nothing
-lpeg.patterns.urlsplitter = parser
+local parser = Ct(scheme * authority * path * query * fragment)
-local escapes = { }
+lpegpatterns.urlsplitter = parser
-for i=0,255 do
- escapes[i] = format("%%%02X",i)
-end
+local escapes = { } ; for i=0,255 do escapes[i] = format("%%%02X",i) end
-local escaper = lpeg.Cs((lpegR("09","AZ","az") + lpegS("-./_") + lpegP(1) / escapes)^0)
+local escaper = Cs((R("09","AZ","az") + S("-./_") + P(1) / escapes)^0)
-lpeg.patterns.urlescaper = escaper
+lpegpatterns.urlescaper = escaper
-- todo: reconsider Ct as we can as well have five return values (saves a table)
-- so we can have two parsers, one with and one without
-function url.split(str)
+local function split(str)
return (type(str) == "string" and lpegmatch(parser,str)) or str
end
+local function hasscheme(str)
+ local scheme = lpegmatch(scheme,str) -- at least one character
+ return scheme and scheme ~= ""
+end
+
-- todo: cache them
-function url.hashed(str) -- not yet ok (/test?test)
- local s = url.split(str)
+local rootletter = R("az","AZ")
+ + S("_-+")
+local separator = P("://")
+local qualified = P(".")^0 * P("/")
+ + rootletter * P(":")
+ + rootletter^1 * separator
+ + rootletter^1 * P("/")
+local rootbased = P("/")
+ + rootletter * P(":")
+
+local barswapper = replacer("|",":")
+local backslashswapper = replacer("\\","/")
+
+local function hashed(str) -- not yet ok (/test?test)
+ local s = split(str)
local somescheme = s[1] ~= ""
local somequery = s[4] ~= ""
if not somescheme and not somequery then
- return {
+ s = {
scheme = "file",
authority = "",
path = str,
@@ -2751,52 +2789,73 @@ function url.hashed(str) -- not yet ok (/test?test)
fragment = "",
original = str,
noscheme = true,
+ filename = str,
}
- else
- return {
+ else -- not always a filename but handy anyway
+ local authority, path, filename = s[2], s[3]
+ if authority == "" then
+ filename = path
+ else
+ filename = authority .. "/" .. path
+ end
+ s = {
scheme = s[1],
- authority = s[2],
- path = s[3],
+ authority = authority,
+ path = path,
query = s[4],
fragment = s[5],
original = str,
noscheme = false,
+ filename = filename,
}
end
+ return s
end
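-- [editorial sketch, not part of this commit] The hashed table now always
-- carries a 'filename' field, which is what the resolvers feed into
-- splitmethod. Two illustrative calls (assumes this l-url module is loaded):

local z = url.hashed("zip:///archive.zip?name=a.tex")
print(z.scheme, z.filename) -- zip     archive.zip

local f = url.hashed("oeps/test.tex")
print(f.scheme, f.filename, f.noscheme) -- file    oeps/test.tex   true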
+-- Here we assume:
+--
+-- files: /// = relative
+-- files: //// = absolute (!)
+
-function url.hasscheme(str)
- return url.split(str)[1] ~= ""
-end
-function url.addscheme(str,scheme)
- return (url.hasscheme(str) and str) or ((scheme or "file:///") .. str)
+url.split = split
+url.hasscheme = hasscheme
+url.hashed = hashed
+
+function url.addscheme(str,scheme) -- no authority
+ if hasscheme(str) then
+ return str
+ elseif not scheme then
+ return "file:///" .. str
+ else
+ return scheme .. ":///" .. str
+ end
end
function url.construct(hash) -- dodo: we need to escape !
- local fullurl = { }
+ local fullurl, f = { }, 0
local scheme, authority, path, query, fragment = hash.scheme, hash.authority, hash.path, hash.query, hash.fragment
if scheme and scheme ~= "" then
- fullurl[#fullurl+1] = scheme .. "://"
+ f = f + 1 ; fullurl[f] = scheme .. "://"
end
if authority and authority ~= "" then
- fullurl[#fullurl+1] = authority
+ f = f + 1 ; fullurl[f] = authority
end
if path and path ~= "" then
- fullurl[#fullurl+1] = "/" .. path
+ f = f + 1 ; fullurl[f] = "/" .. path
end
if query and query ~= "" then
- fullurl[#fullurl+1] = "?".. query
+ f = f + 1 ; fullurl[f] = "?".. query
end
if fragment and fragment ~= "" then
- fullurl[#fullurl+1] = "#".. fragment
+ f = f + 1 ; fullurl[f] = "#".. fragment
end
return lpegmatch(escaper,concat(fullurl))
end
function url.filename(filename)
- local t = url.hashed(filename)
+ local t = hashed(filename)
return (t.scheme == "file" and (gsub(t.path,"^/([a-zA-Z])([:|])/)","%1:"))) or filename
end
@@ -2820,6 +2879,7 @@ end
+
end -- of closure
do -- create closure to overcome 200 locals limit
@@ -2861,25 +2921,22 @@ end
-- optimizing for no find (*) does not save time
+
local function globpattern(path,patt,recurse,action)
- local ok, scanner
if path == "/" then
- ok, scanner = xpcall(function() return walkdir(path..".") end, function() end) -- kepler safe
- else
- ok, scanner = xpcall(function() return walkdir(path) end, function() end) -- kepler safe
+ path = path .. "."
+ elseif not find(path,"/$") then
+ path = path .. '/'
end
- if ok and type(scanner) == "function" then
- if not find(path,"/$") then path = path .. '/' end
- for name in scanner do
- local full = path .. name
- local mode = attributes(full,'mode')
- if mode == 'file' then
- if find(full,patt) then
- action(full)
- end
- elseif recurse and (mode == "directory") and (name ~= '.') and (name ~= "..") then
- globpattern(full,patt,recurse,action)
+ for name in walkdir(path) do
+ local full = path .. name
+ local mode = attributes(full,'mode')
+ if mode == 'file' then
+ if find(full,patt) then
+ action(full)
end
+ elseif recurse and (mode == "directory") and (name ~= '.') and (name ~= "..") then
+ globpattern(full,patt,recurse,action)
end
end
end
@@ -9363,10 +9420,10 @@ if not modules then modules = { } end modules ['data-exp'] = {
license = "see context related readme files",
}
-local format, gsub, find, gmatch, lower = string.format, string.gsub, string.find, string.gmatch, string.lower
+local format, find, gmatch, lower = string.format, string.find, string.gmatch, string.lower
local concat, sort = table.concat, table.sort
local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
-local lpegCt, lpegCs, lpegP, lpegC, lpegS = lpeg.Ct, lpeg.Cs, lpeg.P, lpeg.C, lpeg.S
+local Ct, Cs, Cc, P, C, S = lpeg.Ct, lpeg.Cs, lpeg.Cc, lpeg.P, lpeg.C, lpeg.S
local type, next = type, next
local ostype = os.type
@@ -9381,7 +9438,7 @@ local resolvers = resolvers
-- As this bit of code is somewhat special it gets its own module. After
-- all, when working on the main resolver code, I don't want to scroll
--- past this every time.
+-- past this every time. See data-obs.lua for the gsub variant.
-- {a,b,c,d}
-- a,b,c/{p,q,r},d
@@ -9396,95 +9453,70 @@ local resolvers = resolvers
-- {a,b,c/{p,q/{x,y,z},w}v,d/{p,q,r}}
-- {$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,.local,}/web2c}
--- this one is better and faster, but it took me a while to realize
--- that this kind of replacement is cleaner than messy parsing and
--- fuzzy concatenating we can probably gain a bit with selectively
--- applying lpeg, but experiments with lpeg parsing this proved not to
--- work that well; the parsing is ok, but dealing with the resulting
--- table is a pain because we need to work inside-out recursively
-
-local dummy_path_expr = "^!*unset/*$"
-
-local function do_first(a,b)
+local function f_first(a,b)
local t, n = { }, 0
for s in gmatch(b,"[^,]+") do
- n = n + 1
- t[n] = a .. s
+ n = n + 1 ; t[n] = a .. s
end
- return "{" .. concat(t,",") .. "}"
+ return concat(t,",")
end
-local function do_second(a,b)
+local function f_second(a,b)
local t, n = { }, 0
for s in gmatch(a,"[^,]+") do
- n = n + 1
- t[n] = s .. b
+ n = n + 1 ; t[n] = s .. b
end
- return "{" .. concat(t,",") .. "}"
+ return concat(t,",")
end
-local function do_both(a,b)
+local function f_both(a,b)
local t, n = { }, 0
for sa in gmatch(a,"[^,]+") do
for sb in gmatch(b,"[^,]+") do
- n = n + 1
- t[n] = sa .. sb
+ n = n + 1 ; t[n] = sa .. sb
end
end
- return "{" .. concat(t,",") .. "}"
+ return concat(t,",")
end
-local function do_three(a,b,c)
- return a .. b.. c
-end
+local left = P("{")
+local right = P("}")
+local var = P((1 - S("{}" ))^0)
+local set = P((1 - S("{},"))^0)
+local other = P(1)
-local stripper_1 = lpeg.stripper("{}@")
+local l_first = Cs( ( Cc("{") * (C(set) * left * C(var) * right / f_first) * Cc("}") + other )^0 )
+local l_second = Cs( ( Cc("{") * (left * C(var) * right * C(set) / f_second) * Cc("}") + other )^0 )
+local l_both = Cs( ( Cc("{") * (left * C(var) * right * left * C(var) * right / f_both) * Cc("}") + other )^0 )
+local l_rest = Cs( ( left * var * (left/"") * var * (right/"") * var * right + other )^0 )
-local replacer_1 = lpeg.replacer {
- { ",}", ",@}" },
- { "{,", "{@," },
-}
+local stripper_1 = lpeg.stripper ("{}@")
+local replacer_1 = lpeg.replacer { { ",}", ",@}" }, { "{,", "{@," }, }
-local function splitpathexpr(str, newlist, validate)
- -- no need for further optimization as it is only called a
- -- few times, we can use lpeg for the sub
+local function splitpathexpr(str, newlist, validate) -- I couldn't resist lpegging it (nice exercise).
if trace_expansions then
report_resolvers("expanding variable '%s'",str)
end
local t, ok, done = newlist or { }, false, false
local n = #t
str = lpegmatch(replacer_1,str)
- while true do
- done = false
- while true do
- str, ok = gsub(str,"([^{},]+){([^{}]+)}",do_first)
- if ok > 0 then done = true else break end
- end
- while true do
- str, ok = gsub(str,"{([^{}]+)}([^{},]+)",do_second)
- if ok > 0 then done = true else break end
- end
- while true do
- str, ok = gsub(str,"{([^{}]+)}{([^{}]+)}",do_both)
- if ok > 0 then done = true else break end
- end
- str, ok = gsub(str,"({[^{}]*){([^{}]+)}([^{}]*})",do_three)
- if ok > 0 then done = true end
- if not done then break end
- end
+ repeat local old = str
+ repeat local old = str ; str = lpegmatch(l_first, str) until old == str
+ repeat local old = str ; str = lpegmatch(l_second,str) until old == str
+ repeat local old = str ; str = lpegmatch(l_both, str) until old == str
+ repeat local old = str ; str = lpegmatch(l_rest, str) until old == str
+ until old == str -- or not find(str,"{")
str = lpegmatch(stripper_1,str)
if validate then
for s in gmatch(str,"[^,]+") do
s = validate(s)
if s then
- n = n + 1
- t[n] = s
+ n = n + 1 ; t[n] = s
end
end
else
for s in gmatch(str,"[^,]+") do
- n = n + 1
- t[n] = s
+ n = n + 1 ; t[n] = s
end
end
if trace_expansions then
@@ -9495,50 +9527,23 @@ local function splitpathexpr(str, newlist, validate)
return t
end
+-- We could make the previous one public.
+
local function validate(s)
- local isrecursive = find(s,"//$")
- s = collapsepath(s)
- if isrecursive then
- s = s .. "//"
- end
- return s ~= "" and not find(s,dummy_path_expr) and s
+ s = collapsepath(s) -- already keeps the //
+ return s ~= "" and not find(s,"^!*unset/*$") and s
end
resolvers.validatedpath = validate -- keeps the trailing //
-function resolvers.expandedpathfromlist(pathlist) -- maybe not a list, just a path
- -- a previous version fed back into pathlist
- local newlist, ok = { }, false
+function resolvers.expandedpathfromlist(pathlist)
+ local newlist = { }
for k=1,#pathlist do
- if find(pathlist[k],"[{}]") then
- ok = true
- break
- end
- end
- if ok then
- for k=1,#pathlist do
- splitpathexpr(pathlist[k],newlist,validate)
- end
- else
- local n = 0
- for k=1,#pathlist do
- for p in gmatch(pathlist[k],"([^,]+)") do
- p = validate(p)
- if p ~= "" then
- n = n + 1
- newlist[n] = p
- end
- end
- end
+ splitpathexpr(pathlist[k],newlist,validate)
end
return newlist
end
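-- [editorial sketch, not part of this commit] Observable behaviour is meant to
-- stay the same; only the implementation moved from gsub loops to lpeg.
-- Assuming this module is loaded:

local list = resolvers.expandedpathfromlist { "$TEXMF{-local,}/web2c" }
-- list is (roughly) { "$TEXMF-local/web2c", "$TEXMF/web2c" }
for i=1,#list do print(list[i]) end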
--- We also put some cleanup code here.
-
-
-
-
local cleanup = lpeg.replacer {
{ "!" , "" },
{ "\\" , "/" },
@@ -9576,14 +9581,13 @@ end
-- This one strips quotes and funny tokens.
+local expandhome = P("~") / "$HOME" -- environment.homedir
-local expandhome = lpegP("~") / "$HOME" -- environment.homedir
+local dodouble = P('"')/"" * (expandhome + (1 - P('"')))^0 * P('"')/""
+local dosingle = P("'")/"" * (expandhome + (1 - P("'")))^0 * P("'")/""
+local dostring = (expandhome + 1 )^0
-local dodouble = lpegP('"')/"" * (expandhome + (1 - lpegP('"')))^0 * lpegP('"')/""
-local dosingle = lpegP("'")/"" * (expandhome + (1 - lpegP("'")))^0 * lpegP("'")/""
-local dostring = (expandhome + 1 )^0
-
-local stripper = lpegCs(
+local stripper = Cs(
lpegpatterns.unspacer * (dosingle + dodouble + dostring) * lpegpatterns.unspacer
)
@@ -9599,7 +9603,9 @@ end
local cache = { }
-local splitter = lpegCt(lpeg.splitat(lpegS(ostype == "windows" and ";" or ":;"))) -- maybe add ,
+local splitter = Ct(lpeg.splitat(S(ostype == "windows" and ";" or ":;"))) -- maybe add ,
+
+local backslashswapper = lpeg.replacer("\\","/")
local function splitconfigurationpath(str) -- beware, this can be either a path or a { specification }
if str then
@@ -9608,8 +9614,7 @@ local function splitconfigurationpath(str) -- beware, this can be either a path
if str == "" then
found = { }
else
- str = gsub(str,"\\","/")
- local split = lpegmatch(splitter,str)
+ local split = lpegmatch(splitter,lpegmatch(backslashswapper,str)) -- can be combined
found = { }
local noffound = 0
for i=1,#split do
@@ -9658,57 +9663,62 @@ end
-local weird = lpegP(".")^1 + lpeg.anywhere(lpegS("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t"))
+local weird = P(".")^1 + lpeg.anywhere(S("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t"))
-function resolvers.scanfiles(specification)
- if trace_locating then
- report_resolvers("scanning path '%s'",specification)
- end
- local attributes, directory = lfs.attributes, lfs.dir
- local files = { __path__ = specification }
- local n, m, r = 0, 0, 0
- local function scan(spec,path)
- local full = (path == "" and spec) or (spec .. path .. '/')
- local dirs = { }
- for name in directory(full) do
- if not lpegmatch(weird,name) then
- local mode = attributes(full..name,'mode')
- if mode == 'file' then
- n = n + 1
- local f = files[name]
- if f then
- if type(f) == 'string' then
- files[name] = { f, path }
- else
- f[#f+1] = path
- end
- else -- probably unique anyway
- files[name] = path
- local lower = lower(name)
- if name ~= lower then
- files["remap:"..lower] = name
- r = r + 1
- end
- end
- elseif mode == 'directory' then
- m = m + 1
- if path ~= "" then
- dirs[#dirs+1] = path..'/'..name
+local attributes, directory = lfs.attributes, lfs.dir
+
+local function scan(files,spec,path,n,m,r)
+ local full = (path == "" and spec) or (spec .. path .. '/')
+ local dirs, nofdirs = { }, 0
+ for name in directory(full) do
+ if not lpegmatch(weird,name) then
+ local mode = attributes(full..name,'mode')
+ if mode == 'file' then
+ n = n + 1
+ local f = files[name]
+ if f then
+ if type(f) == 'string' then
+ files[name] = { f, path }
else
- dirs[#dirs+1] = name
+ f[#f+1] = path
+ end
+ else -- probably unique anyway
+ files[name] = path
+ local lower = lower(name)
+ if name ~= lower then
+ files["remap:"..lower] = name
+ r = r + 1
end
end
+ elseif mode == 'directory' then
+ m = m + 1
+ nofdirs = nofdirs + 1
+ if path ~= "" then
+ dirs[nofdirs] = path..'/'..name
+ else
+ dirs[nofdirs] = name
+ end
end
end
- if #dirs > 0 then
- sort(dirs)
- for i=1,#dirs do
- scan(spec,dirs[i])
- end
+ end
+ if nofdirs > 0 then
+ sort(dirs)
+ for i=1,nofdirs do
+ files, n, m, r = scan(files,spec,dirs[i],n,m,r)
end
end
- scan(specification .. '/',"")
- files.__files__, files.__directories__, files.__remappings__ = n, m, r
+ return files, n, m, r
+end
+
+function resolvers.scanfiles(path)
+ if trace_locating then
+ report_resolvers("scanning path '%s'",path)
+ end
+ local files, n, m, r = scan({ },path .. '/',"",0,0,0)
+ files.__path__ = path
+ files.__files__ = n
+ files.__directories__ = m
+ files.__remappings__ = r
if trace_locating then
report_resolvers("%s files found on %s directories with %s uppercase remappings",n,m,r)
end
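-- [editorial sketch, not part of this commit] scanfiles still returns one flat
-- table per tree; the refactoring only threads the counters through the local
-- scan() instead of keeping them as upvalues. Illustrative shape of the result:

local files = {
    ["somefile.tex"]   = "doc/examples",      -- unique name: its relative path
    ["readme"]         = { "doc", "source" }, -- duplicated name: list of paths
    ["remap:makefile"] = "Makefile",          -- lowercase-to-real remapping
    __path__           = "/some/tree",
    __files__          = 123,
    __directories__    = 10,
    __remappings__     = 1,
}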
@@ -10399,9 +10409,15 @@ if not modules then modules = { } end modules ['data-met'] = {
license = "see context related readme files"
}
-local find = string.find
+local find, format = string.find, string.format
+local sequenced = table.sequenced
+local addurlscheme, urlhashed = url.addscheme, url.hashed
+
+local trace_locating = false
+local trace_methods  = false
+
+trackers.register("resolvers.locating", function(v) trace_methods = v end)
+trackers.register("resolvers.methods", function(v) trace_methods = v end)
-local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
local report_resolvers = logs.new("resolvers")
@@ -10409,41 +10425,109 @@ local allocate = utilities.storage.allocate
local resolvers = resolvers
-resolvers.concatinators = allocate ()
-resolvers.locators = allocate { notfound = { nil } } -- locate databases
-resolvers.hashers = allocate { notfound = { nil } } -- load databases
-resolvers.generators = allocate { notfound = { nil } } -- generate databases
+local registered = { }
-function resolvers.splitmethod(filename) -- todo: trigger by suffix
+local function splitmethod(filename) -- todo: filetype in specification
if not filename then
- return { } -- safeguard
- elseif type(filename) == "table" then
+ return { scheme = "unknown", original = filename }
+ end
+ if type(filename) == "table" then
return filename -- already split
- elseif not find(filename,"://") then
- return { scheme="file", path = filename, original = filename } -- quick hack
+ end
+ filename = file.collapsepath(filename)
+ if not find(filename,"://") then
+ return { scheme = "file", path = filename, original = filename, filename = filename }
+ end
+ local specification = url.hashed(filename)
+ if not specification.scheme or specification.scheme == "" then
+ return { scheme = "file", path = filename, original = filename, filename = filename }
else
- return url.hashed(filename)
+ return specification
end
end
-function resolvers.methodhandler(what, filename, filetype) -- ...
- filename = file.collapsepath(filename)
- local specification = (type(filename) == "string" and resolvers.splitmethod(filename)) or filename -- no or { }, let it bomb
- local scheme = specification.scheme
- local resolver = resolvers[what]
- if resolver[scheme] then
- if trace_locating then
- report_resolvers("using special handler for '%s' -> '%s' -> '%s'",specification.original,what,table.sequenced(specification))
+resolvers.splitmethod = splitmethod -- bad name but ok
+
+-- the second argument is always analyzed (saves time later on); the original
+-- value is passed on both as the original and as the argument
+
+local function methodhandler(what,first,...) -- filename can be nil or false
+ local method = registered[what]
+ if method then
+ local how, namespace = method.how, method.namespace
+ if how == "uri" or how == "url" then
+ local specification = splitmethod(first)
+ local scheme = specification.scheme
+ local resolver = namespace and namespace[scheme]
+ if resolver then
+ if trace_methods then
+ report_resolvers("resolver: method=%s, how=%s, scheme=%s, argument=%s",what,how,scheme,first)
+ end
+ return resolver(specification,...)
+ else
+ resolver = namespace.default or namespace.file
+ if resolver then
+ if trace_methods then
+ report_resolvers("resolver: method=%s, how=%s, default, argument=%s",what,how,first)
+ end
+ return resolver(specification,...)
+ elseif trace_methods then
+ report_resolvers("resolver: method=%s, how=%s, no handler",what,how)
+ end
+ end
+ elseif how == "tag" then
+ local resolver = namespace and namespace[first]
+ if resolver then
+ if trace_methods then
+ report_resolvers("resolver: method=%s, how=%s, tag=%s",what,how,first)
+ end
+ return resolver(...)
+ else
+ resolver = namespace.default or namespace.file
+ if resolver then
+ if trace_methods then
+ report_resolvers("resolver: method=%s, how=%s, default",what,how)
+ end
+ return resolver(...)
+ elseif trace_methods then
+ report_resolvers("resolver: method=%s, how=%s, unknown",what,how)
+ end
+ end
end
- return resolver[scheme](filename,filetype,specification) -- todo: query
else
- if trace_locating then
- report_resolvers("no handler for '%s' -> '%s' -> '%s'",specification.original,what,table.sequenced(specification))
+ report_resolvers("resolver: method=%s, unknown",what)
+ end
+end
+
+resolvers.methodhandler = methodhandler
+
+function resolvers.registermethod(name,namespace,how)
+ registered[name] = { how = how or "tag", namespace = namespace }
+ namespace["byscheme"] = function(scheme,filename,...)
+ if scheme == "file" then
+ return methodhandler(name,filename,...)
+ else
+ return methodhandler(name,addurlscheme(filename,scheme),...)
end
- return resolver.tex(filename,filetype) -- todo: specification
end
end
+local concatinators = allocate { notfound = file.join } -- concatinate paths
+local locators = allocate { notfound = function() end } -- locate databases
+local hashers = allocate { notfound = function() end } -- load databases
+local generators = allocate { notfound = function() end } -- generate databases
+
+resolvers.concatinators = concatinators
+resolvers.locators = locators
+resolvers.hashers = hashers
+resolvers.generators = generators
+
+local registermethod = resolvers.registermethod
+
+registermethod("concatinators",concatinators,"tag")
+registermethod("locators", locators, "uri")
+registermethod("hashers", hashers, "uri")
+registermethod("generators", generators, "uri")
end -- of closure
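
-- A small usage sketch of the registration/dispatch pair above (not part of the patch):
-- the "myfinders" namespace and its two handlers are invented for illustration, only
-- registermethod and methodhandler themselves are real.

local myfinders = { }

function myfinders.file(specification,filetype)
    -- plain names and file:// urls end up here (splitmethod falls back to "file")
    return "found on disk: " .. specification.path
end

function myfinders.zip(specification,filetype)
    -- zip:// urls arrive with scheme, path, query etc already parsed
    return "found in archive: " .. specification.original
end

resolvers.registermethod("myfinders",myfinders,"uri")

-- dispatch happens on the scheme of the first argument:
--
--   resolvers.methodhandler("myfinders","oeps.tex","tex")              -- myfinders.file
--   resolvers.methodhandler("myfinders","zip:///a.zip?name=oeps.tex")  -- myfinders.zip
--   myfinders.byscheme("zip","a.zip?name=oeps.tex")                    -- same as previous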
@@ -10471,11 +10555,11 @@ local concat, insert, sortedkeys = table.concat, table.insert, table.sortedkeys
local next, type = next, type
local os = os
-local lpegP, lpegS, lpegR, lpegC, lpegCc, lpegCs, lpegCt = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.Cc, lpeg.Cs, lpeg.Ct
+local P, S, R, C, Cc, Cs, Ct, Carg = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.Cc, lpeg.Cs, lpeg.Ct, lpeg.Carg
local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
local filedirname, filebasename, fileextname, filejoin = file.dirname, file.basename, file.extname, file.join
-local collapsepath = file.collapsepath
+local collapsepath, joinpath = file.collapsepath, file.joinpath
local allocate = utilities.storage.allocate
local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
@@ -10489,6 +10573,7 @@ local resolvers = resolvers
local expandedpathfromlist = resolvers.expandedpathfromlist
local checkedvariable = resolvers.checkedvariable
local splitconfigurationpath = resolvers.splitconfigurationpath
+local methodhandler = resolvers.methodhandler
local initializesetter = utilities.setters.initialize
@@ -10502,12 +10587,12 @@ resolvers.luacnfspec = '{$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local
resolvers.luacnfname = 'texmfcnf.lua'
resolvers.luacnfstate = "unknown"
-local unset_variable = "unset"
+local unset_variable = "unset"
-local formats = resolvers.formats
-local suffixes = resolvers.suffixes
-local dangerous = resolvers.dangerous
-local suffixmap = resolvers.suffixmap
+local formats = resolvers.formats
+local suffixes = resolvers.suffixes
+local dangerous = resolvers.dangerous
+local suffixmap = resolvers.suffixmap
resolvers.defaultsuffixes = { "tex" } -- "mkiv", "cld" -- too tricky
@@ -10552,7 +10637,7 @@ function resolvers.newinstance()
end
-function resolvers.setinstance(someinstance)
+function resolvers.setinstance(someinstance) -- only one instance is active
instance = someinstance
resolvers.instance = someinstance
return someinstance
@@ -10574,7 +10659,7 @@ function resolvers.setenv(key,value)
end
end
-function resolvers.getenv(key)
+local function getenv(key)
local value = instance.environment[key]
if value and value ~= "" then
return value
@@ -10584,23 +10669,55 @@ function resolvers.getenv(key)
end
end
-resolvers.env = resolvers.getenv
+resolvers.getenv = getenv
+resolvers.env = getenv
+
+local function resolve(key)
+ local value = instance.variables[key] or ""
+ return (value ~= "" and value) or getenv(key) or ""
+end
+
+local dollarstripper = lpeg.stripper("$")
+local inhibitstripper = P("!")^0 * Cs(P(1)^0)
+local backslashswapper = lpeg.replacer("\\","/")
+
+local somevariable = P("$") / ""
+local somekey = C(R("az","AZ","09","__","--")^1)
+local somethingelse = P(";") * ((1-S("!{}/\\"))^1 * P(";") / "")
+ + P(";") * (P(";") / "")
+ + P(1)
+
+local pattern = Cs( (somevariable * (somekey/resolve) + somethingelse)^1 )
local function expandvars(lst) -- simple vars
- local variables, getenv = instance.variables, resolvers.getenv
- local function resolve(a)
- local va = variables[a] or ""
- return (va ~= "" and va) or getenv(a) or ""
- end
for k=1,#lst do
- local var = lst[k]
- var = gsub(var,"%$([%a%d%_%-]+)",resolve)
- var = gsub(var,";+",";")
- var = gsub(var,";[!{}/\\]+;",";")
- lst[k] = var
+ local lk = lst[k]
+ lst[k] = lpegmatch(pattern,lk) or lk
+ end
+end
+
+
+local slash = P("/")
+
+local pattern = Cs (
+ Cc("^") * (
+ Cc("%") * S(".-")
+ + slash^2 * P(-1) / "/.*"
+ + slash^2 / "/.-/"
+ + (1-slash) * P(-1) * Cc("/")
+ + P(1)
+ )^1 * Cc("$")
+)
+
+local function makepathexpression(str)
+ if str == "." then
+ return "^%./$"
+ else
+ return lpegmatch(pattern,str)
end
end
+
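-- roughly what the expression builder above produces (a sketch, worked out by hand):
--
--   makepathexpression(".")             -> "^%./$"
--   makepathexpression("tex/context")   -> "^tex/context/$"
--   makepathexpression("tex/context//") -> "^tex/context/.*$"
--   makepathexpression("tex//context")  -> "^tex/.-/context/$"
--   makepathexpression("tex-fonts/map") -> "^tex%-fonts/map/$"
--
-- that is, a lua string pattern later matched against the stored relative paths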
local function resolve(key)
local value = instance.variables[key]
if value and value ~= "" then
@@ -10614,22 +10731,21 @@ local function resolve(key)
return e ~= nil and e ~= "" and checkedvariable(e) or ""
end
+local pattern = Cs( (somevariable * (somekey/resolve) + somethingelse)^1 )
+
local function expandedvariable(var) -- simple vars
- var = gsub(var,"%$([%a%d%_%-]+)",resolve)
- var = gsub(var,";+",";")
- var = gsub(var,";[!{}/\\]+;",";")
- return var
+ return lpegmatch(pattern,var) or var
end
+
local function entry(entries,name)
if name and name ~= "" then
- name = gsub(name,'%$','')
- -- local result = entries[name..'.'..instance.progname] or entries[name]
+ name = lpegmatch(dollarstripper,name)
local result = entries[instance.progname .. '.' .. name] or entries[name]
if result then
return result
else
- result = resolvers.getenv(name)
+ result = getenv(name)
if result then
instance.variables[name] = result
resolvers.expandvariables()
@@ -10642,8 +10758,7 @@ end
local function is_entry(entries,name)
if name and name ~= "" then
- name = gsub(name,'%$','')
- -- return (entries[name..'.'..instance.progname] or entries[name]) ~= nil
+ name = lpegmatch(dollarstripper,name)
return (entries[instance.progname .. '.' .. name] or entries[name]) ~= nil
else
return false
@@ -10654,7 +10769,7 @@ local function reportcriticalvariables()
if trace_locating then
for i=1,#resolvers.criticalvars do
local v = resolvers.criticalvars[i]
- report_resolvers("variable '%s' set to '%s'",v,resolvers.getenv(v) or "unknown")
+ report_resolvers("variable '%s' set to '%s'",v,getenv(v) or "unknown")
end
report_resolvers()
end
@@ -10664,7 +10779,7 @@ end
local function identify_configuration_files()
local specification = instance.specification
if #specification == 0 then
- local cnfspec = resolvers.getenv('TEXMFCNF')
+ local cnfspec = getenv('TEXMFCNF')
if cnfspec == "" then
cnfspec = resolvers.luacnfspec
resolvers.luacnfstate = "default"
@@ -10736,7 +10851,6 @@ local function load_configuration_files()
end
end
setups[pathname] = t
-
if resolvers.luacnfstate == "default" then
-- the following code is not tested
local cnfspec = t["TEXMFCNF"]
@@ -10798,63 +10912,30 @@ end
-- database loading
--- locators
-
-function resolvers.locatedatabase(specification)
- return resolvers.methodhandler('locators', specification)
-end
-
-function resolvers.locators.tex(specification)
- if specification and specification ~= '' and lfs.isdir(specification) then
- if trace_locating then
- report_resolvers("tex locator '%s' found",specification)
- end
- resolvers.appendhash('file',specification,filename,true) -- cache
- elseif trace_locating then
- report_resolvers("tex locator '%s' not found",specification)
- end
-end
-
--- hashers
-
-function resolvers.hashdatabase(tag,name)
- return resolvers.methodhandler('hashers',tag,name)
-end
-
local function load_file_databases()
instance.loaderror, instance.files = false, allocate()
if not instance.renewcache then
local hashes = instance.hashes
for k=1,#hashes do
local hash = hashes[k]
- resolvers.hashdatabase(hash.tag,hash.name)
+ resolvers.hashers.byscheme(hash.type,hash.name)
if instance.loaderror then break end
end
end
end
-function resolvers.hashers.tex(tag,name) -- used where?
- local content = caches.loadcontent(tag,'files')
- if content then
- instance.files[tag] = content
- else
- instance.files[tag] = { }
- instance.loaderror = true
- end
-end
-
local function locate_file_databases()
-- todo: cache:// and tree:// (runtime)
local texmfpaths = resolvers.expandedpathlist('TEXMF')
for i=1,#texmfpaths do
local path = collapsepath(texmfpaths[i])
- local stripped = gsub(path,"^!!","")
- local runtime = stripped == path
- path = resolvers.cleanpath(path)
+ local stripped = lpegmatch(inhibitstripper,path)
if stripped ~= "" then
+ local runtime = stripped == path
+ path = resolvers.cleanpath(path)
if lfs.isdir(path) then
local spec = resolvers.splitmethod(stripped)
- if spec.scheme == "cache" then
+ if spec.scheme == "cache" or spec.scheme == "file" then
stripped = spec.path
elseif runtime and (spec.noscheme or spec.scheme == "file") then
stripped = "tree:///" .. stripped
@@ -10866,7 +10947,7 @@ local function locate_file_databases()
report_resolvers("locating list of '%s' (cached)",path)
end
end
- resolvers.locatedatabase(stripped) -- nothing done with result
+ methodhandler('locators',stripped) -- nothing done with result
else
if trace_locating then
if runtime then
@@ -10885,8 +10966,9 @@ end
local function generate_file_databases()
local hashes = instance.hashes
- for i=1,#hashes do
- resolvers.methodhandler('generators',hashes[i].tag)
+ for k=1,#hashes do
+ local hash = hashes[k]
+ methodhandler('generators',hash.name)
end
if trace_locating then
report_resolvers()
@@ -10896,10 +10978,13 @@ end
local function save_file_databases() -- will become cachers
for i=1,#instance.hashes do
local hash = instance.hashes[i]
- local cachename = hash.tag
+ local cachename = hash.name
if hash.cache then
local content = instance.files[cachename]
caches.collapsecontent(content)
+ if trace_locating then
+ report_resolvers("saving tree '%s'",cachename)
+ end
caches.savecontent(cachename,"files",content)
elseif trace_locating then
report_resolvers("not saving runtime tree '%s'",cachename)
@@ -10923,23 +11008,22 @@ local function load_databases()
end
end
-function resolvers.appendhash(type,tag,name,cache)
+function resolvers.appendhash(type,name,cache)
if trace_locating then
- report_resolvers("hash '%s' appended",tag)
+ report_resolvers("hash '%s' appended",name)
end
- insert(instance.hashes, { type = type, tag = tag, name = name, cache = cache } )
+ insert(instance.hashes, { type = type, name = name, cache = cache } )
end
-function resolvers.prependhash(type,tag,name,cache)
+function resolvers.prependhash(type,name,cache)
if trace_locating then
- report_resolvers("hash '%s' prepended",tag)
+ report_resolvers("hash '%s' prepended",name)
end
- insert(instance.hashes, 1, { type = type, tag = tag, name = name, cache = cache } )
+ insert(instance.hashes, 1, { type = type, name = name, cache = cache } )
end
function resolvers.extendtexmfvariable(specification) -- crap, we could better prepend the hash
--- local t = resolvers.expandedpathlist('TEXMF') -- full expansion
- local t = resolvers.splitpath(resolvers.getenv('TEXMF'))
+ local t = resolvers.splitpath(getenv('TEXMF'))
insert(t,1,specification)
local newspec = concat(t,";")
if instance.environment["TEXMF"] then
@@ -10953,10 +11037,6 @@ function resolvers.extendtexmfvariable(specification) -- crap, we could better p
reset_hashes()
end
-function resolvers.generators.tex(specification,tag)
- instance.files[tag or specification] = resolvers.scanfiles(specification)
-end
-
function resolvers.splitexpansions()
local ie = instance.expansions
for k,v in next, ie do
@@ -10986,9 +11066,20 @@ function resolvers.datastate()
return caches.contentstate()
end
+local function resolve(a)
+ return instance.expansions[a] or getenv(a)
+end
+
+local cleaner = P("\\") / "/" + P(";") * S("!{}/\\")^0 * P(";")^1 / ";"
+
+local variable = R("az","AZ","09","__","--")^1 / resolve
+ variable = (P("$")/"") * (variable + (P("{")/"") * variable * (P("}")/""))
+
+ cleaner = Cs((cleaner + P(1))^0)
+ variable = Cs((variable + P(1))^0)
+
function resolvers.expandvariables()
local expansions, environment, variables = allocate(), instance.environment, instance.variables
- local getenv = resolvers.getenv
instance.expansions = expansions
local engine, progname = instance.engine, instance.progname
if type(engine) ~= "string" then instance.engine, engine = "", "" end
@@ -10996,12 +11087,7 @@ function resolvers.expandvariables()
if engine ~= "" then environment['engine'] = engine end
if progname ~= "" then environment['progname'] = progname end
for k,v in next, environment do
- -- local a, b = match(k,"^(%a+)%_(.*)%s*$") -- too many vars have an _ in the name
- -- if a and b then -- so let's forget about it; it was a
- -- expansions[a..'.'..b] = v -- hack anyway for linux and not needed
- -- else -- anymore as we now have directives
- expansions[k] = v
- -- end
+ expansions[k] = v
end
for k,v in next, environment do -- move environment to expansions (variables are already in there)
if not expansions[k] then expansions[k] = v end
@@ -11009,26 +11095,19 @@ function resolvers.expandvariables()
for k,v in next, variables do -- move variables to expansions
if not expansions[k] then expansions[k] = v end
end
- local busy = false
- local function resolve(a)
- busy = true
- return expansions[a] or getenv(a)
- end
- while true do
- busy = false
+ repeat
+ local busy = false
for k,v in next, expansions do
- local s, n = gsub(v,"%$([%a%d%_%-]+)",resolve)
- local s, m = gsub(s,"%$%{([%a%d%_%-]+)%}",resolve)
- if n > 0 or m > 0 then
- s = gsub(s,";+",";")
- s = gsub(s,";[!{}/\\]+;",";")
- expansions[k]= s
+ local s = lpegmatch(variable,v)
+ if s ~= v then
+ busy = true
+ expansions[k] = s
end
end
- if not busy then break end
- end
+ until not busy
+
for k,v in next, expansions do
- expansions[k] = gsub(v,"\\", '/')
+ expansions[k] = lpegmatch(cleaner,v)
end
end
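
-- a sketch of the fixed point computed above (variable names and values are invented):
--
--   expansions.TEXMFMAIN = "/opt/context/texmf"
--   expansions.TEXMFHOME = "$HOME/texmf"
--   expansions.TEXMF     = "$TEXMFHOME;$TEXMFMAIN"
--
-- every pass substitutes each $KEY (or ${KEY}) it can resolve; as long as a pass
-- changed something, 'busy' stays true and another pass follows, so nested references
-- like TEXMF -> TEXMFHOME -> HOME settle after a few iterations; afterwards 'cleaner'
-- swaps backslashes for slashes and collapses stray ';' runs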
@@ -11055,7 +11134,7 @@ function resolvers.unexpandedpathlist(str)
end
function resolvers.unexpandedpath(str)
- return file.joinpath(resolvers.unexpandedpathlist(str))
+ return joinpath(resolvers.unexpandedpathlist(str))
end
local done = { }
@@ -11169,7 +11248,7 @@ function resolvers.cleanpathlist(str)
end
function resolvers.expandpath(str)
- return file.joinpath(resolvers.expandedpathlist(str))
+ return joinpath(resolvers.expandedpathlist(str))
end
function resolvers.expandedpathlist(str)
@@ -11177,7 +11256,7 @@ function resolvers.expandedpathlist(str)
return ep or { } -- ep ?
elseif instance.savelists then
-- engine+progname hash
- str = gsub(str,"%$","")
+ str = lpegmatch(dollarstripper,str)
if not instance.lists[str] then -- cached
local lst = made_list(instance,resolvers.splitpath(resolvers.expansion(str)))
instance.lists[str] = expandedpathfromlist(lst)
@@ -11190,28 +11269,34 @@ function resolvers.expandedpathlist(str)
end
function resolvers.expandedpathlistfromvariable(str) -- brrr
- local tmp = resolvers.variableofformatorsuffix(gsub(str,"%$",""))
- if tmp ~= "" then
- return resolvers.expandedpathlist(tmp)
- else
- return resolvers.expandedpathlist(str)
- end
+ str = lpegmatch(dollarstripper,str)
+ local tmp = resolvers.variableofformatorsuffix(str)
+ return resolvers.expandedpathlist(tmp ~= "" and tmp or str)
end
function resolvers.expandpathfromvariable(str)
- return file.joinpath(resolvers.expandedpathlistfromvariable(str))
+ return joinpath(resolvers.expandedpathlistfromvariable(str))
end
function resolvers.expandbraces(str) -- output variable and brace expansion of STRING
local ori = resolvers.variable(str)
local pth = expandedpathfromlist(resolvers.splitpath(ori))
- return file.joinpath(pth)
+ return joinpath(pth)
end
-resolvers.isreadable = { }
+function resolvers.registerfilehash(name,content,someerror)
+ if content then
+ instance.files[name] = content
+ else
+ instance.files[name] = { }
+ if someerror == true then -- can be unset
+ instance.loaderror = someerror
+ end
+ end
+end
-function resolvers.isreadable.file(name)
- local readable = lfs.isfile(name) -- brrr
+function isreadable(name)
+ local readable = file.is_readable(name)
if trace_detail then
if readable then
report_resolvers("file '%s' is readable",name)
@@ -11222,8 +11307,6 @@ function resolvers.isreadable.file(name)
return readable
end
-resolvers.isreadable.tex = resolvers.isreadable.file
-
-- name
-- name/name
@@ -11244,7 +11327,7 @@ local function collect_files(names)
local hashes = instance.hashes
for h=1,#hashes do
local hash = hashes[h]
- local blobpath = hash.tag
+ local blobpath = hash.name
local files = blobpath and instance.files[blobpath]
if files then
if trace_detail then
@@ -11265,7 +11348,7 @@ local function collect_files(names)
if not dname or find(blobfile,dname) then
local kind = hash.type
local search = filejoin(blobpath,blobfile,bname)
- local result = resolvers.concatinators[hash.type](blobroot,blobfile,bname)
+ local result = methodhandler('concatinators',hash.type,blobroot,blobfile,bname)
if trace_detail then
report_resolvers("match: kind '%s', search '%s', result '%s'",kind,search,result)
end
@@ -11278,7 +11361,7 @@ local function collect_files(names)
if not dname or find(vv,dname) then
local kind = hash.type
local search = filejoin(blobpath,vv,bname)
- local result = resolvers.concatinators[hash.type](blobroot,vv,bname)
+ local result = methodhandler('concatinators',hash.type,blobroot,vv,bname)
if trace_detail then
report_resolvers("match: kind '%s', search '%s', result '%s'",kind,search,result)
end
@@ -11316,6 +11399,8 @@ local function can_be_dir(name) -- can become local
return fakepaths[name] == 1
end
+local preparetreepattern = Cs((P(".")/"%%." + P("-")/"%%-" + P(1))^0 * Cc("$"))
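-- the pattern above only escapes '.' and '-' and anchors at the end, e.g. (sketch):
--
--   lpegmatch(preparetreepattern,"type-imp-texgyre.mkiv") -> "type%-imp%-texgyre%.mkiv$"
--
-- which replaces the gsub(filename .. "$","([%.%-])","%%%1") removed further down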
+
local function collect_instance_files(filename,askedformat,allresults) -- todo : plugin (scanners, checkers etc)
local result = { }
local stamp = nil
@@ -11333,7 +11418,7 @@ local function collect_instance_files(filename,askedformat,allresults) -- todo :
end
end
if not dangerous[askedformat] then
- if resolvers.isreadable.file(filename) then
+ if isreadable(filename) then
if trace_detail then
report_resolvers("file '%s' found directly",filename)
end
@@ -11349,7 +11434,7 @@ local function collect_instance_files(filename,askedformat,allresults) -- todo :
end
result = resolvers.findwildcardfiles(filename) -- we can use the local
elseif file.is_qualified_path(filename) then
- if resolvers.isreadable.file(filename) then
+ if isreadable(filename) then
if trace_locating then
report_resolvers("qualified name '%s'", filename)
end
@@ -11362,7 +11447,7 @@ local function collect_instance_files(filename,askedformat,allresults) -- todo :
for i=1,#format_suffixes do
local s = format_suffixes[i]
forcedname = filename .. "." .. s
- if resolvers.isreadable.file(forcedname) then
+ if isreadable(forcedname) then
if trace_locating then
report_resolvers("no suffix, forcing format filetype '%s'", s)
end
@@ -11376,7 +11461,7 @@ local function collect_instance_files(filename,askedformat,allresults) -- todo :
-- try to find in tree (no suffix manipulation), here we search for the
-- matching last part of the name
local basename = filebasename(filename)
- local pattern = gsub(filename .. "$","([%.%-])","%%%1")
+ local pattern = lpegmatch(preparetreepattern,filename)
-- messy .. to be sorted out
local savedformat = askedformat
local format = savedformat or ""
@@ -11471,7 +11556,7 @@ local function collect_instance_files(filename,askedformat,allresults) -- todo :
end
for k=1,#wantedfiles do
local fname = wantedfiles[k]
- if fname and resolvers.isreadable.file(fname) then
+ if fname and isreadable(fname) then
filename, done = fname, true
result[#result+1] = filejoin('.',fname)
break
@@ -11497,26 +11582,15 @@ local function collect_instance_files(filename,askedformat,allresults) -- todo :
if trace_detail then
report_resolvers("checking filename '%s'",filename)
end
- -- a bit messy ... esp the doscan setting here
- local doscan
for k=1,#pathlist do
local path = pathlist[k]
- if find(path,"^!!") then doscan = false else doscan = true end
- local pathname = gsub(path,"^!+", '')
+ local pathname = lpegmatch(inhibitstripper,path)
+ local doscan = path == pathname -- no ^!!
done = false
-- using file list
if filelist then
- local expression
-- compare list entries with permitted pattern -- /xx /xx//
- if not find(pathname,"/$") then
- expression = pathname .. "/"
- else
- expression = pathname
- end
- expression = gsub(expression,"([%-%.])","%%%1") -- this also influences
- expression = gsub(expression,"//+$", '/.*') -- later usage of pathname
- expression = gsub(expression,"//", '/.-/') -- not ok for /// but harmless
- expression = "^" .. expression .. "$"
+ local expression = makepathexpression(pathname)
if trace_detail then
report_resolvers("using pattern '%s' for path '%s'",expression,pathname)
end
@@ -11545,7 +11619,8 @@ local function collect_instance_files(filename,askedformat,allresults) -- todo :
end
if not done and doscan then
-- check if on disk / unchecked / does not work at all / also zips
- if resolvers.splitmethod(pathname).scheme == 'file' then -- ?
+ local scheme = url.hasscheme(pathname)
+ if not scheme or scheme == "file" then
local pname = gsub(pathname,"%.%*$",'')
if not find(pname,"%*") then
local ppname = gsub(pname,"/+$","")
@@ -11553,7 +11628,7 @@ local function collect_instance_files(filename,askedformat,allresults) -- todo :
for k=1,#wantedfiles do
local w = wantedfiles[k]
local fname = filejoin(ppname,w)
- if resolvers.isreadable.file(fname) then
+ if isreadable(fname) then
if trace_detail then
report_resolvers("found '%s' by scanning",fname)
end
@@ -11586,9 +11661,6 @@ local function collect_instance_files(filename,askedformat,allresults) -- todo :
return result
end
-resolvers.concatinators.tex = filejoin
-resolvers.concatinators.file = resolvers.concatinators.tex
-
local function findfiles(filename,filetype,allresults)
local result = collect_instance_files(filename,filetype or "",allresults)
if #result == 0 then
@@ -11609,7 +11681,7 @@ function resolvers.findfile(filename,filetype)
end
function resolvers.findpath(filename,filetype)
- return file.dirname(findfiles(filename,filetype,false)[1] or "")
+ return filedirname(findfiles(filename,filetype,false)[1] or "")
end
local function findgivenfiles(filename,allresults)
@@ -11617,7 +11689,7 @@ local function findgivenfiles(filename,allresults)
local hashes = instance.hashes
for k=1,#hashes do
local hash = hashes[k]
- local files = instance.files[hash.tag] or { }
+ local files = instance.files[hash.name] or { }
local blist = files[bname]
if not blist then
local rname = "remap:"..bname
@@ -11629,12 +11701,12 @@ local function findgivenfiles(filename,allresults)
end
if blist then
if type(blist) == 'string' then
- result[#result+1] = resolvers.concatinators[hash.type](hash.tag,blist,bname) or ""
+ result[#result+1] = methodhandler('concatinators',hash.type,hash.name,blist,bname) or ""
if not allresults then break end
else
for kk=1,#blist do
local vv = blist[kk]
- result[#result+1] = resolvers.concatinators[hash.type](hash.tag,vv,bname) or ""
+ result[#result+1] = methodhandler('concatinators',hash.type,hash.name,vv,bname) or ""
if not allresults then break end
end
end
@@ -11657,14 +11729,14 @@ local function doit(path,blist,bname,tag,kind,result,allresults)
if type(blist) == 'string' then
-- make function and share code
if find(lower(blist),path) then
- result[#result+1] = resolvers.concatinators[kind](tag,blist,bname) or ""
+ result[#result+1] = methodhandler('concatinators',kind,tag,blist,bname) or ""
done = true
end
else
for kk=1,#blist do
local vv = blist[kk]
if find(lower(vv),path) then
- result[#result+1] = resolvers.concatinators[kind](tag,vv,bname) or ""
+ result[#result+1] = methodhandler('concatinators',kind,tag,vv,bname) or ""
done = true
if not allresults then break end
end
@@ -11674,30 +11746,25 @@ local function doit(path,blist,bname,tag,kind,result,allresults)
return done
end
+local makewildcard = Cs(
+ (P("^")^0 * P("/") * P(-1) + P(-1)) /".*"
+ + (P("^")^0 * P("/") / "") * (P("*")/".*" + P("-")/"%%-" + P("?")/"."+ P("\\")/"/" + P(1))^0
+)
+
local function findwildcardfiles(filename,allresults) -- todo: remap: and lpeg
local result = { }
- local bname, dname = filebasename(filename), filedirname(filename)
- local path = gsub(dname,"^*/","")
- path = gsub(path,"*",".*")
- path = gsub(path,"-","%%-")
- if dname == "" then
- path = ".*"
- end
- local name = bname
- name = gsub(name,"*",".*")
- name = gsub(name,"-","%%-")
- path = lower(path)
- name = lower(name)
+ local path = lower(lpegmatch(makewildcard,filedirname (filename)))
+ local name = lower(lpegmatch(makewildcard,filebasename(filename)))
local files, done = instance.files, false
if find(name,"%*") then
local hashes = instance.hashes
for k=1,#hashes do
local hash = hashes[k]
- local tag, kind = hash.tag, hash.type
- for kk, hh in next, files[hash.tag] do
+ local hashname, hashtype = hash.name, hash.type
+ for kk, hh in next, files[hashname] do
if not find(kk,"^remap:") then
if find(lower(kk),name) then
- if doit(path,hh,kk,tag,kind,result,allresults) then done = true end
+ if doit(path,hh,kk,hashname,hashtype,result,allresults) then done = true end
if done and not allresults then break end
end
end
@@ -11707,8 +11774,8 @@ local function findwildcardfiles(filename,allresults) -- todo: remap: and lpeg
local hashes = instance.hashes
for k=1,#hashes do
local hash = hashes[k]
- local tag, kind = hash.tag, hash.type
- if doit(path,files[tag][bname],bname,tag,kind,result,allresults) then done = true end
+ local hashname, hashtype = hash.name, hash.type
+ if doit(path,files[hashname][bname],bname,hashname,hashtype,result,allresults) then done = true end
if done and not allresults then break end
end
end
@@ -11779,12 +11846,9 @@ end
-- resolvers.expandvar = resolvers.expansion -- output variable expansion of STRING.
function resolvers.showpath(str) -- output search path for file type NAME
- return file.joinpath(resolvers.expandedpathlist(resolvers.formatofvariable(str)))
+ return joinpath(resolvers.expandedpathlist(resolvers.formatofvariable(str)))
end
--- resolvers.findfile(filename)
--- resolvers.findfile(filename, f.iletype)
-
function resolvers.registerfile(files, name, path)
if files[name] then
if type(files[name]) == 'string' then
@@ -11809,7 +11873,7 @@ function resolvers.dowithvariable(name,func)
end
function resolvers.locateformat(name)
- local barename = gsub(name,"%.%a+$","")
+ local barename = file.removesuffix(name) -- gsub(name,"%.%a+$","")
local fmtname = caches.getfirstreadablefile(barename..".fmt","formats") or ""
if fmtname == "" then
fmtname = resolvers.findfile(barename..".fmt")
@@ -11845,7 +11909,7 @@ function resolvers.dowithfilesintree(pattern,handle,before,after) -- can be a ni
for i=1,#hashes do
local hash = hashes[i]
local blobtype = hash.type
- local blobpath = hash.tag
+ local blobpath = hash.name
if blobpath then
if before then
before(blobtype,blobpath,pattern)
@@ -12020,13 +12084,23 @@ if not modules then modules = { } end modules ['data-inp'] = {
license = "see context related readme files"
}
-local allocate = utilities.storage.allocate
-
+local allocate = utilities.storage.allocate
local resolvers = resolvers
-resolvers.finders = allocate { notfound = { nil } }
-resolvers.openers = allocate { notfound = { nil } }
-resolvers.loaders = allocate { notfound = { false, nil, 0 } }
+local methodhandler = resolvers.methodhandler
+local registermethod = resolvers.registermethod
+
+local finders = allocate { helpers = { }, notfound = function() end }
+local openers = allocate { helpers = { }, notfound = function() end }
+local loaders = allocate { helpers = { }, notfound = function() return false, nil, 0 end }
+
+registermethod("finders", finders, "uri")
+registermethod("openers", openers, "uri")
+registermethod("loaders", loaders, "uri")
+
+resolvers.finders = finders
+resolvers.openers = openers
+resolvers.loaders = loaders
end -- of closure
@@ -12041,8 +12115,134 @@ if not modules then modules = { } end modules ['data-out'] = {
license = "see context related readme files"
}
-resolvers.savers = utilities.storage.allocate { }
+local allocate = utilities.storage.allocate
+local resolvers = resolvers
+
+local registermethod = resolvers.registermethod
+
+local savers = allocate { helpers = { } }
+resolvers.savers = savers
+
+registermethod("savers", savers, "uri")
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+if not modules then modules = { } end modules ['data-fil'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
+
+local report_resolvers = logs.new("resolvers")
+
+local resolvers = resolvers
+
+local finders, openers, loaders, savers = resolvers.finders, resolvers.openers, resolvers.loaders, resolvers.savers
+local locators, hashers, generators, concatinators = resolvers.locators, resolvers.hashers, resolvers.generators, resolvers.concatinators
+
+local checkgarbage = utilities.garbagecollector and utilities.garbagecollector.check
+
+function locators.file(specification)
+ local name = specification.filename
+ if name and name ~= '' and lfs.isdir(name) then
+ if trace_locating then
+ report_resolvers("file locator '%s' found",name)
+ end
+ resolvers.appendhash('file',name,true) -- cache
+ elseif trace_locating then
+ report_resolvers("file locator '%s' not found",name)
+ end
+end
+
+function hashers.file(specification)
+ local name = specification.filename
+ local content = caches.loadcontent(name,'files')
+ resolvers.registerfilehash(name,content,content==nil)
+end
+
+function generators.file(specification)
+ local name = specification.filename
+ local content = resolvers.scanfiles(name)
+ resolvers.registerfilehash(name,content,true)
+end
+
+concatinators.file = file.join
+
+function finders.file(specification,filetype)
+ local filename = specification.filename
+ local foundname = resolvers.findfile(filename,filetype)
+ if foundname and foundname ~= "" then
+ if trace_locating then
+ report_resolvers("file finder: '%s' found",filename)
+ end
+ return foundname
+ else
+ if trace_locating then
+ report_resolvers("file finder: %s' not found",filename)
+ end
+ return finders.notfound()
+ end
+end
+
+-- The default textopener will be overloaded later on.
+
+function openers.helpers.textopener(tag,filename,f)
+ return {
+ reader = function() return f:read () end,
+ close = function() return f:close() end,
+ }
+end
+
+function openers.file(specification,filetype)
+ local filename = specification.filename
+ if filename and filename ~= "" then
+ local f = io.open(filename,"r")
+ if f then
+ logs.show_open(filename) -- todo
+ if trace_locating then
+ report_resolvers("file opener, '%s' opened",filename)
+ end
+ return openers.helpers.textopener("file",filename,f)
+ end
+ end
+ if trace_locating then
+ report_resolvers("file opener, '%s' not found",filename)
+ end
+ return openers.notfound()
+end
+
+function loaders.file(specification,filetype)
+ local filename = specification.filename
+ if filename and filename ~= "" then
+ local f = io.open(filename,"rb")
+ if f then
+ logs.show_load(filename)
+ if trace_locating then
+ report_resolvers("file loader, '%s' loaded",filename)
+ end
+ local s = f:read("*a")
+ if checkgarbage then
+ checkgarbage(#s)
+ end
+ f:close()
+ if s then
+ return true, s, #s
+ end
+ end
+ end
+ if trace_locating then
+ report_resolvers("file loader, '%s' not found",filename)
+ end
+ return loaders.notfound()
+end
end -- of closure
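
-- A short driving sketch (not part of the patch; the file name is invented and error
-- handling is left out) showing how the plain "file" handlers above are reached:

local foundname = resolvers.methodhandler("finders","oeps.tex","tex")
-- no "://" in the name, so splitmethod() falls back to the file scheme and
-- finders.file() simply runs resolvers.findfile("oeps.tex","tex")

if foundname and foundname ~= "" then
    local ok, blob, size = resolvers.methodhandler("loaders",foundname)
    -- loaders.file() reads the file in binary mode and returns true, content, #content
end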
@@ -12301,10 +12501,9 @@ if not modules then modules = { } end modules ['data-zip'] = {
license = "see context related readme files"
}
--- to be redone using the more recent schemes mechanism
+-- partly redone .. needs testing
local format, find, match = string.format, string.find, string.match
-local unpack = unpack or table.unpack
local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
@@ -12327,9 +12526,6 @@ local archives = zip.archives
zip.registeredfiles = zip.registeredfiles or { }
local registeredfiles = zip.registeredfiles
-local finders, openers, loaders = resolvers.finders, resolvers.openers, resolvers.loaders
-local locators, hashers, concatinators = resolvers.locators, resolvers.hashers, resolvers.concatinators
-
local function validzip(str) -- todo: use url splitter
if not find(str,"^zip://") then
return "zip:///" .. str
@@ -12359,159 +12555,159 @@ function zip.closearchive(name)
end
end
-function locators.zip(specification) -- where is this used? startup zips (untested)
- specification = resolvers.splitmethod(specification)
- local zipfile = specification.path
- local zfile = zip.openarchive(name) -- tricky, could be in to be initialized tree
+function resolvers.locators.zip(specification)
+ local archive = specification.filename
+ local zipfile = archive and archive ~= "" and zip.openarchive(archive) -- tricky, could be in to be initialized tree
if trace_locating then
- if zfile then
- report_resolvers("zip locator, archive '%s' found",specification.original)
+ if zipfile then
+ report_resolvers("zip locator, archive '%s' found",archive)
else
- report_resolvers("zip locator, archive '%s' not found",specification.original)
+ report_resolvers("zip locator, archive '%s' not found",archive)
end
end
end
-function hashers.zip(tag,name)
+function resolvers.hashers.zip(specification)
+ local archive = specification.filename
if trace_locating then
- report_resolvers("loading zip file '%s' as '%s'",name,tag)
+ report_resolvers("loading zip file '%s'",archive)
end
- resolvers.usezipfile(format("%s?tree=%s",tag,name))
+ resolvers.usezipfile(specification.original)
end
-function concatinators.zip(tag,path,name)
+function resolvers.concatinators.zip(zipfile,path,name) -- ok ?
if not path or path == "" then
- return format('%s?name=%s',tag,name)
+ return format('%s?name=%s',zipfile,name)
else
- return format('%s?name=%s/%s',tag,path,name)
+ return format('%s?name=%s/%s',zipfile,path,name)
end
end
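-- a quick check of the query format used above (sketch; the archive string is invented):
--
--   resolvers.concatinators.zip("zip:///fonts.zip","map","pdftex.map")
--       -> "zip:///fonts.zip?name=map/pdftex.map"
--   resolvers.concatinators.zip("zip:///fonts.zip","","pdftex.map")
--       -> "zip:///fonts.zip?name=pdftex.map"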
-function resolvers.isreadable.zip(name)
- return true
-end
-
-function finders.zip(specification,filetype)
- specification = resolvers.splitmethod(specification)
- if specification.path then
- local q = url.query(specification.query)
- if q.name then
- local zfile = zip.openarchive(specification.path)
+function resolvers.finders.zip(specification)
+ local original = specification.original
+ local archive = specification.filename
+ if archive then
+ local query = url.query(specification.query)
+ local queryname = query.name
+ if queryname then
+ local zfile = zip.openarchive(archive)
if zfile then
if trace_locating then
- report_resolvers("zip finder, archive '%s' found",specification.path)
+ report_resolvers("zip finder, archive '%s' found",archive)
end
- local dfile = zfile:open(q.name)
+ local dfile = zfile:open(queryname)
if dfile then
dfile = zfile:close()
if trace_locating then
- report_resolvers("zip finder, file '%s' found",q.name)
+ report_resolvers("zip finder, file '%s' found",queryname)
end
return specification.original
elseif trace_locating then
- report_resolvers("zip finder, file '%s' not found",q.name)
+ report_resolvers("zip finder, file '%s' not found",queryname)
end
elseif trace_locating then
- report_resolvers("zip finder, unknown archive '%s'",specification.path)
+ report_resolvers("zip finder, unknown archive '%s'",archive)
end
end
end
if trace_locating then
- report_resolvers("zip finder, '%s' not found",filename)
+ report_resolvers("zip finder, '%s' not found",original)
end
- return unpack(finders.notfound)
+ return resolvers.finders.notfound()
end
-function openers.zip(specification)
- local zipspecification = resolvers.splitmethod(specification)
- if zipspecification.path then
- local q = url.query(zipspecification.query)
- if q.name then
- local zfile = zip.openarchive(zipspecification.path)
+function resolvers.openers.zip(specification)
+ local original = specification.original
+ local archive = specification.filename
+ if archive then
+ local query = url.query(specification.query)
+ local queryname = query.name
+ if queryname then
+ local zfile = zip.openarchive(archive)
if zfile then
if trace_locating then
- report_resolvers("zip opener, archive '%s' opened",zipspecification.path)
+ report_resolvers("zip opener, archive '%s' opened",archive)
end
- local dfile = zfile:open(q.name)
+ local dfile = zfile:open(queryname)
if dfile then
- logs.show_open(specification)
+ logs.show_open(original)
if trace_locating then
- report_resolvers("zip opener, file '%s' found",q.name)
+ report_resolvers("zip opener, file '%s' found",queryname)
end
- return openers.textopener('zip',specification,dfile)
+ return resolvers.openers.helpers.textopener('zip',original,dfile)
elseif trace_locating then
- report_resolvers("zip opener, file '%s' not found",q.name)
+ report_resolvers("zip opener, file '%s' not found",queryname)
end
elseif trace_locating then
- report_resolvers("zip opener, unknown archive '%s'",zipspecification.path)
+ report_resolvers("zip opener, unknown archive '%s'",archive)
end
end
end
if trace_locating then
- report_resolvers("zip opener, '%s' not found",filename)
+ report_resolvers("zip opener, '%s' not found",original)
end
- return unpack(openers.notfound)
+ return resolvers.openers.notfound()
end
-function loaders.zip(specification)
- specification = resolvers.splitmethod(specification)
- if specification.path then
- local q = url.query(specification.query)
- if q.name then
- local zfile = zip.openarchive(specification.path)
+function resolvers.loaders.zip(specification)
+ local original = specification.original
+ local archive = specification.filename
+ if archive then
+ local query = url.query(specification.query)
+ local queryname = query.name
+ if queryname then
+ local zfile = zip.openarchive(archive)
if zfile then
if trace_locating then
- report_resolvers("zip loader, archive '%s' opened",specification.path)
+ report_resolvers("zip loader, archive '%s' opened",archive)
end
- local dfile = zfile:open(q.name)
+ local dfile = zfile:open(queryname)
if dfile then
- logs.show_load(filename)
+ logs.show_load(original)
if trace_locating then
- report_resolvers("zip loader, file '%s' loaded",filename)
+ report_resolvers("zip loader, file '%s' loaded",original)
end
local s = dfile:read("*all")
dfile:close()
return true, s, #s
elseif trace_locating then
- report_resolvers("zip loader, file '%s' not found",q.name)
+ report_resolvers("zip loader, file '%s' not found",queryname)
end
elseif trace_locating then
- report_resolvers("zip loader, unknown archive '%s'",specification.path)
+ report_resolvers("zip loader, unknown archive '%s'",archive)
end
end
end
if trace_locating then
- report_resolvers("zip loader, '%s' not found",filename)
+ report_resolvers("zip loader, '%s' not found",original)
end
- return unpack(openers.notfound)
+ return resolvers.loaders.notfound()
end
-- zip:///somefile.zip
-- zip:///somefile.zip?tree=texmf-local -> mount
-function resolvers.usezipfile(zipname)
- zipname = validzip(zipname)
- local specification = resolvers.splitmethod(zipname)
- local zipfile = specification.path
- if zipfile and not registeredfiles[zipname] then
- local tree = url.query(specification.query).tree or ""
- local z = zip.openarchive(zipfile)
+function resolvers.usezipfile(archive)
+ local specification = resolvers.splitmethod(archive) -- to be sure
+ local archive = specification.filename
+ if archive and not registeredfiles[archive] then
+ local z = zip.openarchive(archive)
if z then
- local instance = resolvers.instance
+ local tree = url.query(specification.query).tree or ""
if trace_locating then
- report_resolvers("zip registering, registering archive '%s'",zipname)
- end
- statistics.starttiming(instance)
- resolvers.prependhash('zip',zipname,zipfile)
- resolvers.extendtexmfvariable(zipname) -- resets hashes too
- registeredfiles[zipname] = z
- instance.files[zipname] = resolvers.registerzipfile(z,tree or "")
- statistics.stoptiming(instance)
+ report_resolvers("zip registering, registering archive '%s'",archive)
+ end
+ statistics.starttiming(resolvers.instance)
+ resolvers.prependhash('zip',archive)
+ resolvers.extendtexmfvariable(archive) -- resets hashes too
+ registeredfiles[archive] = z
+ resolvers.instance.files[archive] = resolvers.registerzipfile(z,tree)
+ statistics.stoptiming(resolvers.instance)
elseif trace_locating then
- report_resolvers("zip registering, unknown archive '%s'",zipname)
+ report_resolvers("zip registering, unknown archive '%s'",archive)
end
elseif trace_locating then
- report_resolvers("zip registering, '%s' not found",zipname)
+ report_resolvers("zip registering, '%s' not found",archive)
end
end
@@ -12560,7 +12756,8 @@ if not modules then modules = { } end modules ['data-tre'] = {
-- \input tree://oeps1/**/oeps.tex
local find, gsub, format = string.find, string.gsub, string.format
-local unpack = unpack or table.unpack
+
+local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
local report_resolvers = logs.new("resolvers")
@@ -12568,10 +12765,10 @@ local resolvers = resolvers
local done, found, notfound = { }, { }, resolvers.finders.notfound
-function resolvers.finders.tree(specification,filetype)
- local fnd = found[specification]
- if not fnd then
- local spec = resolvers.splitmethod(specification).path or ""
+function resolvers.finders.tree(specification)
+ local spec = specification.filename
+ local fnd = found[spec]
+ if fnd == nil then
if spec ~= "" then
local path, name = file.dirname(spec), file.basename(spec)
if path == "" then path = "." end
@@ -12585,53 +12782,41 @@ function resolvers.finders.tree(specification,filetype)
for k=1,#hash do
local v = hash[k]
if find(v,pattern) then
- found[specification] = v
+ found[spec] = v
return v
end
end
end
- fnd = unpack(notfound) -- unpack ? why not just notfound[1]
- found[specification] = fnd
+ fnd = notfound() -- nil, so a miss is not cached
+ found[spec] = fnd
end
return fnd
end
function resolvers.locators.tree(specification)
- local spec = resolvers.splitmethod(specification)
- local path = spec.path
- if path ~= '' and lfs.isdir(path) then
+ local name = specification.filename
+ if name ~= '' and lfs.isdir(name) then
if trace_locating then
- report_resolvers("tree locator '%s' found (%s)",path,specification)
+ report_resolvers("tree locator '%s' found",name)
end
- resolvers.appendhash('tree',specification,path,false) -- don't cache
+ resolvers.appendhash('tree',name,false) -- don't cache
elseif trace_locating then
- report_resolvers("tree locator '%s' not found",path)
+ report_resolvers("tree locator '%s' not found",name)
end
end
-function resolvers.hashers.tree(tag,name)
+function resolvers.hashers.tree(specification)
+ local name = specification.filename
if trace_locating then
- report_resolvers("analysing tree '%s' as '%s'",name,tag)
+ report_resolvers("analysing tree '%s'",name)
end
- -- todo: maybe share with done above
- local spec = resolvers.splitmethod(tag)
- local path = spec.path
- resolvers.generators.tex(path,tag) -- we share this with the normal tree analyzer
+ resolvers.methodhandler("hashers",name)
end
-function resolvers.generators.tree(tag)
- local spec = resolvers.splitmethod(tag)
- local path = spec.path
- resolvers.generators.tex(path,tag) -- we share this with the normal tree analyzer
-end
-
-function resolvers.concatinators.tree(tag,path,name)
- return file.join(tag,path,name)
-end
-
-resolvers.isreadable.tree = file.isreadable
-resolvers.openers.tree = resolvers.openers.generic
-resolvers.loaders.tree = resolvers.loaders.generic
+resolvers.concatinators.tree = resolvers.concatinators.file
+resolvers.generators.tree = resolvers.generators.file
+resolvers.openers.tree = resolvers.openers.file
+resolvers.loaders.tree = resolvers.loaders.file
end -- of closure
@@ -12654,53 +12839,51 @@ local resolvers = resolvers
local finders, openers, loaders = resolvers.finders, resolvers.openers, resolvers.loaders
-curl = curl or { }
-local curl = curl
+resolvers.curl = resolvers.curl or { }
+local curl = resolvers.curl
local cached = { }
-function curl.fetch(protocol, name) -- todo: use socket library
- local cleanname = gsub(name,"[^%a%d%.]+","-")
+local function runcurl(specification)
+ local original = specification.original
+ -- local scheme = specification.scheme
+ local cleanname = gsub(original,"[^%a%d%.]+","-")
local cachename = caches.setfirstwritablefile(cleanname,"curl")
- if not cached[name] then
+ if not cached[original] then
if not io.exists(cachename) then
- cached[name] = cachename
- local command = "curl --silent --create-dirs --output " .. cachename .. " " .. name -- no protocol .. "://"
+ cached[original] = cachename
+ local command = "curl --silent --create-dirs --output " .. cachename .. " " .. original
os.spawn(command)
end
if io.exists(cachename) then
- cached[name] = cachename
+ cached[original] = cachename
else
- cached[name] = ""
+ cached[original] = ""
end
end
- return cached[name]
+ return cached[original]
end
-function finders.curl(protocol,filename)
- local foundname = curl.fetch(protocol, filename)
- return finders.generic(protocol,foundname,filetype)
-end
+-- old code: we could be cleaner using specification (see schemes)
-function openers.curl(protocol,filename)
- return openers.generic(protocol,filename)
+local function finder(specification,filetype)
+ return resolvers.methodhandler("finders",runcurl(specification),filetype)
end
-function loaders.curl(protocol,filename)
- return loaders.generic(protocol,filename)
-end
-
--- todo: metamethod
+local opener = openers.file
+local loader = loaders.file
-function curl.install(protocol)
- finders[protocol] = function (filename,filetype) return finders.curl(protocol,filename) end
- openers[protocol] = function (filename) return openers.curl(protocol,filename) end
- loaders[protocol] = function (filename) return loaders.curl(protocol,filename) end
+local function install(scheme)
+ finders[scheme] = finder
+ openers[scheme] = opener
+ loaders[scheme] = loader
end
-curl.install('http')
-curl.install('https')
-curl.install('ftp')
+resolvers.curl.install = install
+
+install('http')
+install('https')
+install('ftp')
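
-- sketch of what the installed schemes buy us (the url is invented): a request like
--
--   resolvers.methodhandler("finders","http://example.com/oeps.tex")
--
-- runs runcurl(), which shells out to curl once, stores the result under the "curl"
-- cache tree and then lets the ordinary file finder/opener/loader work on the cached
-- copy; further curl-handled schemes could be hooked in the same way, for instance
--
--   resolvers.curl.install('sftp') -- hypothetical extra scheme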
end -- of closure
@@ -12777,7 +12960,7 @@ local function loaded(libpaths,name,simple)
if trace_locating then -- more detail
report_resolvers("! checking for '%s' on 'package.path': '%s' => '%s'",simple,libpath,resolved)
end
- if resolvers.isreadable.file(resolved) then
+ if file.is_readable(resolved) then
if trace_locating then
report_resolvers("! lib '%s' located via 'package.path': '%s'",name,resolved)
end
@@ -12786,7 +12969,6 @@ local function loaded(libpaths,name,simple)
end
end
-
package.loaders[2] = function(name) -- was [#package.loaders+1]
if trace_locating then -- mode detail
report_resolvers("! locating '%s'",name)
@@ -12824,7 +13006,7 @@ package.loaders[2] = function(name) -- was [#package.loaders+1]
if trace_locating then -- mode detail
report_resolvers("! checking for '%s' using 'clibformat path': '%s'",libname,path)
end
- if resolvers.isreadable.file(resolved) then
+ if file.is_readable(resolved) then
if trace_locating then
report_resolvers("! lib '%s' located via 'clibformat': '%s'",libname,resolved)
end
@@ -12838,7 +13020,7 @@ package.loaders[2] = function(name) -- was [#package.loaders+1]
if trace_locating then -- more detail
report_resolvers("! checking for '%s' on 'package.cpath': '%s'",simple,libpath)
end
- if resolvers.isreadable.file(resolved) then
+ if file.is_readable(resolved) then
if trace_locating then
report_resolvers("! lib '%s' located via 'package.cpath': '%s'",name,resolved)
end
@@ -13375,6 +13557,7 @@ own.libs = { -- order can be made better
'data-pre.lua',
'data-inp.lua',
'data-out.lua',
+ 'data-fil.lua',
'data-con.lua',
'data-use.lua',
-- 'data-tex.lua',
diff --git a/tex/context/base/attr-col.lua b/tex/context/base/attr-col.lua
index 28e02edd6..24c76ae6d 100644
--- a/tex/context/base/attr-col.lua
+++ b/tex/context/base/attr-col.lua
@@ -70,9 +70,12 @@ colors.data = allocate()
colors.values = colors.values or { }
colors.registered = colors.registered or { }
+local a_color = attributes.private('color')
+local a_selector = attributes.private('colormodel')
+
colors.weightgray = true
-colors.attribute = attributes.private('color')
-colors.selector = attributes.private('colormodel')
+colors.attribute = a_color
+colors.selector = a_selector
colors.default = 1
colors.main = nil
colors.triggering = true
@@ -312,7 +315,7 @@ function colors.register(name, colorspace, ...) -- passing 9 vars is faster (but
-- colors.reviver(color)
end
if name then
- list[colors.attribute][name] = color -- not grouped, so only global colors
+ list[a_color][name] = color -- not grouped, so only global colors
end
return registered[stamp]
end
diff --git a/tex/context/base/buff-ini.lua b/tex/context/base/buff-ini.lua
index 9d4562e18..611280dfc 100644
--- a/tex/context/base/buff-ini.lua
+++ b/tex/context/base/buff-ini.lua
@@ -11,258 +11,127 @@ local trace_visualize = false trackers.register("buffers.visualize", function(v
local report_buffers = logs.new("buffers")
-local utf = unicode.utf8
-
--- todo: weed the next list
-
-local concat, texprint, texwrite = table.concat, tex.print, tex.write
-local utfbyte, utffind, utfgsub = utf.byte, utf.find, utf.gsub
+local concat = table.concat
local type, next = type, next
-local huge = math.huge
-local byte, sub, find, char, gsub, rep, lower, format, gmatch, match, count = string.byte, string.sub, string.find, string.char, string.gsub, string.rep, string.lower, string.format, string.gmatch, string.match, string.count
-local splitlines, escapedpattern = string.splitlines, string.escapedpattern
-local utfcharacters, utfvalues = string.utfcharacters, string.utfvalues
+local sub, format, count, splitlines = string.sub, string.format, string.count, string.splitlines
+
local variables = interfaces.variables
-local lpegmatch = lpeg.match
local settings_to_array = utilities.parsers.settings_to_array
-local allocate = utilities.storage.allocate
-local tabtospace = utilities.strings.tabtospace
-local texsprint, texprint, ctxcatcodes = tex.sprint, tex.print, tex.ctxcatcodes
+local texprint, ctxcatcodes = tex.print, tex.ctxcatcodes
-buffers = {
- data = allocate(),
- flags = { },
-}
+buffers = { }
local buffers = buffers
local context = context
-local data = buffers.data
-local flags = buffers.flags
+local data = { }
function buffers.raw(name)
- return data[name] or { }
+ return data[name] or ""
end
-function buffers.erase(name)
+local function erase(name)
data[name] = nil
end
-function buffers.set(name, str)
- data[name] = { str } -- CHECK THIS
+local function assign(name,str)
+ data[name] = str
end
-function buffers.append(name, str)
+local function append(name,str)
data[name] = (data[name] or "") .. str
end
-buffers.flags.storeastable = true
-
--- to be sorted out: crlf + \ ; slow now
-
-local n = 0
-
-function buffers.grab(name,begintag,endtag,bufferdata)
- local dn = data[name] or ""
- if dn == "" then
- buffers.level = 0
- end
- buffers.level = buffers.level + count(bufferdata,"\\"..begintag) - count(bufferdata,"\\"..endtag)
- local more = buffers.level > 0
- if more then
- dn = dn .. bufferdata .. endtag
- buffers.level = buffers.level - 1
- else
- if dn == "" then
- dn = sub(bufferdata,1,#bufferdata-1)
- else
- dn = dn .. "\n" .. sub(bufferdata,1,#bufferdata-1)
- end
- dn = gsub(dn,"[\010\013]$","")
- if flags.storeastable then
- dn = splitlines(dn)
- end
- end
- data[name] = dn
- commands.testcase(more)
-end
-
-function buffers.exists(name)
+local function exists(name)
return data[name] ~= nil
end
-function buffers.doifelsebuffer(name)
- commands.testcase(data[name] ~= nil)
+local function getcontent(name)
+ return data[name] or ""
end
-function buffers.strip(lines,first,last)
- local first, last = first or 1, last or #lines
- for i=first,last do
- local li = lines[i]
- if #li == 0 or find(li,"^%s*$") then
- first = first + 1
- else
- break
- end
- end
- for i=last,first,-1 do
- local li = lines[i]
- if #li == 0 or find(li,"^%s*$") then
- last = last - 1
- else
- break
- end
- end
- return first, last, last - first + 1
+local function getlines(name)
+ local d = name and data[name]
+ return d and splitlines(d)
end
-function buffers.range(lines,first,last,range) -- 1,3 1,+3 fromhere,tothere
- local first, last = first or 1, last or #lines
- if last < 0 then
- last = #lines + last
- end
- local what = settings_to_array(range)
- local r_first, r_last = what[1], what[2]
- local f, l = tonumber(r_first), tonumber(r_last)
- if r_first then
- if f then
- if f > first then
- first = f
- end
- else
- for i=first,last do
- if find(lines[i],r_first) then
- first = i + 1
- break
- end
- end
- end
+local function collectcontent(names,separator) -- no print
+ if type(names) == "string" then
+ names = settings_to_array(names)
end
- if r_last then
- if l then
- if l < 0 then
- l = #lines + l
- end
- if find(r_last,"^[%+]") then -- 1,+3
- l = first + l
- end
- if l < last then
- last = l
- end
- else
- for i=first,last do
- if find(lines[i],r_last) then
- last = i - 1
- break
- end
+ if #names == 1 then
+ return getcontent(names[1])
+ else
+ local t, n = { }, 0
+ for i=1,#names do
+ local c = getcontent(names[i])
+ if c ~= "" then
+ n = n + 1
+ t[n] = c
end
end
+ return concat(t,separator or "\r") -- "\n" is safer due to comments and such
end
- return first, last
end
--- this will go to buff-ver.lua
+buffers.erase = erase
+buffers.assign = assign
+buffers.append = append
+buffers.exists = exists
+buffers.getcontent = getcontent
+buffers.getlines = getlines
+buffers.collectcontent = collectcontent
--- there is some overlap in the following
+-- the context interface
-flags.tablength = 7
+commands.erasebuffer = erase
+commands.assignbuffer = assign
-local function flush(content,method,settings)
- local tab = settings.tab
- tab = tab and (tab == variables.yes and flags.tablength or tonumber(tab))
- if tab then
- content = utilities.strings.tabtospace(content,tab)
- end
- local visualizer = settings.visualizer
- if visualizer and visualizer ~= "" then
- visualizers.visualize(visualizer,method,content,settings)
- else -- todo:
- visualizers.visualize("",method,content,settings)
- end
-end
+local P, patterns, lpegmatch = lpeg.P, lpeg.patterns, lpeg.match
-local function filter(lines,settings) -- todo: inline or display in settings
- local strip = settings.strip
- if strip then
- lines = buffers.realign(lines,strip)
- end
- local line, n = 0, 0
- local first, last, m = buffers.strip(lines)
- if range then
- first, last = buffers.range(lines,first,last,range)
- first, last = buffers.strip(lines,first,last)
- end
- local content = concat(lines,(settings.nature == "inline" and " ") or "\n",first,last)
- return content, m
+local function countnesting(b,e)
+ local n
+ local g = P(b) / function() n = n + 1 end
+ + P(e) / function() n = n - 1 end
+ + patterns.anything
+ local p = patterns.alwaysmatched / function() n = 0 end
+ * g^0
+ * patterns.alwaysmatched / function() return n end
+ return p
end
-function buffers.typestring(settings) -- todo: settings.nature = "inline"
- local content = settings.data
- if content and content ~= "" then
- flush(content,"inline",settings)
- end
-end
+local counters = { }
+local nesting = 0
-function buffers.typebuffer(settings) -- todo: settings.nature = "display"
- local name = settings.name
- local lines = name and data[name]
- if lines then
- if type(lines) == "string" then
- lines = splitlines(lines)
- data[name] = lines
- end
- local content, m = filter(lines,settings)
- if content and content ~= "" then
- flush(content,"display",settings)
- end
+function commands.grabbuffer(name,begintag,endtag,bufferdata) -- maybe move \\ to call
+ local dn = getcontent(name)
+ if dn == "" then
+ nesting = 0
end
-end
-
-function buffers.processbuffer(settings) -- nearly the same
- local name = settings.name
- local lines = name and data[name]
- if lines then
- -- dodo: process only and feedback
- if type(lines) == "string" then
- lines = splitlines(lines)
- data[name] = lines
- end
- local content, m = filter(lines,settings)
- if content and content ~= "" then
- flush(content,"direct",settings)
- end
+ -- nesting = nesting + count(bufferdata,"\\"..begintag) - count(bufferdata,"\\"..endtag)
+ local counter = counters[begintag]
+ if not counter then
+ counter = countnesting(begintag,endtag)
+ counters[begintag] = counter
end
-end
-
-function buffers.typefile(settings) -- todo: settings.nature = "display"
- local name = settings.name
- local str = buffers.loaddata(name)
- if str and str ~= "" then
- local regime = settings.regime
- if regime and regime ~= "" then
- regimes.load(regime)
- str = regimes.translate(str,regime)
+ nesting = nesting + lpegmatch(counter,bufferdata)
+ local more = nesting > 0
+ if more then
+ dn = dn .. bufferdata .. endtag
+ nesting = nesting - 1
+ else
+ if dn == "" then
+ dn = sub(bufferdata,1,-2)
+ else
+ dn = dn .. "\n" .. sub(bufferdata,1,-2)
end
- if str and str~= "" then
- local lines = splitlines(str)
- local content, m = filter(lines,settings)
- if content and content ~= "" then
- flush(content,"display",settings)
- end
+ local last = sub(dn,-1)
+ if last == "\n" or last == "\r" then
+ dn = sub(dn,1,-2)
end
end
-end
-
-function buffers.loaddata(filename) -- this one might go away or become local
- local foundname = resolvers.findtexfile(filename) or ""
- if foundname == "" then
- foundname = resolvers.findtexfile(file.addsuffix(filename,'tex')) or ""
- end
- if foundname == "" then
- return ""
- else
- return resolvers.loadtexfile(foundname)
- end
+ assign(name,dn)
+ commands.testcase(more)
end
-- The optional prefix hack is there for the typesetbuffer feature and
@@ -275,7 +144,7 @@ local function prepared(name,list) -- list is optional
if not name or name == "" then
name = tex.jobname .. "-" .. list .. ".tmp"
end
- local content = buffers.collect(list,nil) or ""
+ local content = collectcontent(list,nil) or ""
if content == "" then
content = "empty buffer"
end
@@ -285,19 +154,12 @@ end
local capsule = "\\starttext\n%s\n\\stoptext\n"
local command = "context %s"
-function buffers.save(name,list,encapsulate) -- list is optional
- local name, content = prepared(name,list)
- io.savedata(name, (encapsulate and format(capsule,content)) or content)
-end
-
-function commands.savebuffer(list,name) -- name is optional
- buffers.save(name,list)
-end
-
-function buffers.run(name,list,encapsulate)
+function commands.runbuffer(name,list,encapsulate)
local name, content = prepared(name,list)
+ if encapsulate then
+ content = format(capsule,content)
+ end
local data = io.loaddata(name)
- content = (encapsulate and format(capsule,content)) or content
if data ~= content then
if trace_run then
commands.writestatus("buffers","changes in '%s', processing forced",name)
@@ -309,39 +171,21 @@ function buffers.run(name,list,encapsulate)
end
end
-local printer = (lpeg.patterns.textline/texprint)^0 -- not the right one, we can use context(b)
-
-function buffers.get(name)
- local b = data[name]
- if b then
- context.viafile(b)
---~ if type(b) == "table" then
---~ for i=1,#b do
---~ texprint(b[i])
---~ end
---~ else
---~ lpegmatch(printer,b)
---~ end
- end
+function commands.savebuffer(list,name) -- name is optional
+ local name, content = prepared(name,list)
+ io.savedata(name,content)
end
-local function content(name,separator) -- no print
- local b = data[name]
- if b then
- if type(b) == "table" then
- return concat(b,separator or "\n")
- else
- return b
- end
- else
- return ""
- end
+function commands.getbuffer(name)
+ context.viafile(data[name])
end
-buffers.content = content
+function commands.getbuffermkvi(name)
+ context.viafile(resolvers.macros.preprocessed(getcontent(name)))
+end
-function buffers.evaluate(name)
- local ok = loadstring(content(name))
+function commands.getbufferctxlua(name)
+ local ok = loadstring(getcontent(name))
if ok then
ok()
else
@@ -349,110 +193,14 @@ function buffers.evaluate(name)
end
end
--- maybe we should keep buffers unsplit and only split when needed
--- or better: we need a tex.sprint that splits on newlines (\r \n or
--- \r\n)
-
-local function n_content(s)
- flush(contentcatcodes,s)
-end
-
-local function n_endofline()
- texsprint(" ")
-end
-
-local function n_emptyline()
- texprint("")
-end
-
-local function n_simpleline()
- texprint("")
-end
-
-function buffers.mkvi(name,raw)
- local lines = content(name)
- lines = resolvers.macros.preprocessed(lines)
- -- context.printlines(lines,raw)
- context.viafile(lines)
-end
-
-function buffers.collect(names,separator) -- no print
- -- maybe we should always store a buffer as table so
- -- that we can pass it directly
- if type(names) == "string" then
- names = settings_to_array(names)
- end
- local t, n = { }, 0
- for i=1,#names do
- local c = content(names[i],separator)
- if c ~= "" then
- n = n + 1
- t[n] = c
- end
- end
- return concat(t,separator or "\r") -- "\n" is safer due to comments and such
-end
-
-function buffers.feedback(names,separator) -- we can use cld
- -- don't change the texprint into texsprint as it fails on mp buffers
- -- because (p<nl>enddef) becomes penddef then
- texprint(ctxcatcodes,splitlines(buffers.collect(names,separator)))
+function commands.doifelsebuffer(name)
+ commands.testcase(exists(name))
end
-local function tobyte(c)
- return " [" .. utfbyte(c) .. "] "
-end
+-- This is only used for mp buffers and is a kludge. Don't
+-- change the texprint into texsprint as it fails because
+-- "p<nl>enddef" becomes "penddef" then.
-function buffers.inspect(name)
- local b = data[name]
- if b then
- if type(b) == "table" then
- for k=1,#b do
- local v = b[k]
- context(v == "" and "[crlf]" or gsub(v,"(.)",tobyte))
- par()
- end
- else
- context((gsub(b,"(.)",tobyte)))
- end
- end
-end
-
-function buffers.realign(name,forced_n) -- no, auto, <number>
- local n, d
- if type(name) == "string" then
- d = data[name]
- if type(d) == "string" then
- d = splitlines(d)
- end
- else
- d = name -- already a buffer
- end
- forced_n = (forced_n == variables.auto and huge) or tonumber(forced_n)
- if forced_n then
- for i=1, #d do
- local spaces = find(d[i],"%S")
- if not spaces then
- -- empty line
- elseif not n then
- n = spaces
- elseif spaces == 0 then
- n = 0
- break
- elseif n > spaces then
- n = spaces
- end
- end
- if n > 0 then
- if n > forced_n then
- n = forced_n
- end
- for i=1,#d do
- d[i] = sub(d[i],n)
- end
- end
- end
- return d
+function commands.feedback(names)
+ texprint(ctxcatcodes,splitlines(collectcontent(names)))
end
-
--- escapes: buffers.setescapepair("tex","/BTEX","/ETEX")
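The nesting count that commands.grabbuffer relies on can be tried in isolation. A minimal
sketch, assuming only the lpeg library; the real code uses the preloaded lpeg.patterns
helpers (anything, alwaysmatched), and \startfoo/\stopfoo are illustrative tags:

    local lpeg = require("lpeg")
    local P = lpeg.P

    local function countnesting(b,e)
        local n
        local g = P(b) / function() n = n + 1 end
                + P(e) / function() n = n - 1 end
                + P(1)
        -- reset the counter, scan the whole string, then report the balance
        local p = P(true) / function() n = 0 end
                * g^0
                * P(true) / function() return n end
        return p
    end

    local counter = countnesting("\\startfoo","\\stopfoo")
    print(lpeg.match(counter,"a \\startfoo b \\stopfoo c")) -- 0, balanced
    print(lpeg.match(counter,"a \\startfoo b"))             -- 1, still open

commands.grabbuffer caches one such counter per begin tag and feeds the resulting balance
to commands.testcase so that the TeX side knows whether to keep grabbing.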
diff --git a/tex/context/base/buff-ini.mkiv b/tex/context/base/buff-ini.mkiv
index 403bbf55f..221d9e4b1 100644
--- a/tex/context/base/buff-ini.mkiv
+++ b/tex/context/base/buff-ini.mkiv
@@ -25,13 +25,13 @@
\let\currentbuffer\empty
\def\doifelsebuffer#1%
- {\ctxlua{buffers.doifelsebuffer("#1")}}
+ {\ctxlua{commands.doifelsebuffer("#1")}}
\def\resetbuffer
{\dosingleempty\doresetbuffer}
\def\doresetbuffer[#1]%
- {\ctxlua{buffers.erase("#1")}}
+ {\ctxlua{commands.erasebuffer("#1")}}
\unexpanded\def\dostartdefinedbuffer
{\bgroup
@@ -65,13 +65,13 @@
\unexpanded\long\def\dodowithbuffer#1#2#3#4#5% name, startsequence, stopsequence, before, after
{#4%
\bgroup
- \ctxlua{buffers.erase("#1")}%
+ \ctxlua{commands.erasebuffer("#1")}%
\setcatcodetable \vrbcatcodes
\long\def\nododowithbuffer
{\egroup
#5}%
\long\def\dododowithbuffer##1#3% is detokenize needed? TEST
- {\ctxlua{buffers.grab("#1","#2","#3",\!!bs\detokenize{##1}\!!es)} % space ?
+ {\ctxlua{commands.grabbuffer("#1","#2","#3",\!!bs\detokenize{##1}\!!es)} % space ?
\dododowithbuffer
\nododowithbuffer}%
\dododowithbuffer}
@@ -82,7 +82,7 @@
\let\endbuffer\relax
\long\def\dosetbuffer[#1]#2\endbuffer % seldom used so we just pass #2
- {\ctxlua{buffers.set("#1", \!!bs\detokenize{#2}\!!es)}}
+ {\ctxlua{commands.assignbuffer("#1", \!!bs\detokenize{#2}\!!es)}}
\def\namedbufferparameter#1#2{\csname\??bu#1#2\endcsname}
@@ -147,13 +147,7 @@
\namedbufferparameter{#1}\c!after}
\def\dododogetbuffer#1%
- {\ctxlua{buffers.get("#1")}}
-
-\def\inspectbuffer
- {\dosingleempty\doinspectbuffer}
-
-\def\doinspectbuffer[#1]%
- {\ctxlua{buffers.inspect("#1")}}
+ {\ctxlua{commands.getbuffer("#1")}}
\definebuffer[\v!hiding] \setupbuffer[\v!hiding][\c!before=,\c!after=]
@@ -190,8 +184,7 @@
\def\mkvibuffer {\dosingleempty\domkvibuffer}
\def\mkvibufferraw{\dosingleempty\domkvibufferraw}
-\def\doctxluabuffer [#1]{\ctxlua{buffers.evaluate("#1")}}
-\def\domkvibuffer [#1]{\ctxlua{buffers.mkvi("#1")}}
-%def\domkvibufferraw[#1]{\ctxlua{buffers.mkvi("#1",true)}}
+\def\doctxluabuffer [#1]{\ctxlua{commands.getbufferctxlua("#1")}}
+\def\domkvibuffer [#1]{\ctxlua{commands.getbuffermkvi("#1")}}
\protect \endinput
diff --git a/tex/context/base/buff-ver.lua b/tex/context/base/buff-ver.lua
index cee02c52d..c8f24c813 100644
--- a/tex/context/base/buff-ver.lua
+++ b/tex/context/base/buff-ver.lua
@@ -9,14 +9,16 @@ if not modules then modules = { } end modules ['buff-ver'] = {
-- The default visualizers have reserved names starting with v-*. Users are
-- supposed to use different names for their own variants.
-local type, rawset, rawget, setmetatable, getmetatable = type, rawset, rawget, setmetatable, getmetatable
-local format, lower, match = string.format, string.lower, string.match
+local type, next, rawset, rawget, setmetatable, getmetatable = type, next, rawset, rawget, setmetatable, getmetatable
+local format, lower, match, find, sub = string.format, string.lower, string.match, string.find, string.sub
+local splitlines = string.splitlines
+local concat = table.concat
local C, P, V, Carg = lpeg.C, lpeg.P, lpeg.V, lpeg.Carg
-local patterns, lpegmatch, lpegtype = lpeg.patterns, lpeg.match, lpeg.type
+local patterns, lpegmatch, is_lpeg = lpeg.patterns, lpeg.match, lpeg.is_lpeg
-local function is_lpeg(p)
- return p and lpegtype(p) == "pattern"
-end
+local tabtospace = utilities.strings.tabtospace
+local variables = interfaces.variables
+local settings_to_array = utilities.parsers.settings_to_array
visualizers = visualizers or { }
@@ -27,7 +29,8 @@ local variables = interfaces.variables
local findfile = resolvers.findfile
local addsuffix = file.addsuffix
-local v_yes = variables.yes
+local v_auto = variables.auto
+local v_yes = variables.yes
-- beware, these all get an argument (like newline)
@@ -48,7 +51,7 @@ local doverbatimspace = context.doverbatimspace
local CargOne = Carg(1)
local function f_emptyline(s,settings)
- if settings and settings.currentnature == "inline" then
+ if settings and settings.nature == "inline" then
doinlineverbatimemptyline()
else
dodisplayverbatimemptyline()
@@ -56,7 +59,7 @@ local function f_emptyline(s,settings)
end
local function f_beginline(s,settings)
- if settings and settings.currentnature == "inline" then
+ if settings and settings.nature == "inline" then
doinlineverbatimbeginline()
else
dodisplayverbatimbeginline()
@@ -64,7 +67,7 @@ local function f_beginline(s,settings)
end
local function f_newline(s,settings)
- if settings and settings.currentnature == "inline" then
+ if settings and settings.nature == "inline" then
doinlineverbatimnewline()
else
dodisplayverbatimnewline()
@@ -72,7 +75,7 @@ local function f_newline(s,settings)
end
local function f_start(s,settings)
- if settings and settings.currentnature == "inline" then
+ if settings and settings.nature == "inline" then
doinlineverbatimstart()
else
dodisplayverbatimstart()
@@ -80,7 +83,7 @@ local function f_start(s,settings)
end
local function f_stop(s,settings)
- if settings and settings.currentnature == "inline" then
+ if settings and settings.nature == "inline" then
doinlineverbatimstop()
else
dodisplayverbatimstop()
@@ -293,8 +296,9 @@ end
local escapedvisualizers = { }
-local function visualize(method,nature,content,settings) -- maybe also method and nature in settings
+local function visualize(content,settings) -- maybe also method in settings
if content and content ~= "" then
+ local method = settings.method or "default"
local m
local e = settings.escape
if e and e ~= "" then
@@ -325,8 +329,8 @@ local function visualize(method,nature,content,settings) -- maybe also method an
else
m = specifications[method] or specifications.default
end
+ local nature = settings.nature or "display"
local n = m and m[nature]
- settings.currentnature = nature or settings.nature or "display" -- tricky ... why sometimes no nature
if n then
n(content,settings)
else
@@ -338,16 +342,27 @@ end
visualizers.visualize = visualize
visualizers.getvisualizer = getvisualizer
-function visualizers.visualizestring(method,content,settings)
- visualize(method,"inline",content)
+local function checkedsettings(settings,nature)
+ if not settings then
+ return { nature = nature }
+ else
+ if not settings.nature then
+ settings.nature = nature
+ end
+ return settings
+ end
end
-function visualizers.visualizefile(method,name,settings)
- visualize(method,"display",resolvers.loadtexfile(name),settings)
+function visualizers.visualizestring(content,settings)
+ visualize(content,checkedsettings(settings,"inline"))
end
-function visualizers.visualizebuffer(method,name,settings)
- visualize(method,"display",buffers.content(name),settings)
+function visualizers.visualizefile(name,settings)
+ visualize(resolvers.loadtexfile(name),checkedsettings(settings,"display"))
+end
+
+function visualizers.visualizebuffer(name,settings)
+ visualize(buffers.getcontent(name),checkedsettings(settings,"display"))
end
-- --
@@ -375,3 +390,182 @@ function visualizers.writeargument(...)
write(...) -- use a faster print to tex variant for the
context("}") -- { } tokens as they always have ctxcatcodes.
end
+
+-- helpers
+
+local function realign(lines,forced_n) -- no, auto, <number>
+    forced_n = (forced_n == v_auto and math.huge) or tonumber(forced_n)
+ if forced_n then
+        local n -- stays nil until the first indented, non-empty line is seen
+ for i=1, #lines do
+ local spaces = find(lines[i],"%S")
+ if not spaces then
+ -- empty line
+ elseif not n then
+ n = spaces
+ elseif spaces == 0 then
+ n = 0
+ break
+ elseif n > spaces then
+ n = spaces
+ end
+ end
+        if n and n > 0 then
+ if n > forced_n then
+ n = forced_n
+ end
+            for i=1,#lines do
+ lines[i] = sub(lines[i],n)
+ end
+ end
+ end
+ return lines
+end
+
+local function getstrip(lines,first,last)
+ local first, last = first or 1, last or #lines
+ for i=first,last do
+ local li = lines[i]
+ if #li == 0 or find(li,"^%s*$") then
+ first = first + 1
+ else
+ break
+ end
+ end
+ for i=last,first,-1 do
+ local li = lines[i]
+ if #li == 0 or find(li,"^%s*$") then
+ last = last - 1
+ else
+ break
+ end
+ end
+ return first, last, last - first + 1
+end
+
+local function getrange(lines,first,last,range) -- 1,3 1,+3 fromhere,tothere
+ local noflines = #lines
+ local first, last = first or 1, last or noflines
+ if last < 0 then
+ last = noflines + last
+ end
+ local what = settings_to_array(range)
+ local r_first, r_last = what[1], what[2]
+ local f, l = tonumber(r_first), tonumber(r_last)
+ if r_first then
+ if f then
+ if f > first then
+ first = f
+ end
+ else
+ for i=first,last do
+ if find(lines[i],r_first) then
+ first = i + 1
+ break
+ end
+ end
+ end
+ end
+ if r_last then
+ if l then
+ if l < 0 then
+ l = noflines + l
+ end
+ if find(r_last,"^[%+]") then -- 1,+3
+ l = first + l
+ end
+ if l < last then
+ last = l
+ end
+ else
+ for i=first,last do
+ if find(lines[i],r_last) then
+ last = i - 1
+ break
+ end
+ end
+ end
+ end
+ return first, last
+end
+
+local tablength = 7
+
+local function flush(content,settings)
+ local tab = settings.tab
+ tab = tab and (tab == v_yes and tablength or tonumber(tab))
+ if tab then
+ content = tabtospace(content,tab)
+ end
+ visualize(content,settings)
+end
+
+local function filter(lines,settings) -- todo: inline or display in settings
+ local strip = settings.strip
+ if strip == v_yes then
+ lines = realign(lines,strip)
+ end
+    local range = settings.range
+    local first, last, m = getstrip(lines)
+    if range and range ~= "" then
+ first, last = getrange(lines,first,last,range)
+ first, last = getstrip(lines,first,last)
+ end
+ local content = concat(lines,(settings.nature == "inline" and " ") or "\n",first,last)
+ return content, m
+end
+
+-- main functions
+
+local getlines = buffers.getlines
+
+function commands.typebuffer(settings)
+ local lines = getlines(settings.name)
+ if lines then
+ local content, m = filter(lines,settings)
+ if content and content ~= "" then
+ flush(content,checkedsettings(settings,"display"))
+ end
+ end
+end
+
+function commands.processbuffer(settings)
+ local lines = getlines(settings.name)
+ if lines then
+ local content, m = filter(lines,settings)
+ if content and content ~= "" then
+ flush(content,checkedsettings(settings,"direct"))
+ end
+ end
+end
+
+-- not really buffers but it's closely related
+
+function commands.typestring(settings)
+ local content = settings.data
+ if content and content ~= "" then
+ flush(content,checkedsettings(settings,"inline"))
+ end
+end
+
+function commands.typefile(settings)
+ local filename = settings.name
+ local foundname = resolvers.findtexfile(filename)
+ if foundname and foundname ~= "" then
+ local str = resolvers.loadtexfile(foundname)
+ if str and str ~= "" then
+ local regime = settings.regime
+ if regime and regime ~= "" then
+ regimes.load(regime)
+ str = regimes.translate(str,regime)
+ end
+            if str and str ~= "" then
+ local lines = splitlines(str)
+ local content, m = filter(lines,settings)
+ if content and content ~= "" then
+ flush(content,checkedsettings(settings,"display"))
+ end
+ end
+ end
+ end
+end
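With the entry points now taking a single settings table, a Lua-side call looks as
follows; a minimal sketch, assuming a ConTeXt run and purely illustrative method and
buffer names:

    -- nature defaults to "inline" for strings and to "display" for buffers
    -- and files when it is not given explicitly
    visualizers.visualizestring("local x = 1", { method = "lua" })
    visualizers.visualizebuffer("demo", { method = "lua" })
    commands.typebuffer { name = "demo", method = "lua", tab = "4", nature = "display" }

Unknown methods fall back to the default specification, so a missing visualizer does not
abort the run.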
diff --git a/tex/context/base/buff-ver.mkiv b/tex/context/base/buff-ver.mkiv
index 6dd540385..f4402eb41 100644
--- a/tex/context/base/buff-ver.mkiv
+++ b/tex/context/base/buff-ver.mkiv
@@ -331,10 +331,11 @@
\def\dodotypenormal#1%
{\secondstageinitializetype
\dostarttagged\t!verbatim\currenttype
- \ctxlua{buffers.typestring {
- data = \!!bs\detokenize{#1}\!!es,
- tab = "\typeparameter\c!tab",
- visualizer = "\typeparameter\c!option",
+ \ctxlua{commands.typestring {
+ data = \!!bs\detokenize{#1}\!!es,
+ tab = "\typeparameter\c!tab",
+ method = "\typeparameter\c!option",
+ nature = "inline",
}}%
\dostoptagged
\egroup}
@@ -342,11 +343,12 @@
\def\dodotypenested#1%
{\secondstageinitializetype
\dostarttagged\t!verbatim\currenttype
- \ctxlua{buffers.typestring {
- data = \!!bs\detokenize{#1}\!!es,
- tab = "\typeparameter\c!tab",
- visualizer = "nested", % we force a special visualizer
- option = "\typeparameter\c!option", % extra visualizer (maybe: nested,\typeparameter\c!option)
+ \ctxlua{commands.typestring {
+ data = \!!bs\detokenize{#1}\!!es,
+ tab = "\typeparameter\c!tab",
+ method = "nested", % we force a special visualizer
+ option = "\typeparameter\c!option", % extra visualizer (maybe: nested,\typeparameter\c!option)
+ nature = "inline",
}}%
\dostoptagged
\egroup
@@ -449,13 +451,14 @@
{\secondstageinitializetyping
\beginofverbatimlines
\dostarttagged\t!verbatimblock\currenttyping
- \ctxlua{buffers.typebuffer {
- name = "_typing_",
- strip = "\typingparameter\c!strip",
- range = "\typingparameter\c!range",
- tab = "\typingparameter\c!tab",
- visualizer = "\typingparameter\c!option",
- escape = \!!bs\typingparameter\c!escape\!!es,
+ \ctxlua{commands.typebuffer {
+ name = "_typing_",
+ strip = "\typingparameter\c!strip",
+ range = "\typingparameter\c!range",
+ tab = "\typingparameter\c!tab",
+ method = "\typingparameter\c!option",
+ escape = \!!bs\typingparameter\c!escape\!!es,
+ nature = "display",
}}%
\dostoptagged
\endofverbatimlines
@@ -560,13 +563,14 @@
\secondstageinitializetyping
\beginofverbatimlines
\dostarttagged\t!verbatimblock\currenttyping
- \ctxlua{buffers.typefile {
- name = "#2",
- strip = "\typingparameter\c!strip",
- range = "\typingparameter\c!range",
- regime = "\currentregime",
- tab = "\typingparameter\c!tab",
- visualizer = "\typingparameter\c!option",
+ \ctxlua{commands.typefile {
+ name = "#2",
+ strip = "\typingparameter\c!strip",
+ range = "\typingparameter\c!range",
+ regime = "\currentregime",
+ tab = "\typingparameter\c!tab",
+ method = "\typingparameter\c!option",
+ nature = "display",
}}%
\dostoptagged
\endofverbatimlines
@@ -721,13 +725,14 @@
\secondstageinitializetyping
\beginofverbatimlines
\dostarttagged\t!verbatim{#1}%
- \ctxlua{buffers.typebuffer {
- name = "#2",
- strip = "\typingparameter\c!strip",
- range = "\typingparameter\c!range",
- regime = "\currentregime",
- tab = "\typingparameter\c!tab",
- visualizer = "\typingparameter\c!option",
+ \ctxlua{commands.typebuffer {
+ name = "#2",
+ strip = "\typingparameter\c!strip",
+ range = "\typingparameter\c!range",
+ regime = "\currentregime",
+ tab = "\typingparameter\c!tab",
+ method = "\typingparameter\c!option",
+ nature = "display",
}}%
\dostoptagged
\endofverbatimlines
@@ -760,12 +765,12 @@
\def\dodoprocessbuffer#1#2%
{\edef\currenttyping{#1}%
- \ctxlua{buffers.processbuffer {
- name = "#2",
- strip = "\typingparameter\c!strip",
- tab = "\typingparameter\c!tab",
- visualizer = "\typingparameter\c!option",
- nature = "direct",
+ \ctxlua{commands.processbuffer {
+ name = "#2",
+ strip = "\typingparameter\c!strip",
+ tab = "\typingparameter\c!tab",
+ method = "\typingparameter\c!option",
+ nature = "direct",
}}}
% line numbering, keep broken lines together
diff --git a/tex/context/base/char-utf.lua b/tex/context/base/char-utf.lua
index 27e87d29c..c509231e3 100644
--- a/tex/context/base/char-utf.lua
+++ b/tex/context/base/char-utf.lua
@@ -245,8 +245,10 @@ function utffilters.collapse(str) -- not really tested (we could preallocate a t
return str
end
-utilities.sequencers.appendaction (resolvers.openers.textfileactions,"system","characters.filters.utf.collapse")
-utilities.sequencers.disableaction(resolvers.openers.textfileactions,"characters.filters.utf.collapse")
+local textfileactions = resolvers.openers.helpers.textfileactions
+
+utilities.sequencers.appendaction (textfileactions,"system","characters.filters.utf.collapse")
+utilities.sequencers.disableaction(textfileactions,"characters.filters.utf.collapse")
--[[ldx--
<p>Next we implement some commands that are used in the user interface.</p>
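Toggling the collapse filter at run time keeps using the sequencer calls shown above; a
minimal sketch, assuming utilities.sequencers also provides enableaction as the
counterpart of the disableaction used here:

    local textfileactions = resolvers.openers.helpers.textfileactions

    -- switch UTF collapsing of text files on, and off again
    utilities.sequencers.enableaction (textfileactions,"characters.filters.utf.collapse")
    utilities.sequencers.disableaction(textfileactions,"characters.filters.utf.collapse")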
diff --git a/tex/context/base/cldf-com.lua b/tex/context/base/cldf-com.lua
index 9d03a450a..4c5e106a7 100644
--- a/tex/context/base/cldf-com.lua
+++ b/tex/context/base/cldf-com.lua
@@ -63,6 +63,8 @@ function context.utfchar(k)
context(utfchar(k))
end
+-- plain variants
+
function context.chardef(cs,u)
     context(format([[\chardef\%s=%s\relax]],cs,u))
end
@@ -78,3 +80,15 @@ end
function context.egroup()
context("}")
end
+
+local rule = nodes.pool.rule
+
+function context.hrule(w,h,d,dir)
+ if type(w) == "table" then
+ context(rule(w.width,w.height,w.depth,w.dir))
+ else
+ context(rule(w,h,d,dir))
+ end
+end
+
+context.vrule = context.hrule
diff --git a/tex/context/base/colo-ext.mkiv b/tex/context/base/colo-ext.mkiv
index 5fda96692..bc1c2fc41 100644
--- a/tex/context/base/colo-ext.mkiv
+++ b/tex/context/base/colo-ext.mkiv
@@ -76,14 +76,25 @@
\setupcolors[\c!intent=\v!none]
% A goodie that replaces the startMPcolor hackery
-%
-% \definecolor[red-t] [r=1,t=0.5,a=1]
+
+%\definecolor[red-t] [r=1,t=0.5,a=1]
% \definecolor[green-t][g=1,t=0.5,a=1]
+%
% \defineintermediatecolor[mycolora][0.5,red,green]
% \defineintermediatecolor[mycolorb][0.5,red-t,green-t]
+%
+% \definecolor[mycolorc][.5(blue,red)]
+% \definecolor[mycolord][.5(blue)]
+%
+% \enabledirectives[colors.pgf]
+% \definecolor[mycolorx][magenta!50!yellow]
+%
% \starttext
% test {\mycolora OEPS} test
% test {\mycolorb OEPS} test
+% test {\mycolorc OEPS} test
+% test {\mycolord OEPS} test
+% test {\mycolorx OEPS} test
% \stoptext
\unexpanded\def\defineintermediatecolor
diff --git a/tex/context/base/colo-icc.lua b/tex/context/base/colo-icc.lua
index fd30b63e4..fb2f83983 100644
--- a/tex/context/base/colo-icc.lua
+++ b/tex/context/base/colo-icc.lua
@@ -19,9 +19,9 @@ local cleaned = invalid^0 * Cs((1-invalid)^0)
function colors.iccprofile(filename,verbose)
local fullname = resolvers.findfile(filename,"icc") or ""
if fullname == "" then
- local locate = resolvers.finders.loc -- not in mtxrun
+ local locate = resolvers.finders.byscheme -- not in mtxrun
if locate then
- fullname = locate(filename) -- could be specific to the project
+ fullname = locate("loc",filename)
end
end
if fullname == "" then
diff --git a/tex/context/base/colo-ini.lua b/tex/context/base/colo-ini.lua
index cf8a997cf..238693bd0 100644
--- a/tex/context/base/colo-ini.lua
+++ b/tex/context/base/colo-ini.lua
@@ -9,6 +9,8 @@ if not modules then modules = { } end modules ['colo-ini'] = {
local type, tonumber = type, tonumber
local concat = table.concat
local format, gmatch, gsub, lower, match, find = string.format, string.gmatch, string.gsub, string.lower, string.match, string.find
+local P, R, C, Cc = lpeg.P, lpeg.R, lpeg.C, lpeg.Cc
+local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
local trace_define = false trackers.register("colors.define",function(v) trace_define = v end)
@@ -58,7 +60,7 @@ local function inheritcolor(name, ca, global)
if trace_define then
commands.writestatus("color","inherit global color '%s' with attribute: %s",name,ca)
end
- context.colordeffgc(name,ca)
+ context.colordeffgc(name,ca) -- some day we will set the macro directly
else
if trace_define then
commands.writestatus("color","inherit local color '%s' with attribute: %s",name,ca)
@@ -243,11 +245,47 @@ function colors.definesimplegray(name,s)
return register_color(name,'gray',s) -- we still need to get rid of 'color'
end
+local hexdigit = R("09","AF","af")
+local hexnumber = hexdigit * hexdigit / function(s) return tonumber(s,16)/255 end + Cc(0)
+local hexpattern = hexnumber^-3 * P(-1)
+local hexcolor = Cc("H") * P("#") * hexpattern
+
+local left = P("(")
+local right = P(")")
+local comma = P(",")
+local mixnumber = lpegpatterns.number / tonumber
+local mixname = C(P(1-left-right-comma)^1)
+local mixcolor = Cc("M") * mixnumber * left * mixname * (comma * mixname)^-1 * right * P(-1)
+
+local exclamation = P("!")
+local pgfnumber = lpegpatterns.digit^0 / function(s) return tonumber(s)/100 end
+local pgfname = C(P(1-exclamation)^1)
+local pgfcolor = Cc("P") * pgfname * exclamation * pgfnumber * (exclamation * pgfname)^-1 * P(-1)
+
+local specialcolor = hexcolor + mixcolor
+
+local l_color = attributes.list[a_color]
+local l_transparency = attributes.list[a_transparency]
+
+directives.register("colors.pgf",function(v)
+ if v then
+ specialcolor = hexcolor + mixcolor + pgfcolor
+ else
+ specialcolor = hexcolor + mixcolor
+ end
+end)
+
function colors.defineprocesscolor(name,str,global,freeze) -- still inconsistent color vs transparent
- local x = match(str,"^#(.+)$") -- for old times sake (if we need to feed from xml or so)
- if x then
- local r, g, b = match(x .. "000000","(..)(..)(..)") -- watch the 255
- definecolor(name, register_color(name,'rgb',(tonumber(r,16) or 0)/255,(tonumber(g,16) or 0)/255,(tonumber(b,16) or 0)/255), global)
+ local what, one, two, three = lpegmatch(specialcolor,str)
+ if what == "H" then
+ -- for old times sake (if we need to feed from xml or so)
+ definecolor(name, register_color(name,'rgb',one,two,three),global)
+ elseif what == "M" then
+ -- intermediate
+ return colors.defineintermediatecolor(name,one,l_color[two],l_color[three],l_transparency[two],l_transparency[three],"",global,freeze)
+ elseif what == "P" then
+ -- pgf for tikz
+ return colors.defineintermediatecolor(name,two,l_color[one],l_color[three],l_transparency[one],l_transparency[three],"",global,freeze)
else
local settings = settings_to_hash_strict(str)
if settings then
@@ -267,8 +305,8 @@ function colors.defineprocesscolor(name,str,global,freeze) -- still inconsistent
else
local x = settings.x or h
if x then
- r, g, b = match(x .. "000000","(..)(..)(..)") -- watch the 255
- definecolor(name, register_color(name,'rgb',(tonumber(r,16) or 0)/255,(tonumber(g,16) or 0)/255,(tonumber(b,16) or 0)/255), global)
+ r, g, b = lpegmatch(hexpattern,x) -- can be inlined
+ definecolor(name, register_color(name,'rgb',r,g,b), global)
else
definecolor(name, register_color(name,'gray',tonumber(s) or 0), global)
end
@@ -507,20 +545,35 @@ end
function colors.defineintermediatecolor(name,fraction,c_one,c_two,a_one,a_two,specs,global,freeze)
fraction = tonumber(fraction) or 1
local one, two = colors.value(c_one), colors.value(c_two)
- if one and two then
- local csone, cstwo = one[1], two[1]
- if csone == cstwo then
- -- actually we can set all 8 values at once here but this is cleaner as we avoid
- -- problems with weighted gray conversions and work with original values
+ if one then
+ if two then
+ local csone, cstwo = one[1], two[1]
+ if csone == cstwo then
+ -- actually we can set all 8 values at once here but this is cleaner as we avoid
+ -- problems with weighted gray conversions and work with original values
+ local ca
+ if csone == 2 then
+ ca = register_color(name,'gray',f(one,two,2,fraction))
+ elseif csone == 3 then
+ ca = register_color(name,'rgb', f(one,two,3,fraction),f(one,two,4,fraction),f(one,two,5,fraction))
+ elseif csone == 4 then
+ ca = register_color(name,'cmyk',f(one,two,6,fraction),f(one,two,7,fraction),f(one,two,8,fraction),f(one,two,9,fraction))
+ else
+ ca = register_color(name,'gray',f(one,two,2,fraction))
+ end
+ definecolor(name,ca,global,freeze)
+ end
+ else
+ local csone = one[1]
local ca
if csone == 2 then
- ca = register_color(name,'gray',f(one,two,2,fraction))
+ ca = register_color(name,'gray',fraction*one[2])
elseif csone == 3 then
- ca = register_color(name,'rgb',f(one,two,3,fraction),f(one,two,4,fraction),f(one,two,5,fraction))
+ ca = register_color(name,'rgb', fraction*one[3],fraction*one[4],fraction*one[5])
elseif csone == 4 then
- ca = register_color(name,'cmyk',f(one,two,6,fraction),f(one,two,7,fraction),f(one,two,8,fraction),f(one,two,9,fraction))
+ ca = register_color(name,'cmyk',fraction*one[6],fraction*one[7],fraction*one[8],fraction*one[9])
else
- ca = register_color(name,'gray',f(one,two,2,fraction))
+ ca = register_color(name,'gray',fraction*one[2])
end
definecolor(name,ca,global,freeze)
end
@@ -530,7 +583,6 @@ function colors.defineintermediatecolor(name,fraction,c_one,c_two,a_one,a_two,sp
local ta = tonumber((t and t.a) or (one and one[1]) or (two and two[1]))
local tt = tonumber((t and t.t) or (one and two and f(one,two,2,fraction)))
if ta and tt then
---~ print(ta,tt)
definetransparent(name,transparencies.register(name,ta,tt),global)
end
end
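For reference, these are the three specification forms the new specialcolor parser
accepts and the captures they yield (illustrative values; the pgf form only takes part
after \enabledirectives[colors.pgf]):

    -- lpegmatch(specialcolor,"#FF8000")           --> "H", 1, 128/255, 0
    -- lpegmatch(specialcolor,".5(blue,red)")      --> "M", 0.5, "blue", "red"
    -- lpegmatch(specialcolor,"magenta!50!yellow") --> "P", "magenta", 0.5, "yellow"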
diff --git a/tex/context/base/cont-new.mkiv b/tex/context/base/cont-new.mkiv
index 9e97001b7..08307b0d5 100644
--- a/tex/context/base/cont-new.mkiv
+++ b/tex/context/base/cont-new.mkiv
@@ -37,12 +37,4 @@
\let\\=\crlf
-% for a while as these can be used in user modules
-
-% \startluacode
-% jobregisters = logs.obsolete("jobregisters", "structures.registers")
-% buffers.finish_state = logs.obsolete("buffers.finish_state","buffers.finishstate")
-% buffers.change_state = logs.obsolete("buffers.change_state","buffers.finishstate")
-% \stopluacode
-
\protect \endinput
diff --git a/tex/context/base/cont-new.tex b/tex/context/base/cont-new.tex
index 97a4843d1..dfd4d8b7b 100644
--- a/tex/context/base/cont-new.tex
+++ b/tex/context/base/cont-new.tex
@@ -11,7 +11,7 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-\newcontextversion{2010.11.27 14:27}
+\newcontextversion{2010.12.01 15:28}
%D This file is loaded at runtime, thereby providing an
%D excellent place for hacks, patches, extensions and new
@@ -699,9 +699,10 @@
% \stoptabulate}
\long\def\tableifelse#1%
- {\TABLEnoalign{#1%
- {\aftergroup \firstoftwoarguments}%
- {\aftergroup\secondoftwoarguments}}}
+ {\tablenoalign
+ {#1%
+ {\aftergroup \firstoftwoarguments}%
+ {\aftergroup\secondoftwoarguments}}}
\long \def\tableiftextelse#1{\tableifelse{\doiftextelse{#1}}}
diff --git a/tex/context/base/context.tex b/tex/context/base/context.tex
index 8623788f2..0d9645f59 100644
--- a/tex/context/base/context.tex
+++ b/tex/context/base/context.tex
@@ -20,7 +20,7 @@
 %D your styles and modules.
\edef\contextformat {\jobname}
-\edef\contextversion{2010.11.27 14:27}
+\edef\contextversion{2010.12.01 15:28}
%D For those who want to use this:
diff --git a/tex/context/base/data-bin.lua b/tex/context/base/data-bin.lua
index f25b423e1..b18526c77 100644
--- a/tex/context/base/data-bin.lua
+++ b/tex/context/base/data-bin.lua
@@ -6,25 +6,22 @@ if not modules then modules = { } end modules ['data-bin'] = {
license = "see context related readme files"
}
-local unpack = unpack or table.unpack
-
local resolvers = resolvers
+local methodhandler = resolvers.methodhandler
-local finders, openers, loaders = resolvers.finders, resolvers.openers, resolvers.loaders
-
-function resolvers.findbinfile(filename, filetype)
- return resolvers.methodhandler('finders',filename, filetype)
+function resolvers.findbinfile(filename,filetype)
+ return methodhandler('finders',filename,filetype)
end
function resolvers.openbinfile(filename)
- return resolvers.methodhandler('loaders',filename)
+ return methodhandler('loaders',filename)
end
-function resolvers.loadbinfile(filename, filetype)
- local fname = resolvers.methodhandler('finders',filename, filetype)
+function resolvers.loadbinfile(filename,filetype)
+ local fname = methodhandler('finders',filename,filetype)
if fname and fname ~= "" then
return resolvers.openbinfile(fname)
else
- return unpack(loaders.notfound)
+ return resolvers.loaders.notfound()
end
end
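Callers keep the same three return values, now produced by the notfound functions instead
of unpacked tables; a minimal usage sketch with an illustrative filename:

    local ok, data, size = resolvers.loadbinfile("somefont.tfm","tfm")
    if ok then
        -- data holds the raw bytes and size their count; on failure the
        -- call yields false, nil, 0
    end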
diff --git a/tex/context/base/data-crl.lua b/tex/context/base/data-crl.lua
index 8fa947191..445bd5b0a 100644
--- a/tex/context/base/data-crl.lua
+++ b/tex/context/base/data-crl.lua
@@ -14,50 +14,48 @@ local resolvers = resolvers
local finders, openers, loaders = resolvers.finders, resolvers.openers, resolvers.loaders
-curl = curl or { }
-local curl = curl
+resolvers.curl = resolvers.curl or { }
+local curl = resolvers.curl
local cached = { }
-function curl.fetch(protocol, name) -- todo: use socket library
- local cleanname = gsub(name,"[^%a%d%.]+","-")
+local function runcurl(specification)
+ local original = specification.original
+ -- local scheme = specification.scheme
+ local cleanname = gsub(original,"[^%a%d%.]+","-")
local cachename = caches.setfirstwritablefile(cleanname,"curl")
- if not cached[name] then
+ if not cached[original] then
if not io.exists(cachename) then
- cached[name] = cachename
- local command = "curl --silent --create-dirs --output " .. cachename .. " " .. name -- no protocol .. "://"
+ cached[original] = cachename
+ local command = "curl --silent --create-dirs --output " .. cachename .. " " .. original
os.spawn(command)
end
if io.exists(cachename) then
- cached[name] = cachename
+ cached[original] = cachename
else
- cached[name] = ""
+ cached[original] = ""
end
end
- return cached[name]
+ return cached[original]
end
-function finders.curl(protocol,filename)
- local foundname = curl.fetch(protocol, filename)
- return finders.generic(protocol,foundname,filetype)
-end
-
-function openers.curl(protocol,filename)
- return openers.generic(protocol,filename)
-end
+-- old code: we could be cleaner using specification (see schemes)
-function loaders.curl(protocol,filename)
- return loaders.generic(protocol,filename)
+local function finder(specification,filetype)
+ return resolvers.methodhandler("finders",runcurl(specification),filetype)
end
--- todo: metamethod
+local opener = openers.file
+local loader = loaders.file
-function curl.install(protocol)
- finders[protocol] = function (filename,filetype) return finders.curl(protocol,filename) end
- openers[protocol] = function (filename) return openers.curl(protocol,filename) end
- loaders[protocol] = function (filename) return loaders.curl(protocol,filename) end
+local function install(scheme)
+ finders[scheme] = finder
+ openers[scheme] = opener
+ loaders[scheme] = loader
end
-curl.install('http')
-curl.install('https')
-curl.install('ftp')
+resolvers.curl.install = install
+
+install('http')
+install('https')
+install('ftp')
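Extra schemes can be hooked into the same curl fallback; a minimal sketch (whether the
scheme actually works is up to the installed curl binary, sftp is only an example):

    resolvers.curl.install("sftp")
    -- a lookup like sftp://example.org/somefile.tex is now fetched once by
    -- curl and afterwards served from the "curl" cache directory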
diff --git a/tex/context/base/data-exp.lua b/tex/context/base/data-exp.lua
index 98fbe9f95..22968df1a 100644
--- a/tex/context/base/data-exp.lua
+++ b/tex/context/base/data-exp.lua
@@ -6,10 +6,10 @@ if not modules then modules = { } end modules ['data-exp'] = {
license = "see context related readme files",
}
-local format, gsub, find, gmatch, lower = string.format, string.gsub, string.find, string.gmatch, string.lower
+local format, find, gmatch, lower = string.format, string.find, string.gmatch, string.lower
local concat, sort = table.concat, table.sort
local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
-local lpegCt, lpegCs, lpegP, lpegC, lpegS = lpeg.Ct, lpeg.Cs, lpeg.P, lpeg.C, lpeg.S
+local Ct, Cs, Cc, P, C, S = lpeg.Ct, lpeg.Cs, lpeg.Cc, lpeg.P, lpeg.C, lpeg.S
local type, next = type, next
local ostype = os.type
@@ -24,7 +24,7 @@ local resolvers = resolvers
-- As this bit of code is somewhat special it gets its own module. After
-- all, when working on the main resolver code, I don't want to scroll
--- past this every time.
+-- past this every time. See data-obs.lua for the gsub variant.
-- {a,b,c,d}
-- a,b,c/{p,q,r},d
@@ -39,95 +39,70 @@ local resolvers = resolvers
-- {a,b,c/{p,q/{x,y,z},w}v,d/{p,q,r}}
-- {$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,.local,}/web2c}
--- this one is better and faster, but it took me a while to realize
--- that this kind of replacement is cleaner than messy parsing and
--- fuzzy concatenating we can probably gain a bit with selectively
--- applying lpeg, but experiments with lpeg parsing this proved not to
--- work that well; the parsing is ok, but dealing with the resulting
--- table is a pain because we need to work inside-out recursively
-
-local dummy_path_expr = "^!*unset/*$"
-
-local function do_first(a,b)
+local function f_first(a,b)
local t, n = { }, 0
for s in gmatch(b,"[^,]+") do
- n = n + 1
- t[n] = a .. s
+ n = n + 1 ; t[n] = a .. s
end
- return "{" .. concat(t,",") .. "}"
+ return concat(t,",")
end
-local function do_second(a,b)
+local function f_second(a,b)
local t, n = { }, 0
for s in gmatch(a,"[^,]+") do
- n = n + 1
- t[n] = s .. b
+ n = n + 1 ; t[n] = s .. b
end
- return "{" .. concat(t,",") .. "}"
+ return concat(t,",")
end
-local function do_both(a,b)
+local function f_both(a,b)
local t, n = { }, 0
for sa in gmatch(a,"[^,]+") do
for sb in gmatch(b,"[^,]+") do
- n = n + 1
- t[n] = sa .. sb
+ n = n + 1 ; t[n] = sa .. sb
end
end
- return "{" .. concat(t,",") .. "}"
+ return concat(t,",")
end
-local function do_three(a,b,c)
- return a .. b.. c
-end
+local left = P("{")
+local right = P("}")
+local var = P((1 - S("{}" ))^0)
+local set = P((1 - S("{},"))^0)
+local other = P(1)
-local stripper_1 = lpeg.stripper("{}@")
+local l_first = Cs( ( Cc("{") * (C(set) * left * C(var) * right / f_first) * Cc("}") + other )^0 )
+local l_second = Cs( ( Cc("{") * (left * C(var) * right * C(set) / f_second) * Cc("}") + other )^0 )
+local l_both = Cs( ( Cc("{") * (left * C(var) * right * left * C(var) * right / f_both) * Cc("}") + other )^0 )
+local l_rest = Cs( ( left * var * (left/"") * var * (right/"") * var * right + other )^0 )
-local replacer_1 = lpeg.replacer {
- { ",}", ",@}" },
- { "{,", "{@," },
-}
+local stripper_1 = lpeg.stripper ("{}@")
+local replacer_1 = lpeg.replacer { { ",}", ",@}" }, { "{,", "{@," }, }
-local function splitpathexpr(str, newlist, validate)
- -- no need for further optimization as it is only called a
- -- few times, we can use lpeg for the sub
+local function splitpathexpr(str, newlist, validate) -- I couldn't resist lpegging it (nice exercise).
if trace_expansions then
report_resolvers("expanding variable '%s'",str)
end
local t, ok, done = newlist or { }, false, false
local n = #t
str = lpegmatch(replacer_1,str)
- while true do
- done = false
- while true do
- str, ok = gsub(str,"([^{},]+){([^{}]+)}",do_first)
- if ok > 0 then done = true else break end
- end
- while true do
- str, ok = gsub(str,"{([^{}]+)}([^{},]+)",do_second)
- if ok > 0 then done = true else break end
- end
- while true do
- str, ok = gsub(str,"{([^{}]+)}{([^{}]+)}",do_both)
- if ok > 0 then done = true else break end
- end
- str, ok = gsub(str,"({[^{}]*){([^{}]+)}([^{}]*})",do_three)
- if ok > 0 then done = true end
- if not done then break end
- end
+ repeat local old = str
+ repeat local old = str ; str = lpegmatch(l_first, str) until old == str
+ repeat local old = str ; str = lpegmatch(l_second,str) until old == str
+ repeat local old = str ; str = lpegmatch(l_both, str) until old == str
+ repeat local old = str ; str = lpegmatch(l_rest, str) until old == str
+ until old == str -- or not find(str,"{")
str = lpegmatch(stripper_1,str)
if validate then
for s in gmatch(str,"[^,]+") do
s = validate(s)
if s then
- n = n + 1
- t[n] = s
+ n = n + 1 ; t[n] = s
end
end
else
for s in gmatch(str,"[^,]+") do
- n = n + 1
- t[n] = s
+ n = n + 1 ; t[n] = s
end
end
if trace_expansions then
@@ -138,71 +113,23 @@ local function splitpathexpr(str, newlist, validate)
return t
end
+-- We could make the previous one public.
+
local function validate(s)
- local isrecursive = find(s,"//$")
- s = collapsepath(s)
- if isrecursive then
- s = s .. "//"
- end
- return s ~= "" and not find(s,dummy_path_expr) and s
+ s = collapsepath(s) -- already keeps the //
+ return s ~= "" and not find(s,"^!*unset/*$") and s
end
resolvers.validatedpath = validate -- keeps the trailing //
-function resolvers.expandedpathfromlist(pathlist) -- maybe not a list, just a path
- -- a previous version fed back into pathlist
- local newlist, ok = { }, false
+function resolvers.expandedpathfromlist(pathlist)
+ local newlist = { }
for k=1,#pathlist do
- if find(pathlist[k],"[{}]") then
- ok = true
- break
- end
- end
- if ok then
- for k=1,#pathlist do
- splitpathexpr(pathlist[k],newlist,validate)
- end
- else
- local n = 0
- for k=1,#pathlist do
- for p in gmatch(pathlist[k],"([^,]+)") do
- p = validate(p)
- if p ~= "" then
- n = n + 1
- newlist[n] = p
- end
- end
- end
+ splitpathexpr(pathlist[k],newlist,validate)
end
return newlist
end
--- We also put some cleanup code here.
-
---~ local cleanup -- used recursively
---~ local homedir
-
---~ cleanup = lpeg.replacer {
---~ {
---~ "!",
---~ ""
---~ },
---~ {
---~ "\\",
---~ "/"
---~ },
---~ {
---~ "~" ,
---~ function()
---~ return lpegmatch(cleanup,environment.homedir)
---~ end
---~ },
---~ }
-
---~ function resolvers.cleanpath(str)
---~ return str and lpegmatch(cleanup,str)
---~ end
-
local cleanup = lpeg.replacer {
{ "!" , "" },
{ "\\" , "/" },
@@ -240,18 +167,13 @@ end
-- This one strips quotes and funny tokens.
---~ local stripper = lpegCs(
---~ lpegpatterns.unspacer * lpegpatterns.unsingle
---~ + lpegpatterns.undouble * lpegpatterns.unspacer
---~ )
-
-local expandhome = lpegP("~") / "$HOME" -- environment.homedir
+local expandhome = P("~") / "$HOME" -- environment.homedir
-local dodouble = lpegP('"')/"" * (expandhome + (1 - lpegP('"')))^0 * lpegP('"')/""
-local dosingle = lpegP("'")/"" * (expandhome + (1 - lpegP("'")))^0 * lpegP("'")/""
-local dostring = (expandhome + 1 )^0
+local dodouble = P('"')/"" * (expandhome + (1 - P('"')))^0 * P('"')/""
+local dosingle = P("'")/"" * (expandhome + (1 - P("'")))^0 * P("'")/""
+local dostring = (expandhome + 1 )^0
-local stripper = lpegCs(
+local stripper = Cs(
lpegpatterns.unspacer * (dosingle + dodouble + dostring) * lpegpatterns.unspacer
)
@@ -267,7 +189,9 @@ end
local cache = { }
-local splitter = lpegCt(lpeg.splitat(lpegS(ostype == "windows" and ";" or ":;"))) -- maybe add ,
+local splitter = Ct(lpeg.splitat(S(ostype == "windows" and ";" or ":;"))) -- maybe add ,
+
+local backslashswapper = lpeg.replacer("\\","/")
local function splitconfigurationpath(str) -- beware, this can be either a path or a { specification }
if str then
@@ -276,8 +200,7 @@ local function splitconfigurationpath(str) -- beware, this can be either a path
if str == "" then
found = { }
else
- str = gsub(str,"\\","/")
- local split = lpegmatch(splitter,str)
+ local split = lpegmatch(splitter,lpegmatch(backslashswapper,str)) -- can be combined
found = { }
local noffound = 0
for i=1,#split do
@@ -323,13 +246,13 @@ end
-- starting with . or .. etc or funny char
---~ local l_forbidden = lpegS("~`!#$%^&*()={}[]:;\"\'||\\/<>,?\n\r\t")
---~ local l_confusing = lpegP(" ")
+--~ local l_forbidden = S("~`!#$%^&*()={}[]:;\"\'||\\/<>,?\n\r\t")
+--~ local l_confusing = P(" ")
--~ local l_character = lpegpatterns.utf8
---~ local l_dangerous = lpegP(".")
+--~ local l_dangerous = P(".")
---~ local l_normal = (l_character - l_forbidden - l_confusing - l_dangerous) * (l_character - l_forbidden - l_confusing^2)^0 * lpegP(-1)
---~ ----- l_normal = l_normal * lpegCc(true) + lpegCc(false)
+--~ local l_normal = (l_character - l_forbidden - l_confusing - l_dangerous) * (l_character - l_forbidden - l_confusing^2)^0 * P(-1)
+--~ ----- l_normal = l_normal * Cc(true) + Cc(false)
--~ local function test(str)
--~ print(str,lpegmatch(l_normal,str))
@@ -340,57 +263,62 @@ end
--~ test("ヒラギノ明朝 /Pro W3;")
--~ test("ヒラギノ明朝 Pro W3")
-local weird = lpegP(".")^1 + lpeg.anywhere(lpegS("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t"))
+local weird = P(".")^1 + lpeg.anywhere(S("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t"))
-function resolvers.scanfiles(specification)
- if trace_locating then
- report_resolvers("scanning path '%s'",specification)
- end
- local attributes, directory = lfs.attributes, lfs.dir
- local files = { __path__ = specification }
- local n, m, r = 0, 0, 0
- local function scan(spec,path)
- local full = (path == "" and spec) or (spec .. path .. '/')
- local dirs = { }
- for name in directory(full) do
- if not lpegmatch(weird,name) then
- local mode = attributes(full..name,'mode')
- if mode == 'file' then
- n = n + 1
- local f = files[name]
- if f then
- if type(f) == 'string' then
- files[name] = { f, path }
- else
- f[#f+1] = path
- end
- else -- probably unique anyway
- files[name] = path
- local lower = lower(name)
- if name ~= lower then
- files["remap:"..lower] = name
- r = r + 1
- end
- end
- elseif mode == 'directory' then
- m = m + 1
- if path ~= "" then
- dirs[#dirs+1] = path..'/'..name
+local attributes, directory = lfs.attributes, lfs.dir
+
+local function scan(files,spec,path,n,m,r)
+ local full = (path == "" and spec) or (spec .. path .. '/')
+ local dirs, nofdirs = { }, 0
+ for name in directory(full) do
+ if not lpegmatch(weird,name) then
+ local mode = attributes(full..name,'mode')
+ if mode == 'file' then
+ n = n + 1
+ local f = files[name]
+ if f then
+ if type(f) == 'string' then
+ files[name] = { f, path }
else
- dirs[#dirs+1] = name
+ f[#f+1] = path
end
+ else -- probably unique anyway
+ files[name] = path
+ local lower = lower(name)
+ if name ~= lower then
+ files["remap:"..lower] = name
+ r = r + 1
+ end
+ end
+ elseif mode == 'directory' then
+ m = m + 1
+ nofdirs = nofdirs + 1
+ if path ~= "" then
+ dirs[nofdirs] = path..'/'..name
+ else
+ dirs[nofdirs] = name
end
end
end
- if #dirs > 0 then
- sort(dirs)
- for i=1,#dirs do
- scan(spec,dirs[i])
- end
+ end
+ if nofdirs > 0 then
+ sort(dirs)
+ for i=1,nofdirs do
+ files, n, m, r = scan(files,spec,dirs[i],n,m,r)
end
end
- scan(specification .. '/',"")
- files.__files__, files.__directories__, files.__remappings__ = n, m, r
+ return files, n, m, r
+end
+
+function resolvers.scanfiles(path)
+ if trace_locating then
+ report_resolvers("scanning path '%s'",path)
+ end
+ local files, n, m, r = scan({ },path .. '/',"",0,0,0)
+ files.__path__ = path
+ files.__files__ = n
+ files.__directories__ = m
+ files.__remappings__ = r
if trace_locating then
report_resolvers("%s files found on %s directories with %s uppercase remappings",n,m,r)
end
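A worked sketch of what the lpeg based expansion produces for a typical brace
specification (the paths are illustrative, not taken from a real run):

    local list = resolvers.expandedpathfromlist { "/opt/tex{-local,}/web2c" }
    -- expands, in this order, to:
    --   /opt/tex-local/web2c
    --   /opt/tex/web2c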
diff --git a/tex/context/base/data-fil.lua b/tex/context/base/data-fil.lua
new file mode 100644
index 000000000..5fad79afd
--- /dev/null
+++ b/tex/context/base/data-fil.lua
@@ -0,0 +1,112 @@
+if not modules then modules = { } end modules ['data-fil'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
+
+local report_resolvers = logs.new("resolvers")
+
+local resolvers = resolvers
+
+local finders, openers, loaders, savers = resolvers.finders, resolvers.openers, resolvers.loaders, resolvers.savers
+local locators, hashers, generators, concatinators = resolvers.locators, resolvers.hashers, resolvers.generators, resolvers.concatinators
+
+local checkgarbage = utilities.garbagecollector and utilities.garbagecollector.check
+
+function locators.file(specification)
+ local name = specification.filename
+ if name and name ~= '' and lfs.isdir(name) then
+ if trace_locating then
+ report_resolvers("file locator '%s' found",name)
+ end
+ resolvers.appendhash('file',name,true) -- cache
+ elseif trace_locating then
+ report_resolvers("file locator '%s' not found",name)
+ end
+end
+
+function hashers.file(specification)
+ local name = specification.filename
+ local content = caches.loadcontent(name,'files')
+ resolvers.registerfilehash(name,content,content==nil)
+end
+
+function generators.file(specification)
+ local name = specification.filename
+ local content = resolvers.scanfiles(name)
+ resolvers.registerfilehash(name,content,true)
+end
+
+concatinators.file = file.join
+
+function finders.file(specification,filetype)
+ local filename = specification.filename
+ local foundname = resolvers.findfile(filename,filetype)
+ if foundname and foundname ~= "" then
+ if trace_locating then
+ report_resolvers("file finder: '%s' found",filename)
+ end
+ return foundname
+ else
+ if trace_locating then
+ report_resolvers("file finder: %s' not found",filename)
+ end
+ return finders.notfound()
+ end
+end
+
+-- The default textopener will be overloaded later on.
+
+function openers.helpers.textopener(tag,filename,f)
+ return {
+ reader = function() return f:read () end,
+ close = function() return f:close() end,
+ }
+end
+
+function openers.file(specification,filetype)
+ local filename = specification.filename
+ if filename and filename ~= "" then
+ local f = io.open(filename,"r")
+ if f then
+ logs.show_open(filename) -- todo
+ if trace_locating then
+ report_resolvers("file opener, '%s' opened",filename)
+ end
+ return openers.helpers.textopener("file",filename,f)
+ end
+ end
+ if trace_locating then
+ report_resolvers("file opener, '%s' not found",filename)
+ end
+ return openers.notfound()
+end
+
+function loaders.file(specification,filetype)
+ local filename = specification.filename
+ if filename and filename ~= "" then
+ local f = io.open(filename,"rb")
+ if f then
+ logs.show_load(filename)
+ if trace_locating then
+ report_resolvers("file loader, '%s' loaded",filename)
+ end
+ local s = f:read("*a")
+ if checkgarbage then
+ checkgarbage(#s)
+ end
+ f:close()
+ if s then
+ return true, s, #s
+ end
+ end
+ end
+ if trace_locating then
+ report_resolvers("file loader, '%s' not found",filename)
+ end
+ return loaders.notfound()
+end
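The handle returned by the default textopener is just a pair of closures; a minimal
consumption sketch (the filename is illustrative, and in a real run this opener gets
overloaded later by the tex reader):

    local handle = resolvers.openers.file { filename = "oeps.tex" }
    if handle then
        for line in handle.reader do
            -- reader() yields the next line, nil at the end of the file
        end
        handle.close()
    end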
diff --git a/tex/context/base/data-inp.lua b/tex/context/base/data-inp.lua
index b3e30a6c6..97fb8904b 100644
--- a/tex/context/base/data-inp.lua
+++ b/tex/context/base/data-inp.lua
@@ -6,10 +6,20 @@ if not modules then modules = { } end modules ['data-inp'] = {
license = "see context related readme files"
}
-local allocate = utilities.storage.allocate
-
+local allocate = utilities.storage.allocate
local resolvers = resolvers
-resolvers.finders = allocate { notfound = { nil } }
-resolvers.openers = allocate { notfound = { nil } }
-resolvers.loaders = allocate { notfound = { false, nil, 0 } }
+local methodhandler = resolvers.methodhandler
+local registermethod = resolvers.registermethod
+
+local finders = allocate { helpers = { }, notfound = function() end }
+local openers = allocate { helpers = { }, notfound = function() end }
+local loaders = allocate { helpers = { }, notfound = function() return false, nil, 0 end }
+
+registermethod("finders", finders, "uri")
+registermethod("openers", openers, "uri")
+registermethod("loaders", loaders, "uri")
+
+resolvers.finders = finders
+resolvers.openers = openers
+resolvers.loaders = loaders
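The same registration pattern serves any future category of scheme dispatched handlers; a
hypothetical sketch (the checkers category does not exist, it only shows the calling
convention, reusing the locals defined in this file):

    local checkers = allocate { helpers = { }, notfound = function() end }

    registermethod("checkers", checkers, "uri")
    resolvers.checkers = checkers

    -- resolvers.methodhandler("checkers",...) then dispatches on the uri
    -- scheme and falls back to checkers.default or checkers.file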
diff --git a/tex/context/base/data-lua.lua b/tex/context/base/data-lua.lua
index fc44e5508..9509c97f0 100644
--- a/tex/context/base/data-lua.lua
+++ b/tex/context/base/data-lua.lua
@@ -68,7 +68,7 @@ local function loaded(libpaths,name,simple)
if trace_locating then -- more detail
report_resolvers("! checking for '%s' on 'package.path': '%s' => '%s'",simple,libpath,resolved)
end
- if resolvers.isreadable.file(resolved) then
+ if file.is_readable(resolved) then
if trace_locating then
report_resolvers("! lib '%s' located via 'package.path': '%s'",name,resolved)
end
@@ -77,7 +77,6 @@ local function loaded(libpaths,name,simple)
end
end
-
package.loaders[2] = function(name) -- was [#package.loaders+1]
if trace_locating then -- mode detail
report_resolvers("! locating '%s'",name)
@@ -115,7 +114,7 @@ package.loaders[2] = function(name) -- was [#package.loaders+1]
if trace_locating then -- mode detail
report_resolvers("! checking for '%s' using 'clibformat path': '%s'",libname,path)
end
- if resolvers.isreadable.file(resolved) then
+ if file.is_readable(resolved) then
if trace_locating then
report_resolvers("! lib '%s' located via 'clibformat': '%s'",libname,resolved)
end
@@ -129,7 +128,7 @@ package.loaders[2] = function(name) -- was [#package.loaders+1]
if trace_locating then -- more detail
report_resolvers("! checking for '%s' on 'package.cpath': '%s'",simple,libpath)
end
- if resolvers.isreadable.file(resolved) then
+ if file.is_readable(resolved) then
if trace_locating then
report_resolvers("! lib '%s' located via 'package.cpath': '%s'",name,resolved)
end
diff --git a/tex/context/base/data-met.lua b/tex/context/base/data-met.lua
index 908dbf199..a02e7eeb4 100644
--- a/tex/context/base/data-met.lua
+++ b/tex/context/base/data-met.lua
@@ -6,9 +6,16 @@ if not modules then modules = { } end modules ['data-met'] = {
license = "see context related readme files"
}
-local find = string.find
+local find, format = string.find, string.format
+local sequenced = table.sequenced
+local addurlscheme, urlhashed = url.addscheme, url.hashed
-local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
+local trace_locating = false
+local trace_methods  = false
+trackers.register("resolvers.locating", function(v) trace_methods = v end)
+trackers.register("resolvers.methods", function(v) trace_methods = v end)
+
+--~ trace_methods = true
local report_resolvers = logs.new("resolvers")
@@ -16,38 +23,106 @@ local allocate = utilities.storage.allocate
local resolvers = resolvers
-resolvers.concatinators = allocate ()
-resolvers.locators = allocate { notfound = { nil } } -- locate databases
-resolvers.hashers = allocate { notfound = { nil } } -- load databases
-resolvers.generators = allocate { notfound = { nil } } -- generate databases
+local registered = { }
-function resolvers.splitmethod(filename) -- todo: trigger by suffix
+local function splitmethod(filename) -- todo: filetype in specification
if not filename then
- return { } -- safeguard
- elseif type(filename) == "table" then
+ return { scheme = "unknown", original = filename }
+ end
+ if type(filename) == "table" then
return filename -- already split
- elseif not find(filename,"://") then
- return { scheme="file", path = filename, original = filename } -- quick hack
+ end
+ filename = file.collapsepath(filename)
+ if not find(filename,"://") then
+ return { scheme = "file", path = filename, original = filename, filename = filename }
+ end
+ local specification = url.hashed(filename)
+ if not specification.scheme or specification.scheme == "" then
+ return { scheme = "file", path = filename, original = filename, filename = filename }
else
- return url.hashed(filename)
+ return specification
end
end
-function resolvers.methodhandler(what, filename, filetype) -- ...
- filename = file.collapsepath(filename)
- local specification = (type(filename) == "string" and resolvers.splitmethod(filename)) or filename -- no or { }, let it bomb
- local scheme = specification.scheme
- local resolver = resolvers[what]
- if resolver[scheme] then
- if trace_locating then
- report_resolvers("using special handler for '%s' -> '%s' -> '%s'",specification.original,what,table.sequenced(specification))
+resolvers.splitmethod = splitmethod -- bad name but ok
+
+-- the second argument is always analyzed (saves time later on) and the original
+-- gets passed as original but also as argument
+
+local function methodhandler(what,first,...) -- filename can be nil or false
+ local method = registered[what]
+ if method then
+ local how, namespace = method.how, method.namespace
+ if how == "uri" or how == "url" then
+ local specification = splitmethod(first)
+ local scheme = specification.scheme
+ local resolver = namespace and namespace[scheme]
+ if resolver then
+ if trace_methods then
+ report_resolvers("resolver: method=%s, how=%s, scheme=%s, argument=%s",what,how,scheme,first)
+ end
+ return resolver(specification,...)
+ else
+ resolver = namespace.default or namespace.file
+ if resolver then
+ if trace_methods then
+ report_resolvers("resolver: method=%s, how=%s, default, argument=%s",what,how,first)
+ end
+ return resolver(specification,...)
+ elseif trace_methods then
+ report_resolvers("resolver: method=%s, how=%s, no handler",what,how)
+ end
+ end
+ elseif how == "tag" then
+ local resolver = namespace and namespace[first]
+ if resolver then
+ if trace_methods then
+ report_resolvers("resolver: method=%s, how=%s, tag=%s",what,how,first)
+ end
+ return resolver(...)
+ else
+ resolver = namespace.default or namespace.file
+ if resolver then
+ if trace_methods then
+ report_resolvers("resolver: method=%s, how=%s, default",what,how)
+ end
+ return resolver(...)
+ elseif trace_methods then
+ report_resolvers("resolver: method=%s, how=%s, unknown",what,how)
+ end
+ end
end
- return resolver[scheme](filename,filetype,specification) -- todo: query
else
- if trace_locating then
- report_resolvers("no handler for '%s' -> '%s' -> '%s'",specification.original,what,table.sequenced(specification))
+ report_resolvers("resolver: method=%s, unknown",what)
+ end
+end
+
+resolvers.methodhandler = methodhandler
+
+function resolvers.registermethod(name,namespace,how)
+ registered[name] = { how = how or "tag", namespace = namespace }
+ namespace["byscheme"] = function(scheme,filename,...)
+ if scheme == "file" then
+ return methodhandler(name,filename,...)
+ else
+ return methodhandler(name,addurlscheme(filename,scheme),...)
end
- return resolver.tex(filename,filetype) -- todo: specification
end
end
+local concatinators = allocate { notfound = file.join } -- concatinate paths
+local locators = allocate { notfound = function() end } -- locate databases
+local hashers = allocate { notfound = function() end } -- load databases
+local generators = allocate { notfound = function() end } -- generate databases
+
+resolvers.concatinators = concatinators
+resolvers.locators = locators
+resolvers.hashers = hashers
+resolvers.generators = generators
+
+local registermethod = resolvers.registermethod
+
+registermethod("concatinators",concatinators,"tag")
+registermethod("locators", locators, "uri")
+registermethod("hashers", hashers, "uri")
+registermethod("generators", generators, "uri")
diff --git a/tex/context/base/data-out.lua b/tex/context/base/data-out.lua
index 598f857f8..11304c2ce 100644
--- a/tex/context/base/data-out.lua
+++ b/tex/context/base/data-out.lua
@@ -6,5 +6,13 @@ if not modules then modules = { } end modules ['data-out'] = {
license = "see context related readme files"
}
-resolvers.savers = utilities.storage.allocate { }
+local allocate = utilities.storage.allocate
+local resolvers = resolvers
+local registermethod = resolvers.registermethod
+
+local savers = allocate { helpers = { } }
+
+resolvers.savers = savers
+
+registermethod("savers", savers, "uri")
diff --git a/tex/context/base/data-res.lua b/tex/context/base/data-res.lua
index d45597093..408aec83c 100644
--- a/tex/context/base/data-res.lua
+++ b/tex/context/base/data-res.lua
@@ -19,11 +19,11 @@ local concat, insert, sortedkeys = table.concat, table.insert, table.sortedkeys
local next, type = next, type
local os = os
-local lpegP, lpegS, lpegR, lpegC, lpegCc, lpegCs, lpegCt = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.Cc, lpeg.Cs, lpeg.Ct
+local P, S, R, C, Cc, Cs, Ct, Carg = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.Cc, lpeg.Cs, lpeg.Ct, lpeg.Carg
local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
local filedirname, filebasename, fileextname, filejoin = file.dirname, file.basename, file.extname, file.join
-local collapsepath = file.collapsepath
+local collapsepath, joinpath = file.collapsepath, file.joinpath
local allocate = utilities.storage.allocate
local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
@@ -37,6 +37,7 @@ local resolvers = resolvers
local expandedpathfromlist = resolvers.expandedpathfromlist
local checkedvariable = resolvers.checkedvariable
local splitconfigurationpath = resolvers.splitconfigurationpath
+local methodhandler = resolvers.methodhandler
local initializesetter = utilities.setters.initialize
@@ -50,12 +51,12 @@ resolvers.luacnfspec = '{$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local
resolvers.luacnfname = 'texmfcnf.lua'
resolvers.luacnfstate = "unknown"
-local unset_variable = "unset"
+local unset_variable = "unset"
-local formats = resolvers.formats
-local suffixes = resolvers.suffixes
-local dangerous = resolvers.dangerous
-local suffixmap = resolvers.suffixmap
+local formats = resolvers.formats
+local suffixes = resolvers.suffixes
+local dangerous = resolvers.dangerous
+local suffixmap = resolvers.suffixmap
resolvers.defaultsuffixes = { "tex" } -- "mkiv", "cld" -- too tricky
@@ -100,7 +101,7 @@ function resolvers.newinstance()
end
-function resolvers.setinstance(someinstance)
+function resolvers.setinstance(someinstance) -- only one instance is active
instance = someinstance
resolvers.instance = someinstance
return someinstance
@@ -122,7 +123,7 @@ function resolvers.setenv(key,value)
end
end
-function resolvers.getenv(key)
+local function getenv(key)
local value = instance.environment[key]
if value and value ~= "" then
return value
@@ -132,23 +133,85 @@ function resolvers.getenv(key)
end
end
-resolvers.env = resolvers.getenv
+resolvers.getenv = getenv
+resolvers.env = getenv
+
+local function resolve(key)
+ local value = instance.variables[key] or ""
+ return (value ~= "" and value) or getenv(key) or ""
+end
+
+local dollarstripper = lpeg.stripper("$")
+local inhibitstripper = P("!")^0 * Cs(P(1)^0)
+local backslashswapper = lpeg.replacer("\\","/")
+
+local somevariable = P("$") / ""
+local somekey = C(R("az","AZ","09","__","--")^1)
+local somethingelse = P(";") * ((1-S("!{}/\\"))^1 * P(";") / "")
+ + P(";") * (P(";") / "")
+ + P(1)
+
+local pattern = Cs( (somevariable * (somekey/resolve) + somethingelse)^1 )
local function expandvars(lst) -- simple vars
- local variables, getenv = instance.variables, resolvers.getenv
- local function resolve(a)
- local va = variables[a] or ""
- return (va ~= "" and va) or getenv(a) or ""
- end
for k=1,#lst do
- local var = lst[k]
- var = gsub(var,"%$([%a%d%_%-]+)",resolve)
- var = gsub(var,";+",";")
- var = gsub(var,";[!{}/\\]+;",";")
- lst[k] = var
+ local lk = lst[k]
+ lst[k] = lpegmatch(pattern,lk) or lk
+ end
+end
+
+--~ local function expandvars(lst) -- simple vars
+--~ local variables, getenv = instance.variables, resolvers.getenv
+--~ local function resolve(a)
+--~ local va = variables[a] or ""
+--~ return (va ~= "" and va) or getenv(a) or ""
+--~ end
+--~ for k=1,#lst do
+--~ local var = lst[k]
+--~ var = gsub(var,"%$([%a%d%_%-]+)",resolve)
+--~ var = gsub(var,";+",";")
+--~ var = gsub(var,";[!{}/\\]+;",";")
+--~ lst[k] = var
+--~ end
+--~ end
+
+local slash = P("/")
+
+local pattern = Cs (
+ Cc("^") * (
+ Cc("%") * S(".-")
+ + slash^2 * P(-1) / "/.*"
+ + slash^2 / "/.-/"
+ + (1-slash) * P(-1) * Cc("/")
+ + P(1)
+ )^1 * Cc("$")
+)
+
+local function makepathexpression(str)
+ if str == "." then
+ return "^%./$"
+ else
+ return lpegmatch(pattern,str)
end
end
+--~ local function makepathexpression(str)
+--~ if str == "." then
+--~ return "^%./$"
+--~ else
+--~ local expression
+--~ if not find(str,"/$") then
+--~ expression = str .. "/"
+--~ else
+--~ expression = str
+--~ end
+--~ expression = gsub(expression,"([%-%.])","%%%1") -- this also influences
+--~ expression = gsub(expression,"//+$", '/.*') -- later usage of pathname
+--~ expression = gsub(expression,"//", '/.-/') -- not ok for /// but harmless
+--~ return "^" .. expression .. "$"
+--~ end
+--~ end
+
local function resolve(key)
local value = instance.variables[key]
if value and value ~= "" then
@@ -162,22 +225,27 @@ local function resolve(key)
return e ~= nil and e ~= "" and checkedvariable(e) or ""
end
+local pattern = Cs( (somevariable * (somekey/resolve) + somethingelse)^1 )
+
local function expandedvariable(var) -- simple vars
- var = gsub(var,"%$([%a%d%_%-]+)",resolve)
- var = gsub(var,";+",";")
- var = gsub(var,";[!{}/\\]+;",";")
- return var
+ return lpegmatch(pattern,var) or var
end
+--~ local function expandedvariable(var) -- simple vars
+--~ var = gsub(var,"%$([%a%d%_%-]+)",resolve)
+--~ var = gsub(var,";+",";")
+--~ var = gsub(var,";[!{}/\\]+;",";")
+--~ return var
+--~ end
+
local function entry(entries,name)
if name and name ~= "" then
- name = gsub(name,'%$','')
- -- local result = entries[name..'.'..instance.progname] or entries[name]
+ name = lpegmatch(dollarstripper,name)
local result = entries[instance.progname .. '.' .. name] or entries[name]
if result then
return result
else
- result = resolvers.getenv(name)
+ result = getenv(name)
if result then
instance.variables[name] = result
resolvers.expandvariables()
@@ -190,8 +258,7 @@ end
local function is_entry(entries,name)
if name and name ~= "" then
- name = gsub(name,'%$','')
- -- return (entries[name..'.'..instance.progname] or entries[name]) ~= nil
+ name = lpegmatch(dollarstripper,name)
return (entries[instance.progname .. '.' .. name] or entries[name]) ~= nil
else
return false
@@ -202,7 +269,7 @@ local function reportcriticalvariables()
if trace_locating then
for i=1,#resolvers.criticalvars do
local v = resolvers.criticalvars[i]
- report_resolvers("variable '%s' set to '%s'",v,resolvers.getenv(v) or "unknown")
+ report_resolvers("variable '%s' set to '%s'",v,getenv(v) or "unknown")
end
report_resolvers()
end
@@ -212,7 +279,7 @@ end
local function identify_configuration_files()
local specification = instance.specification
if #specification == 0 then
- local cnfspec = resolvers.getenv('TEXMFCNF')
+ local cnfspec = getenv('TEXMFCNF')
if cnfspec == "" then
cnfspec = resolvers.luacnfspec
resolvers.luacnfstate = "default"
@@ -284,7 +351,6 @@ local function load_configuration_files()
end
end
setups[pathname] = t
-
if resolvers.luacnfstate == "default" then
-- the following code is not tested
local cnfspec = t["TEXMFCNF"]
@@ -346,63 +412,30 @@ end
-- database loading
--- locators
-
-function resolvers.locatedatabase(specification)
- return resolvers.methodhandler('locators', specification)
-end
-
-function resolvers.locators.tex(specification)
- if specification and specification ~= '' and lfs.isdir(specification) then
- if trace_locating then
- report_resolvers("tex locator '%s' found",specification)
- end
- resolvers.appendhash('file',specification,filename,true) -- cache
- elseif trace_locating then
- report_resolvers("tex locator '%s' not found",specification)
- end
-end
-
--- hashers
-
-function resolvers.hashdatabase(tag,name)
- return resolvers.methodhandler('hashers',tag,name)
-end
-
local function load_file_databases()
instance.loaderror, instance.files = false, allocate()
if not instance.renewcache then
local hashes = instance.hashes
for k=1,#hashes do
local hash = hashes[k]
- resolvers.hashdatabase(hash.tag,hash.name)
+ resolvers.hashers.byscheme(hash.type,hash.name)
if instance.loaderror then break end
end
end
end
-function resolvers.hashers.tex(tag,name) -- used where?
- local content = caches.loadcontent(tag,'files')
- if content then
- instance.files[tag] = content
- else
- instance.files[tag] = { }
- instance.loaderror = true
- end
-end
-
local function locate_file_databases()
-- todo: cache:// and tree:// (runtime)
local texmfpaths = resolvers.expandedpathlist('TEXMF')
for i=1,#texmfpaths do
local path = collapsepath(texmfpaths[i])
- local stripped = gsub(path,"^!!","")
- local runtime = stripped == path
- path = resolvers.cleanpath(path)
+ local stripped = lpegmatch(inhibitstripper,path)
if stripped ~= "" then
+ local runtime = stripped == path
+ path = resolvers.cleanpath(path)
if lfs.isdir(path) then
local spec = resolvers.splitmethod(stripped)
- if spec.scheme == "cache" then
+ if spec.scheme == "cache" or spec.scheme == "file" then
stripped = spec.path
elseif runtime and (spec.noscheme or spec.scheme == "file") then
stripped = "tree:///" .. stripped
@@ -414,7 +447,7 @@ local function locate_file_databases()
report_resolvers("locating list of '%s' (cached)",path)
end
end
- resolvers.locatedatabase(stripped) -- nothing done with result
+ methodhandler('locators',stripped) -- nothing done with result
else
if trace_locating then
if runtime then
@@ -433,8 +466,9 @@ end
local function generate_file_databases()
local hashes = instance.hashes
- for i=1,#hashes do
- resolvers.methodhandler('generators',hashes[i].tag)
+ for k=1,#hashes do
+ local hash = hashes[k]
+ methodhandler('generators',hash.name)
end
if trace_locating then
report_resolvers()
@@ -444,10 +478,13 @@ end
local function save_file_databases() -- will become cachers
for i=1,#instance.hashes do
local hash = instance.hashes[i]
- local cachename = hash.tag
+ local cachename = hash.name
if hash.cache then
local content = instance.files[cachename]
caches.collapsecontent(content)
+ if trace_locating then
+ report_resolvers("saving tree '%s'",cachename)
+ end
caches.savecontent(cachename,"files",content)
elseif trace_locating then
report_resolvers("not saving runtime tree '%s'",cachename)
@@ -471,23 +508,22 @@ local function load_databases()
end
end
-function resolvers.appendhash(type,tag,name,cache)
+function resolvers.appendhash(type,name,cache)
if trace_locating then
- report_resolvers("hash '%s' appended",tag)
+ report_resolvers("hash '%s' appended",name)
end
- insert(instance.hashes, { type = type, tag = tag, name = name, cache = cache } )
+ insert(instance.hashes, { type = type, name = name, cache = cache } )
end
-function resolvers.prependhash(type,tag,name,cache)
+function resolvers.prependhash(type,name,cache)
if trace_locating then
- report_resolvers("hash '%s' prepended",tag)
+ report_resolvers("hash '%s' prepended",name)
end
- insert(instance.hashes, 1, { type = type, tag = tag, name = name, cache = cache } )
+ insert(instance.hashes, 1, { type = type, name = name, cache = cache } )
end
function resolvers.extendtexmfvariable(specification) -- crap, we could better prepend the hash
--- local t = resolvers.expandedpathlist('TEXMF') -- full expansion
- local t = resolvers.splitpath(resolvers.getenv('TEXMF'))
+ local t = resolvers.splitpath(getenv('TEXMF'))
insert(t,1,specification)
local newspec = concat(t,";")
if instance.environment["TEXMF"] then
@@ -501,10 +537,6 @@ function resolvers.extendtexmfvariable(specification) -- crap, we could better p
reset_hashes()
end
-function resolvers.generators.tex(specification,tag)
- instance.files[tag or specification] = resolvers.scanfiles(specification)
-end
-
function resolvers.splitexpansions()
local ie = instance.expansions
for k,v in next, ie do
@@ -534,9 +566,20 @@ function resolvers.datastate()
return caches.contentstate()
end
+local function resolve(a)
+ return instance.expansions[a] or getenv(a)
+end
+
+local cleaner = P("\\") / "/" + P(";") * S("!{}/\\")^0 * P(";")^1 / ";"
+
+local variable = R("az","AZ","09","__","--")^1 / resolve
+ variable = (P("$")/"") * (variable + (P("{")/"") * variable * (P("}")/""))
+
+ cleaner = Cs((cleaner + P(1))^0)
+ variable = Cs((variable + P(1))^0)
+
function resolvers.expandvariables()
local expansions, environment, variables = allocate(), instance.environment, instance.variables
- local getenv = resolvers.getenv
instance.expansions = expansions
local engine, progname = instance.engine, instance.progname
if type(engine) ~= "string" then instance.engine, engine = "", "" end
@@ -544,12 +587,7 @@ function resolvers.expandvariables()
if engine ~= "" then environment['engine'] = engine end
if progname ~= "" then environment['progname'] = progname end
for k,v in next, environment do
- -- local a, b = match(k,"^(%a+)%_(.*)%s*$") -- too many vars have an _ in the name
- -- if a and b then -- so let's forget about it; it was a
- -- expansions[a..'.'..b] = v -- hack anyway for linux and not needed
- -- else -- anymore as we now have directives
- expansions[k] = v
- -- end
+ expansions[k] = v
end
for k,v in next, environment do -- move environment to expansions (variables are already in there)
if not expansions[k] then expansions[k] = v end
@@ -557,26 +595,19 @@ function resolvers.expandvariables()
for k,v in next, variables do -- move variables to expansions
if not expansions[k] then expansions[k] = v end
end
- local busy = false
- local function resolve(a)
- busy = true
- return expansions[a] or getenv(a)
- end
- while true do
- busy = false
+ repeat
+ local busy = false
for k,v in next, expansions do
- local s, n = gsub(v,"%$([%a%d%_%-]+)",resolve)
- local s, m = gsub(s,"%$%{([%a%d%_%-]+)%}",resolve)
- if n > 0 or m > 0 then
- s = gsub(s,";+",";")
- s = gsub(s,";[!{}/\\]+;",";")
- expansions[k]= s
+ local s = lpegmatch(variable,v)
+ if s ~= v then
+ busy = true
+ expansions[k] = s
end
end
- if not busy then break end
- end
+ until not busy
+
for k,v in next, expansions do
- expansions[k] = gsub(v,"\\", '/')
+ expansions[k] = lpegmatch(cleaner,v)
end
end
@@ -603,7 +634,7 @@ function resolvers.unexpandedpathlist(str)
end
function resolvers.unexpandedpath(str)
- return file.joinpath(resolvers.unexpandedpathlist(str))
+ return joinpath(resolvers.unexpandedpathlist(str))
end
local done = { }
@@ -717,7 +748,7 @@ function resolvers.cleanpathlist(str)
end
function resolvers.expandpath(str)
- return file.joinpath(resolvers.expandedpathlist(str))
+ return joinpath(resolvers.expandedpathlist(str))
end
function resolvers.expandedpathlist(str)
@@ -725,7 +756,7 @@ function resolvers.expandedpathlist(str)
return ep or { } -- ep ?
elseif instance.savelists then
-- engine+progname hash
- str = gsub(str,"%$","")
+ str = lpegmatch(dollarstripper,str)
if not instance.lists[str] then -- cached
local lst = made_list(instance,resolvers.splitpath(resolvers.expansion(str)))
instance.lists[str] = expandedpathfromlist(lst)
@@ -738,28 +769,34 @@ function resolvers.expandedpathlist(str)
end
function resolvers.expandedpathlistfromvariable(str) -- brrr
- local tmp = resolvers.variableofformatorsuffix(gsub(str,"%$",""))
- if tmp ~= "" then
- return resolvers.expandedpathlist(tmp)
- else
- return resolvers.expandedpathlist(str)
- end
+ str = lpegmatch(dollarstripper,str)
+ local tmp = resolvers.variableofformatorsuffix(str)
+ return resolvers.expandedpathlist(tmp ~= "" and tmp or str)
end
function resolvers.expandpathfromvariable(str)
- return file.joinpath(resolvers.expandedpathlistfromvariable(str))
+ return joinpath(resolvers.expandedpathlistfromvariable(str))
end
function resolvers.expandbraces(str) -- output variable and brace expansion of STRING
local ori = resolvers.variable(str)
local pth = expandedpathfromlist(resolvers.splitpath(ori))
- return file.joinpath(pth)
+ return joinpath(pth)
end
-resolvers.isreadable = { }
+function resolvers.registerfilehash(name,content,someerror)
+ if content then
+ instance.files[name] = content
+ else
+ instance.files[name] = { }
+ if someerror == true then -- can be unset
+ instance.loaderror = someerror
+ end
+ end
+end
-function resolvers.isreadable.file(name)
- local readable = lfs.isfile(name) -- brrr
+local function isreadable(name)
+ local readable = file.is_readable(name)
if trace_detail then
if readable then
report_resolvers("file '%s' is readable",name)
@@ -770,8 +807,6 @@ function resolvers.isreadable.file(name)
return readable
end
-resolvers.isreadable.tex = resolvers.isreadable.file
-
-- name
-- name/name
@@ -792,7 +827,7 @@ local function collect_files(names)
local hashes = instance.hashes
for h=1,#hashes do
local hash = hashes[h]
- local blobpath = hash.tag
+ local blobpath = hash.name
local files = blobpath and instance.files[blobpath]
if files then
if trace_detail then
@@ -813,7 +848,7 @@ local function collect_files(names)
if not dname or find(blobfile,dname) then
local kind = hash.type
local search = filejoin(blobpath,blobfile,bname)
- local result = resolvers.concatinators[hash.type](blobroot,blobfile,bname)
+ local result = methodhandler('concatinators',hash.type,blobroot,blobfile,bname)
if trace_detail then
report_resolvers("match: kind '%s', search '%s', result '%s'",kind,search,result)
end
@@ -826,7 +861,7 @@ local function collect_files(names)
if not dname or find(vv,dname) then
local kind = hash.type
local search = filejoin(blobpath,vv,bname)
- local result = resolvers.concatinators[hash.type](blobroot,vv,bname)
+ local result = methodhandler('concatinators',hash.type,blobroot,vv,bname)
if trace_detail then
report_resolvers("match: kind '%s', search '%s', result '%s'",kind,search,result)
end
@@ -864,6 +899,8 @@ local function can_be_dir(name) -- can become local
return fakepaths[name] == 1
end
+local preparetreepattern = Cs((P(".")/"%%." + P("-")/"%%-" + P(1))^0 * Cc("$"))
+
local function collect_instance_files(filename,askedformat,allresults) -- todo : plugin (scanners, checkers etc)
local result = { }
local stamp = nil
@@ -881,7 +918,7 @@ local function collect_instance_files(filename,askedformat,allresults) -- todo :
end
end
if not dangerous[askedformat] then
- if resolvers.isreadable.file(filename) then
+ if isreadable(filename) then
if trace_detail then
report_resolvers("file '%s' found directly",filename)
end
@@ -897,7 +934,7 @@ local function collect_instance_files(filename,askedformat,allresults) -- todo :
end
result = resolvers.findwildcardfiles(filename) -- we can use the local
elseif file.is_qualified_path(filename) then
- if resolvers.isreadable.file(filename) then
+ if isreadable(filename) then
if trace_locating then
report_resolvers("qualified name '%s'", filename)
end
@@ -910,7 +947,7 @@ local function collect_instance_files(filename,askedformat,allresults) -- todo :
for i=1,#format_suffixes do
local s = format_suffixes[i]
forcedname = filename .. "." .. s
- if resolvers.isreadable.file(forcedname) then
+ if isreadable(forcedname) then
if trace_locating then
report_resolvers("no suffix, forcing format filetype '%s'", s)
end
@@ -924,7 +961,7 @@ local function collect_instance_files(filename,askedformat,allresults) -- todo :
-- try to find in tree (no suffix manipulation), here we search for the
-- matching last part of the name
local basename = filebasename(filename)
- local pattern = gsub(filename .. "$","([%.%-])","%%%1")
+ local pattern = lpegmatch(preparetreepattern,filename)
-- messy .. to be sorted out
local savedformat = askedformat
local format = savedformat or ""
@@ -1022,7 +1059,7 @@ local function collect_instance_files(filename,askedformat,allresults) -- todo :
end
for k=1,#wantedfiles do
local fname = wantedfiles[k]
- if fname and resolvers.isreadable.file(fname) then
+ if fname and isreadable(fname) then
filename, done = fname, true
result[#result+1] = filejoin('.',fname)
break
@@ -1048,26 +1085,15 @@ local function collect_instance_files(filename,askedformat,allresults) -- todo :
if trace_detail then
report_resolvers("checking filename '%s'",filename)
end
- -- a bit messy ... esp the doscan setting here
- local doscan
for k=1,#pathlist do
local path = pathlist[k]
- if find(path,"^!!") then doscan = false else doscan = true end
- local pathname = gsub(path,"^!+", '')
+ local pathname = lpegmatch(inhibitstripper,path)
+ local doscan = path == pathname -- no ^!!
done = false
-- using file list
if filelist then
- local expression
-- compare list entries with permitted pattern -- /xx /xx//
- if not find(pathname,"/$") then
- expression = pathname .. "/"
- else
- expression = pathname
- end
- expression = gsub(expression,"([%-%.])","%%%1") -- this also influences
- expression = gsub(expression,"//+$", '/.*') -- later usage of pathname
- expression = gsub(expression,"//", '/.-/') -- not ok for /// but harmless
- expression = "^" .. expression .. "$"
+ local expression = makepathexpression(pathname)
if trace_detail then
report_resolvers("using pattern '%s' for path '%s'",expression,pathname)
end
@@ -1096,7 +1122,8 @@ local function collect_instance_files(filename,askedformat,allresults) -- todo :
end
if not done and doscan then
-- check if on disk / unchecked / does not work at all / also zips
- if resolvers.splitmethod(pathname).scheme == 'file' then -- ?
+ local scheme = url.hasscheme(pathname)
+ if not scheme or scheme == "file" then
local pname = gsub(pathname,"%.%*$",'')
if not find(pname,"%*") then
local ppname = gsub(pname,"/+$","")
@@ -1104,7 +1131,7 @@ local function collect_instance_files(filename,askedformat,allresults) -- todo :
for k=1,#wantedfiles do
local w = wantedfiles[k]
local fname = filejoin(ppname,w)
- if resolvers.isreadable.file(fname) then
+ if isreadable(fname) then
if trace_detail then
report_resolvers("found '%s' by scanning",fname)
end
@@ -1137,9 +1164,6 @@ local function collect_instance_files(filename,askedformat,allresults) -- todo :
return result
end
-resolvers.concatinators.tex = filejoin
-resolvers.concatinators.file = resolvers.concatinators.tex
-
local function findfiles(filename,filetype,allresults)
local result = collect_instance_files(filename,filetype or "",allresults)
if #result == 0 then
@@ -1160,7 +1184,7 @@ function resolvers.findfile(filename,filetype)
end
function resolvers.findpath(filename,filetype)
- return file.dirname(findfiles(filename,filetype,false)[1] or "")
+ return filedirname(findfiles(filename,filetype,false)[1] or "")
end
local function findgivenfiles(filename,allresults)
@@ -1168,7 +1192,7 @@ local function findgivenfiles(filename,allresults)
local hashes = instance.hashes
for k=1,#hashes do
local hash = hashes[k]
- local files = instance.files[hash.tag] or { }
+ local files = instance.files[hash.name] or { }
local blist = files[bname]
if not blist then
local rname = "remap:"..bname
@@ -1180,12 +1204,12 @@ local function findgivenfiles(filename,allresults)
end
if blist then
if type(blist) == 'string' then
- result[#result+1] = resolvers.concatinators[hash.type](hash.tag,blist,bname) or ""
+ result[#result+1] = methodhandler('concatinators',hash.type,hash.name,blist,bname) or ""
if not allresults then break end
else
for kk=1,#blist do
local vv = blist[kk]
- result[#result+1] = resolvers.concatinators[hash.type](hash.tag,vv,bname) or ""
+ result[#result+1] = methodhandler('concatinators',hash.type,hash.name,vv,bname) or ""
if not allresults then break end
end
end
@@ -1208,14 +1232,14 @@ local function doit(path,blist,bname,tag,kind,result,allresults)
if type(blist) == 'string' then
-- make function and share code
if find(lower(blist),path) then
- result[#result+1] = resolvers.concatinators[kind](tag,blist,bname) or ""
+ result[#result+1] = methodhandler('concatinators',kind,tag,blist,bname) or ""
done = true
end
else
for kk=1,#blist do
local vv = blist[kk]
if find(lower(vv),path) then
- result[#result+1] = resolvers.concatinators[kind](tag,vv,bname) or ""
+ result[#result+1] = methodhandler('concatinators',kind,tag,vv,bname) or ""
done = true
if not allresults then break end
end
@@ -1225,30 +1249,25 @@ local function doit(path,blist,bname,tag,kind,result,allresults)
return done
end
+local makewildcard = Cs(
+ (P("^")^0 * P("/") * P(-1) + P(-1)) /".*"
+ + (P("^")^0 * P("/") / "") * (P("*")/".*" + P("-")/"%%-" + P("?")/"."+ P("\\")/"/" + P(1))^0
+)
+
local function findwildcardfiles(filename,allresults) -- todo: remap: and lpeg
local result = { }
- local bname, dname = filebasename(filename), filedirname(filename)
- local path = gsub(dname,"^*/","")
- path = gsub(path,"*",".*")
- path = gsub(path,"-","%%-")
- if dname == "" then
- path = ".*"
- end
- local name = bname
- name = gsub(name,"*",".*")
- name = gsub(name,"-","%%-")
- path = lower(path)
- name = lower(name)
+ local path = lower(lpegmatch(makewildcard,filedirname (filename)))
+ local name = lower(lpegmatch(makewildcard,filebasename(filename)))
local files, done = instance.files, false
if find(name,"%*") then
local hashes = instance.hashes
for k=1,#hashes do
local hash = hashes[k]
- local tag, kind = hash.tag, hash.type
- for kk, hh in next, files[hash.tag] do
+ local hashname, hashtype = hash.name, hash.type
+ for kk, hh in next, files[hashname] do
if not find(kk,"^remap:") then
if find(lower(kk),name) then
- if doit(path,hh,kk,tag,kind,result,allresults) then done = true end
+ if doit(path,hh,kk,hashname,hashtype,result,allresults) then done = true end
if done and not allresults then break end
end
end
@@ -1258,8 +1277,8 @@ local function findwildcardfiles(filename,allresults) -- todo: remap: and lpeg
local hashes = instance.hashes
for k=1,#hashes do
local hash = hashes[k]
- local tag, kind = hash.tag, hash.type
- if doit(path,files[tag][bname],bname,tag,kind,result,allresults) then done = true end
+ local hashname, hashtype = hash.name, hash.type
+ if doit(path,files[hashname][bname],bname,hashname,hashtype,result,allresults) then done = true end
if done and not allresults then break end
end
end
@@ -1330,12 +1349,9 @@ end
-- resolvers.expandvar = resolvers.expansion -- output variable expansion of STRING.
function resolvers.showpath(str) -- output search path for file type NAME
- return file.joinpath(resolvers.expandedpathlist(resolvers.formatofvariable(str)))
+ return joinpath(resolvers.expandedpathlist(resolvers.formatofvariable(str)))
end
--- resolvers.findfile(filename)
--- resolvers.findfile(filename, f.iletype)
-
function resolvers.registerfile(files, name, path)
if files[name] then
if type(files[name]) == 'string' then
@@ -1360,7 +1376,7 @@ function resolvers.dowithvariable(name,func)
end
function resolvers.locateformat(name)
- local barename = gsub(name,"%.%a+$","")
+ local barename = file.removesuffix(name) -- gsub(name,"%.%a+$","")
local fmtname = caches.getfirstreadablefile(barename..".fmt","formats") or ""
if fmtname == "" then
fmtname = resolvers.findfile(barename..".fmt")
@@ -1396,7 +1412,7 @@ function resolvers.dowithfilesintree(pattern,handle,before,after) -- can be a ni
for i=1,#hashes do
local hash = hashes[i]
local blobtype = hash.type
- local blobpath = hash.tag
+ local blobpath = hash.name
if blobpath then
if before then
before(blobtype,blobpath,pattern)
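
To summarise the hash bookkeeping change in this file (sketch only, the path is invented): the old tag field is gone, records carry the path in name, and concatenation goes through the method handler.

resolvers.appendhash('file',"/opt/tex/texmf",true)
-- appends { type = "file", name = "/opt/tex/texmf", cache = true }; later lookups
-- build full names with
-- methodhandler('concatinators',hash.type,hash.name,path,basename)
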
diff --git a/tex/context/base/data-sch.lua b/tex/context/base/data-sch.lua
index 655cd8209..cac4baab7 100644
--- a/tex/context/base/data-sch.lua
+++ b/tex/context/base/data-sch.lua
@@ -23,86 +23,84 @@ schemes.threshold = 24 * 60 * 60
directives.register("schemes.threshold", function(v) schemes.threshold = tonumber(v) or schemes.threshold end)
-local cached, loaded, reused, thresholds = { }, { }, { }, { }
+local cached, loaded, reused, thresholds, handlers = { }, { }, { }, { }, { }
-function schemes.curl(name,cachename) -- will use sockets instead or the curl library
- local command = "curl --silent --create-dirs --output " .. cachename .. " " .. name -- no protocol .. "://"
+local function runcurl(name,cachename) -- will use sockets instead of the curl library
+ local command = "curl --silent --create-dirs --output " .. cachename .. " " .. name
os.spawn(command)
end
-function schemes.fetch(protocol,name,handler)
- local cleanname = gsub(name,"[^%a%d%.]+","-")
+local function fetch(specification)
+ local original = specification.original
+ local scheme = specification.scheme
+ local cleanname = gsub(original,"[^%a%d%.]+","-")
local cachename = caches.setfirstwritablefile(cleanname,"schemes")
- if not cached[name] then
+ if not cached[original] then
statistics.starttiming(schemes)
if not io.exists(cachename) or (os.difftime(os.time(),lfs.attributes(cachename).modification) >
- (thresholds[protocol] or schemes.threshold)) then
+ (thresholds[scheme] or schemes.threshold)) then
- cached[name] = cachename
+ cached[original] = cachename
+ local handler = handlers[scheme]
if handler then
if trace_schemes then
- report_schemes("fetching '%s', protocol '%s', method 'built-in'",name,protocol)
+ report_schemes("fetching '%s', protocol '%s', method 'built-in'",original,scheme)
end
io.flush()
- handler(protocol,name,cachename)
+ handler(specification,cachename)
else
if trace_schemes then
- report_schemes("fetching '%s', protocol '%s', method 'curl'",name,protocol)
+ report_schemes("fetching '%s', protocol '%s', method 'curl'",original,scheme)
end
io.flush()
- schemes.curl(name,cachename)
+ runcurl(original,cachename)
end
end
if io.exists(cachename) then
- cached[name] = cachename
+ cached[original] = cachename
if trace_schemes then
- report_schemes("using cached '%s', protocol '%s', cachename '%s'",name,protocol,cachename)
+ report_schemes("using cached '%s', protocol '%s', cachename '%s'",original,scheme,cachename)
end
else
- cached[name] = ""
+ cached[original] = ""
if trace_schemes then
- report_schemes("using missing '%s', protocol '%s'",name,protocol)
+ report_schemes("using missing '%s', protocol '%s'",original,scheme)
end
end
- loaded[protocol] = loaded[protocol] + 1
+ loaded[scheme] = loaded[scheme] + 1
statistics.stoptiming(schemes)
else
if trace_schemes then
- report_schemes("reusing '%s', protocol '%s'",name,protocol)
+ report_schemes("reusing '%s', protocol '%s'",original,scheme)
end
- reused[protocol] = reused[protocol] + 1
+ reused[scheme] = reused[scheme] + 1
end
- return cached[name]
+ return cached[original]
end
-function finders.schemes(protocol,filename,handler)
- local foundname = schemes.fetch(protocol,filename,handler)
- return finders.generic(protocol,foundname)
+local function finder(specification,filetype)
+ return resolvers.methodhandler("finders",fetch(specification),filetype)
end
-function openers.schemes(protocol,filename)
- return openers.generic(protocol,filename)
+local opener = openers.file
+local loader = loaders.file
+
+local function install(scheme,handler,threshold)
+ handlers [scheme] = handler
+ loaded [scheme] = 0
+ reused [scheme] = 0
+ finders [scheme] = finder
+ openers [scheme] = opener
+ loaders [scheme] = loader
+ thresholds[scheme] = threshold or schemes.threshold
end
-function loaders.schemes(protocol,filename)
- return loaders.generic(protocol,filename)
-end
-
--- could be metatable and proper subtables
+schemes.install = install
-function schemes.install(protocol,handler,threshold)
- loaded [protocol] = 0
- reused [protocol] = 0
- finders [protocol] = function (filename,filetype) return finders.schemes(protocol,filename,handler) end
- openers [protocol] = function (filename) return openers.schemes(protocol,filename) end
- loaders [protocol] = function (filename) return loaders.schemes(protocol,filename) end
- thresholds[protocol] = threshold or schemes.threshold
-end
-
-local function http_handler(protocol,name,cachename)
+local function http_handler(specification,cachename)
local tempname = cachename .. ".tmp"
local f = io.open(tempname,"wb")
local status, message = http.request {
- url = name,
+ url = specification.original,
sink = ltn12.sink.file(f)
}
if not status then
@@ -114,9 +112,9 @@ local function http_handler(protocol,name,cachename)
return cachename
end
-schemes.install('http',http_handler)
-schemes.install('https')
-schemes.install('ftp')
+install('http',http_handler)
+install('https') -- see pod
+install('ftp')
statistics.register("scheme handling time", function()
local l, r, nl, nr = { }, { }, 0, 0
@@ -142,6 +140,3 @@ statistics.register("scheme handling time", function()
return nil
end
end)
-
---~ trace_schemes = true
---~ print(schemes.fetch("http","http://www.pragma-ade.com/show-man.pdf",http_handler))
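
A hypothetical install call (not part of the patch; the scheme name, handler and threshold are invented, and the module table is assumed to be exposed as resolvers.schemes): handlers now receive the split specification plus the target cache name.

local function gopher_handler(specification,cachename)
    -- fetch specification.original and write the result to cachename here
    return cachename
end
resolvers.schemes.install('gopher',gopher_handler,60 * 60) -- refetch after an hour
-- without a handler the generic curl fallback is used, as for 'https' and 'ftp'
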
diff --git a/tex/context/base/data-tex.lua b/tex/context/base/data-tex.lua
index 393c787d6..5c6cf5083 100644
--- a/tex/context/base/data-tex.lua
+++ b/tex/context/base/data-tex.lua
@@ -6,24 +6,14 @@ if not modules then modules = { } end modules ['data-tex'] = {
license = "see context related readme files"
}
--- special functions that deal with io
-
-local format, lower = string.format, string.lower
-local unpack = unpack or table.unpack
-
local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
local report_resolvers = logs.new("resolvers")
local resolvers = resolvers
-local finders, openers, loaders, savers = resolvers.finders, resolvers.openers, resolvers.loaders, resolvers.savers
-
-local checkgarbage = utilities.garbagecollector and utilities.garbagecollector.check
-
--- the main text reader --
-
-local sequencers = utilities.sequencers
+local sequencers = utilities.sequencers
+local methodhandler = resolvers.methodhandler
local fileprocessor = nil
local lineprocessor = nil
@@ -35,24 +25,26 @@ local textfileactions = sequencers.reset {
}
local textlineactions = sequencers.reset {
- arguments = "str,filename,linenumber",
+ arguments = "str,filename,linenumber,noflines",
returnvalues = "str",
results = "str",
}
-openers.textfileactions = textfileactions
-openers.textlineactions = textlineactions
+local helpers = resolvers.openers.helpers
+local appendgroup = sequencers.appendgroup
-sequencers.appendgroup(textfileactions,"system")
-sequencers.appendgroup(textfileactions,"user")
+helpers.textfileactions = textfileactions
+helpers.textlineactions = textlineactions
-sequencers.appendgroup(textlineactions,"system")
-sequencers.appendgroup(textlineactions,"user")
+appendgroup(textfileactions,"before") -- user
+appendgroup(textfileactions,"system") -- private
+appendgroup(textfileactions,"after" ) -- user
-function openers.textopener(tag,filename,file_handle)
- if textfileactions.dirty then
- fileprocessor = sequencers.compile(textfileactions)
- end
+appendgroup(textlineactions,"before") -- user
+appendgroup(textlineactions,"system") -- private
+appendgroup(textlineactions,"after" ) -- user
+
+function helpers.textopener(tag,filename,file_handle)
local lines
if not file_handle then
lines = io.loaddata(filename)
@@ -67,7 +59,7 @@ function openers.textopener(tag,filename,file_handle)
if type(lines) == "string" then
local kind = unicode.filetype(lines)
if trace_locating then
- report_resolvers("%s opener, file '%s' opened using method '%s'",tag,filename,kind)
+ report_resolvers("%s opener, '%s' opened using method '%s'",tag,filename,kind)
end
if kind == "utf-16-be" then
lines = unicode.utf16_to_utf8_be(lines)
@@ -78,327 +70,89 @@ function openers.textopener(tag,filename,file_handle)
elseif kind == "utf-32-le" then
lines = unicode.utf32_to_utf8_le(lines)
else -- utf8 or unknown
+ if textfileactions.dirty then -- maybe use autocompile
+ fileprocessor = sequencers.compile(textfileactions)
+ end
lines = fileprocessor(lines,filename) or lines
lines = string.splitlines(lines)
end
elseif trace_locating then
- report_resolvers("%s opener, file '%s' opened",tag,filename)
+ report_resolvers("%s opener, '%s' opened",tag,filename)
end
- local t = {
- lines = lines,
- current = 0,
- handle = nil,
- noflines = #lines,
- close = function()
+ local t = {
+ filename = filename,
+ noflines = #lines,
+ currentline = 0,
+ close = function()
if trace_locating then
- report_resolvers("%s closer, file '%s' closed",tag,filename)
+ report_resolvers("%s closer, '%s' closed",tag,filename)
end
logs.show_close(filename)
t = nil
end,
- reader = function(self)
+ reader = function(self)
self = self or t
- local current, noflines = self.current, self.noflines
- if current >= noflines then
+ local currentline, noflines = self.currentline, self.noflines
+ if currentline >= noflines then
return nil
else
- current = current + 1
- self.current = current
- local line = lines[current]
- if not line then
+ currentline = currentline + 1
+ self.currentline = currentline
+ local content = lines[currentline]
+ if not content then
return nil
- elseif line == "" then
+ elseif content == "" then
return ""
else
if textlineactions.dirty then
- lineprocessor = sequencers.compile(textlineactions)
+ lineprocessor = sequencers.compile(textlineactions) -- maybe use autocompile
end
- return lineprocessor(line,filename,current) or line
+ return lineprocessor(content,filename,currentline,noflines) or content
end
end
end
}
return t
-end
-
-local data, n, template = { }, 0, "virtual://virtualfile:%s"
-
--- todo: report statistics
-
-function savers.virtual(content)
- n = n + 1
- local filename = format(template,n)
- if trace_locating then
- report_resolvers("%s finder: virtual file '%s' saved",tag,filename)
- end
- data[filename] = content
- return filename
end
-function finders.virtual(filename,filetype,specification)
- local path = specification and specification.path
- local name = path ~= "" and path or filename
- local d = data[name]
- if d then
- if trace_locating then
- report_resolvers("virtual finder: file '%s' found",filename)
- end
- return filename
- else
- if trace_locating then
- report_resolvers("virtual finder: unknown file '%s'",filename)
- end
- return unpack(finders.notfound)
- end
+function resolvers.findtexfile(filename,filetype)
+ return methodhandler('finders',filename,filetype)
end
-function openers.virtual(filename,filetype,specification) -- duplicate ... todo: specification
- local path = specification and specification.path
- local name = path ~= "" and path or filename
- local d = data[name]
- if d then
- if trace_locating then
- report_resolvers("virtual opener, file '%s' opened",filename)
- end
- data[filename] = nil
- return openers.textopener("virtual",filename,d)
- else
- if trace_locating then
- report_resolvers("virtual opener, file '%s' not found",filename)
- end
- return unpack(openers.notfound)
- end
+function resolvers.opentexfile(filename)
+ return methodhandler('openers',filename)
end
-function loaders.virtual(filename,filetype,specification)
- local path = specification and specification.path
- local name = path ~= "" and path or filename
- local d = data[name]
- if d then
- if trace_locating then
- report_resolvers("virtual loader, file '%s' loaded",filename)
- end
- data[filename] = nil
- return true, d, #d
- end
- if trace_locating then
- report_resolvers("virtual loader, file '%s' not loaded",filename)
- end
- return unpack(loaders.notfound)
+function resolvers.openfile(filename)
+ local fullname = methodhandler('finders',filename)
+ return fullname and fullname ~= "" and methodhandler('openers',fullname) or nil
end
--- could be a finder (beware: the generic finders take a tag!)
-
-function finders.generic(tag,filename,filetype)
- local foundname = resolvers.findfile(filename,filetype)
- if foundname and foundname ~= "" then
- if trace_locating then
- report_resolvers("%s finder: file '%s' found",tag,filename)
- end
- return foundname
- else
- if trace_locating then
- report_resolvers("%s finder: unknown file '%s'",tag,filename)
- end
- return unpack(finders.notfound)
- end
+function resolvers.loadtexfile(filename,filetype)
+ -- todo: optionally apply filters
+ local ok, data, size = resolvers.loadbinfile(filename, filetype)
+ return data or ""
end
-function openers.generic(tag,filename)
- if filename and filename ~= "" then
- local f = io.open(filename,"r")
- if f then
- logs.show_open(filename) -- todo
- if trace_locating then
- report_resolvers("%s opener, file '%s' opened",tag,filename)
- end
- return openers.textopener(tag,filename,f)
- end
- end
- if trace_locating then
- report_resolvers("%s opener, file '%s' not found",tag,filename)
- end
- return unpack(openers.notfound)
-end
+resolvers.texdatablob = resolvers.loadtexfile
-function loaders.generic(tag,filename)
- if filename and filename ~= "" then
- local f = io.open(filename,"rb")
- if f then
- logs.show_load(filename)
- if trace_locating then
- report_resolvers("%s loader, file '%s' loaded",tag,filename)
- end
- local s = f:read("*a")
- if checkgarbage then
- checkgarbage(#s)
- end
- f:close()
- if s then
- return true, s, #s
- end
- end
+local function installhandler(namespace,what,where,func)
+ if not func then
+ where, func = "after", where
end
- if trace_locating then
- report_resolvers("%s loader, file '%s' not found",tag,filename)
- end
- return unpack(loaders.notfound)
-end
-
-function finders.tex(filename,filetype)
- return finders.generic('tex',filename,filetype)
-end
-
-function openers.tex(filename)
- return openers.generic('tex',filename)
-end
-
-function loaders.tex(filename)
- return loaders.generic('tex',filename)
-end
-
-function resolvers.findtexfile(filename, filetype)
- return resolvers.methodhandler('finders',filename, filetype)
-end
-
-function resolvers.opentexfile(filename)
- return resolvers.methodhandler('openers',filename)
-end
-
-function resolvers.openfile(filename)
- local fullname = resolvers.findtexfile(filename)
- if fullname and (fullname ~= "") then
- return resolvers.opentexfile(fullname)
+ if where == "before" or where == "after" then
+ sequencers.appendaction(namespace,where,func)
else
- return nil
+ report_resolvers("installing input %s handlers in %s is not possible",what,tostring(where))
end
end
-function resolvers.loadtexfile(filename, filetype)
- -- todo: apply filters
- local ok, data, size = resolvers.loadbinfile(filename, filetype)
- return data or ""
-end
-
-resolvers.texdatablob = resolvers.loadtexfile
+function resolvers.installinputlinehandler(...) installhandler(helpers.textlineactions,"line",...) end
+function resolvers.installinputfilehandler(...) installhandler(helpers.textfileactions,"file",...) end
--- -- keep this one as reference as it's the first version
---
--- resolvers.filters = resolvers.filters or { }
---
--- local input_translator, utf_translator, user_translator = nil, nil, nil
---
--- function resolvers.filters.install(name,func)
--- if name == "input" then input_translator = func
--- elseif name == "utf" then utf_translator = func
--- elseif name == "user" then user_translator = func end
--- end
---
--- function openers.textopener(tag,filename,file_handle)
--- local u = unicode.utftype(file_handle)
--- local t = { }
--- if u > 0 then
--- if trace_locating then
--- report_resolvers("%s opener, file '%s' opened using method '%s'",tag,filename,unicode.utfname[u])
--- end
--- local l
--- local data = file_handle:read("*a")
--- if u > 2 then
--- l = unicode.utf32_to_utf8(data,u==4)
--- elseif u > 1 then
--- l = unicode.utf16_to_utf8(data,u==2)
--- else
--- l = string.splitlines(data)
--- end
--- file_handle:close()
--- t = {
--- utftype = u, -- may go away
--- lines = l,
--- current = 0, -- line number, not really needed
--- handle = nil,
--- noflines = #l,
--- close = function()
--- if trace_locating then
--- report_resolvers("%s closer, file '%s' closed",tag,filename)
--- end
--- logs.show_close(filename)
--- t = nil
--- end,
--- reader = function(self)
--- self = self or t
--- local current, lines = self.current, self.lines
--- if current >= #lines then
--- return nil
--- else
--- current = current + 1
--- self.current = current
--- local line = lines[current]
--- if not line then
--- return nil
--- elseif line == "" then
--- return ""
--- else
--- if input_translator then
--- line = input_translator(line)
--- end
--- if utf_translator then
--- line = utf_translator(line)
--- end
--- if user_translator then
--- line = user_translator(line)
--- end
--- return line
--- end
--- end
--- end
--- }
--- else
--- if trace_locating then
--- report_resolvers("%s opener, file '%s' opened",tag,filename)
--- end
--- -- todo: file;name -> freeze / eerste regel scannen -> freeze
--- --~ local data = lpegmatch(getlines,file_handle:read("*a"))
--- --~ local n = 0
--- t = {
--- reader = function() -- self
--- local line = file_handle:read()
--- --~ n = n + 1
--- --~ local line = data[n]
--- --~ print(line)
--- if not line then
--- return nil
--- elseif line == "" then
--- return ""
--- else
--- if input_translator then
--- line = input_translator(line)
--- end
--- if utf_translator then
--- line = utf_translator(line)
--- end
--- if user_translator then
--- line = user_translator(line)
--- end
--- return line
--- end
--- end,
--- close = function()
--- if trace_locating then
--- report_resolvers("%s closer, file '%s' closed",tag,filename)
--- end
--- logs.show_close(filename)
--- file_handle:close()
--- t = nil
--- collectgarbage("step") -- saves some memory, maybe checkgarbage but no #
--- end,
--- handle = function()
--- return file_handle
--- end,
--- noflines = function()
--- t.noflines = io.noflines(file_handle)
--- return t.noflines
--- end
--- }
--- end
--- return t
--- end
+-- local basename = file.basename
+-- resolvers.installinputlinehandler(function(str,filename,linenumber,noflines)
+-- logs.simple("[lc] file: %s, line: %s of %s, length: %s",basename(filename),linenumber,noflines,#str)
+-- end)
+-- resolvers.installinputfilehandler(function(str,filename)
+-- logs.simple("[fc] file: %s, length: %s",basename(filename),#str)
+-- end)
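
A further sketch (not in the patch): a group can also be passed explicitly, matching installhandler's (where,func) signature; "before" runs ahead of the system actions.

resolvers.installinputfilehandler("before",function(str,filename)
    return (string.gsub(str,"\t","    ")) -- e.g. expand tabs before anything else sees the file
end)
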
diff --git a/tex/context/base/data-tre.lua b/tex/context/base/data-tre.lua
index f119e52e7..b3b96094a 100644
--- a/tex/context/base/data-tre.lua
+++ b/tex/context/base/data-tre.lua
@@ -9,7 +9,8 @@ if not modules then modules = { } end modules ['data-tre'] = {
-- \input tree://oeps1/**/oeps.tex
local find, gsub, format = string.find, string.gsub, string.format
-local unpack = unpack or table.unpack
+
+local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
local report_resolvers = logs.new("resolvers")
@@ -17,10 +18,10 @@ local resolvers = resolvers
local done, found, notfound = { }, { }, resolvers.finders.notfound
-function resolvers.finders.tree(specification,filetype)
- local fnd = found[specification]
- if not fnd then
- local spec = resolvers.splitmethod(specification).path or ""
+function resolvers.finders.tree(specification)
+ local spec = specification.filename
+ local fnd = found[spec]
+ if fnd == nil then
if spec ~= "" then
local path, name = file.dirname(spec), file.basename(spec)
if path == "" then path = "." end
@@ -34,50 +35,38 @@ function resolvers.finders.tree(specification,filetype)
for k=1,#hash do
local v = hash[k]
if find(v,pattern) then
- found[specification] = v
+ found[spec] = v
return v
end
end
end
- fnd = unpack(notfound) -- unpack ? why not just notfound[1]
- found[specification] = fnd
+ fnd = notfound() -- false
+ found[spec] = fnd
end
return fnd
end
function resolvers.locators.tree(specification)
- local spec = resolvers.splitmethod(specification)
- local path = spec.path
- if path ~= '' and lfs.isdir(path) then
+ local name = specification.filename
+ if name ~= '' and lfs.isdir(name) then
if trace_locating then
- report_resolvers("tree locator '%s' found (%s)",path,specification)
+ report_resolvers("tree locator '%s' found",name)
end
- resolvers.appendhash('tree',specification,path,false) -- don't cache
+ resolvers.appendhash('tree',name,false) -- don't cache
elseif trace_locating then
- report_resolvers("tree locator '%s' not found",path)
+ report_resolvers("tree locator '%s' not found",name)
end
end
-function resolvers.hashers.tree(tag,name)
+function resolvers.hashers.tree(specification)
+ local name = specification.filename
if trace_locating then
- report_resolvers("analysing tree '%s' as '%s'",name,tag)
+ report_resolvers("analysing tree '%s'",name)
end
- -- todo: maybe share with done above
- local spec = resolvers.splitmethod(tag)
- local path = spec.path
- resolvers.generators.tex(path,tag) -- we share this with the normal tree analyzer
-end
-
-function resolvers.generators.tree(tag)
- local spec = resolvers.splitmethod(tag)
- local path = spec.path
- resolvers.generators.tex(path,tag) -- we share this with the normal tree analyzer
-end
-
-function resolvers.concatinators.tree(tag,path,name)
- return file.join(tag,path,name)
+ resolvers.methodhandler("hashers",name)
end
-resolvers.isreadable.tree = file.isreadable
-resolvers.openers.tree = resolvers.openers.generic
-resolvers.loaders.tree = resolvers.loaders.generic
+resolvers.concatinators.tree = resolvers.concatinators.file
+resolvers.generators.tree = resolvers.generators.file
+resolvers.openers.tree = resolvers.openers.file
+resolvers.loaders.tree = resolvers.loaders.file
diff --git a/tex/context/base/data-vir.lua b/tex/context/base/data-vir.lua
new file mode 100644
index 000000000..c1a6b3366
--- /dev/null
+++ b/tex/context/base/data-vir.lua
@@ -0,0 +1,81 @@
+if not modules then modules = { } end modules ['data-vir'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local format = string.format
+
+local trace_virtual = false
+local report_resolvers = logs.new("resolvers")
+
+trackers.register("resolvers.locating", function(v) trace_virtual = v end)
+trackers.register("resolvers.virtual", function(v) trace_virtual = v end)
+
+local resolvers = resolvers
+
+local finders, openers, loaders, savers = resolvers.finders, resolvers.openers, resolvers.loaders, resolvers.savers
+
+local data, n, template = { }, 0, "virtual://%s.%s" -- hm, number can be query
+
+function savers.virtual(specification,content)
+ n = n + 1 -- one number for all namespaces
+ local path = specification.path
+ local filename = format(template,path ~= "" and path or "virtualfile",n)
+ if trace_virtual then
+ report_resolvers("virtual saver: file '%s' saved",filename)
+ end
+ data[filename] = content
+ return filename
+end
+
+function finders.virtual(specification)
+ local original = specification.original
+ local d = data[original]
+ if d then
+ if trace_virtual then
+ report_resolvers("virtual finder: file '%s' found",original)
+ end
+ return original
+ else
+ if trace_virtual then
+ report_resolvers("virtual finder: unknown file '%s'",original)
+ end
+ return finders.notfound()
+ end
+end
+
+function openers.virtual(specification)
+ local original = specification.original
+ local d = data[original]
+ if d then
+ if trace_virtual then
+ report_resolvers("virtual opener, file '%s' opened",original)
+ end
+ data[original] = nil
+ return openers.helpers.textopener("virtual",original,d)
+ else
+ if trace_virtual then
+ report_resolvers("virtual opener, file '%s' not found",original)
+ end
+ return openers.notfound()
+ end
+end
+
+function loaders.virtual(specification)
+ local original = specification.original
+ local d = data[original]
+ if d then
+ if trace_virtual then
+ report_resolvers("virtual loader, file '%s' loaded",original)
+ end
+ data[original] = nil
+ return true, d, #d
+ end
+ if trace_virtual then
+ report_resolvers("virtual loader, file '%s' not loaded",original)
+ end
+ return loaders.notfound()
+end
diff --git a/tex/context/base/data-zip.lua b/tex/context/base/data-zip.lua
index 10c1b74f0..9ec440723 100644
--- a/tex/context/base/data-zip.lua
+++ b/tex/context/base/data-zip.lua
@@ -6,10 +6,9 @@ if not modules then modules = { } end modules ['data-zip'] = {
license = "see context related readme files"
}
--- to be redone using the more recent schemes mechanism
+-- partly redone .. needs testing
local format, find, match = string.format, string.find, string.match
-local unpack = unpack or table.unpack
local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
@@ -32,9 +31,6 @@ local archives = zip.archives
zip.registeredfiles = zip.registeredfiles or { }
local registeredfiles = zip.registeredfiles
-local finders, openers, loaders = resolvers.finders, resolvers.openers, resolvers.loaders
-local locators, hashers, concatinators = resolvers.locators, resolvers.hashers, resolvers.concatinators
-
local function validzip(str) -- todo: use url splitter
if not find(str,"^zip://") then
return "zip:///" .. str
@@ -64,159 +60,159 @@ function zip.closearchive(name)
end
end
-function locators.zip(specification) -- where is this used? startup zips (untested)
- specification = resolvers.splitmethod(specification)
- local zipfile = specification.path
- local zfile = zip.openarchive(name) -- tricky, could be in to be initialized tree
+function resolvers.locators.zip(specification)
+ local archive = specification.filename
+ local zipfile = archive and archive ~= "" and zip.openarchive(archive) -- tricky, could be in a tree that is yet to be initialized
if trace_locating then
- if zfile then
- report_resolvers("zip locator, archive '%s' found",specification.original)
+ if zipfile then
+ report_resolvers("zip locator, archive '%s' found",archive)
else
- report_resolvers("zip locator, archive '%s' not found",specification.original)
+ report_resolvers("zip locator, archive '%s' not found",archive)
end
end
end
-function hashers.zip(tag,name)
+function resolvers.hashers.zip(specification)
+ local archive = specification.filename
if trace_locating then
- report_resolvers("loading zip file '%s' as '%s'",name,tag)
+ report_resolvers("loading zip file '%s'",archive)
end
- resolvers.usezipfile(format("%s?tree=%s",tag,name))
+ resolvers.usezipfile(specification.original)
end
-function concatinators.zip(tag,path,name)
+function resolvers.concatinators.zip(zipfile,path,name) -- ok ?
if not path or path == "" then
- return format('%s?name=%s',tag,name)
+ return format('%s?name=%s',zipfile,name)
else
- return format('%s?name=%s/%s',tag,path,name)
+ return format('%s?name=%s/%s',zipfile,path,name)
end
end
-function resolvers.isreadable.zip(name)
- return true
-end
-
-function finders.zip(specification,filetype)
- specification = resolvers.splitmethod(specification)
- if specification.path then
- local q = url.query(specification.query)
- if q.name then
- local zfile = zip.openarchive(specification.path)
+function resolvers.finders.zip(specification)
+ local original = specification.original
+ local archive = specification.filename
+ if archive then
+ local query = url.query(specification.query)
+ local queryname = query.name
+ if queryname then
+ local zfile = zip.openarchive(archive)
if zfile then
if trace_locating then
- report_resolvers("zip finder, archive '%s' found",specification.path)
+ report_resolvers("zip finder, archive '%s' found",archive)
end
- local dfile = zfile:open(q.name)
+ local dfile = zfile:open(queryname)
if dfile then
dfile = zfile:close()
if trace_locating then
- report_resolvers("zip finder, file '%s' found",q.name)
+ report_resolvers("zip finder, file '%s' found",queryname)
end
return specification.original
elseif trace_locating then
- report_resolvers("zip finder, file '%s' not found",q.name)
+ report_resolvers("zip finder, file '%s' not found",queryname)
end
elseif trace_locating then
- report_resolvers("zip finder, unknown archive '%s'",specification.path)
+ report_resolvers("zip finder, unknown archive '%s'",archive)
end
end
end
if trace_locating then
- report_resolvers("zip finder, '%s' not found",filename)
+ report_resolvers("zip finder, '%s' not found",original)
end
- return unpack(finders.notfound)
+ return resolvers.finders.notfound()
end
-function openers.zip(specification)
- local zipspecification = resolvers.splitmethod(specification)
- if zipspecification.path then
- local q = url.query(zipspecification.query)
- if q.name then
- local zfile = zip.openarchive(zipspecification.path)
+function resolvers.openers.zip(specification)
+ local original = specification.original
+ local archive = specification.filename
+ if archive then
+ local query = url.query(specification.query)
+ local queryname = query.name
+ if queryname then
+ local zfile = zip.openarchive(archive)
if zfile then
if trace_locating then
- report_resolvers("zip opener, archive '%s' opened",zipspecification.path)
+ report_resolvers("zip opener, archive '%s' opened",archive)
end
- local dfile = zfile:open(q.name)
+ local dfile = zfile:open(queryname)
if dfile then
- logs.show_open(specification)
+ logs.show_open(original)
if trace_locating then
- report_resolvers("zip opener, file '%s' found",q.name)
+ report_resolvers("zip opener, file '%s' found",queryname)
end
- return openers.textopener('zip',specification,dfile)
+ return resolvers.openers.helpers.textopener('zip',original,dfile)
elseif trace_locating then
- report_resolvers("zip opener, file '%s' not found",q.name)
+ report_resolvers("zip opener, file '%s' not found",queryname)
end
elseif trace_locating then
- report_resolvers("zip opener, unknown archive '%s'",zipspecification.path)
+ report_resolvers("zip opener, unknown archive '%s'",archive)
end
end
end
if trace_locating then
- report_resolvers("zip opener, '%s' not found",filename)
+ report_resolvers("zip opener, '%s' not found",original)
end
- return unpack(openers.notfound)
+ return resolvers.openers.notfound()
end
-function loaders.zip(specification)
- specification = resolvers.splitmethod(specification)
- if specification.path then
- local q = url.query(specification.query)
- if q.name then
- local zfile = zip.openarchive(specification.path)
+function resolvers.loaders.zip(specification)
+ local original = specification.original
+ local archive = specification.filename
+ if archive then
+ local query = url.query(specification.query)
+ local queryname = query.name
+ if queryname then
+ local zfile = zip.openarchive(archive)
if zfile then
if trace_locating then
- report_resolvers("zip loader, archive '%s' opened",specification.path)
+ report_resolvers("zip loader, archive '%s' opened",archive)
end
- local dfile = zfile:open(q.name)
+ local dfile = zfile:open(queryname)
if dfile then
- logs.show_load(filename)
+ logs.show_load(original)
if trace_locating then
- report_resolvers("zip loader, file '%s' loaded",filename)
+ report_resolvers("zip loader, file '%s' loaded",original)
end
local s = dfile:read("*all")
dfile:close()
return true, s, #s
elseif trace_locating then
- report_resolvers("zip loader, file '%s' not found",q.name)
+ report_resolvers("zip loader, file '%s' not found",queryname)
end
elseif trace_locating then
- report_resolvers("zip loader, unknown archive '%s'",specification.path)
+ report_resolvers("zip loader, unknown archive '%s'",archive)
end
end
end
if trace_locating then
- report_resolvers("zip loader, '%s' not found",filename)
+ report_resolvers("zip loader, '%s' not found",original)
end
- return unpack(openers.notfound)
+ return resolvers.openers.notfound()
end
-- zip:///somefile.zip
-- zip:///somefile.zip?tree=texmf-local -> mount
-function resolvers.usezipfile(zipname)
- zipname = validzip(zipname)
- local specification = resolvers.splitmethod(zipname)
- local zipfile = specification.path
- if zipfile and not registeredfiles[zipname] then
- local tree = url.query(specification.query).tree or ""
- local z = zip.openarchive(zipfile)
+function resolvers.usezipfile(archive)
+ local specification = resolvers.splitmethod(archive) -- to be sure
+ local archive = specification.filename
+ if archive and not registeredfiles[archive] then
+ local z = zip.openarchive(archive)
if z then
- local instance = resolvers.instance
+ local tree = url.query(specification.query).tree or ""
if trace_locating then
- report_resolvers("zip registering, registering archive '%s'",zipname)
+ report_resolvers("zip registering, registering archive '%s'",archive)
end
- statistics.starttiming(instance)
- resolvers.prependhash('zip',zipname,zipfile)
- resolvers.extendtexmfvariable(zipname) -- resets hashes too
- registeredfiles[zipname] = z
- instance.files[zipname] = resolvers.registerzipfile(z,tree or "")
- statistics.stoptiming(instance)
+ statistics.starttiming(resolvers.instance)
+ resolvers.prependhash('zip',archive)
+ resolvers.extendtexmfvariable(archive) -- resets hashes too
+ registeredfiles[archive] = z
+ resolvers.instance.files[archive] = resolvers.registerzipfile(z,tree)
+ statistics.stoptiming(resolvers.instance)
elseif trace_locating then
- report_resolvers("zip registering, unknown archive '%s'",zipname)
+ report_resolvers("zip registering, unknown archive '%s'",archive)
end
elseif trace_locating then
- report_resolvers("zip registering, '%s' not found",zipname)
+ report_resolvers("zip registering, '%s' not found",archive)
end
end
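Tying the handlers above together, a small usage sketch (the archive and file names are illustrative; the direct calls assume the specification table that resolvers.splitmethod produces, as in usezipfile above):

    -- mount an archive so its ?tree= part takes part in normal lookups
    resolvers.usezipfile("zip:///somefile.zip?tree=texmf-local")

    -- address one file inside an archive explicitly
    local spec           = resolvers.splitmethod("zip:///somefile.zip?name=tex/somefile.tex")
    local found          = resolvers.finders.zip(spec) -- the original url, or notfound
    local ok, data, size = resolvers.loaders.zip(spec) -- the zipped file's content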
diff --git a/tex/context/base/font-ini.mkiv b/tex/context/base/font-ini.mkiv
index 3a479497b..326cc0d53 100644
--- a/tex/context/base/font-ini.mkiv
+++ b/tex/context/base/font-ini.mkiv
@@ -2762,6 +2762,12 @@
mark=yes,mkmk=yes,kern=yes,curs=yes]
\definefontfeature
+ [simplearabic]
+ [mode=node,language=dflt,script=arab,
+ init=yes,medi=yes,fina=yes,calt=yes,
+ rlig=yes,curs=yes,mark=yes,mkmk=yes]
+
+\definefontfeature
[none]
[mode=none,features=no]
diff --git a/tex/context/base/grph-fig.mkiv b/tex/context/base/grph-fig.mkiv
index 856821b45..2b745d218 100644
--- a/tex/context/base/grph-fig.mkiv
+++ b/tex/context/base/grph-fig.mkiv
@@ -516,21 +516,11 @@
\dodotypesetbuffer[\jobname][]
\fi\fi}
-% \def\dodotypesetbuffer[#1][#2]%
-% {\bgroup
-% \global\advance\noftypesetbuffers\plusone
-% \edef\bufferfilename{\jobname-buffer-\the\noftypesetbuffers}%
-% \doifmode{*\v!first}
-% {\ctxlua{buffers.save("\bufferfilename.tmp","#1",true)}%
-% \executesystemcommand{context \bufferfilename.tmp}}%
-% \externalfigure[\bufferfilename.pdf][#2]%
-% \egroup}
-
\def\dodotypesetbuffer[#1][#2]%
{\bgroup
\global\advance\noftypesetbuffers\plusone
\edef\bufferfilename{\jobname-buffer-\the\noftypesetbuffers}%
- \ctxlua{buffers.run("\bufferfilename.tmp","#1",true)}%
+ \ctxlua{commands.runbuffer("\bufferfilename.tmp","#1",true)}%
\externalfigure[\bufferfilename.pdf][#2]%
\egroup}
diff --git a/tex/context/base/l-dir.lua b/tex/context/base/l-dir.lua
index b35973eef..2c4ff999b 100644
--- a/tex/context/base/l-dir.lua
+++ b/tex/context/base/l-dir.lua
@@ -35,25 +35,44 @@ end
-- optimizing for no find (*) does not save time
+--~ local function globpattern(path,patt,recurse,action) -- fails in recent luatex due to some change in lfs
+--~ local ok, scanner
+--~ if path == "/" then
+--~ ok, scanner = xpcall(function() return walkdir(path..".") end, function() end) -- kepler safe
+--~ else
+--~ ok, scanner = xpcall(function() return walkdir(path) end, function() end) -- kepler safe
+--~ end
+--~ if ok and type(scanner) == "function" then
+--~ if not find(path,"/$") then path = path .. '/' end
+--~ for name in scanner do
+--~ local full = path .. name
+--~ local mode = attributes(full,'mode')
+--~ if mode == 'file' then
+--~ if find(full,patt) then
+--~ action(full)
+--~ end
+--~ elseif recurse and (mode == "directory") and (name ~= '.') and (name ~= "..") then
+--~ globpattern(full,patt,recurse,action)
+--~ end
+--~ end
+--~ end
+--~ end
+
local function globpattern(path,patt,recurse,action)
- local ok, scanner
if path == "/" then
- ok, scanner = xpcall(function() return walkdir(path..".") end, function() end) -- kepler safe
- else
- ok, scanner = xpcall(function() return walkdir(path) end, function() end) -- kepler safe
+ path = path .. "."
+ elseif not find(path,"/$") then
+ path = path .. '/'
end
- if ok and type(scanner) == "function" then
- if not find(path,"/$") then path = path .. '/' end
- for name in scanner do
- local full = path .. name
- local mode = attributes(full,'mode')
- if mode == 'file' then
- if find(full,patt) then
- action(full)
- end
- elseif recurse and (mode == "directory") and (name ~= '.') and (name ~= "..") then
- globpattern(full,patt,recurse,action)
+ for name in walkdir(path) do
+ local full = path .. name
+ local mode = attributes(full,'mode')
+ if mode == 'file' then
+ if find(full,patt) then
+ action(full)
end
+ elseif recurse and (mode == "directory") and (name ~= '.') and (name ~= "..") then
+ globpattern(full,patt,recurse,action)
end
end
end
diff --git a/tex/context/base/l-file.lua b/tex/context/base/l-file.lua
index a2c461190..fe69c9181 100644
--- a/tex/context/base/l-file.lua
+++ b/tex/context/base/l-file.lua
@@ -378,6 +378,9 @@ local separator = P("://")
local qualified = P(".")^0 * P("/") + letter*P(":") + letter^1*separator + letter^1 * P("/")
local rootbased = P("/") + letter*P(":")
+lpeg.patterns.qualified = qualified
+lpeg.patterns.rootbased = rootbased
+
-- ./name ../name /name c: :// name/name
function file.is_qualified_path(filename)
diff --git a/tex/context/base/l-lpeg.lua b/tex/context/base/l-lpeg.lua
index 940652ee3..be5d56947 100644
--- a/tex/context/base/l-lpeg.lua
+++ b/tex/context/base/l-lpeg.lua
@@ -18,6 +18,7 @@ local patterns = lpeg.patterns
local P, R, S, V, match = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.match
local Ct, C, Cs, Cc, Cf, Cg = lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc, lpeg.Cf, lpeg.Cg
+local lpegtype = lpeg.type
local utfcharacters = string.utfcharacters
local utfgmatch = unicode and unicode.utf8.gmatch
@@ -34,7 +35,6 @@ patterns.alwaysmatched = alwaysmatched
local digit, sign = R('09'), S('+-')
local cr, lf, crlf = P("\r"), P("\n"), P("\r\n")
local newline = crlf + cr + lf
-local utf8next = R("\128\191")
local escaped = P("\\") * anything
local squote = P("'")
local dquote = P('"')
@@ -55,6 +55,8 @@ local utftype = utfbom_32_be / "utf-32-be" + utfbom_32_le / "utf-32-le
+ utfbom_16_be / "utf-16-be" + utfbom_16_le / "utf-16-le"
+ utfbom_8 / "utf-8" + alwaysmatched / "unknown"
+local utf8next = R("\128\191")
+
patterns.utf8one = R("\000\127")
patterns.utf8two = R("\194\223") * utf8next
patterns.utf8three = R("\224\239") * utf8next * utf8next
@@ -285,19 +287,25 @@ end
-- Just for fun I looked at the used bytecode and
-- p = (p and p + pp) or pp gets one more (testset).
-function lpeg.replacer(t)
- if #t > 0 then
- local p
- for i=1,#t do
- local ti= t[i]
- local pp = P(ti[1]) / ti[2]
- if p then
- p = p + pp
- else
- p = pp
+function lpeg.replacer(one,two)
+ if type(one) == "table" then
+ local no = #one
+ if no > 0 then
+ local p
+ for i=1,no do
+ local o = one[i]
+ local pp = P(o[1]) / o[2]
+ if p then
+ p = p + pp
+ else
+ p = pp
+ end
end
+ return Cs((p + 1)^0)
end
- return Cs((p + 1)^0)
+ else
+ two = two or ""
+ return Cs((P(one)/two + 1)^0)
end
end
@@ -521,3 +529,7 @@ function lpeg.oneof(list,...) -- lpeg.oneof("elseif","else","if","then")
end
return p
end
+
+function lpeg.is_lpeg(p)
+ return p and lpegtype(p) == "pattern"
+end
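A short sketch of the two calling conventions the reworked lpeg.replacer accepts, plus the new lpeg.is_lpeg; this assumes the patched l-lpeg.lua is loaded (a stock lpeg provides neither helper):

    -- table form: several from/to pairs folded into one substitution pattern
    local cleaner = lpeg.replacer { { "\\", "/" }, { "|", ":" } }
    print(lpeg.match(cleaner, [[c|\opt\tex]])) -- c:/opt/tex

    -- two-argument form: one from/to pair, the replacement defaults to ""
    local swapper = lpeg.replacer("|", ":")
    print(lpeg.match(swapper, "a|b"))          -- a:b

    print(lpeg.is_lpeg(swapper))               -- true
    print(lpeg.is_lpeg("not a pattern"))       -- false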
diff --git a/tex/context/base/l-string.lua b/tex/context/base/l-string.lua
index 7a45b98bf..69a836787 100644
--- a/tex/context/base/l-string.lua
+++ b/tex/context/base/l-string.lua
@@ -80,7 +80,7 @@ local patterns_escapes = {
["."] = "%.",
["+"] = "%+", ["-"] = "%-", ["*"] = "%*",
["["] = "%[", ["]"] = "%]",
- ["("] = "%)", [")"] = "%)",
+ ["("] = "%(", [")"] = "%)",
-- ["{"] = "%{", ["}"] = "%}"
-- ["^"] = "%^", ["$"] = "%$",
}
diff --git a/tex/context/base/l-url.lua b/tex/context/base/l-url.lua
index f8752f8a3..47d8127de 100644
--- a/tex/context/base/l-url.lua
+++ b/tex/context/base/l-url.lua
@@ -6,72 +6,95 @@ if not modules then modules = { } end modules ['l-url'] = {
license = "see context related readme files"
}
-local char, gmatch, gsub, format, byte = string.char, string.gmatch, string.gsub, string.format, string.byte
+local char, gmatch, gsub, format, byte, find = string.char, string.gmatch, string.gsub, string.format, string.byte, string.find
local concat = table.concat
local tonumber, type = tonumber, type
-local lpegmatch, lpegP, lpegC, lpegR, lpegS, lpegCs, lpegCc = lpeg.match, lpeg.P, lpeg.C, lpeg.R, lpeg.S, lpeg.Cs, lpeg.Cc
+local P, C, R, S, Cs, Cc, Ct = lpeg.P, lpeg.C, lpeg.R, lpeg.S, lpeg.Cs, lpeg.Cc, lpeg.Ct
+local lpegmatch, lpegpatterns, replacer = lpeg.match, lpeg.patterns, lpeg.replacer
--- from the spec (on the web):
+-- from wikipedia:
--
--- foo://example.com:8042/over/there?name=ferret#nose
--- \_/ \______________/\_________/ \_________/ \__/
--- | | | | |
--- scheme authority path query fragment
--- | _____________________|__
--- / \ / \
--- urn:example:animal:ferret:nose
+-- foo://username:password@example.com:8042/over/there/index.dtb?type=animal;name=narwhal#nose
+-- \_/ \_______________/ \_________/ \__/ \___/ \_/ \______________________/ \__/
+-- | | | | | | | |
+-- | userinfo hostname port | | query fragment
+-- | \________________________________/\_____________|____|/
+-- scheme | | | |
+-- | authority path | |
+-- | | |
+-- | path interpretable as filename
+-- | ___________|____________ |
+-- / \ / \ |
+-- urn:example:animal:ferret:nose interpretable as extension
url = url or { }
local url = url
-local function tochar(s)
- return char(tonumber(s,16))
-end
+local tochar = function(s) return char(tonumber(s,16)) end
-local colon, qmark, hash, slash, percent, endofstring = lpegP(":"), lpegP("?"), lpegP("#"), lpegP("/"), lpegP("%"), lpegP(-1)
+local colon = P(":")
+local qmark = P("?")
+local hash = P("#")
+local slash = P("/")
+local percent = P("%")
+local endofstring = P(-1)
-local hexdigit = lpegR("09","AF","af")
-local plus = lpegP("+")
-local nothing = lpegCc("")
-local escaped = (plus / " ") + (percent * lpegC(hexdigit * hexdigit) / tochar)
+local hexdigit = R("09","AF","af")
+local plus = P("+")
+local nothing = Cc("")
+local escaped = (plus / " ") + (percent * C(hexdigit * hexdigit) / tochar)
-- we assume schemes with more than 1 character (in order to avoid problems with windows disks)
-local scheme = lpegCs((escaped+(1-colon-slash-qmark-hash))^2) * colon + nothing
-local authority = slash * slash * lpegCs((escaped+(1- slash-qmark-hash))^0) + nothing
-local path = slash * lpegCs((escaped+(1- qmark-hash))^0) + nothing
-local query = qmark * lpegCs((escaped+(1- hash))^0) + nothing
-local fragment = hash * lpegCs((escaped+(1- endofstring))^0) + nothing
-
-local parser = lpeg.Ct(scheme * authority * path * query * fragment)
+local scheme = Cs((escaped+(1-colon-slash-qmark-hash))^2) * colon + nothing
+local authority = slash * slash * Cs((escaped+(1- slash-qmark-hash))^0) + nothing
+local path = slash * Cs((escaped+(1- qmark-hash))^0) + nothing
+local query = qmark * Cs((escaped+(1- hash))^0) + nothing
+local fragment = hash * Cs((escaped+(1- endofstring))^0) + nothing
-lpeg.patterns.urlsplitter = parser
+local parser = Ct(scheme * authority * path * query * fragment)
-local escapes = { }
+lpegpatterns.urlsplitter = parser
-for i=0,255 do
- escapes[i] = format("%%%02X",i)
-end
+local escapes = { } ; for i=0,255 do escapes[i] = format("%%%02X",i) end
-local escaper = lpeg.Cs((lpegR("09","AZ","az") + lpegS("-./_") + lpegP(1) / escapes)^0)
+local escaper = Cs((R("09","AZ","az") + S("-./_") + P(1) / escapes)^0)
-lpeg.patterns.urlescaper = escaper
+lpegpatterns.urlescaper = escaper
-- todo: reconsider Ct as we can as well have five return values (saves a table)
-- so we can have two parsers, one with and one without
-function url.split(str)
+local function split(str)
return (type(str) == "string" and lpegmatch(parser,str)) or str
end
+local function hasscheme(str)
+ local scheme = lpegmatch(scheme,str) -- at least one character
+ return scheme and scheme ~= ""
+end
+
-- todo: cache them
-function url.hashed(str) -- not yet ok (/test?test)
- local s = url.split(str)
+local rootletter = R("az","AZ")
+ + S("_-+")
+local separator = P("://")
+local qualified = P(".")^0 * P("/")
+ + rootletter * P(":")
+ + rootletter^1 * separator
+ + rootletter^1 * P("/")
+local rootbased = P("/")
+ + rootletter * P(":")
+
+local barswapper = replacer("|",":")
+local backslashswapper = replacer("\\","/")
+
+local function hashed(str) -- not yet ok (/test?test)
+ local s = split(str)
local somescheme = s[1] ~= ""
local somequery = s[4] ~= ""
if not somescheme and not somequery then
- return {
+ s = {
scheme = "file",
authority = "",
path = str,
@@ -79,53 +102,81 @@ function url.hashed(str) -- not yet ok (/test?test)
fragment = "",
original = str,
noscheme = true,
+ filename = str,
}
- else
- return {
+ else -- not always a filename but handy anyway
+ local authority, path, filename = s[2], s[3]
+ if authority == "" then
+ filename = path
+ else
+ filename = authority .. "/" .. path
+ end
+ s = {
scheme = s[1],
- authority = s[2],
- path = s[3],
+ authority = authority,
+ path = path,
query = s[4],
fragment = s[5],
original = str,
noscheme = false,
+ filename = filename,
}
end
+ return s
end
---~ table.print(url.hashed("/test?test"))
+-- Here we assume:
+--
+-- files: /// = relative
+-- files: //// = absolute (!)
-function url.hasscheme(str)
- return url.split(str)[1] ~= ""
-end
+--~ table.print(hashed("file://c:/opt/tex/texmf-local")) -- c:/opt/tex/texmf-local
+--~ table.print(hashed("file://opt/tex/texmf-local" )) -- opt/tex/texmf-local
+--~ table.print(hashed("file:///opt/tex/texmf-local" )) -- opt/tex/texmf-local
+--~ table.print(hashed("file:////opt/tex/texmf-local" )) -- /opt/tex/texmf-local
+--~ table.print(hashed("file:///./opt/tex/texmf-local" )) -- ./opt/tex/texmf-local
+
+--~ table.print(hashed("c:/opt/tex/texmf-local" )) -- c:/opt/tex/texmf-local
+--~ table.print(hashed("opt/tex/texmf-local" )) -- opt/tex/texmf-local
+--~ table.print(hashed("/opt/tex/texmf-local" )) -- /opt/tex/texmf-local
-function url.addscheme(str,scheme)
- return (url.hasscheme(str) and str) or ((scheme or "file:///") .. str)
+url.split = split
+url.hasscheme = hasscheme
+url.hashed = hashed
+
+function url.addscheme(str,scheme) -- no authority
+ if hasscheme(str) then
+ return str
+ elseif not scheme then
+ return "file:///" .. str
+ else
+ return scheme .. ":///" .. str
+ end
end
function url.construct(hash) -- dodo: we need to escape !
- local fullurl = { }
+ local fullurl, f = { }, 0
local scheme, authority, path, query, fragment = hash.scheme, hash.authority, hash.path, hash.query, hash.fragment
if scheme and scheme ~= "" then
- fullurl[#fullurl+1] = scheme .. "://"
+ f = f + 1 ; fullurl[f] = scheme .. "://"
end
if authority and authority ~= "" then
- fullurl[#fullurl+1] = authority
+ f = f + 1 ; fullurl[f] = authority
end
if path and path ~= "" then
- fullurl[#fullurl+1] = "/" .. path
+ f = f + 1 ; fullurl[f] = "/" .. path
end
if query and query ~= "" then
- fullurl[#fullurl+1] = "?".. query
+ f = f + 1 ; fullurl[f] = "?".. query
end
if fragment and fragment ~= "" then
- fullurl[#fullurl+1] = "#".. fragment
+ f = f + 1 ; fullurl[f] = "#".. fragment
end
return lpegmatch(escaper,concat(fullurl))
end
function url.filename(filename)
- local t = url.hashed(filename)
+ local t = hashed(filename)
return (t.scheme == "file" and (gsub(t.path,"^/([a-zA-Z])([:|])/)","%1:"))) or filename
end
@@ -186,3 +237,5 @@ end
--~ test("zip:///oeps/oeps.zip#bla/bla.tex")
--~ test("zip:///oeps/oeps.zip?bla/bla.tex")
+
+--~ table.print(url.hashed("/test?test"))
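The new filename field is what the resolver code elsewhere in this patch keys on; a quick sketch of the reworked interface, with results as the parser above produces them:

    local t = url.hashed("zip:///archive.zip?name=oeps/oeps.tex")
    -- t.scheme   : "zip"
    -- t.path     : "archive.zip"
    -- t.query    : "name=oeps/oeps.tex"
    -- t.filename : "archive.zip" -- authority .. "/" .. path when an authority is present

    print(url.addscheme("oeps/oeps.tex"))       -- file:///oeps/oeps.tex
    print(url.addscheme("oeps/oeps.tex","zip")) -- zip:///oeps/oeps.tex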
diff --git a/tex/context/base/lpdf-fmt.lua b/tex/context/base/lpdf-fmt.lua
index a5152c461..348f6eada 100644
--- a/tex/context/base/lpdf-fmt.lua
+++ b/tex/context/base/lpdf-fmt.lua
@@ -344,7 +344,7 @@ local filenames = {
local function locatefile(filename)
local fullname = resolvers.findfile(filename,"icc")
if not fullname or fullname == "" then
- fullname = resolvers.finders.loc(filename) -- could be specific to the project
+ fullname = resolvers.finders.byscheme("loc",filename) -- could be specific to the project
end
return fullname or ""
end
diff --git a/tex/context/base/luat-lib.mkiv b/tex/context/base/luat-lib.mkiv
index b02a88b51..29de9a0ea 100644
--- a/tex/context/base/luat-lib.mkiv
+++ b/tex/context/base/luat-lib.mkiv
@@ -35,11 +35,13 @@
\registerctxluafile{data-tmp}{1.001}
\registerctxluafile{data-met}{1.001}
\registerctxluafile{data-res}{1.001}
-
-\registerctxluafile{data-pre}{1.001}
\registerctxluafile{data-inp}{1.001}
\registerctxluafile{data-out}{1.001}
+\registerctxluafile{data-fil}{1.001}
+
+\registerctxluafile{data-pre}{1.001}
\registerctxluafile{data-tex}{1.001}
+\registerctxluafile{data-vir}{1.001}
\registerctxluafile{data-bin}{1.001}
\registerctxluafile{data-zip}{1.001}
%registerctxluafile{data-crl}{1.001}
diff --git a/tex/context/base/luat-mac.lua b/tex/context/base/luat-mac.lua
index f4f7779da..4a9e53b0e 100644
--- a/tex/context/base/luat-mac.lua
+++ b/tex/context/base/luat-mac.lua
@@ -6,7 +6,7 @@ if not modules then modules = { } end modules ['luat-mac'] = {
license = "see context related readme files"
}
-local P, V, S, R, C, Cs = lpeg.P, lpeg.V, lpeg.S, lpeg.R, lpeg.C, lpeg.Cs
+local P, V, S, R, C, Cs, Cmt = lpeg.P, lpeg.V, lpeg.S, lpeg.R, lpeg.C, lpeg.Cs, lpeg.Cmt
local lpegmatch, patterns = lpeg.match, lpeg.patterns
local insert, remove = table.insert, table.remove
@@ -64,8 +64,9 @@ local spaces = space^1
local newline = patterns.newline
local nobrace = 1 - leftbrace - rightbrace
-local name = R("AZ","az")^1
-local variable = P("#") * name
+local name = R("AZ","az")^1 -- @?! -- utf?
+local longname = (leftbrace/"") * (nobrace^1) * (rightbrace/"")
+local variable = P("#") * Cs(name + longname)
local escapedname = escape * name
local definer = escape * (P("def") + P("egdx") * P("def"))
local startcode = P("\\starttexdefinition")
@@ -78,6 +79,10 @@ local poplocal = always / pop
local declaration = variable / set
local identifier = variable / get
+local function matcherror(str,pos)
+ report_macros("runaway definition at: %s",string.sub(str,pos-30,pos))
+end
+
local grammar = { "converter",
texcode = pushlocal
* startcode
@@ -89,8 +94,8 @@ local grammar = { "converter",
* stopcode
* poplocal,
texbody = ( V("definition")
- + V("braced")
+ identifier
+ + V("braced")
+ (1 - stopcode)
)^0,
definition = pushlocal
@@ -101,12 +106,13 @@ local grammar = { "converter",
* poplocal,
braced = leftbrace
* ( V("definition")
+ + identifier
+ V("texcode")
+ V("braced")
- + identifier
+ nobrace
)^0
- * rightbrace,
+ -- * rightbrace^-1, -- the -1 catches errors
+ * (rightbrace + Cmt(always,matcherror)),
pattern = V("definition") + V("texcode") + anything,
@@ -122,8 +128,8 @@ local checker = P("%") * (1 - newline - P("macros"))^0
local macros = { } resolvers.macros = macros
-function macros.preprocessed(data)
- return lpegmatch(parser,data)
+function macros.preprocessed(str)
+ return lpegmatch(parser,str)
end
function macros.convertfile(oldname,newname)
@@ -136,27 +142,40 @@ function macros.version(data)
return lpegmatch(checker,data)
end
-local function handler(protocol,name,cachename)
- local hashed = url.hashed(name)
- local path = hashed.path
- if path and path ~= "" then
- local data = resolvers.loadtexfile(path)
- data = lpegmatch(parser,data) or ""
- io.savedata(cachename,data)
- end
- return cachename
-end
-
-resolvers.schemes.install('mkvi',handler,1) -- this will cache !
-
function macros.processmkvi(str,filename)
- if file.suffix(filename) == "mkvi" or lpegmatch(checker,str) == "mkvi" then
+ if (filename and file.suffix(filename) == "mkvi") or lpegmatch(checker,str) == "mkvi" then
return lpegmatch(parser,str) or str
else
return str
end
end
-utilities.sequencers.appendaction(resolvers.openers.textfileactions,"system","resolvers.macros.processmkvi")
--- utilities.sequencers.disableaction(resolvers.openers.textfileactions,"resolvers.macros.processmkvi")
+if resolvers.schemes then
+
+ local function handler(protocol,name,cachename)
+ local hashed = url.hashed(name)
+ local path = hashed.path
+ if path and path ~= "" then
+ local data = resolvers.loadtexfile(path)
+ data = lpegmatch(parser,data) or ""
+ io.savedata(cachename,data)
+ end
+ return cachename
+ end
+
+ resolvers.schemes.install('mkvi',handler,1) -- this will cache !
+
+ utilities.sequencers.appendaction(resolvers.openers.helpers.textfileactions,"system","resolvers.macros.processmkvi")
+ -- utilities.sequencers.disableaction(resolvers.openers.helpers.textfileactions,"resolvers.macros.processmkvi")
+
+end
+--~ print(macros.preprocessed([[\def\blä#{blá}{blà:#{blá}}]]))
+--~ print(macros.preprocessed([[\def\blä#bla{blà:#bla}]]))
+--~ print(macros.preprocessed([[\def\bla#bla{bla:#bla}]]))
+--~ print(macros.preprocessed([[\def\test#oeps{test:#oeps}]]))
+--~ print(macros.preprocessed([[\def\test#oeps{test:#{oeps}}]]))
+--~ print(macros.preprocessed([[\def\test#{oeps:1}{test:#{oeps:1}}]]))
+--~ print(macros.preprocessed([[\def\test#{oeps}{test:#oeps}]]))
+--~ macros.preprocessed([[\def\test#{oeps}{test:#oeps \halign{##\cr #oeps\cr}]])
+--~ print(macros.preprocessed([[\def\test#{oeps}{test:#oeps \halign{##\cr #oeps\cr}}]]))
diff --git a/tex/context/base/lxml-ctx.lua b/tex/context/base/lxml-ctx.lua
index 04f1c58e0..1f6f6ffd3 100644
--- a/tex/context/base/lxml-ctx.lua
+++ b/tex/context/base/lxml-ctx.lua
@@ -88,7 +88,7 @@ function xml.ctx.tshow(specification)
context.stoptabulate()
if xmlroot and xmlroot ~= "" then
if not loaded[xmlroot] then
- loaded[xmlroot] = xml.convert(buffers.content(xmlroot) or "")
+ loaded[xmlroot] = xml.convert(buffers.getcontent(xmlroot))
end
local collected = xml.filter(loaded[xmlroot],xmlpattern)
if collected then
diff --git a/tex/context/base/lxml-tex.lua b/tex/context/base/lxml-tex.lua
index bfa64fe65..31de4d5d4 100644
--- a/tex/context/base/lxml-tex.lua
+++ b/tex/context/base/lxml-tex.lua
@@ -402,7 +402,8 @@ function xml.getbuffer(name,compress,entities) -- we need to make sure that comm
name = tex.jobname
end
nofconverted = nofconverted + 1
- xmltostring(lxml.convert(name,concat(buffers.data[name] or {},""),compress,entities))
+ local data = buffers.getcontent(name)
+ xmltostring(lxml.convert(name,data,compress,entities)) -- one buffer
end
function lxml.loadbuffer(id,name,compress,entities)
@@ -411,7 +412,8 @@ function lxml.loadbuffer(id,name,compress,entities)
end
starttiming(xml)
nofconverted = nofconverted + 1
- local xmltable = lxml.convert(id,buffers.collect(name or id,"\n"),compress,entities)
+ local data = buffers.getcontent(name or id)
+ local xmltable = lxml.convert(id,data,compress,entities)
lxml.store(id,xmltable)
stoptiming(xml)
return xmltable, name or id
diff --git a/tex/context/base/m-database.lua b/tex/context/base/m-database.lua
index b922597c7..66bc89649 100644
--- a/tex/context/base/m-database.lua
+++ b/tex/context/base/m-database.lua
@@ -6,7 +6,7 @@ if not modules then modules = { } end modules ['m-database'] = {
license = "see context related readme files"
}
-local sub, gmatch = string.sub, string.gmatch
+local sub, gmatch, format = string.sub, string.gmatch, string.format
local concat = table.concat
local lpegpatterns, lpegmatch, lpegsplitat = lpeg.patterns, lpeg.match, lpeg.splitat
local lpegP, lpegC, lpegS, lpegCt = lpeg.P, lpeg.C, lpeg.S, lpeg.Ct
@@ -35,11 +35,11 @@ function buffers.database.process(settings)
local data
local sprint = trace_flush and tracedsprint or sprint
if settings.type == "file" then
- local filename = resolvers.finders.any(settings.database)
+ local filename = resolvers.finders.byscheme("any",settings.database)
data = filename ~= "" and io.loaddata(filename)
data = data and string.splitlines(data)
else
- data = buffers.raw(settings.database)
+ data = buffers.getcontent(settings.database)
end
if data and #data > 0 then
local separatorchar, quotechar, commentchar = settings.separator, settings.quotechar, settings.commentchar
diff --git a/tex/context/base/m-pstricks.lua b/tex/context/base/m-pstricks.lua
index 4fb80c7ed..360c566ee 100644
--- a/tex/context/base/m-pstricks.lua
+++ b/tex/context/base/m-pstricks.lua
@@ -51,7 +51,7 @@ end
function moduledata.pstricks.process(n)
graphics = graphics + 1
local name = string.format("%s-pstricks-%04i",tex.jobname,graphics)
- local data = buffers.collect("def-"..n)
+ local data = buffers.collectcontent("def-"..n)
local tmpfile = name .. ".tmp"
local epsfile = name .. ".ps"
local pdffile = name .. ".pdf"
diff --git a/tex/context/base/math-def.mkiv b/tex/context/base/math-def.mkiv
index 4d8883c0f..62691c655 100644
--- a/tex/context/base/math-def.mkiv
+++ b/tex/context/base/math-def.mkiv
@@ -240,7 +240,7 @@
#1\crcr\mathstrut\crcr\noalign{\kern-\baselineskip}}}\,}
\definemathcommand [mathstrut] {\vphantom{(}}
-\definemathcommand [joinrel] {\mathrel{\mkern-3mu}}
+\definemathcommand [joinrel] {\mathrel{\mkern-4mu}} % was mistakenly 3mu
% \definemathcommand [matrix] {\PLAINmatrix}
% \definemathcommand [over] {\normalover} % hack, to do
diff --git a/tex/context/base/math-noa.lua b/tex/context/base/math-noa.lua
index e8eea1666..d8b45bb88 100644
--- a/tex/context/base/math-noa.lua
+++ b/tex/context/base/math-noa.lua
@@ -476,17 +476,21 @@ end
--~ return head, true
--~ end
-tasks.new (
- "math",
- {
+tasks.new {
+ name = "math",
+ arguments = 2,
+ sequence = {
"before",
"normalizers",
"builders",
"after",
}
-)
+}
+
+tasks.freezegroup("math", "normalizers") -- experimental
+tasks.freezegroup("math", "builders") -- experimental
-local actions = tasks.actions("math",2) -- head, style, penalties
+local actions = tasks.actions("math") -- head, style, penalties
local starttiming, stoptiming = statistics.starttiming, statistics.stoptiming
diff --git a/tex/context/base/meta-ini.mkiv b/tex/context/base/meta-ini.mkiv
index eec9d71be..917201b27 100644
--- a/tex/context/base/meta-ini.mkiv
+++ b/tex/context/base/meta-ini.mkiv
@@ -678,7 +678,7 @@
% we need this trick because tex.sprint does not interprets newlines and the scanner
% stops at a newline; also, we do need to flush the buffer under a normal catcode
% regime in order to expand embedded tex macros; #1 can be a list
- \processMPgraphic{\ctxlua{buffers.feedback("\currentMPgraphicname")}}%
+ \processMPgraphic{\ctxlua{commands.feedback("\currentMPgraphicname")}}%
\endMPgraphicgroup}}
\def\runMPbuffer
diff --git a/tex/context/base/mult-cld.lua b/tex/context/base/mult-cld.lua
index 32436c5a5..64030724e 100644
--- a/tex/context/base/mult-cld.lua
+++ b/tex/context/base/mult-cld.lua
@@ -37,7 +37,7 @@ local texprint = tex.print
local texiowrite = texio.write
local texcount = tex.count
-local isnode = node.is_node
+local isnode = node.is_node -- after 0.65 just node.type
local writenode = node.write
local ctxcatcodes = tex.ctxcatcodes
@@ -184,12 +184,13 @@ end
-- -- --
-local savedata = resolvers.savers.virtual
+local methodhandler = resolvers.methodhandler
function context.viafile(data)
-- this is the only way to deal with nested buffers
-- and other catcode sensitive data
- context.input(savedata(data))
+ local filename = resolvers.savers.byscheme("virtual","viafile",data)
+ context.input(filename)
end
-- -- --
diff --git a/tex/context/base/mult-ini.lua b/tex/context/base/mult-ini.lua
index be4a7cb69..3e89cf68f 100644
--- a/tex/context/base/mult-ini.lua
+++ b/tex/context/base/mult-ini.lua
@@ -33,6 +33,11 @@ end
local messages, constants, variables = interfaces.messages, interfaces.constants, interfaces.variables
+local valueiskey = { __index = function(t,k) t[k] = k return k end }
+
+setmetatable(variables,valueiskey)
+setmetatable(constants,valueiskey)
+
function interfaces.setmessages(category,str)
local m = messages[category] or { }
for k, v in gmatch(str,"(%S+) *: *(.-) *[\n\r]") do
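The valueiskey metatable added here makes unknown interface keys resolve to (and get cached as) themselves, so lookups in constants and variables never yield nil. A standalone sketch of the pattern (the sample entry is illustrative):

    local valueiskey = { __index = function(t,k) t[k] = k return k end }

    local variables = setmetatable({ yes = "ja" }, valueiskey)

    print(variables.yes)      -- ja       (an explicit entry wins)
    print(variables.whatever) -- whatever (falls back to the key itself, now cached)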
diff --git a/tex/context/base/node-pag.lua b/tex/context/base/node-pag.lua
index f5a6f9d93..9b8202042 100644
--- a/tex/context/base/node-pag.lua
+++ b/tex/context/base/node-pag.lua
@@ -13,7 +13,7 @@ pagebuilders = pagebuilders or { }
local starttiming, stoptiming = statistics.starttiming, statistics.stoptiming
-local actions = nodes.tasks.actions("pagebuilders",5)
+local actions = nodes.tasks.actions("pagebuilders")
local function processor(head,groupcode,size,packtype,maxdepth,direction)
starttiming(pagebuilders)
diff --git a/tex/context/base/node-par.lua b/tex/context/base/node-par.lua
index b153ec6d6..9a4c51759 100644
--- a/tex/context/base/node-par.lua
+++ b/tex/context/base/node-par.lua
@@ -88,7 +88,7 @@ end
-- It makes no sense to have a sequence here as we already have
-- pre and post hooks and only one parbuilder makes sense, so no:
--
--- local actions = nodes.tasks.actions("parbuilders",1)
+-- local actions = nodes.tasks.actions("parbuilders")
-- todo: enable one as main
diff --git a/tex/context/base/node-pro.lua b/tex/context/base/node-pro.lua
index 0ed510cd2..5f0c555a6 100644
--- a/tex/context/base/node-pro.lua
+++ b/tex/context/base/node-pro.lua
@@ -29,7 +29,7 @@ local processors = nodes.processors
-- vbox: grouptype: vbox vtop output split_off split_keep | box_type: exactly|aditional
-- hbox: grouptype: hbox adjusted_hbox(=hbox_in_vmode) | box_type: exactly|aditional
-local actions = tasks.actions("processors",4)
+local actions = tasks.actions("processors")
local n = 0
@@ -117,7 +117,7 @@ end
callbacks.register('pre_linebreak_filter', processors.pre_linebreak_filter,"all kind of horizontal manipulations (before par break)")
callbacks.register('hpack_filter' , processors.hpack_filter,"all kind of horizontal manipulations")
-local actions = tasks.actions("finalizers",1) -- head, where
+local actions = tasks.actions("finalizers") -- head, where
-- beware, these are packaged boxes so no first_glyph test
-- maybe some day a hash with valid groupcodes
diff --git a/tex/context/base/node-shp.lua b/tex/context/base/node-shp.lua
index e637153dc..75177a4ed 100644
--- a/tex/context/base/node-shp.lua
+++ b/tex/context/base/node-shp.lua
@@ -53,7 +53,7 @@ function nodes.handlers.cleanuppage(head)
return cleanup(head), true
end
-local actions = tasks.actions("shipouts",0) -- no extra arguments
+local actions = tasks.actions("shipouts") -- no extra arguments
function nodes.handlers.finalize(head) -- problem, attr loaded before node, todo ...
return actions(head)
diff --git a/tex/context/base/node-tsk.lua b/tex/context/base/node-tsk.lua
index d19d1c6ae..7792d1248 100644
--- a/tex/context/base/node-tsk.lua
+++ b/tex/context/base/node-tsk.lua
@@ -6,7 +6,9 @@ if not modules then modules = { } end modules ['node-tsk'] = {
license = "see context related readme files"
}
--- this might move to task-* .. we already have dirty flags there
+-- This might move to task-* and become less code, as the sequencers
+-- there already have dirty flags as well. On the other hand, nodes are
+-- rather specialized and here we focus on node related tasks.
local trace_tasks = false trackers.register("tasks.creation", function(v) trace_tasks = v end)
@@ -18,28 +20,81 @@ local nodes = nodes
nodes.tasks = nodes.tasks or { }
local tasks = nodes.tasks
-tasks.data = allocate()
-local tasksdata = tasks.data
+
+local tasksdata = { } -- no longer public
local sequencers = utilities.sequencers
-function tasks.new(name,list)
- local tasklist = sequencers.reset()
- tasksdata[name] = { list = tasklist, runner = false }
- for l=1,#list do
- sequencers.appendgroup(tasklist,list[l])
+local frozengroups = "no"
+
+function tasks.freeze(kind)
+ frozengroups = kind or "tolerant" -- todo: hook into jobname
+end
+
+function tasks.new(specification) -- was: name,arguments,list
+ local name = specification.name
+ local arguments = specification.arguments or 0
+ local sequence = specification.sequence
+ if name and sequence then
+ local tasklist = sequencers.reset()
+ tasksdata[name] = {
+ list = tasklist,
+ runner = false,
+ arguments = arguments,
+ -- sequence = sequence,
+ frozen = { },
+ }
+ for l=1,#sequence do
+ sequencers.appendgroup(tasklist,sequence[l])
+ end
end
end
-function tasks.restart(name)
+local function valid(name)
+ local data = tasksdata[name]
+ if not data then
+ report_tasks("unknown task %s",name)
+ else
+ return data
+ end
+end
+
+local function validgroup(name,group,what)
local data = tasksdata[name]
+ if not data then
+ report_tasks("unknown task %s",name)
+ else
+ local frozen = data.frozen[group]
+ if frozen then
+ if frozengroups == "no" then
+ -- default
+ elseif frozengroups == "strict" then
+ report_tasks("warning: group %s of task %s is frozen, %s applied but not supported",group,name,what)
+ return
+ else -- if frozengroups == "tolerant" then
+ report_tasks("warning: group %s of task %s is frozen, %s ignored",group,name,what)
+ end
+ end
+ return data
+ end
+end
+
+function tasks.freezegroup(name,group)
+ local data = valid(name)
+ if data then
+ data.frozen[group] = true
+ end
+end
+
+function tasks.restart(name)
+ local data = valid(name)
if data then
data.runner = false
end
end
function tasks.enableaction(name,action)
- local data = tasksdata[name]
+ local data = valid(name)
if data then
sequencers.enableaction(data.list,action)
data.runner = false
@@ -47,7 +102,7 @@ function tasks.enableaction(name,action)
end
function tasks.disableaction(name,action)
- local data = tasksdata[name]
+ local data = valid(name)
if data then
sequencers.disableaction(data.list,action)
data.runner = false
@@ -55,7 +110,7 @@ function tasks.disableaction(name,action)
end
function tasks.enablegroup(name,group)
- local data = tasksdata[name]
+ local data = validgroup(name,"enable group")
if data then
sequencers.enablegroup(data.list,group)
data.runner = false
@@ -63,7 +118,7 @@ function tasks.enablegroup(name,group)
end
function tasks.disablegroup(name,group)
- local data = tasksdata[name]
+ local data = validgroup(name,"disable group")
if data then
sequencers.disablegroup(data.list,group)
data.runner = false
@@ -71,7 +126,7 @@ function tasks.disablegroup(name,group)
end
function tasks.appendaction(name,group,action,where,kind)
- local data = tasksdata[name]
+ local data = validgroup(name,"append action")
if data then
sequencers.appendaction(data.list,group,action,where,kind)
data.runner = false
@@ -79,7 +134,7 @@ function tasks.appendaction(name,group,action,where,kind)
end
function tasks.prependaction(name,group,action,where,kind)
- local data = tasksdata[name]
+ local data = validgroup(name,"prepend action")
if data then
sequencers.prependaction(data.list,group,action,where,kind)
data.runner = false
@@ -87,7 +142,7 @@ function tasks.prependaction(name,group,action,where,kind)
end
function tasks.removeaction(name,group,action)
- local data = tasksdata[name]
+ local data = validgroup(name,"remove action")
if data then
sequencers.removeaction(data.list,group,action)
data.runner = false
@@ -95,7 +150,7 @@ function tasks.removeaction(name,group,action)
end
function tasks.showactions(name,group,action,where,kind)
- local data = tasksdata[name]
+ local data = valid(name)
if data then
report_tasks("task %s, list:\n%s",name,sequencers.nodeprocessor(data.list))
end
@@ -118,9 +173,10 @@ end)
local compile, nodeprocessor = sequencers.compile, sequencers.nodeprocessor
-function tasks.actions(name,n) -- we optimize for the number or arguments (no ...)
+function tasks.actions(name) -- we optimize for the number of arguments (no ...)
local data = tasksdata[name]
if data then
+ local n = data.arguments or 0
if n == 0 then
return function(head)
total = total + 1 -- will go away
@@ -252,9 +308,12 @@ function tasks.table(name) --maybe move this to task-deb.lua
end
end
-tasks.new (
- "processors",
- {
+-- this will move
+
+tasks.new {
+ name = "processors",
+ arguments = 4,
+ sequence = {
"before", -- for users
"normalizers",
"characters",
@@ -263,11 +322,12 @@ tasks.new (
"lists",
"after", -- for users
}
-)
+}
-tasks.new (
- "finalizers",
- {
+tasks.new {
+ name = "finalizers",
+ arguments = 1,
+ sequence = {
"before", -- for users
"normalizers",
-- "characters",
@@ -276,50 +336,55 @@ tasks.new (
"lists",
"after", -- for users
}
-)
+}
-tasks.new (
- "shipouts",
- {
+tasks.new {
+ name = "shipouts",
+ arguments = 0,
+ sequence = {
"before", -- for users
"normalizers",
"finishers",
"after", -- for users
}
-)
+}
-tasks.new (
- "mvlbuilders",
- {
+tasks.new {
+ name = "mvlbuilders",
+ arguments = 1,
+ sequence = {
"before", -- for users
"normalizers",
"after", -- for users
}
-)
+}
-tasks.new (
- "vboxbuilders",
- {
+tasks.new {
+ name = "vboxbuilders",
+ arguments = 5,
+ sequence = {
"before", -- for users
"normalizers",
"after", -- for users
}
-)
+}
---~ tasks.new (
---~ "parbuilders",
---~ {
+--~ tasks.new {
+--~ name = "parbuilders",
+--~ arguments = 1,
+--~ sequence = {
--~ "before", -- for users
--~ "lists",
--~ "after", -- for users
--~ }
---~ )
+--~ }
---~ tasks.new (
---~ "pagebuilders",
---~ {
+--~ tasks.new {
+--~ name = "pagebuilders",
+--~ arguments = 5,
+--~ sequence = {
--~ "before", -- for users
--~ "lists",
--~ "after", -- for users
--~ }
---~ )
+--~ }
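A sketch of the new specification style of defining and running a task, matching the math task redefinition earlier in this patch (the task, group and action names are illustrative):

    nodes.tasks.new {
        name      = "mytask",
        arguments = 2, -- the runner is called as runner(head,arg1,arg2)
        sequence  = { "before", "normalizers", "after" },
    }

    nodes.tasks.appendaction("mytask","normalizers","mymodule.handler")
    nodes.tasks.freezegroup ("mytask","normalizers") -- with tasks.freeze() set, later changes are reported

    local runner = nodes.tasks.actions("mytask") -- the argument count now comes from the task itself
    -- local head, done = runner(head,arg1,arg2)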
diff --git a/tex/context/base/regi-ini.lua b/tex/context/base/regi-ini.lua
index eb9a58593..c6456b242 100644
--- a/tex/context/base/regi-ini.lua
+++ b/tex/context/base/regi-ini.lua
@@ -76,44 +76,27 @@ function regimes.translate(line,regime)
return line
end
--- function regimes.enable(regime)
--- regime = synonyms[regime] or regime
--- if data[regime] then
--- regimes.currentregime = regime
--- local translate = regimes.translate
--- resolvers.filters.install('input',function(s)
--- return translate(s,regime)
--- end)
--- else
--- regimes.disable()
--- end
--- end
---
--- function regimes.disable()
--- regimes.currentregime = "utf"
--- resolvers.filters.install('input',nil)
--- end
-
-local sequencers = utilities.sequencers
+local sequencers = utilities.sequencers
+local textfileactions = resolvers.openers.helpers.textfileactions
function regimes.process(s)
- return translate(s,regimes.currentregime)
+ return regimes.translate(s,regimes.currentregime)
end
function regimes.enable(regime)
regime = synonyms[regime] or regime
if data[regime] then
regimes.currentregime = regime
- sequencers.enableaction(resolvers.openers.textfileactions,"regimes.process")
+ sequencers.enableaction(textfileactions,"regimes.process")
else
- sequencers.disableaction(resolvers.openers.textfileactions,"regimes.process")
+ sequencers.disableaction(textfileactions,"regimes.process")
end
end
function regimes.disable()
regimes.currentregime = "utf"
- sequencers.disableaction(resolvers.openers.textfileactions,"regimes.process")
+ sequencers.disableaction(textfileactions,"regimes.process")
end
-utilities.sequencers.prependaction(resolvers.openers.textfileactions,"system","regimes.process")
-utilities.sequencers.disableaction(resolvers.openers.textfileactions,"regimes.process")
+utilities.sequencers.prependaction(textfileactions,"system","regimes.process")
+utilities.sequencers.disableaction(textfileactions,"regimes.process")
diff --git a/tex/context/base/scrn-int.mkiv b/tex/context/base/scrn-int.mkiv
index 7f143f7b4..eea71debc 100644
--- a/tex/context/base/scrn-int.mkiv
+++ b/tex/context/base/scrn-int.mkiv
@@ -293,7 +293,7 @@
{\hbox to \zeropoint
{\doifassignmentelse{#1}{\getparameters[\??cc][#1]}{\getparameters[\??cc][\c!title=#1,#2]}%
\hskip-\@@ccmargin
- \ctxlua{buffers.set("\v!comment\v!buffer", \!!bs\detokenize{#3}\!!es)}%
+ \ctxlua{buffers.assign("\v!comment\v!buffer", \!!bs\detokenize{#3}\!!es)}%
\struttedbox{\tbox{\doinsertcomment{\v!comment\v!buffer}}\hss}}}%
\ignorespaces}
diff --git a/tex/context/base/spac-ver.lua b/tex/context/base/spac-ver.lua
index a856f4cab..958d1ad7a 100644
--- a/tex/context/base/spac-ver.lua
+++ b/tex/context/base/spac-ver.lua
@@ -1331,7 +1331,7 @@ nodes.builders = nodes.builder or { }
local builders = nodes.builders
-local actions = nodes.tasks.actions("vboxbuilders",5)
+local actions = nodes.tasks.actions("vboxbuilders")
function nodes.builders.vpack_filter(head,groupcode,size,packtype,maxdepth,direction)
local done = false
@@ -1359,7 +1359,7 @@ end
-- and we operate on the mlv. Also, we need to do the
-- vspacing last as it removes items from the mvl.
-local actions = nodes.tasks.actions("mvlbuilders",1)
+local actions = nodes.tasks.actions("mvlbuilders")
function nodes.builders.buildpage_filter(groupcode)
starttiming(builders)
diff --git a/tex/context/base/strc-blk.lua b/tex/context/base/strc-blk.lua
index a52fb737a..5040c34e5 100644
--- a/tex/context/base/strc-blk.lua
+++ b/tex/context/base/strc-blk.lua
@@ -101,7 +101,7 @@ function blocks.select(state,name,tag,criterium)
end
function blocks.save(name,tag,buffer) -- wrong, not yet adapted
- local data = buffers.data[buffer]
+ local data = buffers.getcontent(buffer)
local tags = settings_to_set(tag)
local plus, minus = false, false
if tags['+'] then plus = true tags['+'] = nil end
@@ -139,5 +139,5 @@ function blocks.save(name,tag,buffer) -- wrong, not yet adapted
end
end
end
- buffers.data[buffer] = nil
+ buffers.erase(buffer)
end
diff --git a/tex/context/base/strc-ini.lua b/tex/context/base/strc-ini.lua
index 00130b922..88c1d344d 100644
--- a/tex/context/base/strc-ini.lua
+++ b/tex/context/base/strc-ini.lua
@@ -184,7 +184,7 @@ function helpers.title(title,metadata) -- coding is xml is rather old and not th
-- title can contain raw xml
local tag = tags[metadata.kind] or tags.generic
local xmldata = format("<?xml version='1.0'?><%s>%s</%s>",tag,title,tag)
- buffers.set(tag,xmldata)
+ buffers.assign(tag,xmldata)
if trace_processors then
report_processors("putting xml data in buffer: %s",xmldata)
report_processors("processing buffer with setup '%s' and tag '%s'",xmlsetup or "",tag)
diff --git a/tex/context/base/strc-ini.mkiv b/tex/context/base/strc-ini.mkiv
index 8575889e8..b1b11d871 100644
--- a/tex/context/base/strc-ini.mkiv
+++ b/tex/context/base/strc-ini.mkiv
@@ -17,8 +17,6 @@
\unprotect
-% maybe use structurecomponent more consistently as name below
-
% segments: 0:100 2:3 chapter:subsection 3 (=self+2) (alternative: sectionset)
% section : [sectionnumber(s)]
diff --git a/tex/context/base/strc-mat.mkiv b/tex/context/base/strc-mat.mkiv
index 0d2e4a240..dddc771c9 100644
--- a/tex/context/base/strc-mat.mkiv
+++ b/tex/context/base/strc-mat.mkiv
@@ -673,12 +673,12 @@
\dododoformulanumber}
\unexpanded\def\placeformula
- {\doglobal\settrue\insideplaceformula
+ {\global\settrue\insideplaceformula
\settrue\incrementformulanumber
\dodoubleempty\doplaceformula}
\unexpanded\def\placesubformula
- {\doglobal\settrue\insideplacesubformula
+ {\global\settrue\insideplacesubformula
\setfalse\incrementformulanumber
\dodoubleempty\doplaceformula}
diff --git a/tex/context/base/strc-not.mkiv b/tex/context/base/strc-not.mkiv
index 518b97ec6..bd1fe1994 100644
--- a/tex/context/base/strc-not.mkiv
+++ b/tex/context/base/strc-not.mkiv
@@ -19,14 +19,10 @@
% this needs a further cleanup ...
%
-% -- set breakpoin in descriptions
+% -- set breakpoint in descriptions
% -- reset after trialtypesetting
% -- that way we can trick the symbol space
-% obsolete
-
-\let\autopostponenotes\relax
-
% removed:
%
% \pushsomestates
@@ -38,11 +34,6 @@
% saveinsertionbox
% eraseinsertionbackup
% restoreinsertionbackup
-%
-% \def\doprocessnotescs#1#2% #1 == \cs that takes arg
-% {\def\currentnote{#2}\@EA#1\csname\??vn:\currentnote\endcsname}
-% \def\processnotescs#1{\processcommacommand[\noteinsertions]{\doprocessnotescs#1}}
-% \def\noteinsertion #1{\csname\??vn:#1\endcsname}
\def\savenotedata {\writestatus{todo}{save note data}}
\def\restorenotedata {\writestatus{todo}{restore note data}}
@@ -278,10 +269,6 @@
\setupenumerations[\currentnote][]%
\to \everysetupnote
-% \appendtoks
-% \dochecknote
-% \to \everysetupnote
-
\unexpanded\def\setupnote
{\dodoubleempty\dosetupnote}
diff --git a/tex/context/base/strc-sec.mkiv b/tex/context/base/strc-sec.mkiv
index 6fa9348b2..ed0dee7a4 100644
--- a/tex/context/base/strc-sec.mkiv
+++ b/tex/context/base/strc-sec.mkiv
@@ -489,7 +489,7 @@
\c!command=,
#2]%
[#3]%
- \reportcurrentstructure}
+ \reportcurrentstructure}
\unexpanded\def\placeheadtext {\dosingleempty\doplaceheadtext } % use with care
\unexpanded\def\placeheadnumber{\dosingleempty\doplaceheadnumber} % use with care
diff --git a/tex/context/base/supp-fil.lua b/tex/context/base/supp-fil.lua
index 500443f7d..7f573c038 100644
--- a/tex/context/base/supp-fil.lua
+++ b/tex/context/base/supp-fil.lua
@@ -18,6 +18,7 @@ at the <l n='tex'/> side.</p>
local find, gsub, match, format, concat = string.find, string.gsub, string.match, string.format, table.concat
local texcount = tex.count
+local isfile = lfs.isfile
local trace_modules = false trackers.register("modules.loading", function(v) trace_modules = v end)
@@ -28,6 +29,8 @@ local commands = commands
environment = environment or { }
local environment = environment
+local findbyscheme = resolvers.finders.byscheme
+
function commands.checkfilename(str) -- "/whatever..." "c:..." "http://..."
texcount.kindoffile = (find(str,"^/") or find(str,"[%a]:") and 1) or 0
end
@@ -100,62 +103,62 @@ local found = { } -- can best be done in the resolver itself
-- todo: tracing
local function readfilename(specification,backtrack,treetoo)
- local fnd = found[specification]
+ local name = specification.filename
+ local fnd = found[name]
if not fnd then
- local splitspec = resolvers.splitmethod(specification)
- local filename = splitspec.path or ""
- if lfs.isfile(filename) then
- fnd = filename
+ if fnd ~= "" and isfile(name) then
+ fnd = name
end
- if not fnd and backtrack then
- local fname = filename
+ if backtrack and (not fnd or fnd == "") then
+ local fname = name
for i=1,backtrack,1 do
fname = "../" .. fname
- if lfs.isfile(fname) then
+ if isfile(fname) then
fnd = fname
break
end
end
end
if not fnd and treetoo then
---~ fnd = resolvers.findfile(filename)
- fnd = resolvers.findtexfile(filename)
+ fnd = resolvers.findtexfile(name)
end
- found[specification] = fnd
+ found[name] = fnd
end
return fnd or ""
end
-commands.readfilename = readfilename
+function commands.readfilename(filename)
+ return findbyscheme("any",filename)
+end
-function finders.job(filename) return readfilename(filename,nil,false) end -- current path, no backtracking
-function finders.loc(filename) return readfilename(filename,2, false) end -- current path, backtracking
-function finders.sys(filename) return readfilename(filename,nil,true ) end -- current path, obeys tex search
-function finders.fix(filename) return readfilename(filename,2, false) end -- specified path, backtracking
-function finders.set(filename) return readfilename(filename,nil,false) end -- specified path, no backtracking
-function finders.any(filename) return readfilename(filename,2, true ) end -- loc job sys
+function finders.job(specification) return readfilename(specification,false,false) end -- current path, no backtracking
+function finders.loc(specification) return readfilename(specification,2, false) end -- current path, backtracking
+function finders.sys(specification) return readfilename(specification,false,true ) end -- current path, obeys tex search
+function finders.fix(specification) return readfilename(specification,2, false) end -- specified path, backtracking
+function finders.set(specification) return readfilename(specification,false,false) end -- specified path, no backtracking
+function finders.any(specification) return readfilename(specification,2, true ) end -- loc job sys
-openers.job = openers.generic loaders.job = loaders.generic
-openers.loc = openers.generic loaders.loc = loaders.generic
-openers.sys = openers.generic loaders.sys = loaders.generic
-openers.fix = openers.generic loaders.fix = loaders.generic
-openers.set = openers.generic loaders.set = loaders.generic
-openers.any = openers.generic loaders.any = loaders.generic
+openers.job = openers.file loaders.job = loaders.file -- default anyway
+openers.loc = openers.file loaders.loc = loaders.file
+openers.sys = openers.file loaders.sys = loaders.file
+openers.fix = openers.file loaders.fix = loaders.file
+openers.set = openers.file loaders.set = loaders.file
+openers.any = openers.file loaders.any = loaders.file
-function commands.doreadfile(protocol,path,name) -- better do a split and then pass table
- local specification
+function commands.doreadfile(scheme,path,name) -- better do a split and then pass table
+ local fullname
if url.hasscheme(name) then
- specification = name
+ fullname = name
else
- specification = ((path == "") and format("%s:///%s",protocol,name)) or format("%s:///%s/%s",protocol,path,name)
+ fullname = ((path == "") and format("%s:///%s",scheme,name)) or format("%s:///%s/%s",scheme,path,name)
end
- context(resolvers.findtexfile(specification))
+ context(resolvers.findtexfile(fullname)) -- can be more direct
end
--- modules can only have a tex or mkiv suffix or can have a specified one
+-- modules can have a specific suffix or can specify one
-local prefixes = { "m", "p", "s", "x", "t" }
-local suffixes = { "mkiv", "tex" } -- what about cld
+local prefixes = { "m", "p", "s", "x", "v", "t" }
+local suffixes = { "mkiv", "tex", "mkvi" } -- order might change and how about cld
local modstatus = { }
local function usemodule(name,hasscheme)
@@ -172,22 +175,22 @@ local function usemodule(name,hasscheme)
if trace_modules then
report_modules("checking suffix driven file '%s'",name)
end
- foundname = commands.readfilename(name,false,true) or ""
+ foundname = findbyscheme("any",name) or ""
elseif true then
for i=1,#suffixes do
local fullname = file.addsuffix(name,suffixes[i])
if trace_modules then
report_modules("checking suffix driven file '%s'",fullname)
end
- foundname = commands.readfilename(fullname,false,true) or ""
+ foundname = findbyscheme("any",fullname) or ""
if foundname ~= "" then
break
end
end
else
-- -- we don't want a tex file for each mkiv file so we do some checking
- -- local foundtexname = commands.readfilename(file.addsuffix(name,"tex"), false,true) or ""
- -- local foundmkivname = commands.readfilename(file.addsuffix(name,"mkiv"),false,true) or ""
+ -- local foundtexname = readfilename(file.addsuffix(name,"tex"), false,true) or ""
+ -- local foundmkivname = readfilename(file.addsuffix(name,"mkiv"),false,true) or ""
-- if foundtexfile ~= "" and foundmkivfile ~= "" then
-- if file.dirname(foundtexname) == file.dirname(foundmkivname) then
-- foundname = foundtexname -- we assume that this (shared) file loads the mkiv file
diff --git a/tex/context/base/tabl-tab.mkiv b/tex/context/base/tabl-tab.mkiv
index 29e7e86ac..e016c9a24 100644
--- a/tex/context/base/tabl-tab.mkiv
+++ b/tex/context/base/tabl-tab.mkiv
@@ -25,7 +25,9 @@
% and extensions. The documented (and larger) source can be found
% in \type {thrd-tab.tex}.
%
-% Much more can be stripped.
+% Some code has been stripped. Some color has been added. Some macros
+% have been renamed. Registers have been replaced. And probably much
+% more can be cleaned up.
\unprotect
@@ -44,22 +46,22 @@
\def\tablecolumnwidthfactor {10}
\def\tablevspacefactor {2}
\def\tablekernfactor {1}
+\def\tablelinethicknessfactor {4}
\newtoks\everytable
\newtoks\everytableparbox
-\unexpanded\def\tablebeginparbox#1%
+\unexpanded\def\dotablebeginparbox#1%
{\setbox\scratchbox\vtop\bgroup % \setbox added
\hsize#1\relax
\dontcomplain
- \restoretablelineskips
+ \dorestoretablelineskips
\normalbaselines
- \let~\!ttTie
- \let\-\!ttDH
+ \let~\fixedspace
\blank[\v!disable]% % added
\the\everytableparbox}
-\unexpanded\def\tableendparbox
+\unexpanded\def\dotableendparbox
{\removelastskip % itemize or so
\endgraf
\ifnum\prevgraf>\zerocount % we want at least
@@ -82,12 +84,9 @@
\rightskip\zeropoint \!!plus 4em \relax
\to \everytableparbox
-\newtoks\!taTableSpread
+\newskip \tablelefttabskip
+\newskip \tablerighttabskip
-\newskip\tablelefttabskip
-\newskip\tablerighttabskip
-
-\newcount\!taCountA
\newcount\!taColumnNumber
\newcount\!taRecursionLevel % (Initially 0)
@@ -96,20 +95,19 @@
\newdimen\!taDimenC % used by numeric.tex
\newdimen\!taMinimumColumnWidth
-\newtoks\!taToksA
-
-\newtoks\!taPreamble
-\newtoks\!taDataColumnTemplate
-\newtoks\!taRuleColumnTemplate
-\newtoks\!taOldRuleColumnTemplate
-\newtoks\!taLeftGlue
-\newtoks\!taRightGlue
+\newtoks \!taTableSpread
+\newtoks \!taPreamble
+\newtoks \!taDataColumnTemplate
+\newtoks \!taRuleColumnTemplate
+\newtoks \!taOldRuleColumnTemplate
+\newtoks \!taLeftGlue
+\newtoks \!taRightGlue
-\newskip\!taLastRegularTabskip
+\newskip \!taLastRegularTabskip
-\newif\if!taDigit
-\newif\if!taBeginFormat
-\newif\if!taOnceOnlyTabskip
+%newif \if!taDigit
+\newif \if!taBeginFormat
+\newif \if!taOnceOnlyTabskip
\def\!thToksEdef#1=#2%
{\edef\!ttemp{#2}%
@@ -121,29 +119,29 @@
\!thIterate
\let\!thIterate\relax}
-\def\BeginFormat
+\def\dobegintableformat
{\!taPreamble\emptytoks
\!taColumnNumber\zerocount
\skip0=\tableintercolumnspaceunit
\multiply\skip0 \tableintercolumnspacefactor
\divide\skip0 2
- \!taRuleColumnTemplate=\expandafter{\expandafter\tabskip\the\skip0 }%
- \!taLastRegularTabskip=\skip0
+ \!taRuleColumnTemplate\expandafter{\expandafter\tabskip\the\skip0 }%
+ \!taLastRegularTabskip\skip0
\!taOnceOnlyTabskipfalse
\!taBeginFormattrue
\let\!tfRowOfWidths\empty
- \ReadFormatKeys}
+ \doreadtableformatkeys}
\def\!tfSetWidth
- {\ifx\!tfRowOfWidths\empty % true if no prior "w" keys
+ {\ifx\!tfRowOfWidths\empty % true if no prior "w" keys
\ifnum\!taColumnNumber>\zerocount % true if "w" key is to right of first "|"
- \begingroup % RowOfWidths={&\omit || n copies of &\omit&\omit}, where n = number of column to the left of this one
- \!taCountA=1
+ \begingroup % RowOfWidths={&\omit || n copies of &\omit&\omit}, where n = number of column to the left of this one
+ \scratchcounter\plusone
\aftergroup \edef \aftergroup \!tfRowOfWidths \aftergroup {%
\aftergroup &\aftergroup \omit
\!thLoop
- \ifnum \!taCountA<\!taColumnNumber
- \advance\!taCountA 1
+ \ifnum \scratchcounter<\!taColumnNumber
+ \advance\scratchcounter\plusone
\aftergroup \!tfAOAO
\repeat
\aftergroup}%
@@ -160,7 +158,7 @@
\def\!tfSetWidthText[#1]%
{\def\!tfWidthText{#1}%
- \ReadFormatKeys}
+ \doreadtableformatkeys}
\def\!tfSetWidthValue
{\!taMinimumColumnWidth=
@@ -170,7 +168,7 @@
\!tgValue
\fi
\let\!tfWidthText\empty % Override possible prior `w[sample entry]'
- \ReadFormatKeys}
+ \doreadtableformatkeys}
\def\!tfSetTabskip
{\ifnum\!tgCode=\plusone
@@ -190,7 +188,7 @@
\if!taOnceOnlyTabskip\else
\!taLastRegularTabskip=\skip0 % Remember this Tabskip, for possible
\fi % restoration after a subsequent"OnceOnly"
- \ReadFormatKeys}
+ \doreadtableformatkeys}
\def\!tfSetVrule
{\!thToksEdef\!taRuleColumnTemplate
@@ -243,9 +241,8 @@
\!taMinimumColumnWidth\zeropoint
\let\!tfWidthText\empty
\!taOnceOnlyTabskipfalse
- \ReadFormatKeys}
+ \doreadtableformatkeys}
-% UPDATE ROW OF WIDTHS
\def\!tfUpdateRowOfWidths
{\ifx\!tfWidthText\empty \else
\!tfComputeMinColWidth
@@ -272,7 +269,7 @@
\!taBeginFormatfalse
\!ttDoHalign}
-\def\ReFormat[#1]%
+\def\dotablereformat[#1]% will become local
{\omit
\!taDataColumnTemplate{##}%
\!taLeftGlue\emptytoks
@@ -280,12 +277,16 @@
\begingroup
\@@useotherbar
\@@useotherquote
- \expanded{\endgroup\noexpand\ReadFormatKeys#1]}}% appear in a \ReFormat cmd; this is here as a safeguard.
+ \expanded{\endgroup\noexpand\doreadtableformatkeys#1]}}% appear in a \dotablereformat cmd; this is here as a safeguard.
+
+\appendtoks
+ \let\ReFormat\dotablereformat
+\to \everytable
\def\!tfEndReFormat
{\!tfReFormat}
-\appendtoks \TABLEparalignment \to \everytableparbox
+\appendtoks \dotableparalignment \to \everytableparbox
\def\!tfReFormat#1%
{\the \!taLeftGlue
@@ -318,32 +319,35 @@
\expandafter \!tgCheckForDigit
\fi}
+% \def\!tgCheckForDigit
+% {\!taDigitfalse
+% \ifx 0\!ttemp \!taDigittrue
+% \else\ifx 1\!ttemp \!taDigittrue
+% \else\ifx 2\!ttemp \!taDigittrue
+% \else\ifx 3\!ttemp \!taDigittrue
+% \else\ifx 4\!ttemp \!taDigittrue
+% \else\ifx 5\!ttemp \!taDigittrue
+% \else\ifx 6\!ttemp \!taDigittrue
+% \else\ifx 7\!ttemp \!taDigittrue
+% \else\ifx 8\!ttemp \!taDigittrue
+% \else\ifx 9\!ttemp \!taDigittrue
+% \fi\fi\fi\fi\fi\fi\fi\fi\fi\fi
+% \if!taDigit
+% \expandafter \!tgGetNumber
+% \else
+% \expandafter \!tgReturn
+% \fi}
+
\def\!tgCheckForDigit
- {\!taDigitfalse
- \ifx 0\!ttemp \!taDigittrue
- \else\ifx 1\!ttemp \!taDigittrue
- \else\ifx 2\!ttemp \!taDigittrue
- \else\ifx 3\!ttemp \!taDigittrue
- \else\ifx 4\!ttemp \!taDigittrue
- \else\ifx 5\!ttemp \!taDigittrue
- \else\ifx 6\!ttemp \!taDigittrue
- \else\ifx 7\!ttemp \!taDigittrue
- \else\ifx 8\!ttemp \!taDigittrue
- \else\ifx 9\!ttemp \!taDigittrue
- \fi\fi\fi\fi\fi\fi\fi\fi\fi\fi
- \if!taDigit
- \expandafter \!tgGetNumber
- \else
- \expandafter \!tgReturn
- \fi}
+ {\doifnumberelse\!ttemp\!tgGetNumber\!tgReturn}
-\def\!tgGetNumber {\afterassignment\!tgGetNumberA\!taCountA=}
-\def\!tgGetNumberA{\edef\!tgValue{\the\!taCountA}\!tgReturn}
+\def\!tgGetNumber {\afterassignment\!tgGetNumberA\scratchcounter=}
+\def\!tgGetNumberA{\edef\!tgValue{\the\scratchcounter}\!tgReturn}
\def\!tgSetUpParBox
{\normalexpanded
- {\noexpand \ReadFormatKeys
- b{\tablebeginparbox
+ {\noexpand \doreadtableformatkeys
+ b{\dotablebeginparbox
{\ifnum \!tgCode=1
\ifx \!tgValue\empty
\tablecolumnwidthfactor
@@ -354,7 +358,7 @@
\else
\!tgValue
\fi}}%
- a{\tableendparbox}}}
+ a{\dotableendparbox}}}
\def\!tgInsertKern
{\edef\!ttemp
@@ -370,7 +374,7 @@
\!tgValue
\fi}%
\edef\!ttemp
- {\noexpand\ReadFormatKeys
+ {\noexpand\doreadtableformatkeys
\ifconditional\tablehasleftspacing
b{\!ttemp}
\fi
@@ -379,70 +383,70 @@
\fi}%
\!ttemp}
-\def\NewFormatKey #1{\setvalue{!tk<\string#1>}}
-\def\ReadFormatKeys#1{\getvalue{!tk<\string#1>}}
+\def\newtableformatkey #1{\setvalue{!tk<\string#1>}}
+\def\doreadtableformatkeys#1{\getvalue{!tk<\string#1>}}
% Key "b": b{TOKENS} adds TOKENS to the left of (before) the template
-\NewFormatKey b#1%
+\newtableformatkey b#1%
{\expandafter\!tkJoin\expandafter{\the\!taDataColumnTemplate}{#1}%
- \ReadFormatKeys}
+ \doreadtableformatkeys}
\def\!tkJoin#1#2%
{\!taDataColumnTemplate{#2#1}}%
% Key "a": a{TOKENS} adds TOKENS to the right of (after) the template
-\NewFormatKey a#1%
+\newtableformatkey a#1%
{\!taDataColumnTemplate\expandafter{\the\!taDataColumnTemplate #1}%
- \ReadFormatKeys}
+ \doreadtableformatkeys}
% Key "\{": Enclose template in braces.
-\NewFormatKey \{%
+\newtableformatkey \{%
{\!taDataColumnTemplate=\expandafter{\expandafter{\the\!taDataColumnTemplate}}%
- \ReadFormatKeys}
+ \doreadtableformatkeys}
% Key "*": "*{N}{KEY LETTERS}" is equivalent to specifying
% <KEY LETTERS> N times.
% KEY LETTERS may contain further * specifications
-\NewFormatKey *#1#2%
- {\!taCountA=#1\relax
- \!taToksA={}%
+\newtableformatkey *#1#2%
+ {\scratchcounter=#1\relax
+ \scratchtoks\emptytoks
\!thLoop
- \ifnum \!taCountA > 0
- \!taToksA\expandafter{\the\!taToksA #2}%
- \advance\!taCountA -1
+ \ifnum\scratchcounter>\zerocount
+ \scratchtoks\expandafter{\the\scratchtoks#2}%
+ \advance\scratchcounter\minusone
\repeat
- \expandafter\ReadFormatKeys\the\!taToksA}
+ \expandafter\doreadtableformatkeys\the\scratchtoks}
% Key "\LeftGlue": Specifies the glue (usually \hfil, or nothing) to be
% added to extreme left of the template to position a column
-\NewFormatKey \LeftGlue#1%
+\newtableformatkey \LeftGlue#1%
{\!taLeftGlue{#1}%
- \ReadFormatKeys}
+ \doreadtableformatkeys}
-\NewFormatKey \RightGlue#1%
+\newtableformatkey \RightGlue#1%
{\!taRightGlue{#1}%
- \ReadFormatKeys}
+ \doreadtableformatkeys}
-\NewFormatKey c%
+\newtableformatkey c%
{\prependtoks\raggedcenter\to\!taDataColumnTemplate
- \ReadFormatKeys \LeftGlue\hfil \RightGlue\hfil}
+ \doreadtableformatkeys \LeftGlue\hfil \RightGlue\hfil}
-\NewFormatKey l%
+\newtableformatkey l%
{\prependtoks\raggedright\to\!taDataColumnTemplate
- \ReadFormatKeys \LeftGlue\empty \RightGlue\hfil}
+ \doreadtableformatkeys \LeftGlue\empty \RightGlue\hfil}
-\NewFormatKey r%
+\newtableformatkey r%
{\prependtoks\raggedleft\to\!taDataColumnTemplate
- \ReadFormatKeys \LeftGlue\hfil \RightGlue\empty}
+ \doreadtableformatkeys \LeftGlue\hfil \RightGlue\empty}
-\NewFormatKey x%
+\newtableformatkey x%
{\prependtoks\notragged\to\!taDataColumnTemplate
- \ReadFormatKeys \LeftGlue\hfil \RightGlue\empty}
+ \doreadtableformatkeys \LeftGlue\hfil \RightGlue\empty}
% Key "k": Adds kerns to left and right of "#"
% This key and the two below use Plain TeX's \if@h as if it were \if@left,
@@ -450,83 +454,83 @@
% so even in the unlikely circumstance that a \phantom is currently under
% construction, there's no problem.
-\NewFormatKey k%
+\newtableformatkey k%
{\settrue\tablehasleftspacing
\settrue\tablehasrightspacing
\!tgGetValue{\!tgInsertKern}}
% Key "i": Adds a kern to the left of "#"
-\NewFormatKey i%
+\newtableformatkey i%
{\settrue\tablehasleftspacing
\setfalse\tablehasrightspacing
\!tgGetValue{\!tgInsertKern}}
% Key "j": Adds a kern to the right of "#"
-\NewFormatKey j%
+\newtableformatkey j%
{\setfalse\tablehasleftspacing
\settrue\tablehasrightspacing
\!tgGetValue{\!tgInsertKern}}
% Key "n": numeric item , non-math mode.
-\NewFormatKey n%
+\newtableformatkey n%
{\def\!tnStyle{}%
\futurelet\!tnext\!tnTestForBracket}
% Key "N": numeric item, math mode.
-\NewFormatKey N%
+\newtableformatkey N%
{\def\!tnStyle{$}%
\futurelet\!tnext\!tnTestForBracket}
% Key "m": Math mode.
-\NewFormatKey m%
- {\ReadFormatKeys b$ a$}
+\newtableformatkey m%
+ {\doreadtableformatkeys b$ a$}
% Key "M": Displaymath mode.
-\NewFormatKey M%
- {\ReadFormatKeys \{ b{$\displaystyle} a$}
+\newtableformatkey M%
+ {\doreadtableformatkeys \{ b{$\displaystyle} a$}
% Key "\m": Template ${}#\hfil$
-\NewFormatKey \m%
- {\ReadFormatKeys l b{{}} m}
+\newtableformatkey \m%
+ {\doreadtableformatkeys l b{{}} m}
% Key "\M": Template $\displaystyle{{}#\hfil}$
-\NewFormatKey \M%
- {\ReadFormatKeys l b{{}} M}
+\newtableformatkey \M%
+ {\doreadtableformatkeys l b{{}} M}
% Key "f": Set font (E.g., f\it sets up italic font (assuming \it
% has its usual meaning)
-\NewFormatKey f#1%
- {\ReadFormatKeys b{#1}}
+\newtableformatkey f#1%
+ {\doreadtableformatkeys b{#1}}
-\NewFormatKey B{\ReadFormatKeys f\bf} % Key "B": abbreviation for f\bf
-\NewFormatKey I{\ReadFormatKeys f\it} % Key "I": abbreviation for f\it
-\NewFormatKey S{\ReadFormatKeys f\sl} % Key "S": abbreviation for f\sl
-\NewFormatKey R{\ReadFormatKeys f\rm} % Key "R": abbreviation for f\rm
-\NewFormatKey T{\ReadFormatKeys f\tt} % Key "T": abbreviation for f\tt
+\newtableformatkey B{\doreadtableformatkeys f\bf} % Key "B": abbreviation for f\bf
+\newtableformatkey I{\doreadtableformatkeys f\it} % Key "I": abbreviation for f\it
+\newtableformatkey S{\doreadtableformatkeys f\sl} % Key "S": abbreviation for f\sl
+\newtableformatkey R{\doreadtableformatkeys f\rm} % Key "R": abbreviation for f\rm
+\newtableformatkey T{\doreadtableformatkeys f\tt} % Key "T": abbreviation for f\tt
% Key "p": ParBox
-\NewFormatKey p%
+\newtableformatkey p%
{\!tgGetValue{\!tgSetUpParBox}}
% Key "w": minimum column width
-\NewFormatKey w%
+\newtableformatkey w%
{\!tkTestForBeginFormat w{\!tgGetValue{\!tfSetWidth}}}
% Key "s": Set tabskip for the inter-column space to the right
% of the current column, and all subsequent spaces, until overridden
% by a new "s" or "o" key.
-\NewFormatKey s%
+\newtableformatkey s%
{\!taOnceOnlyTabskipfalse % in case same column has a prior "o" key
\!tkTestForBeginFormat t{\!tgGetValue{\!tfSetTabskip}}}
@@ -534,70 +538,68 @@
% inter-column space just to the right of this column; restore the
% previous \tabskip for subsequent columns.
-\NewFormatKey o%
+\newtableformatkey o%
{\!taOnceOnlyTabskiptrue
\!tkTestForBeginFormat o{\!tgGetValue{\!tfSetTabskip}}}
% Key "|": Standard rule column designator
-\NewFormatKey |%
+\newtableformatkey |%
{\!tkTestForBeginFormat |{\!tgGetValue{\!tfSetVrule}}}
% Key "\|": Non-standard rule column designator
-\NewFormatKey \|%
+\newtableformatkey \|%
{\!tkTestForBeginFormat \|{\!tfSetAlternateVrule}}
-% Key ".": PERIOD -- end of \BeginFormat section.
+% Key ".": PERIOD -- end of \dobegintableformat section.
-\NewFormatKey .%
+\newtableformatkey .%
{\!tkTestForBeginFormat.{\!tfFinishFormat}}
-% Key "\EndFormat": Equivalent to "."
+% Key "\doendtableformat": Equivalent to "."
-\NewFormatKey \EndFormat
- {\!tkTestForBeginFormat\EndFormat{\!tfFinishFormat}}
+\newtableformatkey \doendtableformat
+ {\!tkTestForBeginFormat\doendtableformat{\!tfFinishFormat}}
-% Key "]": End of \ReFormat section
+% Key "]": End of \dotablereformat section
-\NewFormatKey ]%
+\newtableformatkey ]%
{\!tkTestForReFormat ] \!tfEndReFormat}
% TEST FOR BEGIN FORMAT{<Key>}{Intended Action}: This test is run
-% on keys that can only be used by \BeginFormat --- "s", "o",
-% "|", "\|", "w", ".", and "\EndFormat".
+% on keys that can only be used by \dobegintableformat --- "s", "o",
+% "|", "\|", "w", ".", and "\doendtableformat".
\def\!tkTestForBeginFormat#1#2%
{\if!taBeginFormat
\def\!ttemp{#2}%
- \expandafter \!ttemp
+ \expandafter\!ttemp
\else
\toks0={#1}%
- \toks2=\expandafter{\string\ReFormat}%
- \expandafter \!tkImproperUse
+ \toks2=\expandafter{\string\dotablereformat}%
+ \expandafter\!tkImproperUse
\fi}
% TEST FOR RE FORMAT{<Key>}{Intended Action}: This test is run
-% on the key "]", which can only be used by \ReFormat.
+% on the key "]", which can only be used by \dotablereformat.
\def\!tkTestForReFormat#1#2%
{\if!taBeginFormat
\toks0={#1}%
- \toks2=\expandafter{\string\BeginFormat}%
- \expandafter \!tkImproperUse
+ \toks2=\expandafter{\string\dobegintableformat}%
+ \expandafter\!tkImproperUse
\else
\def\!ttemp{#2}%
- \expandafter \!ttemp
+ \expandafter\!ttemp
\fi}
% NOTE: THE SPACE BETWEEN A NUMERIC ENTRY AND THE FOLLOWING '|', '"',
% OR '\|' IS MANDATORY.
% EMPTY NUMERIC ENTRIES ARE NOT ALLOWED: USE '{}' OR '\omit' INSTEAD.
-% TEST FOR BRACKET: Invoked by the keys "n" and "N".
-
\def\!tnTestForBracket
- {\ifx [\!tnext
+ {\ifx[\!tnext
\expandafter\!tnGetArgument
\else
\expandafter\!tnGetCode
@@ -615,20 +617,20 @@
{\begingroup
\aftergroup\edef \aftergroup\!ttemp \aftergroup{%
\aftergroup[%
- \!taCountA #1
+ \scratchcounter#1\relax
\!thLoop
- \ifnum \!taCountA>0
- \advance\!taCountA -1
- \aftergroup0
+ \ifnum \scratchcounter>\zerocount
+ \advance\scratchcounter\minusone
+ \aftergroup0
\repeat
\def\!ttemp{#3}%
\ifx\!ttemp \empty
\else
\aftergroup.
- \!taCountA #2
+ \scratchcounter#2\relax
\!thLoop
- \ifnum \!taCountA>0
- \advance\!taCountA -1
+ \ifnum \scratchcounter>\zerocount
+ \advance\scratchcounter\minusone
\aftergroup0
\repeat
\fi
@@ -646,19 +648,19 @@
\def\!tnMakeNumericTemplate#1#2.#3.#4!% #1=<empty> or $
{\def\!ttemp{#4}%
\ifx\!ttemp\empty
- \!taDimenC=0pt
+ \!taDimenC\zeropoint
\else
- \setbox0=\hbox{\mathsurround\zeropoint #1.#3#1}%
- \!taDimenC=\wd0
+ \setbox\scratchbox=\hbox{\mathsurround\zeropoint #1.#3#1}%
+ \!taDimenC\wd\scratchbox
\fi
- \setbox0 =\hbox{\mathsurround\zeropoint #1#2#1}%
+ \setbox\scratchbox\hbox{\mathsurround\zeropoint #1#2#1}%
\!thToksEdef\!taDataColumnTemplate={%
\noexpand\!tnSetNumericItem
- {\the\wd0 }%
+ {\the\wd\scratchbox}%
{\the\!taDimenC}%
{#1}%
\the\!taDataColumnTemplate} % Might have tabskip glue in here
- \ReadFormatKeys}
+ \doreadtableformatkeys}
% SET NUMERIC ITEM
@@ -672,11 +674,11 @@
% extensions
-\NewFormatKey q%
+\newtableformatkey q%
{\letempty\!tqStyle
\futurelet\!tnext\!tqTestForBracket}
-\NewFormatKey Q%
+\newtableformatkey Q%
{\def\!tqStyle{$}%
\futurelet\!tnext\!tqTestForBracket}
@@ -696,20 +698,20 @@
\aftergroup\!ttemp
\aftergroup{%
\aftergroup[%
- \!taCountA #1
+ \scratchcounter#1\relax
\!thLoop
- \ifnum \!taCountA>0 % \zerocount
- \advance\!taCountA \minusone
+ \ifnum \scratchcounter>\zerocount
+ \advance\scratchcounter\minusone
\aftergroup0
\repeat
\def\!ttemp{#3}%
\ifx\!ttemp\empty
\else
\aftergroup,
- \!taCountA #2
+ \scratchcounter#2\relax
\!thLoop
- \ifnum\!taCountA>0 % \zerocount
- \advance\!taCountA \minusone
+ \ifnum\scratchcounter>\zerocount
+ \advance\scratchcounter\minusone
\aftergroup0
\repeat
\fi
@@ -725,14 +727,14 @@
\ifx\!ttemp\empty
\!taDimenC\zeropoint
\else
- \setbox0\hbox{\mathsurround\zeropoint #1,#3#1}%
- \!taDimenC\wd0
+ \setbox\scratchbox\hbox{\mathsurround\zeropoint #1,#3#1}%
+ \!taDimenC\wd\scratchbox
\fi
- \setbox0\hbox{\mathsurround\zeropoint #1#2#1}%
+ \setbox\scratchbox\hbox{\mathsurround\zeropoint #1#2#1}%
\!thToksEdef\!taDataColumnTemplate
- ={\noexpand\!tqSetQuantityItem{\the\wd0 }{\the\!taDimenC}{#1}%
+ ={\noexpand\!tqSetQuantityItem{\the\wd\scratchbox}{\the\!taDimenC}{#1}%
\the\!taDataColumnTemplate}%
- \ReadFormatKeys}
+ \doreadtableformatkeys}
\def\!tqSetQuantityItem #1#2#3#4 %
{\!tqSetQuantityItemA{#1}{#2}{#3}#4,,!}
@@ -744,29 +746,29 @@
% struts
-\def\tableMakeStrut#1#2%
+\def\domaketablestrut#1#2%
{\vrule\!!width\zeropoint\!!height#1\!!depth#2\relax}
-\def\tableStandardTableStrut
- {\tableMakeStrut
+\def\domakestandardtablestrut
+ {\domaketablestrut
{\tablestrutheightfactor\tablestrutunit}
{\tablestrutdepthfactor \tablestrutunit }}
-\def\tableAugmentedTableStrut#1#2%
- {\tableMakeStrut
+\def\domakeaugmentedtablestrut#1#2%
+ {\domaketablestrut
{\dimexpr\tablestrutheightfactor\tablestrutunit+#1\tablestrutunit\relax}
{\dimexpr\tablestrutdepthfactor \tablestrutunit+#2\tablestrutunit\relax}}
\appendtoks
- \let\MakeStrut \tableMakeStrut
- \let\StandardTableStrut \tableStandardTableStrut
- \let\AugmentedTableStrut\tableAugmentedTableStrut
+ \let\MakeStrut \domaketablestrut
+ \let\StandardTableStrut \domakestandardtablestrut
+ \let\AugmentedTableStrut\domakeaugmentedtablestrut
\to \everytable
% \Enlarge<extra height><extra depth><original>
% \enlarge<multiple for extra height><multiple for extra depth><original>
-\def\tableEnlarge#1#2%
+\def\dotableEnlarge#1#2%
{% 3rd argument is picked up later
% #1=extra height
% #2=extra depth
@@ -801,32 +803,21 @@
\dp\zerocount\dimen@
\box\zerocount \!TsSpaceFactor{}}
-\def\tableenlarge#1#2% 3rd argument is picked up later
- {\tableEnlarge{#1\tablestrutunit}{#2\tablestrutunit}}
+\def\dotableenlarge#1#2% 3rd argument is picked up later
+ {\dotableEnlarge{#1\tablestrutunit}{#2\tablestrutunit}}
\appendtoks
- \let\enlarge\tableenlarge
- \let\Enlarge\tableEnlarge
+ \let\enlarge\dotableenlarge
+ \let\Enlarge\dotableEnlarge
\to \everytable
% BEGIN TABLE
-\def\tablestandardbegin
- {\futurelet\!tnext\!ttBeginTable}
-
-\def\!ttBeginTable
- {\ifx[\!tnext
- \def\!tnext{\!ttBeginTableA}%
- \else
- \def\!tnext{\!ttBeginTableA[c]}%
- \fi
- \!tnext}
-
-\def\!ttBeginTableA[#1]%
- {\if#1u% % "unboxed" table
+\def\dotablestandardbegin[#1]% \!ttBeginTable (always argument)
+ {\if#1u% unboxed table
\ifmmode
\def\!ttEndTable{\relax}% user had better be in display math mode and have only one table at the outer level
- \else % user had better be in vertical mode
+ \else % user had better be in vertical mode
\bgroup
\def\!ttEndTable{\egroup}%
\fi
@@ -854,11 +845,9 @@
\bgroup \catcode`\~=13
\appendtoks
- \let\!ttTie=~%
- \let\!ttDH =\-%
\catcode`\~=\active
\def~{\kern.5em}%
- \def\\{\par}%
+ \def\\{\ifhmode\space\else\par\fi}%
\to \everytable
\egroup
@@ -868,7 +857,7 @@
% DO HALIGN: Invoked by END FORMAT (or the key ".")
\def\!ttDoHalign
- {\edef\restoretablelineskips
+ {\edef\dorestoretablelineskips
{\baselineskip \the\baselineskip
\lineskiplimit\the\lineskiplimit
\lineskip \the\lineskip
@@ -889,7 +878,7 @@
% END TABLE
-\def\tablenormalend
+\def\dotablenormalend
{\egroup % finishes the \halign
 \!ttEndTable}% closes off the table environment set up by \tablestandardbegin
@@ -923,11 +912,11 @@
{\cr}
\def\!ttDoPlus#1#2#3% #1 eats the +
- {\tableAugmentedTableStrut{#2}{#3}%
+ {\domakeaugmentedtablestrut{#2}{#3}%
\cr}
\def\!ttDoStandard
- {\tableStandardTableStrut
+ {\domakestandardtablestrut
\cr}
% ALTERNATE VRULE
@@ -977,14 +966,15 @@
\def\@VLd{.125em}
\let\tablecurrentvrulecolor\empty
+\let\tablecurrenthrulecolor\empty
\def\do!ttInsertVrule % will be merged in 2005
{\vrule\!!width
\ifnum\!tgCode=\plusone
\ifx\!tgValue\empty
- \tablevrulethicknessfactor
+ \tablevrulethicknessfactor
\else
- \!tgValue
+ \!tgValue
\fi
\tablelinethicknessunit
\else
@@ -1028,13 +1018,13 @@
\def\!ttuse#1%
{\ifnum#1>\plusone
\omit
- \global\TABLEdivisionfalse
- \scratchcounter\currentTABLEcolumn % added
+ \global\setfalse\istabledivision % added
+ \scratchcounter\currenttablecolumn % added
\advance\scratchcounter #1% % added
\advance\scratchcounter \minusone % added
\def\next % added
- {\global\advance\currentTABLEcolumn #1% % added
- \global\advance\currentTABLEcolumn \minusone % added
+ {\global\advance\currenttablecolumn #1% % added
+ \global\advance\currenttablecolumn \minusone % added
\scratchcounter#1% \mscount is in Plain
\advance\scratchcounter \minusone
\advance\scratchcounter \scratchcounter
@@ -1043,19 +1033,19 @@
\spanomit \advance\scratchcounter\minusone
\repeat
\span}%
- \else % added
- \def\next % conflicts with possible next \omit % added
- {\global\advance\currentTABLEcolumn \plusone}% % added
+ \else % added
+ \def\next % conflicts with possible next \omit % added
+ {\global\advance\currenttablecolumn \plusone}%% added
\fi
- \next} % added
+ \next} % added
\def\!ttUse#1[%
{\!ttuse{#1}%
- \ReFormat[}
+ \dotablereformat[}
\appendtoks
- \let\use \!ttuse
- \let\Use \!ttUse
+ \let\use\!ttuse
+ \let\Use\!ttUse
\to \everytable
% rules
@@ -1108,37 +1098,37 @@
% \null prevents \unskip
-\def\tableLeft #1{#1\hfill\null}
-\def\tableCenter#1{\hfill#1\hfill\null}
-\def\tableRight #1{\hfill#1}
+\def\dotableLeft #1{#1\hfill\null}
+\def\dotableCenter#1{\hfill#1\hfill\null}
+\def\dotableRight #1{\hfill#1}
-\def\tableOpenUp#1#2%
+\def\dotableOpenUp#1#2%
{\edef\tablestrutheightfactor{\withoutpt\the\dimexpr\tablestrutheightfactor\points+#1\points}%
\edef\tablestrutdepthfactor {\withoutpt\the\dimexpr\tablestrutdepthfactor \points+#2\points}}
-\def\tableSetTableToWidth#1%
+\def\dotableSetTableToWidth#1%
{\doifelsenothing{#1}{\!taTableSpread\emptytoks}{\!taTableSpread{to #1}}}
-\def\tableWidenTableBy#1%
+\def\dotableWidenTableBy#1%
{\ifdim#1=\zeropoint
\!taTableSpread\emptytoks
\else
\!taTableSpread{spread #1}%
\fi}
-\def\tableExpand
- {\tableSetTableToWidth{\hsize}}
+\def\dotableExpand
+ {\dotableSetTableToWidth{\hsize}}
-\def\tableLongLines
+\def\dotableLongLines
{\tablelefttabskip \zeropoint \!!plus 1fill
\tablerighttabskip\tablelefttabskip
- \tableExpand}
+ \dotableExpand}
-\def\tableJustLeft {\omit\let\!ttRightGlue\hfill}
-\def\tableJustCenter{\omit\hfill\null\let\!ttRightGlue\hfill}
-\def\tableJustRight {\omit\hfill\null}
+\def\dotableJustLeft {\omit\let\!ttRightGlue\hfill}
+\def\dotableJustCenter{\omit\hfill\null\let\!ttRightGlue\hfill}
+\def\dotableJustRight {\omit\hfill\null}
-\def\tableSmash
+\def\dotableSmash
{\relax
\ifmmode
\expandafter\mathpalette
@@ -1158,11 +1148,11 @@
\def\!thFinishVCS
{\vbox to\zeropoint{\vss\box\zerocount\vss}}
-\def\tableRaise
+\def\dotableRaise
{\def\!thSign{+}%
\!tgGetValue\!thSetDimen}
-\def\tableLower
+\def\dotableLower
{\def\!thSign{-}%
\!tgGetValue\!thSetDimen}
@@ -1199,7 +1189,7 @@
\dp\zerocount\zeropoint
\box\zerocount}
-\def\tableBackSpace
+\def\dotableBackSpace
{\!tgGetValue\!thKernBack}
\def\!thKernBack
@@ -1216,7 +1206,7 @@
\fi
\ignorespaces}
-\def\tableVspace
+\def\dotableVspace
{\noalign
\bgroup
\!tgGetValue\!thVspace}
@@ -1236,26 +1226,24 @@
\egroup} % Ends the \noalign
\appendtoks
- \let\SetTableToWidth\tableSetTableToWidth
- \let\WidenTableBy \tableWidenTableBy
- \let\Expand \tableExpand
- \let\LongLines \tableLongLines
- \let\JustLeft \tableJustLeft
- \let\JustCenter \tableJustCenter
- \let\JustRight \tableJustRight
- \let\Smash \tableSmash
- \let\Raise \tableRaise
- \let\Lower \tableLower
- \let\BackSpace \tableBackSpace
- \let\Vspace \tableVspace
- \let\OpenUp \tableOpenUp
- \let\tableLeft \tableLeft
- \let\tableCenter \tableCenter
- \let\tableRight \tableRight
+ \let\SetTableToWidth\dotableSetTableToWidth
+ \let\WidenTableBy \dotableWidenTableBy
+ \let\Expand \dotableExpand
+ \let\LongLines \dotableLongLines
+ \let\JustLeft \dotableJustLeft
+ \let\JustCenter \dotableJustCenter
+ \let\JustRight \dotableJustRight
+ \let\Smash \dotableSmash
+ \let\Raise \dotableRaise
+ \let\Lower \dotableLower
+ \let\BackSpace \dotableBackSpace
+ \let\Vspace \dotableVspace
+ \let\OpenUp \dotableOpenUp
+ \let\TableLeft \dotableLeft
+ \let\TableCenter \dotableCenter
+ \let\TableRight \dotableRight
\to \everytable
-\protect
-
%D We felt no need to write our own table building macros,
%D simply because Michael Wichura made a terrific one. This
%D package is quite complete and well documented. In \CONTEXT\
@@ -1267,8 +1255,6 @@
%D mostly global. Now, let's start with loading the main
%D macros.
-\unprotect
-
%D \macros
%D {inintable, ifsplittables}
%D
@@ -1477,64 +1463,61 @@
%D track of these specifications is taken care of by the next
%D variables:
-\newif\ifTABLEinbreak
-
-%D Nog vervangen:
-
-\def\c!Table{Table}
+\def\s!TaBlE{TaBlE}
\def\m!TABLE{TABLE}
-
+
%D We already saw that the table macros report errors and
%D provide automatic spacing. These features can only be
%D implemented by keeping track of the state, often the last
%D command on a row.
-\setnewconstant\TABLEunknown 0
+\newconstant\tableforcestate
+\newconstant\tableactionstate
-\setnewconstant\TABLEseparaterow 1
-\setnewconstant\TABLEfirstrow 2
-\setnewconstant\TABLEmidrow 3
-\setnewconstant\TABLElastrow 4
-\setnewconstant\TABLErule 5
-\setnewconstant\TABLEskip 6
-\setnewconstant\TABLEautorow 7
+\setnewconstant\tableunknownstate 0
-\setnewconstant\TABLEforcefirstrow 1
-\setnewconstant\TABLEforcelastrow 2
+\setnewconstant\tableseparaterowstate 1
+\setnewconstant\tablefirstrowstate 2
+\setnewconstant\tablemidrowstate 3
+\setnewconstant\tablelastrowstate 4
+\setnewconstant\tablerulestate 5
+%setnewconstant\tableskipstate 6
+%setnewconstant\tableautorowstate 7
-\newconstant\TABLErowfactor
+\setnewconstant\tableforcefirstrowstate 1
+\setnewconstant\tableforcelastrowstate 2
+
+\newconstant\tablerowfactor
\newconstant\TABLEendofrowdepth
\newconstant\TABLEendofrowheight
\newconstant\TABLEcr
-\newconstant\TABLErowzero
+\newconstant\tablerowzero
\newconstant\TABLEn
-\newconstant\TABLEforce
-\newconstant\TABLEaction
%D We store these states using \type {constants}'s and
%D like most variables, these are global ones. When needed,
%D especially when we flush the backgrounds, we can temporarily
%D disable the assignment.
-\newif\ifsetTABLEaction
+\newconditional\tableactionstatepermitted
-\def\setTABLEaction#1{\ifsetTABLEaction\global\TABLEaction#1\fi}
-\def\setTABLEforce #1{\ifsetTABLEaction\global\TABLEforce #1\fi}
+\def\dosettableaction#1{\ifconditional\tableactionstatepermitted\global\tableactionstate#1\fi}
+\def\dosettableforce #1{\ifconditional\tableactionstatepermitted\global\tableforcestate #1\fi}
%D To keep things verbose, as well as to show what \TABLE\
%D commands we affect, we show some meanings.
-\def\normalTABLEshortrule {\!ttShortHrule} % \-
-\def\normalTABLElongrule {\!ttLongHrule} % \=
-\def\normalTABLEfullrule {\!ttFullHrule} % \_
-\def\normalTABLEendofrow {\!ttEndOfRow} % \\
-\def\normalTABLEsimplebar {\unskip\!ttRightGlue&&} % |
-\def\normalTABLEcomplexbar {\unskip\!ttRightGlue&\omit\!ttAlternateVrule} % \|
-\def\normalTABLEquote {\unskip\!ttRightGlue&\omit&} % "
-\def\normalTABLElineformat {\normalTABLEendofrow+}
-\def\normalTABLElineending {\normalTABLEendofrow0 }
-\def\normalTABLEsinglerule {&\normalTABLElongrule&}
-\def\normalTABLEmultirule#1{&\use{#1}\normalTABLElongrule&}
+\def\donormaltableshortrule {\!ttShortHrule}
+\def\donormaltablelongrule {\!ttLongHrule}
+\def\donormaltablefullrule {\!ttFullHrule}
+\def\donormaltableendofrow {\!ttEndOfRow}
+\def\donormaltablesimplebar {\unskip\!ttRightGlue&&}
+\def\donormaltablecomplexbar #1{\unskip\!ttRightGlue&\omit\expandafter\!ttAlternateVrule#1 }
+\def\donormaltablequote {\unskip\!ttRightGlue&\omit&}
+\def\donormaltablelineformat {\donormaltableendofrow+}
+\def\donormaltablelineending {\donormaltableendofrow0 }
+\def\donormaltablesinglerule {&\donormaltablelongrule&}
+\def\donormaltablemultirule #1{&\use{#1}\donormaltablelongrule&}
%D To give an impression of what the (well documented) source
%D of \TABLE\ looks like, we first implement an alternative for
@@ -1569,13 +1552,6 @@
%D support the \type {l}, \type {c} and \type {r} keys for
%D paragraph entries.
-%D We already showed the next one, but here we slightly adapt
-%D the macro by adding an \type{\expandafter}. The space after
-%D \type{#1} is crucial!
-
-\def\normalTABLEcomplexbar#1%
- {\unskip\!ttRightGlue&\omit\expandafter\!ttAlternateVrule#1 }
-
%D All commands that are executed between rows are to be put in
%D \type {\noalign}. We cannot however verify whether we (that is
%D \TABLE) did or did not enter this mode. A moderately dirty
@@ -1584,20 +1560,17 @@
%D such hacks become second nature. However, redefining \type
%D {\omit} and \type{\span} is not that easy.}
-\def\doTABLEnoalign
- {\let\next=}
-
-\def\TABLEnoalign
+\def\tablenoalign
{\noalign
\bgroup
\let\noalign\relax
- \let\TABLEnoalign\doTABLEnoalign
- \doTABLEnoalign}
+ \let\tablenoalign\relax
+ \let\next=}
-\def\startTABLEnoalign
- {\TABLEnoalign\bgroup}
+\def\starttablenoalign
+ {\tablenoalign\bgroup}
-\def\stopTABLEnoalign
+\def\stoptablenoalign
{\iffalse{\else}\fi}
%D \macros
@@ -1637,7 +1610,7 @@
{\ifinsidefloat\else\startbaselinecorrection\fi}
{\startframedcontent[\@@tiframe]}%
\postponenotes
- \firststagestartTABLE}}
+ \firststagestarttable}}
% We cannot define \dostoptable with \unexpanded, as somehow lookahead
% in alignments then fails, so we relax it and define it locally.
@@ -1645,11 +1618,13 @@
\let\stoptabel\relax
\def\dostoptable
- {\chuckTABLEautorow % before the tail, else noalign problem
- \insertTABLEtail
- \TABLEnoalign{\globalletempty\@@TABLEhead}%
- \TABLEnoalign{\globalletempty\@@TABLEtail}%
- \finishTABLE
+ {\dochucktableautorow % before the tail, else noalign problem
+ \doinserttabletail
+ \starttablenoalign
+ \globalletempty\dotablehead
+ \globalletempty\dotabletail
+ \stoptablenoalign
+ \dofinishtable
\doifelsenothing\@@tiframe
{\ifinsidefloat\else
\stopbaselinecorrection
@@ -1661,14 +1636,14 @@
%D Before we can grab the argument, we have to make sure that
%D the \CATCODES\ are set. The first stage takes care of that.
-\def\firststagestartTABLE
+\def\firststagestarttable
 {\bgroup % can this be removed?
\global\intabletrue
%catcode`\|=\othercatcode
- \complexorsimple\secondstagestartTABLE}
+ \complexorsimple\secondstagestarttable}
-\def\simplesecondstagestartTABLE#1%
- {\complexsecondstagestartTABLE[{#1}]}
+\def\simplesecondstagestarttable#1%
+ {\complexsecondstagestarttable[{#1}]}
%D \macros
%D {definetabletemplate}
@@ -1696,14 +1671,14 @@
\def\dodefinetabletemplate[#1][#2][#3][#4]%
{\ifsecondargument
- \setgvalue{\c!Table#1}{\douseTABLEtemplate{#2}{#3}{#4}}%
+ \setgvalue{\s!TaBlE#1}{\dousetabletemplate{#2}{#3}{#4}}%
\fi
\egroup}
-\def\douseTABLEtemplate#1#2#3%
- {\gdef\TABLEhead{\getvalue{@@TABLEhead#2}}%
- \gdef\TABLEtail{\getvalue{@@TABLEtail#3}}%
- \complexsecondstagestartTABLE[#1]}
+\def\dousetabletemplate#1#2#3%
+ {\gdef\dotablehead{\getvalue{@@tablehead#2}}%
+ \gdef\dotabletail{\getvalue{@@tabletail#3}}%
+ \complexsecondstagestarttable[#1]}
%D The optional third and fourth arguments define which table
%D head and tail to use.
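A short usage sketch (template name and contents invented): templates are now
stored under the \s!TaBlE prefix, but the user level call is unchanged:

  \definetabletemplate[mytemplate][|l|c|r|]

  \starttable[mytemplate]
  \VL one \VL two \VL three \VL\SR
  \stoptable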
@@ -1729,46 +1704,65 @@
%D The second half of the next macro prepares table
%D splitting.
-\def\insertTABLEhead
- {\TABLEnoalign{\global\settrue \preventTABLEbreak \global\setfalse\someTABLEhead}%
- \TABLEhead
- \TABLEnoalign{\global\setfalse\preventTABLEbreak}}
-
-\def\insertTABLEtail
- {\TABLEnoalign{\global\settrue \preventTABLEbreak \global\setfalse\someTABLEtail}%
- \TABLEtail
- \TABLEnoalign{\global\setfalse\preventTABLEbreak}}
-
-\def\verysimpleTableHL
- {\TABLEnoalign{\expandafter\normalTABLEfullrule\@@tiHLheight}}
-
-\def\dorestartTABLE#1%
- {\gdef\restartTABLE{#1}%
- \restartTABLE
- \TABLEnoalign{\globalpushmacro\simpleTableHL\global\let\simpleTableHL\verysimpleTableHL}%
- \insertTABLEhead
+\def\doinserttablehead
+ {\starttablenoalign
+ \global\settrue\preventtablebreak
+ \global\setfalse\hassometablehead
+ \stoptablenoalign
+ \dotablehead
+ \starttablenoalign
+ \global\setfalse\preventtablebreak
+ \stoptablenoalign}
+
+\def\doinserttabletail
+ {\starttablenoalign
+ \global\settrue\preventtablebreak
+ \global\setfalse\hassometabletail
+ \stoptablenoalign
+ \dotabletail
+ \starttablenoalign
+ \global\setfalse\preventtablebreak
+ \stoptablenoalign}
+
+% \def\doverysimpletableHL % todo
+% {\starttablenoalign
+% \expandafter\donormaltablefullrule\@@tiHLheight
+% \stoptablenoalign}
+
+\def\dorestarttable#1%
+ {\gdef\restarttable{#1}%
+ \restarttable
+% \starttablenoalign
+% \globalpushmacro\simpletableHL
+% \global\let\simpletableHL\doverysimpletableHL
+% \stoptablenoalign
+ \doinserttablehead
\ifsplittables \ifconditional \tablerepeattail
- \TABLEnoalign{\goodbreak}%
- \insertTABLEtail
- \TABLEnoalign{\goodbreak}%
+ \tablenoalign{\goodbreak}%
+ \doinserttabletail
+ \tablenoalign{\goodbreak}%
\fi \fi
- \TABLEnoalign{\globalpopmacro\simpleTableHL}}
+% \starttablenoalign
+% \globalpopmacro\simpletableHL
+% \stoptablenoalign
+ }
\bgroup \catcode`|=\othercatcode \catcode`"=\othercatcode
-\gdef\complexsecondstagestartTABLE#1[#2]% brr nested mess
+\gdef\complexsecondstagestarttable#1[#2]% brr nested mess
{\bgroup
\@@useotherbar
\@@useotherquote
- \global\setfalse\someTABLEhead
- \global\setfalse\someTABLEtail
+ \global\setfalse\tableactionstatepermitted
+ \global\setfalse\hassometablehead
+ \global\setfalse\hassometabletail
\expanded{\doifinstringelse{|}{#2}}
- {\xdef\restartTABLE{\noexpand\dorestartTABLE{\noexpand\thirdstagestartTABLE{#2}}}}
- {\doifdefinedelse{\c!Table#2}
- {\gdef\restartTABLE{\getvalue{\c!Table#2}}}
- {\gdef\restartTABLE{\dorestartTABLE{\getvalue{#2}}}}}%
+ {\xdef\restarttable{\noexpand\dorestarttable{\noexpand\thirdstagestarttable{#2}}}}
+ {\doifdefinedelse{\s!TaBlE#2}
+ {\gdef\restarttable{\getvalue{\s!TaBlE#2}}}
+ {\gdef\restarttable{\dorestarttable{\getvalue{#2}}}}}%
\egroup
- \restartTABLE}
+ \restarttable}
\egroup
@@ -1784,25 +1778,25 @@
\newtoks \localtabledefinitions
-\def\thirdstagestartTABLE#1%
- {\global\setTABLEactiontrue
- \setTABLEaction\TABLEunknown
- \setTABLEforce\TABLEunknown
- \tableresetVLvalues
- \appendtoks\localTABLEsetup\to\everytable
- \tablestandardbegin[\ifsplittables u\else b\fi]%
+\def\thirdstagestarttable#1%
+ {\global\settrue\tableactionstatepermitted
+ \dosettableaction\tableunknownstate
+ \dosettableforce\tableunknownstate
+ \dotableresetVLvalues
+ \appendtoks\dolocaltablesetup\to\everytable
+ \dotablestandardbegin[\ifsplittables u\else b\fi]%
\the\localtabledefinitions
\forgetall % added
\doifsomething{#1}
- {\def\TABLEformat{#1}%
- \getTABLEnofcolumns\TABLEformat
+ {\def\currenttableformat{#1}%
+ \dogettablenofcolumns\currenttableformat
% more modern is to use catcode tables
- \expandafter\BeginFormat\TABLEformat\EndFormat}}
+ \expandafter\dobegintableformat\currenttableformat\doendtableformat}}
-\def\finishTABLE
- {\chuckTABLEautorow
+\def\dofinishtable
+ {\dochucktableautorow
\unskip\crcr
- \tablenormalend
+ \dotablenormalend
\global\intablefalse
\egroup}
@@ -1839,33 +1833,30 @@
\flushnotes
\setbox\tablecontentbox\vbox\bgroup
\forgetall
- \global\TABLEinbreakfalse
- \firststagestartTABLE}
+ \firststagestarttable}
\let\stoptables\relax % needed for \noalign
\def\dostoptables
- {\chuckTABLEautorow % AM: before the tail, else noalign problem
- \ifconditional\tablerepeattail\else\insertTABLEtail\fi
- \finishTABLE
+ {\dochucktableautorow % AM: before the tail, else noalign problem
+ \ifconditional\tablerepeattail\else\doinserttabletail\fi
+ \dofinishtable
\egroup
\dontcomplain
\dosplittablebox\tablecontentbox
\flushnotes
\egroup}
-\newdimen\TABLEcaptionheight % obsolete
-
\def\dosplittablebox#1%
{\resettsplit
\def\tsplitminimumfreelines{2}%
- \def\tsplitminimumfreespace{\TABLEcaptionheight}%
+ \def\tsplitminimumfreespace{\zeropoint}%
\setbox\tsplitcontent\box#1%
- \ifconditional\tablerepeathead \ifconditional\someTABLEhead
+ \ifconditional\tablerepeathead \ifconditional\hassometablehead
\setbox\tsplithead\vsplit\tsplitcontent to \lineheight
\setbox\tsplithead\vbox{\unvbox\tsplithead}%
\fi \fi
- \ifconditional\tablerepeattail \ifconditional\someTABLEtail
+ \ifconditional\tablerepeattail \ifconditional\hassometabletail
\setbox\tsplittail\vsplit\tsplitcontent to \lineheight
\setbox\tsplittail\vbox{\unvbox\tsplittail}%
\fi \fi
@@ -1903,8 +1894,8 @@
%D the fact that the stopcondition is interface language
%D dependent.
-\let\@@TABLEhead\empty \def\TABLEhead{\@@TABLEhead}
-\let\@@TABLEtail\empty \def\TABLEtail{\@@TABLEtail}
+\let\dotablehead\empty % needs checking
+\let\dotabletail\empty % needs checking
\letbeundefined{\e!start\v!tablehead}
\letbeundefined{\e!stop \v!tablehead}
@@ -1913,23 +1904,24 @@
\expanded
{\long\def\csname\e!start\v!tablehead\endcsname##1\csname\e!stop\v!tablehead\endcsname%
- {\noexpand\setTABLEhead##1\noexpand\end}}
+ {\noexpand\settablehead##1\noexpand\end}}
\expanded
{\long\def\csname\e!start\v!tabletail\endcsname##1\csname\e!stop\v!tabletail\endcsname%
- {\noexpand\setTABLEtail##1\noexpand\end}}
+ {\noexpand\settabletail##1\noexpand\end}}
%D The second argument is a dummy one; by scanning for it, we
%D get rid of interfering spaces.
-\def\setTABLEhead{\dodoubleempty\dosetTABLEhead}
-\def\setTABLEtail{\dodoubleempty\dosetTABLEtail}
+\newconditional\preventtablebreak
+\newconditional\hassometablehead
+\newconditional\hassometabletail
-\newconditional\preventTABLEbreak
-\newconditional\someTABLEhead
+\def\settablehead{\dodoubleempty\dosettablehead}
+\def\settabletail{\dodoubleempty\dosettabletail}
-\long\def\dosetTABLEhead[#1][#2]#3\end{\setvalue{@@TABLEhead#1}{\TABLEnoalign{\global\settrue\someTABLEhead}#3}}
-\long\def\dosetTABLEtail[#1][#2]#3\end{\setvalue{@@TABLEtail#1}{\TABLEnoalign{\global\settrue\someTABLEtail}#3}}
+\long\def\dosettablehead[#1][#2]#3\end{\setvalue{@@tablehead#1}{\tablenoalign{\global\settrue\hassometablehead}#3}}
+\long\def\dosettabletail[#1][#2]#3\end{\setvalue{@@tabletail#1}{\tablenoalign{\global\settrue\hassometabletail}#3}}
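A hedged sketch of the user level side (contents invented): the renamed
conditionals still record whether a head or tail was given, which split
tables can then repeat:

  \starttablehead
  \HL
  \VL first \VL second \VL\SR
  \HL
  \stoptablehead

  \starttables[|l|r|]
  \VL one \VL 1 \VL\FR
  \VL two \VL 2 \VL\LR
  \HL
  \stoptables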
%D Redundant \type{\HL}'s are removed automatically, so
%D mid||lines can be used without problems.
@@ -1939,14 +1931,15 @@
%D screen and log file as well as visualized in the table in
%D teletype.
-\def\finishTABLErow
+\def\dofinishtablerow
{\crcr
- \TABLEnoalign
- {\nobreak
- \setTABLEaction\TABLEunknown
- \globalletempty\checkTABLEautorow
- \globalletempty\chuckTABLEautorow
- \global\currentTABLEcolumn\zerocount}}
+ \starttablenoalign
+ \nobreak
+ \dosettableaction\tableunknownstate
+ \globalletempty\dochecktableautorow
+ \globalletempty\dochucktableautorow
+ \global\currenttablecolumn\zerocount
+ \stoptablenoalign}
%D Next we enter the more complicated area of column and row
%D switching. I won't go into much detail from now on, but just
@@ -1991,47 +1984,47 @@
%D handles alignments, we cannot automate spacing for colored
%D rows and columns.
-\setnewconstant\TABLErowzero\zerocount
+\setnewconstant\tablerowzero\zerocount
\appendtoks
- \let\SR\TableSR
- \let\FR\TableFR
- \let\MR\TableMR
- \let\LR\TableLR
- \let\AR\TableAR
+ \let\SR\dotableSR
+ \let\FR\dotableFR
+ \let\MR\dotableMR
+ \let\LR\dotableLR
+ \let\AR\dotableAR
\to \localtabledefinitions
-\unexpanded\def\TableSR
- {\ifnum\TABLEaction=\TABLEfirstrow
+\unexpanded\def\dotableSR
+ {\ifnum\tableactionstate=\tablefirstrowstate
\writestatus\m!TABLE{change \string\SR\space into \string\MR/\string\LR}%
- \else\ifnum\TABLEaction=\TABLEmidrow
+ \else\ifnum\tableactionstate=\tablemidrowstate
\writestatus\m!TABLE{change \string\SR\space into \string\MR/\string\LR}%
- \else\ifnum\TABLEaction=\TABLEmidrow
+ \else\ifnum\tableactionstate=\tablelastrowstate
\writestatus\m!TABLE{change \string\SR\space into \string\MR/\string\LR}%
\fi\fi\fi
- \endTABLErow\TABLEseparaterow\TABLErowfactor\TABLErowfactor}
+ \doendtablerow\tableseparaterowstate\tablerowfactor\tablerowfactor}
-\unexpanded\def\TableFR
- {\ifnum\TABLEaction=\TABLEmidrow
+\unexpanded\def\dotableFR
+ {\ifnum\tableactionstate=\tablemidrowstate
\writestatus\m!TABLE{change \string\FR\space into \string\MR/\string\LR}%
- \else\ifnum\TABLEaction=\TABLElastrow
+ \else\ifnum\tableactionstate=\tablelastrowstate
\writestatus\m!TABLE{change \string\FR\space into \string\MR/\string\LR}%
\fi\fi
- \endTABLErow\TABLEfirstrow\TABLErowfactor\TABLErowzero}
+ \doendtablerow\tablefirstrowstate\tablerowfactor\tablerowzero}
-\unexpanded\def\TableMR
- {\ifnum\TABLEaction=\TABLErule
+\unexpanded\def\dotableMR
+ {\ifnum\tableactionstate=\tablerulestate
\writestatus\m!TABLE{change \string\MR\space into \string\FR/\string\SR}%
- \else\ifnum\TABLEaction=\TABLElastrow
+ \else\ifnum\tableactionstate=\tablelastrowstate
\writestatus\m!TABLE{change \string\MR\space into \string\FR}%
\fi\fi
- \endTABLErow\TABLEmidrow00}
+ \doendtablerow\tablemidrowstate00}
-\unexpanded\def\TableLR
- {\ifnum\TABLEaction=\TABLErule
+\unexpanded\def\dotableLR
+ {\ifnum\tableactionstate=\tablerulestate
\writestatus\m!TABLE{change \string\LR\space into \string\FR/\string\SR}%
\fi
- \endTABLErow\TABLElastrow\TABLErowzero\TABLErowfactor}
+ \doendtablerow\tablelastrowstate\tablerowzero\tablerowfactor}
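For illustration (data invented): the renamed row enders keep their user
level aliases, so rows are still closed with \FR, \MR, \LR and \SR, or with
\AR when the state tracking should decide:

  \starttable[|l|r|]
  \HL
  \VL first  \VL 1 \VL\FR
  \VL middle \VL 2 \VL\MR
  \VL last   \VL 3 \VL\LR
  \HL
  \stoptable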
%D \macros
%D {ifcheckTABLEcolums}
@@ -2044,80 +2037,69 @@
%D determine the available space are shown (in color). By default
%D checking is off.
-\let\beforeTABLEline\empty
-\let\afterTABLEline \empty
-
-\def\doendTABLErow#1#2#3%
- {\handleTABLEbreak#2#3%
- \beforeTABLEline
+\def\doendtablerow#1#2#3%
+ {\dosettableaction#1%
\ifcase#1\relax
% unknown
\or
- \endofTABLEline[blue][\SR->\SR]\TABLErowfactor\TABLErowfactor
+ \doendoftableline [blue][\SR->\SR]\tablerowfactor\tablerowfactor
\or
- \endofTABLEline[red][\FR->\FR]\TABLErowfactor\TABLErowzero
- \or
- \ifnum\TABLEforce=\TABLEforcelastrow
- \endofTABLEline[red][\MR->\LR]\TABLErowzero\TABLErowfactor
- \else\ifnum\TABLEforce=\TABLEforcefirstrow
- \endofTABLEline[red][\MR->\FR]\TABLErowfactor\TABLErowzero
- \else
- \endofTABLEline[green][\MR->\MR]\TABLErowzero\TABLErowzero
- \fi\fi
- \or
- \endofTABLEline[red][\LR->\LR]\TABLErowzero\TABLErowfactor
+ \doendoftableline [red][\FR->\FR]\tablerowfactor\tablerowzero
+ \or\ifnum\tableforcestate=\tableforcelastrowstate
+ \doendoftableline [red][\MR->\LR]\tablerowzero\tablerowfactor
+ \else\ifnum\tableforcestate=\tableforcefirstrowstate
+ \doendoftableline [red][\MR->\FR]\tablerowfactor\tablerowzero
+ \else
+ \doendoftableline[green][\MR->\MR]\tablerowzero\tablerowzero
+ \fi\fi\or
+ \doendoftableline [red][\LR->\LR]\tablerowzero\tablerowfactor
\fi
- \TABLEnoalign
- {\setTABLEforce\TABLEunknown
- \global\currentTABLEcolumn\zerocount}%
- \afterTABLEline}
-
-\def\endTABLErow#1#2#3%
- {\setTABLEaction#1%
- \doendTABLErow{#1}{#2}{#3}}
+ \starttablenoalign
+ \dosettableforce\tableunknownstate
+ \global\currenttablecolumn\zerocount
+ \ifconditional\preventtablebreak
+ \nobreak
+ \else
+ \goodbreak
+ \fi
+ \stoptablenoalign}
%D Handling \type{\AR} is postponed till the next row. The
%D check takes care of the first and mid rows, the chuck macro
%D |<|how about that name|>| handles the last row.
-\unexpanded\def\TableAR
- {\globallet\checkTABLEautorow\docheckTABLEautorow
- \globallet\chuckTABLEautorow\dochuckTABLEautorow}
+\unexpanded\def\dotableAR
+ {\globallet\dochecktableautorow\dodochecktableautorow
+ \globallet\dochucktableautorow\dodochucktableautorow}
-\let\checkTABLEautorow\empty
-\let\chuckTABLEautorow\empty
+\let\dochecktableautorow\empty
+\let\dochucktableautorow\empty
-\def\docheckTABLEautorow
- {\globallet\checkTABLEautorow\empty
- \ifnum\TABLEaction=\TABLErule \FR
- \else\ifnum\TABLEaction=\TABLEunknown \FR
- \else \MR
- \fi\fi}
+\def\dodochecktableautorow
+ {\globallet\dochecktableautorow\empty
+ \ifnum\tableactionstate=\tablerulestate \FR\else
+ \ifnum\tableactionstate=\tableunknownstate\FR\else
+ \MR\fi\fi}
+
+\def\dodochucktableautorow
+ {\globalletempty\dochecktableautorow
+ \globalletempty\dochucktableautorow
+ \ifnum\tableactionstate=\tablerulestate \SR\else
+ \ifnum\tableactionstate=\tableunknownstate\SR\else
+ \LR\fi\fi}
-\def\dochuckTABLEautorow
- {\globalletempty\checkTABLEautorow
- \globalletempty\chuckTABLEautorow
- \ifnum\TABLEaction=\TABLErule \SR
- \else\ifnum\TABLEaction=\TABLEunknown \SR
- \else \LR
- \fi\fi}
-
%D When a table is split, we also add a tail and when present
%D we repeat the table head.
-\def\handleTABLEbreak#1#2%
- {\globalletempty\beforeTABLEline
- \gdef\afterTABLEline{\TABLEnoalign{\ifconditional\preventTABLEbreak\nobreak\else\goodbreak\fi}}}
-
%D When tables are split, the spacing before and after a
%D horizontal rule is corrected according to what we expect.
-\def\endofTABLEline[#1][#2->#3]#4#5%
+\def\doendoftableline[#1][#2->#3]#4#5%
{\ifx#2#3\else
\writestatus\m!TABLE{\string#2\space changed into \string#3}%
\fi
- \expandafter\normalTABLElineformat#4#5\crcr % \crcr nodig ?
- \TABLEnoalign{\nobreak\global\setTABLEactiontrue}}
+ \expandafter\donormaltablelineformat#4#5\crcr % \crcr needed?
+ \tablenoalign{\nobreak\global\settrue\tableactionstatepermitted}}
%D In order to prevent (as far as possible) alignment overflow
%D and therefore \TEX\ error messages, we check the maximum
@@ -2126,7 +2108,7 @@
%D the number of \type{|}'s and \type{\VL}'s or the like is always
%D one more than the number of columns.
-\newcount\currentTABLEcolumn
+\newcount\currenttablecolumn
%D While defining this macro we change the \CATCODE\ of
%D \type{|}. When counting the bars, we use a non active
@@ -2143,7 +2125,7 @@
\bgroup \catcode`\|=\othercatcode
-\gdef\getTABLEnofcolumns#1% todo: also divert this to lua as with tabulate
+\gdef\dogettablenofcolumns#1% todo: also divert this to lua as with tabulate
{\bgroup
\cleanupfeatures % needed !
\@@useotherbar
@@ -2166,18 +2148,18 @@
\let \tablecurrenthrulecolor \empty
\appendtoks
- \let\VL\TableVL
- \let\VC\TableVC
- \let\HL\TableHL
- \let\HC\TableHC
- \let\VS\TableVS
- \let\VD\TableVD
- \let\VT\TableVT
- \let\VN\TableVN
+ \let\VL\dotableVL
+ \let\VC\dotableVC
+ \let\HL\dotableHL
+ \let\HC\dotableHC
+ \let\VS\dotableVS
+ \let\VD\dotableVD
+ \let\VT\dotableVT
+ \let\VN\dotableVN
\to \localtabledefinitions
-\def\tableresetVLvalues
- {\global\currentTABLEcolumn\zerocount}
+\def\dotableresetVLvalues
+ {\global\currenttablecolumn\zerocount}
\def\dotablevrulecommand#1% global assignments
{\doifnumberelse{#1}
@@ -2185,20 +2167,20 @@
\global\multiply\tablevrulethicknessfactor\@@tiVLwidth\relax}
{\xdef\tablecurrentvrulecolor{#1}}}
-\unexpanded\def\TableVL
- {\checkTABLEautorow
- \global\advance\currentTABLEcolumn\plusone
- \dosingleempty\doTableVL}
+\unexpanded\def\dotableVL
+ {\dochecktableautorow
+ \global\advance\currenttablecolumn\plusone
+ \dosingleempty\dodotableVL}
-\def\doTableVL[#1]%
+\def\dodotableVL[#1]%
{\global\tablecurrentvrulecolor\empty
\global\tablevrulethicknessfactor\@@tiVLwidth\relax
\iffirstargument
\rawprocesscommalist[#1]\dotablevrulecommand
\fi
- \normalexpanded{\noexpand\normalTABLEcomplexbar\the\tablevrulethicknessfactor} }% \relax breaks \use
+ \normalexpanded{\noexpand\donormaltablecomplexbar\the\tablevrulethicknessfactor} }% \relax breaks \use
-\let\TableVC\TableVL % for mojca
+\let\dotableVC\dotableVL % for mojca
% \starttable[|||]
% \HL
@@ -2208,10 +2190,10 @@
% \HL
% \stoptable
-\unexpanded\def\TableVS {\VN1}
-\unexpanded\def\TableVD {\VN2}
-\unexpanded\def\TableVT {\VN3}
-\unexpanded\def\TableVN#1{\gdef\@VLn{#1}\VL}
+\unexpanded\def\dotableVS {\VN1}
+\unexpanded\def\dotableVD {\VN2}
+\unexpanded\def\dotableVT {\VN3}
+\unexpanded\def\dotableVN#1{\gdef\@VLn{#1}\VL}
\def\dotablehrulecommand#1% global assignments
{\doifnumberelse{#1}
@@ -2219,20 +2201,20 @@
\global\multiply\tablehrulethicknessfactor\@@tiHLheight\relax}
{\xdef\tablecurrenthrulecolor{#1}}}
-\unexpanded\def\TableHL
- {\chuckTABLEautorow
- \finishTABLErow
- \startTABLEnoalign
- \dosingleempty\doTableHL}
+\unexpanded\def\dotableHL
+ {\dochucktableautorow
+ \dofinishtablerow
+ \starttablenoalign
+ \dosingleempty\dodotableHL}
-\def\doTableHL[#1]%
+\def\dodotableHL[#1]%
{\nobreak
- \ifnum\TABLEaction=\TABLErule
+ \ifnum\tableactionstate=\tablerulestate
\writestatus\m!TABLE{skipping \string\HL}% \statusmessage
\else
- \ifnum\TABLEaction=\TABLEmidrow
+ \ifnum\tableactionstate=\tablemidrowstate
\writestatus\m!TABLE{change \string\MR\space into \string\LR/\string\SR}%
- \else\ifnum\TABLEaction=\TABLEfirstrow
+ \else\ifnum\tableactionstate=\tablefirstrowstate
\writestatus\m!TABLE{change \string\MR\space into \string\SR}%
\fi\fi
\bgroup
@@ -2244,15 +2226,15 @@
\switchtocolor[\tablecurrenthrulecolor]%
\fi
\fi
- \normalexpanded{\noexpand\normalTABLEfullrule\the\tablehrulethicknessfactor} %
+ \normalexpanded{\noexpand\donormaltablefullrule\the\tablehrulethicknessfactor} %
\egroup
- \accountTABLElinewidth
+ \doaccounttablelinewidth
\fi
- \setTABLEaction\TABLErule
+ \dosettableaction\tablerulestate
\nobreak
- \stopTABLEnoalign}
+ \stoptablenoalign}
-\let\TableHC\TableHL % for mojca
+\let\dotableHC\dotableHL % for mojca
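A hedged sketch (color names and factor invented): with the
\tablecurrentvrulecolor and \tablecurrenthrulecolor plumbing above, the rule
commands accept an optional color or width factor:

  \starttable[|l|r|]
  \HL[blue]                      % colored horizontal rule
  \VL[red] one \VL[2] two \VL\SR % colored rule, then one twice the set width
  \HL
  \stoptable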
%D \startitemize[3*ruim]
%D \sym{\type{\NL}} a vertical skip
@@ -2274,39 +2256,40 @@
% explain n+1
\appendtoks
- \let\TB\TableTB
- \let\NL\TableNL % old
- \let\NR\TableNR
- \let\NC\TableNC
- \let\FC\TableNC
- \let\MC\TableNC
- \let\LC\TableNC
+ \let\TB\dotableTB
+ \let\NL\dotableNL % old
+ \let\NR\dotableNR
+ \let\NC\dotableNC
+ \let\FC\dotableNC
+ \let\MC\dotableNC
+ \let\LC\dotableNC
\to \localtabledefinitions
-\unexpanded\def\TableTB
- {\chuckTABLEautorow
- \finishTABLErow
- \startTABLEnoalign
- \dosingleempty\doTableTB}
+\unexpanded\def\dotableTB
+ {\dochucktableautorow
+ \dofinishtablerow
+ \starttablenoalign
+ \dosingleempty\dodotableTB}
-\def\doTableTB[#1]%
+\def\dodotableTB[#1]%
{\blank[\iffirstargument#1\else\@@tiNL\fi]%
\nobreak
- \stopTABLEnoalign}
-
-\let\TableNL\TableTB
-
-\unexpanded\def\TableNR
- {\global\currentTABLEcolumn\zerocount
- \normalTABLElineending
- \TABLEnoalign
- {\nobreak
- \setTABLEaction\TABLEunknown}}
-
-\unexpanded\def\TableNC
- {\checkTABLEautorow
- \global\advance\currentTABLEcolumn \plusone
- \normalTABLEquote}
+ \stoptablenoalign}
+
+\let\dotableNL\dotableTB
+
+\unexpanded\def\dotableNR
+ {\global\currenttablecolumn\zerocount
+ \donormaltablelineending
+ \starttablenoalign
+ \nobreak
+ \dosettableaction\tableunknownstate
+ \stoptablenoalign}
+
+\unexpanded\def\dotableNC
+ {\dochecktableautorow
+ \global\advance\currenttablecolumn \plusone
+ \donormaltablequote}
%D \startitemize[3*broad]
%D \sym{\type{\DL}}
@@ -2315,21 +2298,21 @@
%D \sym{\type{\DR}}
%D \stopitemize
-\newif\ifTABLEdivision
+\newconditional\istabledivision
\appendtoks
- \global\TABLEdivisionfalse % in start
- \let\DL\TableDL
- \let\DC\TableDC
- \let\DV\TableDV
- \let\DR\TableDR
+ \global\setfalse\istabledivision
+ \let\DL\dotableDL
+ \let\DC\dotableDC
+ \let\DV\dotableDV
+ \let\DR\dotableDR
\to \localtabledefinitions
-\def\checkTABLEdivision
- {\ifTABLEdivision \else
- \chuckTABLEautorow
- \global\currentTABLEcolumn\zerocount
- \global\TABLEdivisiontrue
+\def\dochecktabledivision
+ {\ifconditional\istabledivision\else
+ \dochucktableautorow
+ \global\currenttablecolumn\zerocount
+ \global\settrue\istabledivision
\fi}
\def\dotabledrulecommand#1% global assignments
@@ -2342,20 +2325,20 @@
\fi}
{\xdef\tablecurrenthrulecolor{#1}}}
-\unexpanded\def\TableDL
- {\checkTABLEdivision
- \dosingleempty\doTableDL}
+\unexpanded\def\dotableDL
+ {\dochecktabledivision
+ \dosingleempty\dodotableDL}
-\def\doTableDL[#1]%
- {\ifnum\TABLEaction=\TABLErule
+\def\dodotableDL[#1]%
+ {\ifnum\tableactionstate=\tablerulestate
\writestatus\m!TABLE{skipping \string\DL}%
\else
- \ifnum\TABLEaction=\TABLEmidrow
+ \ifnum\tableactionstate=\tablemidrowstate
\writestatus\m!TABLE{change \string\MR\space into \string\LR/\string\SR}%
- \else\ifnum\TABLEaction=\TABLEfirstrow
+ \else\ifnum\tableactionstate=\tablefirstrowstate
\writestatus\m!TABLE{change \string\MR\space into \string\SR}%
\fi\fi
- \setTABLEaction\TABLEunknown
+ \dosettableaction\tableunknownstate
\global\tablehrulethicknessfactor\@@tiHLheight\relax
\global\tabledrulespan\zerocount
\iffirstargument
@@ -2366,76 +2349,74 @@
\fi
\fi
\ifcase\tabledrulespan
- \global\advance\currentTABLEcolumn \plusone
- \let\next\normalTABLEsinglerule
+ \global\advance\currenttablecolumn \plusone
+ \let\next\donormaltablesinglerule
\or
- \global\advance\currentTABLEcolumn \plustwo
- \let\next\normalTABLEsinglerule
+ \global\advance\currenttablecolumn \plustwo
+ \let\next\donormaltablesinglerule
\else
- \global\advance\currentTABLEcolumn \plusone
- \edef\next{\noexpand\normalTABLEmultirule{\tabledrulespan} }%
+ \global\advance\currenttablecolumn \plusone
+ \edef\next{\noexpand\donormaltablemultirule{\tabledrulespan} }%
\fi
\next
\fi}
-\unexpanded\def\TableDV
- {\TableDCV\normalTABLEsimplebar}
+\unexpanded\def\dotableDV
+ {\dotableDCV\donormaltablesimplebar}
-\unexpanded\def\TableDC
- {\TableDCV\normalTABLEquote}
+\unexpanded\def\dotableDC
+ {\dotableDCV\donormaltablequote}
-\unexpanded\def\TableDCV#1%
- {\checkTABLEdivision
- \checkTABLEautorow
- \global\advance\currentTABLEcolumn \plusone
+\unexpanded\def\dotableDCV#1%
+ {\dochecktabledivision
+ \dochecktableautorow
+ \global\advance\currenttablecolumn \plusone
#1}
-\unexpanded\def\TableDR
- {\global\currentTABLEcolumn\zerocount % nog check
- \normalTABLElineending
- \TABLEnoalign
- {\nobreak
- \global\TABLEdivisionfalse
- \accountTABLElinewidth % temporary solution
- \setTABLEaction\TABLErule}}
-
-\def\accountTABLElinewidth
+\unexpanded\def\dotableDR
+ {\global\currenttablecolumn\zerocount % still needs checking
+ \donormaltablelineending
+ \starttablenoalign
+ \nobreak
+ \global\setfalse\istabledivision
+ \doaccounttablelinewidth % temporary solution
+ \dosettableaction\tablerulestate
+ \stoptablenoalign}
+
+\def\doaccounttablelinewidth
{\scratchdimen\tablelinethicknessunit}
-\def\doTableSPAN#1{\use{#1}}
-\def\doTableTWO {\use2}
-\def\doTableTHREE {\use3}
-\def\doTableFOUR {\use4}
-\def\doTableFIVE {\use5}
-\def\doTableSIX {\use6}
-\def\doTableREF {\ReFormat}
+\def\dotableTWO {\use2}
+\def\dotableTHREE {\use3}
+\def\dotableFOUR {\use4}
+\def\dotableFIVE {\use5}
+\def\dotableSIX {\use6}
\appendtoks
- \let\SPAN \doTableSPAN
- \let\TWO \doTableTWO
- \let\THREE\doTableTHREE
- \let\FOUR \doTableFOUR
- \let\FIVE \doTableFIVE
- \let\SIX \doTableSIX
- \let\REF \doTableREF
+ \let\TWO \dotableTWO
+ \let\THREE\dotableTHREE
+ \let\FOUR \dotableFOUR
+ \let\FIVE \dotableFIVE
+ \let\SIX \dotableSIX
+ \let\SPAN \use
+ \let\REF \dotablereformat
\to \localtabledefinitions
-\setvalue{\??ti:\c!distance:\v!none }{\OpenUp00\def\LOW{\Lower6 }}
-\setvalue{\??ti:\c!distance:\v!small }{\OpenUp00\def\LOW{\Lower6 }} % == baseline
-\setvalue{\??ti:\c!distance:\v!medium}{\OpenUp11\def\LOW{\Lower7 }}
-\setvalue{\??ti:\c!distance:\v!big }{\OpenUp22\def\LOW{\Lower8 }}
+\setvalue{\??ti:\c!distance:\v!none }{\dotableOpenUp00\def\LOW{\Lower6 }}
+\setvalue{\??ti:\c!distance:\v!small }{\dotableOpenUp00\def\LOW{\Lower6 }} % == baseline
+\setvalue{\??ti:\c!distance:\v!medium}{\dotableOpenUp11\def\LOW{\Lower7 }}
+\setvalue{\??ti:\c!distance:\v!big }{\dotableOpenUp22\def\LOW{\Lower8 }}
\appendtoks
- \getvalue{\??ti:\c!distance:\@@tidistance}%
+ \getvalue{\??ti:\c!distance:\@@tidistance}%
\to \localtabledefinitions
\appendtoks
\doifelse\@@tidistance\v!none
- {\TABLErowfactor\zerocount}
- {\TABLErowfactor\plustwo }%
+ {\tablerowfactor\zerocount}
+ {\tablerowfactor\plustwo }%
\to \localtabledefinitions
-
\def\dohandlebar % here ?
{\ifmmode
\@EA\domathmodebar
@@ -2444,7 +2425,7 @@
\else
\@EAEAEA\dotextmodebar
\fi\fi}
-
+
\unexpanded\def\setuptables
{\dosingleargument\dosetuptables}
@@ -2452,17 +2433,15 @@
{\getparameters[\??ti][#1]%
\processaction % we have a command for this
[\@@tialign]
- [ \v!right=>\def\TABLEparalignment{\raggedright},
- \v!left=>\def\TABLEparalignment{\raggedleft},
- \v!middle=>\def\TABLEparalignment{\raggedcenter},
- \s!default=>\def\TABLEparalignment{\notragged},
- \s!unknown=>\def\TABLEparalignment{\notragged}]%
+ [ \v!right=>\def\dotableparalignment{\raggedright},
+ \v!left=>\def\dotableparalignment{\raggedleft},
+ \v!middle=>\def\dotableparalignment{\raggedcenter},
+ \s!default=>\def\dotableparalignment{\notragged},
+ \s!unknown=>\def\dotableparalignment{\notragged}]%
\assignalfadimension\@@tiVL\@@tiVLwidth 246%
\assignalfadimension\@@tiHL\@@tiHLheight246}
-\let\tablelinethicknessfactor\plusfour
-
-\def\localTABLEsetup
+\def\dolocaltablesetup
{\@@ticommands\relax
\expanded{\switchtobodyfont[\@@tibodyfont]}% expanded ?
\tablelinethicknessunit\dimexpr\@@tirulethickness/\tablelinethicknessfactor\relax
@@ -2528,7 +2507,7 @@
%D {$\vcenter{\getbuffer[b]}$}
%D {\hbox{tabulate}}
%D \stopcombination
-
+
\setuptables
[HL=\v!medium,
VL=\v!medium,
@@ -2547,6 +2526,4 @@
\c!backgroundcolor=,
\c!split=\v!auto]
-\def\ifintabel{\ifintable} % upward compatible
-
\protect \endinput
diff --git a/tex/context/base/tabl-tbl.lua b/tex/context/base/tabl-tbl.lua
index 4531b2134..86968721b 100644
--- a/tex/context/base/tabl-tbl.lua
+++ b/tex/context/base/tabl-tbl.lua
@@ -21,8 +21,7 @@ local nested = lpeg.patterns.nested
local pattern = Ct((separator * (C(nested) + Cc("")) * C((1-separator)^0))^0)
function commands.presettabulate(preamble)
- -- todo: lpeg
- preamble = string.escapedpattern(preamble)
+ -- todo: lpeg but not now
preamble = gsub(preamble, "%*(%b{})(%b{})", function(n,p)
return rep(sub(p,2,-2),tonumber(sub(n,2,-2)) or 1)
end)
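As an aside, the gsub above expands the *{n}{template} shorthand in tabulate preambles before the lpeg splitter runs. A minimal standalone sketch of just that step (the helper name expandrepeats is invented for illustration; the pattern is the one used above):

    local gsub, sub, rep = string.gsub, string.sub, string.rep

    local function expandrepeats(preamble)
        -- "*{3}{l|}" repeats the braced template three times
        return (gsub(preamble, "%*(%b{})(%b{})", function(n, p)
            return rep(sub(p, 2, -2), tonumber(sub(n, 2, -2)) or 1)
        end))
    end

    print(expandrepeats("|*{3}{l|}")) --> |l|l|l|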
diff --git a/tex/context/base/tabl-tbl.mkiv b/tex/context/base/tabl-tbl.mkiv
index 9428ea3dc..9ff6b0f9e 100644
--- a/tex/context/base/tabl-tbl.mkiv
+++ b/tex/context/base/tabl-tbl.mkiv
@@ -98,11 +98,14 @@
%
% present but not yet 100% ok
%
-% \FL top hrule
-% \ML mid hrule (with auto split)
-% \LL bottom hrule
-% \HL
-% \VL
+% \TL [width,color] top hrule
+% \FL [width,color] first hrule
+% \ML [width,color] mid hrule (with auto split)
+% \LL [width,color] last hrule
+% \BL [width,color] bottom hrule
+%
+% \HL [width,color] hrule, automatically first/mid/last
+% \VL [width,color]
%
% \CC \CL \CM \CR color
%
@@ -210,8 +213,11 @@
\newconstant \tabulatelocalcolorspan
\newdimen \defaulttabulatevrulethickness
+\newdimen \defaulttabulatehrulethickness
\newdimen \tabulatevrulethickness
+\newdimen \tabulatehrulethickness
\newdimen \tabulatelocalvrulethickness
+\newdimen \tabulatelocalhrulethickness
\newskip \tabulateseparatorskip
@@ -234,10 +240,20 @@
\def\handletabulatepbreakfalse{\setfalse\tabulatehandlepbreak} % deprecated
-% handy
+% handy helper
\def\tabulatenoalign
- {\noalign\bgroup\let\noalign\relax\let\next=}
+ {\noalign
+ \bgroup
+ \let\noalign\relax
+ \let\tabulatenoalign\relax
+ \let\next=}
+
+\def\starttabulatenoalign
+ {\tabulatenoalign\bgroup}
+
+\def\stoptabulatenoalign
+ {\iffalse{\else}\fi}
% [|lg{.}|] => \NG 12.34 \NC
@@ -284,7 +300,7 @@
\fi}
\def\dotabulatenobreak
- {\noalign{\dodotabulatenobreak}}
+ {\tabulatenoalign{\dodotabulatenobreak}}
\unexpanded\def\notabulatehook
{}
@@ -548,7 +564,7 @@
\let\tabulatereshape\plusone,
\s!unknown=>\tabulatewidth#1\relax]%
\ifnum\tabulatedimen=\plusone
- \global\advance\tabulatepwidth\tabulatewidth
+ \global\advance\tabulatepwidth\tabulatewidth % accumulated parwidth
\fi
\settabulatepreamble}
@@ -759,9 +775,9 @@
\fi
\copyparameters
[\??tt#1::#2][\??tt#1::]%
- [\c!unit,\c!distance,\c!before,\c!bodyfont,\c!after,
- \c!inner,\c!indenting,\c!frame,\c!split,\c!header,\c!title,
- \c!margin,\c!align,\c!rulecolor,\c!rulethickness,EQ]%
+ [\c!frame,\c!distance,\c!unit,\c!before,\c!bodyfont,\c!after,
+ \c!inner,\c!indenting,\c!margin,\c!align,\c!header,\c!title,
+ \c!rulecolor,\c!rulethickness,\c!split,EQ]%
\setuvalue{\e!start#1::#2}{\dofinalstarttabulate[#1][#2][#3]}%
\setuvalue{\e!start#1}{\bgroup\dosubstarttabulate[#1]}%
\letvalueempty{\??tt#1-\v!header}%
@@ -832,6 +848,15 @@
\setuvalue{\e!start\v!tabulate}%
{\bgroup\dodoubleempty\donormalstarttabulate}
+\def\doinhibitnestedtabulate
+ {\setuvalue{\e!start\v!tabulate}%
+ {\em Nested tabulate is not (yet) supported.\relax
+ \expandafter\gobbleuntil\csname\e!stop\v!tabulate\endcsname}}
+
+\appendtoks
+ \doinhibitnestedtabulate
+\to \everytabulate
+
\setuvalue{\e!stop\v!tabulate }{}
\setuvalue{\e!stop\v!tabulatehead}{}
\setuvalue{\e!stop\v!tabulatetail}{}
@@ -987,7 +1012,7 @@
\else\ifnum\tabulatecolumn=\noftabulatecolumns
\global\tabulatehaslastrulespec\plusone
\fi\fi
- \global\let\currenttabulatelocalvrulecolor\empty
+ \global\let\currenttabulatelocalvrulecolor\defaulttabulatevrulecolor
\global\tabulatelocalvrulethickness\defaulttabulatevrulethickness
\doiffastoptionalcheckelse{\tabulatevruledcolumnyes#1}{\tabulatevruledcolumnnop#1}}
@@ -1003,16 +1028,22 @@
{\rawprocesscommalist[#2]\dotabulatevrulelocalcommand
\tabulatenormalcolumn#1}
+\def\donormaltabulatevrule
+ {\vrule\!!width\tabulatevrulethickness\relax}
+
+\def\docoloredtabulatevrule
+ {\faststartcolor[\currenttabulatevrulecolor]%
+ \vrule\!!width\tabulatevrulethickness\relax
+ \faststopcolor}
+
\unexpanded\def\dotabulatevrule
{\ifcase\tabulatevrulethickness\else
\iftrialtypesetting
- \vrule\!!width\tabulatevrulethickness\relax
+ \donormaltabulatevrule
\else\ifx\currenttabulatevrulecolor\empty
- \vrule\!!width\tabulatevrulethickness\relax
+ \donormaltabulatevrule
\else
- \faststartcolor[\currenttabulatevrulecolor]%
- \vrule\!!width\tabulatevrulethickness\relax
- \faststopcolor
+ \docoloredtabulatevrule
\fi\fi
\fi
\global\tabulatevrulethickness\zeropoint} % nils second one
@@ -1039,47 +1070,92 @@
\setvalue{\??tt:rs:\v!none }{\tabulateseparatorskip\zeropoint\def\factor{0}}
\setvalue{\??tt:rs:\v!grid }{\tabulateseparatorskip\zeropoint\def\factor{0}}
-\def\dodotabulateruleseperator#1%
+\def\dodotabulateruleseparator#1%
{\ifcsname\??tt:rs:#1\endcsname
\csname\??tt:rs:#1\endcsname
\else
\tabulateseparatorskip#1\relax
\fi}
-\def\dotabulateruleseperator % can be sped up (will do when used frequently)
+\def\dotabulateruleseparator % can be sped up (will do when used frequently)
{\bgroup
\edef\tabulateseparatordistance{\tabulateparameter\c!distance}%
\tabulateseparatorskip\strutdp
\ifx\tabulateseparatordistance\empty\else
\let\factor\!!plusone
- \normalexpanded{\processcommalist[\tabulateparameter\c!distance]}\dodotabulateruleseperator
+ \normalexpanded{\processcommalist[\tabulateparameter\c!distance]}\dodotabulateruleseparator
\tabulateseparatorskip\factor\tabulateseparatorskip
\fi
\ifconditional\tabulatesomeamble\kern\else\vskip\fi\tabulateseparatorskip % new
\egroup}
-\def\dodotabulaterule#1%
- {\color % fastcolor ?
- [\tabulateparameter\c!rulecolor]
- {\scratchdimen\tabulateparameter\c!rulethickness#1}}
+\def\tabulateignorehrulespec#1%
+ {%\global\let\currenttabulatelocalhrulecolor\empty
+ %\global\tabulatelocalhrulethickness\defaulttabulatehrulethickness
+ \doiffastoptionalcheckelse#1#1}
-\def\dotabulaterule
- {\dodotabulaterule
- {\hrule\!!height.5\scratchdimen\!!depth.5\scratchdimen\relax
- \doifvalue{\??tt\currenttabulate\c!distance}\v!grid{\kern-\scratchdimen}}} % experimental tm-prikkels
+\def\tabulatepickuphrulespec#1%
+ {\global\let\currenttabulatelocalhrulecolor\defaulttabulatehrulecolor
+ \global\tabulatelocalhrulethickness\defaulttabulatehrulethickness
+ \doiffastoptionalcheckelse{\tabulatepresethrule#1}#1}
+
+\def\dotabulatehrulelocalcommand#1%
+ {\doifnumberelse{#1}
+ {\global\tabulatelocalhrulethickness#1\defaulttabulatehrulethickness}
+ {\xdef\currenttabulatelocalhrulecolor{#1}}}
+
+\def\tabulatepresethrule#1[#2]%
+ {\rawprocesscommalist[#2]\dotabulatehrulelocalcommand
+ #1}
+
+\def\donormaltabulatehrule
+ {\hrule
+ \!!height.5\tabulatelocalhrulethickness
+ \!!depth .5\tabulatelocalhrulethickness
+ \relax}
+
+\def\docoloredtabulatehrule
+ {\faststartcolor[\currenttabulatelocalhrulecolor]%
+ \donormaltabulatehrule
+ \faststopcolor}
+
+\unexpanded\def\dotabulatehrule
+ {\ifcase\tabulatelocalhrulethickness\else
+ \iftrialtypesetting % does not happen as we nil the caller
+ \donormaltabulatehrule
+ \else\ifx\currenttabulatelocalhrulecolor\empty
+ \donormaltabulatehrule
+ \else
+ \docoloredtabulatehrule
+ \fi\fi
+ \fi}
+
+\def\donormaltabulatehlinerule
+ {\leaders \hrule
+ \!!height\dimexpr.5\lineheight-\strutdepth
+ \!!depth-\dimexpr.5\lineheight-\strutdepth+\tabulatelocalhrulethickness
+ \hfill}
+
+\def\docoloredtabulatehlinerule
+ {\faststartcolor[\currenttabulatelocalhrulecolor]%
+ \donormaltabulatehlinerule
+ \faststopcolor}
\def\dotabulatelinerule
{\multispan\totaltabulatecolumns % \multispan is a plain macro
% for the moment this one
\strut\hskip\tabulateparameter\c!margin
% neg values are ok !
- \hskip\tabulateindent % new august 2003
- \dodotabulaterule
- {\!!heighta.5\lineheight
- \advance\!!heighta-\strutdepth
- \!!deptha-\!!heighta
- \advance\!!deptha\scratchdimen
- \leaders\hrule\!!height\!!heighta\!!depth\!!deptha\hfill}%
+ \hskip\tabulateindent\relax % new august 2003
+ \ifcase\tabulatelocalhrulethickness\else
+ \iftrialtypesetting % does not happen as we nil the caller
+ \donormaltabulatehlinerule
+ \else\ifx\currenttabulatelocalhrulecolor\empty
+ \donormaltabulatehlinerule
+ \else
+ \docoloredtabulatehlinerule
+ \fi\fi
+ \fi
\cr}
%D Color:
@@ -1217,8 +1293,8 @@
\unexpanded\def\tabulateNGone{\NC\handletabulatecharalign}
\unexpanded\def\tabulateNNone{\NC\handletabulatedigits} % new, undocumented, test first
\unexpanded\def\tabulateNDone{\NC\handletabulatedigits} % same, for old times sake
+
\unexpanded\def\tabulateHRone{\doHR\zerocount}
-\unexpanded\def\tabulateHLone{\doHL\zerocount}
\unexpanded\def\tabulateCCone{\global\tabulatelocalcolorspan\zerocount\tabulatecolorcolumn\zerocount}
\unexpanded\def\tabulateCLone{\global\tabulatelocalcolorspan\plusone \tabulatecolorcolumn\zerocount}
@@ -1232,8 +1308,10 @@
\global\tabulatecolumn\zerocount
\resettabulatepheight
\unskip\unskip\crcr\flushtabulated
- \tabulatenoalign{\the\everyaftertabulaterow}%
- \tabulatenoalign{\checktabulatepenaltiesa}}
+ \starttabulatenoalign
+ \the\everyaftertabulaterow
+ \checktabulatepenaltiesa
+ \stoptabulatenoalign}
\def\checktabulatepenaltiesa
{\iftolerantTABLEbreak\else
@@ -1317,49 +1395,6 @@
\def\tabulateeskipthree % vertical strut added august 2003
{\par\verticalstrut\vskip-\struttotal\egroup}
-\def\tabulatedoHLfour#1% #1 ignored
- {\tabulatenoalign
- {\csname
- \ifnum\noftabulatelines=\zerocount F\else
- \ifnum\noftabulatelines=\totalnoftabulatelines L\else
- M\fi\fi
- L\endcsname}}%
-
-\def\tabulatedoHRfour#1% horizontal rule line (break untested)
- {\tabulatenoalign
- {\globallet\dotabulateautoline\dotabulatelinerule
- \ifcase#1\or
- \ifnum\noftabulatelines=\zerocount
- \gdef\dotabulateautoline{\tabulatenoalign{}}%
- \else\ifnum\noftabulatelines=\totalnoftabulatelines
- \gdef\dotabulateautoline{\tabulatenoalign{}}%
- \fi\fi
- \fi
- \dotabulatenobreak}%
- \dotabulateautoline
- \tabulatenoalign
- {\nobreak
- \ifx\dotabulateautoline\dotabulatelinerule\kern-\lineheight\fi
- \ifnum\noftabulatelines=\totalnoftabulatelines
- \@EA\dotabulatenobreak
- \else
- \@EA\allowbreak
- \fi}%
- \dotabulateautoline
- \tabulatenoalign
- {\dotabulatenobreak}}
-
-\def\tabulateFLfive{\tabulatenoalign
- {\ifinsidefloat\else
- \doifemptyvalue{\??tt\currenttabulate\c!before} % no expansion
- {\tabulatebaselinecorrection}%
- \fi
- \dotabulaterule
- \dotabulatenobreak
- \dotabulateruleseperator
- \prevdepth\strutdp
- \dotabulatenobreak}}
-
% see ***
%
% \enabletrackers[nodes.page_vspacing]
@@ -1367,39 +1402,116 @@
% \starttabulate[||] \dorecurse{100}{\NC Eins \NC \NR \HL} \stoptabulate
% \stoptext
-\def\tabulateMLfive{\tabulatenoalign % this will be redone in the mkiv spacing way
- {\dotabulateruleseperator
- \dotabulaterule
-% \ifnum\noftabulatelines>\plusone
-% \ifnum\noftabulatelines<\minusnoftabulatelines
-% % *** somehow topskip messes up as it's intercepted
-% % \vskip \topskip\allowbreak\vskip- \topskip
-% % messy anyhow so this needs to be improved, so for
-% % the moment we keep this bugged variant
-% \vskip1\topskip
-% \allowbreak
-% \vskip-1\topskip
- \vskip-\tabulateparameter\c!rulethickness
- \dotabulaterule
-\nobreak
-% \fi
-% \fi
- \dotabulateruleseperator}}
-
-\def\tabulateLLfive{\tabulatenoalign
+% old crap:
+%
+% \ifnum\noftabulatelines>\plusone
+% \ifnum\noftabulatelines<\minusnoftabulatelines
+% % *** somehow topskip messes up as it's intercepted
+% % \vskip \topskip\allowbreak\vskip- \topskip
+% % messy anyhow so this needs to be improved, so for
+% % the moment we keep this bugged variant
+% \vskip1\topskip
+% \allowbreak
+% \vskip-1\topskip
+
+\def\tabulateXXnone{\starttabulatenoalign\tabulateignorehrulespec\stoptabulatenoalign}
+
+\def\tabulateFLfive{\starttabulatenoalign\tabulatepickuphrulespec\dotabulateFLfive}
+\def\tabulateMLfive{\starttabulatenoalign\tabulatepickuphrulespec\dotabulateMLfive}
+\def\tabulateLLfive{\starttabulatenoalign\tabulatepickuphrulespec\dotabulateLLfive}
+\def\tabulateTLfive{\starttabulatenoalign\tabulatepickuphrulespec\dotabulateTLfive}
+\def\tabulateBLfive{\starttabulatenoalign\tabulatepickuphrulespec\dotabulateBLfive}
+%def\tabulateHLfive{\starttabulatenoalign\tabulatepickuphrulespec\dotabulateHLfive}
+
+% we can use one common spec: (saves 4 macros)
+%
+% \def\dotabulatecheckhrulespec#1%
+% {\iftrialtypesetting
+% \expandafter\tabulateignorehrulespec\expandafter\stoptabulatenoalign
+% \else
+% \expandafter\tabulatepickuphrulespec\expandafter\dotabulateFLfive
+% \fi}
+
+\unexpanded\def\dotabulateFLfive
+ {\ifinsidefloat\else
+ \doifemptyvalue{\??tt\currenttabulate\c!before} % no expansion
+ {\tabulatebaselinecorrection}%
+ \fi
+ \dotabulatehrule
+ \dotabulatenobreak
+ \dotabulateruleseparator
+ \prevdepth\strutdp
+ \dotabulatenobreak
+ \stoptabulatenoalign}
+
+\unexpanded\def\dotabulateMLfive
+ {\dotabulateruleseparator
+ \dotabulatehrule
+ \vskip-\tabulateparameter\c!rulethickness
+ \dotabulatehrule
+ \nobreak
+ \dotabulateruleseparator
+ \stoptabulatenoalign}
+
+\unexpanded\def\dotabulateLLfive
{\dotabulatenobreak
- \dotabulateruleseperator
+ \dotabulateruleseparator
\dotabulatenobreak
- \dotabulaterule
+ \dotabulatehrule
\ifinsidefloat\else
\doifemptyvalue{\??tt\currenttabulate\c!after} % no expansion
{\vskip\strutdp
\verticalstrut
\vskip-\struttotal}%
- \fi}}
+ \fi
+ \stoptabulatenoalign}
+
+\unexpanded\def\dotabulateTLfive
+ {\dotabulatenobreak
+ \dotabulateruleseparator
+ \dotabulatenobreak
+ \dotabulatehrule
+ \dotabulatenobreak
+ \dotabulateruleseparator
+ %\prevdepth\strutdp % todo, might differ between TL and BL
+ \dotabulatenobreak
+ \stoptabulatenoalign}
+
+\let\dotabulateBLfive\dotabulateTLfive
\def\tabulateHLfive
- {\doHL\zerocount}
+ {\csname
+ \ifnum\noftabulatelines=\zerocount F\else
+ \ifnum\noftabulatelines=\totalnoftabulatelines L\else
+ M\fi\fi
+ L\endcsname}
+
+\def\tabulatedoHRfive % horizontal rule line (break untested)
+ {\starttabulatenoalign
+ \globallet\dotabulateautoline\dotabulatelinerule
+ %\ifcase#1\or % todo: check what this does
+ \ifnum\noftabulatelines=\zerocount
+ \glet\dotabulateautoline\donothing
+ \else\ifnum\noftabulatelines=\totalnoftabulatelines
+ \glet\dotabulateautoline\donothing
+ \fi\fi
+ %\fi
+ \dotabulatenobreak
+ \stoptabulatenoalign
+ \dotabulateautoline
+ \starttabulatenoalign
+ \nobreak
+ \ifx\dotabulateautoline\dotabulatelinerule\kern-\lineheight\fi
+ \ifnum\noftabulatelines=\totalnoftabulatelines
+ \@EA\dotabulatenobreak
+ \else
+ \@EA\allowbreak
+ \fi
+ \stoptabulatenoalign
+ \dotabulateautoline
+ \starttabulatenoalign
+ \dotabulatenobreak
+ \stoptabulatenoalign}
% \dorecurse{10}{
% \starttabulate[|l|]
@@ -1420,22 +1532,12 @@
% \stoptabulate
% }
-\def\tabulateTLfive{\tabulatenoalign
- {\dotabulatenobreak
- \dotabulateruleseperator
- \dotabulatenobreak
- \dotabulaterule
- \dotabulatenobreak
- \dotabulateruleseperator
- %\prevdepth\strutdp % todo, might differ between TL and BL
- \dotabulatenobreak}}
-
-\let\tabulateBLfive\tabulateTLfive
+% to be redone
-\def\tabulaterule {\HR}% a rule with lineheight
-\def\tabulateline {\HL}% just a spaced rule
-\def\tabulateautorule{\doHR\plusone}%
-\def\tabulateautoline{\doHL\plusone}%
+\def\tabulaterule {\HR} % a rule with lineheight
+\def\tabulateline {\HL} % just a spaced rule
+\def\tabulateautorule{\HR}%
+\def\tabulateautoline{\HL} % no longer different (to be looked into)
% When support for vertical rules was added we needed a way to pick up the
% specification for the final rule and a \type {|{}} interface was
@@ -1449,6 +1551,9 @@
{\tabulateunit\tabulateparameter\c!unit
\checkfulltabulatecontent
\defaulttabulatevrulethickness\tabulateparameter\c!rulethickness
+ \defaulttabulatehrulethickness\tabulateparameter\c!rulethickness
+ \edef\defaulttabulatevrulecolor{\tabulateparameter\c!rulecolor}%
+ \edef\defaulttabulatehrulecolor{\tabulateparameter\c!rulecolor}%
\edef\@@tabulatealign{\executeifdefined{\??tt:\c!align:\tabulateparameter\c!align}0}%
\tabulateposskip.5\tabulateunit
\tabulatepreskip\zeropoint
@@ -1483,22 +1588,23 @@
\let\NG\tabulateNGone
\let\NN\tabulateNNone
\let\ND\tabulateNDone
- \let\HR\tabulateHRone
- \let\HL\tabulateHLone
\let\NR\tabulateNRone
\let\CC\tabulateCCone
\let\CL\tabulateCLone
\let\CM\tabulateCMone
\let\CR\tabulateCRone
- \let\HL\empty % not needed ? ? ?
- \let\SR\NR \let\AR\NR
- \let\FL\empty \let\FR\NR
- \let\ML\empty \let\MR\NR
- \let\LL\empty \let\LR\NR
- \let\TL\empty
- \let\BL\empty
- \let\doHR\gobbleoneargument
- \let\doHL\gobbleoneargument
+ \let\SR\NR
+ \let\FR\NR
+ \let\MR\NR
+ \let\LR\NR
+ \let\AR\NR
+ \let\FL\tabulateXXnone
+ \let\ML\tabulateXXnone
+ \let\LL\tabulateXXnone
+ \let\TL\tabulateXXnone
+ \let\BL\tabulateXXnone
+ \let\HL\tabulateXXnone
+ \let\HR\tabulateXXnone
\glet\flushtabulated\empty
\tabskip\zeropoint
\ifdim\tabulateparameter\c!margin>\zeropoint
@@ -1560,18 +1666,17 @@
\totalnoftabulatelines\noftabulatelines
\minusnoftabulatelines\numexpr\noftabulatelines+\minusone\relax
\global\noftabulatelines\zerocount
- \let\doHL\tabulatedoHLfour
- \let\doHR\tabulatedoHRfour
- \doifelsevalue{\??tt\currenttabulate\c!rule}\v!line
- {\let\HL\HR
- \let\tabulateautoline\tabulateautorule
- \let\tabulateline\tabulaterule}%
- {\let\HL\tabulateHLfive}%
\let\FL\tabulateFLfive
\let\ML\tabulateMLfive
\let\LL\tabulateLLfive
\let\TL\tabulateTLfive
\let\BL\tabulateBLfive
+ \let\HL\tabulateHLfive
+ \let\HR\tabulateHRfive
+ \doifvalue{\??tt\currenttabulate\c!rule}\v!line
+ {\let\HL\HR
+ \let\tabulateautoline\tabulateautorule
+ \let\tabulateline\tabulaterule}%
\tabulatepass\plustwo
%
\ifcase\tabulaterepeathead
@@ -1590,7 +1695,7 @@
\@EA\halign\@EA{\the\tabulatepreamble\crcr\fulltabulatecontent\crcr}%
\dostoptagged
\dostoptagged
- \prevdepth\strutdp % nog eens beter, temporary hack
+ \ifhmode\par\prevdepth\strutdp\fi % to be improved later, temporary hack
\doifvalue{\??tt\currenttabulate\c!distance}\v!grid{\vskip-\strutdp}% experimental tm-prikkels
%
\ifcase\tabulaterepeathead
@@ -1660,7 +1765,7 @@
%D \NC tufte \NC \input tufte \NC \NR \tabulateautorule
%D \stoptabulate
%D \stoptyping
-
+
%D Spacing:
%
% \starttabulate
@@ -1672,7 +1777,7 @@
% \stoptabulate
\def\TabulateTB
- {\startTABLEnoalign
+ {\starttabulatenoalign
\dosingleempty\doTabulateTB}
\def\doTabulateTB[#1]%
@@ -1681,10 +1786,24 @@
\else
\blank
\fi
- \stopTABLEnoalign}
+ \stoptabulatenoalign}
+
+% to be tested:
+%
+% \def\TabulateTB
+% {\starttabulatenoalign
+% \doiffastoptionalcheckelse\doTabulateTByes\doTabulateTBnop}
+%
+% \def\doTabulateTByes[#1]%
+% {\blank[#1]
+% \stoptabulatenoalign}
+%
+% \def\doTabulateTBnop[#1]%
+% {\blank
+% \stoptabulatenoalign}
\appendtoks\let\TB\TabulateTB\to\everytabulate
-
+
% \starttabulatie[|mc|]
% \NC \digits{100.000,00} \NC\NR
% \NC \digits{@10.000,00} \NC\NR
@@ -1724,7 +1843,7 @@
% \NC \digits $@@@.@10,@@$ \NC\NR
% \NC \digits $@@@.@@1,@@$ \NC\NR
% \stoptabulatie
-
+
\unexpanded\def\setuptabulate
{\dotripleempty\dosetuptabulate}
diff --git a/tex/context/base/task-ini.lua b/tex/context/base/task-ini.lua
index 5af225bf5..c7f6d5794 100644
--- a/tex/context/base/task-ini.lua
+++ b/tex/context/base/task-ini.lua
@@ -112,3 +112,27 @@ tasks.disableaction("math", "noads.handlers.tags")
callbacks.freeze("find_.*_file", "find file using resolver")
callbacks.freeze("read_.*_file", "read file at once")
callbacks.freeze("open_.*_file", "open file for reading")
+
+-- experimental:
+
+tasks.freezegroup("processors", "normalizers")
+tasks.freezegroup("processors", "characters")
+tasks.freezegroup("processors", "words")
+tasks.freezegroup("processors", "fonts")
+tasks.freezegroup("processors", "lists")
+
+tasks.freezegroup("finalizers", "normalizers")
+tasks.freezegroup("finalizers", "fonts")
+tasks.freezegroup("finalizers", "lists")
+
+tasks.freezegroup("shipouts", "normalizers")
+tasks.freezegroup("shipouts", "finishers")
+
+tasks.freezegroup("mvlbuilders", "normalizers")
+tasks.freezegroup("vboxbuilders", "normalizers")
+
+--~ tasks.freezegroup("parbuilders", "lists")
+--~ tasks.freezegroup("pagebuilders", "lists")
+
+tasks.freezegroup("math", "normalizers")
+tasks.freezegroup("math", "builders")
diff --git a/tex/context/base/util-seq.lua b/tex/context/base/util-seq.lua
index 7ce2cd345..9f7cb4ca2 100644
--- a/tex/context/base/util-seq.lua
+++ b/tex/context/base/util-seq.lua
@@ -15,6 +15,8 @@ use locals to refer to them when compiling the chain.</p>
-- todo: delayed: i.e. we register them in the right order already but delay usage
+-- todo: protect groups (as in tasks)
+
local format, gsub, concat, gmatch = string.format, string.gsub, table.concat, string.gmatch
local type, loadstring = type, loadstring
@@ -23,15 +25,19 @@ local tables = utilities.tables
local sequencers = { }
utilities.sequencers = sequencers
+local functions = { }
+sequencers.functions = functions
local removevalue, insertaftervalue, insertbeforevalue = tables.removevalue, tables.insertaftervalue, tables.insertbeforevalue
local function validaction(action)
- local g = _G
- for str in gmatch(action,"[^%.]+") do
- g = g[str]
- if not g then
- return false
+ if type(action) == "string" then
+ local g = _G
+ for str in gmatch(action,"[^%.]+") do
+ g = g[str]
+ if not g then
+ return false
+ end
end
end
return true
@@ -39,38 +45,37 @@ end
function sequencers.reset(t)
local s = {
- list = { },
- order = { },
- kind = { },
- askip = { },
- gskip = { },
+ list = { },
+ order = { },
+ kind = { },
+ askip = { },
+ gskip = { },
+ dirty = true,
+ runner = nil,
}
if t then
s.arguments = t.arguments
s.returnvalues = t.returnvalues
s.results = t.results
end
- s.dirty = true
return s
end
function sequencers.prependgroup(t,group,where)
if t then
- local list, order = t.list, t.order
+ local order = t.order
removevalue(order,group)
insertbeforevalue(order,where,group)
- list[group] = { }
- t.dirty = true
+ t.list[group], t.dirty, t.runner = { }, true, nil
end
end
function sequencers.appendgroup(t,group,where)
if t then
- local list, order = t.list, t.order
+ local order = t.order
removevalue(order,group)
insertaftervalue(order,where,group)
- list[group] = { }
- t.dirty = true
+ t.list[group], t.dirty, t.runner = { }, true, nil
end
end
@@ -80,8 +85,7 @@ function sequencers.prependaction(t,group,action,where,kind,force)
if g and (force or validaction(action)) then
removevalue(g,action)
insertbeforevalue(g,where,action)
- t.kind[action] = kind
- t.dirty = true
+ t.kind[action], t.dirty, t.runner = kind, true, nil
end
end
end
@@ -92,21 +96,38 @@ function sequencers.appendaction(t,group,action,where,kind,force)
if g and (force or validaction(action)) then
removevalue(g,action)
insertaftervalue(g,where,action)
- t.kind[action] = kind
- t.dirty = true
+ t.kind[action], t.dirty, t.runner = kind, true, nil
end
end
end
-function sequencers.enableaction (t,action) if t then t.dirty = true t.askip[action] = false end end
-function sequencers.disableaction(t,action) if t then t.dirty = true t.askip[action] = true end end
-function sequencers.enablegroup (t,group) if t then t.dirty = true t.gskip[group] = false end end
-function sequencers.disablegroup (t,group) if t then t.dirty = true t.gskip[group] = true end end
+function sequencers.enableaction (t,action)
+ if t then
+ t.askip[action], t.dirty, t.runner = false, true, nil
+ end
+end
+
+function sequencers.disableaction(t,action)
+ if t then
+ t.askip[action], t.dirty, t.runner = true, true, nil
+ end
+end
+
+function sequencers.enablegroup(t,group)
+ if t then
+ t.gskip[group], t.dirty, t.runner = false, true, nil
+ end
+end
+
+function sequencers.disablegroup(t,group)
+ if t then
+ t.gskip[group], t.dirty, t.runner = true, true, nil
+ end
+end
function sequencers.setkind(t,action,kind)
if t then
- t.kind[action] = kind
- t.dirty = true
+ t.kind[action], t.dirty, t.runner = kind, true, nil
end
end
@@ -114,12 +135,12 @@ function sequencers.removeaction(t,group,action,force)
local g = t and t.list[group]
if g and (force or validaction(action)) then
removevalue(g,action)
- t.dirty = true
+ t.dirty, t.runner = true, nil
end
end
local function localize(str)
- return (gsub(str,"%.","_"))
+ return (gsub(str,"[%.: ]+","_"))
end
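The wider character class matters because, with the change below, an action can also be a function whose generated name comes from tostring(function ...), which contains a colon and a space. A quick illustration (localize is local to the module, so this sketch simply repeats the one-liner):

    local gsub = string.gsub

    local function localize(str)
        return (gsub(str, "[%.: ]+", "_"))
    end

    print(localize("noads.handlers.tags"))  --> noads_handlers_tags
    print(localize("function: 0x0012abcd")) --> function_0x0012abcd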
local function construct(t,nodummy)
@@ -133,6 +154,11 @@ local function construct(t,nodummy)
for i=1,#actions do
local action = actions[i]
if not askip[action] then
+ if type(action) == "function" then
+ local name = localize(tostring(action))
+ functions[name] = action
+ action = format("utilities.sequencers.functions.%s",name)
+ end
local localized = localize(action)
n = n + 1
variables[n] = format("local %s = %s",localized,action)
@@ -154,25 +180,39 @@ local function construct(t,nodummy)
variables = concat(variables,"\n")
calls = concat(calls,"\n")
if results then
- return format("%s\nreturn function(%s)\n%s\nreturn %s\nend",variables,arguments,calls,results)
+ t.compiled = format("%s\nreturn function(%s)\n%s\nreturn %s\nend",variables,arguments,calls,results)
else
- return format("%s\nreturn function(%s)\n%s\nend",variables,arguments,calls)
+ t.compiled = format("%s\nreturn function(%s)\n%s\nend",variables,arguments,calls)
end
+ return t.compiled -- also stored so that we can trace
end
end
sequencers.tostring = construct
sequencers.localize = localize
-function sequencers.compile(t,compiler,n)
+local function compile(t,compiler,n)
if not t or type(t) == "string" then
- -- already compiled
+ t.compiled = t
elseif compiler then
- t = compiler(t,n)
+ t.compiled = compiler(t,n)
else
- t = construct(t)
+ t.compiled = construct(t)
+ end
+ local runner = loadstring(t.compiled)()
+ t.runner = runner
+ return runner -- faster
+end
+
+sequencers.compile = compile
+
+function sequencers.autocompile(t,compiler,n) -- to be used in tasks
+ t.runner = compile(t,compiler,n)
+ local autorunner = function(...)
+ return (t.runner or compile(t,compiler,n))(...) -- ugly but less bytecode
end
- return loadstring(t)()
+ t.autorunner = autorunner
+ return autorunner -- one more encapsulation
end
-- we used to deal with tail as well but now that the lists are always
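Taken together, a hypothetical use of this sequencer API (the group name and the actions are made up; in ConTeXt the table normally comes from the task machinery):

    local sequencers = utilities.sequencers

    local chain = sequencers.reset { arguments = "str" }
    sequencers.appendgroup(chain, "report")
    sequencers.appendaction(chain, "report", function(str) print("seen: " .. str) end)

    local run = sequencers.autocompile(chain)
    run("hello") -- compiles once, caches chain.runner, prints: seen: hello

    -- appending, enabling or disabling an action nils chain.runner, so the
    -- next call through the autorunner recompiles the chain transparently
    sequencers.appendaction(chain, "report", function(str) print("also: " .. str) end)
    run("hello") -- recompiles, then prints both lines

    -- print(chain.compiled) -- the generated chunk is kept around for tracing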
@@ -219,9 +259,9 @@ function sequencers.nodeprocessor(t,nofarguments) -- todo: handle 'kind' in plug
vars[n] = format("local %s = %s",localized,action)
-- only difference with tostring is kind and rets (why no return)
if kind[action] == "nohead" then
- calls[n] = format(" ok = %s(head%s) done = done or ok -- %s %i",localized,args,group,i)
+ calls[n] = format(" ok = %s(head%s) done = done or ok",localized,args)
else
- calls[n] = format(" head, ok = %s(head%s) done = done or ok -- %s %i",localized,args,group,i)
+ calls[n] = format(" head, ok = %s(head%s) done = done or ok",localized,args)
end
end
end
diff --git a/tex/generic/context/luatex-fonts-merged.lua b/tex/generic/context/luatex-fonts-merged.lua
index 6897f2151..6224e159c 100644
--- a/tex/generic/context/luatex-fonts-merged.lua
+++ b/tex/generic/context/luatex-fonts-merged.lua
@@ -1,6 +1,6 @@
-- merged file : luatex-fonts-merged.lua
-- parent file : luatex-fonts.lua
--- merge date : 11/27/10 14:27:23
+-- merge date : 12/01/10 15:28:31
do -- begin closure to overcome local limits and interference
@@ -86,7 +86,7 @@ local patterns_escapes = {
["."] = "%.",
["+"] = "%+", ["-"] = "%-", ["*"] = "%*",
["["] = "%[", ["]"] = "%]",
- ["("] = "%)", [")"] = "%)",
+ ["("] = "%(", [")"] = "%)",
-- ["{"] = "%{", ["}"] = "%}"
-- ["^"] = "%^", ["$"] = "%$",
}
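This one-character fix matters for anything that escapes literal strings into Lua patterns: before it, "(" was escaped to "%)" and the resulting pattern was broken. A small sketch with a local copy of the table (the escaped helper is invented for illustration, not the library function):

    local escapes = {
        ["."] = "%.", ["+"] = "%+", ["-"] = "%-", ["*"] = "%*",
        ["["] = "%[", ["]"] = "%]", ["("] = "%(", [")"] = "%)",
    }

    local function escaped(str)
        return (string.gsub(str, "[%.%+%-%*%[%]%(%)]", escapes))
    end

    print(escaped("f(x)*2")) --> f%(x%)%*2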
@@ -151,6 +151,7 @@ local patterns = lpeg.patterns
local P, R, S, V, match = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.match
local Ct, C, Cs, Cc, Cf, Cg = lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc, lpeg.Cf, lpeg.Cg
+local lpegtype = lpeg.type
local utfcharacters = string.utfcharacters
local utfgmatch = unicode and unicode.utf8.gmatch
@@ -167,7 +168,6 @@ patterns.alwaysmatched = alwaysmatched
local digit, sign = R('09'), S('+-')
local cr, lf, crlf = P("\r"), P("\n"), P("\r\n")
local newline = crlf + cr + lf
-local utf8next = R("\128\191")
local escaped = P("\\") * anything
local squote = P("'")
local dquote = P('"')
@@ -188,6 +188,8 @@ local utftype = utfbom_32_be / "utf-32-be" + utfbom_32_le / "utf-32-le
+ utfbom_16_be / "utf-16-be" + utfbom_16_le / "utf-16-le"
+ utfbom_8 / "utf-8" + alwaysmatched / "unknown"
+local utf8next = R("\128\191")
+
patterns.utf8one = R("\000\127")
patterns.utf8two = R("\194\223") * utf8next
patterns.utf8three = R("\224\239") * utf8next * utf8next
@@ -418,19 +420,25 @@ end
-- Just for fun I looked at the used bytecode and
-- p = (p and p + pp) or pp gets one more (testset).
-function lpeg.replacer(t)
- if #t > 0 then
- local p
- for i=1,#t do
- local ti= t[i]
- local pp = P(ti[1]) / ti[2]
- if p then
- p = p + pp
- else
- p = pp
+function lpeg.replacer(one,two)
+ if type(one) == "table" then
+ local no = #one
+ if no > 0 then
+ local p
+ for i=1,no do
+ local o = one[i]
+ local pp = P(o[1]) / o[2]
+ if p then
+ p = p + pp
+ else
+ p = pp
+ end
end
+ return Cs((p + 1)^0)
end
- return Cs((p + 1)^0)
+ else
+ two = two or ""
+ return Cs((P(one)/two + 1)^0)
end
end
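The rewritten replacer keeps the old table form and adds a convenient (from, to) form. A sketch, assuming a Lua instance in which l-lpeg.lua has already extended the lpeg table:

    local escape = lpeg.replacer { { "<", "&lt;" }, { ">", "&gt;" } } -- table form, as before
    print(lpeg.match(escape, "<b>"))   --> &lt;b&gt;

    local dashed = lpeg.replacer(".", "-") -- new two-argument form, plain strings
    print(lpeg.match(dashed, "1.2.3")) --> 1-2-3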
@@ -655,6 +663,10 @@ function lpeg.oneof(list,...) -- lpeg.oneof("elseif","else","if","then")
return p
end
+function lpeg.is_lpeg(p)
+ return p and lpegtype(p) == "pattern"
+end
+
end -- closure
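The new lpeg.is_lpeg predicate is a thin, nil-safe wrapper around lpeg.type; for example (again assuming the extended lpeg table):

    print(lpeg.is_lpeg(lpeg.P("x"))) --> true
    print(lpeg.is_lpeg("x"))         --> false
    print(lpeg.is_lpeg(nil))         --> nil (still falsy)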
do -- begin closure to overcome local limits and interference
@@ -2057,6 +2069,9 @@ local separator = P("://")
local qualified = P(".")^0 * P("/") + letter*P(":") + letter^1*separator + letter^1 * P("/")
local rootbased = P("/") + letter*P(":")
+lpeg.patterns.qualified = qualified
+lpeg.patterns.rootbased = rootbased
+
-- ./name ../name /name c: :// name/name
function file.is_qualified_path(filename)