47 files changed, 1555 insertions, 583 deletions
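Most of the per-script hunks that follow are a mechanical rename: callers move from file.extname to file.suffix, and l-file now carries the helper as file.suffixonly, keeping file.extname only as an obsolete alias and adding file.pathpart next to file.dirname. A minimal sketch of the renamed helper, copied in spirit from the l-file hunk below (illustration only, not part of the patch):

    -- the suffix is whatever follows the last dot that contains no path separator
    local match = string.match

    local function suffixonly(name, default)
        return match(name, "^.+%.([^/\\]-)$") or default or ""
    end

    print(suffixonly("foo/bar.tex"))      -- tex
    print(suffixonly("foo/bar", "none"))  -- none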
diff --git a/scripts/context/lua/mtx-context.lua b/scripts/context/lua/mtx-context.lua index c5f09c2c7..677b580ea 100644 --- a/scripts/context/lua/mtx-context.lua +++ b/scripts/context/lua/mtx-context.lua @@ -990,7 +990,7 @@ function scripts.context.purge(all,pattern,mkiitoo) local deleted = { } for i=1,#files do local name = files[i] - local suffix = file.extname(name) + local suffix = file.suffix(name) local basename = file.basename(name) if obsolete[suffix] or temporary[suffix] or persistent[suffix] or generic[basename] then deleted[#deleted+1] = purge_file(name) diff --git a/scripts/context/lua/mtx-convert.lua b/scripts/context/lua/mtx-convert.lua index b4e6e010b..04ff38aad 100644 --- a/scripts/context/lua/mtx-convert.lua +++ b/scripts/context/lua/mtx-convert.lua @@ -83,7 +83,7 @@ function converters.convertpath(inputpath,outputpath) inputpath = inputpath or "." outputpath = outputpath or "." for name in lfs.dir(inputpath) do - local suffix = file.extname(name) + local suffix = file.suffix(name) if find(name,"%.$") then -- skip . and .. elseif converters[suffix] then @@ -102,7 +102,7 @@ function converters.convertpath(inputpath,outputpath) end function converters.convertfile(oldname) - local suffix = file.extname(oldname) + local suffix = file.suffix(oldname) if converters[suffix] then local newname = file.replacesuffix(oldname,"pdf") if oldname == newname then diff --git a/scripts/context/lua/mtx-epub.lua b/scripts/context/lua/mtx-epub.lua index 52a144c20..28a37fec2 100644 --- a/scripts/context/lua/mtx-epub.lua +++ b/scripts/context/lua/mtx-epub.lua @@ -136,7 +136,7 @@ local coverxhtml = [[ local function dumbid(filename) -- return (string.gsub(os.uuid(),"%-%","")) -- to be tested - return file.nameonly(filename) .. "-" .. file.extname(filename) + return file.nameonly(filename) .. "-" .. 
file.suffix(filename) end local mimetypes = { @@ -275,7 +275,7 @@ function scripts.epub.make() for k, v in table.sortedpairs(images) do theimages[#theimages+1] = k - if not lfs.isfile(k) and file.extname(k) == "svg" and file.extname(v) == "pdf" then + if not lfs.isfile(k) and file.suffix(k) == "svg" and file.suffix(v) == "pdf" then local command = format("inkscape --export-plain-svg=%s %s",k,v) application.report("running command '%s'\n\n",command) os.execute(command) @@ -284,7 +284,7 @@ function scripts.epub.make() copythem(theimages) - local idmaker = idmakers[file.extname(root)] or idmakers.default + local idmaker = idmakers[file.suffix(root)] or idmakers.default container = format(container, epubroot diff --git a/scripts/context/lua/mtx-fonts.lua b/scripts/context/lua/mtx-fonts.lua index 8d4280fa0..31ee18ce9 100644 --- a/scripts/context/lua/mtx-fonts.lua +++ b/scripts/context/lua/mtx-fonts.lua @@ -367,7 +367,7 @@ function scripts.fonts.save() if name and name ~= "" then local filename = resolvers.findfile(name) -- maybe also search for opentype if filename and filename ~= "" then - local suffix = string.lower(file.extname(filename)) + local suffix = string.lower(file.suffix(filename)) if suffix == 'ttf' or suffix == 'otf' or suffix == 'ttc' or suffix == "dfont" then local fontinfo = fontloader.info(filename) if fontinfo then diff --git a/scripts/context/lua/mtx-metapost.lua b/scripts/context/lua/mtx-metapost.lua index 3b9ed6ff1..44cf8205d 100644 --- a/scripts/context/lua/mtx-metapost.lua +++ b/scripts/context/lua/mtx-metapost.lua @@ -49,7 +49,7 @@ local tempname = "mptopdf-temp.tex" local function do_convert(filename) if find(filename,".%d+$") or find(filename,"%.mps$") then io.savedata(tempname,format(template,filename)) - local resultname = format("%s-%s.pdf",file.nameonly(filename),file.extname(filename)) + local resultname = format("%s-%s.pdf",file.nameonly(filename),file.suffix(filename)) local result = os.execute(format([[context --once --batch --purge --result=%s "%s"]],resultname,tempname)) return lfs.isfile(resultname) and resultname end diff --git a/scripts/context/lua/mtx-server.lua b/scripts/context/lua/mtx-server.lua index 1a688fb5d..a3771ed5a 100644 --- a/scripts/context/lua/mtx-server.lua +++ b/scripts/context/lua/mtx-server.lua @@ -30,7 +30,7 @@ dofile(resolvers.findfile("l-url.lua","tex")) dofile(resolvers.findfile("luat-soc.lua","tex")) local socket = socket or require("socket") -local http = socket or require("socket.http") -- not needed +local http = http or require("socket.http") -- not needed local format = string.format -- The following two lists are taken from webrick (ruby) and @@ -242,7 +242,7 @@ function handlers.lua(client,configuration,filename,suffix,iscontent,hashed) -- local action = handlers[suffix] or handlers.generic action(client,configuration,result.content,suffix,true) -- content elseif result.filename then - local suffix = file.extname(result.filename) or "text/html" + local suffix = file.suffix(result.filename) or "text/html" local action = handlers[suffix] or handlers.generic action(client,configuration,result.filename,suffix,false) -- filename else @@ -334,7 +334,7 @@ function scripts.webserver.run(configuration) filename = configuration.index report("invalid filename, forcing: %s",filename) end - local suffix = file.extname(filename) + local suffix = file.suffix(filename) local action = handlers[suffix] or handlers.generic if action then report("performing action: %s",filename) diff --git a/scripts/context/lua/mtx-tools.lua 
b/scripts/context/lua/mtx-tools.lua index 45961a639..c1aaf9e5d 100644 --- a/scripts/context/lua/mtx-tools.lua +++ b/scripts/context/lua/mtx-tools.lua @@ -102,7 +102,7 @@ end function scripts.tools.dirtoxml() - local join, removesuffix, extname, date = file.join, file.removesuffix, file.extname, os.date + local join, removesuffix, suffixonly, date = file.join, file.removesuffix, file.suffixonly, os.date local xmlns = "http://www.pragma-ade.com/rlg/xmldir.rng" local timestamp = "%Y-%m-%d %H:%M" diff --git a/scripts/context/lua/mtxrun.lua b/scripts/context/lua/mtxrun.lua index 413fc9f32..5f2ecc2ac 100644 --- a/scripts/context/lua/mtxrun.lua +++ b/scripts/context/lua/mtxrun.lua @@ -1178,9 +1178,8 @@ local byte, char, gmatch = string.byte, string.char, string.gmatch lpeg.patterns = lpeg.patterns or { } -- so that we can share local patterns = lpeg.patterns -local P, R, S, V, match = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.match -local Ct, C, Cs, Cc = lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc -local lpegtype = lpeg.type +local P, R, S, V, Ct, C, Cs, Cc = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc +local lpegtype, lpegmatch = lpeg.type, lpeg.match local utfcharacters = string.utfcharacters local utfgmatch = unicode and unicode.utf8.gmatch @@ -1284,8 +1283,17 @@ patterns.beginline = #(1-newline) -- print(string.unquoted('"test"')) -- print(string.unquoted('"test"')) -function lpeg.anywhere(pattern) --slightly adapted from website - return P { P(pattern) + 1 * V(1) } -- why so complex? +local function anywhere(pattern) --slightly adapted from website + return P { P(pattern) + 1 * V(1) } +end + +lpeg.anywhere = anywhere + +function lpeg.instringchecker(p) + p = anywhere(p) + return function(str) + return lpegmatch(p,str) and true or false + end end function lpeg.splitter(pattern, action) @@ -1334,7 +1342,7 @@ function string.splitup(str,separator) if not separator then separator = "," end - return match(splitters_m[separator] or splitat(separator),str) + return lpegmatch(splitters_m[separator] or splitat(separator),str) end @@ -1346,7 +1354,7 @@ function lpeg.split(separator,str) c = tsplitat(separator) cache[separator] = c end - return match(c,str) + return lpegmatch(c,str) end function string.split(str,separator) @@ -1356,7 +1364,7 @@ function string.split(str,separator) c = tsplitat(separator) cache[separator] = c end - return match(c,str) + return lpegmatch(c,str) else return { str } end @@ -1375,7 +1383,7 @@ local linesplitter = tsplitat(newline) patterns.linesplitter = linesplitter function string.splitlines(str) - return match(linesplitter,str) + return lpegmatch(linesplitter,str) end local utflinesplitter = utfbom^-1 * tsplitat(newline) @@ -1383,7 +1391,7 @@ local utflinesplitter = utfbom^-1 * tsplitat(newline) patterns.utflinesplitter = utflinesplitter function string.utfsplitlines(str) - return match(utflinesplitter,str or "") + return lpegmatch(utflinesplitter,str or "") end @@ -1397,7 +1405,7 @@ function lpeg.checkedsplit(separator,str) c = Ct(separator^0 * other * (separator^1 * other)^0) cache[separator] = c end - return match(c,str) + return lpegmatch(c,str) end function string.checkedsplit(str,separator) @@ -1408,7 +1416,7 @@ function string.checkedsplit(str,separator) c = Ct(separator^0 * other * (separator^1 * other)^0) cache[separator] = c end - return match(c,str) + return lpegmatch(c,str) end @@ -1519,7 +1527,7 @@ local nany = utf8char/"" function lpeg.counter(pattern) pattern = Cs((P(pattern)/" " + nany)^0) return function(str) - return #match(pattern,str) + return 
#lpegmatch(pattern,str) end end @@ -1533,7 +1541,7 @@ if utfgmatch then end return n else -- 4 times slower but still faster than / function - return #match(Cs((P(what)/" " + nany)^0),str) + return #lpegmatch(Cs((P(what)/" " + nany)^0),str) end end @@ -1548,9 +1556,9 @@ else p = Cs((P(what)/" " + nany)^0) cache[p] = p end - return #match(p,str) + return #lpegmatch(p,str) else -- 4 times slower but still faster than / function - return #match(Cs((P(what)/" " + nany)^0),str) + return #lpegmatch(Cs((P(what)/" " + nany)^0),str) end end @@ -1577,7 +1585,7 @@ local p = Cs((S("-.+*%()[]") / patterns_escapes + anything)^0) local s = Cs((S("-.+*%()[]") / simple_escapes + anything)^0) function string.escapedpattern(str,simple) - return match(simple and s or p,str) + return lpegmatch(simple and s or p,str) end -- utf extensies @@ -1624,7 +1632,7 @@ else p = P(uc) end end - match((utf8char/f)^0,str) + lpegmatch((utf8char/f)^0,str) return p end @@ -1640,7 +1648,7 @@ function lpeg.UR(str,more) first = str last = more or first else - first, last = match(range,str) + first, last = lpegmatch(range,str) if not last then return P(str) end @@ -1667,11 +1675,15 @@ end -function lpeg.oneof(list,...) -- lpeg.oneof("elseif","else","if","then") +function lpeg.is_lpeg(p) + return p and lpegtype(p) == "pattern" +end + +function lpeg.oneof(list,...) -- lpeg.oneof("elseif","else","if","then") -- assume proper order if type(list) ~= "table" then list = { list, ... } end - -- sort(list) -- longest match first + -- table.sort(list) -- longest match first local p = P(list[1]) for l=2,#list do p = p + P(list[l]) @@ -1679,10 +1691,6 @@ function lpeg.oneof(list,...) -- lpeg.oneof("elseif","else","if","then") return p end -function lpeg.is_lpeg(p) - return p and lpegtype(p) == "pattern" -end - -- For the moment here, but it might move to utilities. Beware, we need to -- have the longest keyword first, so 'aaa' comes beforte 'aa' which is why we -- loop back from the end cq. prepend. 
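Among the lpeg hunks just above, mtxrun.lua also gains lpeg.instringchecker, a boolean wrapper around lpeg.anywhere. A self-contained sketch of the same two functions plus an invented test pattern, to show what the new checker returns (illustration only):

    local lpeg = require("lpeg")

    -- anywhere(p) succeeds if p matches somewhere in the subject
    local function anywhere(pattern)
        return lpeg.P { lpeg.P(pattern) + 1 * lpeg.V(1) }
    end

    -- instringchecker(p) turns that into a plain true/false predicate
    local function instringchecker(p)
        p = anywhere(p)
        return function(str)
            return lpeg.match(p, str) and true or false
        end
    end

    local hasdigit = instringchecker(lpeg.R("09"))
    print(hasdigit("mkiv"))    -- false
    print(hasdigit("mkiv 2"))  -- true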
@@ -2791,7 +2799,7 @@ local function nameonly(name) return (gsub(match(name,"^.+[/\\](.-)$") or name,"%.[%a%d]+$","")) end -local function extname(name,default) +local function suffixonly(name,default) return match(name,"^.+%.([^/\\]-)$") or default or "" end @@ -2800,11 +2808,16 @@ local function splitname(name) return n or name, s or "" end -file.basename = basename -file.dirname = dirname -file.nameonly = nameonly -file.extname = extname -file.suffix = extname +file.basename = basename + +file.pathpart = dirname +file.dirname = dirname + +file.nameonly = nameonly + +file.suffixonly = suffixonly +file.extname = suffixonly -- obsolete +file.suffix = suffixonly function file.removesuffix(filename) return (gsub(filename,"%.[%a%d]+$","")) @@ -3215,7 +3228,7 @@ if not modules then modules = { } end modules ['l-url'] = { local char, gmatch, gsub, format, byte, find = string.char, string.gmatch, string.gsub, string.format, string.byte, string.find local concat = table.concat local tonumber, type = tonumber, type -local P, C, R, S, Cs, Cc, Ct = lpeg.P, lpeg.C, lpeg.R, lpeg.S, lpeg.Cs, lpeg.Cc, lpeg.Ct +local P, C, R, S, Cs, Cc, Ct, Cf, Cg, V = lpeg.P, lpeg.C, lpeg.R, lpeg.S, lpeg.Cs, lpeg.Cc, lpeg.Ct, lpeg.Cf, lpeg.Cg, lpeg.V local lpegmatch, lpegpatterns, replacer = lpeg.match, lpeg.patterns, lpeg.replacer -- from wikipedia: @@ -3248,15 +3261,19 @@ local endofstring = P(-1) local hexdigit = R("09","AF","af") local plus = P("+") local nothing = Cc("") -local escaped = (plus / " ") + (percent * C(hexdigit * hexdigit) / tochar) +local escapedchar = (percent * C(hexdigit * hexdigit)) / tochar +local escaped = (plus / " ") + escapedchar -- we assume schemes with more than 1 character (in order to avoid problems with windows disks) -- we also assume that when we have a scheme, we also have an authority +-- +-- maybe we should already split the query (better for unescaping as = & can be part of a value local schemestr = Cs((escaped+(1-colon-slash-qmark-hash))^2) local authoritystr = Cs((escaped+(1- slash-qmark-hash))^0) local pathstr = Cs((escaped+(1- qmark-hash))^0) -local querystr = Cs((escaped+(1- hash))^0) +----- querystr = Cs((escaped+(1- hash))^0) +local querystr = Cs(( (1- hash))^0) local fragmentstr = Cs((escaped+(1- endofstring))^0) local scheme = schemestr * colon + nothing @@ -3271,11 +3288,19 @@ local parser = Ct(validurl) lpegpatterns.url = validurl lpegpatterns.urlsplitter = parser -local escapes = { } ; for i=0,255 do escapes[i] = format("%%%02X",i) end +local escapes = { } + +setmetatable(escapes, { __index = function(t,k) + local v = format("%%%02X",byte(k)) + t[k] = v + return v +end }) -local escaper = Cs((R("09","AZ","az") + S("-./_") + P(1) / escapes)^0) +local escaper = Cs((R("09","AZ","az") + P(" ")/"%%20" + S("-./_") + P(1) / escapes)^0) -- space happens most +local unescaper = Cs((escapedchar + 1)^0) -lpegpatterns.urlescaper = escaper +lpegpatterns.urlescaper = escaper +lpegpatterns.urlunescaper = unescaper -- todo: reconsider Ct as we can as well have five return values (saves a table) -- so we can have two parsers, one with and one without @@ -3311,10 +3336,32 @@ local rootbased = P("/") local barswapper = replacer("|",":") local backslashswapper = replacer("\\","/") +-- queries: + +local equal = P("=") +local amp = P("&") +local key = Cs(((escapedchar+1)-equal )^0) +local value = Cs(((escapedchar+1)-amp -endofstring)^0) + +local splitquery = Cf ( Cc { } * P { "sequence", + sequence = V("pair") * (amp * V("pair"))^0, + pair = Cg(key * equal * value), +}, rawset) + +-- hasher 
+ local function hashed(str) -- not yet ok (/test?test) + if str == "" then + return { + scheme = "invalid", + original = str, + } + end local s = split(str) - local somescheme = s[1] ~= "" - local somequery = s[4] ~= "" + local rawscheme = s[1] + local rawquery = s[4] + local somescheme = rawscheme ~= "" + local somequery = rawquery ~= "" if not somescheme and not somequery then s = { scheme = "file", @@ -3330,14 +3377,17 @@ local function hashed(str) -- not yet ok (/test?test) local authority, path, filename = s[2], s[3] if authority == "" then filename = path + elseif path == "" then + filename = "" else filename = authority .. "/" .. path end s = { - scheme = s[1], + scheme = rawscheme, authority = authority, path = path, - query = s[4], + query = lpegmatch(unescaper,rawquery), -- unescaped, but possible conflict with & and = + queries = lpegmatch(splitquery,rawquery), -- split first and then unescaped fragment = s[5], original = str, noscheme = false, @@ -3347,6 +3397,8 @@ local function hashed(str) -- not yet ok (/test?test) return s end +-- inspect(hashed("template://test")) + -- Here we assume: -- -- files: /// = relative @@ -3389,23 +3441,65 @@ function url.construct(hash) -- dodo: we need to escape ! return lpegmatch(escaper,concat(fullurl)) end -function url.filename(filename) +function url.filename(filename) -- why no lpeg here ? local t = hashed(filename) return (t.scheme == "file" and (gsub(t.path,"^/([a-zA-Z])([:|])/)","%1:"))) or filename end +local function escapestring(str) + return lpegmatch(escaper,str) +end + +url.escape = escapestring + +-- function url.query(str) -- separator could be an option +-- if type(str) == "string" then +-- local t = { } +-- for k, v in gmatch(str,"([^&=]*)=([^&=]*)") do +-- t[k] = v +-- end +-- return t +-- else +-- return str +-- end +-- end + function url.query(str) if type(str) == "string" then - local t = { } - for k, v in gmatch(str,"([^&=]*)=([^&=]*)") do - t[k] = v - end - return t + return lpegmatch(splitquery,str) else return str end end +function url.toquery(data) + local td = type(data) + if td == "string" then + return #str and escape(data) or nil -- beware of double escaping + elseif td == "table" then + if next(data) then + local t = { } + for k, v in next, data do + t[#t+1] = format("%s=%s",k,escapestring(v)) + end + return concat(t,"&") + end + else + -- nil is a signal that no query + end +end + +-- /test/ | /test | test/ | test => test + +function url.barepath(path) + if not path or path == "" then + return "" + else + return (gsub(path,"^/?(.-)/?$","%1")) + end +end + + @@ -10946,7 +11040,7 @@ local gsub, find, gmatch, char = string.gsub, string.find, string.gmatch, string local concat = table.concat local next, type = next, type -local filedirname, filebasename, fileextname, filejoin = file.dirname, file.basename, file.extname, file.join +local filedirname, filebasename, filejoin = file.dirname, file.basename, file.join local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end) local trace_detail = false trackers.register("resolvers.details", function(v) trace_detail = v end) @@ -11655,7 +11749,7 @@ local resolvers = resolvers local allocate = utilities.storage.allocate local setmetatableindex = table.setmetatableindex -local fileextname = file.extname +local suffixonly = file.suffixonly local formats = allocate() local suffixes = allocate() @@ -11910,7 +12004,7 @@ function resolvers.formatofvariable(str) end function resolvers.formatofsuffix(str) -- of file - return 
suffixmap[fileextname(str)] or 'tex' -- so many map onto tex (like mkiv, cld etc) + return suffixmap[suffixonly(str)] or 'tex' -- so many map onto tex (like mkiv, cld etc) end function resolvers.variableofformat(str) @@ -11922,7 +12016,7 @@ function resolvers.variableofformatorsuffix(str) if v then return v end - v = suffixmap[fileextname(str)] + v = suffixmap[suffixonly(str)] if v then return formats[v] end @@ -12478,7 +12572,7 @@ local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns local filedirname = file.dirname local filebasename = file.basename -local fileextname = file.extname +local suffixonly = file.suffixonly local filejoin = file.join local collapsepath = file.collapsepath local joinpath = file.joinpath @@ -13408,7 +13502,7 @@ local preparetreepattern = Cs((P(".")/"%%." + P("-")/"%%-" + P(1))^0 * Cc("$")) local collect_instance_files local function find_analyze(filename,askedformat,allresults) - local filetype, wantedfiles, ext = '', { }, fileextname(filename) + local filetype, wantedfiles, ext = '', { }, suffixonly(filename) -- too tricky as filename can be bla.1.2.3: -- -- if not suffixmap[ext] then @@ -13486,7 +13580,7 @@ local function find_qualified(filename,allresults) -- this one will be split too if trace_detail then report_resolving("locating qualified file '%s'", filename) end - local forcedname, suffix = "", fileextname(filename) + local forcedname, suffix = "", suffixonly(filename) if suffix == "" then -- why local format_suffixes = askedformat == "" and resolvers.defaultsuffixes or suffixes[askedformat] if format_suffixes then @@ -15077,7 +15171,7 @@ end -- of closure do -- create closure to overcome 200 locals limit -if not modules then modules = { } end modules ['data-crl'] = { +if not modules then modules = { } end modules ['data-sch'] = { version = 1.001, comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", @@ -15085,60 +15179,199 @@ if not modules then modules = { } end modules ['data-crl'] = { license = "see context related readme files" } --- this one is replaced by data-sch.lua -- +local loadstring = loadstring +local gsub, concat, format = string.gsub, table.concat, string.format +local finders, openers, loaders = resolvers.finders, resolvers.openers, resolvers.loaders -local gsub = string.gsub +local trace_schemes = false trackers.register("resolvers.schemes",function(v) trace_schemes = v end) +local report_schemes = logs.reporter("resolvers","schemes") -local resolvers = resolvers +local http = require("socket.http") +local ltn12 = require("ltn12") -local finders, openers, loaders = resolvers.finders, resolvers.openers, resolvers.loaders +local resolvers = resolvers +local schemes = resolvers.schemes or { } +resolvers.schemes = schemes + +local cleaners = { } +schemes.cleaners = cleaners + +local threshold = 24 * 60 * 60 + +directives.register("schemes.threshold", function(v) threshold = tonumber(v) or threshold end) + +function cleaners.none(specification) + return specification.original +end + +function cleaners.strip(specification) + return (gsub(specification.original,"[^%a%d%.]+","-")) -- so we keep periods +end + +function cleaners.md5(specification) + return file.addsuffix(md5.hex(specification.original),file.suffix(specification.path)) +end + +local cleaner = cleaners.strip + +directives.register("schemes.cleanmethod", function(v) cleaner = cleaners[v] or cleaners.strip end) + +function resolvers.schemes.cleanname(specification) + local hash = cleaner(specification) + if trace_schemes then + 
report_schemes("hashing %s to %s",specification.original,hash) + end + return hash +end -resolvers.curl = resolvers.curl or { } -local curl = resolvers.curl +local cached, loaded, reused, thresholds, handlers = { }, { }, { }, { }, { } -local cached = { } +local function runcurl(name,cachename) -- we use sockets instead or the curl library when possible + local command = "curl --silent --create-dirs --output " .. cachename .. " " .. name + os.spawn(command) +end -local function runcurl(specification) +local function fetch(specification) local original = specification.original - -- local scheme = specification.scheme - local cleanname = gsub(original,"[^%a%d%.]+","-") - local cachename = caches.setfirstwritablefile(cleanname,"curl") + local scheme = specification.scheme + local cleanname = schemes.cleanname(specification) + local cachename = caches.setfirstwritablefile(cleanname,"schemes") if not cached[original] then - if not io.exists(cachename) then + statistics.starttiming(schemes) + if not io.exists(cachename) or (os.difftime(os.time(),lfs.attributes(cachename).modification) > (thresholds[protocol] or threshold)) then cached[original] = cachename - local command = "curl --silent --create-dirs --output " .. cachename .. " " .. original - os.spawn(command) + local handler = handlers[scheme] + if handler then + if trace_schemes then + report_schemes("fetching '%s', protocol '%s', method 'built-in'",original,scheme) + end + logs.flush() + handler(specification,cachename) + else + if trace_schemes then + report_schemes("fetching '%s', protocol '%s', method 'curl'",original,scheme) + end + logs.flush() + runcurl(original,cachename) + end end if io.exists(cachename) then cached[original] = cachename + if trace_schemes then + report_schemes("using cached '%s', protocol '%s', cachename '%s'",original,scheme,cachename) + end else cached[original] = "" + if trace_schemes then + report_schemes("using missing '%s', protocol '%s'",original,scheme) + end end + loaded[scheme] = loaded[scheme] + 1 + statistics.stoptiming(schemes) + else + if trace_schemes then + report_schemes("reusing '%s', protocol '%s'",original,scheme) + end + reused[scheme] = reused[scheme] + 1 end return cached[original] end --- old code: we could be cleaner using specification (see schemes) - local function finder(specification,filetype) - return resolvers.methodhandler("finders",runcurl(specification),filetype) + return resolvers.methodhandler("finders",fetch(specification),filetype) end local opener = openers.file local loader = loaders.file -local function install(scheme) - finders[scheme] = finder - openers[scheme] = opener - loaders[scheme] = loader +local function install(scheme,handler,newthreshold) + handlers [scheme] = handler + loaded [scheme] = 0 + reused [scheme] = 0 + finders [scheme] = finder + openers [scheme] = opener + loaders [scheme] = loader + thresholds[scheme] = newthreshold or threshold end -resolvers.curl.install = install +schemes.install = install -install('http') -install('https') +local function http_handler(specification,cachename) + local tempname = cachename .. 
".tmp" + local f = io.open(tempname,"wb") + local status, message = http.request { + url = specification.original, + sink = ltn12.sink.file(f) + } + if not status then + os.remove(tempname) + else + os.remove(cachename) + os.rename(tempname,cachename) + end + return cachename +end + +install('http',http_handler) +install('https') -- see pod install('ftp') +statistics.register("scheme handling time", function() + local l, r, nl, nr = { }, { }, 0, 0 + for k, v in table.sortedhash(loaded) do + if v > 0 then + nl = nl + 1 + l[nl] = k .. ":" .. v + end + end + for k, v in table.sortedhash(reused) do + if v > 0 then + nr = nr + 1 + r[nr] = k .. ":" .. v + end + end + local n = nl + nr + if n > 0 then + l = nl > 0 and concat(l) or "none" + r = nr > 0 and concat(r) or "none" + return format("%s seconds, %s processed, threshold %s seconds, loaded: %s, reused: %s", + statistics.elapsedtime(schemes), n, threshold, l, r) + else + return nil + end +end) + +-- We provide a few more helpers: + +----- http = require("socket.http") +local httprequest = http.request +local toquery = url.toquery + +-- local function httprequest(url) +-- return os.resultof(format("curl --silent %q", url)) +-- end + +local function fetchstring(url,data) + local q = data and toquery(data) + if q then + url = url .. "?" .. q + end + local reply = httprequest(url) + return reply -- just one argument +end + +schemes.fetchstring = fetchstring + +function schemes.fetchtable(url,data) + local reply = fetchstring(url,data) + if reply then + local s = loadstring("return " .. reply) + if s then + return s() + end + end +end + end -- of closure @@ -15939,7 +16172,7 @@ own.libs = { -- order can be made better -- 'data-bin.lua', 'data-zip.lua', 'data-tre.lua', - 'data-crl.lua', + 'data-sch.lua', 'data-lua.lua', 'data-aux.lua', -- updater 'data-tmf.lua', @@ -16238,7 +16471,8 @@ function runners.execute_script(fullname,internal,nosplit) elseif state == 'skip' then return true elseif state == "run" then - local path, name, suffix, result = file.dirname(fullname), file.basename(fullname), file.extname(fullname), "" + local path, name, suffix = file.splitname(fullname) + local result = "" if path ~= "" then result = fullname elseif name then @@ -16249,7 +16483,7 @@ function runners.execute_script(fullname,internal,nosplit) name = gsub(name,"^script:","") if suffix == "" and runners.registered[name] and runners.registered[name][1] then name = runners.registered[name][1] - suffix = file.extname(name) + suffix = file.suffix(name) end if suffix == "" then -- loop over known suffixes @@ -16276,7 +16510,7 @@ function runners.execute_script(fullname,internal,nosplit) environment.ownscript = result dofile(result) else - local binary = runners.applications[file.extname(result)] + local binary = runners.applications[file.suffix(result)] result = string.quoted(string.unquoted(result)) -- if string.match(result,' ') and not string.match(result,"^\".*\"$") then -- result = '"' .. result .. '"' @@ -16469,7 +16703,7 @@ function resolvers.launch(str) -- maybe we also need to test on mtxrun.launcher.suffix environment -- variable or on windows consult the assoc and ftype vars and such local launchers = runners.launchers[os.platform] if launchers then - local suffix = file.extname(str) if suffix then + local suffix = file.suffix(str) if suffix then local runner = launchers[suffix] if runner then str = runner .. " " .. 
str end @@ -16528,7 +16762,7 @@ function runners.find_mtx_script(filename) end filename = file.addsuffix(filename,"lua") local basename = file.removesuffix(file.basename(filename)) - local suffix = file.extname(filename) + local suffix = file.suffix(filename) -- qualified path, raw name local fullname = file.is_qualified_path(filename) and io.exists(filename) and filename if fullname and fullname ~= "" then @@ -16583,7 +16817,7 @@ function runners.execute_ctx_script(filename,...) runners.register_arguments(...) local arguments = environment.arguments_after local fullname = runners.find_mtx_script(filename) or "" - if file.extname(fullname) == "cld" then + if file.suffix(fullname) == "cld" then -- handy in editors where we force --autopdf report("running cld script: %s",filename) table.insert(arguments,1,fullname) @@ -16951,7 +17185,7 @@ elseif e_argument("find-path") then elseif e_argument("expand-braces") then - -- luatools: runners.execute_ctx_script("mtx-base","--expand-braces",filename + -- luatools: runners.execute_ctx_script("mtx-base","--expand-braces",filename) resolvers.load("nofiles") runners.register_arguments(filename) diff --git a/scripts/context/stubs/mswin/mtxrun.lua b/scripts/context/stubs/mswin/mtxrun.lua index 413fc9f32..5f2ecc2ac 100644 --- a/scripts/context/stubs/mswin/mtxrun.lua +++ b/scripts/context/stubs/mswin/mtxrun.lua @@ -1178,9 +1178,8 @@ local byte, char, gmatch = string.byte, string.char, string.gmatch lpeg.patterns = lpeg.patterns or { } -- so that we can share local patterns = lpeg.patterns -local P, R, S, V, match = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.match -local Ct, C, Cs, Cc = lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc -local lpegtype = lpeg.type +local P, R, S, V, Ct, C, Cs, Cc = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc +local lpegtype, lpegmatch = lpeg.type, lpeg.match local utfcharacters = string.utfcharacters local utfgmatch = unicode and unicode.utf8.gmatch @@ -1284,8 +1283,17 @@ patterns.beginline = #(1-newline) -- print(string.unquoted('"test"')) -- print(string.unquoted('"test"')) -function lpeg.anywhere(pattern) --slightly adapted from website - return P { P(pattern) + 1 * V(1) } -- why so complex? 
+local function anywhere(pattern) --slightly adapted from website + return P { P(pattern) + 1 * V(1) } +end + +lpeg.anywhere = anywhere + +function lpeg.instringchecker(p) + p = anywhere(p) + return function(str) + return lpegmatch(p,str) and true or false + end end function lpeg.splitter(pattern, action) @@ -1334,7 +1342,7 @@ function string.splitup(str,separator) if not separator then separator = "," end - return match(splitters_m[separator] or splitat(separator),str) + return lpegmatch(splitters_m[separator] or splitat(separator),str) end @@ -1346,7 +1354,7 @@ function lpeg.split(separator,str) c = tsplitat(separator) cache[separator] = c end - return match(c,str) + return lpegmatch(c,str) end function string.split(str,separator) @@ -1356,7 +1364,7 @@ function string.split(str,separator) c = tsplitat(separator) cache[separator] = c end - return match(c,str) + return lpegmatch(c,str) else return { str } end @@ -1375,7 +1383,7 @@ local linesplitter = tsplitat(newline) patterns.linesplitter = linesplitter function string.splitlines(str) - return match(linesplitter,str) + return lpegmatch(linesplitter,str) end local utflinesplitter = utfbom^-1 * tsplitat(newline) @@ -1383,7 +1391,7 @@ local utflinesplitter = utfbom^-1 * tsplitat(newline) patterns.utflinesplitter = utflinesplitter function string.utfsplitlines(str) - return match(utflinesplitter,str or "") + return lpegmatch(utflinesplitter,str or "") end @@ -1397,7 +1405,7 @@ function lpeg.checkedsplit(separator,str) c = Ct(separator^0 * other * (separator^1 * other)^0) cache[separator] = c end - return match(c,str) + return lpegmatch(c,str) end function string.checkedsplit(str,separator) @@ -1408,7 +1416,7 @@ function string.checkedsplit(str,separator) c = Ct(separator^0 * other * (separator^1 * other)^0) cache[separator] = c end - return match(c,str) + return lpegmatch(c,str) end @@ -1519,7 +1527,7 @@ local nany = utf8char/"" function lpeg.counter(pattern) pattern = Cs((P(pattern)/" " + nany)^0) return function(str) - return #match(pattern,str) + return #lpegmatch(pattern,str) end end @@ -1533,7 +1541,7 @@ if utfgmatch then end return n else -- 4 times slower but still faster than / function - return #match(Cs((P(what)/" " + nany)^0),str) + return #lpegmatch(Cs((P(what)/" " + nany)^0),str) end end @@ -1548,9 +1556,9 @@ else p = Cs((P(what)/" " + nany)^0) cache[p] = p end - return #match(p,str) + return #lpegmatch(p,str) else -- 4 times slower but still faster than / function - return #match(Cs((P(what)/" " + nany)^0),str) + return #lpegmatch(Cs((P(what)/" " + nany)^0),str) end end @@ -1577,7 +1585,7 @@ local p = Cs((S("-.+*%()[]") / patterns_escapes + anything)^0) local s = Cs((S("-.+*%()[]") / simple_escapes + anything)^0) function string.escapedpattern(str,simple) - return match(simple and s or p,str) + return lpegmatch(simple and s or p,str) end -- utf extensies @@ -1624,7 +1632,7 @@ else p = P(uc) end end - match((utf8char/f)^0,str) + lpegmatch((utf8char/f)^0,str) return p end @@ -1640,7 +1648,7 @@ function lpeg.UR(str,more) first = str last = more or first else - first, last = match(range,str) + first, last = lpegmatch(range,str) if not last then return P(str) end @@ -1667,11 +1675,15 @@ end -function lpeg.oneof(list,...) -- lpeg.oneof("elseif","else","if","then") +function lpeg.is_lpeg(p) + return p and lpegtype(p) == "pattern" +end + +function lpeg.oneof(list,...) -- lpeg.oneof("elseif","else","if","then") -- assume proper order if type(list) ~= "table" then list = { list, ... 
} end - -- sort(list) -- longest match first + -- table.sort(list) -- longest match first local p = P(list[1]) for l=2,#list do p = p + P(list[l]) @@ -1679,10 +1691,6 @@ function lpeg.oneof(list,...) -- lpeg.oneof("elseif","else","if","then") return p end -function lpeg.is_lpeg(p) - return p and lpegtype(p) == "pattern" -end - -- For the moment here, but it might move to utilities. Beware, we need to -- have the longest keyword first, so 'aaa' comes beforte 'aa' which is why we -- loop back from the end cq. prepend. @@ -2791,7 +2799,7 @@ local function nameonly(name) return (gsub(match(name,"^.+[/\\](.-)$") or name,"%.[%a%d]+$","")) end -local function extname(name,default) +local function suffixonly(name,default) return match(name,"^.+%.([^/\\]-)$") or default or "" end @@ -2800,11 +2808,16 @@ local function splitname(name) return n or name, s or "" end -file.basename = basename -file.dirname = dirname -file.nameonly = nameonly -file.extname = extname -file.suffix = extname +file.basename = basename + +file.pathpart = dirname +file.dirname = dirname + +file.nameonly = nameonly + +file.suffixonly = suffixonly +file.extname = suffixonly -- obsolete +file.suffix = suffixonly function file.removesuffix(filename) return (gsub(filename,"%.[%a%d]+$","")) @@ -3215,7 +3228,7 @@ if not modules then modules = { } end modules ['l-url'] = { local char, gmatch, gsub, format, byte, find = string.char, string.gmatch, string.gsub, string.format, string.byte, string.find local concat = table.concat local tonumber, type = tonumber, type -local P, C, R, S, Cs, Cc, Ct = lpeg.P, lpeg.C, lpeg.R, lpeg.S, lpeg.Cs, lpeg.Cc, lpeg.Ct +local P, C, R, S, Cs, Cc, Ct, Cf, Cg, V = lpeg.P, lpeg.C, lpeg.R, lpeg.S, lpeg.Cs, lpeg.Cc, lpeg.Ct, lpeg.Cf, lpeg.Cg, lpeg.V local lpegmatch, lpegpatterns, replacer = lpeg.match, lpeg.patterns, lpeg.replacer -- from wikipedia: @@ -3248,15 +3261,19 @@ local endofstring = P(-1) local hexdigit = R("09","AF","af") local plus = P("+") local nothing = Cc("") -local escaped = (plus / " ") + (percent * C(hexdigit * hexdigit) / tochar) +local escapedchar = (percent * C(hexdigit * hexdigit)) / tochar +local escaped = (plus / " ") + escapedchar -- we assume schemes with more than 1 character (in order to avoid problems with windows disks) -- we also assume that when we have a scheme, we also have an authority +-- +-- maybe we should already split the query (better for unescaping as = & can be part of a value local schemestr = Cs((escaped+(1-colon-slash-qmark-hash))^2) local authoritystr = Cs((escaped+(1- slash-qmark-hash))^0) local pathstr = Cs((escaped+(1- qmark-hash))^0) -local querystr = Cs((escaped+(1- hash))^0) +----- querystr = Cs((escaped+(1- hash))^0) +local querystr = Cs(( (1- hash))^0) local fragmentstr = Cs((escaped+(1- endofstring))^0) local scheme = schemestr * colon + nothing @@ -3271,11 +3288,19 @@ local parser = Ct(validurl) lpegpatterns.url = validurl lpegpatterns.urlsplitter = parser -local escapes = { } ; for i=0,255 do escapes[i] = format("%%%02X",i) end +local escapes = { } + +setmetatable(escapes, { __index = function(t,k) + local v = format("%%%02X",byte(k)) + t[k] = v + return v +end }) -local escaper = Cs((R("09","AZ","az") + S("-./_") + P(1) / escapes)^0) +local escaper = Cs((R("09","AZ","az") + P(" ")/"%%20" + S("-./_") + P(1) / escapes)^0) -- space happens most +local unescaper = Cs((escapedchar + 1)^0) -lpegpatterns.urlescaper = escaper +lpegpatterns.urlescaper = escaper +lpegpatterns.urlunescaper = unescaper -- todo: reconsider Ct as we can as well have five return 
values (saves a table) -- so we can have two parsers, one with and one without @@ -3311,10 +3336,32 @@ local rootbased = P("/") local barswapper = replacer("|",":") local backslashswapper = replacer("\\","/") +-- queries: + +local equal = P("=") +local amp = P("&") +local key = Cs(((escapedchar+1)-equal )^0) +local value = Cs(((escapedchar+1)-amp -endofstring)^0) + +local splitquery = Cf ( Cc { } * P { "sequence", + sequence = V("pair") * (amp * V("pair"))^0, + pair = Cg(key * equal * value), +}, rawset) + +-- hasher + local function hashed(str) -- not yet ok (/test?test) + if str == "" then + return { + scheme = "invalid", + original = str, + } + end local s = split(str) - local somescheme = s[1] ~= "" - local somequery = s[4] ~= "" + local rawscheme = s[1] + local rawquery = s[4] + local somescheme = rawscheme ~= "" + local somequery = rawquery ~= "" if not somescheme and not somequery then s = { scheme = "file", @@ -3330,14 +3377,17 @@ local function hashed(str) -- not yet ok (/test?test) local authority, path, filename = s[2], s[3] if authority == "" then filename = path + elseif path == "" then + filename = "" else filename = authority .. "/" .. path end s = { - scheme = s[1], + scheme = rawscheme, authority = authority, path = path, - query = s[4], + query = lpegmatch(unescaper,rawquery), -- unescaped, but possible conflict with & and = + queries = lpegmatch(splitquery,rawquery), -- split first and then unescaped fragment = s[5], original = str, noscheme = false, @@ -3347,6 +3397,8 @@ local function hashed(str) -- not yet ok (/test?test) return s end +-- inspect(hashed("template://test")) + -- Here we assume: -- -- files: /// = relative @@ -3389,23 +3441,65 @@ function url.construct(hash) -- dodo: we need to escape ! return lpegmatch(escaper,concat(fullurl)) end -function url.filename(filename) +function url.filename(filename) -- why no lpeg here ? 
local t = hashed(filename) return (t.scheme == "file" and (gsub(t.path,"^/([a-zA-Z])([:|])/)","%1:"))) or filename end +local function escapestring(str) + return lpegmatch(escaper,str) +end + +url.escape = escapestring + +-- function url.query(str) -- separator could be an option +-- if type(str) == "string" then +-- local t = { } +-- for k, v in gmatch(str,"([^&=]*)=([^&=]*)") do +-- t[k] = v +-- end +-- return t +-- else +-- return str +-- end +-- end + function url.query(str) if type(str) == "string" then - local t = { } - for k, v in gmatch(str,"([^&=]*)=([^&=]*)") do - t[k] = v - end - return t + return lpegmatch(splitquery,str) else return str end end +function url.toquery(data) + local td = type(data) + if td == "string" then + return #str and escape(data) or nil -- beware of double escaping + elseif td == "table" then + if next(data) then + local t = { } + for k, v in next, data do + t[#t+1] = format("%s=%s",k,escapestring(v)) + end + return concat(t,"&") + end + else + -- nil is a signal that no query + end +end + +-- /test/ | /test | test/ | test => test + +function url.barepath(path) + if not path or path == "" then + return "" + else + return (gsub(path,"^/?(.-)/?$","%1")) + end +end + + @@ -10946,7 +11040,7 @@ local gsub, find, gmatch, char = string.gsub, string.find, string.gmatch, string local concat = table.concat local next, type = next, type -local filedirname, filebasename, fileextname, filejoin = file.dirname, file.basename, file.extname, file.join +local filedirname, filebasename, filejoin = file.dirname, file.basename, file.join local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end) local trace_detail = false trackers.register("resolvers.details", function(v) trace_detail = v end) @@ -11655,7 +11749,7 @@ local resolvers = resolvers local allocate = utilities.storage.allocate local setmetatableindex = table.setmetatableindex -local fileextname = file.extname +local suffixonly = file.suffixonly local formats = allocate() local suffixes = allocate() @@ -11910,7 +12004,7 @@ function resolvers.formatofvariable(str) end function resolvers.formatofsuffix(str) -- of file - return suffixmap[fileextname(str)] or 'tex' -- so many map onto tex (like mkiv, cld etc) + return suffixmap[suffixonly(str)] or 'tex' -- so many map onto tex (like mkiv, cld etc) end function resolvers.variableofformat(str) @@ -11922,7 +12016,7 @@ function resolvers.variableofformatorsuffix(str) if v then return v end - v = suffixmap[fileextname(str)] + v = suffixmap[suffixonly(str)] if v then return formats[v] end @@ -12478,7 +12572,7 @@ local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns local filedirname = file.dirname local filebasename = file.basename -local fileextname = file.extname +local suffixonly = file.suffixonly local filejoin = file.join local collapsepath = file.collapsepath local joinpath = file.joinpath @@ -13408,7 +13502,7 @@ local preparetreepattern = Cs((P(".")/"%%." 
+ P("-")/"%%-" + P(1))^0 * Cc("$")) local collect_instance_files local function find_analyze(filename,askedformat,allresults) - local filetype, wantedfiles, ext = '', { }, fileextname(filename) + local filetype, wantedfiles, ext = '', { }, suffixonly(filename) -- too tricky as filename can be bla.1.2.3: -- -- if not suffixmap[ext] then @@ -13486,7 +13580,7 @@ local function find_qualified(filename,allresults) -- this one will be split too if trace_detail then report_resolving("locating qualified file '%s'", filename) end - local forcedname, suffix = "", fileextname(filename) + local forcedname, suffix = "", suffixonly(filename) if suffix == "" then -- why local format_suffixes = askedformat == "" and resolvers.defaultsuffixes or suffixes[askedformat] if format_suffixes then @@ -15077,7 +15171,7 @@ end -- of closure do -- create closure to overcome 200 locals limit -if not modules then modules = { } end modules ['data-crl'] = { +if not modules then modules = { } end modules ['data-sch'] = { version = 1.001, comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", @@ -15085,60 +15179,199 @@ if not modules then modules = { } end modules ['data-crl'] = { license = "see context related readme files" } --- this one is replaced by data-sch.lua -- +local loadstring = loadstring +local gsub, concat, format = string.gsub, table.concat, string.format +local finders, openers, loaders = resolvers.finders, resolvers.openers, resolvers.loaders -local gsub = string.gsub +local trace_schemes = false trackers.register("resolvers.schemes",function(v) trace_schemes = v end) +local report_schemes = logs.reporter("resolvers","schemes") -local resolvers = resolvers +local http = require("socket.http") +local ltn12 = require("ltn12") -local finders, openers, loaders = resolvers.finders, resolvers.openers, resolvers.loaders +local resolvers = resolvers +local schemes = resolvers.schemes or { } +resolvers.schemes = schemes + +local cleaners = { } +schemes.cleaners = cleaners + +local threshold = 24 * 60 * 60 + +directives.register("schemes.threshold", function(v) threshold = tonumber(v) or threshold end) + +function cleaners.none(specification) + return specification.original +end + +function cleaners.strip(specification) + return (gsub(specification.original,"[^%a%d%.]+","-")) -- so we keep periods +end + +function cleaners.md5(specification) + return file.addsuffix(md5.hex(specification.original),file.suffix(specification.path)) +end + +local cleaner = cleaners.strip + +directives.register("schemes.cleanmethod", function(v) cleaner = cleaners[v] or cleaners.strip end) + +function resolvers.schemes.cleanname(specification) + local hash = cleaner(specification) + if trace_schemes then + report_schemes("hashing %s to %s",specification.original,hash) + end + return hash +end -resolvers.curl = resolvers.curl or { } -local curl = resolvers.curl +local cached, loaded, reused, thresholds, handlers = { }, { }, { }, { }, { } -local cached = { } +local function runcurl(name,cachename) -- we use sockets instead or the curl library when possible + local command = "curl --silent --create-dirs --output " .. cachename .. " " .. 
name + os.spawn(command) +end -local function runcurl(specification) +local function fetch(specification) local original = specification.original - -- local scheme = specification.scheme - local cleanname = gsub(original,"[^%a%d%.]+","-") - local cachename = caches.setfirstwritablefile(cleanname,"curl") + local scheme = specification.scheme + local cleanname = schemes.cleanname(specification) + local cachename = caches.setfirstwritablefile(cleanname,"schemes") if not cached[original] then - if not io.exists(cachename) then + statistics.starttiming(schemes) + if not io.exists(cachename) or (os.difftime(os.time(),lfs.attributes(cachename).modification) > (thresholds[protocol] or threshold)) then cached[original] = cachename - local command = "curl --silent --create-dirs --output " .. cachename .. " " .. original - os.spawn(command) + local handler = handlers[scheme] + if handler then + if trace_schemes then + report_schemes("fetching '%s', protocol '%s', method 'built-in'",original,scheme) + end + logs.flush() + handler(specification,cachename) + else + if trace_schemes then + report_schemes("fetching '%s', protocol '%s', method 'curl'",original,scheme) + end + logs.flush() + runcurl(original,cachename) + end end if io.exists(cachename) then cached[original] = cachename + if trace_schemes then + report_schemes("using cached '%s', protocol '%s', cachename '%s'",original,scheme,cachename) + end else cached[original] = "" + if trace_schemes then + report_schemes("using missing '%s', protocol '%s'",original,scheme) + end end + loaded[scheme] = loaded[scheme] + 1 + statistics.stoptiming(schemes) + else + if trace_schemes then + report_schemes("reusing '%s', protocol '%s'",original,scheme) + end + reused[scheme] = reused[scheme] + 1 end return cached[original] end --- old code: we could be cleaner using specification (see schemes) - local function finder(specification,filetype) - return resolvers.methodhandler("finders",runcurl(specification),filetype) + return resolvers.methodhandler("finders",fetch(specification),filetype) end local opener = openers.file local loader = loaders.file -local function install(scheme) - finders[scheme] = finder - openers[scheme] = opener - loaders[scheme] = loader +local function install(scheme,handler,newthreshold) + handlers [scheme] = handler + loaded [scheme] = 0 + reused [scheme] = 0 + finders [scheme] = finder + openers [scheme] = opener + loaders [scheme] = loader + thresholds[scheme] = newthreshold or threshold end -resolvers.curl.install = install +schemes.install = install -install('http') -install('https') +local function http_handler(specification,cachename) + local tempname = cachename .. ".tmp" + local f = io.open(tempname,"wb") + local status, message = http.request { + url = specification.original, + sink = ltn12.sink.file(f) + } + if not status then + os.remove(tempname) + else + os.remove(cachename) + os.rename(tempname,cachename) + end + return cachename +end + +install('http',http_handler) +install('https') -- see pod install('ftp') +statistics.register("scheme handling time", function() + local l, r, nl, nr = { }, { }, 0, 0 + for k, v in table.sortedhash(loaded) do + if v > 0 then + nl = nl + 1 + l[nl] = k .. ":" .. v + end + end + for k, v in table.sortedhash(reused) do + if v > 0 then + nr = nr + 1 + r[nr] = k .. ":" .. 
v + end + end + local n = nl + nr + if n > 0 then + l = nl > 0 and concat(l) or "none" + r = nr > 0 and concat(r) or "none" + return format("%s seconds, %s processed, threshold %s seconds, loaded: %s, reused: %s", + statistics.elapsedtime(schemes), n, threshold, l, r) + else + return nil + end +end) + +-- We provide a few more helpers: + +----- http = require("socket.http") +local httprequest = http.request +local toquery = url.toquery + +-- local function httprequest(url) +-- return os.resultof(format("curl --silent %q", url)) +-- end + +local function fetchstring(url,data) + local q = data and toquery(data) + if q then + url = url .. "?" .. q + end + local reply = httprequest(url) + return reply -- just one argument +end + +schemes.fetchstring = fetchstring + +function schemes.fetchtable(url,data) + local reply = fetchstring(url,data) + if reply then + local s = loadstring("return " .. reply) + if s then + return s() + end + end +end + end -- of closure @@ -15939,7 +16172,7 @@ own.libs = { -- order can be made better -- 'data-bin.lua', 'data-zip.lua', 'data-tre.lua', - 'data-crl.lua', + 'data-sch.lua', 'data-lua.lua', 'data-aux.lua', -- updater 'data-tmf.lua', @@ -16238,7 +16471,8 @@ function runners.execute_script(fullname,internal,nosplit) elseif state == 'skip' then return true elseif state == "run" then - local path, name, suffix, result = file.dirname(fullname), file.basename(fullname), file.extname(fullname), "" + local path, name, suffix = file.splitname(fullname) + local result = "" if path ~= "" then result = fullname elseif name then @@ -16249,7 +16483,7 @@ function runners.execute_script(fullname,internal,nosplit) name = gsub(name,"^script:","") if suffix == "" and runners.registered[name] and runners.registered[name][1] then name = runners.registered[name][1] - suffix = file.extname(name) + suffix = file.suffix(name) end if suffix == "" then -- loop over known suffixes @@ -16276,7 +16510,7 @@ function runners.execute_script(fullname,internal,nosplit) environment.ownscript = result dofile(result) else - local binary = runners.applications[file.extname(result)] + local binary = runners.applications[file.suffix(result)] result = string.quoted(string.unquoted(result)) -- if string.match(result,' ') and not string.match(result,"^\".*\"$") then -- result = '"' .. result .. '"' @@ -16469,7 +16703,7 @@ function resolvers.launch(str) -- maybe we also need to test on mtxrun.launcher.suffix environment -- variable or on windows consult the assoc and ftype vars and such local launchers = runners.launchers[os.platform] if launchers then - local suffix = file.extname(str) if suffix then + local suffix = file.suffix(str) if suffix then local runner = launchers[suffix] if runner then str = runner .. " " .. str end @@ -16528,7 +16762,7 @@ function runners.find_mtx_script(filename) end filename = file.addsuffix(filename,"lua") local basename = file.removesuffix(file.basename(filename)) - local suffix = file.extname(filename) + local suffix = file.suffix(filename) -- qualified path, raw name local fullname = file.is_qualified_path(filename) and io.exists(filename) and filename if fullname and fullname ~= "" then @@ -16583,7 +16817,7 @@ function runners.execute_ctx_script(filename,...) runners.register_arguments(...) 
local arguments = environment.arguments_after local fullname = runners.find_mtx_script(filename) or "" - if file.extname(fullname) == "cld" then + if file.suffix(fullname) == "cld" then -- handy in editors where we force --autopdf report("running cld script: %s",filename) table.insert(arguments,1,fullname) @@ -16951,7 +17185,7 @@ elseif e_argument("find-path") then elseif e_argument("expand-braces") then - -- luatools: runners.execute_ctx_script("mtx-base","--expand-braces",filename + -- luatools: runners.execute_ctx_script("mtx-base","--expand-braces",filename) resolvers.load("nofiles") runners.register_arguments(filename) diff --git a/scripts/context/stubs/unix/mtxrun b/scripts/context/stubs/unix/mtxrun index 413fc9f32..5f2ecc2ac 100644 --- a/scripts/context/stubs/unix/mtxrun +++ b/scripts/context/stubs/unix/mtxrun @@ -1178,9 +1178,8 @@ local byte, char, gmatch = string.byte, string.char, string.gmatch lpeg.patterns = lpeg.patterns or { } -- so that we can share local patterns = lpeg.patterns -local P, R, S, V, match = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.match -local Ct, C, Cs, Cc = lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc -local lpegtype = lpeg.type +local P, R, S, V, Ct, C, Cs, Cc = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc +local lpegtype, lpegmatch = lpeg.type, lpeg.match local utfcharacters = string.utfcharacters local utfgmatch = unicode and unicode.utf8.gmatch @@ -1284,8 +1283,17 @@ patterns.beginline = #(1-newline) -- print(string.unquoted('"test"')) -- print(string.unquoted('"test"')) -function lpeg.anywhere(pattern) --slightly adapted from website - return P { P(pattern) + 1 * V(1) } -- why so complex? +local function anywhere(pattern) --slightly adapted from website + return P { P(pattern) + 1 * V(1) } +end + +lpeg.anywhere = anywhere + +function lpeg.instringchecker(p) + p = anywhere(p) + return function(str) + return lpegmatch(p,str) and true or false + end end function lpeg.splitter(pattern, action) @@ -1334,7 +1342,7 @@ function string.splitup(str,separator) if not separator then separator = "," end - return match(splitters_m[separator] or splitat(separator),str) + return lpegmatch(splitters_m[separator] or splitat(separator),str) end @@ -1346,7 +1354,7 @@ function lpeg.split(separator,str) c = tsplitat(separator) cache[separator] = c end - return match(c,str) + return lpegmatch(c,str) end function string.split(str,separator) @@ -1356,7 +1364,7 @@ function string.split(str,separator) c = tsplitat(separator) cache[separator] = c end - return match(c,str) + return lpegmatch(c,str) else return { str } end @@ -1375,7 +1383,7 @@ local linesplitter = tsplitat(newline) patterns.linesplitter = linesplitter function string.splitlines(str) - return match(linesplitter,str) + return lpegmatch(linesplitter,str) end local utflinesplitter = utfbom^-1 * tsplitat(newline) @@ -1383,7 +1391,7 @@ local utflinesplitter = utfbom^-1 * tsplitat(newline) patterns.utflinesplitter = utflinesplitter function string.utfsplitlines(str) - return match(utflinesplitter,str or "") + return lpegmatch(utflinesplitter,str or "") end @@ -1397,7 +1405,7 @@ function lpeg.checkedsplit(separator,str) c = Ct(separator^0 * other * (separator^1 * other)^0) cache[separator] = c end - return match(c,str) + return lpegmatch(c,str) end function string.checkedsplit(str,separator) @@ -1408,7 +1416,7 @@ function string.checkedsplit(str,separator) c = Ct(separator^0 * other * (separator^1 * other)^0) cache[separator] = c end - return match(c,str) + return lpegmatch(c,str) end @@ -1519,7 +1527,7 @@ local 
nany = utf8char/"" function lpeg.counter(pattern) pattern = Cs((P(pattern)/" " + nany)^0) return function(str) - return #match(pattern,str) + return #lpegmatch(pattern,str) end end @@ -1533,7 +1541,7 @@ if utfgmatch then end return n else -- 4 times slower but still faster than / function - return #match(Cs((P(what)/" " + nany)^0),str) + return #lpegmatch(Cs((P(what)/" " + nany)^0),str) end end @@ -1548,9 +1556,9 @@ else p = Cs((P(what)/" " + nany)^0) cache[p] = p end - return #match(p,str) + return #lpegmatch(p,str) else -- 4 times slower but still faster than / function - return #match(Cs((P(what)/" " + nany)^0),str) + return #lpegmatch(Cs((P(what)/" " + nany)^0),str) end end @@ -1577,7 +1585,7 @@ local p = Cs((S("-.+*%()[]") / patterns_escapes + anything)^0) local s = Cs((S("-.+*%()[]") / simple_escapes + anything)^0) function string.escapedpattern(str,simple) - return match(simple and s or p,str) + return lpegmatch(simple and s or p,str) end -- utf extensies @@ -1624,7 +1632,7 @@ else p = P(uc) end end - match((utf8char/f)^0,str) + lpegmatch((utf8char/f)^0,str) return p end @@ -1640,7 +1648,7 @@ function lpeg.UR(str,more) first = str last = more or first else - first, last = match(range,str) + first, last = lpegmatch(range,str) if not last then return P(str) end @@ -1667,11 +1675,15 @@ end -function lpeg.oneof(list,...) -- lpeg.oneof("elseif","else","if","then") +function lpeg.is_lpeg(p) + return p and lpegtype(p) == "pattern" +end + +function lpeg.oneof(list,...) -- lpeg.oneof("elseif","else","if","then") -- assume proper order if type(list) ~= "table" then list = { list, ... } end - -- sort(list) -- longest match first + -- table.sort(list) -- longest match first local p = P(list[1]) for l=2,#list do p = p + P(list[l]) @@ -1679,10 +1691,6 @@ function lpeg.oneof(list,...) -- lpeg.oneof("elseif","else","if","then") return p end -function lpeg.is_lpeg(p) - return p and lpegtype(p) == "pattern" -end - -- For the moment here, but it might move to utilities. Beware, we need to -- have the longest keyword first, so 'aaa' comes beforte 'aa' which is why we -- loop back from the end cq. prepend. 
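Earlier in this diff, data-crl.lua is replaced by data-sch.lua, which besides the cached curl/socket fetching also exports resolvers.schemes.fetchstring and resolvers.schemes.fetchtable. A hypothetical usage sketch, assuming the mtxrun environment where resolvers.schemes is loaded; the URL and query fields are invented:

    local schemes = resolvers.schemes

    -- fetchstring appends "?key=value&..." built by url.toquery and returns the raw reply
    local reply = schemes.fetchstring("http://example.com/status", { format = "lua" })

    -- fetchtable expects the reply to be a Lua table literal, e.g. { ok = true }
    local data = schemes.fetchtable("http://example.com/status", { format = "lua" })
    if data and data.ok then
        print("server is reachable")
    end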
@@ -2791,7 +2799,7 @@ local function nameonly(name) return (gsub(match(name,"^.+[/\\](.-)$") or name,"%.[%a%d]+$","")) end -local function extname(name,default) +local function suffixonly(name,default) return match(name,"^.+%.([^/\\]-)$") or default or "" end @@ -2800,11 +2808,16 @@ local function splitname(name) return n or name, s or "" end -file.basename = basename -file.dirname = dirname -file.nameonly = nameonly -file.extname = extname -file.suffix = extname +file.basename = basename + +file.pathpart = dirname +file.dirname = dirname + +file.nameonly = nameonly + +file.suffixonly = suffixonly +file.extname = suffixonly -- obsolete +file.suffix = suffixonly function file.removesuffix(filename) return (gsub(filename,"%.[%a%d]+$","")) @@ -3215,7 +3228,7 @@ if not modules then modules = { } end modules ['l-url'] = { local char, gmatch, gsub, format, byte, find = string.char, string.gmatch, string.gsub, string.format, string.byte, string.find local concat = table.concat local tonumber, type = tonumber, type -local P, C, R, S, Cs, Cc, Ct = lpeg.P, lpeg.C, lpeg.R, lpeg.S, lpeg.Cs, lpeg.Cc, lpeg.Ct +local P, C, R, S, Cs, Cc, Ct, Cf, Cg, V = lpeg.P, lpeg.C, lpeg.R, lpeg.S, lpeg.Cs, lpeg.Cc, lpeg.Ct, lpeg.Cf, lpeg.Cg, lpeg.V local lpegmatch, lpegpatterns, replacer = lpeg.match, lpeg.patterns, lpeg.replacer -- from wikipedia: @@ -3248,15 +3261,19 @@ local endofstring = P(-1) local hexdigit = R("09","AF","af") local plus = P("+") local nothing = Cc("") -local escaped = (plus / " ") + (percent * C(hexdigit * hexdigit) / tochar) +local escapedchar = (percent * C(hexdigit * hexdigit)) / tochar +local escaped = (plus / " ") + escapedchar -- we assume schemes with more than 1 character (in order to avoid problems with windows disks) -- we also assume that when we have a scheme, we also have an authority +-- +-- maybe we should already split the query (better for unescaping as = & can be part of a value local schemestr = Cs((escaped+(1-colon-slash-qmark-hash))^2) local authoritystr = Cs((escaped+(1- slash-qmark-hash))^0) local pathstr = Cs((escaped+(1- qmark-hash))^0) -local querystr = Cs((escaped+(1- hash))^0) +----- querystr = Cs((escaped+(1- hash))^0) +local querystr = Cs(( (1- hash))^0) local fragmentstr = Cs((escaped+(1- endofstring))^0) local scheme = schemestr * colon + nothing @@ -3271,11 +3288,19 @@ local parser = Ct(validurl) lpegpatterns.url = validurl lpegpatterns.urlsplitter = parser -local escapes = { } ; for i=0,255 do escapes[i] = format("%%%02X",i) end +local escapes = { } + +setmetatable(escapes, { __index = function(t,k) + local v = format("%%%02X",byte(k)) + t[k] = v + return v +end }) -local escaper = Cs((R("09","AZ","az") + S("-./_") + P(1) / escapes)^0) +local escaper = Cs((R("09","AZ","az") + P(" ")/"%%20" + S("-./_") + P(1) / escapes)^0) -- space happens most +local unescaper = Cs((escapedchar + 1)^0) -lpegpatterns.urlescaper = escaper +lpegpatterns.urlescaper = escaper +lpegpatterns.urlunescaper = unescaper -- todo: reconsider Ct as we can as well have five return values (saves a table) -- so we can have two parsers, one with and one without @@ -3311,10 +3336,32 @@ local rootbased = P("/") local barswapper = replacer("|",":") local backslashswapper = replacer("\\","/") +-- queries: + +local equal = P("=") +local amp = P("&") +local key = Cs(((escapedchar+1)-equal )^0) +local value = Cs(((escapedchar+1)-amp -endofstring)^0) + +local splitquery = Cf ( Cc { } * P { "sequence", + sequence = V("pair") * (amp * V("pair"))^0, + pair = Cg(key * equal * value), +}, rawset) + +-- hasher 
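The splitquery grammar added just above folds key=value pairs into a table via Cf/Cg and rawset, unescaping percent-encoded bytes along the way. A self-contained sketch of that technique, assuming lpeg; the escape decoder is inlined here instead of the shared tochar helper:

-- a standalone sketch of the query-splitting grammar shown above
local lpeg = require("lpeg")
local P, R, C, Cs, Cc, Cf, Cg, V = lpeg.P, lpeg.R, lpeg.C, lpeg.Cs, lpeg.Cc, lpeg.Cf, lpeg.Cg, lpeg.V
local lpegmatch = lpeg.match

local hexdigit    = R("09","AF","af")
local escapedchar = (P("%") * C(hexdigit * hexdigit)) / function(s)
    return string.char(tonumber(s,16))    -- decode %XX
end

local equal = P("=")
local amp   = P("&")
local key   = Cs(((escapedchar + 1) - equal)^0)
local value = Cs(((escapedchar + 1) - amp  )^0)

-- note: the Cc{ } table is created once, at pattern-construction time
local splitquery = Cf(Cc{ } * P { "sequence",
    sequence = V("pair") * (amp * V("pair"))^0,
    pair     = Cg(key * equal * value),
}, rawset)

local t = lpegmatch(splitquery, "test=test%20test&x=123%3D45")
print(t.test, t.x)   -- test test    123=45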
+ local function hashed(str) -- not yet ok (/test?test) + if str == "" then + return { + scheme = "invalid", + original = str, + } + end local s = split(str) - local somescheme = s[1] ~= "" - local somequery = s[4] ~= "" + local rawscheme = s[1] + local rawquery = s[4] + local somescheme = rawscheme ~= "" + local somequery = rawquery ~= "" if not somescheme and not somequery then s = { scheme = "file", @@ -3330,14 +3377,17 @@ local function hashed(str) -- not yet ok (/test?test) local authority, path, filename = s[2], s[3] if authority == "" then filename = path + elseif path == "" then + filename = "" else filename = authority .. "/" .. path end s = { - scheme = s[1], + scheme = rawscheme, authority = authority, path = path, - query = s[4], + query = lpegmatch(unescaper,rawquery), -- unescaped, but possible conflict with & and = + queries = lpegmatch(splitquery,rawquery), -- split first and then unescaped fragment = s[5], original = str, noscheme = false, @@ -3347,6 +3397,8 @@ local function hashed(str) -- not yet ok (/test?test) return s end +-- inspect(hashed("template://test")) + -- Here we assume: -- -- files: /// = relative @@ -3389,23 +3441,65 @@ function url.construct(hash) -- dodo: we need to escape ! return lpegmatch(escaper,concat(fullurl)) end -function url.filename(filename) +function url.filename(filename) -- why no lpeg here ? local t = hashed(filename) return (t.scheme == "file" and (gsub(t.path,"^/([a-zA-Z])([:|])/)","%1:"))) or filename end +local function escapestring(str) + return lpegmatch(escaper,str) +end + +url.escape = escapestring + +-- function url.query(str) -- separator could be an option +-- if type(str) == "string" then +-- local t = { } +-- for k, v in gmatch(str,"([^&=]*)=([^&=]*)") do +-- t[k] = v +-- end +-- return t +-- else +-- return str +-- end +-- end + function url.query(str) if type(str) == "string" then - local t = { } - for k, v in gmatch(str,"([^&=]*)=([^&=]*)") do - t[k] = v - end - return t + return lpegmatch(splitquery,str) else return str end end +function url.toquery(data) + local td = type(data) + if td == "string" then + return #str and escape(data) or nil -- beware of double escaping + elseif td == "table" then + if next(data) then + local t = { } + for k, v in next, data do + t[#t+1] = format("%s=%s",k,escapestring(v)) + end + return concat(t,"&") + end + else + -- nil is a signal that no query + end +end + +-- /test/ | /test | test/ | test => test + +function url.barepath(path) + if not path or path == "" then + return "" + else + return (gsub(path,"^/?(.-)/?$","%1")) + end +end + + @@ -10946,7 +11040,7 @@ local gsub, find, gmatch, char = string.gsub, string.find, string.gmatch, string local concat = table.concat local next, type = next, type -local filedirname, filebasename, fileextname, filejoin = file.dirname, file.basename, file.extname, file.join +local filedirname, filebasename, filejoin = file.dirname, file.basename, file.join local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end) local trace_detail = false trackers.register("resolvers.details", function(v) trace_detail = v end) @@ -11655,7 +11749,7 @@ local resolvers = resolvers local allocate = utilities.storage.allocate local setmetatableindex = table.setmetatableindex -local fileextname = file.extname +local suffixonly = file.suffixonly local formats = allocate() local suffixes = allocate() @@ -11910,7 +12004,7 @@ function resolvers.formatofvariable(str) end function resolvers.formatofsuffix(str) -- of file - return 
suffixmap[fileextname(str)] or 'tex' -- so many map onto tex (like mkiv, cld etc) + return suffixmap[suffixonly(str)] or 'tex' -- so many map onto tex (like mkiv, cld etc) end function resolvers.variableofformat(str) @@ -11922,7 +12016,7 @@ function resolvers.variableofformatorsuffix(str) if v then return v end - v = suffixmap[fileextname(str)] + v = suffixmap[suffixonly(str)] if v then return formats[v] end @@ -12478,7 +12572,7 @@ local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns local filedirname = file.dirname local filebasename = file.basename -local fileextname = file.extname +local suffixonly = file.suffixonly local filejoin = file.join local collapsepath = file.collapsepath local joinpath = file.joinpath @@ -13408,7 +13502,7 @@ local preparetreepattern = Cs((P(".")/"%%." + P("-")/"%%-" + P(1))^0 * Cc("$")) local collect_instance_files local function find_analyze(filename,askedformat,allresults) - local filetype, wantedfiles, ext = '', { }, fileextname(filename) + local filetype, wantedfiles, ext = '', { }, suffixonly(filename) -- too tricky as filename can be bla.1.2.3: -- -- if not suffixmap[ext] then @@ -13486,7 +13580,7 @@ local function find_qualified(filename,allresults) -- this one will be split too if trace_detail then report_resolving("locating qualified file '%s'", filename) end - local forcedname, suffix = "", fileextname(filename) + local forcedname, suffix = "", suffixonly(filename) if suffix == "" then -- why local format_suffixes = askedformat == "" and resolvers.defaultsuffixes or suffixes[askedformat] if format_suffixes then @@ -15077,7 +15171,7 @@ end -- of closure do -- create closure to overcome 200 locals limit -if not modules then modules = { } end modules ['data-crl'] = { +if not modules then modules = { } end modules ['data-sch'] = { version = 1.001, comment = "companion to luat-lib.mkiv", author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", @@ -15085,60 +15179,199 @@ if not modules then modules = { } end modules ['data-crl'] = { license = "see context related readme files" } --- this one is replaced by data-sch.lua -- +local loadstring = loadstring +local gsub, concat, format = string.gsub, table.concat, string.format +local finders, openers, loaders = resolvers.finders, resolvers.openers, resolvers.loaders -local gsub = string.gsub +local trace_schemes = false trackers.register("resolvers.schemes",function(v) trace_schemes = v end) +local report_schemes = logs.reporter("resolvers","schemes") -local resolvers = resolvers +local http = require("socket.http") +local ltn12 = require("ltn12") -local finders, openers, loaders = resolvers.finders, resolvers.openers, resolvers.loaders +local resolvers = resolvers +local schemes = resolvers.schemes or { } +resolvers.schemes = schemes + +local cleaners = { } +schemes.cleaners = cleaners + +local threshold = 24 * 60 * 60 + +directives.register("schemes.threshold", function(v) threshold = tonumber(v) or threshold end) + +function cleaners.none(specification) + return specification.original +end + +function cleaners.strip(specification) + return (gsub(specification.original,"[^%a%d%.]+","-")) -- so we keep periods +end + +function cleaners.md5(specification) + return file.addsuffix(md5.hex(specification.original),file.suffix(specification.path)) +end + +local cleaner = cleaners.strip + +directives.register("schemes.cleanmethod", function(v) cleaner = cleaners[v] or cleaners.strip end) + +function resolvers.schemes.cleanname(specification) + local hash = cleaner(specification) + if trace_schemes then + 
report_schemes("hashing %s to %s",specification.original,hash) + end + return hash +end -resolvers.curl = resolvers.curl or { } -local curl = resolvers.curl +local cached, loaded, reused, thresholds, handlers = { }, { }, { }, { }, { } -local cached = { } +local function runcurl(name,cachename) -- we use sockets instead or the curl library when possible + local command = "curl --silent --create-dirs --output " .. cachename .. " " .. name + os.spawn(command) +end -local function runcurl(specification) +local function fetch(specification) local original = specification.original - -- local scheme = specification.scheme - local cleanname = gsub(original,"[^%a%d%.]+","-") - local cachename = caches.setfirstwritablefile(cleanname,"curl") + local scheme = specification.scheme + local cleanname = schemes.cleanname(specification) + local cachename = caches.setfirstwritablefile(cleanname,"schemes") if not cached[original] then - if not io.exists(cachename) then + statistics.starttiming(schemes) + if not io.exists(cachename) or (os.difftime(os.time(),lfs.attributes(cachename).modification) > (thresholds[protocol] or threshold)) then cached[original] = cachename - local command = "curl --silent --create-dirs --output " .. cachename .. " " .. original - os.spawn(command) + local handler = handlers[scheme] + if handler then + if trace_schemes then + report_schemes("fetching '%s', protocol '%s', method 'built-in'",original,scheme) + end + logs.flush() + handler(specification,cachename) + else + if trace_schemes then + report_schemes("fetching '%s', protocol '%s', method 'curl'",original,scheme) + end + logs.flush() + runcurl(original,cachename) + end end if io.exists(cachename) then cached[original] = cachename + if trace_schemes then + report_schemes("using cached '%s', protocol '%s', cachename '%s'",original,scheme,cachename) + end else cached[original] = "" + if trace_schemes then + report_schemes("using missing '%s', protocol '%s'",original,scheme) + end end + loaded[scheme] = loaded[scheme] + 1 + statistics.stoptiming(schemes) + else + if trace_schemes then + report_schemes("reusing '%s', protocol '%s'",original,scheme) + end + reused[scheme] = reused[scheme] + 1 end return cached[original] end --- old code: we could be cleaner using specification (see schemes) - local function finder(specification,filetype) - return resolvers.methodhandler("finders",runcurl(specification),filetype) + return resolvers.methodhandler("finders",fetch(specification),filetype) end local opener = openers.file local loader = loaders.file -local function install(scheme) - finders[scheme] = finder - openers[scheme] = opener - loaders[scheme] = loader +local function install(scheme,handler,newthreshold) + handlers [scheme] = handler + loaded [scheme] = 0 + reused [scheme] = 0 + finders [scheme] = finder + openers [scheme] = opener + loaders [scheme] = loader + thresholds[scheme] = newthreshold or threshold end -resolvers.curl.install = install +schemes.install = install -install('http') -install('https') +local function http_handler(specification,cachename) + local tempname = cachename .. 
".tmp" + local f = io.open(tempname,"wb") + local status, message = http.request { + url = specification.original, + sink = ltn12.sink.file(f) + } + if not status then + os.remove(tempname) + else + os.remove(cachename) + os.rename(tempname,cachename) + end + return cachename +end + +install('http',http_handler) +install('https') -- see pod install('ftp') +statistics.register("scheme handling time", function() + local l, r, nl, nr = { }, { }, 0, 0 + for k, v in table.sortedhash(loaded) do + if v > 0 then + nl = nl + 1 + l[nl] = k .. ":" .. v + end + end + for k, v in table.sortedhash(reused) do + if v > 0 then + nr = nr + 1 + r[nr] = k .. ":" .. v + end + end + local n = nl + nr + if n > 0 then + l = nl > 0 and concat(l) or "none" + r = nr > 0 and concat(r) or "none" + return format("%s seconds, %s processed, threshold %s seconds, loaded: %s, reused: %s", + statistics.elapsedtime(schemes), n, threshold, l, r) + else + return nil + end +end) + +-- We provide a few more helpers: + +----- http = require("socket.http") +local httprequest = http.request +local toquery = url.toquery + +-- local function httprequest(url) +-- return os.resultof(format("curl --silent %q", url)) +-- end + +local function fetchstring(url,data) + local q = data and toquery(data) + if q then + url = url .. "?" .. q + end + local reply = httprequest(url) + return reply -- just one argument +end + +schemes.fetchstring = fetchstring + +function schemes.fetchtable(url,data) + local reply = fetchstring(url,data) + if reply then + local s = loadstring("return " .. reply) + if s then + return s() + end + end +end + end -- of closure @@ -15939,7 +16172,7 @@ own.libs = { -- order can be made better -- 'data-bin.lua', 'data-zip.lua', 'data-tre.lua', - 'data-crl.lua', + 'data-sch.lua', 'data-lua.lua', 'data-aux.lua', -- updater 'data-tmf.lua', @@ -16238,7 +16471,8 @@ function runners.execute_script(fullname,internal,nosplit) elseif state == 'skip' then return true elseif state == "run" then - local path, name, suffix, result = file.dirname(fullname), file.basename(fullname), file.extname(fullname), "" + local path, name, suffix = file.splitname(fullname) + local result = "" if path ~= "" then result = fullname elseif name then @@ -16249,7 +16483,7 @@ function runners.execute_script(fullname,internal,nosplit) name = gsub(name,"^script:","") if suffix == "" and runners.registered[name] and runners.registered[name][1] then name = runners.registered[name][1] - suffix = file.extname(name) + suffix = file.suffix(name) end if suffix == "" then -- loop over known suffixes @@ -16276,7 +16510,7 @@ function runners.execute_script(fullname,internal,nosplit) environment.ownscript = result dofile(result) else - local binary = runners.applications[file.extname(result)] + local binary = runners.applications[file.suffix(result)] result = string.quoted(string.unquoted(result)) -- if string.match(result,' ') and not string.match(result,"^\".*\"$") then -- result = '"' .. result .. '"' @@ -16469,7 +16703,7 @@ function resolvers.launch(str) -- maybe we also need to test on mtxrun.launcher.suffix environment -- variable or on windows consult the assoc and ftype vars and such local launchers = runners.launchers[os.platform] if launchers then - local suffix = file.extname(str) if suffix then + local suffix = file.suffix(str) if suffix then local runner = launchers[suffix] if runner then str = runner .. " " .. 
str end @@ -16528,7 +16762,7 @@ function runners.find_mtx_script(filename) end filename = file.addsuffix(filename,"lua") local basename = file.removesuffix(file.basename(filename)) - local suffix = file.extname(filename) + local suffix = file.suffix(filename) -- qualified path, raw name local fullname = file.is_qualified_path(filename) and io.exists(filename) and filename if fullname and fullname ~= "" then @@ -16583,7 +16817,7 @@ function runners.execute_ctx_script(filename,...) runners.register_arguments(...) local arguments = environment.arguments_after local fullname = runners.find_mtx_script(filename) or "" - if file.extname(fullname) == "cld" then + if file.suffix(fullname) == "cld" then -- handy in editors where we force --autopdf report("running cld script: %s",filename) table.insert(arguments,1,fullname) @@ -16951,7 +17185,7 @@ elseif e_argument("find-path") then elseif e_argument("expand-braces") then - -- luatools: runners.execute_ctx_script("mtx-base","--expand-braces",filename + -- luatools: runners.execute_ctx_script("mtx-base","--expand-braces",filename) resolvers.load("nofiles") runners.register_arguments(filename) diff --git a/tex/context/base/back-exp.lua b/tex/context/base/back-exp.lua index 98a3bc9a2..f73ee2d55 100644 --- a/tex/context/base/back-exp.lua +++ b/tex/context/base/back-exp.lua @@ -383,7 +383,7 @@ local function allusedimages(xmlfile) for element, details in sortedhash(usedimages) do for detail, data in sortedhash(details) do local name = data.name - if file.extname(name) == "pdf" then + if file.suffix(name) == "pdf" then -- temp hack .. we will have a remapper name = file.replacesuffix(name,"svg") end @@ -398,7 +398,7 @@ local function uniqueusedimages() for element, details in next, usedimages do for detail, data in next, details do local name = data.name - if file.extname(name) == "pdf" then + if file.suffix(name) == "pdf" then unique[file.replacesuffix(name,"svg")] = name else unique[name] = name diff --git a/tex/context/base/cont-new.mkii b/tex/context/base/cont-new.mkii index b26c17881..408d2e3d7 100644 --- a/tex/context/base/cont-new.mkii +++ b/tex/context/base/cont-new.mkii @@ -11,7 +11,7 @@ %C therefore copyrighted by \PRAGMA. See mreadme.pdf for %C details. -\newcontextversion{2012.08.05 12:52} +\newcontextversion{2012.08.08 23:45} %D This file is loaded at runtime, thereby providing an %D excellent place for hacks, patches, extensions and new diff --git a/tex/context/base/cont-new.mkiv b/tex/context/base/cont-new.mkiv index da0474f1b..04638a53a 100644 --- a/tex/context/base/cont-new.mkiv +++ b/tex/context/base/cont-new.mkiv @@ -11,7 +11,7 @@ %C therefore copyrighted by \PRAGMA. See mreadme.pdf for %C details. 
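The back-exp.lua hunks above remap pdf image names to their svg counterparts purely by suffix. A tiny standalone sketch of that kind of remapping; the two helpers are simplified string-pattern stand-ins for file.suffix and file.replacesuffix, not the ConTeXt implementations:

-- a standalone sketch of the suffix-based remapping used above
local function suffix(name)
    return name:match("^.+%.([^/\\]-)$") or ""
end

local function replacesuffix(name, new)
    return (name:gsub("%.[%a%d]+$", "")) .. "." .. new
end

local function remapimage(name)
    if suffix(name) == "pdf" then
        return replacesuffix(name, "svg")   -- exporter wants the svg twin
    else
        return name
    end
end

print(remapimage("figure-one.pdf"))  -- figure-one.svg
print(remapimage("photo.png"))       -- photo.png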
-\newcontextversion{2012.08.05 12:52} +\newcontextversion{2012.08.08 23:45} %D This file is loaded at runtime, thereby providing an %D excellent place for hacks, patches, extensions and new diff --git a/tex/context/base/context-version.pdf b/tex/context/base/context-version.pdf Binary files differindex 88bb269f1..a1e6cab61 100644 --- a/tex/context/base/context-version.pdf +++ b/tex/context/base/context-version.pdf diff --git a/tex/context/base/context-version.png b/tex/context/base/context-version.png Binary files differindex 2cc511153..ab6e4b526 100644 --- a/tex/context/base/context-version.png +++ b/tex/context/base/context-version.png diff --git a/tex/context/base/context.mkii b/tex/context/base/context.mkii index f2fc58a15..0bd111eb3 100644 --- a/tex/context/base/context.mkii +++ b/tex/context/base/context.mkii @@ -20,7 +20,7 @@ %D your styles an modules. \edef\contextformat {\jobname} -\edef\contextversion{2012.08.05 12:52} +\edef\contextversion{2012.08.08 23:45} %D For those who want to use this: diff --git a/tex/context/base/context.mkiv b/tex/context/base/context.mkiv index 06666989d..8fb3c8570 100644 --- a/tex/context/base/context.mkiv +++ b/tex/context/base/context.mkiv @@ -25,7 +25,7 @@ %D up and the dependencies are more consistent. \edef\contextformat {\jobname} -\edef\contextversion{2012.08.05 12:52} +\edef\contextversion{2012.08.08 23:45} %D For those who want to use this: diff --git a/tex/context/base/core-ctx.lua b/tex/context/base/core-ctx.lua index 1f673d17e..0dfb21ce9 100644 --- a/tex/context/base/core-ctx.lua +++ b/tex/context/base/core-ctx.lua @@ -54,9 +54,9 @@ table.setmetatableindex(ctxrunner.prepfiles,dontpreparefile) local function filtered(str,method) -- in resolvers? str = tostring(str) - if method == 'name' then str = file.removesuffix(file.basename(str)) + if method == 'name' then str = file.nameonly(str) elseif method == 'path' then str = file.dirname(str) - elseif method == 'suffix' then str = file.extname(str) + elseif method == 'suffix' then str = file.suffix(str) elseif method == 'nosuffix' then str = file.removesuffix(str) elseif method == 'nopath' then str = file.basename(str) elseif method == 'base' then str = file.basename(str) diff --git a/tex/context/base/core-sys.lua b/tex/context/base/core-sys.lua index 0be9fd588..703bcac0f 100644 --- a/tex/context/base/core-sys.lua +++ b/tex/context/base/core-sys.lua @@ -7,7 +7,7 @@ if not modules then modules = { } end modules ['core-sys'] = { } local lower, format = string.lower, string.format -local extname, basename, removesuffix = file.extname, file.basename, file.removesuffix +local suffix, basename, removesuffix = file.suffix, file.basename, file.removesuffix local environment = environment @@ -15,9 +15,9 @@ function commands.updatefilenames(inputfilename,outputfilename) environment.inputfilename = inputfilename or "" environment.outputfilename = outputfilename or "" environment.jobfilename = inputfilename or tex.jobname or "" - environment.jobfilesuffix = lower(extname(environment.jobfilename)) + environment.jobfilesuffix = lower(suffix(environment.jobfilename)) environment.inputfilebarename = removesuffix(basename(inputfilename)) - environment.inputfilesuffix = lower(extname(inputfilename)) + environment.inputfilesuffix = lower(suffix(inputfilename)) end statistics.register("result saved in file", function() diff --git a/tex/context/base/data-env.lua b/tex/context/base/data-env.lua index 2d9787b91..f016881b5 100644 --- a/tex/context/base/data-env.lua +++ b/tex/context/base/data-env.lua @@ -12,7 +12,7 @@ 
local resolvers = resolvers local allocate = utilities.storage.allocate local setmetatableindex = table.setmetatableindex -local fileextname = file.extname +local suffixonly = file.suffixonly local formats = allocate() local suffixes = allocate() @@ -268,7 +268,7 @@ function resolvers.formatofvariable(str) end function resolvers.formatofsuffix(str) -- of file - return suffixmap[fileextname(str)] or 'tex' -- so many map onto tex (like mkiv, cld etc) + return suffixmap[suffixonly(str)] or 'tex' -- so many map onto tex (like mkiv, cld etc) end function resolvers.variableofformat(str) @@ -280,7 +280,7 @@ function resolvers.variableofformatorsuffix(str) if v then return v end - v = suffixmap[fileextname(str)] + v = suffixmap[suffixonly(str)] if v then return formats[v] end diff --git a/tex/context/base/data-ini.lua b/tex/context/base/data-ini.lua index 2c263aada..773490785 100644 --- a/tex/context/base/data-ini.lua +++ b/tex/context/base/data-ini.lua @@ -10,7 +10,7 @@ local gsub, find, gmatch, char = string.gsub, string.find, string.gmatch, string local concat = table.concat local next, type = next, type -local filedirname, filebasename, fileextname, filejoin = file.dirname, file.basename, file.extname, file.join +local filedirname, filebasename, filejoin = file.dirname, file.basename, file.join local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end) local trace_detail = false trackers.register("resolvers.details", function(v) trace_detail = v end) diff --git a/tex/context/base/data-res.lua b/tex/context/base/data-res.lua index 6515b4ddb..e51b980ff 100644 --- a/tex/context/base/data-res.lua +++ b/tex/context/base/data-res.lua @@ -24,7 +24,7 @@ local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns local filedirname = file.dirname local filebasename = file.basename -local fileextname = file.extname +local suffixonly = file.suffixonly local filejoin = file.join local collapsepath = file.collapsepath local joinpath = file.joinpath @@ -962,7 +962,7 @@ local preparetreepattern = Cs((P(".")/"%%." 
+ P("-")/"%%-" + P(1))^0 * Cc("$")) local collect_instance_files local function find_analyze(filename,askedformat,allresults) - local filetype, wantedfiles, ext = '', { }, fileextname(filename) + local filetype, wantedfiles, ext = '', { }, suffixonly(filename) -- too tricky as filename can be bla.1.2.3: -- -- if not suffixmap[ext] then @@ -1040,7 +1040,7 @@ local function find_qualified(filename,allresults) -- this one will be split too if trace_detail then report_resolving("locating qualified file '%s'", filename) end - local forcedname, suffix = "", fileextname(filename) + local forcedname, suffix = "", suffixonly(filename) if suffix == "" then -- why local format_suffixes = askedformat == "" and resolvers.defaultsuffixes or suffixes[askedformat] if format_suffixes then diff --git a/tex/context/base/data-sch.lua b/tex/context/base/data-sch.lua index 253adb9f6..569fa5c94 100644 --- a/tex/context/base/data-sch.lua +++ b/tex/context/base/data-sch.lua @@ -6,26 +6,26 @@ if not modules then modules = { } end modules ['data-sch'] = { license = "see context related readme files" } -local http = require("socket.http") -local ltn12 = require("ltn12") +local loadstring = loadstring local gsub, concat, format = string.gsub, table.concat, string.format local finders, openers, loaders = resolvers.finders, resolvers.openers, resolvers.loaders -local trace_schemes = false trackers.register("resolvers.schemes",function(v) trace_schemes = v end) - +local trace_schemes = false trackers.register("resolvers.schemes",function(v) trace_schemes = v end) local report_schemes = logs.reporter("resolvers","schemes") -local resolvers = resolvers +local http = require("socket.http") +local ltn12 = require("ltn12") -resolvers.schemes = resolvers.schemes or { } -local schemes = resolvers.schemes -schemes.threshold = 24 * 60 * 60 +local resolvers = resolvers +local schemes = resolvers.schemes or { } +resolvers.schemes = schemes -directives.register("schemes.threshold", function(v) schemes.threshold = tonumber(v) or schemes.threshold end) +local cleaners = { } +schemes.cleaners = cleaners -local cleaners = { } +local threshold = 24 * 60 * 60 -schemes.cleaners = cleaners +directives.register("schemes.threshold", function(v) threshold = tonumber(v) or threshold end) function cleaners.none(specification) return specification.original @@ -53,7 +53,7 @@ end local cached, loaded, reused, thresholds, handlers = { }, { }, { }, { }, { } -local function runcurl(name,cachename) -- will use sockets instead or the curl library +local function runcurl(name,cachename) -- we use sockets instead or the curl library when possible local command = "curl --silent --create-dirs --output " .. cachename .. " " .. 
name os.spawn(command) end @@ -65,8 +65,7 @@ local function fetch(specification) local cachename = caches.setfirstwritablefile(cleanname,"schemes") if not cached[original] then statistics.starttiming(schemes) - if not io.exists(cachename) or (os.difftime(os.time(),lfs.attributes(cachename).modification) > - (thresholds[protocol] or schemes.threshold)) then + if not io.exists(cachename) or (os.difftime(os.time(),lfs.attributes(cachename).modification) > (thresholds[protocol] or threshold)) then cached[original] = cachename local handler = handlers[scheme] if handler then @@ -112,14 +111,14 @@ end local opener = openers.file local loader = loaders.file -local function install(scheme,handler,threshold) +local function install(scheme,handler,newthreshold) handlers [scheme] = handler loaded [scheme] = 0 reused [scheme] = 0 finders [scheme] = finder openers [scheme] = opener loaders [scheme] = loader - thresholds[scheme] = threshold or schemes.threshold + thresholds[scheme] = newthreshold or threshold end schemes.install = install @@ -160,11 +159,42 @@ statistics.register("scheme handling time", function() end local n = nl + nr if n > 0 then - l = (nl > 0 and concat(l)) or "none" - r = (nr > 0 and concat(r)) or "none" + l = nl > 0 and concat(l) or "none" + r = nr > 0 and concat(r) or "none" return format("%s seconds, %s processed, threshold %s seconds, loaded: %s, reused: %s", - statistics.elapsedtime(schemes), n, schemes.threshold, l, r) + statistics.elapsedtime(schemes), n, threshold, l, r) else return nil end end) + +-- We provide a few more helpers: + +----- http = require("socket.http") +local httprequest = http.request +local toquery = url.toquery + +-- local function httprequest(url) +-- return os.resultof(format("curl --silent %q", url)) +-- end + +local function fetchstring(url,data) + local q = data and toquery(data) + if q then + url = url .. "?" .. q + end + local reply = httprequest(url) + return reply -- just one argument +end + +schemes.fetchstring = fetchstring + +function schemes.fetchtable(url,data) + local reply = fetchstring(url,data) + if reply then + local s = loadstring("return " .. reply) + if s then + return s() + end + end +end diff --git a/tex/context/base/file-job.lua b/tex/context/base/file-job.lua index 8fe686693..41f2a012c 100644 --- a/tex/context/base/file-job.lua +++ b/tex/context/base/file-job.lua @@ -30,18 +30,28 @@ local settings_to_array = utilities.parsers.settings_to_array local write_nl = texio.write_nl local allocate = utilities.storage.allocate -local v_outer = variables.outer -local v_text = variables.text -local v_project = variables.project -local v_environment = variables.environment -local v_product = variables.product -local v_component = variables.component -local c_prefix = variables.prefix +local nameonly = file.nameonly +local suffixonly = file.suffix +local basename = file.basename +local addsuffix = file.addsuffix +local removesuffix = file.removesuffix +local dirname = file.dirname +local is_qualified_path = file.is_qualified_path + +local inputstack = resolvers.inputstack + +local v_outer = variables.outer +local v_text = variables.text +local v_project = variables.project +local v_environment = variables.environment +local v_product = variables.product +local v_component = variables.component +local c_prefix = variables.prefix -- main code .. there is some overlap .. here we have loc:// local function findctxfile(name) -- loc ? any ? 
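The schemes.fetchstring/fetchtable helpers added above append a query built from a table to a URL, fetch it over http, and load the reply as a Lua table. A standalone sketch of that flow using luasocket directly; the URL is hypothetical and the query encoding is simplified (no percent escaping):

-- a standalone sketch of the fetchstring/fetchtable flow added above
local http = require("socket.http")

local function toquery(data)               -- simplified stand-in for url.toquery
    local t = { }
    for k, v in pairs(data) do
        t[#t+1] = k .. "=" .. tostring(v)  -- the real code percent-escapes v
    end
    return table.concat(t, "&")
end

local function fetchstring(url, data)
    local q = data and toquery(data)
    if q then
        url = url .. "?" .. q
    end
    return (http.request(url))             -- body only (first return value)
end

local function fetchtable(url, data)
    local reply = fetchstring(url, data)
    if reply then
        -- reply is expected to be a serialized Lua table; loadstring under
        -- Lua 5.1/LuaJIT, use load on 5.2+
        local s = loadstring("return " .. reply)
        if s then
            return s()
        end
    end
end

-- hypothetical service answering with a Lua table literal:
-- local t = fetchtable("http://example.com/status.lua", { format = "lua" })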
- if file.is_qualified_path(name) then -- maybe when no suffix do some test for tex + if is_qualified_path(name) then -- maybe when no suffix do some test for tex return name elseif not url.hasscheme(name) then return resolvers.finders.byscheme("loc",name) or "" @@ -64,7 +74,7 @@ function commands.doifinputfileelse(name) end function commands.locatefilepath(name) - context(file.dirname(findctxfile(name))) + context(dirname(findctxfile(name))) end function commands.usepath(paths) @@ -237,7 +247,7 @@ local suffixes = { local function useanyfile(name,onlyonce) local s = suffixes[file.suffix(name)] if s then - s(file.removesuffix(name),onlyonce) + s(removesuffix(name),onlyonce) else usetexfile(name,onlyonce) -- e.g. ctx file --~ resolvers.readfilename(name) @@ -249,7 +259,7 @@ commands.useanyfile = useanyfile function resolvers.jobs.usefile(name,onlyonce,notext) local s = suffixes[file.suffix(name)] if s then - s(file.removesuffix(name),onlyonce,notext) + s(removesuffix(name),onlyonce,notext) end end @@ -444,38 +454,38 @@ local processors = utilities.storage.allocate { -- }, [v_text] = { [v_text] = { "many", context.processfilemany }, - [v_project] = { "none", context.processfileonce }, -- none - [v_environment] = { "once", context.processfileonce }, -- once - [v_product] = { "none", context.processfileonce }, -- none - [v_component] = { "many", context.processfilemany }, -- many + [v_project] = { "none", context.processfileonce }, + [v_environment] = { "once", context.processfileonce }, + [v_product] = { "none", context.processfileonce }, + [v_component] = { "many", context.processfilemany }, }, [v_project] = { [v_text] = { "many", context.processfilemany }, - [v_project] = { "none", context.processfilenone }, -- none - [v_environment] = { "once", context.processfileonce }, -- once - [v_product] = { "once", context.processfilenone }, -- once - [v_component] = { "none", context.processfilenone }, -- many * + [v_project] = { "none", context.processfilenone }, + [v_environment] = { "once", context.processfileonce }, + [v_product] = { "once", context.processfilenone }, + [v_component] = { "none", context.processfilenone }, }, [v_environment] = { [v_text] = { "many", context.processfilemany }, - [v_project] = { "none", context.processfilenone }, -- none - [v_environment] = { "once", context.processfileonce }, -- once - [v_product] = { "none", context.processfilenone }, -- none - [v_component] = { "none", context.processfilenone }, -- none + [v_project] = { "none", context.processfilenone }, + [v_environment] = { "once", context.processfileonce }, + [v_product] = { "none", context.processfilenone }, + [v_component] = { "none", context.processfilenone }, }, [v_product] = { [v_text] = { "many", context.processfilemany }, - [v_project] = { "once", context.processfileonce }, -- once - [v_environment] = { "once", context.processfileonce }, -- once - [v_product] = { "none", context.processfilemany }, -- none - [v_component] = { "many", context.processfilemany }, -- many + [v_project] = { "once", context.processfileonce }, + [v_environment] = { "once", context.processfileonce }, + [v_product] = { "none", context.processfilemany }, + [v_component] = { "many", context.processfilemany }, }, [v_component] = { [v_text] = { "many", context.processfilemany }, - [v_project] = { "once", context.processfileonce }, -- once - [v_environment] = { "once", context.processfileonce }, -- once - [v_product] = { "none", context.processfilenone }, -- none - [v_component] = { "many", context.processfilemany }, -- many + 
[v_project] = { "once", context.processfileonce }, + [v_environment] = { "once", context.processfileonce }, + [v_product] = { "none", context.processfilenone }, + [v_component] = { "many", context.processfilemany }, } } @@ -615,7 +625,7 @@ local function gotonextlevel(what,name) -- todo: something with suffix name insert(typestack,currenttype) insert(pathstack,currentpath) currenttype = what - currentpath = file.dirname(name) + currentpath = dirname(name) pushtree(what,name) if start[what] then start[what]() @@ -634,10 +644,17 @@ local function gotopreviouslevel(what) context.signalendofinput(what) end -function commands.startproject (name) gotonextlevel(v_project, name) end -function commands.startproduct (name) gotonextlevel(v_product, name) end -function commands.startcomponent (name) gotonextlevel(v_component, name) end -function commands.startenvironment(name) gotonextlevel(v_environment,name) end +local function autoname(name) + if name == "*" then + name = nameonly(inputstack[#inputstack] or name) + end + return name +end + +function commands.startproject (name) gotonextlevel(v_project, autoname(name)) end +function commands.startproduct (name) gotonextlevel(v_product, autoname(name)) end +function commands.startcomponent (name) gotonextlevel(v_component, autoname(name)) end +function commands.startenvironment(name) gotonextlevel(v_environment,autoname(name)) end function commands.stopproject () gotopreviouslevel(v_project ) end function commands.stopproduct () gotopreviouslevel(v_product ) end @@ -678,9 +695,9 @@ end function commands.loadexamodes(filename) if not filename or filename == "" then - filename = file.removesuffix(tex.jobname) + filename = removesuffix(tex.jobname) end - filename = resolvers.findfile(file.addsuffix(filename,'ctm')) or "" + filename = resolvers.findfile(addsuffix(filename,'ctm')) or "" if filename ~= "" then report_examodes("loading %s",filename) -- todo: message system convertexamodes(io.loaddata(filename)) @@ -787,9 +804,9 @@ function commands.getcommandline() -- has to happen at the tex end in order to e local inputfile = validstring(arguments.input) - if inputfile and file.dirname(inputfile) == "." and lfs.isfile(inputfile) then + if inputfile and dirname(inputfile) == "." 
and lfs.isfile(inputfile) then -- nicer in checks - inputfile = file.basename(inputfile) + inputfile = basename(inputfile) end context.setupsystem { diff --git a/tex/context/base/file-mod.lua b/tex/context/base/file-mod.lua index 1bbe657e0..4a520e998 100644 --- a/tex/context/base/file-mod.lua +++ b/tex/context/base/file-mod.lua @@ -45,7 +45,7 @@ local function usemodule(name,hasscheme) report_modules("checking url: '%s'",fullname) end foundname = resolvers.findtexfile(fullname) or "" - elseif file.extname(name) ~= "" then + elseif file.suffix(name) ~= "" then if trace_modules then report_modules("checking file: '%s'",name) end diff --git a/tex/context/base/font-con.lua b/tex/context/base/font-con.lua index a0b1588c8..6074bab18 100644 --- a/tex/context/base/font-con.lua +++ b/tex/context/base/font-con.lua @@ -1073,7 +1073,7 @@ setmetatableindex(formats, function(t,k) t[k] = l return l end - return rawget(t,file.extname(l)) + return rawget(t,file.suffix(l)) end) local locations = { } diff --git a/tex/context/base/font-def.lua b/tex/context/base/font-def.lua index e2ff7b355..864e5bd16 100644 --- a/tex/context/base/font-def.lua +++ b/tex/context/base/font-def.lua @@ -198,7 +198,7 @@ function resolvers.spec(specification) if resolved then specification.resolved = resolved specification.sub = sub - specification.forced = file.extname(resolved) + specification.forced = file.suffix(resolved) specification.name = file.removesuffix(resolved) end else diff --git a/tex/context/base/font-mis.lua b/tex/context/base/font-mis.lua index 9ca357751..c0ee7dec4 100644 --- a/tex/context/base/font-mis.lua +++ b/tex/context/base/font-mis.lua @@ -46,7 +46,7 @@ end local featuregroups = { "gsub", "gpos" } function fonts.helpers.getfeatures(name,t,script,language) -- maybe per font type - local t = lower(t or (name and file.extname(name)) or "") + local t = lower(t or (name and file.suffix(name)) or "") if t == "otf" or t == "ttf" or t == "ttc" or t == "dfont" then local filename = resolvers.findfile(name,t) or "" if filename ~= "" then diff --git a/tex/context/base/font-otf.lua b/tex/context/base/font-otf.lua index 1f12a1f37..9f0fb6657 100644 --- a/tex/context/base/font-otf.lua +++ b/tex/context/base/font-otf.lua @@ -1976,7 +1976,7 @@ local function read_from_otf(specification) local allfeatures = tfmdata.shared.features or specification.features.normal constructors.applymanipulators("otf",tfmdata,allfeatures,trace_features,report_otf) constructors.setname(tfmdata,specification) -- only otf? 
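Several hunks above key behaviour on a file suffix, most notably the formats lookup in font-con.lua where an __index metamethod retries a failed lookup with the suffix of the given name. A minimal sketch of that suffix-driven dispatch; the suffix helper and the table contents are illustrative only:

-- a minimal sketch of suffix-driven dispatch with a metatable fallback
local function suffix(name)
    return name:match("^.+%.([^/\\]-)$") or ""
end

local loaders = {
    otf = function(name) return "loading opentype: " .. name end,
    ttf = function(name) return "loading truetype: " .. name end,
}

setmetatable(loaders, { __index = function(t, k)
    -- unknown key: assume it is a filename and retry with its suffix
    local s = suffix(k)
    if s ~= "" then
        return rawget(t, s)
    end
end })

print(loaders["lmroman10-regular.otf"]("lmroman10-regular.otf"))
print(loaders.ttf("texgyrepagella.ttf"))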
- fonts.loggers.register(tfmdata,file.extname(specification.filename),specification) + fonts.loggers.register(tfmdata,file.suffix(specification.filename),specification) end return tfmdata end diff --git a/tex/context/base/font-tfm.lua b/tex/context/base/font-tfm.lua index f394a2441..53ef8dd99 100644 --- a/tex/context/base/font-tfm.lua +++ b/tex/context/base/font-tfm.lua @@ -91,7 +91,7 @@ local function read_from_tfm(specification) constructors.enhanceparameters(parameters) -- official copies for us -- if constructors.resolvevirtualtoo then - fonts.loggers.register(tfmdata,file.extname(filename),specification) -- strange, why here + fonts.loggers.register(tfmdata,file.suffix(filename),specification) -- strange, why here local vfname = findbinfile(specification.name, 'ovf') if vfname and vfname ~= "" then local vfdata = font.read_vf(vfname,size) -- not cached, fast enough diff --git a/tex/context/base/grph-inc.lua b/tex/context/base/grph-inc.lua index 0b03fbcd1..8672ca2be 100644 --- a/tex/context/base/grph-inc.lua +++ b/tex/context/base/grph-inc.lua @@ -634,7 +634,7 @@ local function locate(request) -- name, format, cache -- we could use the hashed data instead local askedpath= file.is_rootbased_path(askedname) local askedbase = file.basename(askedname) - local askedformat = (request.format ~= "" and request.format ~= "unknown" and request.format) or file.extname(askedname) or "" + local askedformat = (request.format ~= "" and request.format ~= "unknown" and request.format) or file.suffix(askedname) or "" local askedcache = request.cache local askedconversion = request.conversion local askedresolution = request.resolution diff --git a/tex/context/base/l-file.lua b/tex/context/base/l-file.lua index c47777bd5..d7b5b7623 100644 --- a/tex/context/base/l-file.lua +++ b/tex/context/base/l-file.lua @@ -34,7 +34,7 @@ local function nameonly(name) return (gsub(match(name,"^.+[/\\](.-)$") or name,"%.[%a%d]+$","")) end -local function extname(name,default) +local function suffixonly(name,default) return match(name,"^.+%.([^/\\]-)$") or default or "" end @@ -43,11 +43,16 @@ local function splitname(name) return n or name, s or "" end -file.basename = basename -file.dirname = dirname -file.nameonly = nameonly -file.extname = extname -file.suffix = extname +file.basename = basename + +file.pathpart = dirname +file.dirname = dirname + +file.nameonly = nameonly + +file.suffixonly = suffixonly +file.extname = suffixonly -- obsolete +file.suffix = suffixonly function file.removesuffix(filename) return (gsub(filename,"%.[%a%d]+$","")) @@ -323,7 +328,7 @@ end --~ local pattern = (noslashes^0 * slashes)^0 * (noperiod^1 * period)^1 * C(noperiod^1) * -1 ---~ function file.extname(name) +--~ function file.suffixonly(name) --~ return lpegmatch(pattern,name) or "" --~ end @@ -385,7 +390,7 @@ end --~ end --~ end ---~ local test = file.extname +--~ local test = file.suffixonly --~ local test = file.basename --~ local test = file.dirname --~ local test = file.addsuffix diff --git a/tex/context/base/l-lpeg.lua b/tex/context/base/l-lpeg.lua index dc479dd5f..fa3b210af 100644 --- a/tex/context/base/l-lpeg.lua +++ b/tex/context/base/l-lpeg.lua @@ -58,9 +58,8 @@ local byte, char, gmatch = string.byte, string.char, string.gmatch lpeg.patterns = lpeg.patterns or { } -- so that we can share local patterns = lpeg.patterns -local P, R, S, V, match = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.match -local Ct, C, Cs, Cc = lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc -local lpegtype = lpeg.type +local P, R, S, V, Ct, C, Cs, Cc = lpeg.P, lpeg.R, 
lpeg.S, lpeg.V, lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc +local lpegtype, lpegmatch = lpeg.type, lpeg.match local utfcharacters = string.utfcharacters local utfgmatch = unicode and unicode.utf8.gmatch @@ -164,8 +163,17 @@ patterns.beginline = #(1-newline) -- print(string.unquoted('"test"')) -- print(string.unquoted('"test"')) -function lpeg.anywhere(pattern) --slightly adapted from website - return P { P(pattern) + 1 * V(1) } -- why so complex? +local function anywhere(pattern) --slightly adapted from website + return P { P(pattern) + 1 * V(1) } +end + +lpeg.anywhere = anywhere + +function lpeg.instringchecker(p) + p = anywhere(p) + return function(str) + return lpegmatch(p,str) and true or false + end end function lpeg.splitter(pattern, action) @@ -214,13 +222,13 @@ function string.splitup(str,separator) if not separator then separator = "," end - return match(splitters_m[separator] or splitat(separator),str) + return lpegmatch(splitters_m[separator] or splitat(separator),str) end ---~ local p = splitat("->",false) print(match(p,"oeps->what->more")) -- oeps what more ---~ local p = splitat("->",true) print(match(p,"oeps->what->more")) -- oeps what->more ---~ local p = splitat("->",false) print(match(p,"oeps")) -- oeps ---~ local p = splitat("->",true) print(match(p,"oeps")) -- oeps +--~ local p = splitat("->",false) print(lpegmatch(p,"oeps->what->more")) -- oeps what more +--~ local p = splitat("->",true) print(lpegmatch(p,"oeps->what->more")) -- oeps what->more +--~ local p = splitat("->",false) print(lpegmatch(p,"oeps")) -- oeps +--~ local p = splitat("->",true) print(lpegmatch(p,"oeps")) -- oeps local cache = { } @@ -230,7 +238,7 @@ function lpeg.split(separator,str) c = tsplitat(separator) cache[separator] = c end - return match(c,str) + return lpegmatch(c,str) end function string.split(str,separator) @@ -240,7 +248,7 @@ function string.split(str,separator) c = tsplitat(separator) cache[separator] = c end - return match(c,str) + return lpegmatch(c,str) else return { str } end @@ -256,7 +264,7 @@ patterns.textline = content --~ local linesplitter = Ct(content^0) --~ --~ function string.splitlines(str) ---~ return match(linesplitter,str) +--~ return lpegmatch(linesplitter,str) --~ end local linesplitter = tsplitat(newline) @@ -264,7 +272,7 @@ local linesplitter = tsplitat(newline) patterns.linesplitter = linesplitter function string.splitlines(str) - return match(linesplitter,str) + return lpegmatch(linesplitter,str) end local utflinesplitter = utfbom^-1 * tsplitat(newline) @@ -272,7 +280,7 @@ local utflinesplitter = utfbom^-1 * tsplitat(newline) patterns.utflinesplitter = utflinesplitter function string.utfsplitlines(str) - return match(utflinesplitter,str or "") + return lpegmatch(utflinesplitter,str or "") end --~ lpeg.splitters = cache -- no longer public @@ -287,7 +295,7 @@ function lpeg.checkedsplit(separator,str) c = Ct(separator^0 * other * (separator^1 * other)^0) cache[separator] = c end - return match(c,str) + return lpegmatch(c,str) end function string.checkedsplit(str,separator) @@ -298,7 +306,7 @@ function string.checkedsplit(str,separator) c = Ct(separator^0 * other * (separator^1 * other)^0) cache[separator] = c end - return match(c,str) + return lpegmatch(c,str) end --~ from roberto's site: @@ -313,10 +321,10 @@ patterns.utf8byte = utf8byte --~ local str = " a b c d " ---~ local s = lpeg.stripper(lpeg.R("az")) print("["..lpeg.match(s,str).."]") ---~ local s = lpeg.keeper(lpeg.R("az")) print("["..lpeg.match(s,str).."]") ---~ local s = lpeg.stripper("ab") 
print("["..lpeg.match(s,str).."]") ---~ local s = lpeg.keeper("ab") print("["..lpeg.match(s,str).."]") +--~ local s = lpeg.stripper(lpeg.R("az")) print("["..lpegmatch(s,str).."]") +--~ local s = lpeg.keeper(lpeg.R("az")) print("["..lpegmatch(s,str).."]") +--~ local s = lpeg.stripper("ab") print("["..lpegmatch(s,str).."]") +--~ local s = lpeg.keeper("ab") print("["..lpegmatch(s,str).."]") local cache = { } @@ -408,14 +416,14 @@ function lpeg.balancer(left,right) return P { left * ((1 - left - right) + V(1))^0 * right } end ---~ print(1,match(lpeg.firstofsplit(":"),"bc:de")) ---~ print(2,match(lpeg.firstofsplit(":"),":de")) -- empty ---~ print(3,match(lpeg.firstofsplit(":"),"bc")) ---~ print(4,match(lpeg.secondofsplit(":"),"bc:de")) ---~ print(5,match(lpeg.secondofsplit(":"),"bc:")) -- empty ---~ print(6,match(lpeg.secondofsplit(":",""),"bc")) ---~ print(7,match(lpeg.secondofsplit(":"),"bc")) ---~ print(9,match(lpeg.secondofsplit(":","123"),"bc")) +--~ print(1,lpegmatch(lpeg.firstofsplit(":"),"bc:de")) +--~ print(2,lpegmatch(lpeg.firstofsplit(":"),":de")) -- empty +--~ print(3,lpegmatch(lpeg.firstofsplit(":"),"bc")) +--~ print(4,lpegmatch(lpeg.secondofsplit(":"),"bc:de")) +--~ print(5,lpegmatch(lpeg.secondofsplit(":"),"bc:")) -- empty +--~ print(6,lpegmatch(lpeg.secondofsplit(":",""),"bc")) +--~ print(7,lpegmatch(lpeg.secondofsplit(":"),"bc")) +--~ print(9,lpegmatch(lpeg.secondofsplit(":","123"),"bc")) --~ -- slower: --~ @@ -429,7 +437,7 @@ local nany = utf8char/"" function lpeg.counter(pattern) pattern = Cs((P(pattern)/" " + nany)^0) return function(str) - return #match(pattern,str) + return #lpegmatch(pattern,str) end end @@ -443,7 +451,7 @@ if utfgmatch then end return n else -- 4 times slower but still faster than / function - return #match(Cs((P(what)/" " + nany)^0),str) + return #lpegmatch(Cs((P(what)/" " + nany)^0),str) end end @@ -458,9 +466,9 @@ else p = Cs((P(what)/" " + nany)^0) cache[p] = p end - return #match(p,str) + return #lpegmatch(p,str) else -- 4 times slower but still faster than / function - return #match(Cs((P(what)/" " + nany)^0),str) + return #lpegmatch(Cs((P(what)/" " + nany)^0),str) end end @@ -487,7 +495,7 @@ local p = Cs((S("-.+*%()[]") / patterns_escapes + anything)^0) local s = Cs((S("-.+*%()[]") / simple_escapes + anything)^0) function string.escapedpattern(str,simple) - return match(simple and s or p,str) + return lpegmatch(simple and s or p,str) end -- utf extensies @@ -534,7 +542,7 @@ else p = P(uc) end end - match((utf8char/f)^0,str) + lpegmatch((utf8char/f)^0,str) return p end @@ -550,7 +558,7 @@ function lpeg.UR(str,more) first = str last = more or first else - first, last = match(range,str) + first, last = lpegmatch(range,str) if not last then return P(str) end @@ -586,11 +594,15 @@ end --~ print(lpeg.count("äáàa",lpeg.UR("àá"))) --~ print(lpeg.count("äáàa",lpeg.UR(0x0000,0xFFFF))) -function lpeg.oneof(list,...) -- lpeg.oneof("elseif","else","if","then") +function lpeg.is_lpeg(p) + return p and lpegtype(p) == "pattern" +end + +function lpeg.oneof(list,...) -- lpeg.oneof("elseif","else","if","then") -- assume proper order if type(list) ~= "table" then list = { list, ... } end - -- sort(list) -- longest match first + -- table.sort(list) -- longest match first local p = P(list[1]) for l=2,#list do p = p + P(list[l]) @@ -598,10 +610,6 @@ function lpeg.oneof(list,...) -- lpeg.oneof("elseif","else","if","then") return p end -function lpeg.is_lpeg(p) - return p and lpegtype(p) == "pattern" -end - -- For the moment here, but it might move to utilities. 
Beware, we need to -- have the longest keyword first, so 'aaa' comes beforte 'aa' which is why we -- loop back from the end cq. prepend. diff --git a/tex/context/base/l-url.lua b/tex/context/base/l-url.lua index fd19efc73..dfd7905ad 100644 --- a/tex/context/base/l-url.lua +++ b/tex/context/base/l-url.lua @@ -9,7 +9,7 @@ if not modules then modules = { } end modules ['l-url'] = { local char, gmatch, gsub, format, byte, find = string.char, string.gmatch, string.gsub, string.format, string.byte, string.find local concat = table.concat local tonumber, type = tonumber, type -local P, C, R, S, Cs, Cc, Ct = lpeg.P, lpeg.C, lpeg.R, lpeg.S, lpeg.Cs, lpeg.Cc, lpeg.Ct +local P, C, R, S, Cs, Cc, Ct, Cf, Cg, V = lpeg.P, lpeg.C, lpeg.R, lpeg.S, lpeg.Cs, lpeg.Cc, lpeg.Ct, lpeg.Cf, lpeg.Cg, lpeg.V local lpegmatch, lpegpatterns, replacer = lpeg.match, lpeg.patterns, lpeg.replacer -- from wikipedia: @@ -42,15 +42,19 @@ local endofstring = P(-1) local hexdigit = R("09","AF","af") local plus = P("+") local nothing = Cc("") -local escaped = (plus / " ") + (percent * C(hexdigit * hexdigit) / tochar) +local escapedchar = (percent * C(hexdigit * hexdigit)) / tochar +local escaped = (plus / " ") + escapedchar -- we assume schemes with more than 1 character (in order to avoid problems with windows disks) -- we also assume that when we have a scheme, we also have an authority +-- +-- maybe we should already split the query (better for unescaping as = & can be part of a value local schemestr = Cs((escaped+(1-colon-slash-qmark-hash))^2) local authoritystr = Cs((escaped+(1- slash-qmark-hash))^0) local pathstr = Cs((escaped+(1- qmark-hash))^0) -local querystr = Cs((escaped+(1- hash))^0) +----- querystr = Cs((escaped+(1- hash))^0) +local querystr = Cs(( (1- hash))^0) local fragmentstr = Cs((escaped+(1- endofstring))^0) local scheme = schemestr * colon + nothing @@ -65,11 +69,19 @@ local parser = Ct(validurl) lpegpatterns.url = validurl lpegpatterns.urlsplitter = parser -local escapes = { } ; for i=0,255 do escapes[i] = format("%%%02X",i) end +local escapes = { } + +setmetatable(escapes, { __index = function(t,k) + local v = format("%%%02X",byte(k)) + t[k] = v + return v +end }) -local escaper = Cs((R("09","AZ","az") + S("-./_") + P(1) / escapes)^0) +local escaper = Cs((R("09","AZ","az") + P(" ")/"%%20" + S("-./_") + P(1) / escapes)^0) -- space happens most +local unescaper = Cs((escapedchar + 1)^0) -lpegpatterns.urlescaper = escaper +lpegpatterns.urlescaper = escaper +lpegpatterns.urlunescaper = unescaper -- todo: reconsider Ct as we can as well have five return values (saves a table) -- so we can have two parsers, one with and one without @@ -107,10 +119,32 @@ local rootbased = P("/") local barswapper = replacer("|",":") local backslashswapper = replacer("\\","/") +-- queries: + +local equal = P("=") +local amp = P("&") +local key = Cs(((escapedchar+1)-equal )^0) +local value = Cs(((escapedchar+1)-amp -endofstring)^0) + +local splitquery = Cf ( Cc { } * P { "sequence", + sequence = V("pair") * (amp * V("pair"))^0, + pair = Cg(key * equal * value), +}, rawset) + +-- hasher + local function hashed(str) -- not yet ok (/test?test) + if str == "" then + return { + scheme = "invalid", + original = str, + } + end local s = split(str) - local somescheme = s[1] ~= "" - local somequery = s[4] ~= "" + local rawscheme = s[1] + local rawquery = s[4] + local somescheme = rawscheme ~= "" + local somequery = rawquery ~= "" if not somescheme and not somequery then s = { scheme = "file", @@ -126,14 +160,17 @@ local function hashed(str) 
-- not yet ok (/test?test) local authority, path, filename = s[2], s[3] if authority == "" then filename = path + elseif path == "" then + filename = "" else filename = authority .. "/" .. path end s = { - scheme = s[1], + scheme = rawscheme, authority = authority, path = path, - query = s[4], + query = lpegmatch(unescaper,rawquery), -- unescaped, but possible conflict with & and = + queries = lpegmatch(splitquery,rawquery), -- split first and then unescaped fragment = s[5], original = str, noscheme = false, @@ -143,6 +180,8 @@ local function hashed(str) -- not yet ok (/test?test) return s end +-- inspect(hashed("template://test")) + -- Here we assume: -- -- files: /// = relative @@ -193,23 +232,64 @@ function url.construct(hash) -- dodo: we need to escape ! return lpegmatch(escaper,concat(fullurl)) end -function url.filename(filename) +function url.filename(filename) -- why no lpeg here ? local t = hashed(filename) return (t.scheme == "file" and (gsub(t.path,"^/([a-zA-Z])([:|])/)","%1:"))) or filename end +local function escapestring(str) + return lpegmatch(escaper,str) +end + +url.escape = escapestring + +-- function url.query(str) -- separator could be an option +-- if type(str) == "string" then +-- local t = { } +-- for k, v in gmatch(str,"([^&=]*)=([^&=]*)") do +-- t[k] = v +-- end +-- return t +-- else +-- return str +-- end +-- end + function url.query(str) if type(str) == "string" then - local t = { } - for k, v in gmatch(str,"([^&=]*)=([^&=]*)") do - t[k] = v - end - return t + return lpegmatch(splitquery,str) else return str end end +function url.toquery(data) + local td = type(data) + if td == "string" then + return #str and escape(data) or nil -- beware of double escaping + elseif td == "table" then + if next(data) then + local t = { } + for k, v in next, data do + t[#t+1] = format("%s=%s",k,escapestring(v)) + end + return concat(t,"&") + end + else + -- nil is a signal that no query + end +end + +-- /test/ | /test | test/ | test => test + +function url.barepath(path) + if not path or path == "" then + return "" + else + return (gsub(path,"^/?(.-)/?$","%1")) + end +end + --~ print(url.filename("file:///c:/oeps.txt")) --~ print(url.filename("c:/oeps.txt")) --~ print(url.filename("file:///oeps.txt")) @@ -224,6 +304,9 @@ end --~ print(table.serialize(t)) --~ end +--~ inspect(url.hashed("http://www.pragma-ade.com/test%20test?test=test%20test&x=123%3d45")) +--~ inspect(url.hashed("http://www.pragma-ade.com/test%20test?test=test%20test&x=123%3d45")) + --~ test("sys:///./colo-rgb") --~ test("/data/site/output/q2p-develop/resources/ecaboperception4_res/topicresources/58313733/figuur-cow.jpg") diff --git a/tex/context/base/lpdf-fmt.lua b/tex/context/base/lpdf-fmt.lua index 8e118db12..3ae41b4f6 100644 --- a/tex/context/base/lpdf-fmt.lua +++ b/tex/context/base/lpdf-fmt.lua @@ -360,7 +360,7 @@ local function loadprofile(name,filename) for i=1,#databases do local filename = locatefile(databases[i]) if filename and filename ~= "" then - local suffix = file.extname(filename) + local suffix = file.suffix(filename) local lname = lower(name) if suffix == "xml" then local xmldata = xml.load(filename) -- no need for caching it diff --git a/tex/context/base/lpdf-u3d.lua b/tex/context/base/lpdf-u3d.lua index ac6038997..f5f66a487 100644 --- a/tex/context/base/lpdf-u3d.lua +++ b/tex/context/base/lpdf-u3d.lua @@ -370,7 +370,7 @@ local function insert3d(spec) -- width, height, factor, display, controls, label subtype = "PRC" elseif find(subdata,"^U3D") then subtype = "U3D" - elseif 
file.extname(foundname) == "prc" then + elseif file.suffix(foundname) == "prc" then subtype = "PRC" end diff --git a/tex/context/base/mlib-run.lua b/tex/context/base/mlib-run.lua index 813b35399..08ed22e47 100644 --- a/tex/context/base/mlib-run.lua +++ b/tex/context/base/mlib-run.lua @@ -29,29 +29,29 @@ approach is way faster than an external <l n='metapost'/> and processing time nears zero.</p> --ldx]]-- +local format, gsub, match, find = string.format, string.gsub, string.match, string.find +local emptystring = string.is_empty +local lpegmatch, P = lpeg.match, lpeg.P + local trace_graphics = false trackers.register("metapost.graphics", function(v) trace_graphics = v end) local report_metapost = logs.reporter("metapost") - local texerrormessage = logs.texerrormessage -local format, gsub, match, find = string.format, string.gsub, string.match, string.find -local emptystring = string.is_empty - -local starttiming, stoptiming = statistics.starttiming, statistics.stoptiming +local starttiming = statistics.starttiming +local stoptiming = statistics.stoptiming -local mplib = mplib +local mplib = mplib +metapost = metapost or { } +local metapost = metapost -metapost = metapost or { } -local metapost = metapost +local mplibone = tonumber(mplib.version()) <= 1.50 metapost.showlog = false metapost.lastlog = "" metapost.texerrors = false metapost.exectime = metapost.exectime or { } -- hack -local mplibone = tonumber(mplib.version()) <= 1.50 - directives.register("mplib.texerrors", function(v) metapost.texerrors = v end) function metapost.resetlastlog() @@ -68,19 +68,57 @@ end -- end -- end -local function i_finder(name, mode, ftype) -- fake message for mpost.map and metafun.mpvi - name = file.is_qualified_path(name) and name or resolvers.findfile(name,ftype) - if not (find(name,"/metapost/context/base/") or find(name,"/metapost/context/") or find(name,"/metapost/base/")) then +----- mpbasepath = lpeg.instringchecker(lpeg.append { "/metapost/context/", "/metapost/base/" }) +local mpbasepath = lpeg.instringchecker(P("/metapost/") * (P("context") + P("base")) * P("/")) + +-- local function i_finder(askedname,mode,ftype) -- fake message for mpost.map and metafun.mpvi +-- local foundname = file.is_qualified_path(askedname) and askedname or resolvers.findfile(askedname,ftype) +-- if not mpbasepath(foundname) then +-- -- we could use the via file but we don't have a complete io interface yet +-- local data, found, forced = metapost.checktexts(io.loaddata(foundname) or "") +-- if found then +-- local tempname = luatex.registertempfile(foundname,true) +-- io.savedata(tempname,data) +-- foundname = tempname +-- end +-- end +-- return foundname +-- end + +-- mplib has no real io interface so we have a different mechanism than +-- tex (as soon as we have more control, we will use the normal code) + +local finders = { } +mplib.finders = finders + +-- for some reason mp sometimes calls this function twice which is inefficient +-- but we cannot catch this + +local function preprocessed(name) + if not mpbasepath(name) then + -- we could use the via file but we don't have a complete io interface yet local data, found, forced = metapost.checktexts(io.loaddata(name) or "") if found then local temp = luatex.registertempfile(name,true) io.savedata(temp,data) - name = temp + return temp end end return name end +mplib.preprocessed = preprocessed -- helper + +finders.file = function(specification,name,mode,ftype) + return preprocessed(resolvers.findfile(name,ftype)) +end + +local function i_finder(name,mode,ftype) -- fake 
message for mpost.map and metafun.mpvi + local specification = url.hashed(name) + local finder = finders[specification.scheme] or finders.file + return finder(specification,name,mode,ftype) +end + local function o_finder(name, mode, ftype) return name end diff --git a/tex/context/base/status-files.pdf b/tex/context/base/status-files.pdf Binary files differindex 157e012be..00bbd4ee1 100644 --- a/tex/context/base/status-files.pdf +++ b/tex/context/base/status-files.pdf diff --git a/tex/context/base/status-lua.pdf b/tex/context/base/status-lua.pdf Binary files differindex 49ee75d72..2d0693a79 100644 --- a/tex/context/base/status-lua.pdf +++ b/tex/context/base/status-lua.pdf diff --git a/tex/context/base/status-mkiv.lua b/tex/context/base/status-mkiv.lua index 7b7ff67ef..1a9c6b6a1 100644 --- a/tex/context/base/status-mkiv.lua +++ b/tex/context/base/status-mkiv.lua @@ -1639,6 +1639,13 @@ return { }, { category = "mkiv", + comment = "code used in a project", + filename = "meta-lua", + loading = "experimental", + status = "okay", + }, + { + category = "mkiv", filename = "meta-tex", loading = "always", status = "okay", @@ -3723,6 +3730,12 @@ return { }, { category = "lua", + filename = "meta-lua", + loading = "meta-lua", + status = "okay", + }, + { + category = "lua", comment = "could be done nicer nowadays but who needs it", filename = "meta-pdf", loading = "meta-pdf", diff --git a/tex/context/base/strc-flt.mkvi b/tex/context/base/strc-flt.mkvi index 5dd292c82..524cf0f11 100644 --- a/tex/context/base/strc-flt.mkvi +++ b/tex/context/base/strc-flt.mkvi @@ -485,22 +485,34 @@ % start-stop +% \startplacefigure[title=oeps][subtitle=whatever] +% \framed[width=10cm,height=5cm]{\floatuserdataparameter{subtitle}} +% \stopplacefigure + +\installcorenamespace{floatuserdata} + +\installsetuponlycommandhandler \??floatuserdata {floatuserdata} + \unexpanded\def\strc_floats_start_place#tag% {\flushnotes \page_otr_command_flush_side_floats % here ! \strc_floats_begin_group \strc_floats_set_current_tag{#tag}% - \dosingleempty\strc_floats_start_place_indeed} + \dodoubleempty\strc_floats_start_place_indeed} -\def\strc_floats_start_place_indeed[#settings]% tricky ... 
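The mlib-run.lua rework above replaces the single hard-wired input finder with a small `finders` table keyed by URL scheme: `i_finder` hashes the requested name, looks up `finders[scheme]`, and falls back to `finders.file`, which resolves the name through the resolvers and runs it through `preprocessed`. A minimal sketch of that dispatch, assuming nothing beyond stock Lua; the `demo` finder and the `string.match` scheme extraction are stand-ins for what the real code does with `url.hashed` and `resolvers.findfile`.

-- sketch of the scheme-based finder dispatch; both finders are stubs
local finders = { }

finders.file = function(specification, name, mode, ftype)
    return name  -- fallback: the real code resolves the file and preprocesses btex/etex snippets
end

finders.demo = function(specification, name, mode, ftype)
    return "generated from " .. name
end

local function i_finder(name, mode, ftype)
    local scheme = name:match("^(%a[%a%d+.-]*)://") or "file"
    local specification = { scheme = scheme, original = name }
    local finder = finders[scheme] or finders.file
    return finder(specification, name, mode, ftype)
end

print(i_finder("metafun.mpvi", "r", "mp"))     -- no scheme, falls back to finders.file
print(i_finder("demo://whatever", "r", "mp"))  -- dispatched by scheme

Registering another scheme is then just a matter of adding an entry to the table, which is why the old monolithic i_finder could be retired (it survives above only as a comment).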
saved not ok yet +\def\strc_floats_start_place_indeed[#settings][#userdata]% {\strc_floats_reset_variables \edef\savedfloatlocation{\floatcaptionparameter\c!location}% \setupcurrentfloatcaption[\c!location=,\c!reference=,\c!title=,\c!marking=,\c!list=,\c!bookmark=,#settings]% + \def\savedfloatuserdata{#2}% \edef\floatlocation{\floatcaptionparameter\c!location}% \setupcurrentfloatcaption[\c!location=\savedfloatlocation]% \ifx\floatlocation\empty \edef\floatlocation{\floatparameter\c!default}% \fi + \ifsecondargument + \setupcurrentfloatuserdata[#userdata]% + \fi \doifinsetelse\v!split\floatlocation\strc_floats_place_next_box_split\strc_floats_place_next_box_normal \bgroup \ignorespaces} @@ -758,6 +770,9 @@ \newconditional\usesamefloatnumber \def\strc_floats_place_packaged_boxes + {\expandafter\strc_floats_place_packaged_boxes_indeed\expandafter{\savedfloatuserdata}} + +\def\strc_floats_place_packaged_boxes_indeed#userdata% {\bgroup \ifconditional\usesamefloatnumber \globallet\currentfloatnumber \previousfloatnumber @@ -780,7 +795,7 @@ \s!hascaption=\ifnofloatcaption \v!no\else\v!yes\fi,% \s!hasnumber=\ifnofloatnumber \v!no\else\v!yes\fi,% \s!hastitle=\ifemptyfloatcaption\v!no\else\v!yes\fi]% - []% + [#userdata]% \globallet\previousfloatnumber \m_strc_counters_last_registered_index \globallet\currentfloatnumber \m_strc_counters_last_registered_index \globallet\currentfloatattribute \m_strc_counters_last_registered_attribute diff --git a/tex/context/base/strc-lst.mkvi b/tex/context/base/strc-lst.mkvi index 5e2b26cc6..965e53700 100644 --- a/tex/context/base/strc-lst.mkvi +++ b/tex/context/base/strc-lst.mkvi @@ -358,9 +358,15 @@ \ctxcommand{listuserdata("\currentlist",\currentlistindex,"#name")}% \dostoptagged} +\def\rawstructurelistuservariable#name% + {\ctxcommand{listuserdata("\currentlist",\currentlistindex,"#name")}} + \unexpanded\def\structurelistfirst {\structurelistuservariable\s!first } % s! \unexpanded\def\structurelistsecond{\structurelistuservariable\s!second} % s! +\unexpanded\def\rawstructurelistfirst {\rawstructurelistuservariable\s!first } % s! +\unexpanded\def\rawstructurelistsecond{\rawstructurelistuservariable\s!second} % s! 
+ \unexpanded\def\doifstructurelisthaspageelse {\ctxcommand{doiflisthaspageelse("\currentlist",\currentlistindex)}} diff --git a/tex/context/base/trac-fil.lua b/tex/context/base/trac-fil.lua index 73d5d1c7d..8cc903e2a 100644 --- a/tex/context/base/trac-fil.lua +++ b/tex/context/base/trac-fil.lua @@ -6,13 +6,14 @@ if not modules then modules = { } end modules ['trac-fil'] = { license = "see context related readme files" } -local rawset, tonumber = rawset, tonumber +local rawset, tonumber, type, pcall = rawset, tonumber, type, pcall local format, concat = string.format, table.concat local openfile = io.open local date = os.date local sortedpairs = table.sortedpairs -local P, C, Cc, Cg, Cf, Ct, Cs, lpegmatch = lpeg.P, lpeg.C, lpeg.Cc, lpeg.Cg, lpeg.Cf, lpeg.Ct, lpeg.Cs, lpeg.match +local P, C, Cc, Cg, Cf, Ct, Cs, Carg = lpeg.P, lpeg.C, lpeg.Cc, lpeg.Cg, lpeg.Cf, lpeg.Ct, lpeg.Cs, lpeg.Carg +local lpegmatch = lpeg.match local patterns = lpeg.patterns local cardinal = patterns.cardinal @@ -44,29 +45,45 @@ patterns.timestamp = timestamp loggers = loggers or { } -local tz = os.timezone(true) - -local bugged = { } - -function loggers.message(filename,t) - if not bugged[filename] then - local f = openfile(filename,"a+") - if not f then - dir.mkdirs(file.dirname(filename)) - f = openfile(filename,"a+") - end - if f then - -- if needed we can speed this up with a concat - f:write("[",date("!%Y-%m-%d %H:%M:%S"),tz,"]") - for k, v in sortedpairs(t) do - f:write(format(" %s=%q",k,v)) +local timeformat = format("[%%s%s]",os.timezone(true)) +local dateformat = "!%Y-%m-%d %H:%M:%S" + +function loggers.makeline(t) + local result = { } -- minimize time that file is open + result[#result+1] = format(timeformat,date(dateformat)) + for k, v in sortedpairs(t) do + local tv = type(v) + if tv == "string" then + if v ~= "password" then + result[#result+1] = format(" %s=%q",k,v) end - f:write("\n") - f:close() - else - bugged[filename] = true + elseif tv == "number" or tv == "boolean" then + result[#result+1] = format(" %s=%q",k,tostring(v)) end end + return concat(result," ") +end + +local function append(filename,...) + local f = openfile(filename,"a+") + if not f then + dir.mkdirs(file.dirname(filename)) + f = openfile(filename,"a+") + end + if f then + f:write(...) + f:close() + return true + else + return false + end +end + +function loggers.store(filename,data) -- a log service is nicer + if type(data) == "table"then + data = loggers.makeline(data) + end + pcall(append,filename,data,"\n") end function loggers.collect(filename,result) @@ -87,60 +104,78 @@ function loggers.collect(filename,result) end end ---~ local template = [[ ---~ <table> ---~ <tr>%s</tr> ---~ %s ---~ </table> ---~ ]] - ---~ function loggers.tohtml(entries,fields) ---~ if not fields or #fields == 0 then ---~ return "" ---~ end ---~ if type(entries) == "string" then ---~ entries = loggers.collect(entries) ---~ end ---~ local scratch, lines = { }, { } ---~ for i=1,#entries do ---~ local entry = entries[i] ---~ local status = entry.status ---~ for i=1,#fields do ---~ local field = fields[i] ---~ local v = status[field.name] ---~ if v ~= nil then ---~ v = tostring(v) ---~ local f = field.format ---~ if f then v = format(f,v) end ---~ scratch[i] = format("<td nowrap='nowrap' align='%s'>%s</td>",field.align or "left",v) ---~ else ---~ scratch[i] = "<td/>" ---~ end ---~ end ---~ lines[i] = "<tr>" .. concat(scratch) .. 
"</tr>" ---~ end ---~ for i=1,#fields do ---~ local field = fields[i] ---~ scratch[i] = format("<th nowrap='nowrap' align='left'>%s</th>", field.label or field.name) ---~ end ---~ local result = format(template,concat(scratch),concat(lines,"\n")) ---~ return result, entries ---~ end - ---~ -- loggers.message("test.log","name","whatever","more",123) - ---~ local fields = { ---~ -- { name = "id", align = "left" }, ---~ -- { name = "timestamp", align = "left" }, ---~ { name = "assessment", align = "left" }, ---~ { name = "assessmentname", align = "left" }, ---~ -- { name = "category", align = "left" }, ---~ { name = "filesize", align = "right" }, ---~ { name = "nofimages", align = "center" }, ---~ -- { name = "product", align = "left" }, ---~ { name = "resultsize", align = "right" }, ---~ { name = "fetchtime", align = "right", format = "%2.3f" }, ---~ { name = "runtime", align = "right", format = "%2.3f" }, ---~ { name = "organization", align = "left" }, ---~ -- { name = "username", align = "left" }, ---~ } +function loggers.fields(results) -- returns hash of fields with counts so that we can decide on importance + local fields = { } + if results then + for i=1,#results do + local r = results[i] + for k, v in next, r do + local f = fields[k] + if not f then + fields[k] = 1 + else + fields[k] = f + 1 + end + end + end + end + return fields +end + +local template = [[<!-- log entries: begin --!> +<table> +<tr>%s</tr> +%s +</table> +<!-- log entries: end --!> +]] + +function loggers.tohtml(entries,fields) + if not fields or #fields == 0 then + return "" + end + if type(entries) == "string" then + entries = loggers.collect(entries) + end + local scratch, lines = { }, { } + for i=1,#entries do + local entry = entries[i] + local status = entry.status + for i=1,#fields do + local field = fields[i] + local v = status[field.name] + if v ~= nil then + v = tostring(v) + local f = field.format + if f then + v = format(f,v) + end + scratch[i] = format("<td nowrap='nowrap' align='%s'>%s</td>",field.align or "left",v) + else + scratch[i] = "<td/>" + end + end + lines[i] = format("<tr>%s</tr>",concat(scratch)) + end + for i=1,#fields do + local field = fields[i] + scratch[i] = format("<th nowrap='nowrap' align='left'>%s</th>", field.label or field.name) + end + local result = format(template,concat(scratch),concat(lines,"\n")) + return result, entries +end + +-- loggers.store("test.log", { name = "whatever", more = math.random(1,100) }) + +-- local fields = { +-- { name = "name", align = "left" }, +-- { name = "more", align = "right" }, +-- } + +-- local entries = loggers.collect("test.log") +-- local html = loggers.tohtml(entries,fields) + +-- inspect(entries) +-- inspect(fields) +-- inspect(html) + diff --git a/tex/context/base/util-jsn.lua b/tex/context/base/util-jsn.lua index 7de6e2729..6ca956c02 100644 --- a/tex/context/base/util-jsn.lua +++ b/tex/context/base/util-jsn.lua @@ -128,7 +128,7 @@ function json.tostring(value) end -- local tmp = [[ { "a" : true, "b" : [ 123 , 456E-10, { "a" : true, "b" : [ 123 , 456 ] } ] } ]] --- + -- tmp = json.tolua(tmp) -- inspect(tmp) -- tmp = json.tostring(tmp) @@ -137,7 +137,7 @@ end -- inspect(tmp) -- tmp = json.tostring(tmp) -- inspect(tmp) --- + -- inspect(json.tostring(true)) return json diff --git a/tex/generic/context/luatex/luatex-basics-gen.lua b/tex/generic/context/luatex/luatex-basics-gen.lua index bdbc3cf51..2f03efba8 100644 --- a/tex/generic/context/luatex/luatex-basics-gen.lua +++ b/tex/generic/context/luatex/luatex-basics-gen.lua @@ -87,7 +87,7 @@ local 
remapper = { function resolvers.findfile(name,fileformat) name = string.gsub(name,"\\","\/") fileformat = fileformat and string.lower(fileformat) - local found = kpse.find_file(name,(fileformat and fileformat ~= "" and (remapper[fileformat] or fileformat)) or file.extname(name,"tex")) + local found = kpse.find_file(name,(fileformat and fileformat ~= "" and (remapper[fileformat] or fileformat)) or file.suffix(name,"tex")) if not found or found == "" then found = kpse.find_file(name,"other text files") end @@ -96,7 +96,7 @@ end function resolvers.findbinfile(name,fileformat) if not fileformat or fileformat == "" then - fileformat = file.extname(name) -- string.match(name,"%.([^%.]-)$") + fileformat = file.suffix(name) -- string.match(name,"%.([^%.]-)$") end return resolvers.findfile(name,(fileformat and remapper[fileformat]) or fileformat) end diff --git a/tex/generic/context/luatex/luatex-fonts-merged.lua b/tex/generic/context/luatex/luatex-fonts-merged.lua index b908c64b9..48800990d 100644 --- a/tex/generic/context/luatex/luatex-fonts-merged.lua +++ b/tex/generic/context/luatex/luatex-fonts-merged.lua @@ -1,6 +1,6 @@ -- merged file : luatex-fonts-merged.lua -- parent file : luatex-fonts.lua --- merge date : 08/05/12 12:52:29 +-- merge date : 08/08/12 23:45:30 do -- begin closure to overcome local limits and interference @@ -1164,9 +1164,8 @@ local byte, char, gmatch = string.byte, string.char, string.gmatch lpeg.patterns = lpeg.patterns or { } -- so that we can share local patterns = lpeg.patterns -local P, R, S, V, match = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.match -local Ct, C, Cs, Cc = lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc -local lpegtype = lpeg.type +local P, R, S, V, Ct, C, Cs, Cc = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc +local lpegtype, lpegmatch = lpeg.type, lpeg.match local utfcharacters = string.utfcharacters local utfgmatch = unicode and unicode.utf8.gmatch @@ -1270,8 +1269,17 @@ patterns.beginline = #(1-newline) -- print(string.unquoted('"test"')) -- print(string.unquoted('"test"')) -function lpeg.anywhere(pattern) --slightly adapted from website - return P { P(pattern) + 1 * V(1) } -- why so complex? 
+local function anywhere(pattern) --slightly adapted from website + return P { P(pattern) + 1 * V(1) } +end + +lpeg.anywhere = anywhere + +function lpeg.instringchecker(p) + p = anywhere(p) + return function(str) + return lpegmatch(p,str) and true or false + end end function lpeg.splitter(pattern, action) @@ -1320,13 +1328,13 @@ function string.splitup(str,separator) if not separator then separator = "," end - return match(splitters_m[separator] or splitat(separator),str) + return lpegmatch(splitters_m[separator] or splitat(separator),str) end ---~ local p = splitat("->",false) print(match(p,"oeps->what->more")) -- oeps what more ---~ local p = splitat("->",true) print(match(p,"oeps->what->more")) -- oeps what->more ---~ local p = splitat("->",false) print(match(p,"oeps")) -- oeps ---~ local p = splitat("->",true) print(match(p,"oeps")) -- oeps +--~ local p = splitat("->",false) print(lpegmatch(p,"oeps->what->more")) -- oeps what more +--~ local p = splitat("->",true) print(lpegmatch(p,"oeps->what->more")) -- oeps what->more +--~ local p = splitat("->",false) print(lpegmatch(p,"oeps")) -- oeps +--~ local p = splitat("->",true) print(lpegmatch(p,"oeps")) -- oeps local cache = { } @@ -1336,7 +1344,7 @@ function lpeg.split(separator,str) c = tsplitat(separator) cache[separator] = c end - return match(c,str) + return lpegmatch(c,str) end function string.split(str,separator) @@ -1346,7 +1354,7 @@ function string.split(str,separator) c = tsplitat(separator) cache[separator] = c end - return match(c,str) + return lpegmatch(c,str) else return { str } end @@ -1362,7 +1370,7 @@ patterns.textline = content --~ local linesplitter = Ct(content^0) --~ --~ function string.splitlines(str) ---~ return match(linesplitter,str) +--~ return lpegmatch(linesplitter,str) --~ end local linesplitter = tsplitat(newline) @@ -1370,7 +1378,7 @@ local linesplitter = tsplitat(newline) patterns.linesplitter = linesplitter function string.splitlines(str) - return match(linesplitter,str) + return lpegmatch(linesplitter,str) end local utflinesplitter = utfbom^-1 * tsplitat(newline) @@ -1378,7 +1386,7 @@ local utflinesplitter = utfbom^-1 * tsplitat(newline) patterns.utflinesplitter = utflinesplitter function string.utfsplitlines(str) - return match(utflinesplitter,str or "") + return lpegmatch(utflinesplitter,str or "") end --~ lpeg.splitters = cache -- no longer public @@ -1393,7 +1401,7 @@ function lpeg.checkedsplit(separator,str) c = Ct(separator^0 * other * (separator^1 * other)^0) cache[separator] = c end - return match(c,str) + return lpegmatch(c,str) end function string.checkedsplit(str,separator) @@ -1404,7 +1412,7 @@ function string.checkedsplit(str,separator) c = Ct(separator^0 * other * (separator^1 * other)^0) cache[separator] = c end - return match(c,str) + return lpegmatch(c,str) end --~ from roberto's site: @@ -1419,10 +1427,10 @@ patterns.utf8byte = utf8byte --~ local str = " a b c d " ---~ local s = lpeg.stripper(lpeg.R("az")) print("["..lpeg.match(s,str).."]") ---~ local s = lpeg.keeper(lpeg.R("az")) print("["..lpeg.match(s,str).."]") ---~ local s = lpeg.stripper("ab") print("["..lpeg.match(s,str).."]") ---~ local s = lpeg.keeper("ab") print("["..lpeg.match(s,str).."]") +--~ local s = lpeg.stripper(lpeg.R("az")) print("["..lpegmatch(s,str).."]") +--~ local s = lpeg.keeper(lpeg.R("az")) print("["..lpegmatch(s,str).."]") +--~ local s = lpeg.stripper("ab") print("["..lpegmatch(s,str).."]") +--~ local s = lpeg.keeper("ab") print("["..lpegmatch(s,str).."]") local cache = { } @@ -1514,14 +1522,14 @@ function 
lpeg.balancer(left,right) return P { left * ((1 - left - right) + V(1))^0 * right } end ---~ print(1,match(lpeg.firstofsplit(":"),"bc:de")) ---~ print(2,match(lpeg.firstofsplit(":"),":de")) -- empty ---~ print(3,match(lpeg.firstofsplit(":"),"bc")) ---~ print(4,match(lpeg.secondofsplit(":"),"bc:de")) ---~ print(5,match(lpeg.secondofsplit(":"),"bc:")) -- empty ---~ print(6,match(lpeg.secondofsplit(":",""),"bc")) ---~ print(7,match(lpeg.secondofsplit(":"),"bc")) ---~ print(9,match(lpeg.secondofsplit(":","123"),"bc")) +--~ print(1,lpegmatch(lpeg.firstofsplit(":"),"bc:de")) +--~ print(2,lpegmatch(lpeg.firstofsplit(":"),":de")) -- empty +--~ print(3,lpegmatch(lpeg.firstofsplit(":"),"bc")) +--~ print(4,lpegmatch(lpeg.secondofsplit(":"),"bc:de")) +--~ print(5,lpegmatch(lpeg.secondofsplit(":"),"bc:")) -- empty +--~ print(6,lpegmatch(lpeg.secondofsplit(":",""),"bc")) +--~ print(7,lpegmatch(lpeg.secondofsplit(":"),"bc")) +--~ print(9,lpegmatch(lpeg.secondofsplit(":","123"),"bc")) --~ -- slower: --~ @@ -1535,7 +1543,7 @@ local nany = utf8char/"" function lpeg.counter(pattern) pattern = Cs((P(pattern)/" " + nany)^0) return function(str) - return #match(pattern,str) + return #lpegmatch(pattern,str) end end @@ -1549,7 +1557,7 @@ if utfgmatch then end return n else -- 4 times slower but still faster than / function - return #match(Cs((P(what)/" " + nany)^0),str) + return #lpegmatch(Cs((P(what)/" " + nany)^0),str) end end @@ -1564,9 +1572,9 @@ else p = Cs((P(what)/" " + nany)^0) cache[p] = p end - return #match(p,str) + return #lpegmatch(p,str) else -- 4 times slower but still faster than / function - return #match(Cs((P(what)/" " + nany)^0),str) + return #lpegmatch(Cs((P(what)/" " + nany)^0),str) end end @@ -1593,7 +1601,7 @@ local p = Cs((S("-.+*%()[]") / patterns_escapes + anything)^0) local s = Cs((S("-.+*%()[]") / simple_escapes + anything)^0) function string.escapedpattern(str,simple) - return match(simple and s or p,str) + return lpegmatch(simple and s or p,str) end -- utf extensies @@ -1640,7 +1648,7 @@ else p = P(uc) end end - match((utf8char/f)^0,str) + lpegmatch((utf8char/f)^0,str) return p end @@ -1656,7 +1664,7 @@ function lpeg.UR(str,more) first = str last = more or first else - first, last = match(range,str) + first, last = lpegmatch(range,str) if not last then return P(str) end @@ -1692,11 +1700,15 @@ end --~ print(lpeg.count("äáàa",lpeg.UR("àá"))) --~ print(lpeg.count("äáàa",lpeg.UR(0x0000,0xFFFF))) -function lpeg.oneof(list,...) -- lpeg.oneof("elseif","else","if","then") +function lpeg.is_lpeg(p) + return p and lpegtype(p) == "pattern" +end + +function lpeg.oneof(list,...) -- lpeg.oneof("elseif","else","if","then") -- assume proper order if type(list) ~= "table" then list = { list, ... } end - -- sort(list) -- longest match first + -- table.sort(list) -- longest match first local p = P(list[1]) for l=2,#list do p = p + P(list[l]) @@ -1704,10 +1716,6 @@ function lpeg.oneof(list,...) -- lpeg.oneof("elseif","else","if","then") return p end -function lpeg.is_lpeg(p) - return p and lpegtype(p) == "pattern" -end - -- For the moment here, but it might move to utilities. Beware, we need to -- have the longest keyword first, so 'aaa' comes beforte 'aa' which is why we -- loop back from the end cq. prepend. 
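Two small lpeg helpers from the merged-file hunk above deserve a note. `lpeg.anywhere` is now a plain local that the new `lpeg.instringchecker` wraps into a boolean predicate, which is what the `mpbasepath` test in mlib-run.lua is built on. A usage sketch, assuming only the lpeg library; the sample paths are made up.

local lpeg = require("lpeg")
local P, V, C = lpeg.P, lpeg.V, lpeg.C

local function anywhere(pattern)           -- succeed if the pattern matches anywhere in the subject
    return P { P(pattern) + 1 * V(1) }
end

local function instringchecker(p)          -- turn a pattern into a true/false predicate
    p = anywhere(p)
    return function(str)
        return lpeg.match(p, str) and true or false
    end
end

local mpbasepath = instringchecker(P("/metapost/") * (P("context") + P("base")) * P("/"))

print(mpbasepath("texmf/metapost/context/base/mp-tool.mpiv"))  -- true
print(mpbasepath("local-figure.mp"))                           -- false

The comment that closes the hunk, about keeping the longest keyword first, is easy to verify: lpeg's ordered choice commits to the first alternative that matches, so callers of `lpeg.oneof` must list longer keywords before their prefixes.

print(lpeg.match(C(P("aa") + P("aaa")), "aaa"))  -- aa  (the short prefix wins)
print(lpeg.match(C(P("aaa") + P("aa")), "aaa"))  -- aaa (longest first, as the comment asks)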
@@ -1999,7 +2007,7 @@ local function nameonly(name) return (gsub(match(name,"^.+[/\\](.-)$") or name,"%.[%a%d]+$","")) end -local function extname(name,default) +local function suffixonly(name,default) return match(name,"^.+%.([^/\\]-)$") or default or "" end @@ -2008,11 +2016,16 @@ local function splitname(name) return n or name, s or "" end -file.basename = basename -file.dirname = dirname -file.nameonly = nameonly -file.extname = extname -file.suffix = extname +file.basename = basename + +file.pathpart = dirname +file.dirname = dirname + +file.nameonly = nameonly + +file.suffixonly = suffixonly +file.extname = suffixonly -- obsolete +file.suffix = suffixonly function file.removesuffix(filename) return (gsub(filename,"%.[%a%d]+$","")) @@ -2288,7 +2301,7 @@ end --~ local pattern = (noslashes^0 * slashes)^0 * (noperiod^1 * period)^1 * C(noperiod^1) * -1 ---~ function file.extname(name) +--~ function file.suffixonly(name) --~ return lpegmatch(pattern,name) or "" --~ end @@ -2350,7 +2363,7 @@ end --~ end --~ end ---~ local test = file.extname +--~ local test = file.suffixonly --~ local test = file.basename --~ local test = file.dirname --~ local test = file.addsuffix @@ -2862,7 +2875,7 @@ local remapper = { function resolvers.findfile(name,fileformat) name = string.gsub(name,"\\","\/") fileformat = fileformat and string.lower(fileformat) - local found = kpse.find_file(name,(fileformat and fileformat ~= "" and (remapper[fileformat] or fileformat)) or file.extname(name,"tex")) + local found = kpse.find_file(name,(fileformat and fileformat ~= "" and (remapper[fileformat] or fileformat)) or file.suffix(name,"tex")) if not found or found == "" then found = kpse.find_file(name,"other text files") end @@ -2871,7 +2884,7 @@ end function resolvers.findbinfile(name,fileformat) if not fileformat or fileformat == "" then - fileformat = file.extname(name) -- string.match(name,"%.([^%.]-)$") + fileformat = file.suffix(name) -- string.match(name,"%.([^%.]-)$") end return resolvers.findfile(name,(fileformat and remapper[fileformat]) or fileformat) end @@ -4399,7 +4412,7 @@ setmetatableindex(formats, function(t,k) t[k] = l return l end - return rawget(t,file.extname(l)) + return rawget(t,file.suffix(l)) end) local locations = { } @@ -7398,7 +7411,7 @@ local function read_from_otf(specification) local allfeatures = tfmdata.shared.features or specification.features.normal constructors.applymanipulators("otf",tfmdata,allfeatures,trace_features,report_otf) constructors.setname(tfmdata,specification) -- only otf? - fonts.loggers.register(tfmdata,file.extname(specification.filename),specification) + fonts.loggers.register(tfmdata,file.suffix(specification.filename),specification) end return tfmdata end @@ -12449,7 +12462,7 @@ function resolvers.spec(specification) if resolved then specification.resolved = resolved specification.sub = sub - specification.forced = file.extname(resolved) + specification.forced = file.suffix(resolved) specification.name = file.removesuffix(resolved) end else diff --git a/tex/generic/context/luatex/luatex-mplib.tex b/tex/generic/context/luatex/luatex-mplib.tex index ef6dfff95..8af9f2d8a 100644 --- a/tex/generic/context/luatex/luatex-mplib.tex +++ b/tex/generic/context/luatex/luatex-mplib.tex @@ -31,8 +31,15 @@ \def\setmplibformat#1{\def\mplibformat{#1}} \def\setupmplibcatcodes - {\catcode`\{=12 \catcode`\}=12 \catcode`\#=12 \catcode`\^=12 \catcode`\~=12 - \catcode`\_=12 \catcode`\%=12 \catcode`\&=12 \catcode`\$=12 } + {\catcode`\{=12 % could be optional .. 
not really needed + \catcode`\}=12 % could be optional .. not really needed + \catcode`\#=12 + \catcode`\^=12 + \catcode`\~=12 + \catcode`\_=12 + \catcode`\%=12 + \catcode`\&=12 + \catcode`\$=12 } \def\mplibcode {\bgroup |
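Finally, the merged-file hunk above makes the naming change that runs through the whole patch explicit: `extname` becomes `suffixonly`, with `file.suffix` as the short form, `file.extname` kept as an obsolete alias, and `file.pathpart` added next to `file.dirname`. A minimal sketch of the helper's behaviour, assuming only stock Lua string matching; the filenames are illustrative.

local match = string.match

local function suffixonly(name, default)   -- same pattern as the merged file uses
    return match(name, "^.+%.([^/\\]-)$") or default or ""
end

print(suffixonly("figuur-cow.jpg"))        -- jpg
print(suffixonly("archive.tar.gz"))        -- gz (only the last suffix)
print(suffixonly("some.dir/readme"))       -- "" (a dot in the path part does not count)
print(suffixonly("readme", "tex"))         -- tex (the default, as used by resolvers.findfile)

Callers that still use file.extname keep working through the alias, but the rest of the tree, as the other hunks show, now consistently asks for file.suffix.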