From a220826721f9023e2a97c46bf61463651b289c64 Mon Sep 17 00:00:00 2001
From: Hans Hagen
Date: Thu, 3 Jul 2014 14:52:00 +0200
Subject: beta 2014.07.03 14:52

---
 tex/context/base/cont-new.mkiv | 2 +-
 tex/context/base/context-version.pdf | Bin 4354 -> 4427 bytes
 tex/context/base/context.mkiv | 2 +-
 tex/context/base/core-env.mkiv | 2 +
 tex/context/base/data-exp.lua | 326 ++++++++--------
 tex/context/base/data-fil.lua | 28 +-
 tex/context/base/data-ini.lua | 131 ++++++-
 tex/context/base/data-lst.lua | 26 +-
 tex/context/base/data-lua.lua | 23 +-
 tex/context/base/data-met.lua | 2 -
 tex/context/base/data-pre.lua | 203 ++--------
 tex/context/base/data-res.lua | 412 ++++++++++-----------
 tex/context/base/data-tmf.lua | 2 +-
 tex/context/base/data-tmp.lua | 23 +-
 tex/context/base/data-tre.lua | 117 ++++--
 tex/context/base/data-zip.lua | 50 ++-
 tex/context/base/file-job.lua | 3 +-
 tex/context/base/font-map.lua | 2 +
 tex/context/base/font-mis.lua | 2 +-
 tex/context/base/font-otf.lua | 10 +-
 tex/context/base/font-syn.lua | 8 +-
 tex/context/base/l-dir.lua | 303 +++++++++------
 tex/context/base/l-table.lua | 50 ++-
 tex/context/base/lpdf-fmt.lua | 4 +-
 tex/context/base/lpdf-pda.xml | 9 +-
 tex/context/base/lpdf-pdx.xml | 8 +-
 tex/context/base/lpdf-xmp.lua | 6 +-
 tex/context/base/lxml-tex.lua | 67 ++--
 tex/context/base/mlib-lua.lua | 67 +++-
 tex/context/base/mult-low.lua | 2 +-
 tex/context/base/publ-aut.lua | 284 ++++----------
 tex/context/base/publ-dat.lua | 16 +
 tex/context/base/publ-imp-author.mkvi | 278 ++++++++++++++
 tex/context/base/publ-imp-cite.mkvi | 78 ++--
 tex/context/base/publ-ini.lua | 198 +++++-----
 tex/context/base/publ-ini.mkiv | 252 +++++------
 tex/context/base/status-files.pdf | Bin 24880 -> 24927 bytes
 tex/context/base/status-lua.pdf | Bin 248312 -> 249534 bytes
 tex/context/base/strc-con.mkvi | 9 +-
 tex/context/base/strc-reg.lua | 116 +++++-
 tex/context/base/util-env.lua | 42 +--
 tex/context/base/util-str.lua | 20 +
 tex/context/test/pdf-a1b-2005.mkiv | 6 +-
 tex/generic/context/luatex/luatex-fonts-merged.lua | 63 +++-
 44 files changed, 1912 insertions(+), 1340 deletions(-)
 create mode 100644 tex/context/base/publ-imp-author.mkvi

(limited to 'tex')

diff --git a/tex/context/base/cont-new.mkiv b/tex/context/base/cont-new.mkiv
index 53a57ef9c..0c5dbb485 100644
--- a/tex/context/base/cont-new.mkiv
+++ b/tex/context/base/cont-new.mkiv
@@ -11,7 +11,7 @@
 %C therefore copyrighted by \PRAGMA. See mreadme.pdf for
 %C details.

-\newcontextversion{2014.06.27 10:53}
+\newcontextversion{2014.07.03 14:52}

 %D This file is loaded at runtime, thereby providing an excellent place for
 %D hacks, patches, extensions and new features.
diff --git a/tex/context/base/context-version.pdf b/tex/context/base/context-version.pdf
index 303d0d600..b95c4f339 100644
Binary files a/tex/context/base/context-version.pdf and b/tex/context/base/context-version.pdf differ
diff --git a/tex/context/base/context.mkiv b/tex/context/base/context.mkiv
index b936c288d..de7086acb 100644
--- a/tex/context/base/context.mkiv
+++ b/tex/context/base/context.mkiv
@@ -28,7 +28,7 @@
 %D up and the dependencies are more consistent.
\edef\contextformat {\jobname} -\edef\contextversion{2014.06.27 10:53} +\edef\contextversion{2014.07.03 14:52} \edef\contextkind {beta} %D For those who want to use this: diff --git a/tex/context/base/core-env.mkiv b/tex/context/base/core-env.mkiv index 9207d9f4a..30fc83b4b 100644 --- a/tex/context/base/core-env.mkiv +++ b/tex/context/base/core-env.mkiv @@ -390,6 +390,8 @@ % but it won't work out well with multiple setups (intercepted at the % lua end) that then get only one argument. +\def\fastsetup#1{\csname\??setup:#1\endcsname\empty} % no checking and we assume it being defined (at least for now) + % the next one is meant for \c!setups situations, hence the check for % a shortcut diff --git a/tex/context/base/data-exp.lua b/tex/context/base/data-exp.lua index 9534e73a0..6edaa8c6a 100644 --- a/tex/context/base/data-exp.lua +++ b/tex/context/base/data-exp.lua @@ -13,14 +13,17 @@ local Ct, Cs, Cc, Carg, P, C, S = lpeg.Ct, lpeg.Cs, lpeg.Cc, lpeg.Carg, lpeg.P, local type, next = type, next local ostype = os.type -local collapsepath = file.collapsepath +local collapsepath, joinpath, basename = file.collapsepath, file.join, file.basename local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end) local trace_expansions = false trackers.register("resolvers.expansions", function(v) trace_expansions = v end) +local trace_globbing = true trackers.register("resolvers.globbing", function(v) trace_globbing = v end) local report_expansions = logs.reporter("resolvers","expansions") +local report_globbing = logs.reporter("resolvers","globbing") -local resolvers = resolvers +local resolvers = resolvers +local resolveprefix = resolvers.resolve -- As this bit of code is somewhat special it gets its own module. After -- all, when working on the main resolver code, I don't want to scroll @@ -177,34 +180,28 @@ end -- {a,b,c/{p,q/{x,y,z},w}v,d/{p,q,r}} -- {$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,.local,}/web2c} -local cleanup = lpeg.replacer { - { "!" , "" }, - { "\\" , "/" }, -} +local usedhomedir = nil +local donegation = (P("!") /"" )^0 +local doslashes = (P("\\")/"/" + 1)^0 -function resolvers.cleanpath(str) -- tricky, maybe only simple paths - local doslashes = (P("\\")/"/" + 1)^0 - local donegation = (P("!") /"" )^0 - local homedir = lpegmatch(Cs(donegation * doslashes),environment.homedir or "") - if homedir == "~" or homedir == "" or not lfs.isdir(homedir) then - if trace_expansions then - report_expansions("no home dir set, ignoring dependent paths") - end - function resolvers.cleanpath(str) - if not str or find(str,"~",1,true) then - return "" -- special case - else - return lpegmatch(cleanup,str) +local function expandedhome() + if not usedhomedir then + usedhomedir = lpegmatch(Cs(donegation * doslashes),environment.homedir or "") + if usedhomedir == "~" or usedhomedir == "" or not lfs.isdir(usedhomedir) then + if trace_expansions then + report_expansions("no home dir set, ignoring dependent path using current path") end - end - else - local dohome = ((P("~")+P("$HOME"))/homedir)^0 - local cleanup = Cs(donegation * dohome * doslashes) - function resolvers.cleanpath(str) - return str and lpegmatch(cleanup,str) or "" + usedhomedir = "." 
end end - return resolvers.cleanpath(str) + return usedhomedir +end + +local dohome = ((P("~")+P("$HOME")+P("%HOME%"))/expandedhome)^0 +local cleanup = Cs(donegation * dohome * doslashes) + +resolvers.cleanpath = function(str) + return str and lpegmatch(cleanup,str) or "" end -- print(resolvers.cleanpath("")) @@ -216,11 +213,18 @@ end -- This one strips quotes and funny tokens. -local expandhome = P("~") / "$HOME" -- environment.homedir or "home:" +-- we have several options here: +-- +-- expandhome = P("~") / "$HOME" : relocateble +-- expandhome = P("~") / "home:" : relocateble +-- expandhome = P("~") / environment.homedir : frozen but unexpanded +-- expandhome = P("~") = dohome : frozen and expanded + +local expandhome = P("~") / "$HOME" -local dodouble = P('"')/"" * (expandhome + (1 - P('"')))^0 * P('"')/"" -local dosingle = P("'")/"" * (expandhome + (1 - P("'")))^0 * P("'")/"" -local dostring = (expandhome + 1 )^0 +local dodouble = P('"') / "" * (expandhome + (1 - P('"')))^0 * P('"') / "" +local dosingle = P("'") / "" * (expandhome + (1 - P("'")))^0 * P("'") / "" +local dostring = (expandhome + 1 )^0 local stripper = Cs( lpegpatterns.unspacer * (dosingle + dodouble + dostring) * lpegpatterns.unspacer @@ -285,7 +289,7 @@ end function resolvers.joinpath(str) if type(str) == 'table' then - return file.joinpath(str) + return joinpath(str) else return str end @@ -293,25 +297,25 @@ end -- The next function scans directories and returns a hash where the -- entries are either strings or tables. - +-- -- starting with . or .. etc or funny char - ---~ local l_forbidden = S("~`!#$%^&*()={}[]:;\"\'||\\/<>,?\n\r\t") ---~ local l_confusing = P(" ") ---~ local l_character = lpegpatterns.utf8 ---~ local l_dangerous = P(".") - ---~ local l_normal = (l_character - l_forbidden - l_confusing - l_dangerous) * (l_character - l_forbidden - l_confusing^2)^0 * P(-1) ---~ ----- l_normal = l_normal * Cc(true) + Cc(false) - ---~ local function test(str) ---~ print(str,lpegmatch(l_normal,str)) ---~ end ---~ test("ヒラギノ明朝 Pro W3") ---~ test("..ヒラギノ明朝 Pro W3") ---~ test(":ヒラギノ明朝 Pro W3;") ---~ test("ヒラギノ明朝 /Pro W3;") ---~ test("ヒラギノ明朝 Pro W3") +-- +-- local l_forbidden = S("~`!#$%^&*()={}[]:;\"\'||\\/<>,?\n\r\t") +-- local l_confusing = P(" ") +-- local l_character = lpegpatterns.utf8 +-- local l_dangerous = P(".") +-- +-- local l_normal = (l_character - l_forbidden - l_confusing - l_dangerous) * (l_character - l_forbidden - l_confusing^2)^0 * P(-1) +-- ----- l_normal = l_normal * Cc(true) + Cc(false) +-- +-- local function test(str) +-- print(str,lpegmatch(l_normal,str)) +-- end +-- test("ヒラギノ明朝 Pro W3") +-- test("..ヒラギノ明朝 Pro W3") +-- test(":ヒラギノ明朝 Pro W3;") +-- test("ヒラギノ明朝 /Pro W3;") +-- test("ヒラギノ明朝 Pro W3") -- a lot of this caching can be stripped away when we have ssd's everywhere -- @@ -319,41 +323,65 @@ end local attributes, directory = lfs.attributes, lfs.dir -local weird = P(".")^1 + lpeg.anywhere(S("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t")) -local timer = { } -local scanned = { } -local nofscans = 0 -local scancache = { } +local weird = P(".")^1 + lpeg.anywhere(S("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t")) +local timer = { } +local scanned = { } +local nofscans = 0 +local scancache = { } +local fullcache = { } +----- simplecache = { } +local nofsharedscans = 0 + +-- So, we assume either a lowercase name or a mixed case one but only one such case +-- as having Foo fOo foo FoO FOo etc on the system is braindead in any sane project. 
-local function scan(files,spec,path,n,m,r) - local full = (path == "" and spec) or (spec .. path .. '/') +local function scan(files,remap,spec,path,n,m,r,onlyone) + local full = path == "" and spec or (spec .. path .. '/') local dirs = { } local nofdirs = 0 for name in directory(full) do if not lpegmatch(weird,name) then - local mode = attributes(full..name,'mode') - if mode == 'file' then + local mode = attributes(full..name,"mode") + if mode == "file" then n = n + 1 - local f = files[name] - if f then - if type(f) == 'string' then - files[name] = { f, path } + local lower = lower(name) + local paths = files[lower] + if paths then + if onlyone then + -- forget about it else - f[#f+1] = path + if type(paths) == "string" then + files[lower] = { paths, path } + else + paths[#paths+1] = path + end + if name ~= lower then + local rl = remap[lower] + if not rl then + remap[lower] = name + r = r + 1 + elseif trace_globbing and rl ~= name then + report_globbing("confusing filename, name: %a, lower: %a, already: %a",name,lower,rl) + end + end end else -- probably unique anyway - files[name] = path - local lower = lower(name) + files[lower] = path if name ~= lower then - files["remap:"..lower] = name - r = r + 1 + local rl = remap[lower] + if not rl then + remap[lower] = name + r = r + 1 + elseif trace_globbing and rl ~= name then + report_globbing("confusing filename, name: %a, lower: %a, already: %a",name,lower,rl) + end end end - elseif mode == 'directory' then + elseif mode == "directory" then m = m + 1 nofdirs = nofdirs + 1 if path ~= "" then - dirs[nofdirs] = path..'/'..name + dirs[nofdirs] = path .. "/" .. name else dirs[nofdirs] = name end @@ -363,113 +391,55 @@ local function scan(files,spec,path,n,m,r) if nofdirs > 0 then sort(dirs) for i=1,nofdirs do - files, n, m, r = scan(files,spec,dirs[i],n,m,r) + files, remap, n, m, r = scan(files,remap,spec,dirs[i],n,m,r,onlyone) end end scancache[sub(full,1,-2)] = files - return files, n, m, r + return files, remap, n, m, r end -local fullcache = { } - -function resolvers.scanfiles(path,branch,usecache) - statistics.starttiming(timer) - local realpath = resolvers.resolve(path) -- no shortcut +function resolvers.scanfiles(path,branch,usecache,onlyonce) + local realpath = resolveprefix(path) if usecache then - local files = fullcache[realpath] - if files then + local content = fullcache[realpath] + if content then if trace_locating then - report_expansions("using caches scan of path %a, branch %a",path,branch or path) + report_expansions("using cached scan of path %a, branch %a",path,branch or path) end - return files + nofsharedscans = nofsharedscans + 1 + return content end end + -- + statistics.starttiming(timer) if trace_locating then report_expansions("scanning path %a, branch %a",path,branch or path) end - local files, n, m, r = scan({ },realpath .. '/',"",0,0,0) - files.__path__ = path -- can be selfautoparent:texmf-whatever - files.__files__ = n - files.__directories__ = m - files.__remappings__ = r + local files, remap, n, m, r = scan({ },{ },realpath .. 
'/',"",0,0,0,onlyonce) + local content = { + metadata = { + path = path, -- can be selfautoparent:texmf-whatever + files = n, + directories = m, + remappings = r, + }, + files = files, + remap = remap, + } if trace_locating then report_expansions("%s files found on %s directories with %s uppercase remappings",n,m,r) end if usecache then scanned[#scanned+1] = realpath - fullcache[realpath] = files + fullcache[realpath] = content end nofscans = nofscans + 1 statistics.stoptiming(timer) - return files -end - -local function simplescan(files,spec,path) -- first match only, no map and such - local full = (path == "" and spec) or (spec .. path .. '/') - local dirs = { } - local nofdirs = 0 - for name in directory(full) do - if not lpegmatch(weird,name) then - local mode = attributes(full..name,'mode') - if mode == 'file' then - if not files[name] then - -- only first match - files[name] = path - end - elseif mode == 'directory' then - nofdirs = nofdirs + 1 - if path ~= "" then - dirs[nofdirs] = path..'/'..name - else - dirs[nofdirs] = name - end - end - end - end - if nofdirs > 0 then - sort(dirs) - for i=1,nofdirs do - files = simplescan(files,spec,dirs[i]) - end - end - return files + return content end -local simplecache = { } -local nofsharedscans = 0 - function resolvers.simplescanfiles(path,branch,usecache) - statistics.starttiming(timer) - local realpath = resolvers.resolve(path) -- no shortcut - if usecache then - local files = simplecache[realpath] - if not files then - files = scancache[realpath] - if files then - nofsharedscans = nofsharedscans + 1 - end - end - if files then - if trace_locating then - report_expansions("using caches scan of path %a, branch %a",path,branch or path) - end - return files - end - end - if trace_locating then - report_expansions("scanning path %a, branch %a",path,branch or path) - end - local files = simplescan({ },realpath .. 
'/',"") - if trace_locating then - report_expansions("%s files found",table.count(files)) - end - if usecache then - scanned[#scanned+1] = realpath - simplecache[realpath] = files - end - nofscans = nofscans + 1 - statistics.stoptiming(timer) - return files + return resolvers.scanfiles(path,branch,usecache,true) -- onlyonce end function resolvers.scandata() @@ -482,4 +452,54 @@ function resolvers.scandata() } end ---~ print(table.serialize(resolvers.scanfiles("t:/sources"))) +function resolvers.get_from_content(content,path,name) -- or (content,name) + local files = content.files + if not files then + return + end + local remap = content.remap + if not remap then + return + end + if name then + -- this one resolves a remapped name + local used = lower(name) + return path, remap[used] or used + else + -- this one does a lookup and resolves a remapped name + local name = path + local used = lower(name) + local path = files[used] + if path then + return path, remap[used] or used + end + end +end + +local nothing = function() end + +function resolvers.filtered_from_content(content,pattern) + if content and type(pattern) == "string" then + local pattern = lower(pattern) + local files = content.files + local remap = content.remap + if files and remap then + local n = next(files) + local function iterator() + while n do + local k = n + n = next(files,k) + if find(k,pattern) then + return files[k], remap and remap[k] or k + end + end + end + return iterator + end + end + return nothing +end + + +-- inspect(resolvers.simplescanfiles("e:/temporary/mb-mp")) +-- inspect(resolvers.scanfiles("e:/temporary/mb-mp")) diff --git a/tex/context/base/data-fil.lua b/tex/context/base/data-fil.lua index 09129e03c..b699fc9e3 100644 --- a/tex/context/base/data-fil.lua +++ b/tex/context/base/data-fil.lua @@ -10,7 +10,8 @@ local trace_locating = false trackers.register("resolvers.locating", function(v local report_files = logs.reporter("resolvers","files") -local resolvers = resolvers +local resolvers = resolvers +local resolveprefix = resolvers.resolve local finders, openers, loaders, savers = resolvers.finders, resolvers.openers, resolvers.loaders, resolvers.savers local locators, hashers, generators, concatinators = resolvers.locators, resolvers.hashers, resolvers.generators, resolvers.concatinators @@ -18,35 +19,34 @@ local locators, hashers, generators, concatinators = resolvers.locators, resolve local checkgarbage = utilities.garbagecollector and utilities.garbagecollector.check function locators.file(specification) - local name = specification.filename - local realname = resolvers.resolve(name) -- no shortcut + local filename = specification.filename + local realname = resolveprefix(filename) -- no shortcut if realname and realname ~= '' and lfs.isdir(realname) then if trace_locating then - report_files("file locator %a found as %a",name,realname) + report_files("file locator %a found as %a",filename,realname) end - resolvers.appendhash('file',name,true) -- cache + resolvers.appendhash('file',filename,true) -- cache elseif trace_locating then - report_files("file locator %a not found",name) + report_files("file locator %a not found",filename) end end function hashers.file(specification) - local name = specification.filename - local content = caches.loadcontent(name,'files') - resolvers.registerfilehash(name,content,content==nil) + local pathname = specification.filename + local content = caches.loadcontent(pathname,'files') + resolvers.registerfilehash(pathname,content,content==nil) end function 
generators.file(specification) - local path = specification.filename - local content = resolvers.scanfiles(path,false,true) -- scan once ---~ inspect(content) - resolvers.registerfilehash(path,content,true) + local pathname = specification.filename + local content = resolvers.scanfiles(pathname,false,true) -- scan once + resolvers.registerfilehash(pathname,content,true) end concatinators.file = file.join function finders.file(specification,filetype) - local filename = specification.filename + local filename = specification.filename local foundname = resolvers.findfile(filename,filetype) if foundname and foundname ~= "" then if trace_locating then diff --git a/tex/context/base/data-ini.lua b/tex/context/base/data-ini.lua index bbd233ae7..ab5668c0b 100644 --- a/tex/context/base/data-ini.lua +++ b/tex/context/base/data-ini.lua @@ -6,10 +6,12 @@ if not modules then modules = { } end modules ['data-ini'] = { license = "see context related readme files", } +local next, type, getmetatable, rawset = next, type, getmetatable, rawset local gsub, find, gmatch, char = string.gsub, string.find, string.gmatch, string.char -local next, type = next, type - local filedirname, filebasename, filejoin = file.dirname, file.basename, file.join +local ostype, osname, osuname, ossetenv, osgetenv = os.type, os.name, os.uname, os.setenv, os.getenv + +local P, S, R, C, Cs, Cc, lpegmatch = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.Cs, lpeg.Cc, lpeg.match local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end) local trace_detail = false trackers.register("resolvers.details", function(v) trace_detail = v end) @@ -17,11 +19,9 @@ local trace_expansions = false trackers.register("resolvers.expansions", functi local report_initialization = logs.reporter("resolvers","initialization") -local ostype, osname, ossetenv, osgetenv = os.type, os.name, os.setenv, os.getenv - --- The code here used to be part of a data-res but for convenience --- we now split it over multiple files. As this file is now the --- starting point we introduce resolvers here. +-- The code here used to be part of a data-res but for convenience we now split it over multiple +-- files. As this file is now the starting point we introduce resolvers here. We also put some +-- helpers here that later can be reimplemented of extended. resolvers = resolvers or { } local resolvers = resolvers @@ -225,8 +225,117 @@ end -- a forward definition -if not resolvers.resolve then - function resolvers.resolve (s) return s end - function resolvers.unresolve(s) return s end - function resolvers.repath (s) return s end +-- Because we use resolvers.resolve a lot later on, we will implement the basics here and +-- add more later. + +local prefixes = utilities.storage.allocate() +resolvers.prefixes = prefixes + +local resolved = { } +local abstract = { } + +function resolvers.resetresolve(str) + resolved, abstract = { }, { } +end + +function resolvers.allprefixes(separator) + local all = table.sortedkeys(prefixes) + if separator then + for i=1,#all do + all[i] = all[i] .. ":" + end + end + return all +end + +local function _resolve_(method,target) + local action = prefixes[method] + if action then + return action(target) + else + return method .. ":" .. 
target + end +end + +function resolvers.unresolve(str) + return abstract[str] or str +end + +-- home:xx;selfautoparent:xx; + +local pattern = Cs((C(R("az")^2) * P(":") * C((1-S(" \"\';,"))^1) / _resolve_ + P(1))^0) + +local prefix = C(R("az")^2) * P(":") +local target = C((1-S(" \"\';,"))^1) +local notarget = (#S(";,") + P(-1)) * Cc("") + +local pattern = Cs(((prefix * (target + notarget)) / _resolve_ + P(1))^0) + +local function resolve(str) -- use schemes, this one is then for the commandline only + if type(str) == "table" then + local res = { } + for i=1,#str do + res[i] = resolve(str[i]) + end + return res + else + local res = resolved[str] + if not res then + res = lpegmatch(pattern,str) + resolved[str] = res + abstract[res] = str + end + return res + end +end + +resolvers.resolve = resolve + +if type(osuname) == "function" then + + for k, v in next, osuname() do + if not prefixes[k] then + prefixes[k] = function() return v end + end + end + +end + +if ostype == "unix" then + + -- We need to distringuish between a prefix and something else : so we + -- have a special repath variant for linux. Also, when a new prefix is + -- defined, we need to remake the matcher. + + local pattern + + local function makepattern(t,k,v) + if t then + rawset(t,k,v) + end + local colon = P(":") + for k, v in table.sortedpairs(prefixes) do + if p then + p = P(k) + p + else + p = P(k) + end + end + pattern = Cs((p * colon + colon/";" + P(1))^0) + end + + makepattern() + + table.setmetatablenewindex(prefixes,makepattern) + + function resolvers.repath(str) + return lpegmatch(pattern,str) + end + +else -- already the default: + + function resolvers.repath(str) + return str + end + end diff --git a/tex/context/base/data-lst.lua b/tex/context/base/data-lst.lua index 8996fa251..e4621a6e1 100644 --- a/tex/context/base/data-lst.lua +++ b/tex/context/base/data-lst.lua @@ -8,12 +8,16 @@ if not modules then modules = { } end modules ['data-lst'] = { -- used in mtxrun, can be loaded later .. 
todo -local find, concat, upper, format = string.find, table.concat, string.upper, string.format +local rawget, type, next = rawget, type, next + +local find, concat, upper = string.find, table.concat, string.upper local fastcopy, sortedpairs = table.fastcopy, table.sortedpairs -resolvers.listers = resolvers.listers or { } +local resolvers = resolvers +local listers = resolvers.listers or { } +resolvers.listers = listers -local resolvers = resolvers +local resolveprefix = resolvers.resolve local report_lists = logs.reporter("resolvers","lists") @@ -25,7 +29,7 @@ local function tabstr(str) end end -function resolvers.listers.variables(pattern) +function listers.variables(pattern) local instance = resolvers.instance local environment = instance.environment local variables = instance.variables @@ -46,10 +50,10 @@ function resolvers.listers.variables(pattern) for key, value in sortedpairs(configured) do if key ~= "" and (pattern == "" or find(upper(key),pattern)) then report_lists(key) - report_lists(" env: %s",tabstr(rawget(environment,key)) or "unset") - report_lists(" var: %s",tabstr(configured[key]) or "unset") - report_lists(" exp: %s",tabstr(expansions[key]) or "unset") - report_lists(" res: %s",tabstr(resolvers.resolve(expansions[key])) or "unset") + report_lists(" env: %s",tabstr(rawget(environment,key)) or "unset") + report_lists(" var: %s",tabstr(configured[key]) or "unset") + report_lists(" exp: %s",tabstr(expansions[key]) or "unset") + report_lists(" res: %s",tabstr(resolveprefix(expansions[key])) or "unset") end end instance.environment = fastcopy(env) @@ -59,15 +63,15 @@ end local report_resolved = logs.reporter("system","resolved") -function resolvers.listers.configurations() +function listers.configurations() local configurations = resolvers.instance.specification for i=1,#configurations do - report_resolved("file : %s",resolvers.resolve(configurations[i])) + report_resolved("file : %s",resolveprefix(configurations[i])) end report_resolved("") local list = resolvers.expandedpathfromlist(resolvers.splitpath(resolvers.luacnfspec)) for i=1,#list do - local li = resolvers.resolve(list[i]) + local li = resolveprefix(list[i]) if lfs.isdir(li) then report_resolved("path - %s",li) else diff --git a/tex/context/base/data-lua.lua b/tex/context/base/data-lua.lua index 0e7c81181..7c12a5940 100644 --- a/tex/context/base/data-lua.lua +++ b/tex/context/base/data-lua.lua @@ -8,7 +8,7 @@ if not modules then modules = { } end modules ['data-lua'] = { -- This is now a plug in into l-lua (as we also use the extra paths elsewhere). 
-local resolvers, package = resolvers, package +local package, lpeg = package, lpeg local gsub = string.gsub local concat = table.concat @@ -16,18 +16,21 @@ local addsuffix = file.addsuffix local P, S, Cs, lpegmatch = lpeg.P, lpeg.S, lpeg.Cs, lpeg.match -local luasuffixes = { 'tex', 'lua' } -local libsuffixes = { 'lib' } -local luaformats = { 'TEXINPUTS', 'LUAINPUTS' } -local libformats = { 'CLUAINPUTS' } -local helpers = package.helpers or { } -local methods = helpers.methods or { } +local luasuffixes = { 'tex', 'lua' } +local libsuffixes = { 'lib' } +local luaformats = { 'TEXINPUTS', 'LUAINPUTS' } +local libformats = { 'CLUAINPUTS' } +local helpers = package.helpers or { } +local methods = helpers.methods or { } + +local resolvers = resolvers +local resolveprefix = resolvers.resolve + +helpers.report = logs.reporter("resolvers","libraries") trackers.register("resolvers.libraries", function(v) helpers.trace = v end) trackers.register("resolvers.locating", function(v) helpers.trace = v end) -helpers.report = logs.reporter("resolvers","libraries") - helpers.sequence = { "already loaded", "preload table", @@ -44,7 +47,7 @@ helpers.sequence = { local pattern = Cs(P("!")^0 / "" * (P("/") * P(-1) / "/" + P("/")^1 / "/" + 1)^0) function helpers.cleanpath(path) -- hm, don't we have a helper for this? - return resolvers.resolve(lpegmatch(pattern,path)) + return resolveprefix(lpegmatch(pattern,path)) end local loadedaslib = helpers.loadedaslib diff --git a/tex/context/base/data-met.lua b/tex/context/base/data-met.lua index 67b9eb22b..4e8a48f50 100644 --- a/tex/context/base/data-met.lua +++ b/tex/context/base/data-met.lua @@ -36,8 +36,6 @@ local function splitmethod(filename) -- todo: filetype in specification end filename = file.collapsepath(filename,".") -- hm, we should keep ./ in some cases - -- filename = gsub(filename,"^%./",getcurrentdir().."/") -- we will merge dir.expandname and collapse some day - if not find(filename,"://",1,true) then return { scheme = "file", path = filename, original = filename, filename = filename } end diff --git a/tex/context/base/data-pre.lua b/tex/context/base/data-pre.lua index f2f5bddc4..edfe53dab 100644 --- a/tex/context/base/data-pre.lua +++ b/tex/context/base/data-pre.lua @@ -6,63 +6,61 @@ if not modules then modules = { } end modules ['data-pre'] = { license = "see context related readme files" } --- It could be interesting to hook the resolver in the file --- opener so that unresolved prefixes travel around and we --- get more abstraction. +local resolvers = resolvers +local prefixes = resolvers.prefixes --- As we use this beforehand we will move this up in the chain --- of loading. 
+local cleanpath = resolvers.cleanpath +local findgivenfile = resolvers.findgivenfile +local expansion = resolvers.expansion +local getenv = resolvers.getenv -- we can probably also use resolvers.expansion ---~ print(resolvers.resolve("abc env:tmp file:cont-en.tex path:cont-en.tex full:cont-en.tex rel:zapf/one/p-chars.tex")) +local basename = file.basename +local dirname = file.dirname +local joinpath = file.join -local resolvers = resolvers -local prefixes = utilities.storage.allocate() -resolvers.prefixes = prefixes - -local cleanpath, findgivenfile, expansion = resolvers.cleanpath, resolvers.findgivenfile, resolvers.expansion -local getenv = resolvers.getenv -- we can probably also use resolvers.expansion -local P, S, R, C, Cs, Cc, lpegmatch = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.Cs, lpeg.Cc, lpeg.match -local joinpath, basename, dirname = file.join, file.basename, file.dirname -local getmetatable, rawset, type = getmetatable, rawset, type - --- getenv = function(...) return resolvers.getenv(...) end -- needs checking (definitions changes later on) +local isfile = lfs.isfile prefixes.environment = function(str) return cleanpath(expansion(str)) end -prefixes.relative = function(str,n) -- lfs.isfile - if io.exists(str) then - -- nothing - elseif io.exists("./" .. str) then - str = "./" .. str - else - local p = "../" - for i=1,n or 2 do - if io.exists(p .. str) then - str = p .. str - break - else - p = p .. "../" +local function relative(str,n) + if not isfile(str) then + local pstr = "./" .. str + if isfile(pstr) then + str = pstr + else + local p = "../" + for i=1,n or 2 do + local pstr = p .. str + if isfile(pstr) then + str = pstr + break + else + p = p .. "../" + end end end end return cleanpath(str) end +local function locate(str) + local fullname = findgivenfile(str) or "" + return cleanpath(fullname ~= "" and fullname or str) +end + +prefixes.relative = relative +prefixes.locate = locate + prefixes.auto = function(str) - local fullname = prefixes.relative(str) - if not lfs.isfile(fullname) then - fullname = prefixes.locate(str) + local fullname = relative(str) + if not isfile(fullname) then + fullname = locate(str) end return fullname end -prefixes.locate = function(str) - local fullname = findgivenfile(str) or "" - return cleanpath((fullname ~= "" and fullname) or str) -end - prefixes.filename = function(str) local fullname = findgivenfile(str) or "" return cleanpath(basename((fullname ~= "" and fullname) or str)) -- no cleanpath needed here @@ -115,132 +113,3 @@ prefixes.kpse = prefixes.locate prefixes.full = prefixes.locate prefixes.file = prefixes.filename prefixes.path = prefixes.pathname - -function resolvers.allprefixes(separator) - local all = table.sortedkeys(prefixes) - if separator then - for i=1,#all do - all[i] = all[i] .. ":" - end - end - return all -end - -local function _resolve_(method,target) - local action = prefixes[method] - if action then - return action(target) - else - return method .. ":" .. target - end -end - -local resolved, abstract = { }, { } - -function resolvers.resetresolve(str) - resolved, abstract = { }, { } -end - --- todo: use an lpeg (see data-lua for !! 
/ stripper) - --- local function resolve(str) -- use schemes, this one is then for the commandline only --- if type(str) == "table" then --- local t = { } --- for i=1,#str do --- t[i] = resolve(str[i]) --- end --- return t --- else --- local res = resolved[str] --- if not res then --- res = gsub(str,"([a-z][a-z]+):([^ \"\';,]*)",_resolve_) -- home:xx;selfautoparent:xx; etc (comma added) --- resolved[str] = res --- abstract[res] = str --- end --- return res --- end --- end - --- home:xx;selfautoparent:xx; - -local pattern = Cs((C(R("az")^2) * P(":") * C((1-S(" \"\';,"))^1) / _resolve_ + P(1))^0) - -local prefix = C(R("az")^2) * P(":") -local target = C((1-S(" \"\';,"))^1) -local notarget = (#S(";,") + P(-1)) * Cc("") - -local pattern = Cs(((prefix * (target + notarget)) / _resolve_ + P(1))^0) - -local function resolve(str) -- use schemes, this one is then for the commandline only - if type(str) == "table" then - local t = { } - for i=1,#str do - t[i] = resolve(str[i]) - end - return t - else - local res = resolved[str] - if not res then - res = lpegmatch(pattern,str) - resolved[str] = res - abstract[res] = str - end - return res - end -end - -local function unresolve(str) - return abstract[str] or str -end - -resolvers.resolve = resolve -resolvers.unresolve = unresolve - -if type(os.uname) == "function" then - - for k, v in next, os.uname() do - if not prefixes[k] then - prefixes[k] = function() return v end - end - end - -end - -if os.type == "unix" then - - -- We need to distringuish between a prefix and something else : so we - -- have a special repath variant for linux. Also, when a new prefix is - -- defined, we need to remake the matcher. - - local pattern - - local function makepattern(t,k,v) - if t then - rawset(t,k,v) - end - local colon = P(":") - for k, v in table.sortedpairs(prefixes) do - if p then - p = P(k) + p - else - p = P(k) - end - end - pattern = Cs((p * colon + colon/";" + P(1))^0) - end - - makepattern() - - getmetatable(prefixes).__newindex = makepattern - - function resolvers.repath(str) - return lpegmatch(pattern,str) - end - -else -- already the default: - - function resolvers.repath(str) - return str - end - -end diff --git a/tex/context/base/data-res.lua b/tex/context/base/data-res.lua index d79d78a72..844a0601f 100644 --- a/tex/context/base/data-res.lua +++ b/tex/context/base/data-res.lua @@ -18,7 +18,7 @@ if not modules then modules = { } end modules ['data-res'] = { -- todo: cache:/// home:/// selfautoparent:/// (sometime end 2012) local gsub, find, lower, upper, match, gmatch = string.gsub, string.find, string.lower, string.upper, string.match, string.gmatch -local concat, insert, sortedkeys = table.concat, table.insert, table.sortedkeys +local concat, insert, sortedkeys, sortedhash = table.concat, table.insert, table.sortedkeys, table.sortedhash local next, type, rawget = next, type, rawget local os = os @@ -29,14 +29,22 @@ local formatters = string.formatters local filedirname = file.dirname local filebasename = file.basename local suffixonly = file.suffixonly +local addsuffix = file.addsuffix +local removesuffix = file.removesuffix local filejoin = file.join local collapsepath = file.collapsepath local joinpath = file.joinpath +local is_qualified_path = file.is_qualified_path + local allocate = utilities.storage.allocate local settings_to_array = utilities.parsers.settings_to_array + +local getcurrentdir = lfs.currentdir +local isfile = lfs.isfile +local isdir = lfs.isdir + local setmetatableindex = table.setmetatableindex local luasuffixes = 
utilities.lua.suffixes -local getcurrentdir = lfs.currentdir local trace_locating = false trackers .register("resolvers.locating", function(v) trace_locating = v end) local trace_detail = false trackers .register("resolvers.details", function(v) trace_detail = v end) @@ -45,19 +53,23 @@ local resolve_otherwise = true directives.register("resolvers.otherwise", fun local report_resolving = logs.reporter("resolvers","resolving") -local resolvers = resolvers +local resolvers = resolvers local expandedpathfromlist = resolvers.expandedpathfromlist local checkedvariable = resolvers.checkedvariable local splitconfigurationpath = resolvers.splitconfigurationpath local methodhandler = resolvers.methodhandler +local filtered = resolvers.filtered_from_content +local lookup = resolvers.get_from_content +local cleanpath = resolvers.cleanpath +local resolveprefix = resolvers.resolve -local initializesetter = utilities.setters.initialize +local initializesetter = utilities.setters.initialize local ostype, osname, osenv, ossetenv, osgetenv = os.type, os.name, os.env, os.setenv, os.getenv -resolvers.cacheversion = '1.0.1' -resolvers.configbanner = '' +resolvers.cacheversion = "1.100" +resolvers.configbanner = "" resolvers.homedir = environment.homedir resolvers.criticalvars = allocate { "SELFAUTOLOC", "SELFAUTODIR", "SELFAUTOPARENT", "TEXMFCNF", "TEXMF", "TEXOS" } resolvers.luacnfname = "texmfcnf.lua" @@ -155,7 +167,7 @@ function resolvers.setenv(key,value,raw) -- we feed back into the environment, and as this is used -- by other applications (via os.execute) we need to make -- sure that prefixes are resolve - ossetenv(key,raw and value or resolvers.resolve(value)) + ossetenv(key,raw and value or resolveprefix(value)) end end @@ -178,7 +190,7 @@ resolvers.env = getenv -- We are going to use some metatable trickery where we backtrack from -- expansion to variable to environment. -local function resolve(k) +local function resolvevariable(k) return instance.expansions[k] end @@ -191,12 +203,12 @@ local somekey = C(R("az","AZ","09","__","--")^1) local somethingelse = P(";") * ((1-S("!{}/\\"))^1 * P(";") / "") + P(";") * (P(";") / "") + P(1) -local variableexpander = Cs( (somevariable * (somekey/resolve) + somethingelse)^1 ) +local variableexpander = Cs( (somevariable * (somekey/resolvevariable) + somethingelse)^1 ) local cleaner = P("\\") / "/" + P(";") * S("!{}/\\")^0 * P(";")^1 / ";" local variablecleaner = Cs((cleaner + P(1))^0) -local somevariable = R("az","AZ","09","__","--")^1 / resolve +local somevariable = R("az","AZ","09","__","--")^1 / resolvevariable local variable = (P("$")/"") * (somevariable + (P("{")/"") * somevariable * (P("}")/"")) local variableresolver = Cs((variable + P(1))^0) @@ -206,9 +218,12 @@ end function resolvers.newinstance() -- todo: all vars will become lowercase and alphanum only - if trace_locating then + -- normally we only need one instance but for special cases we can (re)load one so + -- we stick to this model. + + if trace_locating then report_resolving("creating instance") - end + end local environment, variables, expansions, order = allocate(), allocate(), allocate(), allocate() @@ -356,14 +371,14 @@ local function identify_configuration_files() for i=1,#cnfpaths do local filepath = cnfpaths[i] local filename = collapsepath(filejoin(filepath,luacnfname)) - local realname = resolvers.resolve(filename) -- can still have "//" ... needs checking + local realname = resolveprefix(filename) -- can still have "//" ... 
needs checking -- todo: environment.skipweirdcnfpaths directive if trace_locating then - local fullpath = gsub(resolvers.resolve(collapsepath(filepath)),"//","/") + local fullpath = gsub(resolveprefix(collapsepath(filepath)),"//","/") local weirdpath = find(fullpath,"/texmf.+/texmf") or not find(fullpath,"/web2c",1,true) report_resolving("looking for %a on %s path %a from specification %a",luacnfname,weirdpath and "weird" or "given",fullpath,filepath) end - if lfs.isfile(realname) then + if isfile(realname) then specification[#specification+1] = filename -- unresolved as we use it in matching, relocatable if trace_locating then report_resolving("found configuration file %a",realname) @@ -386,7 +401,7 @@ local function load_configuration_files() local filename = specification[i] local pathname = filedirname(filename) local filename = filejoin(pathname,luacnfname) - local realname = resolvers.resolve(filename) -- no shortcut + local realname = resolveprefix(filename) -- no shortcut local blob = loadfile(realname) if blob then local setups = instance.setups @@ -394,7 +409,7 @@ local function load_configuration_files() local parent = data and data.parent if parent then local filename = filejoin(pathname,parent) - local realname = resolvers.resolve(filename) -- no shortcut + local realname = resolveprefix(filename) -- no shortcut local blob = loadfile(realname) if blob then local parentdata = blob() @@ -419,7 +434,7 @@ local function load_configuration_files() elseif variables[k] == nil then if trace_locating and not warning then report_resolving("variables like %a in configuration file %a should move to the 'variables' subtable", - k,resolvers.resolve(filename)) + k,resolveprefix(filename)) warning = true end variables[k] = v @@ -492,7 +507,7 @@ local function locate_file_databases() local stripped = lpegmatch(inhibitstripper,path) -- the !! thing if stripped ~= "" then local runtime = stripped == path - path = resolvers.cleanpath(path) + path = cleanpath(path) local spec = resolvers.splitmethod(stripped) if runtime and (spec.noscheme or spec.scheme == "file") then stripped = "tree:///" .. stripped @@ -558,8 +573,8 @@ function resolvers.renew(hashname) report_resolving("identifying tree %a",hashname) end end - local realpath = resolvers.resolve(hashname) - if lfs.isdir(realpath) then + local realpath = resolveprefix(hashname) + if isdir(realpath) then if trace_locating then report_resolving("using path %a",realpath) end @@ -710,7 +725,7 @@ function resolvers.registerextrapath(paths,subpaths) local ps = p .. "/" .. s if not done[ps] then newn = newn + 1 - ep[newn] = resolvers.cleanpath(ps) + ep[newn] = cleanpath(ps) done[ps] = true end end @@ -720,7 +735,7 @@ function resolvers.registerextrapath(paths,subpaths) local p = paths[i] if not done[p] then newn = newn + 1 - ep[newn] = resolvers.cleanpath(p) + ep[newn] = cleanpath(p) done[p] = true end end @@ -732,7 +747,7 @@ function resolvers.registerextrapath(paths,subpaths) local ps = ep[i] .. "/" .. 
s if not done[ps] then newn = newn + 1 - ep[newn] = resolvers.cleanpath(ps) + ep[newn] = cleanpath(ps) done[ps] = true end end @@ -791,7 +806,7 @@ function resolvers.cleanpathlist(str) local t = resolvers.expandedpathlist(str) if t then for i=1,#t do - t[i] = collapsepath(resolvers.cleanpath(t[i])) + t[i] = collapsepath(cleanpath(t[i])) end end return t @@ -851,7 +866,7 @@ function resolvers.registerfilehash(name,content,someerror) end local function isreadable(name) - local readable = lfs.isfile(name) -- not file.is_readable(name) asit can be a dir + local readable = isfile(name) -- not file.is_readable(name) asit can be a dir if trace_detail then if readable then report_resolving("file %a is readable",name) @@ -862,75 +877,59 @@ local function isreadable(name) return readable end --- name --- name/name +-- name | name/name local function collect_files(names) - local filelist, noffiles = { }, 0 + local filelist = { } + local noffiles = 0 + local function check(hash,root,pathname,path,name) + if not pathname or find(path,pathname) then + local variant = hash.type + local search = filejoin(root,path,name) -- funny no concatinator + local result = methodhandler('concatinators',variant,root,path,name) + if trace_detail then + report_resolving("match: variant %a, search %a, result %a",variant,search,result) + end + noffiles = noffiles + 1 + filelist[noffiles] = { variant, search, result } + end + end for k=1,#names do - local fname = names[k] + local filename = names[k] if trace_detail then - report_resolving("checking name %a",fname) + report_resolving("checking name %a",filename) end - local bname = filebasename(fname) - local dname = filedirname(fname) - if dname == "" or find(dname,"^%.") then - dname = false + local basename = filebasename(filename) + local pathname = filedirname(filename) + if pathname == "" or find(pathname,"^%.") then + pathname = false else - dname = gsub(dname,"%*",".*") - dname = "/" .. dname .. "$" + pathname = gsub(pathname,"%*",".*") + pathname = "/" .. pathname .. 
"$" end local hashes = instance.hashes for h=1,#hashes do - local hash = hashes[h] - local blobpath = hash.name - local files = blobpath and instance.files[blobpath] - if files then + local hash = hashes[h] + local hashname = hash.name + local content = hashname and instance.files[hashname] + if content then if trace_detail then - report_resolving("deep checking %a, base %a, pattern %a",blobpath,bname,dname) - end - local blobfile = files[bname] - if not blobfile then - local rname = "remap:"..bname - blobfile = files[rname] - if blobfile then - bname = files[rname] - blobfile = files[bname] - end + report_resolving("deep checking %a, base %a, pattern %a",blobpath,basename,pathname) end - if blobfile then - local blobroot = files.__path__ or blobpath - if type(blobfile) == 'string' then - if not dname or find(blobfile,dname) then - local variant = hash.type - -- local search = filejoin(blobpath,blobfile,bname) - local search = filejoin(blobroot,blobfile,bname) - local result = methodhandler('concatinators',hash.type,blobroot,blobfile,bname) - if trace_detail then - report_resolving("match: variant %a, search %a, result %a",variant,search,result) - end - noffiles = noffiles + 1 - filelist[noffiles] = { variant, search, result } - end + local path, name = lookup(content,basename) + if path then + local metadata = content.metadata + local realroot = metadata and metadata.path or hashname + if type(path) == "string" then + check(hash,realroot,pathname,path,name) else - for kk=1,#blobfile do - local vv = blobfile[kk] - if not dname or find(vv,dname) then - local variant = hash.type - -- local search = filejoin(blobpath,vv,bname) - local search = filejoin(blobroot,vv,bname) - local result = methodhandler('concatinators',hash.type,blobroot,vv,bname) - if trace_detail then - report_resolving("match: variant %a, search %a, result %a",variant,search,result) - end - noffiles = noffiles + 1 - filelist[noffiles] = { variant, search, result } - end + for i=1,#path do + check(hash,realroot,pathname,path[i],name) end end end elseif trace_locating then - report_resolving("no match in %a (%s)",blobpath,bname) + report_resolving("no match in %a (%s)",hashname,basename) end end end @@ -961,7 +960,7 @@ end local function can_be_dir(name) -- can become local local fakepaths = instance.fakepaths if not fakepaths[name] then - if lfs.isdir(name) then + if isdir(name) then fakepaths[name] = 1 -- directory else fakepaths[name] = 2 -- no directory @@ -987,10 +986,11 @@ local function find_analyze(filename,askedformat,allresults) if askedformat == "" then if ext == "" or not suffixmap[ext] then local defaultsuffixes = resolvers.defaultsuffixes + local formatofsuffix = resolvers.formatofsuffix for i=1,#defaultsuffixes do local forcedname = filename .. '.' .. 
defaultsuffixes[i] wantedfiles[#wantedfiles+1] = forcedname - filetype = resolvers.formatofsuffix(forcedname) + filetype = formatofsuffix(forcedname) if trace_locating then report_resolving("forcing filetype %a",filetype) end @@ -1032,7 +1032,7 @@ local function find_wildcard(filename,allresults) if trace_locating then report_resolving("checking wildcard %a", filename) end - local method, result = resolvers.findwildcardfiles(filename) + local result = resolvers.findwildcardfiles(filename) if result then return "wildcard", result end @@ -1040,7 +1040,7 @@ local function find_wildcard(filename,allresults) end local function find_qualified(filename,allresults,askedformat,alsostripped) -- this one will be split too - if not file.is_qualified_path(filename) then + if not is_qualified_path(filename) then return end if trace_locating then @@ -1152,7 +1152,6 @@ local function find_intree(filename,filetype,wantedfiles,allresults) if trace_detail then report_resolving("checking filename %a",filename) end - local resolve = resolvers.resolve local result = { } -- pathlist : resolved -- dirlist : unresolved or resolved @@ -1177,9 +1176,9 @@ local function find_intree(filename,filetype,wantedfiles,allresults) local f = fl[2] local d = dirlist[k] -- resolve is new: - if find(d,expression) or find(resolve(d),expression) then + if find(d,expression) or find(resolveprefix(d),expression) then -- todo, test for readable - result[#result+1] = resolve(fl[3]) -- no shortcut + result[#result+1] = resolveprefix(fl[3]) -- no shortcut done = true if allresults then if trace_detail then @@ -1201,7 +1200,7 @@ local function find_intree(filename,filetype,wantedfiles,allresults) else method = "filesystem" -- bonus, even when !! is specified pathname = gsub(pathname,"/+$","") - pathname = resolve(pathname) + pathname = resolveprefix(pathname) local scheme = url.hasscheme(pathname) if not scheme or scheme == "file" then local pname = gsub(pathname,"%.%*$",'') @@ -1299,7 +1298,7 @@ local function find_otherwise(filename,filetype,wantedfiles,allresults) -- other local filelist = collect_files(wantedfiles) local fl = filelist and filelist[1] if fl then - return "otherwise", { resolvers.resolve(fl[3]) } -- filename + return "otherwise", { resolveprefix(fl[3]) } -- filename end end @@ -1414,44 +1413,39 @@ function resolvers.findpath(filename,filetype) end local function findgivenfiles(filename,allresults) - local bname, result = filebasename(filename), { } - local hashes = instance.hashes - local noffound = 0 - for k=1,#hashes do - local hash = hashes[k] - local files = instance.files[hash.name] or { } - local blist = files[bname] - if not blist then - local rname = "remap:"..bname - blist = files[rname] - if blist then - bname = files[rname] - blist = files[bname] - end + local base = filebasename(filename) + local result = { } + local hashes = instance.hashes + -- + local function okay(hash,path,name) + local found = methodhandler('concatinators',hash.type,hash.name,path,name) + if found and found ~= "" then + result[#result+1] = resolveprefix(found) + return not allresults end - if blist then - if type(blist) == 'string' then - local found = methodhandler('concatinators',hash.type,hash.name,blist,bname) or "" - if found ~= "" then - noffound = noffound + 1 - result[noffound] = resolvers.resolve(found) - if not allresults then - break - end + end + -- + for k=1,#hashes do + local hash = hashes[k] + local content = instance.files[hash.name] + if content then + local path, name = lookup(content,base) + if not path then + -- no 
match + elseif type(path) == "string" then + if okay(hash,path,name) then + return result end else - for kk=1,#blist do - local vv = blist[kk] - local found = methodhandler('concatinators',hash.type,hash.name,vv,bname) or "" - if found ~= "" then - noffound = noffound + 1 - result[noffound] = resolvers.resolve(found) - if not allresults then break end + for i=1,#path do + if okay(hash,path[i],name) then + return result end end end end end + -- return result end @@ -1463,37 +1457,6 @@ function resolvers.findgivenfile(filename) return findgivenfiles(filename,false)[1] or "" end -local function doit(path,blist,bname,tag,variant,result,allresults) - local done = false - if blist and variant then - local resolve = resolvers.resolve -- added - if type(blist) == 'string' then - -- make function and share code - if find(lower(blist),path) then - local full = methodhandler('concatinators',variant,tag,blist,bname) or "" - result[#result+1] = resolve(full) - done = true - end - else - for kk=1,#blist do - local vv = blist[kk] - if find(lower(vv),path) then - local full = methodhandler('concatinators',variant,tag,vv,bname) or "" - result[#result+1] = resolve(full) - done = true - if not allresults then break end - end - end - end - end - return done -end - ---~ local makewildcard = Cs( ---~ (P("^")^0 * P("/") * P(-1) + P(-1)) /".*" ---~ + (P("^")^0 * P("/") / "") * (P("*")/".*" + P("-")/"%%-" + P("?")/"."+ P("\\")/"/" + P(1))^0 ---~ ) - local makewildcard = Cs( (P("^")^0 * P("/") * P(-1) + P(-1)) /".*" + (P("^")^0 * P("/") / "")^0 * (P("*")/".*" + P("-")/"%%-" + P(".")/"%%." + P("?")/"."+ P("\\")/"/" + P(1))^0 @@ -1503,37 +1466,80 @@ function resolvers.wildcardpattern(pattern) return lpegmatch(makewildcard,pattern) or pattern end -local function findwildcardfiles(filename,allresults,result) -- todo: remap: and lpeg - result = result or { } ---~ local path = lower(lpegmatch(makewildcard,filedirname (filename))) ---~ local name = lower(lpegmatch(makewildcard,filebasename(filename))) - local base = filebasename(filename) - local dirn = filedirname(filename) - local path = lower(lpegmatch(makewildcard,dirn) or dirn) - local name = lower(lpegmatch(makewildcard,base) or base) - local files, done = instance.files, false +-- we use more function calls than before but we also have smaller trees so +-- why bother + +local function findwildcardfiles(filename,allresults,result) + local result = result or { } + local base = filebasename(filename) + local dirn = filedirname(filename) + local path = lower(lpegmatch(makewildcard,dirn) or dirn) + local name = lower(lpegmatch(makewildcard,base) or base) + local files = instance.files + -- if find(name,"*",1,true) then local hashes = instance.hashes + local function okay(found,path,base,hashname,hashtype) + if find(found,path) then + local full = methodhandler('concatinators',hashtype,hashname,found,base) + if full and full ~= "" then + result[#result+1] = resolveprefix(full) + return not allresults + end + end + end for k=1,#hashes do - local hash = hashes[k] - local hashname, hashtype = hash.name, hash.type - for kk, hh in next, files[hashname] do - if not find(kk,"^remap:") then - if find(lower(kk),name) then - if doit(path,hh,kk,hashname,hashtype,result,allresults) then done = true end - if done and not allresults then break end + local hash = hashes[k] + local hashname = hash.name + local hashtype = hash.type + if hashname and hashtype then + for found, base in filtered(files[hashname],name) do + if type(found) == 'string' then + if 
okay(found,path,base,hashname,hashtype) then + break + end + else + for i=1,#found do + if okay(found[i],path,base,hashname,hashtype) then + break + end + end end end end end else + local function okayokay(found,path,base,hashname,hashtype) + if find(found,path) then + local full = methodhandler('concatinators',hashtype,hashname,found,base) + if full and full ~= "" then + result[#result+1] = resolveprefix(full) + return not allresults + end + end + end + -- local hashes = instance.hashes --- inspect(hashes) for k=1,#hashes do - local hash = hashes[k] - local hashname, hashtype = hash.name, hash.type - if doit(path,files[hashname][base],base,hashname,hashtype,result,allresults) then done = true end - if done and not allresults then break end + local hash = hashes[k] + local hashname = hash.name + local hashtype = hash.type + if hashname and hashtype then + local found, base = lookup(content,base) + if not found then + -- nothing + elseif type(found) == 'string' then + if okay(found,path,base,hashname,hashtype) then + break + end + else + for i=1,#found do + if okay(found[i],path,base,hashname,hashtype) then + break + end + end + end + end end end -- we can consider also searching the paths not in the database, but then @@ -1626,7 +1632,7 @@ end function resolvers.dowithpath(name,func) local pathlist = resolvers.expandedpathlist(name) for i=1,#pathlist do - func("^"..resolvers.cleanpath(pathlist[i])) + func("^"..cleanpath(pathlist[i])) end end @@ -1636,23 +1642,23 @@ end function resolvers.locateformat(name) local engine = environment.ownmain or "luatex" - local barename = file.removesuffix(name) - local fullname = file.addsuffix(barename,"fmt") + local barename = removesuffix(name) + local fullname = addsuffix(barename,"fmt") local fmtname = caches.getfirstreadablefile(fullname,"formats",engine) or "" if fmtname == "" then fmtname = resolvers.findfile(fullname) - fmtname = resolvers.cleanpath(fmtname) + fmtname = cleanpath(fmtname) end if fmtname ~= "" then - local barename = file.removesuffix(fmtname) - local luaname = file.addsuffix(barename,luasuffixes.lua) - local lucname = file.addsuffix(barename,luasuffixes.luc) - local luiname = file.addsuffix(barename,luasuffixes.lui) - if lfs.isfile(luiname) then + local barename = removesuffix(fmtname) + local luaname = addsuffix(barename,luasuffixes.lua) + local lucname = addsuffix(barename,luasuffixes.luc) + local luiname = addsuffix(barename,luasuffixes.lui) + if isfile(luiname) then return barename, luiname - elseif lfs.isfile(lucname) then + elseif isfile(lucname) then return barename, lucname - elseif lfs.isfile(luaname) then + elseif isfile(luaname) then return barename, luaname end end @@ -1671,35 +1677,29 @@ end function resolvers.dowithfilesintree(pattern,handle,before,after) -- will move, can be a nice iterator instead local instance = resolvers.instance - local hashes = instance.hashes + local hashes = instance.hashes for i=1,#hashes do - local hash = hashes[i] + local hash = hashes[i] local blobtype = hash.type local blobpath = hash.name - if blobpath then + if blobtype and blobpath then + local total = 0 + local checked = 0 + local done = 0 if before then before(blobtype,blobpath,pattern) end - local files = instance.files[blobpath] - local total, checked, done = 0, 0, 0 - if files then - for k, v in table.sortedhash(files) do -- next, files do, beware: this is not the resolve order - total = total + 1 - if find(k,"^remap:") then - -- forget about these - elseif find(k,pattern) then - if type(v) == "string" then - checked = checked + 
1 - if handle(blobtype,blobpath,v,k) then - done = done + 1 - end - else - checked = checked + #v - for i=1,#v do - if handle(blobtype,blobpath,v[i],k) then - done = done + 1 - end - end + for path, name in filtered(instance.files[blobpath],pattern) do + if type(path) == "string" then + checked = checked + 1 + if handle(blobtype,blobpath,path,name) then + done = done + 1 + end + else + checked = checked + #path + for i=1,#path do + if handle(blobtype,blobpath,path[i],name) then + done = done + 1 end end end @@ -1711,8 +1711,8 @@ function resolvers.dowithfilesintree(pattern,handle,before,after) -- will move, end end -resolvers.obsolete = resolvers.obsolete or { } -local obsolete = resolvers.obsolete +local obsolete = resolvers.obsolete or { } +resolvers.obsolete = obsolete resolvers.find_file = resolvers.findfile obsolete.find_file = resolvers.findfile resolvers.find_files = resolvers.findfiles obsolete.find_files = resolvers.findfiles diff --git a/tex/context/base/data-tmf.lua b/tex/context/base/data-tmf.lua index c52225193..e0ccac257 100644 --- a/tex/context/base/data-tmf.lua +++ b/tex/context/base/data-tmf.lua @@ -6,7 +6,7 @@ if not modules then modules = { } end modules ['data-tmf'] = { license = "see context related readme files" } -local resolvers = resolvers +local resolvers = resolvers local report_tds = logs.reporter("resolvers","tds") diff --git a/tex/context/base/data-tmp.lua b/tex/context/base/data-tmp.lua index 3e109dcfe..8ee5792cc 100644 --- a/tex/context/base/data-tmp.lua +++ b/tex/context/base/data-tmp.lua @@ -35,6 +35,7 @@ local report_caches = logs.reporter("resolvers","caches") local report_resolvers = logs.reporter("resolvers","caching") local resolvers = resolvers +local cleanpath = resolvers.cleanpath -- intermezzo @@ -72,7 +73,7 @@ local writable, readables, usedreadables = nil, { }, { } local function identify() -- Combining the loops makes it messy. First we check the format cache path -- and when the last component is not present we try to create it. 
- local texmfcaches = resolvers.cleanpathlist("TEXMFCACHE") + local texmfcaches = resolvers.cleanpathlist("TEXMFCACHE") -- forward ref if texmfcaches then for k=1,#texmfcaches do local cachepath = texmfcaches[k] @@ -369,10 +370,12 @@ function caches.contentstate() return content_state or { } end -function caches.loadcontent(cachename,dataname) - local name = caches.hashed(cachename) - local full, path = caches.getfirstreadablefile(addsuffix(name,luasuffixes.lua),"trees") - local filename = file.join(path,name) +function caches.loadcontent(cachename,dataname,filename) + if not filename then + local name = caches.hashed(cachename) + local full, path = caches.getfirstreadablefile(addsuffix(name,luasuffixes.lua),"trees") + filename = file.join(path,name) + end local blob = loadfile(addsuffix(filename,luasuffixes.luc)) or loadfile(addsuffix(filename,luasuffixes.lua)) if blob then local data = blob() @@ -406,10 +409,12 @@ function caches.collapsecontent(content) end end -function caches.savecontent(cachename,dataname,content) - local name = caches.hashed(cachename) - local full, path = caches.setfirstwritablefile(addsuffix(name,luasuffixes.lua),"trees") - local filename = file.join(path,name) -- is full +function caches.savecontent(cachename,dataname,content,filename) + if not filename then + local name = caches.hashed(cachename) + local full, path = caches.setfirstwritablefile(addsuffix(name,luasuffixes.lua),"trees") + filename = file.join(path,name) -- is full + end local luaname = addsuffix(filename,luasuffixes.lua) local lucname = addsuffix(filename,luasuffixes.luc) if trace_locating then diff --git a/tex/context/base/data-tre.lua b/tex/context/base/data-tre.lua index 0a8b00d9b..3f11ca878 100644 --- a/tex/context/base/data-tre.lua +++ b/tex/context/base/data-tre.lua @@ -8,48 +8,60 @@ if not modules then modules = { } end modules ['data-tre'] = { -- \input tree://oeps1/**/oeps.tex -local find, gsub, format = string.find, string.gsub, string.format +local find, gsub, lower = string.find, string.gsub, string.lower +local basename, dirname, joinname = file.basename, file.dirname, file .join +local globdir, isdir = dir.glob, lfs.isdir local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end) -local report_trees = logs.reporter("resolvers","trees") +local report_trees = logs.reporter("resolvers","trees") -local resolvers = resolvers +local resolvers = resolvers +local resolveprefix = resolvers.resolve +local notfound = resolvers.finders.notfound -local done, found, notfound = { }, { }, resolvers.finders.notfound +-- A tree search is rather dumb ... there is some basic caching of searched trees +-- but nothing is cached over runs ... it's also a wildcard one so we cannot use +-- the normal scanner. -function resolvers.finders.tree(specification) +local collectors = { } +local found = { } + +function resolvers.finders.tree(specification) -- to be adapted to new formats local spec = specification.filename - local fnd = found[spec] - if fnd == nil then + local okay = found[spec] + if okay == nil then if spec ~= "" then - local path, name = file.dirname(spec), file.basename(spec) - if path == "" then path = "." end - local hash = done[path] - if not hash then - local pattern = path .. "/*" -- we will use the proper splitter - hash = dir.glob(pattern) - done[path] = hash + local path = dirname(spec) + local name = basename(spec) + if path == "" then + path = "." 
+ end + local names = collectors[path] + if not names then + local pattern = find(path,"/%*+$") and path or (path .. "/*") + names = globdir(pattern) + collectors[path] = names end local pattern = "/" .. gsub(name,"([%.%-%+])", "%%%1") .. "$" - for k=1,#hash do - local v = hash[k] - if find(v,pattern) then - found[spec] = v - return v + for i=1,#names do + local fullname = names[i] + if find(fullname,pattern) then + found[spec] = fullname + return fullname end end end - fnd = notfound() -- false - found[spec] = fnd + okay = notfound() -- false + found[spec] = okay end - return fnd + return okay end function resolvers.locators.tree(specification) local name = specification.filename - local realname = resolvers.resolve(name) -- no shortcut - if realname and realname ~= '' and lfs.isdir(realname) then + local realname = resolveprefix(name) -- no shortcut + if realname and realname ~= '' and isdir(realname) then if trace_locating then report_trees("locator %a found",realname) end @@ -69,7 +81,56 @@ function resolvers.hashers.tree(specification) resolvers.generators.file(specification) end -resolvers.concatinators.tree = resolvers.concatinators.file -resolvers.generators.tree = resolvers.generators.file -resolvers.openers.tree = resolvers.openers.file -resolvers.loaders.tree = resolvers.loaders.file +-- This is a variation on tree lookups but this time we do cache in the given +-- root. We use a similar hasher as the resolvers because we have to deal with +-- for instance trees with 50K xml files plus a similar amount of resources to +-- deal and we don't want too much overhead. + +local collectors = { } + +table.setmetatableindex(collectors, function(t,k) + local rootname = gsub(k,"[/%*]+$","") + local dataname = joinname(rootname,"dirlist") + local data = caches.loadcontent(dataname,"files",dataname) + local content = data and data.content + local lookup = resolvers.get_from_content + if not content then + content = resolvers.scanfiles(rootname) + caches.savecontent(dataname,"files",content,dataname) + end + local files = content.files + local v = function(filename) + local path, name = lookup(content,filename) + if not path then + return filename + elseif type(path) == "table" then + -- maybe a warning that the first name is taken + path = path[1] + end + return joinname(rootname,path,name) + end + t[k] = v + return v +end) + +function resolvers.finders.dirlist(specification) -- can be called directly too + local spec = specification.filename + if spec ~= "" then + local path, name = dirname(spec), basename(spec) + return path and collectors[path](name) or notfound() + end + return notfound() +end + +resolvers.locators .dirlist = resolvers.locators .tree +resolvers.hashers .dirlist = resolvers.hashers .tree +resolvers.generators.dirlist = resolvers.generators.file +resolvers.openers .dirlist = resolvers.openers .file +resolvers.loaders .dirlist = resolvers.loaders .file + +-- local locate = collectors[ [[E:\temporary\mb-mp]] ] +-- local locate = collectors( [[\\storage-2\resources\mb-mp]] ) + +-- print(resolvers.findtexfile("tree://e:/temporary/mb-mp/**/VB_wmf_03_vw_01d_ant.jpg")) +-- print(resolvers.findtexfile("tree://t:/**/tufte.tex")) +-- print(resolvers.findtexfile("dirlist://e:/temporary/mb-mp/**/VB_wmf_03_vw_01d_ant.jpg")) diff --git a/tex/context/base/data-zip.lua b/tex/context/base/data-zip.lua index 5db69670c..a9d4d7a95 100644 --- a/tex/context/base/data-zip.lua +++ b/tex/context/base/data-zip.lua @@ -6,7 +6,7 @@ if not modules then modules = { } end modules ['data-zip'] = { 
license = "see context related readme files" } --- partly redone .. needs testing +-- real old code ... partly redone .. needs testing due to changes as well as a decent overhaul local format, find, match = string.format, string.find, string.match @@ -64,7 +64,7 @@ function zip.openarchive(name) local arch = archives[name] if not arch then local full = resolvers.findfile(name) or "" - arch = (full ~= "" and zip.open(full)) or false + arch = full ~= "" and zip.open(full) or false archives[name] = arch end return arch @@ -235,30 +235,42 @@ function resolvers.usezipfile(archive) end function resolvers.registerzipfile(z,tree) - local files, filter = { }, "" - if tree == "" then - filter = "^(.+)/(.-)$" - else - filter = format("^%s/(.+)/(.-)$",tree) - end + local names = { } + local files = { } -- somewhat overkill .. todo + local remap = { } -- somewhat overkill .. todo + local n = 0 + local filter = tree == "" and "^(.+)/(.-)$" or format("^%s/(.+)/(.-)$",tree) + local register = resolvers.registerfile if trace_locating then report_zip("registering: using filter %a",filter) end - local register, n = resolvers.registerfile, 0 for i in z:files() do - local path, name = match(i.filename,filter) - if path then - if name and name ~= '' then - register(files, name, path) - n = n + 1 - else - -- directory + local filename = i.filename + local path, name = match(filename,filter) + if not path then + n = n + 1 + register(names,filename,"") + local usedname = lower(filename) + files[usedname] = "" + if usedname ~= filename then + remap[usedname] = filename end - else - register(files, i.filename, '') + elseif name and name ~= "" then n = n + 1 + register(names,name,path) + local usedname = lower(name) + files[usedname] = path + if usedname ~= name then + remap[usedname] = name + end + else + -- directory end end report_zip("registering: %s files registered",n) - return files + return { + -- metadata = { }, + files = files, + remap = remap, + } end diff --git a/tex/context/base/file-job.lua b/tex/context/base/file-job.lua index c88eb7e9d..ca0de2696 100644 --- a/tex/context/base/file-job.lua +++ b/tex/context/base/file-job.lua @@ -42,6 +42,7 @@ local is_qualified_path = file.is_qualified_path local cleanpath = resolvers.cleanpath local inputstack = resolvers.inputstack +local resolveprefix = resolvers.resolve local v_outer = variables.outer local v_text = variables.text @@ -535,7 +536,7 @@ local function process(what,name) local depth = #typestack local process -- - name = resolvers.resolve(name) + name = resolveprefix(name) -- -- if not tolerant then -- okay, would be best but not compatible with mkii diff --git a/tex/context/base/font-map.lua b/tex/context/base/font-map.lua index f74e13e81..e5f587105 100644 --- a/tex/context/base/font-map.lua +++ b/tex/context/base/font-map.lua @@ -149,6 +149,8 @@ local namesplitter = Ct(C((1 - ligseparator - varseparator)^1) * (ligseparator * -- print(string.formatters["%s: [% t]"](name,split)) -- end +-- maybe: ff fi fl ffi ffl => f_f f_i f_l f_f_i f_f_l + -- test("i.f_") -- test("this") -- test("this.that") diff --git a/tex/context/base/font-mis.lua b/tex/context/base/font-mis.lua index 7385d6f31..8debcc3bb 100644 --- a/tex/context/base/font-mis.lua +++ b/tex/context/base/font-mis.lua @@ -22,7 +22,7 @@ local handlers = fonts.handlers handlers.otf = handlers.otf or { } local otf = handlers.otf -otf.version = otf.version or 2.755 +otf.version = otf.version or 2.756 otf.cache = otf.cache or containers.define("fonts", "otf", otf.version, true) function 
otf.loadcached(filename,format,sub) diff --git a/tex/context/base/font-otf.lua b/tex/context/base/font-otf.lua index 0a5d1cfea..ed9cabedc 100644 --- a/tex/context/base/font-otf.lua +++ b/tex/context/base/font-otf.lua @@ -48,7 +48,7 @@ local otf = fonts.handlers.otf otf.glists = { "gsub", "gpos" } -otf.version = 2.755 -- beware: also sync font-mis.lua +otf.version = 2.756 -- beware: also sync font-mis.lua otf.cache = containers.define("fonts", "otf", otf.version, true) local fontdata = fonts.hashes.identifiers @@ -1473,6 +1473,14 @@ actions["reorganize lookups"] = function(data,filename,raw) -- we could check fo rule.current = s_hashed(names,s_h_cache) end rule.glyphs = nil + local lookups = rule.lookups + if lookups then + for i=1,#names do + if not lookups[i] then + lookups[i] = "" -- fix sparse array + end + end + end end end end diff --git a/tex/context/base/font-syn.lua b/tex/context/base/font-syn.lua index 18ed46a2f..a1bee5d54 100644 --- a/tex/context/base/font-syn.lua +++ b/tex/context/base/font-syn.lua @@ -33,7 +33,7 @@ local exists = io.exists local findfile = resolvers.findfile local cleanpath = resolvers.cleanpath -local resolveresolved = resolvers.resolve +local resolveprefix = resolvers.resolve local settings_to_hash = utilities.parsers.settings_to_hash_tolerant @@ -1065,15 +1065,15 @@ local function analyzefiles(olddata) resolvers.dowithfilesintree(".*%." .. suffix .. "$", function(method,root,path,name) if method == "file" or method == "tree" then local completename = root .."/" .. path .. "/" .. name - completename = resolveresolved(completename) -- no shortcut + completename = resolveprefix(completename) -- no shortcut identify(completename,name,suffix,name) return true end end, function(blobtype,blobpath,pattern) - blobpath = resolveresolved(blobpath) -- no shortcut + blobpath = resolveprefix(blobpath) -- no shortcut report_names("scanning path %a for %s files",blobpath,suffix) end, function(blobtype,blobpath,pattern,total,checked,done) - blobpath = resolveresolved(blobpath) -- no shortcut + blobpath = resolveprefix(blobpath) -- no shortcut report_names("%s entries found, %s %s files checked, %s okay",total,checked,suffix,done) end) end diff --git a/tex/context/base/l-dir.lua b/tex/context/base/l-dir.lua index 257212060..660529baf 100644 --- a/tex/context/base/l-dir.lua +++ b/tex/context/base/l-dir.lua @@ -9,7 +9,7 @@ if not modules then modules = { } end modules ['l-dir'] = { -- dir.expandname will be merged with cleanpath and collapsepath local type, select = type, select -local find, gmatch, match, gsub = string.find, string.gmatch, string.match, string.gsub +local find, gmatch, match, gsub, sub = string.find, string.gmatch, string.match, string.gsub, string.sub local concat, insert, remove, unpack = table.concat, table.insert, table.remove, table.unpack local lpegmatch = lpeg.match @@ -21,8 +21,8 @@ local lfs = lfs local attributes = lfs.attributes local walkdir = lfs.dir -local isdir = lfs.isdir -local isfile = lfs.isfile +local isdir = lfs.isdir -- not robust, will be overloaded anyway +local isfile = lfs.isfile -- not robust, will be overloaded anyway local currentdir = lfs.currentdir local chdir = lfs.chdir local mkdir = lfs.mkdir @@ -31,20 +31,36 @@ local onwindows = os.type == "windows" or find(os.getenv("PATH"),";",1,true) -- in case we load outside luatex -if not isdir then - function isdir(name) - local a = attributes(name) - return a and a.mode == "directory" +if onwindows then + + -- lfs.isdir does not like trailing / + -- lfs.dir accepts trailing / + + 
isdir = function(name) + name = gsub(name,"([/\\]+)$","/.") + return attributes(name,"mode") == "directory" end - lfs.isdir = isdir -end -if not isfile then - function isfile(name) - local a = attributes(name) - return a and a.mode == "file" + isfile = function(name) + return attributes(name,"mode") == "file" + end + + lfs.isdir = isdir + lfs.isfile = isfile + +else + + isdir = function(name) + return attributes(name,"mode") == "directory" end + + isfile = function(name) + return attributes(name,"mode") == "file" + end + + lfs.isdir = isdir lfs.isfile = isfile + end -- handy @@ -53,63 +69,104 @@ function dir.current() return (gsub(currentdir(),"\\","/")) end --- optimizing for no find (*) does not save time - ---~ local function globpattern(path,patt,recurse,action) -- fails in recent luatex due to some change in lfs ---~ local ok, scanner ---~ if path == "/" then ---~ ok, scanner = xpcall(function() return walkdir(path..".") end, function() end) -- kepler safe ---~ else ---~ ok, scanner = xpcall(function() return walkdir(path) end, function() end) -- kepler safe ---~ end ---~ if ok and type(scanner) == "function" then ---~ if not find(path,"/$") then path = path .. '/' end ---~ for name in scanner do ---~ local full = path .. name ---~ local mode = attributes(full,'mode') ---~ if mode == 'file' then ---~ if find(full,patt) then ---~ action(full) ---~ end ---~ elseif recurse and (mode == "directory") and (name ~= '.') and (name ~= "..") then ---~ globpattern(full,patt,recurse,action) ---~ end ---~ end ---~ end ---~ end - -local lfsisdir = isdir - -local function isdir(path) - path = gsub(path,"[/\\]+$","") - return lfsisdir(path) -end +-- somewhat optimized -lfs.isdir = isdir +local function glob_pattern_function(path,patt,recurse,action) + if isdir(path) then + local usedpath + if path == "/" then + usedpath = "/." + elseif not find(path,"/$") then + usedpath = path .. "/." + path = path .. "/" + else + usedpath = path + end + local dirs + for name in walkdir(usedpath) do + if name ~= "." and name ~= ".." then + local full = path .. name + local mode = attributes(full,'mode') + if mode == 'file' then + if not patt or find(full,patt) then + action(full) + end + elseif recurse and mode == "directory" then + if not dirs then + dirs = { full } + else + dirs[#dirs+1] = full + end + end + end + end + if dirs then + for i=1,#dirs do + glob_pattern_function(dirs[i],patt,recurse,action) + end + end + end +end -local function globpattern(path,patt,recurse,action) - if path == "/" then - path = path .. "." - elseif not find(path,"/$") then - path = path .. '/' +local function glob_pattern_table(path,patt,recurse,result) + if not result then + result = { } end - if isdir(path) then -- lfs.isdir does not like trailing / - for name in walkdir(path) do -- lfs.dir accepts trailing / - local full = path .. name - local mode = attributes(full,'mode') - if mode == 'file' then - if find(full,patt) then - action(full) + if isdir(path) then + local usedpath + if path == "/" then + usedpath = "/." + elseif not find(path,"/$") then + usedpath = path .. "/." + path = path .. "/" + else + usedpath = path + end + local dirs + for name in walkdir(usedpath) do + if name ~= "." and name ~= ".." then + local full = path .. 
name + local mode = attributes(full,'mode') + if mode == 'file' then + if not patt or find(full,patt) then + result[#result+1] = full + end + elseif recurse and mode == "directory" then + if not dirs then + dirs = { full } + else + dirs[#dirs+1] = full + end end - elseif recurse and (mode == "directory") and (name ~= '.') and (name ~= "..") then - globpattern(full,patt,recurse,action) end end + if dirs then + for i=1,#dirs do + glob_pattern_table(dirs[i],patt,recurse,result) + end + end + end + return result +end + +local function globpattern(path,patt,recurse,method) + local kind = type(method) + if pattern and sub(patt,1,-3) == path then + patt = false + end + if kind == "function" then + return glob_pattern_function(path,patt,recurse,method) + elseif kind == "table" then + return glob_pattern_table(path,patt,recurse,method) + else + return glob_pattern_table(path,patt,recurse,{ }) end end dir.globpattern = globpattern +-- never or seldom used so far: + local function collectpattern(path,patt,recurse,result) local ok, scanner result = result or { } @@ -119,18 +176,26 @@ local function collectpattern(path,patt,recurse,result) ok, scanner, first = xpcall(function() return walkdir(path) end, function() end) -- kepler safe end if ok and type(scanner) == "function" then - if not find(path,"/$") then path = path .. '/' end + if not find(path,"/$") then + path = path .. '/' + end for name in scanner, first do - local full = path .. name - local attr = attributes(full) - local mode = attr.mode - if mode == 'file' then - if find(full,patt) then + if name == "." then + -- skip + elseif name == ".." then + -- skip + else + local full = path .. name + local attr = attributes(full) + local mode = attr.mode + if mode == 'file' then + if find(full,patt) then + result[name] = attr + end + elseif recurse and mode == "directory" then + attr.list = collectpattern(full,patt,recurse) result[name] = attr end - elseif recurse and (mode == "directory") and (name ~= '.') and (name ~= "..") then - attr.list = collectpattern(full,patt,recurse) - result[name] = attr end end end @@ -143,15 +208,10 @@ local separator if onwindows then -- we could sanitize here --- pattern = Ct { --- [1] = (C(P(".") + S("/\\")^1) + C(R("az","AZ") * P(":") * S("/\\")^0) + Cc("./")) * V(2) * V(3), --- [2] = C(((1-S("*?/\\"))^0 * S("/\\"))^0), --- [3] = C(P(1)^0) --- } - local slash = S("/\\") / "/" - pattern = Ct { +-- pattern = Ct { + pattern = { [1] = (Cs(P(".") + slash^1) + Cs(R("az","AZ") * P(":") * slash^0) + Cc("./")) * V(2) * V(3), [2] = Cs(((1-S("*?/\\"))^0 * slash)^0), [3] = Cs(P(1)^0) @@ -159,7 +219,8 @@ if onwindows then -- we could sanitize here else -- assume unix - pattern = Ct { +-- pattern = Ct { + pattern = { [1] = (C(P(".") + P("/")^1) + Cc("./")) * V(2) * V(3), [2] = C(((1-S("*?/"))^0 * P("/"))^0), [3] = C(P(1)^0) @@ -186,12 +247,11 @@ local function glob(str,t) elseif isfile(str) then t(str) else - local split = lpegmatch(pattern,str) -- we could use the file splitter - if split then - local root, path, base = split[1], split[2], split[3] + local root, path, base = lpegmatch(pattern,str) -- we could use the file splitter + if root and path and base then local recurse = find(base,"**",1,true) -- find(base,"%*%*") - local start = root .. path - local result = lpegmatch(filter,start .. base) + local start = root .. path + local result = lpegmatch(filter,start .. 
base) globpattern(start,result,recurse,t) end end @@ -210,16 +270,12 @@ local function glob(str,t) return { str } end else - local split = lpegmatch(pattern,str) -- we could use the file splitter - if split then - local t = t or { } - local action = action or function(name) t[#t+1] = name end - local root, path, base = split[1], split[2], split[3] - local recurse = find(base,"**",1,true) -- find(base,"%*%*") - local start = root .. path - local result = lpegmatch(filter,start .. base) - globpattern(start,result,recurse,action) - return t + local root, path, base = lpegmatch(pattern,str) -- we could use the file splitter + if root and path and base then + local recurse = find(base,"**",1,true) -- find(base,"%*%*") + local start = root .. path + local result = lpegmatch(filter,start .. base) + return globpattern(start,result,recurse,t) else return { } end @@ -229,11 +285,20 @@ end dir.glob = glob ---~ list = dir.glob("**/*.tif") ---~ list = dir.glob("/**/*.tif") ---~ list = dir.glob("./**/*.tif") ---~ list = dir.glob("oeps/**/*.tif") ---~ list = dir.glob("/oeps/**/*.tif") +-- local c = os.clock() +-- local t = dir.glob("e:/**") +-- local t = dir.glob("t:/sources/**") +-- local t = dir.glob("t:/**") +-- print(os.clock()-c,#t) + +-- for i=1,3000 do print(t[i]) end +-- for i=1,10 do print(t[i]) end + +-- list = dir.glob("**/*.tif") +-- list = dir.glob("/**/*.tif") +-- list = dir.glob("./**/*.tif") +-- list = dir.glob("oeps/**/*.tif") +-- list = dir.glob("/oeps/**/*.tif") local function globfiles(path,recurse,func,files) -- func == pattern or function if type(func) == "string" then @@ -275,10 +340,10 @@ function dir.ls(pattern) return concat(glob(pattern),"\n") end ---~ mkdirs("temp") ---~ mkdirs("a/b/c") ---~ mkdirs(".","/a/b/c") ---~ mkdirs("a","b","c") +-- mkdirs("temp") +-- mkdirs("a/b/c") +-- mkdirs(".","/a/b/c") +-- mkdirs("a","b","c") local make_indeed = true -- false @@ -347,17 +412,17 @@ if onwindows then return pth, (isdir(pth) == true) end - --~ print(dir.mkdirs("","","a","c")) - --~ print(dir.mkdirs("a")) - --~ print(dir.mkdirs("a:")) - --~ print(dir.mkdirs("a:/b/c")) - --~ print(dir.mkdirs("a:b/c")) - --~ print(dir.mkdirs("a:/bbb/c")) - --~ print(dir.mkdirs("/a/b/c")) - --~ print(dir.mkdirs("/aaa/b/c")) - --~ print(dir.mkdirs("//a/b/c")) - --~ print(dir.mkdirs("///a/b/c")) - --~ print(dir.mkdirs("a/bbb//ccc/")) + -- print(dir.mkdirs("","","a","c")) + -- print(dir.mkdirs("a")) + -- print(dir.mkdirs("a:")) + -- print(dir.mkdirs("a:/b/c")) + -- print(dir.mkdirs("a:b/c")) + -- print(dir.mkdirs("a:/bbb/c")) + -- print(dir.mkdirs("/a/b/c")) + -- print(dir.mkdirs("/aaa/b/c")) + -- print(dir.mkdirs("//a/b/c")) + -- print(dir.mkdirs("///a/b/c")) + -- print(dir.mkdirs("a/bbb//ccc/")) else @@ -408,13 +473,13 @@ else return pth, (isdir(pth) == true) end - --~ print(dir.mkdirs("","","a","c")) - --~ print(dir.mkdirs("a")) - --~ print(dir.mkdirs("/a/b/c")) - --~ print(dir.mkdirs("/aaa/b/c")) - --~ print(dir.mkdirs("//a/b/c")) - --~ print(dir.mkdirs("///a/b/c")) - --~ print(dir.mkdirs("a/bbb//ccc/")) + -- print(dir.mkdirs("","","a","c")) + -- print(dir.mkdirs("a")) + -- print(dir.mkdirs("/a/b/c")) + -- print(dir.mkdirs("/aaa/b/c")) + -- print(dir.mkdirs("//a/b/c")) + -- print(dir.mkdirs("///a/b/c")) + -- print(dir.mkdirs("a/bbb//ccc/")) end @@ -424,7 +489,7 @@ dir.makedirs = dir.mkdirs if onwindows then - function dir.expandname(str) -- will be merged with cleanpath and collapsepath + function dir.expandname(str) -- will be merged with cleanpath and collapsepath\ local first, nothing, last = 
match(str,"^(//)(//*)(.*)$") if first then first = dir.current() .. "/" -- dir.current sanitizes diff --git a/tex/context/base/l-table.lua b/tex/context/base/l-table.lua index ea675b081..ca067fb1e 100644 --- a/tex/context/base/l-table.lua +++ b/tex/context/base/l-table.lua @@ -164,14 +164,14 @@ local function sortedhash(t,cmp) end local n = 0 local m = #s - local function kv(s) + local function kv() -- (s) if n < m then n = n + 1 local k = s[n] return k, t[k] end end - return kv, s + return kv -- , s else return nothing end @@ -1114,3 +1114,49 @@ function table.values(t,s) -- optional sort flag return { } end end + +-- maybe this will move to util-tab.lua + +-- for k, v in table.filtered(t,pattern) do ... end +-- for k, v in table.filtered(t,pattern,true) do ... end +-- for k, v in table.filtered(t,pattern,true,cmp) do ... end + +function table.filtered(t,pattern,sort,cmp) + if t and type(pattern) == "string" then + if sort then + local s + if cmp then + -- it would be nice if the sort function would accept a third argument (or nicer, an optional first) + s = sortedhashkeys(t,function(a,b) return cmp(t,a,b) end) + else + s = sortedkeys(t) -- the robust one + end + local n = 0 + local m = #s + local function kv(s) + while n < m do + n = n + 1 + local k = s[n] + if find(k,pattern) then + return k, t[k] + end + end + end + return kv, s + else + local n = next(t) + local function iterator() + while n do + local k = n + n = next(t,k) + if find(k,pattern) then + return k, t[k] + end + end + end + return iterator, t + end + else + return nothing + end +end diff --git a/tex/context/base/lpdf-fmt.lua b/tex/context/base/lpdf-fmt.lua index 568b801b4..9a5005782 100644 --- a/tex/context/base/lpdf-fmt.lua +++ b/tex/context/base/lpdf-fmt.lua @@ -710,7 +710,9 @@ function codeinjections.setformat(s) end end function codeinjections.setformat(noname) - report_backend("error, format is already set to %a, ignoring %a",formatname,noname.format) + if trace_format then + report_backend("error, format is already set to %a, ignoring %a",formatname,noname.format) + end end else report_backend("error, format %a is not supported",format) diff --git a/tex/context/base/lpdf-pda.xml b/tex/context/base/lpdf-pda.xml index 2d8e7b6f5..3f6b969c0 100644 --- a/tex/context/base/lpdf-pda.xml +++ b/tex/context/base/lpdf-pda.xml @@ -3,15 +3,20 @@ + application/pdf - + - + + + + + diff --git a/tex/context/base/lpdf-pdx.xml b/tex/context/base/lpdf-pdx.xml index 42e11650e..d55e1fdf3 100644 --- a/tex/context/base/lpdf-pdx.xml +++ b/tex/context/base/lpdf-pdx.xml @@ -8,10 +8,14 @@ application/pdf - + - + + + + + diff --git a/tex/context/base/lpdf-xmp.lua b/tex/context/base/lpdf-xmp.lua index c8b2d236c..739b29ef7 100644 --- a/tex/context/base/lpdf-xmp.lua +++ b/tex/context/base/lpdf-xmp.lua @@ -26,7 +26,7 @@ local pdfconstant = lpdf.constant local pdfreference = lpdf.reference local pdfflushstreamobject = lpdf.flushstreamobject --- I wonder why this begin end is empty / w (no time now to look into it) +-- I wonder why this begin end is empty / w (no time now to look into it) / begin can also be "?" 
local xpacket = [[ @@ -50,7 +50,7 @@ local mapping = { -- Dublin Core schema ["Author"] = "rdf:Description/dc:creator/rdf:Seq/rdf:li", ["Format"] = "rdf:Description/dc:format", -- optional, but nice to have - ["Subject"] = "rdf:Description/dc:description", + ["Subject"] = "rdf:Description/dc:description/rdf:Alt/rdf:li", ["Title"] = "rdf:Description/dc:title/rdf:Alt/rdf:li", -- XMP Basic schema ["CreateDate"] = "rdf:Description/xmp:CreateDate", @@ -105,7 +105,7 @@ local function valid_xmp() if xmpfile ~= "" then report_xmp("using file %a",xmpfile) end - local xmpdata = (xmpfile ~= "" and io.loaddata(xmpfile)) or "" + local xmpdata = xmpfile ~= "" and io.loaddata(xmpfile) or "" xmp = xml.convert(xmpdata) end return xmp diff --git a/tex/context/base/lxml-tex.lua b/tex/context/base/lxml-tex.lua index 98f154b13..700259f4e 100644 --- a/tex/context/base/lxml-tex.lua +++ b/tex/context/base/lxml-tex.lua @@ -19,6 +19,8 @@ local P, S, C, Cc = lpeg.P, lpeg.S, lpeg.C, lpeg.Cc local tex, xml = tex, xml local lowerchars, upperchars, lettered = characters.lower, characters.upper, characters.lettered +local basename, dirname, joinfile = file.basename, file.dirname, file.join + lxml = lxml or { } local lxml = lxml @@ -39,24 +41,28 @@ local xmlunprivatized, xmlprivatetoken, xmlprivatecodes = xml.unprivatized, xml. local xmlstripelement = xml.stripelement local xmlinclusion, xmlinclusions = xml.inclusion, xml.inclusions -local variables = interfaces and interfaces.variables or { } +local variables = interfaces and interfaces.variables or { } local settings_to_hash = utilities.parsers.settings_to_hash local insertbeforevalue = utilities.tables.insertbeforevalue local insertaftervalue = utilities.tables.insertaftervalue -local starttiming, stoptiming = statistics.starttiming, statistics.stoptiming +local resolveprefix = resolvers.resolve + +local starttiming = statistics.starttiming +local stoptiming = statistics.stoptiming -local trace_setups = false trackers.register("lxml.setups", function(v) trace_setups = v end) -local trace_loading = false trackers.register("lxml.loading", function(v) trace_loading = v end) -local trace_access = false trackers.register("lxml.access", function(v) trace_access = v end) -local trace_comments = false trackers.register("lxml.comments", function(v) trace_comments = v end) -local trace_entities = false trackers.register("xml.entities", function(v) trace_entities = v end) +local trace_setups = false trackers.register("lxml.setups", function(v) trace_setups = v end) +local trace_loading = false trackers.register("lxml.loading", function(v) trace_loading = v end) +local trace_access = false trackers.register("lxml.access", function(v) trace_access = v end) +local trace_comments = false trackers.register("lxml.comments", function(v) trace_comments = v end) +local trace_entities = false trackers.register("xml.entities", function(v) trace_entities = v end) -local report_lxml = logs.reporter("xml","tex") -local report_xml = logs.reporter("xml","tex") +local report_lxml = logs.reporter("xml","tex") +local report_xml = logs.reporter("xml","tex") -local forceraw, rawroot = false, nil +local forceraw = false +local forceraw = nil -- tex entities -- @@ -437,37 +443,6 @@ function lxml.register(id,xmltable,filename) return xmltable end --- function lxml.include(id,pattern,attribute,recurse,resolve) --- starttiming(xml) --- local root = getid(id) --- xml.include(root,pattern,attribute,recurse,function(filename) --- if filename then --- -- preprocessing --- filename = 
commands.preparedfile(filename) --- -- some protection --- if file.dirname(filename) == "" and root.filename then --- local dn = file.dirname(root.filename) --- if dn ~= "" then --- filename = file.join(dn,filename) --- end --- end --- if trace_loading then --- report_lxml("including file %a",filename) --- end --- -- handy if we have a flattened structure --- if resolve then --- filename = resolvers.resolve(filename) or filename --- end --- -- todo: check variants and provide --- noffiles, nofconverted = noffiles + 1, nofconverted + 1 --- return resolvers.loadtexfile(filename) or "" --- else --- return "" --- end --- end) --- stoptiming(xml) --- end - -- recurse prepare rootpath resolve basename local options_true = { "recurse", "prepare", "rootpath" } @@ -493,17 +468,17 @@ function lxml.include(id,pattern,attribute,options) end -- handy if we have a flattened structure if options.basename then - filename = file.basename(filename) + filename = basename(filename) end if options.resolve then - filename = resolvers.resolve(filename) or filename + filename = resolveprefix(filename) or filename end -- some protection if options.rootpath then - if file.dirname(filename) == "" and root.filename then - local dn = file.dirname(root.filename) + if dirname(filename) == "" and root.filename then + local dn = dirname(root.filename) if dn ~= "" then - filename = file.join(dn,filename) + filename = joinfile(dn,filename) end end end diff --git a/tex/context/base/mlib-lua.lua b/tex/context/base/mlib-lua.lua index e3a3ba5d8..fae915c98 100644 --- a/tex/context/base/mlib-lua.lua +++ b/tex/context/base/mlib-lua.lua @@ -21,6 +21,8 @@ local report_luarun = logs.reporter("metapost","lua") local trace_luarun = false trackers.register("metapost.lua",function(v) trace_luarun = v end) local trace_enabled = true +local be_tolerant = true directives.register("metapost.lua.tolerant",function(v) be_tolerant = v end) + mp = mp or { } -- system namespace MP = MP or { } -- user namespace @@ -146,14 +148,52 @@ local f_code = formatters["%s return mp._f_()"] local cache, n = { }, 0 -- todo: when > n then reset cache or make weak +-- function metapost.runscript(code) +-- if trace_enabled and trace_luarun then +-- report_luarun("code: %s",code) +-- end +-- local f +-- if n > 100 then +-- cache = nil -- forget about caching +-- f = loadstring(f_code(code)) +-- else +-- f = cache[code] +-- if not f then +-- f = loadstring(f_code(code)) +-- if f then +-- n = n + 1 +-- cache[code] = f +-- end +-- end +-- end +-- if f then +-- local result = f() +-- if result then +-- local t = type(result) +-- if t == "number" then +-- return f_numeric(result) +-- elseif t == "string" then +-- return result +-- else +-- return tostring(result) +-- end +-- end +-- end +-- return "" +-- end + function metapost.runscript(code) - if trace_enabled and trace_luarun then + local trace = trace_enabled and trace_luarun + if trace then report_luarun("code: %s",code) end local f if n > 100 then cache = nil -- forget about caching f = loadstring(f_code(code)) + if not f and be_tolerant then + f = loadstring(code) + end else f = cache[code] if not f then @@ -161,6 +201,12 @@ function metapost.runscript(code) if f then n = n + 1 cache[code] = f + elseif be_tolerant then + f = loadstring(code) + if f then + n = n + 1 + cache[code] = f + end end end end @@ -169,13 +215,21 @@ function metapost.runscript(code) if result then local t = type(result) if t == "number" then - return f_numeric(result) + t = f_numeric(result) elseif t == "string" then - return result + t = 
result else - return tostring(result) + t = tostring(result) end + if trace then + report_luarun("result: %s",code) + end + return t + elseif trace then + report_luarun("no result") end + else + report_luarun("no result, invalid code") end return "" end @@ -208,5 +262,8 @@ mp.number = mp.numeric function metapost.initializescriptrunner(mpx,trialrun) currentmpx = mpx - trace_enabled = not trialrun + if trace_luarun then + report_luarun("type of run: %s", trialrun and "trial" or "final") + end + -- trace_enabled = not trialrun blocks too much end diff --git a/tex/context/base/mult-low.lua b/tex/context/base/mult-low.lua index d9cc167d8..3728b0433 100644 --- a/tex/context/base/mult-low.lua +++ b/tex/context/base/mult-low.lua @@ -142,7 +142,7 @@ return { "startcontextdefinitioncode", "stopcontextdefinitioncode", "texdefinition", -- - "doifsetupselse", "doifsetups", "doifnotsetups", "setup", "setups", "texsetup", "xmlsetup", "luasetup", "directsetup", + "doifsetupselse", "doifsetups", "doifnotsetups", "setup", "setups", "texsetup", "xmlsetup", "luasetup", "directsetup", "fastsetup", "doifelsecommandhandler","doifnotcommandhandler","doifcommandhandler", -- "newmode", "setmode", "resetmode", diff --git a/tex/context/base/publ-aut.lua b/tex/context/base/publ-aut.lua index 72766763d..b35af1bcc 100644 --- a/tex/context/base/publ-aut.lua +++ b/tex/context/base/publ-aut.lua @@ -11,12 +11,13 @@ if not characters then dofile(resolvers.findfile("char-ini.lua")) end +local lpeg = lpeg + local context = context local chardata = characters.data local tostring = tostring local concat = table.concat -local lpeg = lpeg local utfchar = utf.char local formatters = string.formatters @@ -204,6 +205,8 @@ local function splitauthorstring(str) return authors end +authors.splitstring = splitauthorstring + -- local function splitauthors(dataset,tag,field) -- local entries = datasets[dataset] -- local luadata = entries.luadata @@ -214,214 +217,38 @@ end -- if not entry then -- return { } -- end --- return splitauthorstring(entry[field]) +-- return splitauthorstring(entry[field]) -- end local function the_initials(initials,symbol) - local t, symbol = { }, symbol or "." - for i=1,#initials do - t[i] = initials[i] .. 
symbol - end - return t -end - --- authors - -local settings = { } - --- local defaultsettings = { --- firstnamesep = " ", --- initialsep = " ", --- vonsep = " ", --- surnamesep = " ", --- juniorsep = " ", --- surnamejuniorsep = ", ", --- juniorjuniorsep = ", ", --- surnamefirstnamesep = ", ", --- surnameinitialsep = ", ", --- namesep = ", ", --- lastnamesep = " and ", --- finalnamesep = " and ", --- etallimit = 1000, --- etaldisplay = 1000, --- etaltext = "", --- } - -local defaultsettings = { - firstnamesep = [[\btxlistvariantparameter{firstnamesep}]], - vonsep = [[\btxlistvariantparameter{vonsep}]], - surnamesep = [[\btxlistvariantparameter{surnamesep}]], - juniorsep = [[\btxlistvariantparameter{juniorsep}]], - surnamejuniorsep = [[\btxlistvariantparameter{surnamejuniorsep}]], - juniorjuniorsep = [[\btxlistvariantparameter{juniorjuniorsep}]], - surnamefirstnamesep = [[\btxlistvariantparameter{surnamefirstnamesep}]], - surnameinitialsep = [[\btxlistvariantparameter{surnameinitialsep}]], - initialsep = [[\btxlistvariantparameter{initialsep}]], - namesep = [[\btxlistvariantparameter{namesep}]], - lastnamesep = [[\btxlistvariantparameter{lastnamesep}]], - finalnamesep = [[\btxlistvariantparameter{finalnamesep}]], - -- - etaltext = [[\btxlistvariantparameter{etaltext}]], - -- - etallimit = 1000, - etaldisplay = 1000, -} - -function authors.setsettings(s) -end - -authors.splitstring = splitauthorstring - --- [firstnames] [firstnamesep] [vons] [vonsep] [surnames] [juniors] [surnamesep] (Taco, von Hoekwater, jr) - -function authors.normal(author,settings) - local firstnames, vons, surnames, juniors = author.firstnames, author.vons, author.surnames, author.juniors - local result, settings = { }, settings or defaultsettings - if firstnames and #firstnames > 0 then - result[#result+1] = concat(firstnames," ") - result[#result+1] = settings.firstnamesep or defaultsettings.firstnamesep - end - if vons and #vons > 0 then - result[#result+1] = concat(vons," ") - result[#result+1] = settings.vonsep or defaultsettings.vonsep - end - if surnames and #surnames > 0 then - result[#result+1] = concat(surnames," ") - if juniors and #juniors > 0 then - result[#result+1] = settings.surnamejuniorsep or defaultsettings.surnamejuniorsep - result[#result+1] = concat(juniors," ") - end - elseif juniors and #juniors > 0 then - result[#result+1] = concat(juniors," ") - end - return concat(result) -end - --- [initials] [initialsep] [vons] [vonsep] [surnames] [juniors] [surnamesep] (T, von Hoekwater, jr) - -function authors.normalshort(author,settings) - local initials, vons, surnames, juniors = author.initials, author.vons, author.surnames, author.juniors - local result, settings = { }, settings or defaultsettings - if initials and #initials > 0 then - result[#result+1] = concat(the_initials(initials)," ") - result[#result+1] = settings.initialsep or defaultsettings.initialsep - end - if vons and #vons > 0 then - result[#result+1] = concat(vons," ") - result[#result+1] = settings.vonsep or defaultsettings.vonsep - end - if surnames and #surnames > 0 then - result[#result+1] = concat(surnames," ") - if juniors and #juniors > 0 then - result[#result+1] = settings.surnamejuniorsep or defaultsettings.surnamejuniorsep - result[#result+1] = concat(juniors," ") - end - elseif juniors and #juniors > 0 then - result[#result+1] = concat(juniors," ") - end - return concat(result) -end - --- [vons] [vonsep] [surnames] [surnamejuniorsep] [juniors] [surnamefirstnamesep] [firstnames] (von Hoekwater jr, Taco) - -function 
authors.inverted(author,settings) - local firstnames, vons, surnames, juniors = author.firstnames, author.vons, author.surnames, author.juniors - local result, settings = { }, settings or defaultsettings - if vons and #vons > 0 then - result[#result+1] = concat(vons," ") - result[#result+1] = settings.vonsep or defaultsettings.vonsep - end - if surnames and #surnames > 0 then - result[#result+1] = concat(surnames," ") - if juniors and #juniors > 0 then - result[#result+1] = settings.surnamejuniorsep or defaultsettings.surnamejuniorsep - result[#result+1] = concat(juniors," ") - end - elseif juniors and #juniors > 0 then - result[#result+1] = concat(juniors," ") - end - if firstnames and #firstnames > 0 then - result[#result+1] = settings.surnamefirstnamesep or defaultsettings.surnamefirstnamesep - result[#result+1] = concat(firstnames," ") - end - return concat(result) -end - --- [vons] [vonsep] [surnames] [surnamejuniorsep] [juniors] [surnamefirstnamesep] [initials] (von Hoekwater jr, T) - -function authors.invertedshort(author,settings) - local vons, surnames, initials, juniors = author.vons, author.surnames, author.initials, author.juniors - local result, settings = { }, settings or defaultsettings - if vons and #vons > 0 then - result[#result+1] = concat(vons," ") - result[#result+1] = settings.vonsep or defaultsettings.vonsep - end - if surnames and #surnames > 0 then - result[#result+1] = concat(surnames," ") - if juniors and #juniors > 0 then - result[#result+1] = settings.surnamejuniorsep or defaultsettings.surnamejuniorsep - result[#result+1] = concat(juniors," ") - end - elseif juniors and #juniors > 0 then - result[#result+1] = concat(juniors," ") - end - if initials and #initials > 0 then - result[#result+1] = settings.surnameinitialsep or defaultsettings.surnameinitialsep - result[#result+1] = concat(the_initials(initials)," ") - end - return concat(result) -end - --- [vons] [vonsep] [surnames] - -function authors.name(author,settings) - local vons, surnames = author.vons, author.surnames - local result, settings = { }, settings or defaultsettings - if vons and #vons > 0 then - result[#result+1] = concat(vons," ") - result[#result+1] = settings.vonsep or defaultsettings.vonsep - end - if surnames and #surnames > 0 then - result[#result+1] = concat(surnames," ") - if juniors and #juniors > 0 then - result[#result+1] = settings.surnamejuniorsep or defaultsettings.surnamejuniorsep - result[#result+1] = concat(juniors," ") - end - end - return concat(result) -end - -local lastconcatsize = 1 - -local function concatnames(t,settings) - local namesep = settings.namesep - local lastnamesep = settings.lastnamesep - local finalnamesep = settings.finalnamesep - local lastconcatsize = #t - if lastconcatsize > 2 then - local s = { } - for i=1,lastconcatsize-2 do - s[i] = t[i] .. namesep - end - s[lastconcatsize-1], s[lastconcatsize] = t[lastconcatsize-1] .. finalnamesep, t[lastconcatsize] - return concat(s) - elseif lastconcatsize > 1 then - return concat(t,lastnamesep) - elseif lastconcatsize > 0 then - return t[1] + if not symbol or symbol == "" then + return initials else - return "" + local result = { } + for i=1,#initials do + result[i] = initials[i] .. 
symbol + end + return result end end -local f_invalid = formatters[""] - -function authors.concat(dataset,tag,field,settings) - table.setmetatableindex(settings,defaultsettings) - local combiner = settings.combiner - if not combiner or type(combiner) == "string" then - combiner = authors[combiner or "normal"] or authors.normal - end +local ctx_btxsetconcat = context.btxsetconcat +local ctx_btxsetoverflow = context.btxsetoverflow +local ctx_btxsetinitials = context.btxsetinitials +local ctx_btxsetfirstnames = context.btxsetfirstnames +local ctx_btxsetvons = context.btxsetvons +local ctx_btxsetsurnames = context.btxsetsurnames +local ctx_btxsetjuniors = context.btxsetjuniors +local ctx_btxciteauthorsetup = context.btxciteauthorsetup +local ctx_btxlistauthorsetup = context.btxlistauthorsetup +local ctx_btxsetauthorvariant = context.btxsetauthorvariant +local ctx_btxstartauthor = context.btxstartauthor +local ctx_btxstopauthor = context.btxstopauthor + +local concatstate = publications.concatstate +local f_invalid = formatters[""] + +function commands.btxauthor(dataset,tag,field,settings) local ds = datasets[dataset] if not ds then return f_invalid("dataset",dataset) @@ -434,31 +261,58 @@ function authors.concat(dataset,tag,field,settings) if not split then return f_invalid("field",field) end - local etallimit = settings.etallimit or 1000 - local etaldisplay = settings.etaldisplay or etallimit local max = split and #split or 0 if max == 0 then -- error end + local etallimit = tonumber(settings.etallimit) or 1000 + local etaldisplay = tonumber(settings.etaldisplay) or etallimit + local combiner = settings.combiner + local symbol = settings.symbol + if not combiner or combiner == "" then + combiner = "normal" + end + if not symbol then + symbol = "." + end + local ctx_btxsetup = settings.kind == "cite" and ctx_btxciteauthorsetup or ctx_btxlistauthorsetup if max > etallimit and etaldisplay < max then max = etaldisplay end - local combined = { } for i=1,max do - combined[i] = combiner(split[i],settings) + ctx_btxstartauthor() -- i, max + ctx_btxsetconcat(concatstate(i,max)) + ctx_btxsetauthorvariant(combiner) + local author = split[i] + local initials = author.initials + if initials then + ctx_btxsetinitials(concat(the_initials(initials,symbol)," ")) + end + local firstnames = author.firstnames + if firstnames then + ctx_btxsetfirstnames(concat(firstnames," ")) + end + local vons = author.vons + if vons then + ctx_btxsetvons(concat(vons," ")) + end + local surnames = author.surnames + if surnames then + ctx_btxsetsurnames(concat(surnames," ")) + end + local juniors = author.juniors + if juniors then + ctx_btxsetjuniors(concat(juniors," ")) + end + ctx_btxsetup(combiner) + ctx_btxstopauthor() end - local result = concatnames(combined,settings) - if #combined <= max then - return result - else - return result .. settings.etaltext + local overflow = max - #split + if overflow > 0 then + ctx_btxsetoverflow(overflow) end end -function commands.btxauthor(...) - context(authors.concat(...)) -end - -- We can consider creating a hashtable key -> entry but I wonder if -- pays off. 
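Aside (not part of the patch itself): the reworked the_initials helper in the publ-aut.lua hunk above now returns the initials untouched when no symbol (or an empty one) is given, and otherwise appends the symbol to each entry, which is what feeds \currentbtxinitials in the new author setups. A standalone sketch of that behaviour, with invented sample initials:

    local concat = table.concat

    -- same logic as the helper in the hunk above, shown in isolation
    local function the_initials(initials,symbol)
        if not symbol or symbol == "" then
            return initials
        else
            local result = { }
            for i=1,#initials do
                result[i] = initials[i] .. symbol
            end
            return result
        end
    end

    print(concat(the_initials({ "J", "E" }, "."), " ")) -- J. E.
    print(concat(the_initials({ "J", "E" }, ""), " "))  -- J E
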
diff --git a/tex/context/base/publ-dat.lua b/tex/context/base/publ-dat.lua index fdb77b6d3..986ef75c2 100644 --- a/tex/context/base/publ-dat.lua +++ b/tex/context/base/publ-dat.lua @@ -639,3 +639,19 @@ end -- print(table.serialize(dataset.xmldata)) -- print(table.serialize(dataset.shortcuts)) -- print(xml.serialize(dataset.xmldata)) + +-- a helper: + +function publications.concatstate(i,n) + if i == 0 then + return 0 + elseif i == 1 then + return 1 + elseif i == 2 and n == 2 then + return 4 + elseif i == n then + return 3 + else + return 2 + end +end diff --git a/tex/context/base/publ-imp-author.mkvi b/tex/context/base/publ-imp-author.mkvi new file mode 100644 index 000000000..e21353f63 --- /dev/null +++ b/tex/context/base/publ-imp-author.mkvi @@ -0,0 +1,278 @@ +%D \module +%D [ file=publ-imp-author, +%D version=2014.06.23, +%D title=\CONTEXT\ Publication Support, +%D subtitle=Authors, +%D author=Hans Hagen, +%D date=\currentdate, +%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}] +%C +%C This module is part of the \CONTEXT\ macro||package and is +%C therefore copyrighted by \PRAGMA. See mreadme.pdf for +%C details. + +\unprotect + +% We can do a better check for pre-sep-post at the lua end but by keeping it at the +% tex end users can easier mess with it. So, we just assume sane names. +% +% maybe cite will just inherit from list (only \current.. alias) +% +% \startsetups \s!btx:\s!cite:\s!author:normal +% \fastsetup{\s!btx:\s!list:\s!author:normal} +% \stopsetups + +% You can adapt these setups to your liking, for instance as: + +% \startsetups btx:cite:author:normal +% \fastsetup{btx:cite:author:concat} +% \ifx\currentbtxfirstnames\empty \else +% \begingroup +% \bf +% \currentbtxfirstnames +% \endgroup +% \btxcitevariantparameter{firstnamesep} +% \fi +% \ifx\currentbtxvons\empty \else +% \currentbtxvons +% \btxcitevariantparameter{vonsep} +% \fi +% \ifx\currentbtxsurnames\empty \else +% \currentbtxsurnames +% \ifx\currentbtxjuniors\empty \else +% \btxcitevariantparameter{juniorsep} +% \currentbtxjuniors +% \fi +% \fi +% \fastsetup{btx:cite:author:etaltext} +% \stopsetups + +\startsetups \s!btx:\s!cite:\s!author:concat + \ifcase\currentbtxconcat \or \or + \btxcitevariantparameter\c!namesep + \or + \btxcitevariantparameter\c!lastnamesep + \or + \btxcitevariantparameter\c!finalnamesep + \fi +\stopsetups + +\startsetups \s!btx:\s!cite:\s!author:etaltext + \ifcase\currentbtxoverflow \else + \btxcitevariantparameter\c!etaltext + \fi +\stopsetups + +\startsetups \s!btx:\s!cite:\s!author:normal + \fastsetup{\s!btx:\s!cite:\s!author:concat} + \ifx\currentbtxfirstnames\empty \else + \currentbtxfirstnames + \btxcitevariantparameter\c!firstnamesep + \fi + \ifx\currentbtxvons\empty \else + \currentbtxvons + \ifx\currentbtxsurnames\empty \else + \btxcitevariantparameter\c!vonsep + \fi + \fi + \ifx\currentbtxsurnames\empty \else + \currentbtxsurnames + \ifx\currentbtxjuniors\empty \else + \btxcitevariantparameter\c!juniorsep + \currentbtxjuniors + \fi + \fi + \fastsetup{\s!btx:\s!cite:\s!author:etaltext} +\stopsetups + +\startsetups \s!btx:\s!cite:\s!author:normalshort + \fastsetup{\s!btx:\s!cite:\s!author:concat} + \ifx\currentbtxinitials\empty \else + \currentbtxinitials + \btxcitevariantparameter\c!initialsep + \fi + \ifx\currentbtxvons\empty \else + \currentbtxvons + \ifx\currentbtxsurnames\empty \else + \btxcitevariantparameter\c!vonsep + \fi + \fi + \ifx\currentbtxsurnames\empty \else + \currentbtxsurnames + \ifx\currentbtxjuniors\empty \else + \btxcitevariantparameter\c!juniorsep + 
\currentbtxjuniors + \fi + \fi + \fastsetup{\s!btx:\s!cite:\s!author:etaltext} +\stopsetups + +\startsetups \s!btx:\s!cite:\s!author:inverted + \fastsetup{\s!btx:\s!cite:\s!author:concat} + \ifx\currentbtxvons\empty \else + \currentbtxvons + \btxcitevariantparameter\c!vonsep + \fi + \ifx\currentbtxsurnames\empty \else + \currentbtxsurnames + \ifx\currentbtxjuniors\empty \else + \btxcitevariantparameter\c!juniorsep + \currentbtxjuniors + \fi + \fi + \ifx\currentbtxfirstnames\empty + % firstnames are optional + \else + \btxcitevariantparameter\c!surnamefirstnamesep + \currentbtxfirstnames + \fi + \fastsetup{\s!btx:\s!cite:\s!author:etaltext} +\stopsetups + +\startsetups \s!btx:\s!cite:\s!author:invertedshort + \fastsetup{\s!btx:\s!cite:\s!author:concat} + \ifx\currentbtxvons\empty \else + \currentbtxvons + \btxcitevariantparameter\c!vonsep + \fi + \ifx\currentbtxsurnames\empty \else + \currentbtxsurnames + \ifx\currentbtxjuniors\empty \else + \btxcitevariantparameter\c!juniorsep + \currentbtxjuniors + \fi + \fi + \ifx\currentbtxinitials\empty + % initials are optional + \else + \btxcitevariantparameter\c!surnameinitialsep + \currentbtxinitials + \fi + \fastsetup{\s!btx:\s!cite:\s!author:etaltext} +\stopsetups + +\startsetups \s!btx:\s!cite:\s!author:name + \fastsetup{\s!btx:\s!cite:\s!author:concat} + \ifx\currentbtxvons\empty \else + \currentbtxvons + \btxcitevariantparameter\c!vonsep + \fi + \currentbtxsurnames + \fastsetup{\s!btx:\s!cite:\s!author:etaltext} +\stopsetups + +% list (mostly the same) + +\startsetups \s!btx:\s!list:\s!author:concat + \ifcase\currentbtxconcat \or \or + \btxlistvariantparameter\c!namesep + \or + \btxlistvariantparameter\c!lastnamesep + \or + \btxlistvariantparameter\c!finalnamesep + \fi +\stopsetups + +\startsetups \s!btx:\s!list:\s!author:etaltext + \btxcitevariantparameter\c!etaltext +\stopsetups + +\startsetups \s!btx:\s!list:\s!author:normal + \fastsetup{\s!btx:\s!list:\s!author:concat} + \ifx\currentbtxfirstnames\empty \else + \currentbtxfirstnames + \btxlistvariantparameter\c!firstnamesep + \fi + \ifx\currentbtxvons\empty \else + \currentbtxvons + \ifx\currentbtxsurnames\empty \else + \btxlistvariantparameter\c!vonsep + \fi + \fi + \ifx\currentbtxsurnames\empty \else + \currentbtxsurnames + \ifx\currentbtxjuniors\empty \else + \btxlistvariantparameter\c!juniorsep + \currentbtxjuniors + \fi + \fi + \fastsetup{\s!btx:\s!list:\s!author:etaltext} +\stopsetups + +\startsetups \s!btx:\s!list:\s!author:normalshort + \fastsetup{\s!btx:\s!list:\s!author:concat} + \ifx\currentbtxinitials\empty \else + \currentbtxinitials + \btxlistvariantparameter\c!initialsep + \fi + \ifx\currentbtxvons\empty \else + \currentbtxvons + \ifx\currentbtxsurnames\empty \else + \btxlistvariantparameter\c!vonsep + \fi + \fi + \ifx\currentbtxsurnames\empty \else + \currentbtxsurnames + \ifx\currentbtxjuniors\empty \else + \btxlistvariantparameter\c!juniorsep + \currentbtxjuniors + \fi + \fi + \fastsetup{\s!btx:\s!list:\s!author:etaltext} +\stopsetups + +\startsetups \s!btx:\s!list:\s!author:inverted + \fastsetup{\s!btx:\s!list:\s!author:concat} + \ifx\currentbtxvons\empty \else + \currentbtxvons + \btxlistvariantparameter\c!vonsep + \fi + \ifx\currentbtxsurnames\empty \else + \currentbtxsurnames + \ifx\currentbtxjuniors\empty \else + \btxlistvariantparameter\c!juniorsep + \currentbtxjuniors + \fi + \fi + \ifx\currentbtxfirstnames\empty + % firstnames are optional + \else + \btxlistvariantparameter\c!surnamefirstnamesep + \currentbtxfirstnames + \fi + 
\fastsetup{\s!btx:\s!list:\s!author:etaltext} +\stopsetups + +\startsetups \s!btx:\s!list:\s!author:invertedshort + \fastsetup{\s!btx:\s!list:\s!author:concat} + \ifx\currentbtxvons\empty \else + \currentbtxvons + \btxlistvariantparameter\c!vonsep + \fi + \ifx\currentbtxsurnames\empty \else + \currentbtxsurnames + \ifx\currentbtxjuniors\empty \else + \btxlistvariantparameter\c!juniorsep + \currentbtxjuniors + \fi + \fi + \ifx\currentbtxinitials\empty + % initials are optional + \else + \btxlistvariantparameter\c!surnameinitialsep + \currentbtxinitials + \fi + \fastsetup{\s!btx:\s!list:\s!author:etaltext} +\stopsetups + +\startsetups \s!btx:\s!list:\s!author:name + \fastsetup{\s!btx:\s!list:\s!author:concat} + \ifx\currentbtxvons\empty \else + \currentbtxvons + \btxlistvariantparameter\c!vonsep + \fi + \currentbtxsurnames + \fastsetup{\s!btx:\s!list:\s!author:etaltext} +\stopsetups + +\protect diff --git a/tex/context/base/publ-imp-cite.mkvi b/tex/context/base/publ-imp-cite.mkvi index 1580062bf..6ef584699 100644 --- a/tex/context/base/publ-imp-cite.mkvi +++ b/tex/context/base/publ-imp-cite.mkvi @@ -59,7 +59,7 @@ \startsetups \s!btx:\s!cite:common:normal \ifx\currentbtxfirst\empty - \directsetup{\s!btx:\s!cite:\s!unknown} + \fastsetup{\s!btx:\s!cite:\s!unknown} \else\ifx\currentbtxsecond\empty \btxcitereference \currentbtxfirst @@ -73,7 +73,7 @@ \startsetups \s!btx:\s!cite:common:range \ifx\currentbtxfirst\empty - \directsetup{\s!btx:\s!cite:\s!unknown} + \fastsetup{\s!btx:\s!cite:\s!unknown} \else\ifx\currentbtxsecond\empty \btxcitereference \currentbtxfirst @@ -96,9 +96,9 @@ \stopsetups \startsetups \s!btx:\s!cite:render:normal - \directsetup{\s!btx:\s!cite:concat} + \fastsetup{\s!btx:\s!cite:concat} \ifx\currentbtxfirst\empty - \directsetup{\s!btx:\s!cite:\s!unknown} + \fastsetup{\s!btx:\s!cite:\s!unknown} \else \texdefinition {btx:cite:inject} { \btxcitereference @@ -112,9 +112,9 @@ \stopsetups \startsetups \s!btx:\s!cite:render:range - \directsetup{\s!btx:\s!cite:concat} + \fastsetup{\s!btx:\s!cite:concat} \ifx\currentbtxfirst\empty - \directsetup{\s!btx:\s!cite:missing} + \fastsetup{\s!btx:\s!cite:missing} \else \texdefinition {btx:cite:inject} { \btxcitereference @@ -128,103 +128,101 @@ \stopsetups \startsetups \s!btx:\s!cite:render:variant - \directsetup{\s!btx:\s!cite:concat} + \fastsetup{\s!btx:\s!cite:concat} \texdefinition {btx:cite:inject} { - \directsetup{\s!btx:\s!cite:render:\currentbtxcitevariant} + \fastsetup{\s!btx:\s!cite:render:\currentbtxcitevariant} } \stopsetups -% author lists: can be less - \startsetups \s!btx:\s!cite:common:author \ifx\currentbtxfirst\empty - \directsetup{\s!btx:\s!cite:\s!unknown} - \else\ifx\currentbtxcitevariant\v!normal - \btxcitereference - \currentbtxfirst + \fastsetup{\s!btx:\s!cite:\s!unknown} \else \btxcitereference - \currentbtxciteauthor - \fi\fi + \currentbtxfirst + \fi \ifx\currentbtxsecond\empty \else + \relax % keeps a following space \btxcitevariantparameter\v!inbetween \currentbtxsecond \fi \stopsetups +% one level will be removed + \startsetups \s!btx:\s!cite:render:author - \directsetup{\s!btx:\s!cite:common:author} + \fastsetup{\s!btx:\s!cite:common:author} \stopsetups \startsetups \s!btx:\s!cite:render:authoryear - \directsetup{\s!btx:\s!cite:common:author} + \fastsetup{\s!btx:\s!cite:common:author} \stopsetups \startsetups \s!btx:\s!cite:render:authoryears - \directsetup{\s!btx:\s!cite:common:author} + \fastsetup{\s!btx:\s!cite:common:author} \stopsetups \startsetups \s!btx:\s!cite:render:authornum - 
\directsetup{\s!btx:\s!cite:common:author} + \fastsetup{\s!btx:\s!cite:common:author} \stopsetups \startsetups \s!btx:\s!cite:author:num - \directsetup{\s!btx:\s!cite:render:range} + \fastsetup{\s!btx:\s!cite:render:range} \stopsetups \startsetups \s!btx:\s!cite:author:year - \directsetup{\s!btx:\s!cite:render:range} + \fastsetup{\s!btx:\s!cite:render:range} \stopsetups \startsetups \s!btx:\s!cite:author:years - \directsetup{\s!btx:\s!cite:render:range} + \fastsetup{\s!btx:\s!cite:render:range} \stopsetups \startsetups \s!btx:\s!cite:author - \directsetup{\s!btx:\s!cite:render:variant} + \fastsetup{\s!btx:\s!cite:render:variant} \stopsetups \startsetups \s!btx:\s!cite:authoryear - \directsetup{\s!btx:\s!cite:render:variant} + \fastsetup{\s!btx:\s!cite:render:variant} \stopsetups \startsetups \s!btx:\s!cite:authoryears - \directsetup{\s!btx:\s!cite:render:variant} + \fastsetup{\s!btx:\s!cite:render:variant} \stopsetups \startsetups \s!btx:\s!cite:authornum - \directsetup{\s!btx:\s!cite:render:variant} + \fastsetup{\s!btx:\s!cite:render:variant} \stopsetups \startsetups \s!btx:\s!cite:year - \directsetup{\s!btx:\s!cite:render:range} + \fastsetup{\s!btx:\s!cite:render:range} \stopsetups \startsetups \s!btx:\s!cite:short - \directsetup{\s!btx:\s!cite:render:normal} + \fastsetup{\s!btx:\s!cite:render:normal} \stopsetups \startsetups \s!btx:\s!cite:serial - \directsetup{\s!btx:\s!cite:render:range} + \fastsetup{\s!btx:\s!cite:render:range} \stopsetups \startsetups \s!btx:\s!cite:tag - \directsetup{\s!btx:\s!cite:render:normal} + \fastsetup{\s!btx:\s!cite:render:normal} \stopsetups \startsetups \s!btx:\s!cite:key - \directsetup{\s!btx:\s!cite:render:normal} + \fastsetup{\s!btx:\s!cite:render:normal} \stopsetups %startsetups \s!btx:\s!cite:doi -% \directsetup{\s!btx:\s!cite:render:normal} +% \fastsetup{\s!btx:\s!cite:render:normal} %stopsetups %startsetups \s!btx:\s!cite:url -% \directsetup{\s!btx:\s!cite:render:normal} +% \fastsetup{\s!btx:\s!cite:render:normal} %stopsetups \startsetups \s!btx:\s!cite:category - \directsetup{\s!btx:\s!cite:render:normal} + \fastsetup{\s!btx:\s!cite:render:normal} \stopsetups \startsetups \s!btx:\s!cite:type - \directsetup{\s!btx:\s!cite:render:normal} + \fastsetup{\s!btx:\s!cite:render:normal} \stopsetups \startsetups \s!btx:\s!cite:num - \directsetup{\s!btx:\s!cite:render:range} + \fastsetup{\s!btx:\s!cite:render:range} \stopsetups \startsetups \s!btx:\s!cite:page - \directsetup{\s!btx:\s!cite:render:normal} + \fastsetup{\s!btx:\s!cite:render:normal} \stopsetups \startsetups \s!btx:\s!cite:render:doi \ifx\currentbtxfirst\empty - \directsetup{\s!btx:\s!cite:\s!unknown} + \fastsetup{\s!btx:\s!cite:\s!unknown} \else \btxcitereference \hyphenatedurl{\currentbtxfirst} @@ -233,7 +231,7 @@ \startsetups \s!btx:\s!cite:url \ifx\currentbtxfirst\empty - \directsetup{\s!btx:\s!cite:\s!unknown} + \fastsetup{\s!btx:\s!cite:\s!unknown} \else\ifconditional\btxinteractive \goto { \btxcitereference diff --git a/tex/context/base/publ-ini.lua b/tex/context/base/publ-ini.lua index d88c3e381..4d4c9ef09 100644 --- a/tex/context/base/publ-ini.lua +++ b/tex/context/base/publ-ini.lua @@ -90,12 +90,10 @@ local ctx_firstoftwoarguments = context.firstoftwoarguments local ctx_secondoftwoarguments = context.secondoftwoarguments local ctx_firstofoneargument = context.firstofoneargument local ctx_gobbleoneargument = context.gobbleoneargument ------ ctx_directsetup = context.directsetup local ctx_btxlistparameter = context.btxlistparameter local ctx_btxcitevariantparameter = 
context.btxcitevariantparameter local ctx_btxlistvariantparameter = context.btxlistvariantparameter ------ ctx_btxdomarkcitation = context.btxdomarkcitation local ctx_btxdirectlink = context.btxdirectlink local ctx_btxhandlelistentry = context.btxhandlelistentry local ctx_btxchecklistentry = context.btxchecklistentry @@ -107,7 +105,6 @@ local ctx_btxmissing = context.btxmissing local ctx_btxsetdataset = context.btxsetdataset local ctx_btxsettag = context.btxsettag local ctx_btxsetlanguage = context.btxsetlanguage -local ctx_btxsetindex = context.btxsetindex local ctx_btxsetcombis = context.btxsetcombis local ctx_btxsetcategory = context.btxsetcategory local ctx_btxcitesetup = context.btxcitesetup @@ -117,8 +114,12 @@ local ctx_btxsetinternal = context.btxsetinternal local ctx_btxsetbacklink = context.btxsetbacklink local ctx_btxsetbacktrace = context.btxsetbacktrace local ctx_btxsetcount = context.btxsetcount ------ ctx_btxsetrealpage = context.btxsetrealpage local ctx_btxsetconcat = context.btxsetconcat +local ctx_btxsetoveflow = context.btxsetoverflow +local ctx_btxstartcite = context.btxstartcite +local ctx_btxstopcite = context.btxstopcite +local ctx_btxstartciteauthor = context.btxstartciteauthor +local ctx_btxstopciteauthor = context.btxstopciteauthor local ctx_btxstartsubcite = context.btxstartsubcite local ctx_btxstopsubcite = context.btxstopsubcite local ctx_btxlistsetup = context.btxlistsetup @@ -442,33 +443,24 @@ local function findallused(dataset,reference,internal) return okay, todo, tags end -local function flushcollected(reference,flush,nofcollected) - if nofcollected > 0 then - flush(1,1) - if nofcollected > 2 then - for i=2,nofcollected-1 do - flush(i,2) - end - flush(nofcollected,3) - elseif nofcollected > 1 then - flush(nofcollected,4) - end - else - ctx_btxsettag(reference) - ctx_btxcitesetup("unknown") - end +local function unknowncite(reference) + ctx_btxsettag(reference) + ctx_btxcitesetup("unknown") end +local concatstate = publications.concatstate + local tobemarked = nil function marknocite(dataset,tag,nofcitations) -- or just: ctx_btxdomarkcitation - ctx_btxsetdataset(dataset) - ctx_btxsettag(tag) - ctx_btxsetbacklink(nofcitations) - ctx_btxcitesetup("nocite") + ctx_btxstartcite() + ctx_btxsetdataset(dataset) + ctx_btxsettag(tag) + ctx_btxsetbacklink(nofcitations) + ctx_btxcitesetup("nocite") + ctx_btxstopcite() end - local function markcite(dataset,tag,flush) if not tobemarked then return 0 @@ -1169,7 +1161,6 @@ function lists.flushentries(dataset,sorttype) local tag = li[1] local entry = luadata[tag] if entry and (forceall or repeated or not used[tag]) then - ctx_btxsetindex(i) local combined = entry.combined if combined then ctx_btxsetcombis(concat(combined,",")) @@ -1448,9 +1439,10 @@ local function processcite(dataset,reference,mark,compress,setup,internal,getter source[i] = data end - local function flushindeed(state,entry,tag) + local function flush(i,n,entry,tag) local tag = tag or entry.tag local currentcitation = markcite(dataset,tag) + ctx_btxstartcite() ctx_btxsettag(tag) ctx_btxsetbacklink(currentcitation) local bl = listtocite[currentcitation] @@ -1468,27 +1460,36 @@ local function processcite(dataset,reference,mark,compress,setup,internal,getter if not setter(entry,entry.last) then ctx_btxsetfirst(f_missing(tag)) end - ctx_btxsetconcat(state) + ctx_btxsetconcat(concatstate(i,n)) ctx_btxcitesetup(setup) + ctx_btxstopcite() end if compress and not badkey then local target = (compressor or compresslist)(source) - local function flush(i,state) - local 
entry = target[i] - local first = entry.first - if first then - flushindeed(state,first,list[1]) -- somewhat messy as we can be sorted so this needs checking! might be wrong - else - flushindeed(state,entry) + local nofcollected = #target + if nofcollected == 0 then + unknowncite(reference) + else + for i=1,nofcollected do + local entry = target[i] + local first = entry.first + if first then + flush(i,nofcollected,first,list[1]) -- somewhat messy as we can be sorted so this needs checking! might be wrong + else + flush(i,nofcollected,entry) + end end end - flushcollected(reference,flush,#target) else - local function flush(i,state) - flushindeed(state,source[i]) + local nofcollected = #source + if nofcollected == 0 then + unknowncite(reference) + else + for i=1,nofcollected do + flush(i,nofcollected,source[i]) + end end - flushcollected(reference,flush,#source) end end if tobemarked then @@ -1713,26 +1714,6 @@ function citevariants.tag(dataset,reference,mark,compress,variant,internal) return processcite(dataset,reference,mark,compress,"tag",internal,setter,getter) end --- author - -local function setter(dataset,tag,entry,internal) - return { - dataset = dataset, - tag = tag, - internal = internal, - author = getfield(dataset,tag,"author"), - } -end - -local function getter(first,last) - ctx_btxsetfirst(first.author) -- todo: formatted - return true -end - -function citevariants.author(dataset,reference,mark,compress,variant,internal) - processcite(dataset,reference,mark,false,"author",internal,setter,getter) -end - -- todo : sort -- todo : choose between publications or commands namespace -- todo : use details.author @@ -1743,6 +1724,11 @@ end -- common +local currentbtxciteauthor = function() + context.currentbtxciteauthor() + return true -- needed? +end + local function authorcompressor(found) local result = { } local entries = { } @@ -1775,43 +1761,50 @@ local function authorcompressor(found) end local function authorconcat(target,key,setup) - local function flush(i,state) - local entry = target[i] - local first = entry.first - local tag = entry.tag - local currentcitation = markcite(entry.dataset,tag) - ctx_btxsettag(tag) - ctx_btxsetbacklink(currentcitation) - local bl = listtocite[currentcitation] - ctx_btxsetinternal(bl and bl.references.internal or "") - if first then - ctx_btxsetfirst(first[key] or f_missing(first.tag)) - local suffix = entry.suffix - local value = entry.last[key] - if suffix then - ctx_btxsetsecond(value .. converters.characters(suffix)) - else - ctx_btxsetsecond(value) - end - else - local suffix = entry.suffix - local value = entry[key] or f_missing(tag) - if suffix then - ctx_btxsetfirst(value .. converters.characters(suffix)) + ctx_btxstartsubcite(setup) + local nofcollected = #target + if nofcollected == 0 then + unknowncite(tag) + else + for i=1,nofcollected do + local entry = target[i] + local first = entry.first + local tag = entry.tag + local currentcitation = markcite(entry.dataset,tag) + ctx_btxstartciteauthor() + ctx_btxsettag(tag) + ctx_btxsetbacklink(currentcitation) + local bl = listtocite[currentcitation] + ctx_btxsetinternal(bl and bl.references.internal or "") + if first then + ctx_btxsetfirst(first[key] or f_missing(first.tag)) + local suffix = entry.suffix + local value = entry.last[key] + if suffix then + ctx_btxsetsecond(value .. 
converters.characters(suffix)) + else + ctx_btxsetsecond(value) + end else - ctx_btxsetfirst(value) + local suffix = entry.suffix + local value = entry[key] or f_missing(tag) + if suffix then + ctx_btxsetfirst(value .. converters.characters(suffix)) + else + ctx_btxsetfirst(value) + end end + ctx_btxsetconcat(concatstate(i,nofcollected)) + ctx_btxcitesetup(setup) + ctx_btxstopciteauthor() end - ctx_btxsetconcat(state) - ctx_btxcitesetup(setup) end - ctx_btxstartsubcite(setup) - flushcollected(setup,flush,#target) ctx_btxstopsubcite() end local function authorsingle(entry,key,setup) ctx_btxstartsubcite(setup) + ctx_btxstartciteauthor() local tag = entry.tag ctx_btxsettag(tag) -- local currentcitation = markcite(entry.dataset,tag) @@ -1820,13 +1813,15 @@ local function authorsingle(entry,key,setup) -- ctx_btxsetinternal(bl and bl.references.internal or "") ctx_btxsetfirst(entry[key] or f_missing(tag)) ctx_btxcitesetup(setup) + ctx_btxstopciteauthor() ctx_btxstopsubcite() end local partialinteractive = false local function authorgetter(first,last,key,setup) -- only first - ctx_btxsetfirst(first.author) -- todo: reformat + -- ctx_btxsetfirst(first.author) -- unformatted + ctx_btxsetfirst(currentbtxciteauthor) -- formatter (much slower) local entries = first.entries -- alternatively we can use a concat with one ... so that we can only make the -- year interactive, as with the concat @@ -1835,15 +1830,38 @@ local function authorgetter(first,last,key,setup) -- only first end if entries then local c = compresslist(entries) + local f = function() authorconcat(c,key,setup) return true end -- indeed return true? ctx_btxsetcount(#c) - ctx_btxsetsecond(function() authorconcat(c,key,setup) return true end) -- indeed return true? + ctx_btxsetsecond(f) else + local f = function() authorsingle(first,key,setup) return true end -- indeed return true? ctx_btxsetcount(0) - ctx_btxsetsecond(function() authorsingle(first,key,setup) return true end) -- indeed return true? 
+ ctx_btxsetsecond(f) end return true end +-- author + +local function setter(dataset,tag,entry,internal) + return { + dataset = dataset, + tag = tag, + internal = internal, + author = getfield(dataset,tag,"author"), + } +end + +local function getter(first,last,_,setup) + -- ctx_btxsetfirst(first.author) -- unformatted + ctx_btxsetfirst(currentbtxciteauthor) -- formatter (much slower) + return true +end + +function citevariants.author(dataset,reference,mark,compress,variant,internal) + processcite(dataset,reference,mark,false,"author",internal,setter,getter) +end + -- authornum local function setter(dataset,tag,entry,internal) @@ -1864,7 +1882,7 @@ local function getter(first,last) end local function compressor(found) - return authorcompressor(found) + return authorcompressor(found) -- can be just an alias end function citevariants.authornum(dataset,reference,mark,compress,variant,internal) diff --git a/tex/context/base/publ-ini.mkiv b/tex/context/base/publ-ini.mkiv index e32e99114..5f8e335fe 100644 --- a/tex/context/base/publ-ini.mkiv +++ b/tex/context/base/publ-ini.mkiv @@ -52,6 +52,7 @@ \def\s!cite {cite} \def\s!nocite {nocite} \def\s!list {list} +\def\s!author {author} \def\v!btxcite {btxcite} \def\v!btxlist {btxlist} \def\v!btxrendering {btxrendering} @@ -94,24 +95,11 @@ \unexpanded\def\stopbtxlistentry {\csname\??constructionstophandler\currentconstructionhandler\endcsname} -% interactivity is handled in setup -% -% \unexpanded\setvalue{\??constructiontexthandler\v!btxlist}% -% {\begingroup -% \ifx\currentbtxnumbering\empty\else -% \startgoto[\s!internal(\currentbtxinternal)]% handled in setup -% \fi -% \directsetup{\v!btxrendering:\v!number:\constructionparameter\c!number}% -% \ifx\currentbtxnumbering\empty\else -% \stopgoto -% \fi -% \endgroup} -% -% so: +% interactivity is handled in setups \unexpanded\setvalue{\??constructiontexthandler\v!btxlist}% {\begingroup - \directsetup{\v!btxrendering:\v!number:\constructionparameter\c!number}% + \fastsetup{\v!btxrendering:\v!number:\constructionparameter\c!number}% \endgroup} % the whole entry can be interactive @@ -147,9 +135,7 @@ \relax \stopsetups -% todo: low level builder commands without using the constructor - -% construction +% construction (todo:low level builder commands without using the constructor) \unexpanded\def\strc_constructions_initialize#1% class instance {\edef\currentconstruction{#1}% @@ -221,7 +207,6 @@ \installcorenamespace {btxcitevariant} \installcorenamespace {btxrendering} \installcorenamespace {btxcommand} -%installcorenamespace {btxnumbering} \installcorenamespace {btxrenderingdefinition} \installcommandhandler \??btxdataset {btxdataset} \??btxdataset @@ -333,15 +318,14 @@ % \let\btxsetdataset\setbtxdataset % \let\btxsetentry \setbtxentry -\def\btxfield #1{\ctxcommand{btxfield("\currentbtxdataset","\currentbtxtag","#1")}} -\def\btxdetail #1{\ctxcommand{btxdetail("\currentbtxdataset","\currentbtxtag","#1")}} -\def\btxflush #1{\ctxcommand{btxflush("\currentbtxdataset","\currentbtxtag","#1")}} -%def\btxrendering#1{\ctxcommand{btxrendering("\currentbtxdataset","\currentbtxtag","#1","\btxrenderingparameter\c!interaction")}} -\def\btxdoifelse #1{\ctxcommand{btxdoifelse("\currentbtxdataset","\currentbtxtag","#1")}} -\def\btxdoif #1{\ctxcommand{btxdoif("\currentbtxdataset","\currentbtxtag","#1")}} -\def\btxdoifnot #1{\ctxcommand{btxdoifnot("\currentbtxdataset","\currentbtxtag","#1")}} +\def\btxfield #1{\ctxcommand{btxfield("\currentbtxdataset","\currentbtxtag","#1")}} +\def\btxdetail 
#1{\ctxcommand{btxdetail("\currentbtxdataset","\currentbtxtag","#1")}} +\def\btxflush #1{\ctxcommand{btxflush("\currentbtxdataset","\currentbtxtag","#1")}} +\def\btxdoifelse#1{\ctxcommand{btxdoifelse("\currentbtxdataset","\currentbtxtag","#1")}} +\def\btxdoif #1{\ctxcommand{btxdoif("\currentbtxdataset","\currentbtxtag","#1")}} +\def\btxdoifnot #1{\ctxcommand{btxdoifnot("\currentbtxdataset","\currentbtxtag","#1")}} -\let\btxsetup \directsetup +\let\btxsetup\fastsetup %D How complex will we go? Can we assume that e.g. an apa style will not be mixed %D with another one? I think this assumption is okay. For manuals we might want to @@ -361,7 +345,55 @@ \unexpanded\def\btxlbracket {\removeunwantedspaces\space[} \unexpanded\def\btxrbracket {\removeunwantedspaces]\space} -%D Rendering lists and citations. +%D Variables: + +\let\currentbtxbacklink \empty \unexpanded\def\btxsetbacklink {\def\currentbtxbacklink} +\let\currentbtxbacktrace \empty \unexpanded\def\btxsetbacktrace {\def\currentbtxbacktrace} +\let\currentbtxcategory \empty \unexpanded\def\btxsetcategory {\def\currentbtxcategory} +\let\currentbtxcombis \empty \unexpanded\def\btxsetcombis {\def\currentbtxcombis} +\let\currentbtxdataset \empty \unexpanded\def\btxsetdataset {\def\currentbtxdataset} +\let\currentbtxfirst \empty \unexpanded\def\btxsetfirst {\def\currentbtxfirst} +\let\currentbtxfirstnames \empty \unexpanded\def\btxsetfirstnames {\def\currentbtxfirstnames} +\let\currentbtxinitials \empty \unexpanded\def\btxsetinitials {\def\currentbtxinitials} +\let\currentbtxinternal \empty \unexpanded\def\btxsetinternal {\def\currentbtxinternal} +\let\currentbtxjuniors \empty \unexpanded\def\btxsetjuniors {\def\currentbtxjuniors} +\let\currentbtxlanguage \empty \unexpanded\def\btxsetlanguage {\def\currentbtxlanguage} +\let\currentbtxsecond \empty \unexpanded\def\btxsetsecond {\def\currentbtxsecond} +\let\currentbtxsurnames \empty \unexpanded\def\btxsetsurnames {\def\currentbtxsurnames} +\let\currentbtxtag \empty \unexpanded\def\btxsettag {\def\currentbtxtag} +\let\currentbtxvons \empty \unexpanded\def\btxsetvons {\def\currentbtxvons} +\let\currentbtxauthorvariant\v!normal \unexpanded\def\btxsetauthorvariant{\def\currentbtxauthorvariant} + +\newconstant\currentbtxoverflow \unexpanded\def\btxsetoverflow#1{\currentbtxoverflow#1\relax} +\newconstant\currentbtxconcat \unexpanded\def\btxsetconcat #1{\currentbtxconcat #1\relax} +\newconstant\currentbtxcount \unexpanded\def\btxsetcount #1{\currentbtxcount #1\relax} + +\def\currentbtxauthorvariant{normal} + +\unexpanded\def\btxlistreset + {\let\currentbtxcombis \empty + \let\currentbtxcategory \empty + \let\currentbtxinternal \empty + \let\currentbtxbacklink \empty + \let\currentbtxbacktrace\empty + \let\currentbtxlanguage \empty + \let\currentbtxtag \empty + \let\currentbtxdataset \empty} + +\unexpanded\def\btxcitereset % check for less .. not all resets needed + {\let \currentbtxfirst \empty + \let \currentbtxsecond \empty + \let \currentbtxinternal \empty + \let \currentbtxbacklink \empty + \let \currentbtxbacktrace\empty % not used here + \let \currentbtxlanguage \empty + \let \currentbtxdataset \empty + \let \currentbtxtag \empty + \setconstant\currentbtxoverflow \zerocount + \setconstant\currentbtxconcat \zerocount + \setconstant\currentbtxcount \zerocount} + +%D Tracing \newconditional\c_btx_trace @@ -370,6 +402,8 @@ {\settrue \c_btx_trace} {\setfalse\c_btx_trace} +%D Rendering lists and citations. 
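[editorial aside, not part of the patch] The \btxsetconcat / \currentbtxconcat pair defined above is fed from the Lua end by publications.concatstate, whose definition is not included in this diff. A minimal sketch, assuming it mirrors the removed flushcollected logic in publ-ini.lua and the \ifcase branches of the btx:...:author:concat setups, would be:

    -- illustrative sketch only: 0/1 flush no separator, 2 selects namesep,
    -- 3 lastnamesep (last of three or more), 4 finalnamesep (second of exactly two)
    local function concatstate(i,n)
        if i == 0 then
            return 0
        elseif i == 1 then
            return 1
        elseif i == n then
            return n == 2 and 4 or 3
        else
            return 2
        end
    end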
+ \unexpanded\def\startbtxrendering {\begingroup \dosingleempty\btx_start_rendering} @@ -396,7 +430,7 @@ %newdimen\d_publ_number_distance \ifdefined\btxblock \else \newcount\btxblock \fi \btxblock\plusone -\ifdefined\btxlistcounter \else \newcount\btxlistcounter \fi % maintaned here, maybe in lua too? +\ifdefined\btxlistcounter \else \newcount\btxlistcounter \fi % maintained here, maybe in lua too? \ifdefined\btxcitecounter \else \newcount\btxcitecounter \fi % maybe pass this to lua \newtoks \everysetupbtxlistplacement % name will change @@ -423,8 +457,6 @@ \fi \to \everydefinebtxrendering -\let\currentbtxcombis\empty % goes into the setups - \unexpanded\def\btx_entry_inject {\begingroup \edef\currentbtxcategory{\btxfield{category}}% @@ -436,7 +468,7 @@ \endgroup} \def\btx_entry_inject_yes - {\directsetup\currentbtxsetup + {\fastsetup\currentbtxsetup \removeunwantedspaces \ifx\currentbtxcombis\empty \else \btxrenderingparameter\c!separator @@ -450,7 +482,7 @@ {\begingroup \def\currentbtxtag{#1}% \ignorespaces - \directsetup{\s!btx:\currentbtxalternative:\currentbtxcategory}% + \fastsetup{\s!btx:\currentbtxalternative:\currentbtxcategory}% \removeunwantedspaces \endgroup} @@ -497,7 +529,7 @@ \def\publ_place_list_indeed {\startbtxrendering[\currentbtxrendering]% - % \directsetup{\btxrenderingparameter\c!setups}% specific initializations + % \fastsetup{\btxrenderingparameter\c!setups}% specific initializations % \determinelistcharacteristics[\currentbtxrendering]% \btx_set_rendering_alternative \edef\currentbtxdataset{\btxrenderingparameter\c!dataset}% @@ -540,36 +572,12 @@ \def\currentbtxblock{\number\btxblock} -\unexpanded\def\btxlistreset - {\let \currentbtxindex \!!zerocount % can be a constant - \let \currentbtxcombis \empty - \let \currentbtxcategory \empty - \let \currentbtxinternal \empty - \let \currentbtxbacklink \empty - \let \currentbtxbacktrace\empty - \let \currentbtxlanguage \empty - \let \currentbtxtag \empty - \let \currentbtxdataset \empty - %setconstant\currentbtxrealpage \zerocount - } - -\unexpanded\def\btxsetindex {\def\currentbtxindex} -\unexpanded\def\btxsetcombis {\def\currentbtxcombis} -\unexpanded\def\btxsetcategory {\def\currentbtxcategory} -\unexpanded\def\btxsetinternal {\def\currentbtxinternal} -\unexpanded\def\btxsetbacklink {\def\currentbtxbacklink} -\unexpanded\def\btxsetbacktrace{\def\currentbtxbacktrace} -%unexpanded\def\btxsetlanguage {\def\currentbtxlanguage} -\unexpanded\def\btxsettag {\def\currentbtxtag} - \unexpanded\def\btxsetlanguage#1% {\def\currentbtxlanguage{#1}% \ifx\currentbtxlanguage\currentlanguage \else \setcurrentlanguage\currentmainlanguage\currentbtxlanguage \fi} -\btxlistreset - % called at the lua end, for determining the width \unexpanded\def\btxchecklistentry @@ -592,7 +600,7 @@ \endgroup} \unexpanded\def\btxlistsetup#1% - {\directsetup{\s!btx:\s!list:#1}} + {\fastsetup{\s!btx:\s!list:#1}} \unexpanded\def\btx_reference_indeed {\begingroup @@ -664,13 +672,17 @@ \def\btx_flush_author_nop {\btx_flush_author{\btxlistvariantparameter\c!author}} \unexpanded\def\btx_flush_author#1#2% - {\edef\currentbtxfield{#2}% + {\begingroup + \edef\currentbtxfield{#2}% \let\currentbtxlistvariant\currentbtxfield \ctxcommand{btxauthor("\currentbtxdataset","\currentbtxtag","\currentbtxfield",{ combiner = "#1", + kind = "list", + % symbol = ".", etallimit = \number\btxlistvariantparameter\c!etallimit, etaldisplay = \number\btxlistvariantparameter\c!etaldisplay, - })}} + })}% + \endgroup} \unexpanded\def\btxflushauthorname {\btx_flush_author{name}} % #1 
\unexpanded\def\btxflushauthornormal {\btx_flush_author{normal}} % #1 @@ -679,11 +691,31 @@ \unexpanded\def\btxflushauthorinvertedshort{\btx_flush_author{invertedshort}} % #1 \unexpanded\def\currentbtxciteauthor % always author - {\ctxcommand{btxauthor("\currentbtxdataset","\currentbtxtag","author",{ + {\begingroup + \ctxcommand{btxauthor("\currentbtxdataset","\currentbtxtag","author",{ combiner = "\btxcitevariantparameter\c!authorconversion", + kind = "cite", + % symbol = ".", etallimit = \number\btxcitevariantparameter\c!etallimit, etaldisplay = \number\btxcitevariantparameter\c!etaldisplay, - })}} + })}% + \endgroup} + +\unexpanded\def\btxstartauthor{\begingroup} +\unexpanded\def\btxstopauthor {\endgroup} + +\unexpanded\def\btxciteauthorsetup#1{\fastsetup{\s!btx:\s!cite:\s!author:#1}} +\unexpanded\def\btxlistauthorsetup#1{\fastsetup{\s!btx:\s!list:\s!author:#1}} + +% \btxflushauthor{author} +% \btxflushauthor{artauthor} +% \btxflushauthor{editor} +% +% \btxflushauthor[name]{author} +% \btxflushauthor[normal]{author} +% \btxflushauthor[normalshort]{author} +% \btxflushauthor[inverted]{author} +% \btxflushauthor[invertedshort]{author} % \btxflushauthor{author} % \btxflushauthor{artauthor} @@ -765,24 +797,6 @@ %D following (not user) command. We could tag without injecting a node but this way %D we also store the location, which makes it possible to ask local lists. -% all done at the lua end and using the nocite setup .. also tracing -% -% \unexpanded\def\btxdomarkcitation % called from lua end -% {\dontleavehmode -% \iftrialtypesetting -% \expandafter\gobblethreearguments -% \else -% \expandafter\publ_cite_mark_citation -% \fi} -% -% \def\publ_cite_mark_citation#1#2#3% called from lua end -% {\begingroup -% \edef\currentbtxdataset{#1}% -% \edef\currentbtxtag{#2}% -% \edef\currentbtxbacklink{#3}% -% \btxcitesetup\s!nocite -% \endgroup} - %D \macros{cite,nocite,citation,nocitation,usecitation} %D %D The inline \type {\cite} command creates a (often) short reference to a publication @@ -933,29 +947,8 @@ %D Cite helpers: -\newconstant\currentbtxconcat -\newconstant\currentbtxcount -%newconstant\currentbtxrealpage - -\unexpanded\def\btxcitereset - {\let \currentbtxfirst \empty - \let \currentbtxsecond \empty - \let \currentbtxinternal \empty - \let \currentbtxbacklink \empty - \let \currentbtxbacktrace\empty % not used here - \let \currentbtxlanguage \empty - \let \currentbtxdataset \empty - \let \currentbtxtag \empty - %setconstant\currentbtxrealpage \zerocount - \setconstant\currentbtxconcat \zerocount - \setconstant\currentbtxcount \zerocount} - -\btxcitereset - \unexpanded\def\btxcitesetup#1% - {%\btx_cite_reference_inject - \directsetup{btx:cite:#1}% - \btxcitereset} + {\fastsetup{\s!btx:\s!cite:#1}} % no \btxcitereset as we loose dataset and such \unexpanded\def\btxsetfirst {\def\currentbtxfirst} \unexpanded\def\btxsetsecond {\def\currentbtxsecond} @@ -965,22 +958,24 @@ \unexpanded\def\btxsetinternal {\def\currentbtxinternal} \unexpanded\def\btxsetcount #1{\setconstant\currentbtxcount #1\relax} \unexpanded\def\btxsetconcat #1{\setconstant\currentbtxconcat #1\relax} -%unexpanded\def\btxsetrealpage #1{\setconstant\currentbtxrealpage#1\relax} +\unexpanded\def\btxsetoverflow #1{\setconstant\currentbtxoverflow#1\relax} -\unexpanded\def\btxstartsubcite#1% - {\bgroup - \btxcitereset +\unexpanded\def\btxstartsubcite#1% #1 can go + {\begingroup + \btxcitereset % todo: limited set \def\currentbtxcitevariant{#1}% - \btxcitevariantparameter\c!left} + \btxcitevariantparameter\c!left + \relax} 
\unexpanded\def\btxstopsubcite - {\btxcitevariantparameter\c!right - \egroup} - -%D List helpers: + {\relax + \btxcitevariantparameter\c!right + \endgroup} -\let\currentbtxindex \!!zerocount -\let\currentbtxcategory\empty +\let\btxstartcite \begingroup +\let\btxstopcite \endgroup +\let\btxstartciteauthor\begingroup +\let\btxstopciteauthor \endgroup %D Whatever helpers: @@ -1201,17 +1196,34 @@ [artauthor] [author] +% Not that efficient but ... + \setupbtxcitevariant - [\c!authorconversion=name, % \btxlistvariantparameter\c!authorconversion, - \c!etallimit =\btxlistvariantparameter\c!etallimit, - \c!etaldisplay =\btxlistvariantparameter\c!etaldisplay] + [\c!namesep=\btxlistvariantparameter\c!namesep, + \c!lastnamesep=\btxlistvariantparameter\c!lastnamesep, + \c!finalnamesep=\btxlistvariantparameter\c!finalnamesep, + \c!firstnamesep=\btxlistvariantparameter\c!firstnamesep, + \c!juniorsep=\btxlistvariantparameter\c!juniorsep, + \c!vonsep=\btxlistvariantparameter\c!vonsep, + \c!initialsep=\btxlistvariantparameter\c!initialsep, + \c!surnamesep=\btxlistvariantparameter\c!surnamesep, + \c!surnameinitialsep=\btxlistvariantparameter\c!surnameinitialsep, + \c!surnamefirstnamesep=\btxlistvariantparameter\c!surnamefirstnamesep, + \c!etallimit=\btxlistvariantparameter\c!etallimit, + \c!etaldisplay=\btxlistvariantparameter\c!etaldisplay, + \c!etaltext=\btxlistvariantparameter\c!etaltext, + \c!monthconversion=\btxlistvariantparameter\c!monthconversion, + \c!authorconversion=\v!name] % Do we want these in the format? Loading them delayed is somewhat messy. -\loadbtxdefinitionfile[apa] -\loadbtxdefinitionfile[cite] -\loadbtxdefinitionfile[list] \loadbtxdefinitionfile[commands] \loadbtxdefinitionfile[definitions] +\loadbtxdefinitionfile[cite] +\loadbtxdefinitionfile[list] +\loadbtxdefinitionfile[author] + +\loadbtxdefinitionfile[apa] + \protect diff --git a/tex/context/base/status-files.pdf b/tex/context/base/status-files.pdf index 8eee56299..95da7c75e 100644 Binary files a/tex/context/base/status-files.pdf and b/tex/context/base/status-files.pdf differ diff --git a/tex/context/base/status-lua.pdf b/tex/context/base/status-lua.pdf index 7541cdaba..b095cf6bc 100644 Binary files a/tex/context/base/status-lua.pdf and b/tex/context/base/status-lua.pdf differ diff --git a/tex/context/base/strc-con.mkvi b/tex/context/base/strc-con.mkvi index 1862b00a6..b04f9231d 100644 --- a/tex/context/base/strc-con.mkvi +++ b/tex/context/base/strc-con.mkvi @@ -352,10 +352,6 @@ \ifx\p_strc_constructions_align\empty \else \setupalign[\p_strc_constructions_align]% \use... \fi - \edef\p_strc_constructions_indenting{\constructionparameter\c!indenting}% - \ifx\p_strc_constructions_indenting\empty \else - \indenting[\p_strc_constructions_indenting]% \use... - \fi \ifcase\c_strc_constructions_nested_state \c_strc_constructions_nested_state\plusone \or @@ -366,6 +362,11 @@ \edef\p_strc_constructions_headalign{\constructionparameter\c!headalign}% % \directsetup\p_strc_constructions_renderingsetup\relax + % moved to here 2014-07-03 + \edef\p_strc_constructions_indenting{\constructionparameter\c!indenting}% + \ifx\p_strc_constructions_indenting\empty \else + \indenting[\p_strc_constructions_indenting]% \use... 
+ \fi % \dostoptagged % tag \dostarttagged\t!constructioncontent\empty diff --git a/tex/context/base/strc-reg.lua b/tex/context/base/strc-reg.lua index e9186e390..da4ba9b2d 100644 --- a/tex/context/base/strc-reg.lua +++ b/tex/context/base/strc-reg.lua @@ -64,6 +64,10 @@ local a_destination = attributes.private('destination') local absmaxlevel = 5 -- \c_strc_registers_maxlevel +local h_prefixpage = helpers.prefixpage +local h_prefixlastpage = helpers.prefixlastpage +local h_title = helpers.title + local ctx_startregisteroutput = context.startregisteroutput local ctx_stopregisteroutput = context.stopregisteroutput local ctx_startregistersection = context.startregistersection @@ -74,13 +78,115 @@ local ctx_startregisterentry = context.startregisterentry local ctx_stopregisterentry = context.stopregisterentry local ctx_startregisterpages = context.startregisterpages local ctx_stopregisterpages = context.stopregisterpages -local ctx_stopregisterseewords = context.stopregisterseewords local ctx_startregisterseewords = context.startregisterseewords +local ctx_stopregisterseewords = context.stopregisterseewords local ctx_registerentry = context.registerentry local ctx_registerseeword = context.registerseeword local ctx_registerpagerange = context.registerpagerange local ctx_registeronepage = context.registeronepage +-- possible export, but ugly code (overloads) +-- +-- local output, section, entries, nofentries, pages, words, rawtext +-- +-- h_title = function(a,b) rawtext = a end +-- +-- local function ctx_startregisteroutput() +-- output = { } +-- section = nil +-- entries = nil +-- nofentries = nil +-- pages = nil +-- words = nil +-- rawtext = nil +-- end +-- local function ctx_stopregisteroutput() +-- inspect(output) +-- output = nil +-- section = nil +-- entries = nil +-- nofentries = nil +-- pages = nil +-- words = nil +-- rawtext = nil +-- end +-- local function ctx_startregistersection(tag) +-- section = { } +-- output[#output+1] = { +-- section = section, +-- tag = tag, +-- } +-- end +-- local function ctx_stopregistersection() +-- end +-- local function ctx_startregisterentries(n) +-- entries = { } +-- nofentries = 0 +-- section[#section+1] = entries +-- end +-- local function ctx_stopregisterentries() +-- end +-- local function ctx_startregisterentry(n) -- or subentries (nested?) 
+-- nofentries = nofentries + 1 +-- entry = { } +-- entries[nofentries] = entry +-- end +-- local function ctx_stopregisterentry() +-- nofentries = nofentries - 1 +-- entry = entries[nofentries] +-- end +-- local function ctx_startregisterpages() +-- pages = { } +-- entry.pages = pages +-- end +-- local function ctx_stopregisterpages() +-- end +-- local function ctx_startregisterseewords() +-- words = { } +-- entry.words = words +-- end +-- local function ctx_stopregisterseewords() +-- end +-- local function ctx_registerentry(processor,internal,seeparent,text) +-- text() +-- entry.text = { +-- processor = processor, +-- internal = internal, +-- seeparent = seeparent, +-- text = rawtext, +-- } +-- end +-- local function ctx_registerseeword(i,n,processor,internal,seeindex,seetext) +-- seetext() +-- entry.words[i] = { +-- processor = processor, +-- internal = internal, +-- seeparent = seeparent, +-- seetext = rawtext, +-- } +-- end +-- local function ctx_registerpagerange(fprocessor,finternal,frealpage,lprocessor,linternal,lrealpage) +-- pages[#pages+1] = { +-- first = { +-- processor = fprocessor, +-- internal = finternal, +-- realpage = frealpage, +-- }, +-- last = { +-- processor = lprocessor, +-- internal = linternal, +-- realpage = lrealpage, +-- }, +-- } +-- end +-- local function ctx_registeronepage(processor,internal,realpage) +-- pages[#pages+1] = { +-- processor = processor, +-- internal = internal, +-- realpage = realpage, +-- } +-- end + -- some day we will share registers and lists (although there are some conceptual -- differences in the application of keywords) @@ -716,7 +822,6 @@ function registers.analyze(class,options) context(analyzeregister(class,options)) end - -- todo take conversion from index function registers.userdata(index,name) @@ -734,10 +839,6 @@ end -- todo: ownnumber -local h_prefixpage = helpers.prefixpage -local h_prefixlastpage = helpers.prefixlastpage -local h_title = helpers.title - local function pagerange(f_entry,t_entry,is_last,prefixspec,pagespec) local fer, ter = f_entry.references, t_entry.references ctx_registerpagerange( @@ -1063,7 +1164,8 @@ function registers.flush(data,options,prefixspec,pagespec) local seetext = seeword.text or "" local processor = seeword.processor or (entry.processors and entry.processors[1]) or "" local seeindex = entry.references.seeindex or "" - ctx_registerseeword(i,n,processor,0,seeindex,seetext) + -- ctx_registerseeword(i,nt,processor,0,seeindex,seetext) + ctx_registerseeword(i,nt,processor,0,seeindex,function() h_title(seetext,metadata) end) end ctx_stopregisterseewords() end diff --git a/tex/context/base/util-env.lua b/tex/context/base/util-env.lua index e96a464b0..b72226900 100644 --- a/tex/context/base/util-env.lua +++ b/tex/context/base/util-env.lua @@ -9,11 +9,11 @@ if not modules then modules = { } end modules ['util-env'] = { local allocate, mark = utilities.storage.allocate, utilities.storage.mark local format, sub, match, gsub, find = string.format, string.sub, string.match, string.gsub, string.find -local unquoted, quoted = string.unquoted, string.quoted +local unquoted, quoted, optionalquoted = string.unquoted, string.quoted, string.optionalquoted local concat, insert, remove = table.concat, table.insert, table.remove -environment = environment or { } -local environment = environment +environment = environment or { } +local environment = environment -- precautions @@ -182,26 +182,14 @@ function environment.splitarguments(separator) -- rather special, cut-off before end function 
environment.reconstructcommandline(arg,noquote) + local resolveprefix = resolvers.resolve -- something rather special arg = arg or environment.originalarguments if noquote and #arg == 1 then - -- we could just do: return unquoted(resolvers.resolve(arg[i])) - local a = arg[1] - a = resolvers.resolve(a) - a = unquoted(a) - return a + return unquoted(resolveprefix and resolveprefix(arg[1]) or arg[1]) elseif #arg > 0 then local result = { } for i=1,#arg do - -- we could just do: result[#result+1] = format("%q",unquoted(resolvers.resolve(arg[i]))) - local a = arg[i] - a = resolvers.resolve(a) - a = unquoted(a) - a = gsub(a,'"','\\"') -- tricky - if find(a," ",1,true) then - result[#result+1] = quoted(a) - else - result[#result+1] = a - end + result[i] = optionalquoted(resolveprefix and resolveprefix(arg[i]) or arg[i]) end return concat(result," ") else @@ -238,26 +226,10 @@ end -- print(environment.relativepath("//x")) -- //x -- print(environment.relativepath()) -- e:/tmp --- -- to be tested: --- --- function environment.reconstructcommandline(arg,noquote) --- arg = arg or environment.originalarguments --- if noquote and #arg == 1 then --- return unquoted(resolvers.resolve(arg[1])) --- elseif #arg > 0 then --- local result = { } --- for i=1,#arg do --- result[#result+1] = format("%q",unquoted(resolvers.resolve(arg[i]))) -- always quote --- end --- return concat(result," ") --- else --- return "" --- end --- end - if arg then -- new, reconstruct quoted snippets (maybe better just remove the " then and add them later) + local newarg, instring = { }, false for index=1,#arg do diff --git a/tex/context/base/util-str.lua b/tex/context/base/util-str.lua index 89d8bb48d..2739a20c4 100644 --- a/tex/context/base/util-str.lua +++ b/tex/context/base/util-str.lua @@ -1094,3 +1094,23 @@ end -- string.formatteds = formatteds -- -- setmetatable(formatteds, { __index = make, __call = use }) + +-- This is a somewhat silly one used in commandline reconstruction but the older +-- method, using a combination of find, gsub, quoted and unquoted was not that +-- reliable. +-- +-- '"foo"bar \"and " whatever"' => "foo\"bar \"and \" whatever" +-- 'foo"bar \"and " whatever' => "foo\"bar \"and \" whatever" + +local dquote = patterns.dquote -- P('"') +local equote = patterns.escaped + dquote / '\\"' + 1 +local space = patterns.space +local cquote = Cc('"') + +local pattern = + Cs(dquote * (equote - P(-2))^0 * dquote) -- we keep the outer but escape unescaped ones + + Cs(cquote * (equote - space)^0 * space * equote^0 * cquote) -- we escape unescaped ones + +function string.optionalquoted(str) + return lpegmatch(pattern,str) or str +end diff --git a/tex/context/test/pdf-a1b-2005.mkiv b/tex/context/test/pdf-a1b-2005.mkiv index f980e3148..bc970c3f9 100644 --- a/tex/context/test/pdf-a1b-2005.mkiv +++ b/tex/context/test/pdf-a1b-2005.mkiv @@ -1,9 +1,9 @@ % PDF/A-1b:2005 -\enabletrackers[structure.tags,backend.tags] +\enabletrackers[structure.tags,backend.tags,backend.xmp] \setupbackend - [format=PDF/A-1a:2005, + [format=PDF/A-1b:2005, intent=sRGB IEC61966-2.1, % use entry here; otherwise problems with predefined default profile profile=sRGB.icc, % use here level=0] @@ -20,8 +20,6 @@ Text is needed, otherwise tagging base entries are not applied.
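[editorial aside, not part of the patch] A usage sketch for the new string.optionalquoted added to util-str.lua above, which is what environment.reconstructcommandline now uses to requote arguments. The expected results are worked out from the lpeg pattern rather than taken from documentation, so treat them as an assumption:

    -- assumes the updated util-str.lua (with its lpeg patterns) is loaded
    print(string.optionalquoted([[foo]]))         -- foo          (no space, returned untouched)
    print(string.optionalquoted([[foo bar]]))     -- "foo bar"    (contains a space, so the whole string is quoted)
    print(string.optionalquoted([["foo "bar"]]))  -- "foo \"bar"  (outer quotes kept, inner unescaped quote escaped)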
-\stopchapter - \stoptextcolor %\startTEXpage diff --git a/tex/generic/context/luatex/luatex-fonts-merged.lua b/tex/generic/context/luatex/luatex-fonts-merged.lua index 5f509be8a..a62958c1c 100644 --- a/tex/generic/context/luatex/luatex-fonts-merged.lua +++ b/tex/generic/context/luatex/luatex-fonts-merged.lua @@ -1,6 +1,6 @@ -- merged file : luatex-fonts-merged.lua -- parent file : luatex-fonts.lua --- merge date : 06/27/14 10:53:59 +-- merge date : 07/03/14 14:52:08 do -- begin closure to overcome local limits and interference @@ -978,14 +978,14 @@ local function sortedhash(t,cmp) end local n=0 local m=#s - local function kv(s) + local function kv() if n