author    | Hans Hagen <pragma@wxs.nl> | 2011-06-23 19:25:00 +0200
committer | Hans Hagen <pragma@wxs.nl> | 2011-06-23 19:25:00 +0200
commit    | bddac3f564687065474739c7face6fcd93ceea69 (patch)
tree      | 26cb1a2ca4bf35c232da5b74347cb6aa5040a4da
parent    | 4001733dc25cb6743004db0e9319708481d91135 (diff)
download  | context-bddac3f564687065474739c7face6fcd93ceea69.tar.gz
beta 2011.06.23 19:25
98 files changed, 3589 insertions, 2377 deletions
diff --git a/scripts/context/lua/mtx-epub.lua b/scripts/context/lua/mtx-epub.lua index 94a1cc9d3..a4b96d3be 100644 --- a/scripts/context/lua/mtx-epub.lua +++ b/scripts/context/lua/mtx-epub.lua @@ -60,8 +60,9 @@ local package = [[ <metadata xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:opf="http://www.idpf.org/2007/opf"> <dc:title>My Title</dc:title> <dc:language>en</dc:language> - <dc:identifier id="%s" /> + <dc:identifier id="%s" >urn:uuid:%s</dc:identifier> <dc:creator opf:file-as="Self, My" opf:role="aut">MySelf</dc:creator> + <dc:date>%s</dc:date> </metadata> <manifest> @@ -111,8 +112,8 @@ local toc = [[ -- problems is some applications as do names with dashes. Also the -- optional toc is supposed to be there and although id's are by -- concept neutral, there are sometimes hard requirements with respect --- to their name like ncx and toc.ncx). Looks like application xml and --- no real clean standard. +-- to their name like ncx and toc.ncx). Maybe we should stick to 3.0 +-- only. local function dumbid(filename) -- return (string.gsub(os.uuid(),"%-%","")) -- to be tested @@ -121,6 +122,7 @@ end local mimetypes = { xhtml = "application/xhtml+xml", + xml = "application/xhtml+xml", css = "text/css", svg = "image/svg+xml", png = "image/png", @@ -235,7 +237,7 @@ function scripts.epub.make() local idmaker = idmakers[file.extname(root)] or idmakers.default container = format(container,epubroot) - package = format(package,identifier,identifier,concat(used,"\n"),idmaker(root)) + package = format(package,identifier,identifier,os.uuid(),os.date("!%Y-%m-%dT%H:%M:%SZ"),concat(used,"\n"),idmaker(root)) toc = format(toc,identifier,"title",root) io.savedata(file.join(epubpath,"mimetype"),mimetype) diff --git a/scripts/context/lua/mtxrun.lua b/scripts/context/lua/mtxrun.lua index 449c341ab..488594892 100644 --- a/scripts/context/lua/mtxrun.lua +++ b/scripts/context/lua/mtxrun.lua @@ -1114,6 +1114,13 @@ if not modules then modules = { } end modules ['l-lpeg'] = { local lpeg = require("lpeg") +-- tracing (only used when we encounter a problem in integration of lpeg in luatex) + +local report = texio and texio.write_nl or print + + + + local type = type local byte, char = string.byte, string.char @@ -1222,17 +1229,17 @@ patterns.unspacer = ((patterns.spacer^1)/"")^0 patterns.somecontent = (anything - newline - space)^1 -- (utf8char - newline - space)^1 patterns.beginline = #(1-newline) -local unquoted = Cs(patterns.unquoted * endofstring) -- not C - -function string.unquoted(str) - return match(unquoted,str) or str -end - --- more efficient: +-- local unquoted = Cs(patterns.unquoted * endofstring) -- not C +-- +-- function string.unquoted(str) +-- return match(unquoted,str) or str +-- end +-- +-- more efficient on long strings: local unquoted = ( - squote * Cs(1 - P(-2)) * squote - + dquote * Cs(1 - P(-2)) * dquote + squote * Cs((1 - P(-2))^0) * squote + + dquote * Cs((1 - P(-2))^0) * dquote ) function string.unquoted(str) @@ -1241,6 +1248,12 @@ end patterns.unquoted = unquoted +-- print(string.unquoted("test")) +-- print(string.unquoted([["t\"est"]])) +-- print(string.unquoted([["t\"est"x]])) +-- print(string.unquoted("\'test\'")) +-- print(string.unquoted('"test"')) +-- print(string.unquoted('"test"')) function lpeg.anywhere(pattern) --slightly adapted from website return P { P(pattern) + 1 * V(1) } -- why so complex? 
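The commented-out tests above hint at the intended behaviour of the patched `string.unquoted`. A minimal usage sketch (assuming the string helpers from `mtxrun.lua` are loaded, e.g. inside a ConTeXt Lua run; the sample strings are made up):

```lua
-- the revised pattern strips one layer of matching single or double quotes
-- and falls back to returning the string unchanged when nothing matches
print(string.unquoted('"test"'))  -- test
print(string.unquoted("'test'"))  -- test
print(string.unquoted("test"))    -- test (no quotes, returned as-is)

-- later in this commit the runner combines it with string.quoted to
-- normalize script names that may or may not carry quotes:
-- result = string.quoted(string.unquoted(result))
```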
@@ -1667,6 +1680,7 @@ end + end -- of closure do -- create closure to overcome 200 locals limit @@ -2937,12 +2951,19 @@ local nothing = Cc("") local escaped = (plus / " ") + (percent * C(hexdigit * hexdigit) / tochar) -- we assume schemes with more than 1 character (in order to avoid problems with windows disks) +-- we also assume that when we have a scheme, we also have an authority + +local schemestr = Cs((escaped+(1-colon-slash-qmark-hash))^2) +local authoritystr = Cs((escaped+(1- slash-qmark-hash))^0) +local pathstr = Cs((escaped+(1- qmark-hash))^0) +local querystr = Cs((escaped+(1- hash))^0) +local fragmentstr = Cs((escaped+(1- endofstring))^0) -local scheme = Cs((escaped+(1-colon-slash-qmark-hash))^2) * colon + nothing -local authority = slash * slash * Cs((escaped+(1- slash-qmark-hash))^0) + nothing -local path = slash * Cs((escaped+(1- qmark-hash))^0) + nothing -local query = qmark * Cs((escaped+(1- hash))^0) + nothing -local fragment = hash * Cs((escaped+(1- endofstring))^0) + nothing +local scheme = schemestr * colon + nothing +local authority = slash * slash * authoritystr + nothing +local path = slash * pathstr + nothing +local query = qmark * querystr + nothing +local fragment = hash * fragmentstr + nothing local validurl = scheme * authority * path * query * fragment local parser = Ct(validurl) @@ -2963,11 +2984,14 @@ local function split(str) return (type(str) == "string" and lpegmatch(parser,str)) or str end +local isscheme = schemestr * colon * slash * slash -- this test also assumes authority + local function hasscheme(str) - local scheme = lpegmatch(scheme,str) -- at least one character - return scheme and scheme ~= "" + local scheme = lpegmatch(isscheme,str) -- at least one character + return scheme ~= "" and scheme or false end + -- todo: cache them local rootletter = R("az","AZ") @@ -4815,6 +4839,23 @@ function inspect(i) -- global function end end +-- from the lua book: + +function traceback() + local level = 1 + while true do + local info = debug.getinfo(level, "Sl") + if not info then + break + elseif info.what == "C" then + print(format("%3i : C function",level)) + else + print(format("%3i : [%s]:%d",level,info.short_src,info.currentline)) + end + level = level + 1 + end +end + end -- of closure @@ -6821,9 +6862,7 @@ local function handle_any_entity(str) if resolve then local a = acache[str] -- per instance ! todo if not a then -print(">1",str,a) a = resolve_predefined and predefined_simplified[str] -print(">2",str,a) if a then -- one of the predefined elseif type(resolve) == "function" then @@ -6831,7 +6870,6 @@ print(">2",str,a) else a = entities[str] end -print(">3",str,a) if a then if type(a) == "function" then if trace_entities then @@ -6839,9 +6877,7 @@ if type(a) == "function" then end a = a(str) or "" end -print(">4",str,a) a = lpegmatch(parsedentity,a) or a -print(">5",str,a) if trace_entities then report_xml("resolved entity &%s; -> %s (internal)",str,a) end @@ -10156,19 +10192,6 @@ local resolvers = resolvers -- all, when working on the main resolver code, I don't want to scroll -- past this every time. See data-obs.lua for the gsub variant. 
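A small sketch of the stricter scheme test introduced in the `l-url` part of this hunk; the inputs are illustrative and `url` is the table exported by the embedded `l-url` module:

```lua
-- a scheme is now only reported when it has at least two characters and is
-- followed by an authority part (//), so Windows drive letters and plain
-- relative paths are not mistaken for schemes
print(url.hasscheme("http://example.org/texmf/index.html"))  -- http
print(url.hasscheme("tree:///usr/local/texmf"))              -- tree
print(url.hasscheme("c:/texmf-local/web2c"))                 -- false
print(url.hasscheme("fonts/map/pdftex"))                     -- false
```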
--- {a,b,c,d} --- a,b,c/{p,q,r},d --- a,b,c/{p,q,r}/d/{x,y,z}// --- a,b,c/{p,q/{x,y,z},r},d/{p,q,r} --- a,b,c/{p,q/{x,y,z},r},d/{p,q,r} --- a{b,c}{d,e}f --- {a,b,c,d} --- {a,b,c/{p,q,r},d} --- {a,b,c/{p,q,r}/d/{x,y,z}//} --- {a,b,c/{p,q/{x,y,z}},d/{p,q,r}} --- {a,b,c/{p,q/{x,y,z},w}v,d/{p,q,r}} --- {$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,.local,}/web2c} - local function f_first(a,b) local t, n = { }, 0 for s in gmatch(b,"[^,]+") do @@ -10260,6 +10283,19 @@ function resolvers.expandedpathfromlist(pathlist) return newlist end +-- {a,b,c,d} +-- a,b,c/{p,q,r},d +-- a,b,c/{p,q,r}/d/{x,y,z}// +-- a,b,c/{p,q/{x,y,z},r},d/{p,q,r} +-- a,b,c/{p,q/{x,y,z},r},d/{p,q,r} +-- a{b,c}{d,e}f +-- {a,b,c,d} +-- {a,b,c/{p,q,r},d} +-- {a,b,c/{p,q,r}/d/{x,y,z}//} +-- {a,b,c/{p,q/{x,y,z}},d/{p,q,r}} +-- {a,b,c/{p,q/{x,y,z},w}v,d/{p,q,r}} +-- {$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,.local,}/web2c} + local cleanup = lpeg.replacer { { "!" , "" }, { "\\" , "/" }, @@ -10427,9 +10463,21 @@ local function scan(files,spec,path,n,m,r) return files, n, m, r end -function resolvers.scanfiles(path,branch) +local cache = { } + +function resolvers.scanfiles(path,branch,usecache) + statistics.starttiming(cache) + if usecache then + local files = cache[path] + if files then + if trace_locating then + report_expansions("using caches scan of path '%s', branch '%s'",path,branch or path) + end + return files + end + end if trace_locating then - report_expansions("scanning path '%s', branch '%s'",path, branch or path) + report_expansions("scanning path '%s', branch '%s'",path,branch or path) end local realpath = resolvers.resolve(path) -- no shortcut local files, n, m, r = scan({ },realpath .. '/',"",0,0,0) @@ -10440,9 +10488,18 @@ function resolvers.scanfiles(path,branch) if trace_locating then report_expansions("%s files found on %s directories with %s uppercase remappings",n,m,r) end + if usecache then + cache[path] = files + end + statistics.stoptiming(cache) return files end +function resolvers.scantime() + return statistics.elapsedtime(cache) +end + + end -- of closure @@ -10831,7 +10888,7 @@ local function identify() end elseif not writable and caches.force then local cacheparent = file.dirname(cachepath) - if file.is_writable(cacheparent) then + if file.is_writable(cacheparent) and true then -- we go on anyway (needed for mojca's kind of paths) if not caches.ask or io.ask(format("\nShould I create the cache path %s?",cachepath), "no", { "yes", "no" }) == "yes" then mkdirs(cachepath) if isdir(cachepath) and file.is_writable(cachepath) then @@ -11288,7 +11345,7 @@ if not modules then modules = { } end modules ['data-res'] = { -- instance but for practical purposes we now avoid this and use a -- instance variable. We always have one instance active (sort of global). 
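The new `usecache` argument is used later in this commit by `generators.file` (`resolvers.scanfiles(path,false,true) -- scan once`). A hedged sketch of the intended call pattern, with a made-up path:

```lua
-- the first call walks the directory tree and stores the file table in a
-- module-local cache; the second call with usecache = true reuses it
local files = resolvers.scanfiles("/opt/texmf-local", false, true)  -- scans disk
local again = resolvers.scanfiles("/opt/texmf-local", false, true)  -- cache hit

-- accumulated scanning time, as exposed by the new helper
print(resolvers.scantime())
```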
--- todo: cache:/// home:/// +-- todo: cache:/// home:/// selfautoparent:/// (sometime end 2012) local format, gsub, find, lower, upper, match, gmatch = string.format, string.gsub, string.find, string.lower, string.upper, string.match, string.gmatch local concat, insert, sortedkeys = table.concat, table.insert, table.sortedkeys @@ -11641,19 +11698,19 @@ local function load_configuration_files() if blob then local setups = instance.setups local data = blob() -local parent = data and data.parent -if parent then - local filename = filejoin(pathname,parent) - local realname = resolvers.resolve(filename) -- no shortcut - local blob = loadfile(realname) - if blob then - local parentdata = blob() - if parentdata then - report_resolving("loading configuration file '%s'",filename) - data = table.merged(parentdata,data) - end - end -end + local parent = data and data.parent + if parent then + local filename = filejoin(pathname,parent) + local realname = resolvers.resolve(filename) -- no shortcut + local blob = loadfile(realname) + if blob then + local parentdata = blob() + if parentdata then + report_resolving("loading configuration file '%s'",filename) + data = table.merged(parentdata,data) + end + end + end data = data and data.content if data then if trace_locating then @@ -11743,14 +11800,14 @@ local function locate_file_databases() local runtime = stripped == path path = resolvers.cleanpath(path) local spec = resolvers.splitmethod(stripped) - if spec.scheme == "cache" or spec.scheme == "file" then - stripped = spec.path - elseif runtime and (spec.noscheme or spec.scheme == "file") then + if runtime and (spec.noscheme or spec.scheme == "file") then stripped = "tree:///" .. stripped + elseif spec.scheme == "cache" or spec.scheme == "file" then + stripped = spec.path end if trace_locating then if runtime then - report_resolving("locating list of '%s' (runtime)",path) + report_resolving("locating list of '%s' (runtime) (%s)",path,stripped) else report_resolving("locating list of '%s' (cached)",path) end @@ -12162,287 +12219,402 @@ end local preparetreepattern = Cs((P(".")/"%%." + P("-")/"%%-" + P(1))^0 * Cc("$")) --- this one is split in smaller functions but it needs testing +-- -- -- begin of main file search routing -- -- -- needs checking as previous has been patched -local function collect_instance_files(filename,askedformat,allresults) -- todo : plugin (scanners, checkers etc) - local result = { } - local stamp = nil - askedformat = askedformat or "" - filename = collapsepath(filename) - -- speed up / beware: format problem - if instance.remember and not allresults then - stamp = filename .. "--" .. 
askedformat - if instance.found[stamp] then - if trace_locating then - report_resolving("remembered file '%s'",filename) - end - resolvers.registerintrees(filename) -- for tracing used files - return instance.found[stamp] - end - end - if not dangerous[askedformat] then - if isreadable(filename) then - if trace_detail then - report_resolving("file '%s' found directly",filename) - end - if stamp then - instance.found[stamp] = { filename } - end - return { filename } +local collect_instance_files + +local function find_direct(filename,allresults) + if not dangerous[askedformat] and isreadable(filename) then + if trace_detail then + report_resolving("file '%s' found directly",filename) end + return { filename } end +end + +local function find_wildcard(filename,allresults) if find(filename,'%*') then if trace_locating then report_resolving("checking wildcard '%s'", filename) end - result = resolvers.findwildcardfiles(filename) -- we can use th elocal - elseif file.is_qualified_path(filename) then - if isreadable(filename) then - if trace_locating then - report_resolving("qualified name '%s'", filename) - end - result = { filename } - else - local forcedname, ok, suffix = "", false, fileextname(filename) - if suffix == "" then -- why - local format_suffixes = askedformat == "" and resolvers.defaultsuffixes or suffixes[askedformat] - if format_suffixes then - for i=1,#format_suffixes do - local s = format_suffixes[i] - forcedname = filename .. "." .. s - if isreadable(forcedname) then - if trace_locating then - report_resolving("no suffix, forcing format filetype '%s'", s) - end - result, ok = { forcedname }, true - break - end + return resolvers.findwildcardfiles(filename) -- we can use the local + end +end + +local function find_qualified(filename,allresults) -- this one will be split too + if not file.is_qualified_path(filename) then + return + end + if trace_locating then + report_resolving("checking qualified name '%s'", filename) + end + if isreadable(filename) then + if trace_detail then + report_resolving("qualified file '%s' found", filename) + end + return { filename } + end + if trace_detail then + report_resolving("locating qualified file '%s'", filename) + end + local forcedname, suffix = "", fileextname(filename) + if suffix == "" then -- why + local format_suffixes = askedformat == "" and resolvers.defaultsuffixes or suffixes[askedformat] + if format_suffixes then + for i=1,#format_suffixes do + local s = format_suffixes[i] + forcedname = filename .. "." .. s + if isreadable(forcedname) then + if trace_locating then + report_resolving("no suffix, forcing format filetype '%s'", s) end + return { forcedname } end end - if not ok and suffix ~= "" then - -- try to find in tree (no suffix manipulation), here we search for the - -- matching last part of the name - local basename = filebasename(filename) - local pattern = lpegmatch(preparetreepattern,filename) - -- messy .. to be sorted out - local savedformat = askedformat - local format = savedformat or "" - if format == "" then - askedformat = resolvers.formatofsuffix(suffix) - end - if not format then - askedformat = "othertextfiles" -- kind of everything, maybe texinput is better - end - -- - if basename ~= filename then - local resolved = collect_instance_files(basename,askedformat,allresults) - if #result == 0 then -- shouldn't this be resolved ? 
- local lowered = lower(basename) - if filename ~= lowered then - resolved = collect_instance_files(lowered,askedformat,allresults) - end - end - resolvers.format = savedformat - -- - for r=1,#resolved do - local rr = resolved[r] - if find(rr,pattern) then - result[#result+1], ok = rr, true - end + end + end + if suffix ~= "" then + -- try to find in tree (no suffix manipulation), here we search for the + -- matching last part of the name + local basename = filebasename(filename) + local pattern = lpegmatch(preparetreepattern,filename) + -- messy .. to be sorted out + local savedformat = askedformat + local format = savedformat or "" + if format == "" then + askedformat = resolvers.formatofsuffix(suffix) + end + if not format then + askedformat = "othertextfiles" -- kind of everything, maybe all + end + -- + if basename ~= filename then + local resolved = collect_instance_files(basename,askedformat,allresults) + if #resolved == 0 then + local lowered = lower(basename) + if filename ~= lowered then + resolved = collect_instance_files(lowered,askedformat,allresults) + end + end + resolvers.format = savedformat + -- + if #resolved > 0 then + local result = { } + for r=1,#resolved do + local rr = resolved[r] + if find(rr,pattern) then + result[#result+1] = rr end end - -- a real wildcard: - -- - -- if not ok then - -- local filelist = collect_files({basename}) - -- for f=1,#filelist do - -- local ff = filelist[f][3] or "" - -- if find(ff,pattern) then - -- result[#result+1], ok = ff, true - -- end - -- end - -- end - end - if not ok and trace_locating then - report_resolving("qualified name '%s'", filename) + if #result > 0 then + return result + end end end - else - -- search spec - local filetype, done, wantedfiles, ext = '', false, { }, fileextname(filename) - -- -- tricky as filename can be bla.1.2.3 - -- if not suffixmap[ext] then --- probably needs to be done elsewhere too - -- wantedfiles[#wantedfiles+1] = filename + -- a real wildcard: + -- + -- local filelist = collect_files({basename}) + -- result = { } + -- for f=1,#filelist do + -- local ff = filelist[f][3] or "" + -- if find(ff,pattern) then + -- result[#result+1], ok = ff, true + -- end -- end - wantedfiles[#wantedfiles+1] = filename - if askedformat == "" then - if ext == "" or not suffixmap[ext] then - local defaultsuffixes = resolvers.defaultsuffixes - for i=1,#defaultsuffixes do - local forcedname = filename .. '.' .. defaultsuffixes[i] - wantedfiles[#wantedfiles+1] = forcedname - filetype = resolvers.formatofsuffix(forcedname) - if trace_locating then - report_resolving("forcing filetype '%s'",filetype) - end - end - else - filetype = resolvers.formatofsuffix(filename) + -- if #result > 0 then + -- return result + -- end + end +end + +local function find_analyze(filename,askedformat,allresults) + local filetype, wantedfiles, ext = '', { }, fileextname(filename) + -- too tricky as filename can be bla.1.2.3: + -- + -- if not suffixmap[ext] then + -- wantedfiles[#wantedfiles+1] = filename + -- end + wantedfiles[#wantedfiles+1] = filename + if askedformat == "" then + if ext == "" or not suffixmap[ext] then + local defaultsuffixes = resolvers.defaultsuffixes + for i=1,#defaultsuffixes do + local forcedname = filename .. '.' .. 
defaultsuffixes[i] + wantedfiles[#wantedfiles+1] = forcedname + filetype = resolvers.formatofsuffix(forcedname) if trace_locating then - report_resolving("using suffix based filetype '%s'",filetype) + report_resolving("forcing filetype '%s'",filetype) end end else - if ext == "" or not suffixmap[ext] then - local format_suffixes = suffixes[askedformat] - if format_suffixes then - for i=1,#format_suffixes do - wantedfiles[#wantedfiles+1] = filename .. "." .. format_suffixes[i] - end - end - end - filetype = askedformat + filetype = resolvers.formatofsuffix(filename) if trace_locating then - report_resolving("using given filetype '%s'",filetype) + report_resolving("using suffix based filetype '%s'",filetype) end end - local typespec = resolvers.variableofformat(filetype) - local pathlist = resolvers.expandedpathlist(typespec) - if not pathlist or #pathlist == 0 then - -- no pathlist, access check only / todo == wildcard - if trace_detail then - report_resolving("checking filename '%s', filetype '%s', wanted files '%s'",filename, filetype or '?',concat(wantedfiles," | ")) - end - for k=1,#wantedfiles do - local fname = wantedfiles[k] - if fname and isreadable(fname) then - filename, done = fname, true - result[#result+1] = filejoin('.',fname) - break + else + if ext == "" or not suffixmap[ext] then + local format_suffixes = suffixes[askedformat] + if format_suffixes then + for i=1,#format_suffixes do + wantedfiles[#wantedfiles+1] = filename .. "." .. format_suffixes[i] end end - -- this is actually 'other text files' or 'any' or 'whatever' - local filelist = collect_files(wantedfiles) - local fl = filelist and filelist[1] - if fl then - filename = fl[3] -- not local? - result[#result+1] = resolvers.resolve(filename) - done = true + end + filetype = askedformat + if trace_locating then + report_resolving("using given filetype '%s'",filetype) + end + end + return filetype, wantedfiles +end + +local function check_subpath(fname) + if isreadable(fname) then + if trace_detail then + report_resolving("found '%s' by deep scanning",fname) + end + return fname + end +end + +local function find_intree(filename,filetype,wantedfiles,allresults) + local typespec = resolvers.variableofformat(filetype) + local pathlist = resolvers.expandedpathlist(typespec) + if pathlist and #pathlist > 0 then + -- list search + local filelist = collect_files(wantedfiles) + local dirlist = { } + if filelist then + for i=1,#filelist do + dirlist[i] = filedirname(filelist[i][3]) .. "/" -- was [2] .. gamble end - else - -- list search - local filelist = collect_files(wantedfiles) - local dirlist = { } + end + if trace_detail then + report_resolving("checking filename '%s'",filename) + end + local result = { } + for k=1,#pathlist do + local path = pathlist[k] + local pathname = lpegmatch(inhibitstripper,path) + local doscan = path == pathname -- no ^!! + if not find (pathname,'//$') then + doscan = false -- we check directly on the path + end + local done = false + -- using file list if filelist then - for i=1,#filelist do - dirlist[i] = filedirname(filelist[i][3]) .. "/" -- was [2] .. gamble - end - end - if trace_detail then - report_resolving("checking filename '%s'",filename) - end - for k=1,#pathlist do - local path = pathlist[k] - local pathname = lpegmatch(inhibitstripper,path) - local doscan = path == pathname -- no ^!! 
- done = false - -- using file list - if filelist then - -- compare list entries with permitted pattern -- /xx /xx// - local expression = makepathexpression(pathname) - if trace_detail then - report_resolving("using pattern '%s' for path '%s'",expression,pathname) - end - for k=1,#filelist do - local fl = filelist[k] - local f = fl[2] - local d = dirlist[k] - if find(d,expression) then - -- todo, test for readable - result[#result+1] = resolvers.resolve(fl[3]) -- no shortcut - done = true - if allresults then - if trace_detail then - report_resolving("match to '%s' in hash for file '%s' and path '%s', continue scanning",expression,f,d) - end - else - if trace_detail then - report_resolving("match to '%s' in hash for file '%s' and path '%s', quit scanning",expression,f,d) - end - break + -- compare list entries with permitted pattern -- /xx /xx// + local expression = makepathexpression(pathname) + if trace_detail then + report_resolving("using pattern '%s' for path '%s'",expression,pathname) + end + for k=1,#filelist do + local fl = filelist[k] + local f = fl[2] + local d = dirlist[k] + if find(d,expression) then + -- todo, test for readable + result[#result+1] = resolvers.resolve(fl[3]) -- no shortcut + done = true + if allresults then + if trace_detail then + report_resolving("match to '%s' in hash for file '%s' and path '%s', continue scanning",expression,f,d) + end + else + if trace_detail then + report_resolving("match to '%s' in hash for file '%s' and path '%s', quit scanning",expression,f,d) end - elseif trace_detail then - report_resolving("no match to '%s' in hash for file '%s' and path '%s'",expression,f,d) + break end + elseif trace_detail then + report_resolving("no match to '%s' in hash for file '%s' and path '%s'",expression,f,d) end end - if not done and doscan then - -- check if on disk / unchecked / does not work at all / also zips - local scheme = url.hasscheme(pathname) - if not scheme or scheme == "file" then - local pname = gsub(pathname,"%.%*$",'') - if not find(pname,"%*") then - local ppname = gsub(pname,"/+$","") - if can_be_dir(ppname) then + end + if not done then + pathname = gsub(pathname,"/+$","") + pathname = resolvers.resolve(pathname) + local scheme = url.hasscheme(pathname) + if not scheme or scheme == "file" then + local pname = gsub(pathname,"%.%*$",'') + if not find(pname,"%*") then + if can_be_dir(pname) then + -- quick root scan first + for k=1,#wantedfiles do + local w = wantedfiles[k] + local fname = check_subpath(filejoin(pname,w)) + if fname then + result[#result+1] = fname + done = true + if not allresults then + break + end + end + end + if not done and doscan then + -- collect files in path (and cache the result) + local files = resolvers.scanfiles(pname,false,true) for k=1,#wantedfiles do local w = wantedfiles[k] - local fname = filejoin(ppname,w) - if isreadable(fname) then - if trace_detail then - report_resolving("found '%s' by scanning",fname) + local subpath = files[w] + if not subpath or subpath == "" then + -- rootscan already done + elseif type(subpath) == "string" then + local fname = check_subpath(filejoin(ppname,subpath,w)) + if fname then + result[#result+1] = fname + done = true + if not allresults then + break + end + end + else + for i=1,#subpath do + local sp = subpath[i] + if sp == "" then + -- roottest already done + else + local fname = check_subpath(filejoin(ppname,sp,w)) + if fname then + result[#result+1] = fname + done = true + if not allresults then + break + end + end + end + end + if done and not allresults then + 
break end - result[#result+1] = fname - done = true - if not allresults then break end end end - else - -- no access needed for non existing path, speedup (esp in large tree with lots of fake) end end + else + -- no access needed for non existing path, speedup (esp in large tree with lots of fake) end end - if not done and doscan then - -- todo: slow path scanning ... although we now have tree:// supported in $TEXMF - end - if done and not allresults then break end + end + -- todo recursive scanning + if done and not allresults then + return #result > 0 and result end end end - for k=1,#result do - local rk = collapsepath(result[k]) - result[k] = rk - resolvers.registerintrees(rk) -- for tracing used files +end + +local function find_onpath(filename,filetype,wantedfiles,allresults) + if trace_detail then + report_resolving("checking filename '%s', filetype '%s', wanted files '%s'",filename, filetype or '?',concat(wantedfiles," | ")) end - if stamp then - instance.found[stamp] = result + local result = { } + for k=1,#wantedfiles do + local fname = wantedfiles[k] + if fname and isreadable(fname) then + filename = fname + result[#result+1] = filejoin('.',fname) + if not allresults then + break + end + end end - return result + return #result > 0 and result end --- -- -- begin of main file search routing -- -- -- - - - - - - - - +local function find_otherwise(filename,filetype,wantedfiles,allresults) -- other text files | any | whatever + local filelist = collect_files(wantedfiles) + local fl = filelist and filelist[1] + if fl then + return { resolvers.resolve(fl[3]) } -- filename + end +end +collect_instance_files = function(filename,askedformat,allresults) -- uses nested + local result, stamp, filetype, wantedfiles + askedformat = askedformat or "" + filename = collapsepath(filename) + if allresults then + -- no need for caching, only used for tracing + local filetype, wantedfiles = find_analyze(filename,askedformat) + local results = { + { method = "direct", list = find_direct (filename,stamp,true) }, + { method = "wildcard", list = find_wildcard (filename,true) }, + { method = "qualified", list = find_qualified(filename,true) }, + { method = "in tree", list = find_intree (filename,filetype,wantedfiles,true) }, + { method = "on path", list = find_onpath (filename,filetype,wantedfiles,true) }, + { method = "otherwise", list = find_otherwise(filename,filetype,wantedfiles,true) }, + } + local result, status, done = { }, { }, { } + for k, r in next, results do + local method, list = r.method, r.list + if list then + for i=1,#list do + local c = collapsepath(list[i]) + if not done[c] then + result[#result+1] = c + done[c] = true + end + status[#status+1] = format("%-10s: %s",method,c) + end + end + end + if trace_detail then + report_resolving("lookup status: %s",table.serialize(status,filename)) + end + return result, status + else + if instance.remember then + stamp = format("%s--%s", filename, askedformat) + result = stamp and instance.found[stamp] + if result then + if trace_locating then + report_resolving("remembered file '%s'",filename) + end + return result + end + end + result = find_direct (filename,stamp) or + find_wildcard (filename) or + find_qualified(filename) + if not result then + local filetype, wantedfiles = find_analyze(filename,askedformat) + result = find_intree (filename,filetype,wantedfiles) or + find_onpath (filename,filetype,wantedfiles) or + find_otherwise(filename,filetype,wantedfiles) + end + if result then + for k=1,#result do + local rk = collapsepath(result[k]) + 
result[k] = rk + resolvers.registerintrees(rk) -- for tracing used files + end + else + result = { } -- maybe false + end + if stamp then + if trace_locating then + report_resolving("remembering file '%s'",filename) + end + instance.found[stamp] = result + end + return result + end +end -- -- -- end of main file search routing -- -- -- + local function findfiles(filename,filetype,allresults) - local result = collect_instance_files(filename,filetype or "",allresults) - if #result == 0 then + local result, status = collect_instance_files(filename,filetype or "",allresults) + if not result or #result == 0 then local lowered = lower(filename) if filename ~= lowered then - return collect_instance_files(lowered,filetype or "",allresults) + result, status = collect_instance_files(lowered,filetype or "",allresults) end end - return result + return result or { }, status end function resolvers.findfiles(filename,filetype) @@ -12603,6 +12775,10 @@ function resolvers.load(option) return files and next(files) and true end +function resolvers.loadtime() + return statistics.elapsedtime(instance) +end + local function report(str) if trace_locating then report_resolving(str) -- has already verbose @@ -12616,6 +12792,9 @@ function resolvers.dowithfilesandreport(command, files, ...) -- will move if trace_locating then report('') -- ? end + if type(files) == "string" then + files = { files } + end for f=1,#files do local file = files[f] local result = command(file,...) @@ -13031,7 +13210,7 @@ end function generators.file(specification) local path = specification.filename - local content = resolvers.scanfiles(path) + local content = resolvers.scanfiles(path,false,true) -- scan once resolvers.registerfilehash(path,content,true) end @@ -13654,9 +13833,10 @@ end function resolvers.locators.tree(specification) local name = specification.filename - if name ~= '' and lfs.isdir(name) then + local realname = resolvers.resolve(name) -- no shortcut + if realname and realname ~= '' and lfs.isdir(realname) then if trace_locating then - report_trees("locator '%s' found",name) + report_trees("locator '%s' found",realname) end resolvers.appendhash('tree',name,false) -- don't cache elseif trace_locating then @@ -13670,6 +13850,8 @@ function resolvers.hashers.tree(specification) report_trees("analysing '%s'",name) end resolvers.methodhandler("hashers",name) + + resolvers.generators.file(specification) end resolvers.concatinators.tree = resolvers.concatinators.file @@ -14596,7 +14778,7 @@ local helpinfo = [[ --resolve resolve prefixed arguments --ctxlua run internally (using preloaded libs) --internal run script using built in libraries (same as --ctxlua) ---locate locate given filename +--locate locate given filename in database (default) or system (--first --all --detail) --autotree use texmf tree cf. env 'texmfstart_tree' or 'texmfstarttree' --tree=pathtotree use given texmf tree (default file: 'setuptex.tmf') @@ -14789,6 +14971,10 @@ function runners.execute_script(fullname,internal,nosplit) environment.ownscript = result dofile(result) else + result = string.quoted(string.unquoted(result)) + -- if string.match(result,' ') and not string.match(result,"^\".*\"$") then + -- result = '"' .. result .. '"' + -- end local binary = runners.applications[file.extname(result)] if binary and binary ~= "" then result = binary .. " " .. result @@ -14899,10 +15085,24 @@ function runners.resolve_string(filename) end end -function runners.locate_file(filename) - -- differs from texmfstart where locate appends .com .exe .bat ... 
todo +-- differs from texmfstart where locate appends .com .exe .bat ... todo + +function runners.locate_file(filename) -- was given file but only searches in tree if filename and filename ~= "" then - runners.report_location(resolvers.findgivenfile(filename)) + if environment.argument("first") then + runners.report_location(resolvers.findfile(filename)) + -- resolvers.dowithfilesandreport(resolvers.findfile,filename) + elseif environment.argument("all") then + local result, status = resolvers.findfiles(filename) + if status and environment.argument("detail") then + runners.report_location(status) + else + runners.report_location(result) + end + else + runners.report_location(resolvers.findgivenfile(filename)) + -- resolvers.dowithfilesandreport(resolvers.findgivenfile,filename) + end end end @@ -14911,12 +15111,12 @@ function runners.locate_platform() end function runners.report_location(result) - if e_verbose then - reportline() - if result and result ~= "" then - report(result) - else - report("not found") + if type(result) == "table" then + for i=1,#result do + if i > 1 then + io.write("\n") + end + io.write(result[i]) end else io.write(result) @@ -14974,8 +15174,8 @@ function resolvers.launch(str) end function runners.launch_file(filename) - instance.allresults = true trackers.enable("resolvers.locating") + local allresults = environment.arguments["all"] local pattern = environment.arguments["pattern"] if not pattern or pattern == "" then pattern = filename @@ -14983,15 +15183,15 @@ function runners.launch_file(filename) if not pattern or pattern == "" then report("provide name or --pattern=") else - local t = resolvers.findfiles(pattern) + local t = resolvers.findfiles(pattern,nil,allresults) if not t or #t == 0 then - t = resolvers.findfiles("*/" .. pattern) + t = resolvers.findfiles("*/" .. pattern,nil,allresults) end if not t or #t == 0 then - t = resolvers.findfiles("*/" .. pattern .. "*") + t = resolvers.findfiles("*/" .. pattern .. 
"*",nil,allresults) end if t and #t > 0 then - if environment.arguments["all"] then + if allresults then for _, v in pairs(t) do report("launching %s", v) resolvers.launch(v) @@ -15367,7 +15567,7 @@ elseif environment.argument("resolve") then elseif environment.argument("locate") then - -- locate file + -- locate file (only database) runners.loadbase() runners.locate_file(filename) @@ -15410,15 +15610,16 @@ elseif environment.argument("find-file") then -- luatools: runners.execute_ctx_script("mtx-base","--find-file",filename) resolvers.load() + local e_all = environment.argument("all") local e_pattern = environment.argument("pattern") - local e_format = environment.arguments("format") + local e_format = environment.argument("format") + local finder = e_all and resolvers.findfiles or resolvers.findfile if not e_pattern then runners.register_arguments(filename) environment.initializearguments(environment.arguments_after) - resolvers.dowithfilesandreport(resolvers.findfiles,environment.files,e_format) + resolvers.dowithfilesandreport(finder,environment.files,e_format) elseif type(e_pattern) == "string" then - instance.allresults = true -- brrrr - resolvers.dowithfilesandreport(resolvers.findfiles,{ e_pattern }, e_format) + resolvers.dowithfilesandreport(finder,{ e_pattern },e_format) end elseif environment.argument("find-path") then @@ -15499,6 +15700,8 @@ elseif environment.argument("generate") then trackers.enable("resolvers.locating") resolvers.load() + e_verbose = true + elseif environment.argument("make") or environment.argument("ini") or environment.argument("compile") then -- luatools: runners.execute_ctx_script("mtx-base","--make",filename) diff --git a/scripts/context/stubs/mswin/mtxrun.lua b/scripts/context/stubs/mswin/mtxrun.lua index 449c341ab..488594892 100644 --- a/scripts/context/stubs/mswin/mtxrun.lua +++ b/scripts/context/stubs/mswin/mtxrun.lua @@ -1114,6 +1114,13 @@ if not modules then modules = { } end modules ['l-lpeg'] = { local lpeg = require("lpeg") +-- tracing (only used when we encounter a problem in integration of lpeg in luatex) + +local report = texio and texio.write_nl or print + + + + local type = type local byte, char = string.byte, string.char @@ -1222,17 +1229,17 @@ patterns.unspacer = ((patterns.spacer^1)/"")^0 patterns.somecontent = (anything - newline - space)^1 -- (utf8char - newline - space)^1 patterns.beginline = #(1-newline) -local unquoted = Cs(patterns.unquoted * endofstring) -- not C - -function string.unquoted(str) - return match(unquoted,str) or str -end - --- more efficient: +-- local unquoted = Cs(patterns.unquoted * endofstring) -- not C +-- +-- function string.unquoted(str) +-- return match(unquoted,str) or str +-- end +-- +-- more efficient on long strings: local unquoted = ( - squote * Cs(1 - P(-2)) * squote - + dquote * Cs(1 - P(-2)) * dquote + squote * Cs((1 - P(-2))^0) * squote + + dquote * Cs((1 - P(-2))^0) * dquote ) function string.unquoted(str) @@ -1241,6 +1248,12 @@ end patterns.unquoted = unquoted +-- print(string.unquoted("test")) +-- print(string.unquoted([["t\"est"]])) +-- print(string.unquoted([["t\"est"x]])) +-- print(string.unquoted("\'test\'")) +-- print(string.unquoted('"test"')) +-- print(string.unquoted('"test"')) function lpeg.anywhere(pattern) --slightly adapted from website return P { P(pattern) + 1 * V(1) } -- why so complex? 
result[k] = rk + resolvers.registerintrees(rk) -- for tracing used files + end + else + result = { } -- maybe false + end + if stamp then + if trace_locating then + report_resolving("remembering file '%s'",filename) + end + instance.found[stamp] = result + end + return result + end +end -- -- -- end of main file search routing -- -- -- + local function findfiles(filename,filetype,allresults) - local result = collect_instance_files(filename,filetype or "",allresults) - if #result == 0 then + local result, status = collect_instance_files(filename,filetype or "",allresults) + if not result or #result == 0 then local lowered = lower(filename) if filename ~= lowered then - return collect_instance_files(lowered,filetype or "",allresults) + result, status = collect_instance_files(lowered,filetype or "",allresults) end end - return result + return result or { }, status end function resolvers.findfiles(filename,filetype) @@ -12603,6 +12775,10 @@ function resolvers.load(option) return files and next(files) and true end +function resolvers.loadtime() + return statistics.elapsedtime(instance) +end + local function report(str) if trace_locating then report_resolving(str) -- has already verbose @@ -12616,6 +12792,9 @@ function resolvers.dowithfilesandreport(command, files, ...) -- will move if trace_locating then report('') -- ? end + if type(files) == "string" then + files = { files } + end for f=1,#files do local file = files[f] local result = command(file,...) @@ -13031,7 +13210,7 @@ end function generators.file(specification) local path = specification.filename - local content = resolvers.scanfiles(path) + local content = resolvers.scanfiles(path,false,true) -- scan once resolvers.registerfilehash(path,content,true) end @@ -13654,9 +13833,10 @@ end function resolvers.locators.tree(specification) local name = specification.filename - if name ~= '' and lfs.isdir(name) then + local realname = resolvers.resolve(name) -- no shortcut + if realname and realname ~= '' and lfs.isdir(realname) then if trace_locating then - report_trees("locator '%s' found",name) + report_trees("locator '%s' found",realname) end resolvers.appendhash('tree',name,false) -- don't cache elseif trace_locating then @@ -13670,6 +13850,8 @@ function resolvers.hashers.tree(specification) report_trees("analysing '%s'",name) end resolvers.methodhandler("hashers",name) + + resolvers.generators.file(specification) end resolvers.concatinators.tree = resolvers.concatinators.file @@ -14596,7 +14778,7 @@ local helpinfo = [[ --resolve resolve prefixed arguments --ctxlua run internally (using preloaded libs) --internal run script using built in libraries (same as --ctxlua) ---locate locate given filename +--locate locate given filename in database (default) or system (--first --all --detail) --autotree use texmf tree cf. env 'texmfstart_tree' or 'texmfstarttree' --tree=pathtotree use given texmf tree (default file: 'setuptex.tmf') @@ -14789,6 +14971,10 @@ function runners.execute_script(fullname,internal,nosplit) environment.ownscript = result dofile(result) else + result = string.quoted(string.unquoted(result)) + -- if string.match(result,' ') and not string.match(result,"^\".*\"$") then + -- result = '"' .. result .. '"' + -- end local binary = runners.applications[file.extname(result)] if binary and binary ~= "" then result = binary .. " " .. result @@ -14899,10 +15085,24 @@ function runners.resolve_string(filename) end end -function runners.locate_file(filename) - -- differs from texmfstart where locate appends .com .exe .bat ... 
todo +-- differs from texmfstart where locate appends .com .exe .bat ... todo + +function runners.locate_file(filename) -- was given file but only searches in tree if filename and filename ~= "" then - runners.report_location(resolvers.findgivenfile(filename)) + if environment.argument("first") then + runners.report_location(resolvers.findfile(filename)) + -- resolvers.dowithfilesandreport(resolvers.findfile,filename) + elseif environment.argument("all") then + local result, status = resolvers.findfiles(filename) + if status and environment.argument("detail") then + runners.report_location(status) + else + runners.report_location(result) + end + else + runners.report_location(resolvers.findgivenfile(filename)) + -- resolvers.dowithfilesandreport(resolvers.findgivenfile,filename) + end end end @@ -14911,12 +15111,12 @@ function runners.locate_platform() end function runners.report_location(result) - if e_verbose then - reportline() - if result and result ~= "" then - report(result) - else - report("not found") + if type(result) == "table" then + for i=1,#result do + if i > 1 then + io.write("\n") + end + io.write(result[i]) end else io.write(result) @@ -14974,8 +15174,8 @@ function resolvers.launch(str) end function runners.launch_file(filename) - instance.allresults = true trackers.enable("resolvers.locating") + local allresults = environment.arguments["all"] local pattern = environment.arguments["pattern"] if not pattern or pattern == "" then pattern = filename @@ -14983,15 +15183,15 @@ function runners.launch_file(filename) if not pattern or pattern == "" then report("provide name or --pattern=") else - local t = resolvers.findfiles(pattern) + local t = resolvers.findfiles(pattern,nil,allresults) if not t or #t == 0 then - t = resolvers.findfiles("*/" .. pattern) + t = resolvers.findfiles("*/" .. pattern,nil,allresults) end if not t or #t == 0 then - t = resolvers.findfiles("*/" .. pattern .. "*") + t = resolvers.findfiles("*/" .. pattern .. 
"*",nil,allresults) end if t and #t > 0 then - if environment.arguments["all"] then + if allresults then for _, v in pairs(t) do report("launching %s", v) resolvers.launch(v) @@ -15367,7 +15567,7 @@ elseif environment.argument("resolve") then elseif environment.argument("locate") then - -- locate file + -- locate file (only database) runners.loadbase() runners.locate_file(filename) @@ -15410,15 +15610,16 @@ elseif environment.argument("find-file") then -- luatools: runners.execute_ctx_script("mtx-base","--find-file",filename) resolvers.load() + local e_all = environment.argument("all") local e_pattern = environment.argument("pattern") - local e_format = environment.arguments("format") + local e_format = environment.argument("format") + local finder = e_all and resolvers.findfiles or resolvers.findfile if not e_pattern then runners.register_arguments(filename) environment.initializearguments(environment.arguments_after) - resolvers.dowithfilesandreport(resolvers.findfiles,environment.files,e_format) + resolvers.dowithfilesandreport(finder,environment.files,e_format) elseif type(e_pattern) == "string" then - instance.allresults = true -- brrrr - resolvers.dowithfilesandreport(resolvers.findfiles,{ e_pattern }, e_format) + resolvers.dowithfilesandreport(finder,{ e_pattern },e_format) end elseif environment.argument("find-path") then @@ -15499,6 +15700,8 @@ elseif environment.argument("generate") then trackers.enable("resolvers.locating") resolvers.load() + e_verbose = true + elseif environment.argument("make") or environment.argument("ini") or environment.argument("compile") then -- luatools: runners.execute_ctx_script("mtx-base","--make",filename) diff --git a/scripts/context/stubs/unix/mtxrun b/scripts/context/stubs/unix/mtxrun index 449c341ab..488594892 100755 --- a/scripts/context/stubs/unix/mtxrun +++ b/scripts/context/stubs/unix/mtxrun @@ -1114,6 +1114,13 @@ if not modules then modules = { } end modules ['l-lpeg'] = { local lpeg = require("lpeg") +-- tracing (only used when we encounter a problem in integration of lpeg in luatex) + +local report = texio and texio.write_nl or print + + + + local type = type local byte, char = string.byte, string.char @@ -1222,17 +1229,17 @@ patterns.unspacer = ((patterns.spacer^1)/"")^0 patterns.somecontent = (anything - newline - space)^1 -- (utf8char - newline - space)^1 patterns.beginline = #(1-newline) -local unquoted = Cs(patterns.unquoted * endofstring) -- not C - -function string.unquoted(str) - return match(unquoted,str) or str -end - --- more efficient: +-- local unquoted = Cs(patterns.unquoted * endofstring) -- not C +-- +-- function string.unquoted(str) +-- return match(unquoted,str) or str +-- end +-- +-- more efficient on long strings: local unquoted = ( - squote * Cs(1 - P(-2)) * squote - + dquote * Cs(1 - P(-2)) * dquote + squote * Cs((1 - P(-2))^0) * squote + + dquote * Cs((1 - P(-2))^0) * dquote ) function string.unquoted(str) @@ -1241,6 +1248,12 @@ end patterns.unquoted = unquoted +-- print(string.unquoted("test")) +-- print(string.unquoted([["t\"est"]])) +-- print(string.unquoted([["t\"est"x]])) +-- print(string.unquoted("\'test\'")) +-- print(string.unquoted('"test"')) +-- print(string.unquoted('"test"')) function lpeg.anywhere(pattern) --slightly adapted from website return P { P(pattern) + 1 * V(1) } -- why so complex? 
@@ -1667,6 +1680,7 @@ end + end -- of closure do -- create closure to overcome 200 locals limit @@ -2937,12 +2951,19 @@ local nothing = Cc("") local escaped = (plus / " ") + (percent * C(hexdigit * hexdigit) / tochar) -- we assume schemes with more than 1 character (in order to avoid problems with windows disks) +-- we also assume that when we have a scheme, we also have an authority + +local schemestr = Cs((escaped+(1-colon-slash-qmark-hash))^2) +local authoritystr = Cs((escaped+(1- slash-qmark-hash))^0) +local pathstr = Cs((escaped+(1- qmark-hash))^0) +local querystr = Cs((escaped+(1- hash))^0) +local fragmentstr = Cs((escaped+(1- endofstring))^0) -local scheme = Cs((escaped+(1-colon-slash-qmark-hash))^2) * colon + nothing -local authority = slash * slash * Cs((escaped+(1- slash-qmark-hash))^0) + nothing -local path = slash * Cs((escaped+(1- qmark-hash))^0) + nothing -local query = qmark * Cs((escaped+(1- hash))^0) + nothing -local fragment = hash * Cs((escaped+(1- endofstring))^0) + nothing +local scheme = schemestr * colon + nothing +local authority = slash * slash * authoritystr + nothing +local path = slash * pathstr + nothing +local query = qmark * querystr + nothing +local fragment = hash * fragmentstr + nothing local validurl = scheme * authority * path * query * fragment local parser = Ct(validurl) @@ -2963,11 +2984,14 @@ local function split(str) return (type(str) == "string" and lpegmatch(parser,str)) or str end +local isscheme = schemestr * colon * slash * slash -- this test also assumes authority + local function hasscheme(str) - local scheme = lpegmatch(scheme,str) -- at least one character - return scheme and scheme ~= "" + local scheme = lpegmatch(isscheme,str) -- at least one character + return scheme ~= "" and scheme or false end + -- todo: cache them local rootletter = R("az","AZ") @@ -4815,6 +4839,23 @@ function inspect(i) -- global function end end +-- from the lua book: + +function traceback() + local level = 1 + while true do + local info = debug.getinfo(level, "Sl") + if not info then + break + elseif info.what == "C" then + print(format("%3i : C function",level)) + else + print(format("%3i : [%s]:%d",level,info.short_src,info.currentline)) + end + level = level + 1 + end +end + end -- of closure @@ -6821,9 +6862,7 @@ local function handle_any_entity(str) if resolve then local a = acache[str] -- per instance ! todo if not a then -print(">1",str,a) a = resolve_predefined and predefined_simplified[str] -print(">2",str,a) if a then -- one of the predefined elseif type(resolve) == "function" then @@ -6831,7 +6870,6 @@ print(">2",str,a) else a = entities[str] end -print(">3",str,a) if a then if type(a) == "function" then if trace_entities then @@ -6839,9 +6877,7 @@ if type(a) == "function" then end a = a(str) or "" end -print(">4",str,a) a = lpegmatch(parsedentity,a) or a -print(">5",str,a) if trace_entities then report_xml("resolved entity &%s; -> %s (internal)",str,a) end @@ -10156,19 +10192,6 @@ local resolvers = resolvers -- all, when working on the main resolver code, I don't want to scroll -- past this every time. See data-obs.lua for the gsub variant. 
--- {a,b,c,d} --- a,b,c/{p,q,r},d --- a,b,c/{p,q,r}/d/{x,y,z}// --- a,b,c/{p,q/{x,y,z},r},d/{p,q,r} --- a,b,c/{p,q/{x,y,z},r},d/{p,q,r} --- a{b,c}{d,e}f --- {a,b,c,d} --- {a,b,c/{p,q,r},d} --- {a,b,c/{p,q,r}/d/{x,y,z}//} --- {a,b,c/{p,q/{x,y,z}},d/{p,q,r}} --- {a,b,c/{p,q/{x,y,z},w}v,d/{p,q,r}} --- {$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,.local,}/web2c} - local function f_first(a,b) local t, n = { }, 0 for s in gmatch(b,"[^,]+") do @@ -10260,6 +10283,19 @@ function resolvers.expandedpathfromlist(pathlist) return newlist end +-- {a,b,c,d} +-- a,b,c/{p,q,r},d +-- a,b,c/{p,q,r}/d/{x,y,z}// +-- a,b,c/{p,q/{x,y,z},r},d/{p,q,r} +-- a,b,c/{p,q/{x,y,z},r},d/{p,q,r} +-- a{b,c}{d,e}f +-- {a,b,c,d} +-- {a,b,c/{p,q,r},d} +-- {a,b,c/{p,q,r}/d/{x,y,z}//} +-- {a,b,c/{p,q/{x,y,z}},d/{p,q,r}} +-- {a,b,c/{p,q/{x,y,z},w}v,d/{p,q,r}} +-- {$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,.local,}/web2c} + local cleanup = lpeg.replacer { { "!" , "" }, { "\\" , "/" }, @@ -10427,9 +10463,21 @@ local function scan(files,spec,path,n,m,r) return files, n, m, r end -function resolvers.scanfiles(path,branch) +local cache = { } + +function resolvers.scanfiles(path,branch,usecache) + statistics.starttiming(cache) + if usecache then + local files = cache[path] + if files then + if trace_locating then + report_expansions("using caches scan of path '%s', branch '%s'",path,branch or path) + end + return files + end + end if trace_locating then - report_expansions("scanning path '%s', branch '%s'",path, branch or path) + report_expansions("scanning path '%s', branch '%s'",path,branch or path) end local realpath = resolvers.resolve(path) -- no shortcut local files, n, m, r = scan({ },realpath .. '/',"",0,0,0) @@ -10440,9 +10488,18 @@ function resolvers.scanfiles(path,branch) if trace_locating then report_expansions("%s files found on %s directories with %s uppercase remappings",n,m,r) end + if usecache then + cache[path] = files + end + statistics.stoptiming(cache) return files end +function resolvers.scantime() + return statistics.elapsedtime(cache) +end + + end -- of closure @@ -10831,7 +10888,7 @@ local function identify() end elseif not writable and caches.force then local cacheparent = file.dirname(cachepath) - if file.is_writable(cacheparent) then + if file.is_writable(cacheparent) and true then -- we go on anyway (needed for mojca's kind of paths) if not caches.ask or io.ask(format("\nShould I create the cache path %s?",cachepath), "no", { "yes", "no" }) == "yes" then mkdirs(cachepath) if isdir(cachepath) and file.is_writable(cachepath) then @@ -11288,7 +11345,7 @@ if not modules then modules = { } end modules ['data-res'] = { -- instance but for practical purposes we now avoid this and use a -- instance variable. We always have one instance active (sort of global). 
--- todo: cache:/// home:/// +-- todo: cache:/// home:/// selfautoparent:/// (sometime end 2012) local format, gsub, find, lower, upper, match, gmatch = string.format, string.gsub, string.find, string.lower, string.upper, string.match, string.gmatch local concat, insert, sortedkeys = table.concat, table.insert, table.sortedkeys @@ -11641,19 +11698,19 @@ local function load_configuration_files() if blob then local setups = instance.setups local data = blob() -local parent = data and data.parent -if parent then - local filename = filejoin(pathname,parent) - local realname = resolvers.resolve(filename) -- no shortcut - local blob = loadfile(realname) - if blob then - local parentdata = blob() - if parentdata then - report_resolving("loading configuration file '%s'",filename) - data = table.merged(parentdata,data) - end - end -end + local parent = data and data.parent + if parent then + local filename = filejoin(pathname,parent) + local realname = resolvers.resolve(filename) -- no shortcut + local blob = loadfile(realname) + if blob then + local parentdata = blob() + if parentdata then + report_resolving("loading configuration file '%s'",filename) + data = table.merged(parentdata,data) + end + end + end data = data and data.content if data then if trace_locating then @@ -11743,14 +11800,14 @@ local function locate_file_databases() local runtime = stripped == path path = resolvers.cleanpath(path) local spec = resolvers.splitmethod(stripped) - if spec.scheme == "cache" or spec.scheme == "file" then - stripped = spec.path - elseif runtime and (spec.noscheme or spec.scheme == "file") then + if runtime and (spec.noscheme or spec.scheme == "file") then stripped = "tree:///" .. stripped + elseif spec.scheme == "cache" or spec.scheme == "file" then + stripped = spec.path end if trace_locating then if runtime then - report_resolving("locating list of '%s' (runtime)",path) + report_resolving("locating list of '%s' (runtime) (%s)",path,stripped) else report_resolving("locating list of '%s' (cached)",path) end @@ -12162,287 +12219,402 @@ end local preparetreepattern = Cs((P(".")/"%%." + P("-")/"%%-" + P(1))^0 * Cc("$")) --- this one is split in smaller functions but it needs testing +-- -- -- begin of main file search routing -- -- -- needs checking as previous has been patched -local function collect_instance_files(filename,askedformat,allresults) -- todo : plugin (scanners, checkers etc) - local result = { } - local stamp = nil - askedformat = askedformat or "" - filename = collapsepath(filename) - -- speed up / beware: format problem - if instance.remember and not allresults then - stamp = filename .. "--" .. 
askedformat - if instance.found[stamp] then - if trace_locating then - report_resolving("remembered file '%s'",filename) - end - resolvers.registerintrees(filename) -- for tracing used files - return instance.found[stamp] - end - end - if not dangerous[askedformat] then - if isreadable(filename) then - if trace_detail then - report_resolving("file '%s' found directly",filename) - end - if stamp then - instance.found[stamp] = { filename } - end - return { filename } +local collect_instance_files + +local function find_direct(filename,allresults) + if not dangerous[askedformat] and isreadable(filename) then + if trace_detail then + report_resolving("file '%s' found directly",filename) end + return { filename } end +end + +local function find_wildcard(filename,allresults) if find(filename,'%*') then if trace_locating then report_resolving("checking wildcard '%s'", filename) end - result = resolvers.findwildcardfiles(filename) -- we can use th elocal - elseif file.is_qualified_path(filename) then - if isreadable(filename) then - if trace_locating then - report_resolving("qualified name '%s'", filename) - end - result = { filename } - else - local forcedname, ok, suffix = "", false, fileextname(filename) - if suffix == "" then -- why - local format_suffixes = askedformat == "" and resolvers.defaultsuffixes or suffixes[askedformat] - if format_suffixes then - for i=1,#format_suffixes do - local s = format_suffixes[i] - forcedname = filename .. "." .. s - if isreadable(forcedname) then - if trace_locating then - report_resolving("no suffix, forcing format filetype '%s'", s) - end - result, ok = { forcedname }, true - break - end + return resolvers.findwildcardfiles(filename) -- we can use the local + end +end + +local function find_qualified(filename,allresults) -- this one will be split too + if not file.is_qualified_path(filename) then + return + end + if trace_locating then + report_resolving("checking qualified name '%s'", filename) + end + if isreadable(filename) then + if trace_detail then + report_resolving("qualified file '%s' found", filename) + end + return { filename } + end + if trace_detail then + report_resolving("locating qualified file '%s'", filename) + end + local forcedname, suffix = "", fileextname(filename) + if suffix == "" then -- why + local format_suffixes = askedformat == "" and resolvers.defaultsuffixes or suffixes[askedformat] + if format_suffixes then + for i=1,#format_suffixes do + local s = format_suffixes[i] + forcedname = filename .. "." .. s + if isreadable(forcedname) then + if trace_locating then + report_resolving("no suffix, forcing format filetype '%s'", s) end + return { forcedname } end end - if not ok and suffix ~= "" then - -- try to find in tree (no suffix manipulation), here we search for the - -- matching last part of the name - local basename = filebasename(filename) - local pattern = lpegmatch(preparetreepattern,filename) - -- messy .. to be sorted out - local savedformat = askedformat - local format = savedformat or "" - if format == "" then - askedformat = resolvers.formatofsuffix(suffix) - end - if not format then - askedformat = "othertextfiles" -- kind of everything, maybe texinput is better - end - -- - if basename ~= filename then - local resolved = collect_instance_files(basename,askedformat,allresults) - if #result == 0 then -- shouldn't this be resolved ? 
- local lowered = lower(basename) - if filename ~= lowered then - resolved = collect_instance_files(lowered,askedformat,allresults) - end - end - resolvers.format = savedformat - -- - for r=1,#resolved do - local rr = resolved[r] - if find(rr,pattern) then - result[#result+1], ok = rr, true - end + end + end + if suffix ~= "" then + -- try to find in tree (no suffix manipulation), here we search for the + -- matching last part of the name + local basename = filebasename(filename) + local pattern = lpegmatch(preparetreepattern,filename) + -- messy .. to be sorted out + local savedformat = askedformat + local format = savedformat or "" + if format == "" then + askedformat = resolvers.formatofsuffix(suffix) + end + if not format then + askedformat = "othertextfiles" -- kind of everything, maybe all + end + -- + if basename ~= filename then + local resolved = collect_instance_files(basename,askedformat,allresults) + if #resolved == 0 then + local lowered = lower(basename) + if filename ~= lowered then + resolved = collect_instance_files(lowered,askedformat,allresults) + end + end + resolvers.format = savedformat + -- + if #resolved > 0 then + local result = { } + for r=1,#resolved do + local rr = resolved[r] + if find(rr,pattern) then + result[#result+1] = rr end end - -- a real wildcard: - -- - -- if not ok then - -- local filelist = collect_files({basename}) - -- for f=1,#filelist do - -- local ff = filelist[f][3] or "" - -- if find(ff,pattern) then - -- result[#result+1], ok = ff, true - -- end - -- end - -- end - end - if not ok and trace_locating then - report_resolving("qualified name '%s'", filename) + if #result > 0 then + return result + end end end - else - -- search spec - local filetype, done, wantedfiles, ext = '', false, { }, fileextname(filename) - -- -- tricky as filename can be bla.1.2.3 - -- if not suffixmap[ext] then --- probably needs to be done elsewhere too - -- wantedfiles[#wantedfiles+1] = filename + -- a real wildcard: + -- + -- local filelist = collect_files({basename}) + -- result = { } + -- for f=1,#filelist do + -- local ff = filelist[f][3] or "" + -- if find(ff,pattern) then + -- result[#result+1], ok = ff, true + -- end -- end - wantedfiles[#wantedfiles+1] = filename - if askedformat == "" then - if ext == "" or not suffixmap[ext] then - local defaultsuffixes = resolvers.defaultsuffixes - for i=1,#defaultsuffixes do - local forcedname = filename .. '.' .. defaultsuffixes[i] - wantedfiles[#wantedfiles+1] = forcedname - filetype = resolvers.formatofsuffix(forcedname) - if trace_locating then - report_resolving("forcing filetype '%s'",filetype) - end - end - else - filetype = resolvers.formatofsuffix(filename) + -- if #result > 0 then + -- return result + -- end + end +end + +local function find_analyze(filename,askedformat,allresults) + local filetype, wantedfiles, ext = '', { }, fileextname(filename) + -- too tricky as filename can be bla.1.2.3: + -- + -- if not suffixmap[ext] then + -- wantedfiles[#wantedfiles+1] = filename + -- end + wantedfiles[#wantedfiles+1] = filename + if askedformat == "" then + if ext == "" or not suffixmap[ext] then + local defaultsuffixes = resolvers.defaultsuffixes + for i=1,#defaultsuffixes do + local forcedname = filename .. '.' .. 
defaultsuffixes[i] + wantedfiles[#wantedfiles+1] = forcedname + filetype = resolvers.formatofsuffix(forcedname) if trace_locating then - report_resolving("using suffix based filetype '%s'",filetype) + report_resolving("forcing filetype '%s'",filetype) end end else - if ext == "" or not suffixmap[ext] then - local format_suffixes = suffixes[askedformat] - if format_suffixes then - for i=1,#format_suffixes do - wantedfiles[#wantedfiles+1] = filename .. "." .. format_suffixes[i] - end - end - end - filetype = askedformat + filetype = resolvers.formatofsuffix(filename) if trace_locating then - report_resolving("using given filetype '%s'",filetype) + report_resolving("using suffix based filetype '%s'",filetype) end end - local typespec = resolvers.variableofformat(filetype) - local pathlist = resolvers.expandedpathlist(typespec) - if not pathlist or #pathlist == 0 then - -- no pathlist, access check only / todo == wildcard - if trace_detail then - report_resolving("checking filename '%s', filetype '%s', wanted files '%s'",filename, filetype or '?',concat(wantedfiles," | ")) - end - for k=1,#wantedfiles do - local fname = wantedfiles[k] - if fname and isreadable(fname) then - filename, done = fname, true - result[#result+1] = filejoin('.',fname) - break + else + if ext == "" or not suffixmap[ext] then + local format_suffixes = suffixes[askedformat] + if format_suffixes then + for i=1,#format_suffixes do + wantedfiles[#wantedfiles+1] = filename .. "." .. format_suffixes[i] end end - -- this is actually 'other text files' or 'any' or 'whatever' - local filelist = collect_files(wantedfiles) - local fl = filelist and filelist[1] - if fl then - filename = fl[3] -- not local? - result[#result+1] = resolvers.resolve(filename) - done = true + end + filetype = askedformat + if trace_locating then + report_resolving("using given filetype '%s'",filetype) + end + end + return filetype, wantedfiles +end + +local function check_subpath(fname) + if isreadable(fname) then + if trace_detail then + report_resolving("found '%s' by deep scanning",fname) + end + return fname + end +end + +local function find_intree(filename,filetype,wantedfiles,allresults) + local typespec = resolvers.variableofformat(filetype) + local pathlist = resolvers.expandedpathlist(typespec) + if pathlist and #pathlist > 0 then + -- list search + local filelist = collect_files(wantedfiles) + local dirlist = { } + if filelist then + for i=1,#filelist do + dirlist[i] = filedirname(filelist[i][3]) .. "/" -- was [2] .. gamble end - else - -- list search - local filelist = collect_files(wantedfiles) - local dirlist = { } + end + if trace_detail then + report_resolving("checking filename '%s'",filename) + end + local result = { } + for k=1,#pathlist do + local path = pathlist[k] + local pathname = lpegmatch(inhibitstripper,path) + local doscan = path == pathname -- no ^!! + if not find (pathname,'//$') then + doscan = false -- we check directly on the path + end + local done = false + -- using file list if filelist then - for i=1,#filelist do - dirlist[i] = filedirname(filelist[i][3]) .. "/" -- was [2] .. gamble - end - end - if trace_detail then - report_resolving("checking filename '%s'",filename) - end - for k=1,#pathlist do - local path = pathlist[k] - local pathname = lpegmatch(inhibitstripper,path) - local doscan = path == pathname -- no ^!! 
- done = false - -- using file list - if filelist then - -- compare list entries with permitted pattern -- /xx /xx// - local expression = makepathexpression(pathname) - if trace_detail then - report_resolving("using pattern '%s' for path '%s'",expression,pathname) - end - for k=1,#filelist do - local fl = filelist[k] - local f = fl[2] - local d = dirlist[k] - if find(d,expression) then - -- todo, test for readable - result[#result+1] = resolvers.resolve(fl[3]) -- no shortcut - done = true - if allresults then - if trace_detail then - report_resolving("match to '%s' in hash for file '%s' and path '%s', continue scanning",expression,f,d) - end - else - if trace_detail then - report_resolving("match to '%s' in hash for file '%s' and path '%s', quit scanning",expression,f,d) - end - break + -- compare list entries with permitted pattern -- /xx /xx// + local expression = makepathexpression(pathname) + if trace_detail then + report_resolving("using pattern '%s' for path '%s'",expression,pathname) + end + for k=1,#filelist do + local fl = filelist[k] + local f = fl[2] + local d = dirlist[k] + if find(d,expression) then + -- todo, test for readable + result[#result+1] = resolvers.resolve(fl[3]) -- no shortcut + done = true + if allresults then + if trace_detail then + report_resolving("match to '%s' in hash for file '%s' and path '%s', continue scanning",expression,f,d) + end + else + if trace_detail then + report_resolving("match to '%s' in hash for file '%s' and path '%s', quit scanning",expression,f,d) end - elseif trace_detail then - report_resolving("no match to '%s' in hash for file '%s' and path '%s'",expression,f,d) + break end + elseif trace_detail then + report_resolving("no match to '%s' in hash for file '%s' and path '%s'",expression,f,d) end end - if not done and doscan then - -- check if on disk / unchecked / does not work at all / also zips - local scheme = url.hasscheme(pathname) - if not scheme or scheme == "file" then - local pname = gsub(pathname,"%.%*$",'') - if not find(pname,"%*") then - local ppname = gsub(pname,"/+$","") - if can_be_dir(ppname) then + end + if not done then + pathname = gsub(pathname,"/+$","") + pathname = resolvers.resolve(pathname) + local scheme = url.hasscheme(pathname) + if not scheme or scheme == "file" then + local pname = gsub(pathname,"%.%*$",'') + if not find(pname,"%*") then + if can_be_dir(pname) then + -- quick root scan first + for k=1,#wantedfiles do + local w = wantedfiles[k] + local fname = check_subpath(filejoin(pname,w)) + if fname then + result[#result+1] = fname + done = true + if not allresults then + break + end + end + end + if not done and doscan then + -- collect files in path (and cache the result) + local files = resolvers.scanfiles(pname,false,true) for k=1,#wantedfiles do local w = wantedfiles[k] - local fname = filejoin(ppname,w) - if isreadable(fname) then - if trace_detail then - report_resolving("found '%s' by scanning",fname) + local subpath = files[w] + if not subpath or subpath == "" then + -- rootscan already done + elseif type(subpath) == "string" then + local fname = check_subpath(filejoin(ppname,subpath,w)) + if fname then + result[#result+1] = fname + done = true + if not allresults then + break + end + end + else + for i=1,#subpath do + local sp = subpath[i] + if sp == "" then + -- roottest already done + else + local fname = check_subpath(filejoin(ppname,sp,w)) + if fname then + result[#result+1] = fname + done = true + if not allresults then + break + end + end + end + end + if done and not allresults then + 
break end - result[#result+1] = fname - done = true - if not allresults then break end end end - else - -- no access needed for non existing path, speedup (esp in large tree with lots of fake) end end + else + -- no access needed for non existing path, speedup (esp in large tree with lots of fake) end end - if not done and doscan then - -- todo: slow path scanning ... although we now have tree:// supported in $TEXMF - end - if done and not allresults then break end + end + -- todo recursive scanning + if done and not allresults then + return #result > 0 and result end end end - for k=1,#result do - local rk = collapsepath(result[k]) - result[k] = rk - resolvers.registerintrees(rk) -- for tracing used files +end + +local function find_onpath(filename,filetype,wantedfiles,allresults) + if trace_detail then + report_resolving("checking filename '%s', filetype '%s', wanted files '%s'",filename, filetype or '?',concat(wantedfiles," | ")) end - if stamp then - instance.found[stamp] = result + local result = { } + for k=1,#wantedfiles do + local fname = wantedfiles[k] + if fname and isreadable(fname) then + filename = fname + result[#result+1] = filejoin('.',fname) + if not allresults then + break + end + end end - return result + return #result > 0 and result end --- -- -- begin of main file search routing -- -- -- - - - - - - - - +local function find_otherwise(filename,filetype,wantedfiles,allresults) -- other text files | any | whatever + local filelist = collect_files(wantedfiles) + local fl = filelist and filelist[1] + if fl then + return { resolvers.resolve(fl[3]) } -- filename + end +end +collect_instance_files = function(filename,askedformat,allresults) -- uses nested + local result, stamp, filetype, wantedfiles + askedformat = askedformat or "" + filename = collapsepath(filename) + if allresults then + -- no need for caching, only used for tracing + local filetype, wantedfiles = find_analyze(filename,askedformat) + local results = { + { method = "direct", list = find_direct (filename,stamp,true) }, + { method = "wildcard", list = find_wildcard (filename,true) }, + { method = "qualified", list = find_qualified(filename,true) }, + { method = "in tree", list = find_intree (filename,filetype,wantedfiles,true) }, + { method = "on path", list = find_onpath (filename,filetype,wantedfiles,true) }, + { method = "otherwise", list = find_otherwise(filename,filetype,wantedfiles,true) }, + } + local result, status, done = { }, { }, { } + for k, r in next, results do + local method, list = r.method, r.list + if list then + for i=1,#list do + local c = collapsepath(list[i]) + if not done[c] then + result[#result+1] = c + done[c] = true + end + status[#status+1] = format("%-10s: %s",method,c) + end + end + end + if trace_detail then + report_resolving("lookup status: %s",table.serialize(status,filename)) + end + return result, status + else + if instance.remember then + stamp = format("%s--%s", filename, askedformat) + result = stamp and instance.found[stamp] + if result then + if trace_locating then + report_resolving("remembered file '%s'",filename) + end + return result + end + end + result = find_direct (filename,stamp) or + find_wildcard (filename) or + find_qualified(filename) + if not result then + local filetype, wantedfiles = find_analyze(filename,askedformat) + result = find_intree (filename,filetype,wantedfiles) or + find_onpath (filename,filetype,wantedfiles) or + find_otherwise(filename,filetype,wantedfiles) + end + if result then + for k=1,#result do + local rk = collapsepath(result[k]) + 
result[k] = rk + resolvers.registerintrees(rk) -- for tracing used files + end + else + result = { } -- maybe false + end + if stamp then + if trace_locating then + report_resolving("remembering file '%s'",filename) + end + instance.found[stamp] = result + end + return result + end +end -- -- -- end of main file search routing -- -- -- + local function findfiles(filename,filetype,allresults) - local result = collect_instance_files(filename,filetype or "",allresults) - if #result == 0 then + local result, status = collect_instance_files(filename,filetype or "",allresults) + if not result or #result == 0 then local lowered = lower(filename) if filename ~= lowered then - return collect_instance_files(lowered,filetype or "",allresults) + result, status = collect_instance_files(lowered,filetype or "",allresults) end end - return result + return result or { }, status end function resolvers.findfiles(filename,filetype) @@ -12603,6 +12775,10 @@ function resolvers.load(option) return files and next(files) and true end +function resolvers.loadtime() + return statistics.elapsedtime(instance) +end + local function report(str) if trace_locating then report_resolving(str) -- has already verbose @@ -12616,6 +12792,9 @@ function resolvers.dowithfilesandreport(command, files, ...) -- will move if trace_locating then report('') -- ? end + if type(files) == "string" then + files = { files } + end for f=1,#files do local file = files[f] local result = command(file,...) @@ -13031,7 +13210,7 @@ end function generators.file(specification) local path = specification.filename - local content = resolvers.scanfiles(path) + local content = resolvers.scanfiles(path,false,true) -- scan once resolvers.registerfilehash(path,content,true) end @@ -13654,9 +13833,10 @@ end function resolvers.locators.tree(specification) local name = specification.filename - if name ~= '' and lfs.isdir(name) then + local realname = resolvers.resolve(name) -- no shortcut + if realname and realname ~= '' and lfs.isdir(realname) then if trace_locating then - report_trees("locator '%s' found",name) + report_trees("locator '%s' found",realname) end resolvers.appendhash('tree',name,false) -- don't cache elseif trace_locating then @@ -13670,6 +13850,8 @@ function resolvers.hashers.tree(specification) report_trees("analysing '%s'",name) end resolvers.methodhandler("hashers",name) + + resolvers.generators.file(specification) end resolvers.concatinators.tree = resolvers.concatinators.file @@ -14596,7 +14778,7 @@ local helpinfo = [[ --resolve resolve prefixed arguments --ctxlua run internally (using preloaded libs) --internal run script using built in libraries (same as --ctxlua) ---locate locate given filename +--locate locate given filename in database (default) or system (--first --all --detail) --autotree use texmf tree cf. env 'texmfstart_tree' or 'texmfstarttree' --tree=pathtotree use given texmf tree (default file: 'setuptex.tmf') @@ -14789,6 +14971,10 @@ function runners.execute_script(fullname,internal,nosplit) environment.ownscript = result dofile(result) else + result = string.quoted(string.unquoted(result)) + -- if string.match(result,' ') and not string.match(result,"^\".*\"$") then + -- result = '"' .. result .. '"' + -- end local binary = runners.applications[file.extname(result)] if binary and binary ~= "" then result = binary .. " " .. result @@ -14899,10 +15085,24 @@ function runners.resolve_string(filename) end end -function runners.locate_file(filename) - -- differs from texmfstart where locate appends .com .exe .bat ... 
todo +-- differs from texmfstart where locate appends .com .exe .bat ... todo + +function runners.locate_file(filename) -- was given file but only searches in tree if filename and filename ~= "" then - runners.report_location(resolvers.findgivenfile(filename)) + if environment.argument("first") then + runners.report_location(resolvers.findfile(filename)) + -- resolvers.dowithfilesandreport(resolvers.findfile,filename) + elseif environment.argument("all") then + local result, status = resolvers.findfiles(filename) + if status and environment.argument("detail") then + runners.report_location(status) + else + runners.report_location(result) + end + else + runners.report_location(resolvers.findgivenfile(filename)) + -- resolvers.dowithfilesandreport(resolvers.findgivenfile,filename) + end end end @@ -14911,12 +15111,12 @@ function runners.locate_platform() end function runners.report_location(result) - if e_verbose then - reportline() - if result and result ~= "" then - report(result) - else - report("not found") + if type(result) == "table" then + for i=1,#result do + if i > 1 then + io.write("\n") + end + io.write(result[i]) end else io.write(result) @@ -14974,8 +15174,8 @@ function resolvers.launch(str) end function runners.launch_file(filename) - instance.allresults = true trackers.enable("resolvers.locating") + local allresults = environment.arguments["all"] local pattern = environment.arguments["pattern"] if not pattern or pattern == "" then pattern = filename @@ -14983,15 +15183,15 @@ function runners.launch_file(filename) if not pattern or pattern == "" then report("provide name or --pattern=") else - local t = resolvers.findfiles(pattern) + local t = resolvers.findfiles(pattern,nil,allresults) if not t or #t == 0 then - t = resolvers.findfiles("*/" .. pattern) + t = resolvers.findfiles("*/" .. pattern,nil,allresults) end if not t or #t == 0 then - t = resolvers.findfiles("*/" .. pattern .. "*") + t = resolvers.findfiles("*/" .. pattern .. 
"*",nil,allresults) end if t and #t > 0 then - if environment.arguments["all"] then + if allresults then for _, v in pairs(t) do report("launching %s", v) resolvers.launch(v) @@ -15367,7 +15567,7 @@ elseif environment.argument("resolve") then elseif environment.argument("locate") then - -- locate file + -- locate file (only database) runners.loadbase() runners.locate_file(filename) @@ -15410,15 +15610,16 @@ elseif environment.argument("find-file") then -- luatools: runners.execute_ctx_script("mtx-base","--find-file",filename) resolvers.load() + local e_all = environment.argument("all") local e_pattern = environment.argument("pattern") - local e_format = environment.arguments("format") + local e_format = environment.argument("format") + local finder = e_all and resolvers.findfiles or resolvers.findfile if not e_pattern then runners.register_arguments(filename) environment.initializearguments(environment.arguments_after) - resolvers.dowithfilesandreport(resolvers.findfiles,environment.files,e_format) + resolvers.dowithfilesandreport(finder,environment.files,e_format) elseif type(e_pattern) == "string" then - instance.allresults = true -- brrrr - resolvers.dowithfilesandreport(resolvers.findfiles,{ e_pattern }, e_format) + resolvers.dowithfilesandreport(finder,{ e_pattern },e_format) end elseif environment.argument("find-path") then @@ -15499,6 +15700,8 @@ elseif environment.argument("generate") then trackers.enable("resolvers.locating") resolvers.load() + e_verbose = true + elseif environment.argument("make") or environment.argument("ini") or environment.argument("compile") then -- luatools: runners.execute_ctx_script("mtx-base","--make",filename) diff --git a/tex/context/base/back-exp.lua b/tex/context/base/back-exp.lua index 2f1377e98..543239126 100644 --- a/tex/context/base/back-exp.lua +++ b/tex/context/base/back-exp.lua @@ -11,7 +11,6 @@ if not modules then modules = { } end modules ['back-exp'] = { -- footnotes -> css 3 -- bodyfont -> in styles.css -- delimited -> left/right string (needs marking) --- depth -> can go away (autodepth now and not used) -- Because we need to look ahead we now always build a tree (this was optional in -- the beginning). The extra overhead in the frontend is neglectable. @@ -124,7 +123,7 @@ local treestack = { } local nesting = { } local currentdepth = 0 -local tree = { data = { }, depth = 0, fulltag == "root" } -- root +local tree = { data = { }, fulltag == "root" } -- root local treeroot = tree local treehash = { } local extras = { } @@ -137,6 +136,7 @@ local restart = false local specialspaces = { [0x20] = " " } -- for conversion local somespace = { [0x20] = true, [" "] = true } -- for testing local entities = { ["&"] = "&", [">"] = ">", ["<"] = "<" } +local attribentities = { ["&"] = "&", [">"] = ">", ["<"] = "<", ['"'] = "quot;" } local defaultnature = "mixed" -- "inline" @@ -151,7 +151,7 @@ end) setmetatableindex(specialspaces, function(t,k) local v = utfchar(k) t[k] = v - entities[v] = format("&#%X;",k) + entities[v] = format("&#x%X;",k) somespace[k] = true somespace[v] = true return v @@ -198,6 +198,14 @@ setmetatableindex(namespaced, function(t,k) end end) +local function attribute(key,value) + if value and value ~= "" then + return format(' %s="%s"',key,gsub(value,".",attribentities)) + else + return "" + end +end + -- local P, C, Cc = lpeg.P, lpeg.C, lpeg.Cc -- -- local dash, colon = P("-"), P(":") @@ -342,7 +350,6 @@ local function makebreaknode(node) -- maybe no fulltag tg = "break", fulltag = "break-" .. 
nofbreaks, n = nofbreaks, - depth = node.depth, element = "break", nature = "display", -- attributes = breakattributes, @@ -408,7 +415,7 @@ function extras.itemgroup(result,element,detail,n,fulltag,di) end local v = hash.symbol if v then - result[#result+1] = format(" symbol='%s'",v) + result[#result+1] = attribute("symbol",v) end end end @@ -508,8 +515,11 @@ end function extras.image(result,element,detail,n,fulltag,di) local data = usedimages.image[fulltag] if data then - result[#result+1] = format(" id='%s' name='%s' page='%s' width='%s' height='%s'", - fulltag,data.name,data.page,data.width,data.height) + result[#result+1] = attribute("name",data.name) + if tonumber(data.page) > 1 then + result[#result+1] = format("page='%s'",data.page) + end + result[#result+1] = format("id='%s' width='%s' height='%s'",fulltag,data.width,data.height) end end @@ -537,27 +547,27 @@ local specials = { } evaluators.inner = function(result,var) local inner = var.inner if inner then - result[#result+1] = format(" location='%s'",inner) + result[#result+1] = attribute("location",inner) end end evaluators.outer = function(result,var) local file, url = references.checkedfileorurl(var.outer,var.outer) if url then - result[#result+1] = format(" url='%s'",url) + result[#result+1] = attribute("url",url) elseif file then - result[#result+1] = format(" file='%s'",file) + result[#result+1] = attribute("file",file) end end evaluators["outer with inner"] = function(result,var) local file = references.checkedfile(var.f) if file then - result[#result+1] = format(" file='%s'",file) + result[#result+1] = attribute("file",file) end local inner = var.inner if inner then - result[#result+1] = format(" location='%s'",inner) + result[#result+1] = attribute("location",inner) end end @@ -575,23 +585,23 @@ evaluators["special operation with arguments"] = evaluators.special function specials.url(result,var) local url = references.checkedurl(var.operation) if url then - result[#result+1] = format(" url='%s'",url) + result[#result+1] = attribute("url",url) end end function specials.file(result,var) local file = references.checkedfile(var.operation) if file then - result[#result+1] = format(" file='%s'",file) + result[#result+1] = attribute("file",file) end end function specials.fileorurl(result,var) local file, url = references.checkedfileorurl(var.operation,var.operation) if url then - result[#result+1] = format(" url='%s'",url) + result[#result+1] = attribute("url",url) elseif file then - result[#result+1] = format(" file='%s'",file) + result[#result+1] = attribute("file",file) end end @@ -826,7 +836,6 @@ local function checkmath(root) -- we can provide utf.toentities as an option di = { element = "maction", nature = "display", - depth = di.depth, attributes = { actiontype = detail }, data = { di }, n = 0, @@ -867,7 +876,6 @@ local function checkmath(root) -- we can provide utf.toentities as an option -- data = { utfchar(0x2061) }, data = { "⁡" }, nature = "mixed", - depth = di.depth, } } elseif automathapply then -- make function @@ -893,7 +901,6 @@ local function checkmath(root) -- we can provide utf.toentities as an option -- data = { utfchar(0x2061) }, data = { "⁡" }, nature = "mixed", - depth = di.depth, } } end @@ -1487,7 +1494,6 @@ local function push(fulltag,depth) fulltag = fulltag, detail = detail, n = tonumber(n), -- more efficient - depth = depth, element = element, nature = nature, data = { }, @@ -2108,7 +2114,7 @@ local function stopexport(v) report_export("saving css image definitions in '%s",imagefilename) 
io.savedata(imagefilename,allusedimages(xmlfile)) -- - report_export("saving css style definitions in '%s",cssfile) + report_export("saving css style definitions in '%s",stylefilename) io.savedata(stylefilename,allusedstyles(xmlfile)) -- report_export("saving css template in '%s",templatefilename) diff --git a/tex/context/base/char-ini.lua b/tex/context/base/char-ini.lua index c7a5d66a3..d0da9f592 100644 --- a/tex/context/base/char-ini.lua +++ b/tex/context/base/char-ini.lua @@ -503,7 +503,7 @@ end local temphack = tohash { 0x00A0, - 0x2000, 0x2001, 0x2002, 0x2003, 0x2004, 0x2005, 0x2006, 0x2007, 0x2008, 0x2009, 0x200A, 0x200B, + 0x2000, 0x2001, 0x2002, 0x2003, 0x2004, 0x2005, 0x2006, 0x2007, 0x2008, 0x2009, 0x200A, 0x200B, 0x200C, 0x200D, 0x202F, 0x205F, -- 0xFEFF, diff --git a/tex/context/base/chem-str.mkiv b/tex/context/base/chem-str.mkiv index 0f8044119..6b460b513 100644 --- a/tex/context/base/chem-str.mkiv +++ b/tex/context/base/chem-str.mkiv @@ -204,7 +204,9 @@ \appendtoks \edef\chemicalbodyfont{\chemicalparameter\c!bodyfont}% - \doifnot\chemicalbodyfont\fontbody{\switchtobodyfont[\chemicalbodyfont]}% \fontbody is not expanded (yet) + \ifx\chemicalbodyfont\empty + \switchtobodyfont[\chemicalbodyfont]% + \fi \getvalue{\??cm:\c!size:\chemicalparameter\c!size}% % \to \everystructurechemical \to \everychemical @@ -269,6 +271,8 @@ \mathematics{#1% {\strut\hbox \!!spread 2em{\hss\ctxlua{chemicals.inline(\!!bs#2\!!es)}\hss}}% {\strut\hbox \!!spread 2em{\hss\ctxlua{chemicals.inline(\!!bs#3\!!es)}\hss}}}% +% {\strut\hbox \!!spread 2em{\hss#2\hss}}% +% {\strut\hbox \!!spread 2em{\hss#3\hss}}}% \enspace} % special macros (probably needs some more work) @@ -278,7 +282,7 @@ \setbox0\hbox{\tx\setstrut\strut#3}% \setbox2\hbox{\setstrut\strut\molecule{#4}}% \setbox0\hbox{\raise\dimexpr\dp0+\ht2\relax\hbox to \wd2{#1\box0#2}}% - \smashbox0 +% no: \smashbox0 \hbox{\box0\box2}% \endgroup}% @@ -287,7 +291,7 @@ \setbox0\hbox{\tx\setstrut\strut#3}% \setbox2\hbox{\setstrut\strut#4}% \setbox0\hbox{\lower\dimexpr\dp2+\ht0\relax\hbox to \wd2{#1\box0#2}}% - \smashbox0 +% no: \smashbox0 \hbox{\box0\box2}% \endgroup}% @@ -337,7 +341,7 @@ \let\chemicalsmashedright \chemicalrightcentered \unexpanded\def\chemicaloxidation#1#2#3% - {\chemicaltop{\txx\ifcase#2\relax0\else#1\uppercase\expandafter{\romannumeral#2}\fi}{#3}} + {\chemicaltop{\txx\ifcase#2\relax0\else#1\convertnumber{I}{#2}\fi}{#3}} \unexpanded\def\chemicaloxidationplus {\dotriplegroupempty\chemicaloxidation{\textplus }} % {} needed! \unexpanded\def\chemicaloxidationminus{\dotriplegroupempty\chemicaloxidation{\textminus}} % {} needed! 
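Editorial note on the back-exp.lua hunks a little further up: the repeated format(" key='%s'",...) calls are replaced by a small attribute(key,value) helper that passes the value through a character map (attribentities) before it is written into the exported XML. The sketch below shows that escaping idea in isolation; the entity strings are an assumption here (the rendered diff shows them decoded), so the standard XML escapes are used.

local format, gsub = string.format, string.gsub

-- Assumed entity map (the usual XML escapes); the keys mirror the
-- attribentities table added in back-exp.lua.
local attribentities = {
    ["&"] = "&amp;",
    ["<"] = "&lt;",
    [">"] = "&gt;",
    ['"'] = "&quot;",
}

-- gsub with a table replacement keeps any character that has no entry,
-- so only markup-significant characters are rewritten.
local function attribute(key, value)
    if value and value ~= "" then
        return format(' %s="%s"', key, gsub(value, ".", attribentities))
    else
        return ""
    end
end

print(attribute("file", 'foo & "bar".tex'))
-- prints:  file="foo &amp; &quot;bar&quot;.tex"
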
@@ -487,27 +491,51 @@ \unexpanded\def\formulachemical {\relax\dotriplegroupempty\doformulachemical} +% \def\doformulachemical#1#2#3% we could do hboxes and measure +% {\ifthirdargument +% \doifelsenothing{#2}\noformulachemicaltop{\doformulachemicaltop{#2}}% +% \doifelsenothing{#3}\noformulachemicalbot{\doformulachemicalbot{#3}}% +% \else\ifsecondargument +% \noformulachemicaltop +% \doifelsenothing{#2}\noformulachemicalbot{\doformulachemicalbot{#2}}% +% \else +% \noformulachemicaltop +% \noformulachemicalbot +% \fi\fi +% \formulachemicalmid\expandafter{\the\formulachemicalmid\dodochemicalformulamid{#1}&}} +% +% \def\dodochemicalformulamid#1% +% {\ifcsname\??cm::\detokenize{#1}\endcsname +% \csname\??cm::\detokenize{#1}\expandafter\endcsname{}{}% +% \else +% \molecule{#1}{}{}% +% \fi} + +\def\domidformulachemical#1% + {\csname\??cm::\detokenize{#1}\endcsname} + \def\doformulachemical#1#2#3% we could do hboxes and measure - {\ifthirdargument - \doifelsenothing{#2}\noformulachemicaltop{\doformulachemicaltop{#2}}% - \doifelsenothing{#3}\noformulachemicalbot{\doformulachemicalbot{#3}}% - \else\ifsecondargument - \noformulachemicaltop - \doifelsenothing{#2}\noformulachemicalbot{\doformulachemicalbot{#2}}% + {\ifcsname\??cm::\detokenize{#1}\endcsname + \formulachemicalmid\expandafter{\the\formulachemicalmid\domidformulachemical{#1}{#2}{#3}}% \else - \noformulachemicaltop - \noformulachemicalbot - \fi\fi - \formulachemicalmid\expandafter{\the\formulachemicalmid\dodochemicalformulamid{#1}&}} + \ifthirdargument + \doifelsenothing{#2}\noformulachemicaltop{\doformulachemicaltop{#2}}% + \doifelsenothing{#3}\noformulachemicalbot{\doformulachemicalbot{#3}}% + \else\ifsecondargument + \noformulachemicaltop + \doifelsenothing{#2}\noformulachemicalbot{\doformulachemicalbot{#2}}% + \else + \noformulachemicaltop + \noformulachemicalbot + \fi\fi + \formulachemicalmid\expandafter{\the\formulachemicalmid\molecule{#1}&}% + \fi} \def\noformulachemicaltop {\formulachemicaltop\expandafter{\the\formulachemicaltop&}} \def\noformulachemicalbot {\formulachemicalbot\expandafter{\the\formulachemicalbot&}} \def\doformulachemicaltop#1{\formulachemicaltop\expandafter{\the\formulachemicaltop\dodochemicalformulatop{#1}&}\settrue\formulachemicalhastop} \def\doformulachemicalbot#1{\formulachemicalbot\expandafter{\the\formulachemicalbot\dodochemicalformulabot{#1}&}\settrue\formulachemicalhasbot} -\def\dodochemicalformulamid#1% - {\ifcsname\??cm::\detokenize{#1}\endcsname\csname\??cm::\detokenize{#1}\expandafter\endcsname\else\molecule{#1}\fi{}{}} - \def\dodochemicalformulatop#1{\strut#1} \def\dodochemicalformulabot#1{\strut#1} @@ -527,7 +555,7 @@ \c!right=0, \c!top=0, \c!bottom=0, - \c!bodyfont=\the\bodyfontsize, + \c!bodyfont=, \c!scale=\v!medium, \c!size=\v!medium, \c!textsize=\v!big, diff --git a/tex/context/base/cldf-ini.lua b/tex/context/base/cldf-ini.lua index 593fdcd76..b2616b664 100644 --- a/tex/context/base/cldf-ini.lua +++ b/tex/context/base/cldf-ini.lua @@ -221,7 +221,10 @@ function context.viafile(data) -- and other catcode sensitive data if data and data ~= "" then local filename = resolvers.savers.byscheme("virtual","viafile",data) + -- somewhat slow, these regime changes (todo: wrap in one command) +--~ context.startregime { "utf" } context.input(filename) +--~ context.stopregime() end end diff --git a/tex/context/base/cont-new.mkii b/tex/context/base/cont-new.mkii index b53881d0a..89141ffae 100644 --- a/tex/context/base/cont-new.mkii +++ b/tex/context/base/cont-new.mkii @@ -11,7 +11,7 @@ %C therefore copyrighted 
by \PRAGMA. See mreadme.pdf for %C details. -\newcontextversion{2011.06.19 14:17} +\newcontextversion{2011.06.23 19:25} %D This file is loaded at runtime, thereby providing an %D excellent place for hacks, patches, extensions and new diff --git a/tex/context/base/cont-new.mkiv b/tex/context/base/cont-new.mkiv index 623c23eb6..b1f29d26e 100644 --- a/tex/context/base/cont-new.mkiv +++ b/tex/context/base/cont-new.mkiv @@ -11,7 +11,7 @@ %C therefore copyrighted by \PRAGMA. See mreadme.pdf for %C details. -\newcontextversion{2011.06.19 14:17} +\newcontextversion{2011.06.23 19:25} %D This file is loaded at runtime, thereby providing an %D excellent place for hacks, patches, extensions and new diff --git a/tex/context/base/context.mkii b/tex/context/base/context.mkii index 32d875b1c..5fb8b948c 100644 --- a/tex/context/base/context.mkii +++ b/tex/context/base/context.mkii @@ -20,7 +20,7 @@ %D your styles an modules. \edef\contextformat {\jobname} -\edef\contextversion{2011.06.19 14:17} +\edef\contextversion{2011.06.23 19:25} %D For those who want to use this: diff --git a/tex/context/base/context.mkiv b/tex/context/base/context.mkiv index e7598d892..f00b608af 100644 --- a/tex/context/base/context.mkiv +++ b/tex/context/base/context.mkiv @@ -20,7 +20,7 @@ %D your styles an modules. \edef\contextformat {\jobname} -\edef\contextversion{2011.06.19 14:17} +\edef\contextversion{2011.06.23 19:25} %D For those who want to use this: diff --git a/tex/context/base/core-def.mkiv b/tex/context/base/core-def.mkiv index 59fdceb85..b341dd91b 100644 --- a/tex/context/base/core-def.mkiv +++ b/tex/context/base/core-def.mkiv @@ -92,4 +92,10 @@ \synchronizelocallinespecs \to \everyswitchtobodyfont +% who knows + +% \appendtoks +% \resetcharacterspacing +% \to \everyhyphenatedurl + \protect \endinput diff --git a/tex/context/base/core-fnt.mkiv b/tex/context/base/core-fnt.mkiv index 76ce03f72..09ebcc15f 100644 --- a/tex/context/base/core-fnt.mkiv +++ b/tex/context/base/core-fnt.mkiv @@ -173,10 +173,15 @@ %D fuzzy, since some \type {\fontdimen}'s are involved to %D determine the optimal placement. 
+\def\highvfraction {0} +\def\lowvfraction {0} +\def\highlowvfraction{.1} +\def\highlowhfraction{.1} + \def\dodohighlow {\ifx\fontsize\empty \ifmmode - \ifnum\fam<0 \tx \else \holamathfont \fi + \mr \else \tx \fi @@ -187,17 +192,16 @@ \def\dohighlow#1#2#3#4#5#6% todo, named fontdimens tag {\dontleavehmode \bgroup - \scratchdimen\ifdim\fontexheight\textfont2=1ex #2\textfont2\else #3ex\fi - \advance\scratchdimen #4ex - \kern.1ex + \scratchdimen\dimexpr#3ex+#4ex\relax + \kern\highlowhfraction ex \setbox\scratchbox\hbox{#1\scratchdimen\hbox{\dodohighlow\dostarttagged#5\empty#6\dostoptagged}}% \ht\scratchbox\strutheight \dp\scratchbox\strutdepth \box\scratchbox \egroup} -\unexpanded\def\high{\dohighlow\raise\mathsupnormal{.86}{0}\t!sup} -\unexpanded\def\low {\dohighlow\lower\mathsubnormal{.48}{0}\t!sub} +\unexpanded\def\high{\dohighlow\raise\mathsupnormal{.86}\highvfraction\t!sup} +\unexpanded\def\low {\dohighlow\lower\mathsubnormal{.48}\lowvfraction \t!sub} \unexpanded\def\lohi {\dosingleempty\dolohi} @@ -208,9 +212,9 @@ \def\dolohi[#1]#2#3% {\dontleavehmode \hbox - {\dostarttagged\t!subsup - \setbox4\hbox{\dohighlow\lower\mathsubnormal{.48}{.1}\t!sub{#2}}% - \setbox6\hbox{\dohighlow\raise\mathsupnormal{.86}{.1}\t!sup{#3}}% + {\dostarttagged\t!subsup\empty + \setbox4\hbox{\dohighlow\lower\mathsubnormal{.48}\highlowvfraction\t!sub{#2}}% + \setbox6\hbox{\dohighlow\raise\mathsupnormal{.86}\highlowvfraction\t!sup{#3}}% \doif{#1}{\v!left} {\ifdim\wd4<\wd6 \setbox4\hbox to \wd6{\hss\box4}% diff --git a/tex/context/base/data-exp.lua b/tex/context/base/data-exp.lua index 86a287dd4..6083ffc2d 100644 --- a/tex/context/base/data-exp.lua +++ b/tex/context/base/data-exp.lua @@ -26,19 +26,6 @@ local resolvers = resolvers -- all, when working on the main resolver code, I don't want to scroll -- past this every time. See data-obs.lua for the gsub variant. --- {a,b,c,d} --- a,b,c/{p,q,r},d --- a,b,c/{p,q,r}/d/{x,y,z}// --- a,b,c/{p,q/{x,y,z},r},d/{p,q,r} --- a,b,c/{p,q/{x,y,z},r},d/{p,q,r} --- a{b,c}{d,e}f --- {a,b,c,d} --- {a,b,c/{p,q,r},d} --- {a,b,c/{p,q,r}/d/{x,y,z}//} --- {a,b,c/{p,q/{x,y,z}},d/{p,q,r}} --- {a,b,c/{p,q/{x,y,z},w}v,d/{p,q,r}} --- {$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,.local,}/web2c} - local function f_first(a,b) local t, n = { }, 0 for s in gmatch(b,"[^,]+") do @@ -130,6 +117,19 @@ function resolvers.expandedpathfromlist(pathlist) return newlist end +-- {a,b,c,d} +-- a,b,c/{p,q,r},d +-- a,b,c/{p,q,r}/d/{x,y,z}// +-- a,b,c/{p,q/{x,y,z},r},d/{p,q,r} +-- a,b,c/{p,q/{x,y,z},r},d/{p,q,r} +-- a{b,c}{d,e}f +-- {a,b,c,d} +-- {a,b,c/{p,q,r},d} +-- {a,b,c/{p,q,r}/d/{x,y,z}//} +-- {a,b,c/{p,q/{x,y,z}},d/{p,q,r}} +-- {a,b,c/{p,q/{x,y,z},w}v,d/{p,q,r}} +-- {$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,.local,}/web2c} + local cleanup = lpeg.replacer { { "!" 
, "" }, { "\\" , "/" }, @@ -311,9 +311,21 @@ local function scan(files,spec,path,n,m,r) return files, n, m, r end -function resolvers.scanfiles(path,branch) +local cache = { } + +function resolvers.scanfiles(path,branch,usecache) + statistics.starttiming(cache) + if usecache then + local files = cache[path] + if files then + if trace_locating then + report_expansions("using caches scan of path '%s', branch '%s'",path,branch or path) + end + return files + end + end if trace_locating then - report_expansions("scanning path '%s', branch '%s'",path, branch or path) + report_expansions("scanning path '%s', branch '%s'",path,branch or path) end local realpath = resolvers.resolve(path) -- no shortcut local files, n, m, r = scan({ },realpath .. '/',"",0,0,0) @@ -324,7 +336,16 @@ function resolvers.scanfiles(path,branch) if trace_locating then report_expansions("%s files found on %s directories with %s uppercase remappings",n,m,r) end + if usecache then + cache[path] = files + end + statistics.stoptiming(cache) return files end +function resolvers.scantime() + return statistics.elapsedtime(cache) +end + + --~ print(table.serialize(resolvers.scanfiles("t:/sources"))) diff --git a/tex/context/base/data-fil.lua b/tex/context/base/data-fil.lua index 0e82008f3..6eb29ac32 100644 --- a/tex/context/base/data-fil.lua +++ b/tex/context/base/data-fil.lua @@ -38,7 +38,8 @@ end function generators.file(specification) local path = specification.filename - local content = resolvers.scanfiles(path) + local content = resolvers.scanfiles(path,false,true) -- scan once +--~ inspect(content) resolvers.registerfilehash(path,content,true) end diff --git a/tex/context/base/data-res.lua b/tex/context/base/data-res.lua index 695534377..83d30c994 100644 --- a/tex/context/base/data-res.lua +++ b/tex/context/base/data-res.lua @@ -12,7 +12,7 @@ if not modules then modules = { } end modules ['data-res'] = { -- instance but for practical purposes we now avoid this and use a -- instance variable. We always have one instance active (sort of global). 
--- todo: cache:/// home:/// +-- todo: cache:/// home:/// selfautoparent:/// (sometime end 2012) local format, gsub, find, lower, upper, match, gmatch = string.format, string.gsub, string.find, string.lower, string.upper, string.match, string.gmatch local concat, insert, sortedkeys = table.concat, table.insert, table.sortedkeys @@ -373,19 +373,19 @@ local function load_configuration_files() if blob then local setups = instance.setups local data = blob() -local parent = data and data.parent -if parent then - local filename = filejoin(pathname,parent) - local realname = resolvers.resolve(filename) -- no shortcut - local blob = loadfile(realname) - if blob then - local parentdata = blob() - if parentdata then - report_resolving("loading configuration file '%s'",filename) - data = table.merged(parentdata,data) - end - end -end + local parent = data and data.parent + if parent then + local filename = filejoin(pathname,parent) + local realname = resolvers.resolve(filename) -- no shortcut + local blob = loadfile(realname) + if blob then + local parentdata = blob() + if parentdata then + report_resolving("loading configuration file '%s'",filename) + data = table.merged(parentdata,data) + end + end + end data = data and data.content if data then if trace_locating then @@ -475,14 +475,14 @@ local function locate_file_databases() local runtime = stripped == path path = resolvers.cleanpath(path) local spec = resolvers.splitmethod(stripped) - if spec.scheme == "cache" or spec.scheme == "file" then - stripped = spec.path - elseif runtime and (spec.noscheme or spec.scheme == "file") then + if runtime and (spec.noscheme or spec.scheme == "file") then stripped = "tree:///" .. stripped + elseif spec.scheme == "cache" or spec.scheme == "file" then + stripped = spec.path end if trace_locating then if runtime then - report_resolving("locating list of '%s' (runtime)",path) + report_resolving("locating list of '%s' (runtime) (%s)",path,stripped) else report_resolving("locating list of '%s' (cached)",path) end @@ -894,578 +894,402 @@ end local preparetreepattern = Cs((P(".")/"%%." + P("-")/"%%-" + P(1))^0 * Cc("$")) --- this one is split in smaller functions but it needs testing +-- -- -- begin of main file search routing -- -- -- needs checking as previous has been patched -local function collect_instance_files(filename,askedformat,allresults) -- todo : plugin (scanners, checkers etc) - local result = { } - local stamp = nil - askedformat = askedformat or "" - filename = collapsepath(filename) - -- speed up / beware: format problem - if instance.remember and not allresults then - stamp = filename .. "--" .. 
askedformat - if instance.found[stamp] then - if trace_locating then - report_resolving("remembered file '%s'",filename) - end - resolvers.registerintrees(filename) -- for tracing used files - return instance.found[stamp] - end - end - if not dangerous[askedformat] then - if isreadable(filename) then - if trace_detail then - report_resolving("file '%s' found directly",filename) - end - if stamp then - instance.found[stamp] = { filename } - end - return { filename } +local collect_instance_files + +local function find_direct(filename,allresults) + if not dangerous[askedformat] and isreadable(filename) then + if trace_detail then + report_resolving("file '%s' found directly",filename) end + return { filename } end +end + +local function find_wildcard(filename,allresults) if find(filename,'%*') then if trace_locating then report_resolving("checking wildcard '%s'", filename) end - result = resolvers.findwildcardfiles(filename) -- we can use th elocal - elseif file.is_qualified_path(filename) then - if isreadable(filename) then - if trace_locating then - report_resolving("qualified name '%s'", filename) - end - result = { filename } - else - local forcedname, ok, suffix = "", false, fileextname(filename) - if suffix == "" then -- why - local format_suffixes = askedformat == "" and resolvers.defaultsuffixes or suffixes[askedformat] - if format_suffixes then - for i=1,#format_suffixes do - local s = format_suffixes[i] - forcedname = filename .. "." .. s - if isreadable(forcedname) then - if trace_locating then - report_resolving("no suffix, forcing format filetype '%s'", s) - end - result, ok = { forcedname }, true - break - end + return resolvers.findwildcardfiles(filename) -- we can use the local + end +end + +local function find_qualified(filename,allresults) -- this one will be split too + if not file.is_qualified_path(filename) then + return + end + if trace_locating then + report_resolving("checking qualified name '%s'", filename) + end + if isreadable(filename) then + if trace_detail then + report_resolving("qualified file '%s' found", filename) + end + return { filename } + end + if trace_detail then + report_resolving("locating qualified file '%s'", filename) + end + local forcedname, suffix = "", fileextname(filename) + if suffix == "" then -- why + local format_suffixes = askedformat == "" and resolvers.defaultsuffixes or suffixes[askedformat] + if format_suffixes then + for i=1,#format_suffixes do + local s = format_suffixes[i] + forcedname = filename .. "." .. s + if isreadable(forcedname) then + if trace_locating then + report_resolving("no suffix, forcing format filetype '%s'", s) end + return { forcedname } end end - if not ok and suffix ~= "" then - -- try to find in tree (no suffix manipulation), here we search for the - -- matching last part of the name - local basename = filebasename(filename) - local pattern = lpegmatch(preparetreepattern,filename) - -- messy .. to be sorted out - local savedformat = askedformat - local format = savedformat or "" - if format == "" then - askedformat = resolvers.formatofsuffix(suffix) - end - if not format then - askedformat = "othertextfiles" -- kind of everything, maybe texinput is better + end + end + if suffix ~= "" then + -- try to find in tree (no suffix manipulation), here we search for the + -- matching last part of the name + local basename = filebasename(filename) + local pattern = lpegmatch(preparetreepattern,filename) + -- messy .. 
to be sorted out + local savedformat = askedformat + local format = savedformat or "" + if format == "" then + askedformat = resolvers.formatofsuffix(suffix) + end + if not format then + askedformat = "othertextfiles" -- kind of everything, maybe all + end + -- + if basename ~= filename then + local resolved = collect_instance_files(basename,askedformat,allresults) + if #resolved == 0 then + local lowered = lower(basename) + if filename ~= lowered then + resolved = collect_instance_files(lowered,askedformat,allresults) end - -- - if basename ~= filename then - local resolved = collect_instance_files(basename,askedformat,allresults) - if #result == 0 then -- shouldn't this be resolved ? - local lowered = lower(basename) - if filename ~= lowered then - resolved = collect_instance_files(lowered,askedformat,allresults) - end - end - resolvers.format = savedformat - -- - for r=1,#resolved do - local rr = resolved[r] - if find(rr,pattern) then - result[#result+1], ok = rr, true - end + end + resolvers.format = savedformat + -- + if #resolved > 0 then + local result = { } + for r=1,#resolved do + local rr = resolved[r] + if find(rr,pattern) then + result[#result+1] = rr end end - -- a real wildcard: - -- - -- if not ok then - -- local filelist = collect_files({basename}) - -- for f=1,#filelist do - -- local ff = filelist[f][3] or "" - -- if find(ff,pattern) then - -- result[#result+1], ok = ff, true - -- end - -- end - -- end - end - if not ok and trace_locating then - report_resolving("qualified name '%s'", filename) + if #result > 0 then + return result + end end end - else - -- search spec - local filetype, done, wantedfiles, ext = '', false, { }, fileextname(filename) - -- -- tricky as filename can be bla.1.2.3 - -- if not suffixmap[ext] then --- probably needs to be done elsewhere too - -- wantedfiles[#wantedfiles+1] = filename + -- a real wildcard: + -- + -- local filelist = collect_files({basename}) + -- result = { } + -- for f=1,#filelist do + -- local ff = filelist[f][3] or "" + -- if find(ff,pattern) then + -- result[#result+1], ok = ff, true + -- end -- end - wantedfiles[#wantedfiles+1] = filename - if askedformat == "" then - if ext == "" or not suffixmap[ext] then - local defaultsuffixes = resolvers.defaultsuffixes - for i=1,#defaultsuffixes do - local forcedname = filename .. '.' .. defaultsuffixes[i] - wantedfiles[#wantedfiles+1] = forcedname - filetype = resolvers.formatofsuffix(forcedname) - if trace_locating then - report_resolving("forcing filetype '%s'",filetype) - end - end - else - filetype = resolvers.formatofsuffix(filename) + -- if #result > 0 then + -- return result + -- end + end +end + +local function find_analyze(filename,askedformat,allresults) + local filetype, wantedfiles, ext = '', { }, fileextname(filename) + -- too tricky as filename can be bla.1.2.3: + -- + -- if not suffixmap[ext] then + -- wantedfiles[#wantedfiles+1] = filename + -- end + wantedfiles[#wantedfiles+1] = filename + if askedformat == "" then + if ext == "" or not suffixmap[ext] then + local defaultsuffixes = resolvers.defaultsuffixes + for i=1,#defaultsuffixes do + local forcedname = filename .. '.' .. 
defaultsuffixes[i] + wantedfiles[#wantedfiles+1] = forcedname + filetype = resolvers.formatofsuffix(forcedname) if trace_locating then - report_resolving("using suffix based filetype '%s'",filetype) + report_resolving("forcing filetype '%s'",filetype) end end else - if ext == "" or not suffixmap[ext] then - local format_suffixes = suffixes[askedformat] - if format_suffixes then - for i=1,#format_suffixes do - wantedfiles[#wantedfiles+1] = filename .. "." .. format_suffixes[i] - end - end - end - filetype = askedformat + filetype = resolvers.formatofsuffix(filename) if trace_locating then - report_resolving("using given filetype '%s'",filetype) + report_resolving("using suffix based filetype '%s'",filetype) end end - local typespec = resolvers.variableofformat(filetype) - local pathlist = resolvers.expandedpathlist(typespec) - if not pathlist or #pathlist == 0 then - -- no pathlist, access check only / todo == wildcard - if trace_detail then - report_resolving("checking filename '%s', filetype '%s', wanted files '%s'",filename, filetype or '?',concat(wantedfiles," | ")) - end - for k=1,#wantedfiles do - local fname = wantedfiles[k] - if fname and isreadable(fname) then - filename, done = fname, true - result[#result+1] = filejoin('.',fname) - break + else + if ext == "" or not suffixmap[ext] then + local format_suffixes = suffixes[askedformat] + if format_suffixes then + for i=1,#format_suffixes do + wantedfiles[#wantedfiles+1] = filename .. "." .. format_suffixes[i] end end - -- this is actually 'other text files' or 'any' or 'whatever' - local filelist = collect_files(wantedfiles) - local fl = filelist and filelist[1] - if fl then - filename = fl[3] -- not local? - result[#result+1] = resolvers.resolve(filename) - done = true + end + filetype = askedformat + if trace_locating then + report_resolving("using given filetype '%s'",filetype) + end + end + return filetype, wantedfiles +end + +local function check_subpath(fname) + if isreadable(fname) then + if trace_detail then + report_resolving("found '%s' by deep scanning",fname) + end + return fname + end +end + +local function find_intree(filename,filetype,wantedfiles,allresults) + local typespec = resolvers.variableofformat(filetype) + local pathlist = resolvers.expandedpathlist(typespec) + if pathlist and #pathlist > 0 then + -- list search + local filelist = collect_files(wantedfiles) + local dirlist = { } + if filelist then + for i=1,#filelist do + dirlist[i] = filedirname(filelist[i][3]) .. "/" -- was [2] .. gamble end - else - -- list search - local filelist = collect_files(wantedfiles) - local dirlist = { } + end + if trace_detail then + report_resolving("checking filename '%s'",filename) + end + local result = { } + for k=1,#pathlist do + local path = pathlist[k] + local pathname = lpegmatch(inhibitstripper,path) + local doscan = path == pathname -- no ^!! + if not find (pathname,'//$') then + doscan = false -- we check directly on the path + end + local done = false + -- using file list if filelist then - for i=1,#filelist do - dirlist[i] = filedirname(filelist[i][3]) .. "/" -- was [2] .. 
gamble + -- compare list entries with permitted pattern -- /xx /xx// + local expression = makepathexpression(pathname) + if trace_detail then + report_resolving("using pattern '%s' for path '%s'",expression,pathname) end - end - if trace_detail then - report_resolving("checking filename '%s'",filename) - end - for k=1,#pathlist do - local path = pathlist[k] - local pathname = lpegmatch(inhibitstripper,path) - local doscan = path == pathname -- no ^!! - done = false - -- using file list - if filelist then - -- compare list entries with permitted pattern -- /xx /xx// - local expression = makepathexpression(pathname) - if trace_detail then - report_resolving("using pattern '%s' for path '%s'",expression,pathname) - end - for k=1,#filelist do - local fl = filelist[k] - local f = fl[2] - local d = dirlist[k] - if find(d,expression) then - -- todo, test for readable - result[#result+1] = resolvers.resolve(fl[3]) -- no shortcut - done = true - if allresults then - if trace_detail then - report_resolving("match to '%s' in hash for file '%s' and path '%s', continue scanning",expression,f,d) - end - else - if trace_detail then - report_resolving("match to '%s' in hash for file '%s' and path '%s', quit scanning",expression,f,d) - end - break + for k=1,#filelist do + local fl = filelist[k] + local f = fl[2] + local d = dirlist[k] + if find(d,expression) then + -- todo, test for readable + result[#result+1] = resolvers.resolve(fl[3]) -- no shortcut + done = true + if allresults then + if trace_detail then + report_resolving("match to '%s' in hash for file '%s' and path '%s', continue scanning",expression,f,d) + end + else + if trace_detail then + report_resolving("match to '%s' in hash for file '%s' and path '%s', quit scanning",expression,f,d) end - elseif trace_detail then - report_resolving("no match to '%s' in hash for file '%s' and path '%s'",expression,f,d) + break end + elseif trace_detail then + report_resolving("no match to '%s' in hash for file '%s' and path '%s'",expression,f,d) end end - if not done and doscan then - -- check if on disk / unchecked / does not work at all / also zips - local scheme = url.hasscheme(pathname) - if not scheme or scheme == "file" then - local pname = gsub(pathname,"%.%*$",'') - if not find(pname,"%*") then - local ppname = gsub(pname,"/+$","") - if can_be_dir(ppname) then + end + if not done then + pathname = gsub(pathname,"/+$","") + pathname = resolvers.resolve(pathname) + local scheme = url.hasscheme(pathname) + if not scheme or scheme == "file" then + local pname = gsub(pathname,"%.%*$",'') + if not find(pname,"%*") then + if can_be_dir(pname) then + -- quick root scan first + for k=1,#wantedfiles do + local w = wantedfiles[k] + local fname = check_subpath(filejoin(pname,w)) + if fname then + result[#result+1] = fname + done = true + if not allresults then + break + end + end + end + if not done and doscan then + -- collect files in path (and cache the result) + local files = resolvers.scanfiles(pname,false,true) for k=1,#wantedfiles do local w = wantedfiles[k] - local fname = filejoin(ppname,w) - if isreadable(fname) then - if trace_detail then - report_resolving("found '%s' by scanning",fname) + local subpath = files[w] + if not subpath or subpath == "" then + -- rootscan already done + elseif type(subpath) == "string" then + local fname = check_subpath(filejoin(ppname,subpath,w)) + if fname then + result[#result+1] = fname + done = true + if not allresults then + break + end + end + else + for i=1,#subpath do + local sp = subpath[i] + if sp == "" then + 
-- roottest already done + else + local fname = check_subpath(filejoin(ppname,sp,w)) + if fname then + result[#result+1] = fname + done = true + if not allresults then + break + end + end + end + end + if done and not allresults then + break end - result[#result+1] = fname - done = true - if not allresults then break end end end - else - -- no access needed for non existing path, speedup (esp in large tree with lots of fake) end end + else + -- no access needed for non existing path, speedup (esp in large tree with lots of fake) end end - if not done and doscan then - -- todo: slow path scanning ... although we now have tree:// supported in $TEXMF - end - if done and not allresults then break end + end + -- todo recursive scanning + if done and not allresults then + return #result > 0 and result end end end - for k=1,#result do - local rk = collapsepath(result[k]) - result[k] = rk - resolvers.registerintrees(rk) -- for tracing used files +end + +local function find_onpath(filename,filetype,wantedfiles,allresults) + if trace_detail then + report_resolving("checking filename '%s', filetype '%s', wanted files '%s'",filename, filetype or '?',concat(wantedfiles," | ")) end - if stamp then - instance.found[stamp] = result + local result = { } + for k=1,#wantedfiles do + local fname = wantedfiles[k] + if fname and isreadable(fname) then + filename = fname + result[#result+1] = filejoin('.',fname) + if not allresults then + break + end + end end - return result + return #result > 0 and result end --- -- -- begin of main file search routing -- -- -- - ---~ local collect_instance_files - ---~ local function find_direct(filename) ---~ if not dangerous[askedformat] and isreadable(filename) then ---~ if trace_detail then ---~ report_resolving("file '%s' found directly",filename) ---~ end ---~ return { filename } ---~ end ---~ end - ---~ local function find_wildcard(filename) ---~ if find(filename,'%*') then ---~ if trace_locating then ---~ report_resolving("checking wildcard '%s'", filename) ---~ end ---~ return resolvers.findwildcardfiles(filename) -- we can use the local ---~ end ---~ end - ---~ local function find_qualified(filename) -- this one will be split too ---~ if not file.is_qualified_path(filename) then ---~ return ---~ end ---~ if trace_locating then ---~ report_resolving("checking qualified name '%s'", filename) ---~ end ---~ if isreadable(filename) then ---~ if trace_detail then ---~ report_resolving("qualified file '%s' found", filename) ---~ end ---~ return { filename } ---~ else ---~ if trace_detail then ---~ report_resolving("locating qualified file '%s'", filename) ---~ end ---~ local forcedname, suffix = "", fileextname(filename) ---~ if suffix == "" then -- why ---~ local format_suffixes = askedformat == "" and resolvers.defaultsuffixes or suffixes[askedformat] ---~ if format_suffixes then ---~ for i=1,#format_suffixes do ---~ local s = format_suffixes[i] ---~ forcedname = filename .. "." .. s ---~ if isreadable(forcedname) then ---~ if trace_locating then ---~ report_resolving("no suffix, forcing format filetype '%s'", s) ---~ end ---~ return { forcedname } ---~ end ---~ end ---~ end ---~ end ---~ if suffix ~= "" then ---~ -- try to find in tree (no suffix manipulation), here we search for the ---~ -- matching last part of the name ---~ local basename = filebasename(filename) ---~ local pattern = lpegmatch(preparetreepattern,filename) ---~ -- messy .. 
to be sorted out ---~ local savedformat = askedformat ---~ local format = savedformat or "" ---~ if format == "" then ---~ askedformat = resolvers.formatofsuffix(suffix) ---~ end ---~ if not format then ---~ askedformat = "othertextfiles" -- kind of everything, maybe all ---~ end ---~ -- ---~ if basename ~= filename then ---~ local resolved = collect_instance_files(basename,askedformat,allresults) ---~ if #resolved == 0 then ---~ local lowered = lower(basename) ---~ if filename ~= lowered then ---~ resolved = collect_instance_files(lowered,askedformat,allresults) ---~ end ---~ end ---~ resolvers.format = savedformat ---~ -- ---~ if #resolved > 0 then ---~ local result = { } ---~ for r=1,#resolved do ---~ local rr = resolved[r] ---~ if find(rr,pattern) then ---~ result[#result+1] = rr ---~ end ---~ end ---~ if #result > 0 then ---~ return result ---~ end ---~ end ---~ end ---~ -- a real wildcard: ---~ -- ---~ -- local filelist = collect_files({basename}) ---~ -- result = { } ---~ -- for f=1,#filelist do ---~ -- local ff = filelist[f][3] or "" ---~ -- if find(ff,pattern) then ---~ -- result[#result+1], ok = ff, true ---~ -- end ---~ -- end ---~ -- if #result > 0 then ---~ -- return result ---~ -- end ---~ end ---~ end ---~ end - ---~ local function find_analyze(filename,askedformat) ---~ local filetype, wantedfiles, ext = '', { }, fileextname(filename) ---~ -- too tricky as filename can be bla.1.2.3: ---~ -- ---~ -- if not suffixmap[ext] then ---~ -- wantedfiles[#wantedfiles+1] = filename ---~ -- end ---~ wantedfiles[#wantedfiles+1] = filename ---~ if askedformat == "" then ---~ if ext == "" or not suffixmap[ext] then ---~ local defaultsuffixes = resolvers.defaultsuffixes ---~ for i=1,#defaultsuffixes do ---~ local forcedname = filename .. '.' .. defaultsuffixes[i] ---~ wantedfiles[#wantedfiles+1] = forcedname ---~ filetype = resolvers.formatofsuffix(forcedname) ---~ if trace_locating then ---~ report_resolving("forcing filetype '%s'",filetype) ---~ end ---~ end ---~ else ---~ filetype = resolvers.formatofsuffix(filename) ---~ if trace_locating then ---~ report_resolving("using suffix based filetype '%s'",filetype) ---~ end ---~ end ---~ else ---~ if ext == "" or not suffixmap[ext] then ---~ local format_suffixes = suffixes[askedformat] ---~ if format_suffixes then ---~ for i=1,#format_suffixes do ---~ wantedfiles[#wantedfiles+1] = filename .. "." .. format_suffixes[i] ---~ end ---~ end ---~ end ---~ filetype = askedformat ---~ if trace_locating then ---~ report_resolving("using given filetype '%s'",filetype) ---~ end ---~ end ---~ return filetype, wantedfiles ---~ end - ---~ local function find_intree(filename,filetype,wantedfiles) ---~ local typespec = resolvers.variableofformat(filetype) ---~ local pathlist = resolvers.expandedpathlist(typespec) ---~ if pathlist and #pathlist > 0 then ---~ -- list search ---~ local filelist = collect_files(wantedfiles) ---~ local dirlist = { } ---~ if filelist then ---~ for i=1,#filelist do ---~ dirlist[i] = filedirname(filelist[i][3]) .. "/" -- was [2] .. gamble ---~ end ---~ end ---~ if trace_detail then ---~ report_resolving("checking filename '%s'",filename) ---~ end ---~ local result = { } ---~ for k=1,#pathlist do ---~ local path = pathlist[k] ---~ local pathname = lpegmatch(inhibitstripper,path) ---~ local doscan = path == pathname -- no ^!! 
---~ local done = false ---~ -- using file list ---~ if filelist then ---~ -- compare list entries with permitted pattern -- /xx /xx// ---~ local expression = makepathexpression(pathname) ---~ if trace_detail then ---~ report_resolving("using pattern '%s' for path '%s'",expression,pathname) ---~ end ---~ for k=1,#filelist do ---~ local fl = filelist[k] ---~ local f = fl[2] ---~ local d = dirlist[k] ---~ if find(d,expression) then ---~ -- todo, test for readable ---~ result[#result+1] = resolvers.resolve(fl[3]) -- no shortcut ---~ done = true ---~ if allresults then ---~ if trace_detail then ---~ report_resolving("match to '%s' in hash for file '%s' and path '%s', continue scanning",expression,f,d) ---~ end ---~ else ---~ if trace_detail then ---~ report_resolving("match to '%s' in hash for file '%s' and path '%s', quit scanning",expression,f,d) ---~ end ---~ break ---~ end ---~ elseif trace_detail then ---~ report_resolving("no match to '%s' in hash for file '%s' and path '%s'",expression,f,d) ---~ end ---~ end ---~ end ---~ if not done and doscan then ---~ -- check if on disk / unchecked / does not work at all / also zips ---~ local scheme = url.hasscheme(pathname) ---~ if not scheme or scheme == "file" then ---~ local pname = gsub(pathname,"%.%*$",'') ---~ if not find(pname,"%*") then ---~ local ppname = gsub(pname,"/+$","") ---~ if can_be_dir(ppname) then ---~ for k=1,#wantedfiles do ---~ local w = wantedfiles[k] ---~ local fname = filejoin(ppname,w) ---~ if isreadable(fname) then ---~ if trace_detail then ---~ report_resolving("found '%s' by scanning",fname) ---~ end ---~ result[#result+1] = fname ---~ done = true ---~ if not allresults then break end ---~ end ---~ end ---~ else ---~ -- no access needed for non existing path, speedup (esp in large tree with lots of fake) ---~ end ---~ end ---~ end ---~ end ---~ if not done and doscan then ---~ -- todo: slow path scanning ... 
although we now have tree:// supported in $TEXMF ---~ end ---~ if done and not allresults then ---~ return #result > 0 and result ---~ end ---~ end ---~ end ---~ end - ---~ local function find_onpath(filename,filetype,wantedfiles) ---~ local done = nil ---~ if trace_detail then ---~ report_resolving("checking filename '%s', filetype '%s', wanted files '%s'",filename, filetype or '?',concat(wantedfiles," | ")) ---~ end ---~ for k=1,#wantedfiles do ---~ local fname = wantedfiles[k] ---~ if fname and isreadable(fname) then ---~ filename, done = fname, true ---~ result[#result+1] = filejoin('.',fname) ---~ break ---~ end ---~ end ---~ end - ---~ local function find_otherwise(filename,filetype,wantedfiles) -- other text files | any | whatever ---~ local filelist = collect_files(wantedfiles) ---~ local fl = filelist and filelist[1] ---~ if fl then ---~ return { resolvers.resolve(fl[3]) } -- filename ---~ end ---~ end - ---~ collect_instance_files = function(filename,askedformat,allresults) -- uses nested ---~ local result, stamp, filetype, wantedfiles ---~ askedformat = askedformat or "" ---~ filename = collapsepath(filename) ---~ if instance.remember and not allresults then ---~ stamp = format("%s--%s", filename, askedformat) ---~ result = stamp and instance.found[stamp] ---~ if result then ---~ if trace_locating then ---~ report_resolving("remembered file '%s'",filename) ---~ end ---~ return result ---~ end ---~ end ---~ result = find_direct (filename,stamp) or ---~ find_wildcard (filename) or ---~ find_qualified(filename) ---~ if not result then ---~ filetype, wantedfiles = find_analyze(filename,askedformat) ---~ result = find_intree (filename,filetype,wantedfiles) or ---~ find_onpath (filename,filetype,wantedfiles) or ---~ find_otherwise(filename,filetype,wantedfiles) ---~ end ---~ if result then ---~ for k=1,#result do ---~ local rk = collapsepath(result[k]) ---~ result[k] = rk ---~ resolvers.registerintrees(rk) -- for tracing used files ---~ end ---~ else ---~ result = { } -- maybe false ---~ end ---~ if stamp then ---~ if trace_locating then ---~ report_resolving("remembering file '%s'",filename) ---~ end ---~ instance.found[stamp] = result ---~ end ---~ return result ---~ end +local function find_otherwise(filename,filetype,wantedfiles,allresults) -- other text files | any | whatever + local filelist = collect_files(wantedfiles) + local fl = filelist and filelist[1] + if fl then + return { resolvers.resolve(fl[3]) } -- filename + end +end + +collect_instance_files = function(filename,askedformat,allresults) -- uses nested + local result, stamp, filetype, wantedfiles + askedformat = askedformat or "" + filename = collapsepath(filename) + if allresults then + -- no need for caching, only used for tracing + local filetype, wantedfiles = find_analyze(filename,askedformat) + local results = { + { method = "direct", list = find_direct (filename,stamp,true) }, + { method = "wildcard", list = find_wildcard (filename,true) }, + { method = "qualified", list = find_qualified(filename,true) }, + { method = "in tree", list = find_intree (filename,filetype,wantedfiles,true) }, + { method = "on path", list = find_onpath (filename,filetype,wantedfiles,true) }, + { method = "otherwise", list = find_otherwise(filename,filetype,wantedfiles,true) }, + } + local result, status, done = { }, { }, { } + for k, r in next, results do + local method, list = r.method, r.list + if list then + for i=1,#list do + local c = collapsepath(list[i]) + if not done[c] then + result[#result+1] = c + done[c] = true + end + 
status[#status+1] = format("%-10s: %s",method,c) + end + end + end + if trace_detail then + report_resolving("lookup status: %s",table.serialize(status,filename)) + end + return result, status + else + if instance.remember then + stamp = format("%s--%s", filename, askedformat) + result = stamp and instance.found[stamp] + if result then + if trace_locating then + report_resolving("remembered file '%s'",filename) + end + return result + end + end + result = find_direct (filename,stamp) or + find_wildcard (filename) or + find_qualified(filename) + if not result then + local filetype, wantedfiles = find_analyze(filename,askedformat) + result = find_intree (filename,filetype,wantedfiles) or + find_onpath (filename,filetype,wantedfiles) or + find_otherwise(filename,filetype,wantedfiles) + end + if result then + for k=1,#result do + local rk = collapsepath(result[k]) + result[k] = rk + resolvers.registerintrees(rk) -- for tracing used files + end + else + result = { } -- maybe false + end + if stamp then + if trace_locating then + report_resolving("remembering file '%s'",filename) + end + instance.found[stamp] = result + end + return result + end +end -- -- -- end of main file search routing -- -- -- + local function findfiles(filename,filetype,allresults) - local result = collect_instance_files(filename,filetype or "",allresults) - if #result == 0 then + local result, status = collect_instance_files(filename,filetype or "",allresults) + if not result or #result == 0 then local lowered = lower(filename) if filename ~= lowered then - return collect_instance_files(lowered,filetype or "",allresults) + result, status = collect_instance_files(lowered,filetype or "",allresults) end end - return result + return result or { }, status end function resolvers.findfiles(filename,filetype) @@ -1632,6 +1456,10 @@ function resolvers.load(option) return files and next(files) and true end +function resolvers.loadtime() + return statistics.elapsedtime(instance) +end + local function report(str) if trace_locating then report_resolving(str) -- has already verbose @@ -1645,6 +1473,9 @@ function resolvers.dowithfilesandreport(command, files, ...) -- will move if trace_locating then report('') -- ? end + if type(files) == "string" then + files = { files } + end for f=1,#files do local file = files[f] local result = command(file,...) 
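Editorial note on the data-res.lua hunk above: the patch splits the old monolithic collect_instance_files into small finders (find_direct, find_wildcard, find_qualified, find_intree, find_onpath, find_otherwise) that are tried in turn, and keeps the instance.found memo cache keyed on "filename--format" for the normal (non-allresults) path. A rough, self-contained sketch of that shape follows; the helper names and bodies are illustrative stand-ins, not the actual ConTeXt code:

-- minimal sketch of "chain of finders plus memo cache" (editorial, hypothetical helpers)

local function find_direct(name)
    local f = io.open(name, "r")           -- readable as given?
    if f then
        f:close()
        return { name }
    end
end

local function find_qualified(name)        -- stub: qualified / absolute paths
end

local function find_intree(name, format)   -- stub: file database / tree scan
end

local found = { }                           -- memo cache, in the spirit of instance.found

local function lookup(name, format)
    format = format or ""
    local stamp = name .. "--" .. format
    local hit = found[stamp]
    if hit then
        return hit                          -- remembered result, second call is cheap
    end
    local result = find_direct(name)
                or find_qualified(name)
                or find_intree(name, format)
                or { }                      -- remember misses as well
    found[stamp] = result
    return result
end

print(#lookup("data-res.lua"))              -- 0 or 1, depending on the current directory
print(#lookup("data-res.lua"))              -- second call is served from the cache

The real code in the patch additionally collapses paths, registers each hit via resolvers.registerintrees, and, when allresults is requested, bypasses the cache and instead collects a per-method status list for tracing.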
diff --git a/tex/context/base/data-tex.lua b/tex/context/base/data-tex.lua index 14148b20e..4b3fba33b 100644 --- a/tex/context/base/data-tex.lua +++ b/tex/context/base/data-tex.lua @@ -21,13 +21,13 @@ local fileprocessor = nil local lineprocessor = nil local textfileactions = sequencers.reset { - arguments = "str,filename", + arguments = "str,filename,coding", returnvalues = "str", results = "str", } local textlineactions = sequencers.reset { - arguments = "str,filename,linenumber,noflines", + arguments = "str,filename,linenumber,noflines,coding", returnvalues = "str", results = "str", } @@ -46,7 +46,7 @@ appendgroup(textlineactions,"before") -- user appendgroup(textlineactions,"system") -- private appendgroup(textlineactions,"after" ) -- user -function helpers.textopener(tag,filename,filehandle) +function helpers.textopener(tag,filename,filehandle,coding) local lines local t_filehandle = type(filehandle) if not filehandle then @@ -60,7 +60,7 @@ function helpers.textopener(tag,filename,filehandle) filehandle:close() end if type(lines) == "string" then - local coding = utffiletype(lines) + local coding = coding or utffiletype(lines) -- so we can signal no regime if trace_locating then report_tex("%s opener, '%s' opened using method '%s'",tag,filename,coding) end @@ -72,11 +72,11 @@ function helpers.textopener(tag,filename,filehandle) lines = unicode.utf32_to_utf8_be(lines) elseif coding == "utf-32-le" then lines = unicode.utf32_to_utf8_le(lines) - else -- utf8 or unknown + else -- utf8 or unknown (could be a mkvi file) if textfileactions.dirty then -- maybe use autocompile fileprocessor = sequencers.compile(textfileactions) end - lines = fileprocessor(lines,filename) or lines + lines = fileprocessor(lines,filename,coding) or lines lines = splitlines(lines) end elseif trace_locating then @@ -115,7 +115,7 @@ function helpers.textopener(tag,filename,filehandle) if textlineactions.dirty then lineprocessor = sequencers.compile(textlineactions) -- maybe use autocompile end - return lineprocessor(content,filename,currentline,noflines) or content + return lineprocessor(content,filename,currentline,noflines,coding) or content end end end diff --git a/tex/context/base/data-tmp.lua b/tex/context/base/data-tmp.lua index 6e64fc4c7..6e235dd34 100644 --- a/tex/context/base/data-tmp.lua +++ b/tex/context/base/data-tmp.lua @@ -86,7 +86,7 @@ local function identify() end elseif not writable and caches.force then local cacheparent = file.dirname(cachepath) - if file.is_writable(cacheparent) then + if file.is_writable(cacheparent) and true then -- we go on anyway (needed for mojca's kind of paths) if not caches.ask or io.ask(format("\nShould I create the cache path %s?",cachepath), "no", { "yes", "no" }) == "yes" then mkdirs(cachepath) if isdir(cachepath) and file.is_writable(cachepath) then diff --git a/tex/context/base/data-tre.lua b/tex/context/base/data-tre.lua index 88bf4f112..1b916bf26 100644 --- a/tex/context/base/data-tre.lua +++ b/tex/context/base/data-tre.lua @@ -48,9 +48,10 @@ end function resolvers.locators.tree(specification) local name = specification.filename - if name ~= '' and lfs.isdir(name) then + local realname = resolvers.resolve(name) -- no shortcut + if realname and realname ~= '' and lfs.isdir(realname) then if trace_locating then - report_trees("locator '%s' found",name) + report_trees("locator '%s' found",realname) end resolvers.appendhash('tree',name,false) -- don't cache elseif trace_locating then @@ -64,6 +65,8 @@ function resolvers.hashers.tree(specification) 
report_trees("analysing '%s'",name) end resolvers.methodhandler("hashers",name) + + resolvers.generators.file(specification) end resolvers.concatinators.tree = resolvers.concatinators.file diff --git a/tex/context/base/data-vir.lua b/tex/context/base/data-vir.lua index 5a288953f..89359c19a 100644 --- a/tex/context/base/data-vir.lua +++ b/tex/context/base/data-vir.lua @@ -55,7 +55,8 @@ function openers.virtual(specification) report_virtual("opener, file '%s' opened",original) end data[original] = nil - return openers.helpers.textopener("virtual",original,d) + -- With utf-8 we signal that no regime is to be applied! + return openers.helpers.textopener("virtual",original,d,"utf-8") else if trace_virtual then report_virtual("opener, file '%s' not found",original) diff --git a/tex/context/base/export-example.tex b/tex/context/base/export-example.tex index 6885cfe37..ae94642dd 100644 --- a/tex/context/base/export-example.tex +++ b/tex/context/base/export-example.tex @@ -1,3 +1,5 @@ +% language=uk + \usemodule[abr-01] \setupbackend @@ -81,7 +83,7 @@ e = mc^2 \startparagraph Okay, it's somewhat boring to always use the same formula, so how about -$\sqrt{4} = 2$ or traveling at \unit{120 km/h} instead of $\unit{110 km/h}$. +$\sqrt{4} = 2$ or travelling at \unit{120 km/h} instead of $\unit{110 km/h}$. \stopparagraph \bTABLE diff --git a/tex/context/base/font-con.lua b/tex/context/base/font-con.lua index 61970f734..0caa93b76 100644 --- a/tex/context/base/font-con.lua +++ b/tex/context/base/font-con.lua @@ -146,14 +146,16 @@ function constructors.calculatescale(tfmdata,scaledpoints) return scaledpoints, scaledpoints / (parameters.units or 1000) -- delta end -function constructors.assignmathparameters(target,tfmdata) +function constructors.assignmathparameters(target,original) -- dumb version, not used in context -- when a tfm file is loaded, it has already been scaled - -- and it never enters the scaled so this is otf only + -- and it never enters the scaled so this is otf only and + -- even then we do some extra in the context math plugins local mathparameters = original.mathparameters if mathparameters and next(mathparameters) then local targetparameters = target.parameters + local targetproperties = target.properties local targetmathparameters = { } - local factor = targetparameters.factor + local factor = targetproperties.math_is_scaled and 1 or targetparameters.factor for name, value in next, mathparameters do if name == "RadicalDegreeBottomRaisePercent" then targetmathparameters[name] = value @@ -161,15 +163,12 @@ function constructors.assignmathparameters(target,tfmdata) targetmathparameters[name] = value * factor end end - if not targetmathparameters.AccentBaseHeight then - targetmathparameters.AccentBaseHeight = nil -- safeguard, still needed? 
- end - if not targetmathparameters.FractionDelimiterSize then - targetmathparameters.FractionDelimiterSize = 0 - end - if not mathparameters.FractionDelimiterDisplayStyleSize then - targetmathparameters.FractionDelimiterDisplayStyleSize = 0 - end + -- if not targetmathparameters.FractionDelimiterSize then + -- targetmathparameters.FractionDelimiterSize = 0 + -- end + -- if not mathparameters.FractionDelimiterDisplayStyleSize then + -- targetmathparameters.FractionDelimiterDisplayStyleSize = 0 + -- end target.mathparameters = targetmathparameters end end @@ -178,7 +177,7 @@ function constructors.scale(tfmdata,specification) local target = { } -- the new table -- if tonumber(specification) then - specification = { size = specification } + specification = { size = specification } end -- local scaledpoints = specification.size diff --git a/tex/context/base/font-ctx.lua b/tex/context/base/font-ctx.lua index 5b8a4e8ec..e39e29080 100644 --- a/tex/context/base/font-ctx.lua +++ b/tex/context/base/font-ctx.lua @@ -1211,17 +1211,15 @@ if environment.initex then for what, handler in table.sortedpairs(handlers) do local features = handler.features if features then - local t = { } - t[#t+1] = "[" - t[#t+1] = what - t[#t+1] = format("(base initializers: %s)",names(features.initializers.base)) - t[#t+1] = format("(base processors: %s)", names(features.processors .base)) - t[#t+1] = format("(base manipulators: %s)",names(features.manipulators.base)) - t[#t+1] = format("(node initializers: %s)",names(features.initializers.node)) - t[#t+1] = format("(node processors: %s)", names(features.processors .node)) - t[#t+1] = format("(node manipulators: %s)",names(features.manipulators.node)) - t[#t+1] = "]" - l[#l+1] = concat(t, " ") + l[#l+1] = format("[%s (base initializers: %s) (base processors: %s) (base manipulators: %s) (node initializers: %s) (node processors: %s) (node manipulators: %s)]", + what, + names(features.initializers.base), + names(features.processors .base), + names(features.manipulators.base), + names(features.initializers.node), + names(features.processors .node), + names(features.manipulators.node) + ) end end return concat(l, " | ") diff --git a/tex/context/base/font-ini.mkiv b/tex/context/base/font-ini.mkiv index af3d5ba78..76376f01b 100644 --- a/tex/context/base/font-ini.mkiv +++ b/tex/context/base/font-ini.mkiv @@ -492,18 +492,22 @@ %D check for fontclass being default or empty and save a few %D tests but it does not help us when no math is defined. 
-\let\mrfam\zerocount % math regular -\let\mbfam\plusone % math bold +\let\mrfam \zerocount % math regular +\let\mrfamlr\plusone % math regular l2r +\let\mrfamrl\plustwo % math regular r2l -\unexpanded\def\mr{\ifmmode\fam\mrfam\else\setcurrentfontalternative\c!mr\fi} -\unexpanded\def\mb{\ifmmode\fam\mbfam\else\setcurrentfontalternative\c!mb\fi} +\let\mbfam \plusthree % math bold +\let\mbfamlr\plusfour % math bold l2r +\let\mbfamrl\plusfive % math bold r2l + +\definesystemattribute[mathfamily][public] + +\newconditional\bidirectionalmathstrategy \def\mathtextsuffix {-text} \def\mathscriptsuffix {-script} \def\mathscriptscriptsuffix{-scriptscript} -% \let\mathsizesuffix\empty - \let\currentmathsize\empty \def\mathsizesuffix{\ifcase0\currentmathsize\or\mathtextsuffix\or\mathscriptsuffix\or\mathscriptscriptsuffix\fi} @@ -530,14 +534,14 @@ \csname \fontbody\c!mm\fontfamily\fontsize\currentmathsize\endcsname \else \ifcsname \fontbody\c!mm\fontfamily \currentmathsize\endcsname \autofontsizetrue \csname \fontbody\c!mm\fontfamily \currentmathsize\endcsname \else - \nullfont \autofontsizetrue + \nullfont \autofontsizetrue \fi\fi} \def\dosetmathfamily#1#2% {\let\savedfontbody\fontbody % op hoger plan \let\fontfamily#2% % new per 20100817 - \checkbodyfontenvironment[\scriptscriptface]% + \checkbodyfontenvironment[\scriptscriptface]% pretty slow when many switches \checkbodyfontenvironment[\scriptface]% \checkbodyfontenvironment[\textface]% % @@ -548,20 +552,101 @@ \let\fontbody\savedfontbody \autofontsizefalse} + +% It would be nice if characters could be defined in a neutral way (say fam 255) and +% be mapped to a real family during noad list construction. However, this changes +% tex in critical places so for the moment we simulate this using manipulation. + +% For tracing purposes we use three families but in l2r mode 1 and 2 are copies of 0 +% while in rl mode 0 is a copy of 1. There is no real overhead involved in this. + +% \appendtoks +% \dosetmathfamily\mrfam\c!mr +% \to \mathstrategies + +\appendtoks + % why here .. + \edef\@@fontclassdirection{\ifcsname\fontclass\c!mm\s!direction\endcsname\csname\fontclass\c!mm\s!direction\endcsname\fi}% + % ... + \ifx\@@fontclassdirection\v!both + \settrue\bidirectionalmathstrategy + \else + \setfalse\bidirectionalmathstrategy + \fi +\to \mathstrategies + \appendtoks - \dosetmathfamily\mrfam\c!mr + \ifconditional\bidirectionalmathstrategy + \dosetmathfamily \mrfamlr\c!mrlr + \dosetmathfamily \mrfamrl\c!mrrl + \textfont \mrfam \textfont \mrfamlr + \scriptfont \mrfam \scriptfont \mrfamlr + \scriptscriptfont\mrfam \scriptscriptfont\mrfamlr + \else + \dosetmathfamily \mrfam \c!mr + \textfont \mrfamrl\textfont \mrfam + \scriptfont \mrfamrl\scriptfont \mrfam + \scriptscriptfont\mrfamrl\scriptscriptfont\mrfam + \textfont \mrfamlr\textfont \mrfam + \scriptfont \mrfamlr\scriptfont \mrfam + \scriptscriptfont\mrfamlr\scriptscriptfont\mrfam + \fi \to \mathstrategies -% not official ! +\appendtoks + \fam\defaultmathfamily +\to \everymathematics -\newconditional\boldmathmode % might change ... 
maybe \mathfontsupport 1 (normal) 2 (bold too) +\unexpanded\def\synchronizemathfamily + {\attribute\mathfamilyattribute\ifconditional\bidirectionalmathstrategy + \ifconditional\mathematics_right_to_left + \plustwo + \else + \plusone + \fi + \else + \zerocount + \fi} + +% Bold is somewhat special as we might want both full-bold-math mixed +% regular-math, as well as automatic adaption to outer bold (in titles +% and inline text bold) so we will need explicit switches as well as +% an automatic one. (We will use lucida as an example.) + +% \unexpanded\def\mr{\ifmmode\fam\mrfam\else\setcurrentfontalternative\c!mr\fi} +% \unexpanded\def\mb{\ifmmode\fam\mbfam\else\setcurrentfontalternative\c!mb\fi} -\def\enableboldmath {\settrue \boldmathmode} % todo: \setupbodyfont[boldmath,...] -\def\disableboldmath{\setfalse\boldmathmode} +\unexpanded\def\mr + {\ifmmode + \synchronizemathfamily + \else + \setcurrentfontalternative\c!mr + \fi} + +\unexpanded\def\mb + {\ifmmode + \fam\mbfam + \else + \setcurrentfontalternative\c!mb + \fi} + +\let\mr\mb % for the moment \appendtoks - \ifconditional\boldmathmode\dosetmathfamily\mbfam\c!mb\fi -\to \mathstrategies + \synchronizemathfamily +\to \everymathematics + +% not official (doesn't work currently as some math definitions are bound to families) and +% we're not going to mix families +% +% \newconditional\boldmathmode % might change ... maybe \mathfontsupport 1 (normal) 2 (bold too) +% +% \def\enableboldmath {\settrue \boldmathmode} % todo: \setupbodyfont[boldmath,...] +% \def\disableboldmath{\setfalse\boldmathmode} +% +% \appendtoks +% \ifconditional\boldmathmode\dosetmathfamily\mbfam\c!mb\fi +% \to \mathstrategies %D All used styles, like rm, ss and tt, are saved in a comma %D separated list. Appart from practical limitations one can @@ -765,14 +850,18 @@ "\@@fontclassgoodies", % experiment (not yet used) "\@@fontgoodies" % experiment )}% -% \edef\somefontspec{at \somefontsize}% we need the resolved designsize (for fallbacks) \edef\somefontspec{at \number\scaledfontsize sp}% we need the resolved designsize (for fallbacks) \expandafter\let\expandafter\lastrawfontcall\csname#2\endcsname \the\everydefinefont \featureinheritancemode\featureinheritancedefault} +% Why these expanded and rscale not ... maybe not worth the trouble (delayed +% expansion actually would be better i.e. macros in feature specs). Test +% without pre-expansion. + \def\updatefontclassparameters - {\edef\@@fontclassfeatures {\ifcsname\fontclass\fontstyle\s!features \endcsname\csname\fontclass\fontstyle\s!features \endcsname\fi}% + {\edef\@@fontclassdirection{\ifcsname\fontclass\fontstyle\s!direction\endcsname\csname\fontclass\fontstyle\s!direction\endcsname\fi}% + \edef\@@fontclassfeatures {\ifcsname\fontclass\fontstyle\s!features \endcsname\csname\fontclass\fontstyle\s!features \endcsname\fi}% \edef\@@fontclassfallbacks{\ifcsname\fontclass\fontstyle\s!fallbacks\endcsname\csname\fontclass\fontstyle\s!fallbacks\endcsname\fi}% \edef\@@fontclassgoodies {\ifcsname\fontclass\fontstyle\s!goodies \endcsname\csname\fontclass\fontstyle\s!goodies \endcsname\fi}} @@ -1700,6 +1789,9 @@ %D user definitions like \type{\tfw} or \type{\bfq} for real %D large alternatives. +%D If we move design size info to the lfg file (after all only lm +%D has design sizes) we can get rid of much code .. 2012 or so. + \unexpanded\def\definebodyfont {\doquadrupleempty\redefinebodyfont} @@ -2118,14 +2210,6 @@ %D sequence of a session. After the loading job is done, the %D macro relaxes itself and reset the signal. 
-% \fontdimen1\nullfont 0\scaledpoint -% \fontdimen2\nullfont 256377\scaledpoint -% \fontdimen3\nullfont 128188\scaledpoint -% \fontdimen4\nullfont 85459\scaledpoint -% \fontdimen5\nullfont 338952\scaledpoint -% \fontdimen6\nullfont 786432\scaledpoint -% \fontdimen7\nullfont 85459\scaledpoint - \fontslantperpoint \nullfont 0\scaledpoint \fontinterwordspace \nullfont 256377\scaledpoint \fontinterwordstretch\nullfont 128188\scaledpoint @@ -2345,11 +2429,12 @@ \trycurrentfontclass{#1}% \fi\fi\fi} -\def\savefontclassparameters#1#2#3#4#5% #1=rm|ss|.. rscale features fallbacks goodies +\def\savefontclassparameters#1#2#3#4#5#6% #1=rm|ss|.. rscale features fallbacks goodies direction {\setxvalue{\fontclass#1\s!rscale }{#2}% \setxvalue{\fontclass#1\s!features }{#3}% \setxvalue{\fontclass#1\s!fallbacks}{#4}% - \setxvalue{\fontclass#1\s!goodies }{#5}} + \setxvalue{\fontclass#1\s!goodies }{#5}% + \setxvalue{\fontclass#1\s!direction}{#6}} \settrue\autotypescripts @@ -2951,12 +3036,33 @@ kern=yes, tlig=yes, trep=yes, + mathalternates=yes, language=dflt, script=math] -\definefontfeature[math-text] [virtualmath][mathalternates=yes,ssty=no] -\definefontfeature[math-script] [virtualmath][mathalternates=yes,ssty=1,mathsize=yes] -\definefontfeature[math-scriptscript][virtualmath][mathalternates=yes,ssty=2,mathsize=yes] +\definefontfeature + [virtualmath-l2r] + [virtualmath] + [] + +\definefontfeature + [virtualmath-r2l] + [virtualmath] + [language=ara, + rtlm=yes, + locl=yes] + +\definefontfeature[math-text] [virtualmath] [ssty=no] +\definefontfeature[math-script] [virtualmath] [ssty=1,mathsize=yes] +\definefontfeature[math-scriptscript] [virtualmath] [ssty=2,mathsize=yes] + +\definefontfeature[math-text-l2r] [virtualmath-l2r] [ssty=no] +\definefontfeature[math-script-l2r] [virtualmath-l2r] [ssty=1,mathsize=yes] +\definefontfeature[math-scriptscript-l2r] [virtualmath-l2r] [ssty=2,mathsize=yes] + +\definefontfeature[math-text-r2l] [virtualmath-r2l] [ssty=no] +\definefontfeature[math-script-r2l] [virtualmath-r2l] [ssty=1,mathsize=yes] +\definefontfeature[math-scriptscript-r2l] [virtualmath-r2l] [ssty=2,mathsize=yes] \definefontfeature [math-nostack-text] [math-text] [nostackmath=yes] \definefontfeature [math-nostack-script] [math-script] [nostackmath=yes] diff --git a/tex/context/base/font-unk.mkiv b/tex/context/base/font-unk.mkiv index 3cf0852be..90cab07a0 100644 --- a/tex/context/base/font-unk.mkiv +++ b/tex/context/base/font-unk.mkiv @@ -43,25 +43,7 @@ \definefontsynonym [MonoCaps] [unknown] \definefontsynonym [MathRoman] [unknown] -\definefontsynonym [MathExtension] [unknown] -\definefontsynonym [MathItalic] [unknown] -\definefontsynonym [MathSymbol] [unknown] - -\definefontsynonym [MathNoName] [unknown] - -\definefontsynonym [MathAlpha] [unknown] -\definefontsynonym [MathBeta] [unknown] -\definefontsynonym [MathGamma] [unknown] -\definefontsynonym [MathDelta] [unknown] - -\definefontsynonym [MathRomanBold] [MathRoman] % todo: -\definefontsynonym [MathExtensionBold] [MathExtension] % [MathRoman] -\definefontsynonym [MathItalicBold] [MathItalic] % [MathRoman] -\definefontsynonym [MathSymbolBold] [MathSymbol] % [MathRoman] -\definefontsynonym [MathAlphaBold] [MathAlpha] % [MathRoman] -\definefontsynonym [MathBetaBold] [MathBeta] % [MathRoman] -\definefontsynonym [MathGammaBold] [MathGamma] % [MathRoman] -\definefontsynonym [MathDeltaBold] [MathDelta] % [MathRoman] +\definefontsynonym [MathRomanBold] [MathRoman] \definefontsynonym [Handwriting] [unknown] \definefontsynonym [Calligraphic] [unknown] @@ -144,10 
+126,30 @@ sc=MonoCaps sa 1] \definebodyfont [default] [mm] - [mr=MathRoman mo 1] - -\definebodyfont [bfmath] [mm] - [mr=MathRomanBold mo 1] + [mr=MathRoman mo 1, + mrlr=MathRomanL2R mo 1, + mrrl=MathRomanR2L mo 1] + +% \definebodyfont [default] [mm] +% [mr=MathRoman mo 1, +% mrlr=MathRomanL2R mo 1, +% mrrl=MathRomanR2L mo 1, +% mb=MathRomanBold mo 1, +% mblr=MathRomanBoldL2R mo 1, +% mbrl=MathRomanBoldR2L mo 1] + +\definebodyfont [bfmath] [mm] % why not just bold + [mr=MathRomanBold mo 1, + mrlr=MathRomanBoldL2R mo 1, + mrrl=MathRomanBoldR2L mo 1] + +% \definebodyfont [bidi] [mm] +% [mrlr=MathRomanL2R mo 1, +% mrrl=MathRomanR2L mo 1] + +% \definebodyfont [bidibfmath] [mm] +% [mrlr=MathRomanBoldL2R mo 1, +% mrrl=MathRomanBoldR2L mo 1] \definebodyfont [default] [hw] [tf=Handwriting sa 1] diff --git a/tex/context/base/l-lpeg.lua b/tex/context/base/l-lpeg.lua index f7e8ad7b1..0f89ba0c2 100644 --- a/tex/context/base/l-lpeg.lua +++ b/tex/context/base/l-lpeg.lua @@ -8,6 +8,44 @@ if not modules then modules = { } end modules ['l-lpeg'] = { local lpeg = require("lpeg") +-- tracing (only used when we encounter a problem in integration of lpeg in luatex) + +local report = texio and texio.write_nl or print + +--~ local lpmatch = lpeg.match +--~ local lpprint = lpeg.print +--~ local lpp = lpeg.P +--~ local lpr = lpeg.R +--~ local lps = lpeg.S +--~ local lpc = lpeg.C +--~ local lpb = lpeg.B +--~ local lpv = lpeg.V +--~ local lpcf = lpeg.Cf +--~ local lpcb = lpeg.Cb +--~ local lpcg = lpeg.Cg +--~ local lpct = lpeg.Ct +--~ local lpcs = lpeg.Cs +--~ local lpcc = lpeg.Cc +--~ local lpcmt = lpeg.Cmt +--~ local lpcarg = lpeg.Carg + +--~ function lpeg.match(l,...) report("LPEG MATCH") lpprint(l) return lpmatch(l,...) end + +--~ function lpeg.P (l) local p = lpp (l) report("LPEG P =") lpprint(l) return p end +--~ function lpeg.R (l) local p = lpr (l) report("LPEG R =") lpprint(l) return p end +--~ function lpeg.S (l) local p = lps (l) report("LPEG S =") lpprint(l) return p end +--~ function lpeg.C (l) local p = lpc (l) report("LPEG C =") lpprint(l) return p end +--~ function lpeg.B (l) local p = lpb (l) report("LPEG B =") lpprint(l) return p end +--~ function lpeg.V (l) local p = lpv (l) report("LPEG V =") lpprint(l) return p end +--~ function lpeg.Cf (l) local p = lpcf (l) report("LPEG Cf =") lpprint(l) return p end +--~ function lpeg.Cb (l) local p = lpcb (l) report("LPEG Cb =") lpprint(l) return p end +--~ function lpeg.Cg (l) local p = lpcg (l) report("LPEG Cg =") lpprint(l) return p end +--~ function lpeg.Ct (l) local p = lpct (l) report("LPEG Ct =") lpprint(l) return p end +--~ function lpeg.Cs (l) local p = lpcs (l) report("LPEG Cs =") lpprint(l) return p end +--~ function lpeg.Cc (l) local p = lpcc (l) report("LPEG Cc =") lpprint(l) return p end +--~ function lpeg.Cmt (l) local p = lpcmt (l) report("LPEG Cmt =") lpprint(l) return p end +--~ function lpeg.Carg (l) local p = lpcarg(l) report("LPEG Carg =") lpprint(l) return p end + local type = type local byte, char = string.byte, string.char @@ -116,17 +154,17 @@ patterns.unspacer = ((patterns.spacer^1)/"")^0 patterns.somecontent = (anything - newline - space)^1 -- (utf8char - newline - space)^1 patterns.beginline = #(1-newline) -local unquoted = Cs(patterns.unquoted * endofstring) -- not C - -function string.unquoted(str) - return match(unquoted,str) or str -end - --- more efficient: +-- local unquoted = Cs(patterns.unquoted * endofstring) -- not C +-- +-- function string.unquoted(str) +-- return match(unquoted,str) or str +-- end +-- +-- more 
efficient on long strings: local unquoted = ( - squote * Cs(1 - P(-2)) * squote - + dquote * Cs(1 - P(-2)) * dquote + squote * Cs((1 - P(-2))^0) * squote + + dquote * Cs((1 - P(-2))^0) * dquote ) function string.unquoted(str) @@ -135,10 +173,12 @@ end patterns.unquoted = unquoted ---~ print(string.unquoted("test")) ---~ print(string.unquoted([["t\"est"]])) ---~ print(string.unquoted([["t\"est"x]])) ---~ print(string.unquoted("\'test\'")) +-- print(string.unquoted("test")) +-- print(string.unquoted([["t\"est"]])) +-- print(string.unquoted([["t\"est"x]])) +-- print(string.unquoted("\'test\'")) +-- print(string.unquoted('"test"')) +-- print(string.unquoted('"test"')) function lpeg.anywhere(pattern) --slightly adapted from website return P { P(pattern) + 1 * V(1) } -- why so complex? @@ -603,3 +643,9 @@ function lpeg.append(list,pp) end --~ Cf(Ct("") * (Cg(C(...) * "=" * Cs(...)))^0, rawset) + +--~ for k, v in next, patterns do +--~ if type(v) ~= "table" then +--~ lpeg.print(v) +--~ end +--~ end diff --git a/tex/context/base/l-url.lua b/tex/context/base/l-url.lua index d75135a2a..69f32cc39 100644 --- a/tex/context/base/l-url.lua +++ b/tex/context/base/l-url.lua @@ -45,12 +45,19 @@ local nothing = Cc("") local escaped = (plus / " ") + (percent * C(hexdigit * hexdigit) / tochar) -- we assume schemes with more than 1 character (in order to avoid problems with windows disks) +-- we also assume that when we have a scheme, we also have an authority -local scheme = Cs((escaped+(1-colon-slash-qmark-hash))^2) * colon + nothing -local authority = slash * slash * Cs((escaped+(1- slash-qmark-hash))^0) + nothing -local path = slash * Cs((escaped+(1- qmark-hash))^0) + nothing -local query = qmark * Cs((escaped+(1- hash))^0) + nothing -local fragment = hash * Cs((escaped+(1- endofstring))^0) + nothing +local schemestr = Cs((escaped+(1-colon-slash-qmark-hash))^2) +local authoritystr = Cs((escaped+(1- slash-qmark-hash))^0) +local pathstr = Cs((escaped+(1- qmark-hash))^0) +local querystr = Cs((escaped+(1- hash))^0) +local fragmentstr = Cs((escaped+(1- endofstring))^0) + +local scheme = schemestr * colon + nothing +local authority = slash * slash * authoritystr + nothing +local path = slash * pathstr + nothing +local query = qmark * querystr + nothing +local fragment = hash * fragmentstr + nothing local validurl = scheme * authority * path * query * fragment local parser = Ct(validurl) @@ -71,11 +78,16 @@ local function split(str) return (type(str) == "string" and lpegmatch(parser,str)) or str end +local isscheme = schemestr * colon * slash * slash -- this test also assumes authority + local function hasscheme(str) - local scheme = lpegmatch(scheme,str) -- at least one character - return scheme and scheme ~= "" + local scheme = lpegmatch(isscheme,str) -- at least one character + return scheme ~= "" and scheme or false end +--~ print(hasscheme("home:")) +--~ print(hasscheme("home://")) + -- todo: cache them local rootletter = R("az","AZ") diff --git a/tex/context/base/lang-url.lua b/tex/context/base/lang-url.lua index e401e4148..e7422a38c 100644 --- a/tex/context/base/lang-url.lua +++ b/tex/context/base/lang-url.lua @@ -9,7 +9,7 @@ if not modules then modules = { } end modules ['lang-url'] = { local utf = unicode.utf8 local utfcharacters, utfvalues = string.utfcharacters, string.utfvalues -local utfbyte, utfgsub = utf.byte, utf.gsub +local utfbyte, utfchar, utfgsub = utf.byte, utf.char, utf.gsub context = context @@ -26,48 +26,52 @@ dealing with <l n='ascii'/> characters.</p> commands.hyphenatedurl = 
commands.hyphenatedurl or { } local hyphenatedurl = commands.hyphenatedurl -hyphenatedurl.characters = utilities.storage.allocate { - ["!"] = 1, - ["\""] = 1, - ["#"] = 1, - ["$"] = 1, - ["%"] = 1, - ["&"] = 1, - ["("] = 1, - ["*"] = 1, - ["+"] = 1, - [","] = 1, - ["-"] = 1, - ["."] = 1, - ["/"] = 1, - [":"] = 1, - [";"] = 1, - ["<"] = 1, - ["="] = 1, - [">"] = 1, - ["?"] = 1, - ["@"] = 1, - ["["] = 1, - ["\\"] = 1, - ["^"] = 1, - ["_"] = 1, - ["`"] = 1, - ["{"] = 1, - ["|"] = 1, - ["~"] = 1, - - ["'"] = 2, - [")"] = 2, - ["]"] = 2, - ["}"] = 2 +local characters = utilities.storage.allocate { + ["!"] = 1, + ["\""] = 1, + ["#"] = 1, + ["$"] = 1, + ["%"] = 1, + ["&"] = 1, + ["("] = 1, + ["*"] = 1, + ["+"] = 1, + [","] = 1, + ["-"] = 1, + ["."] = 1, + ["/"] = 1, + [":"] = 1, + [";"] = 1, + ["<"] = 1, + ["="] = 1, + [">"] = 1, + ["?"] = 1, + ["@"] = 1, + ["["] = 1, + ["\\"] = 1, + ["^"] = 1, + ["_"] = 1, + ["`"] = 1, + ["{"] = 1, + ["|"] = 1, + ["~"] = 1, + + ["'"] = 2, + [")"] = 2, + ["]"] = 2, + ["}"] = 2, } +local mapping = utilities.storage.allocate { +--~ [utfchar(0xA0)] = "~", -- nbsp (catch) +} + +hyphenatedurl.characters = characters +hyphenatedurl.mapping = mapping hyphenatedurl.lefthyphenmin = 2 hyphenatedurl.righthyphenmin = 3 hyphenatedurl.discretionary = nil -local chars = hyphenatedurl.characters - local function action(hyphenatedurl, str, left, right, disc) local n = 0 local b = math.max( left or hyphenatedurl.lefthyphenmin, 2) @@ -75,10 +79,11 @@ local function action(hyphenatedurl, str, left, right, disc) local d = disc or hyphenatedurl.discretionary for s in utfcharacters(str) do n = n + 1 + s = mapping[s] or s if s == d then context.d(utfbyte(s)) else - local c = chars[s] + local c = characters[s] if not c or n<=b or n>=e then context.n(utfbyte(s)) elseif c == 1 then diff --git a/tex/context/base/lpdf-ano.lua b/tex/context/base/lpdf-ano.lua index 24f1e903f..3660d5a65 100644 --- a/tex/context/base/lpdf-ano.lua +++ b/tex/context/base/lpdf-ano.lua @@ -391,10 +391,12 @@ runners["special operation with arguments"] = runners["special"] function specials.internal(var,actions) -- better resolve in strc-ref local i = tonumber(var.operation) - local v = references.internals[i] + local v = i and references.internals[i] +--~ print(">>>>>>>",i) +--~ inspect(v) if not v then -- error - report_reference("no internal reference '%s'",var.operation) + report_reference("no internal reference '%s'",i or "?") elseif getinnermethod() == "names" then -- named return link(nil,nil,"aut:"..i,v.references.realpage,actions) @@ -453,6 +455,22 @@ function specials.userpage(var,actions) end end +-- sections + +--~ function specials.section(var,actions) +--~ local sectionname = var.operation +--~ local destination = var.arguments +--~ local internal = structures.sections.internalreference(sectionname,destination) +--~ if internal then +--~ var.special = "internal" +--~ var.operation = internal +--~ var.arguments = nil +--~ specials.internal(var,actions) +--~ end +--~ end + +specials.section = specials.internal -- specials.section just need to have a value as it's checked + -- todo, do this in references namespace ordered instead (this is an experiment) local splitter = lpeg.splitat(":") diff --git a/tex/context/base/luat-cod.lua b/tex/context/base/luat-cod.lua index 3512673f8..b022f31c3 100644 --- a/tex/context/base/luat-cod.lua +++ b/tex/context/base/luat-cod.lua @@ -77,7 +77,9 @@ end environment = environment or { } local environment = environment -local sourcefile = arg and arg[1] or "" +-- no 
string.unquoted yet + +local sourcefile = gsub(arg and arg[1] or "","^\"(.*)\"$","%1") local sourcepath = find(sourcefile,"/") and gsub(sourcefile,"/[^/]+$","") or "" local targetpath = "." diff --git a/tex/context/base/luat-fio.lua b/tex/context/base/luat-fio.lua index b7144c343..1edbc2ef8 100644 --- a/tex/context/base/luat-fio.lua +++ b/tex/context/base/luat-fio.lua @@ -83,6 +83,10 @@ if not resolvers.instance then end -statistics.register("input load time", function() - return format("%s seconds", statistics.elapsedtime(resolvers.instance)) +statistics.register("resource resolver", function() + if resolvers.scantime then + return format("loadtime %s seconds, scantime %s seconds", resolvers.loadtime(), resolvers.scantime()) + else + return format("loadtime %s seconds", resolvers.loadtime()) + end end) diff --git a/tex/context/base/lxml-tab.lua b/tex/context/base/lxml-tab.lua index 1df41eb33..d114bb83d 100644 --- a/tex/context/base/lxml-tab.lua +++ b/tex/context/base/lxml-tab.lua @@ -390,9 +390,7 @@ local function handle_any_entity(str) if resolve then local a = acache[str] -- per instance ! todo if not a then -print(">1",str,a) a = resolve_predefined and predefined_simplified[str] -print(">2",str,a) if a then -- one of the predefined elseif type(resolve) == "function" then @@ -400,7 +398,6 @@ print(">2",str,a) else a = entities[str] end -print(">3",str,a) if a then if type(a) == "function" then if trace_entities then @@ -408,9 +405,7 @@ if type(a) == "function" then end a = a(str) or "" end -print(">4",str,a) a = lpegmatch(parsedentity,a) or a -print(">5",str,a) if trace_entities then report_xml("resolved entity &%s; -> %s (internal)",str,a) end diff --git a/tex/context/base/math-act.lua b/tex/context/base/math-act.lua index 03675933e..4016c3172 100644 --- a/tex/context/base/math-act.lua +++ b/tex/context/base/math-act.lua @@ -75,8 +75,9 @@ sequencers.appendaction(mathactions,"system","mathematics.scaleparameters") function mathematics.checkaccentbaseheight(target,original) local mathparameters = target.mathparameters - if mathparameters then - mathparameters.AccentBaseHeight = nil -- safeguard + if mathparameters and mathparameters.AccentBaseHeight == 0 then + mathparameters.AccentBaseHeight = original.parameters.x_height or 0 +--~ mathparameters.AccentBaseHeight = target.parameters.vfactor * mathparameters.AccentBaseHeight end end diff --git a/tex/context/base/math-arr.mkiv b/tex/context/base/math-arr.mkiv index 3a7f2521e..326056ffb 100644 --- a/tex/context/base/math-arr.mkiv +++ b/tex/context/base/math-arr.mkiv @@ -17,7 +17,7 @@ %D These will be generalized! Is it still needed in \MKIV? 
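-- editor's sketch, not part of the patch: the inline quote stripping that
-- luat-cod.lua now does with gsub (string.unquoted is not loaded yet at that
-- point in the startup); only one pair of surrounding double quotes is
-- removed. The helper name strip_quotes is illustrative only.

local function strip_quotes(str)
    -- drop the count that gsub returns as a second value
    return (string.gsub(str or "", "^\"(.*)\"$", "%1"))
end

print(strip_quotes([["c:/data/test.tex"]])) -- c:/data/test.tex
print(strip_quotes("test.tex"))             -- test.tex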
-\def\exmthfont#1{\symbolicsizedfont#1\plusone{MathExtension}} +\def\exmthfont#1{\mr} % \symbolicsizedfont#1\plusone{MathExtension}} \def\domthfrac#1#2#3#4#5#6#7% {\begingroup @@ -26,7 +26,7 @@ \setbox2\hbox{$#1 #7$}% \dimen0\wd0 \ifdim\wd2>\dimen0 \dimen0\wd2 \fi - \setbox4\hbox to \dimen0{\exmthfont#2#3\leaders\hbox{#4}\hss#5}% + \setbox4\hbox to \dimen0{\leaders\hbox{#4}\hss#5}% \mathord{\vcenter{{\offinterlineskip \hbox to \dimen0{\hss\box0\hss}% \kern \ht4% @@ -76,6 +76,47 @@ \def\mtharrfactor{1} \def\mtharrextra {0} +% \def\domthxarr#1#2#3#4#5% hm, looks like we do a double mathrel +% {\begingroup +% \def\mtharrfactor{1}% +% \def\mtharrextra {0}% +% \processaction[#1] % will be sped up +% [ \v!none=>\def\mtharrfactor{0}, +% \v!small=>\def\mtharrextra{10}, +% \v!medium=>\def\mtharrextra{15}, +% \v!big=>\def\mtharrextra{20}, +% \v!normal=>, +% \v!default=>, +% \v!unknown=>\doifnumberelse{#1}{\def\mtharrextra{#1}}\donothing]% +% \mathsurround\zeropoint +% \muskip0=\thirdoffourarguments #2mu +% \muskip2=\fourthoffourarguments #2mu +% \muskip4=\firstoffourarguments #2mu +% \muskip6=\secondoffourarguments #2mu +% \muskip0=\mtharrfactor\muskip0 \advance\muskip0 \mtharrextra mu +% \muskip2=\mtharrfactor\muskip2 \advance\muskip2 \mtharrextra mu +% \setbox0\hbox{$\scriptstyle +% \mkern\muskip4\relax +% \mkern\muskip0\relax +% #5\relax +% \mkern\muskip2\relax +% \mkern\muskip6\relax +% $}% +% \setbox4\hbox{#3\displaystyle}% +% \dimen0\wd0 +% \ifdim\wd4>\dimen0 \dimen0\wd4 \fi +% \setbox2\hbox{$\scriptstyle +% \mkern\muskip4\relax +% \mkern\muskip0\relax +% #4\relax +% \mkern\muskip2\relax +% \mkern\muskip6\relax +% $}% +% \ifdim\wd2>\dimen0 \dimen0\wd2 \fi +% \setbox4\hbox to \dimen0{#3\displaystyle}% +% \mathrel{\mathop{\hbox to \dimen0{\hss\copy4\hss}}\limits\normalsuperscript{\box0}\normalsubscript{\box2}}% +% \endgroup} + \def\domthxarr#1#2#3#4#5% hm, looks like we do a double mathrel {\begingroup \def\mtharrfactor{1}% @@ -91,26 +132,24 @@ \mathsurround\zeropoint \muskip0=\thirdoffourarguments #2mu \muskip2=\fourthoffourarguments #2mu - \muskip4=\firstoffourarguments #2mu - \muskip6=\secondoffourarguments #2mu - \muskip0=\mtharrfactor\muskip0 \advance\muskip0 \mtharrextra mu - \muskip2=\mtharrfactor\muskip2 \advance\muskip2 \mtharrextra mu + \muskip0=\mtharrfactor\muskip0 + \muskip2=\mtharrfactor\muskip2 + \advance\muskip0 \mtharrextra mu + \advance\muskip2 \mtharrextra mu + \advance\muskip0 \firstoffourarguments #2mu + \advance\muskip2 \secondoffourarguments #2mu \setbox0\hbox{$\scriptstyle - \mkern\muskip4\relax \mkern\muskip0\relax #5\relax \mkern\muskip2\relax - \mkern\muskip6\relax $}% \setbox4\hbox{#3\displaystyle}% \dimen0\wd0 \ifdim\wd4>\dimen0 \dimen0\wd4 \fi \setbox2\hbox{$\scriptstyle - \mkern\muskip4\relax \mkern\muskip0\relax #4\relax \mkern\muskip2\relax - \mkern\muskip6\relax $}% \ifdim\wd2>\dimen0 \dimen0\wd2 \fi \setbox4\hbox to \dimen0{#3\displaystyle}% @@ -122,6 +161,16 @@ %D There are some arrows which are created by stacking two arrows. The next %D macro helps in defining such \quotation{double arrows}. +% weird, we get a shift with the double ... 
but will become core luatex anyway +% +% \startchemicalformula +% \chemical{S} +% \chemical{+} +% \chemical{O_2} +% \chemical{EQUILIBRIUM}{boven}{onder} +% \chemical{SO_2} +% \stopchemicalformula + \def\domthxarrdouble#1#2#3#4#5#6#7% opt l r sp rs top bot {\mathrel {\scratchdimen.32ex\relax % was .22, todo: make configurable diff --git a/tex/context/base/math-def.mkiv b/tex/context/base/math-def.mkiv index 3b336855a..b8febc621 100644 --- a/tex/context/base/math-def.mkiv +++ b/tex/context/base/math-def.mkiv @@ -18,7 +18,7 @@ % this will be done at the lua end \startluacode - mathematics.define() + mathematics.define(\number\defaultmathfamily) -- mathematics.xml.registerentities() \stopluacode @@ -45,10 +45,10 @@ % will go to math-ext -\Umathchardef\braceld=0 \mrfam "FF07A -\Umathchardef\bracerd=0 \mrfam "FF07B -\Umathchardef\bracelu=0 \mrfam "FF07C -\Umathchardef\braceru=0 \mrfam "FF07D +\Umathchardef\braceld=0 \defaultmathfamily "FF07A +\Umathchardef\bracerd=0 \defaultmathfamily "FF07B +\Umathchardef\bracelu=0 \defaultmathfamily "FF07C +\Umathchardef\braceru=0 \defaultmathfamily "FF07D % ctx specific @@ -224,7 +224,7 @@ %D $\sqrt[3]{10}$ %D \stoptyping -\def\rootradical{\Uroot 0 "221A } % can be done in char-def +\def\rootradical{\Uroot \defaultmathfamily "221A } % can be done in char-def \def\root#1\of{\rootradical{#1}} % #2 @@ -323,15 +323,39 @@ %D Goodies. We might move this elsewhere. -\def\underleftarrow #1{\mathop{\Uunderdelimiter 0 "2190 {#1}}} -\def\overleftarrow #1{\mathop{\Uoverdelimiter 0 "2190 {#1}}} -\def\underrightarrow#1{\mathop{\Uunderdelimiter 0 "2192 {#1}}} -\def\overrightarrow #1{\mathop{\Uoverdelimiter 0 "2192 {#1}}} - -% todo: \Udelimiterover, \Udelimiterunder - -\def\normaldoublebrace {\Umathaccents 0 0 "23DE 0 0 "23DF } -\def\normaldoubleparent{\Umathaccents 0 0 "23DC 0 0 "23DD } +% Be careful in choosing what accents you take (the code +% below uses a combining one): +% +% \startbuffer +% % $\Umathaccent top 0 0 "20D7 {example}$ +% % $\Umathaccent top fixed 0 0 "20D7 {example}$ +% $\Umathaccent 0 0 "20D7 {example}$ +% $\Umathaccent fixed 0 0 "20D7 {example}$ +% $\Umathaccent bottom 0 0 "20D7 {example}$ +% $\Umathaccent bottom fixed 0 0 "20D7 {example}$ +% $\Umathaccent both 0 0 "20D7 +% 0 0 "20D7 {example}$ +% $\Umathaccent both fixed 0 0 "20D7 +% fixed 0 0 "20D7 {example}$ +% $\Umathaccent both 0 0 "20D7 +% fixed 0 0 "20D7 {example}$ +% $\Umathaccent both fixed 0 0 "20D7 +% 0 0 "20D7 {example}$ +% \stopbuffer +% +% \setupbodyfont[modern] \getbuffer +% \setupbodyfont[xits] \getbuffer +% \setupbodyfont[cambria] \getbuffer + +\def\underleftarrow #1{\mathop{\Uunderdelimiter \defaultmathfamily "2190 {#1}}} +\def\overleftarrow #1{\mathop{\Uoverdelimiter \defaultmathfamily "2190 {#1}}} +\def\underrightarrow#1{\mathop{\Uunderdelimiter \defaultmathfamily "2192 {#1}}} +\def\overrightarrow #1{\mathop{\Uoverdelimiter \defaultmathfamily "2192 {#1}}} + +% watch out: here we have a class (zero): + +\def\normaldoublebrace {\Umathaccents 0 \defaultmathfamily "23DE 0 \defaultmathfamily "23DF } +\def\normaldoubleparent{\Umathaccents 0 \defaultmathfamily "23DC 0 \defaultmathfamily "23DD } \let\normaloverbrace \overbrace \let\normalunderbrace \underbrace @@ -365,8 +389,8 @@ % todo mathclass=punctuation ord -% \Umathcode"02C="6 "0 "02C -% \Umathcode"02E="0 "0 "02E +% \Umathcode"02C="6 \defaultmathfamily "02C +% \Umathcode"02E="0 \defaultmathfamily "02E % tricky .. 
todo diff --git a/tex/context/base/math-dim.lua b/tex/context/base/math-dim.lua index 7b4b49f68..8407fd077 100644 --- a/tex/context/base/math-dim.lua +++ b/tex/context/base/math-dim.lua @@ -22,171 +22,77 @@ if not modules then modules = { } end modules ['math-dim'] = { local abs, next = math.abs, next local defaults = { - ['axis']={ - ['default']={ "AxisHeight", "axis_height" }, - }, - ['accent_base_height']={ - ['default']={ "AccentBaseHeight", "x_height" }, - }, - ['fraction_del_size']={ - ['default']={ "FractionDelimiterSize", "delim2" }, - ['cramped_display_style']={ "FractionDelimiterDisplayStyleSize", "delim1" }, - ['display_style']={ "FractionDelimiterDisplayStyleSize", "delim1" }, - }, - ['fraction_denom_down']={ - ['default']={ "FractionDenominatorShiftDown", "denom2" }, - ['cramped_display_style']={ "FractionDenominatorDisplayStyleShiftDown", "denom1" }, - ['display_style']={ "FractionDenominatorDisplayStyleShiftDown", "denom1" }, - }, - ['fraction_denom_vgap']={ - ['default']={ "FractionDenominatorGapMin", "default_rule_thickness" }, - ['cramped_display_style']={ "FractionDenominatorDisplayStyleGapMin", "3*default_rule_thickness" }, - ['display_style']={ "FractionDenominatorDisplayStyleGapMin", "3*default_rule_thickness" }, - }, - ['fraction_num_up']={ - ['default']={ "FractionNumeratorShiftUp", "num2" }, - ['cramped_display_style']={ "FractionNumeratorDisplayStyleShiftUp", "num1" }, - ['display_style']={ "FractionNumeratorDisplayStyleShiftUp", "num1" }, - }, - ['fraction_num_vgap']={ - ['default']={ "FractionNumeratorGapMin", "default_rule_thickness" }, - ['cramped_display_style']={ "FractionNumeratorDisplayStyleGapMin", "3*default_rule_thickness" }, - ['display_style']={ "FractionNumeratorDisplayStyleGapMin", "3*default_rule_thickness" }, - }, - ['fraction_rule']={ - ['default']={ "FractionRuleThickness", "default_rule_thickness" }, - }, - ['limit_above_bgap']={ - ['default']={ "UpperLimitBaselineRiseMin", "big_op_spacing3" }, - }, - ['limit_above_kern']={ - ['default']={ "0", "big_op_spacing5" }, - }, - ['limit_above_vgap']={ - ['default']={ "UpperLimitGapMin", "big_op_spacing1" }, - }, - ['limit_below_bgap']={ - ['default']={ "LowerLimitBaselineDropMin", "big_op_spacing4" }, - }, - ['limit_below_kern']={ - ['default']={ "0", "big_op_spacing5" }, - }, - ['limit_below_vgap']={ - ['default']={ "LowerLimitGapMin", "big_op_spacing2" }, - }, - ---~ ['....']={ ---~ ['default']={ "DisplayOperatorMinHeight", "....." 
}, ---~ }, - - ['overbar_kern']={ - ['default']={ "OverbarExtraAscender", "default_rule_thickness" }, - }, - ['overbar_rule']={ - ['default']={ "OverbarRuleThickness", "default_rule_thickness" }, - }, - ['overbar_vgap']={ - ['default']={ "OverbarVerticalGap", "3*default_rule_thickness" }, - }, - ['quad']={ - ['default']={ "font_size(f)", "math_quad" }, - }, - ['radical_kern']={ - ['default']={ "RadicalExtraAscender", "default_rule_thickness" }, - }, - ['radical_rule']={ - ['default']={ "RadicalRuleThickness", "default_rule_thickness" }, - -- ['default']={ "surd_height(f)", "default_rule_thickness" }, - }, - ['radical_vgap']={ - ['default']={ "RadicalVerticalGap", "default_rule_thickness+(abs(default_rule_thickness)/4)" }, - ['display_style']={ "RadicalDisplayStyleVerticalGap", "default_rule_thickness+(abs(math_x_height)/4)" }, - }, - ['space_after_script']={ - ['default']={ "SpaceAfterScript", "script_space" }, - }, - ['stack_denom_down']={ - ['default']={ "StackBottomShiftDown", "denom2" }, - ['cramped_display_style']={ "StackBottomDisplayStyleShiftDown", "denom1" }, - ['display_style']={ "StackBottomDisplayStyleShiftDown", "denom1" }, - }, - ['stack_num_up']={ - ['default']={ "StackTopShiftUp", "num3" }, - ['cramped_display_style']={ "StackTopDisplayStyleShiftUp", "num1" }, - ['display_style']={ "StackTopDisplayStyleShiftUp", "num1" }, - }, - ['stack_vgap']={ - ['default']={ "StackGapMin", "3*default_rule_thickness" }, - ['cramped_display_style']={ "StackDisplayStyleGapMin", "7*default_rule_thickness" }, - ['display_style']={ "StackDisplayStyleGapMin", "7*default_rule_thickness" }, - }, - ['sub_shift_down']={ - ['default']={ "SubscriptShiftDown", "sub1" }, - }, - ['sub_shift_drop']={ - ['default']={ "SubscriptBaselineDropMin", "sub_drop" }, - }, - ['sub_sup_shift_down']={ - ['default']={ "SubscriptShiftDown", "sub2" }, -- todo - }, - ['sub_top_max']={ - ['default']={ "SubscriptTopMax", "abs(math_x_height*4)/5" }, - }, - ['subsup_vgap']={ - ['default']={ "SubSuperscriptGapMin", "4*default_rule_thickness" }, - }, - ['sup_bottom_min']={ - ['default']={ "SuperscriptBottomMin", "abs(math_x_height)/4" }, - }, - ['sup_shift_drop']={ - ['default']={ "SuperscriptBaselineDropMax", "sup_drop" }, - }, - ['sup_shift_up']={ - ['cramped_display_style']={ "SuperscriptShiftUpCramped", "sup3" }, - ['cramped_script_script_style']={ "SuperscriptShiftUpCramped", "sup3" }, - ['cramped_script_style']={ "SuperscriptShiftUpCramped", "sup3" }, - ['cramped_text_style']={ "SuperscriptShiftUpCramped", "sup3" }, - ['display_style']={ "SuperscriptShiftUp", "sup1" }, - ['script_script_style']={ "SuperscriptShiftUp", "sup2" }, - ['script_style']={ "SuperscriptShiftUp", "sup2" }, - ['text_style']={ "SuperscriptShiftUp", "sup2" }, - }, - ['sup_sub_bottom_max']={ - ['default']={ "SuperscriptBottomMaxWithSubscript", "abs(math_x_height*4)/5" }, - }, - ['underbar_kern']={ - ['default']={ "UnderbarExtraDescender", "0" }, - }, - ['underbar_rule']={ - ['default']={ "UnderbarRuleThickness", "default_rule_thickness" }, - }, - ['underbar_vgap']={ - ['default']={ "UnderbarVerticalGap", "3*default_rule_thickness" }, - }, - ['connector_overlap_min']={ - ['default']={ "MinConnectorOverlap", "0.25*default_rule_thickness" }, - }, - ['over_delimiter_vgap']={ - ['default']={ "StretchStackGapBelowMin", "big_op_spacing1" }, - }, - ['over_delimiter_bgap']={ - ['default']={ "StretchStackTopShiftUp", "big_op_spacing3" }, - }, - ['under_delimiter_vgap']={ - ['default']={ "StretchStackGapAboveMin", "big_op_spacing2" }, - }, - 
['under_delimiter_bgap']={ - ['default']={ "StretchStackBottomShiftDown", "big_op_spacing4" }, - }, - ['radical_degree_before']={ - ['default']={ "RadicalKernBeforeDegree", "(5/18)*quad" }, - }, - ['radical_degree_after']={ - ['default']={ "RadicalKernAfterDegree", "(-10/18)*quad" }, - }, - ['radical_degree_raise']={ - ['default']={ "RadicalDegreeBottomRaisePercent", "60" }, - }, + axis = { default = { "AxisHeight", "axis_height" }, }, + accent_base_height = { default = { "AccentBaseHeight", "x_height" }, }, + fraction_del_size = { default = { "FractionDelimiterSize", "delim2" }, + cramped_display_style = { "FractionDelimiterDisplayStyleSize", "delim1" }, + display_style = { "FractionDelimiterDisplayStyleSize", "delim1" }, }, + fraction_denom_down = { default = { "FractionDenominatorShiftDown", "denom2" }, + cramped_display_style = { "FractionDenominatorDisplayStyleShiftDown", "denom1" }, + display_style = { "FractionDenominatorDisplayStyleShiftDown", "denom1" }, }, + fraction_denom_vgap = { default = { "FractionDenominatorGapMin", "default_rule_thickness" }, + cramped_display_style = { "FractionDenominatorDisplayStyleGapMin", "3*default_rule_thickness" }, + display_style = { "FractionDenominatorDisplayStyleGapMin", "3*default_rule_thickness" }, }, + fraction_num_up = { default = { "FractionNumeratorShiftUp", "num2" }, + cramped_display_style = { "FractionNumeratorDisplayStyleShiftUp", "num1" }, + display_style = { "FractionNumeratorDisplayStyleShiftUp", "num1" }, }, + fraction_num_vgap = { default = { "FractionNumeratorGapMin", "default_rule_thickness" }, + cramped_display_style = { "FractionNumeratorDisplayStyleGapMin", "3*default_rule_thickness" }, + display_style = { "FractionNumeratorDisplayStyleGapMin", "3*default_rule_thickness" }, }, + fraction_rule = { default = { "FractionRuleThickness", "default_rule_thickness" }, }, + limit_above_bgap = { default = { "UpperLimitBaselineRiseMin", "big_op_spacing3" }, }, + limit_above_vgap = { default = { "UpperLimitGapMin", "big_op_spacing1" }, }, + limit_above_kern = { default = { "0", "big_op_spacing5" }, }, + limit_below_bgap = { default = { "LowerLimitBaselineDropMin", "big_op_spacing4" }, }, + limit_below_vgap = { default = { "LowerLimitGapMin", "big_op_spacing2" }, }, + limit_below_kern = { default = { "0", "big_op_spacing5" }, }, +-- .... = { default = { "DisplayOperatorMinHeight", "....." 
}, }, + overbar_kern = { default = { "OverbarExtraAscender", "default_rule_thickness" }, }, + overbar_rule = { default = { "OverbarRuleThickness", "default_rule_thickness" }, }, + overbar_vgap = { default = { "OverbarVerticalGap", "3*default_rule_thickness" }, }, + quad = { default = { "font_size(f)", "math_quad" }, }, + radical_kern = { default = { "RadicalExtraAscender", "default_rule_thickness" }, }, + radical_rule = { default = { "RadicalRuleThickness", "default_rule_thickness" }, }, + -- default = { "surd_height(f)", "default_rule_thickness" }, + radical_vgap = { default = { "RadicalVerticalGap", "default_rule_thickness+(abs(default_rule_thickness)/4)" }, + display_style = { "RadicalDisplayStyleVerticalGap", "default_rule_thickness+(abs(math_x_height)/4)" }, }, + space_after_script = { default = { "SpaceAfterScript", "script_space" }, }, + stack_denom_down = { default = { "StackBottomShiftDown", "denom2" }, + cramped_display_style = { "StackBottomDisplayStyleShiftDown", "denom1" }, + display_style = { "StackBottomDisplayStyleShiftDown", "denom1" }, }, + stack_num_up = { default = { "StackTopShiftUp", "num3" }, + cramped_display_style = { "StackTopDisplayStyleShiftUp", "num1" }, + display_style = { "StackTopDisplayStyleShiftUp", "num1" }, }, + stack_vgap = { default = { "StackGapMin", "3*default_rule_thickness" }, + cramped_display_style = { "StackDisplayStyleGapMin", "7*default_rule_thickness" }, + display_style = { "StackDisplayStyleGapMin", "7*default_rule_thickness" }, }, + sub_shift_down = { default = { "SubscriptShiftDown", "sub1" }, }, + sub_shift_drop = { default = { "SubscriptBaselineDropMin", "sub_drop" }, }, + sub_sup_shift_down = { default = { "SubscriptShiftDown", "sub2" }, }, + sub_top_max = { default = { "SubscriptTopMax", "abs(math_x_height*4)/5" }, }, + subsup_vgap = { default = { "SubSuperscriptGapMin", "4*default_rule_thickness" }, }, + sup_bottom_min = { default = { "SuperscriptBottomMin", "abs(math_x_height)/4" }, }, + sup_shift_drop = { default = { "SuperscriptBaselineDropMax", "sup_drop" }, }, + sup_shift_up = { cramped_display_style = { "SuperscriptShiftUpCramped", "sup3" }, + cramped_script_script_style = { "SuperscriptShiftUpCramped", "sup3" }, + cramped_script_style = { "SuperscriptShiftUpCramped", "sup3" }, + cramped_text_style = { "SuperscriptShiftUpCramped", "sup3" }, + display_style = { "SuperscriptShiftUp", "sup1" }, + script_script_style = { "SuperscriptShiftUp", "sup2" }, + script_style = { "SuperscriptShiftUp", "sup2" }, + text_style = { "SuperscriptShiftUp", "sup2" }, }, + sup_sub_bottom_max = { default = { "SuperscriptBottomMaxWithSubscript", "abs(math_x_height*4)/5" }, }, + underbar_kern = { default = { "UnderbarExtraDescender", "0" }, }, + underbar_rule = { default = { "UnderbarRuleThickness", "default_rule_thickness" }, }, + underbar_vgap = { default = { "UnderbarVerticalGap", "3*default_rule_thickness" }, }, + connector_overlap_min = { default = { "MinConnectorOverlap", "0.25*default_rule_thickness" }, }, + over_delimiter_vgap = { default = { "StretchStackGapBelowMin", "big_op_spacing1" }, }, + over_delimiter_bgap = { default = { "StretchStackTopShiftUp", "big_op_spacing3" }, }, + under_delimiter_vgap = { default = { "StretchStackGapAboveMin", "big_op_spacing2" }, }, + under_delimiter_bgap = { default = { "StretchStackBottomShiftDown", "big_op_spacing4" }, }, + radical_degree_before = { default = { "RadicalKernBeforeDegree", "(5/18)*quad" }, }, + radical_degree_after = { default = { "RadicalKernAfterDegree", "(-10/18)*quad" }, }, + 
radical_degree_raise = { default = { "RadicalDegreeBottomRaisePercent", "60" }, }, } local styles = { diff --git a/tex/context/base/math-frc.mkiv b/tex/context/base/math-frc.mkiv index d69937ad3..9f3b3b135 100644 --- a/tex/context/base/math-frc.mkiv +++ b/tex/context/base/math-frc.mkiv @@ -161,7 +161,7 @@ % to be checked: \unexpanded\def\exmthfont#1% - {\symbolicsizedfont#1\plusone{MathExtension}} + {\mr} % \symbolicsizedfont#1\plusone{MathExtension}} \def\domthfrac#1#2#3#4#5#6#7% {\begingroup diff --git a/tex/context/base/math-ini.lua b/tex/context/base/math-ini.lua index 358a7fbae..1516c16bc 100644 --- a/tex/context/base/math-ini.lua +++ b/tex/context/base/math-ini.lua @@ -27,7 +27,8 @@ mathematics.extrabase = 0xFE000 -- here we push some virtuals mathematics.privatebase = 0xFF000 -- here we push the ex local families = allocate { - tf = 0, it = 1, sl = 2, bf = 3, bi = 4, bs = 5, -- no longer relevant + mr = 0, + mb = 1, } local classes = allocate { @@ -66,9 +67,9 @@ local codes = allocate { variable = 7, [7] = "variable", } -mathematics.families = families mathematics.classes = classes mathematics.codes = codes +mathematics.families = families classes.alphabetic = classes.alpha classes.unknown = classes.nothing diff --git a/tex/context/base/math-ini.mkiv b/tex/context/base/math-ini.mkiv index a6519dbe5..ad01fa0b2 100644 --- a/tex/context/base/math-ini.mkiv +++ b/tex/context/base/math-ini.mkiv @@ -27,8 +27,6 @@ \unprotect -\ifx\v!compact\undefined \def\v!compact{compact} \fi - %D We move these definitions into the format: % test [[\char948 \ctxlua{tex.sprint(utf.char(948))}]] @@ -61,6 +59,8 @@ \attribute\mathmodeattribute\plusone \to \everybeforedisplayformula +\setnewconstant\defaultmathfamily 255 + %D \macros %D {setupmathematics} %D @@ -95,7 +95,7 @@ \def\setmathattribute#1#2{\ifmmode\ctxlua{mathematics.syncboth ("#1","#2")}\fi} \def\setmathalphabet #1{\ifmmode\ctxlua{mathematics.syncname ("#1")}\fi} \def\setmathstyle #1{\ifmmode\ctxlua{mathematics.syncstyle("#1")}\fi} -\def\setmathalternate #1{\ifmmode\ctxlua{mathematics.setalternate(0,"#1")}\fi} % fam 0 +\def\setmathalternate #1{\ifmmode\ctxlua{mathematics.setalternate(\number\defaultmathfamily,"#1")}\fi} \def\setmathstylealterternate#1% {\ifcsname\??mo:\c!alternative:\fontclass:#1\endcsname @@ -158,14 +158,6 @@ \let\normalmr\mr -% \prependtoks -% \let\mr\normalmr -% \let\rm\mathrm \let\ss\mathss \let\tt\mathtt -% \let\tf\mathtf \let\bf\mathbf \let\it\mathit \let\sl\mathsl \let\bi\mathbi \let\bs\mathbs -% \let\frak\mathfraktur \let\cal\mathscript \let\bbd\mathblackboard -% \mathdefault -% \to \everymathematics - \let\normaltf\tf \unexpanded\def\tf{\ifmmode\mathtf\else\normaltf\fi} \let\normalbf\bf \unexpanded\def\bf{\ifmmode\mathbf\else\normalbf\fi} \let\normalit\it \unexpanded\def\it{\ifmmode\mathit\else\normalit\fi} @@ -179,14 +171,10 @@ \unexpanded\def\mr{\ifmmode \normalmr\fi} \prependtoks -% \let\mr\normalmr -% \let\rm\mathrm \let\ss\mathss \let\tt\mathtt -% \let\tf\mathtf \let\bf\mathbf \let\it\mathit \let\sl\mathsl \let\bi\mathbi \let\bs\mathbs -% \let\frak\mathfraktur \let\cal\mathscript \let\bbd\mathblackboard \mathdefault \to \everymathematics -%D We could set the renderign attribute at the \LUA\ end but as there +%D We could set the rendering attribute at the \LUA\ end but as there %D can be many small math snippets we keep track of the state at the %D \TEX\ end (mapping is export safe). 
%D @@ -632,13 +620,28 @@ %D Memory saver: \appendtoks - \doifelse{\mathematicsparameter\v!compact}\v!yes - {\ctxlua{fonts.handlers.vf.math.optional=true}} - {\ctxlua{fonts.handlers.vf.math.optional=false}}% + \doifelse{\mathematicsparameter\c!compact}\v!yes + {\ctxlua{fonts.handlers.vf.math.optional=true}}% + {\ctxlua{fonts.handlers.vf.math.optional=false}}% \to \everysetupmathematics \setupmathematics - [\v!compact=no] + [\c!compact=no] + +%D Arabic: + +\newconditional\mathematics_right_to_left + +\appendtoks + \doifelse{\mathematicsparameter\c!align}{r2l}% + {\settrue \mathematics_right_to_left}% + {\setfalse\mathematics_right_to_left}% +\to \everysetupmathematics + +\appendtoks + \mathdir T\ifconditional\mathematics_right_to_left R\else L\fi T +%to \everymathematics % comes too late and I'm not in the mood for a mixed mode kludge now +\to \everysetupmathematics %D Delayed: greek. %D diff --git a/tex/context/base/math-noa.lua b/tex/context/base/math-noa.lua index 36922b60a..28965a09a 100644 --- a/tex/context/base/math-noa.lua +++ b/tex/context/base/math-noa.lua @@ -15,6 +15,9 @@ if not modules then modules = { } end modules ['math-noa'] = { -- beware: names will change as we wil make noads.xxx.handler i.e. xxx -- subnamespaces +-- 20D6 -> 2190 +-- 20D7 -> 2192 + local utf = unicode.utf8 local utfchar, utfbyte = utf.char, utf.byte @@ -590,7 +593,7 @@ registerotffeature { local getalternate = otf.getalternate -local mathalternate = attributes.private("mathalternate") +local a_mathalternate = attributes.private("mathalternate") local alternate = { } -- processors.alternate = alternate @@ -600,14 +603,14 @@ function mathematics.setalternate(fam,tag) local mathalternates = tfmdata.shared.mathalternates if mathalternates then local m = mathalternates[tag] - tex.attribute[mathalternate] = m and m.attribute or attributes.unsetvalue + tex.attribute[a_mathalternate] = m and m.attribute or attributes.unsetvalue end end alternate[math_char] = function(pointer) - local a = has_attribute(pointer,mathalternate) + local a = has_attribute(pointer,a_mathalternate) if a and a > 0 then - set_attribute(pointer,mathalternate,0) + set_attribute(pointer,a_mathalternate,0) local tfmdata = fontdata[font_of_family(pointer.fam)] -- we can also have a famdata local mathalternatesattributes = tfmdata.shared.mathalternatesattributes if mathalternatesattributes then @@ -625,6 +628,46 @@ function handlers.check(head,style,penalties) return true end +-- experiment (when not present fall back to fam 0) + +local families = { } + +local a_mathfamily = attributes.private("mathfamily") + +families[math_char] = function(pointer) + if pointer.fam == 255 then + local a = has_attribute(pointer,a_mathfamily) + if a and a > 0 then + set_attribute(pointer,a_mathfamily,0) + pointer.fam = a + else + pointer.fam = 0 + end + end +end + +families[math_delim] = function(pointer) + if pointer.small_fam == 255 then + local a = has_attribute(pointer,a_mathfamily) + if a and a > 0 then + set_attribute(pointer,a_mathfamily,0) + pointer.small_fam = a + pointer.large_fam = a + else + pointer.small_fam = 0 + pointer.large_fam = 0 + end + end +end + +families[math_textchar] = families[math_char] + +function handlers.families(head,style,penalties) + processnoads(head,families,"families") + return true +end + + -- the normal builder function builders.kernel.mlist_to_hlist(head,style,penalties) diff --git a/tex/context/base/math-tag.lua b/tex/context/base/math-tag.lua index ce7c9c864..49138ac3b 100644 --- a/tex/context/base/math-tag.lua +++ 
b/tex/context/base/math-tag.lua @@ -44,6 +44,7 @@ local a_mathmode = attributes.private('mathmode') local tags = structures.tags local start_tagged = tags.start +local restart_tagged = tags.restart local stop_tagged = tags.stop local taglist = tags.taglist @@ -164,31 +165,30 @@ process = function(start) -- we cannot use the processor as we have no finalizer local cache = { } -- we can have nested unboxed mess so best local to runner for n in traverse_nodes(list) do local id = n.id - if id == hlist_code or id == vlist_code then - runner(n.list) - else -- if id == glyph_code then - local aa = get_attribute(n,a_tagged) -- only glyph needed (huh?) - if aa then - local ac = cache[aa] - if not ac then - local tagdata = taglist[aa] - local extra = #tagdata - if common <= extra then - for i=common,extra do - ac = start_tagged(tagdata[i]) -- can be made faster - end - for i=common,extra do - stop_tagged() -- can be made faster - end - else - ac = text + local aa = get_attribute(n,a_tagged) + if aa then + local ac = cache[aa] + if not ac then + local tagdata = taglist[aa] + local extra = #tagdata + if common <= extra then + for i=common,extra do + ac = restart_tagged(tagdata[i]) -- can be made faster + end + for i=common,extra do + stop_tagged() -- can be made faster end - cache[aa] = ac + else + ac = text end - set_attribute(n,a_tagged,ac) - else - set_attribute(n,a_tagged,text) + cache[aa] = ac end + set_attribute(n,a_tagged,ac) + else + set_attribute(n,a_tagged,text) + end + if id == hlist_code or id == vlist_code then + runner(n.list) end end end diff --git a/tex/context/base/math-vfu.lua b/tex/context/base/math-vfu.lua index a5c9f68aa..8acf12db2 100644 --- a/tex/context/base/math-vfu.lua +++ b/tex/context/base/math-vfu.lua @@ -10,6 +10,9 @@ if not modules then modules = { } end modules ['math-vfu'] = { -- better and better. If you have problems with math fonts or miss -- characters report it to the ConTeXt mailing list. 
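-- editor's sketch, not part of the patch: the fallback rule implemented by
-- the experimental "families" pass added in math-noa.lua above. Characters
-- that carry the placeholder family 255 get the family stored in the private
-- mathfamily attribute, and family 0 when no such attribute is set (mr = 0,
-- mb = 1 as in the math-ini.lua hunk). The helper name resolve_family is
-- illustrative only.

local function resolve_family(fam, attribute)
    if fam ~= 255 then
        return fam          -- an explicit family is kept as is
    elseif attribute and attribute > 0 then
        return attribute    -- e.g. 1 for the bold math family (mb)
    else
        return 0            -- fall back to the regular math family (mr)
    end
end

print(resolve_family(255, 1))   -- 1
print(resolve_family(255, nil)) -- 0
print(resolve_family(2, 1))     -- 2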
+-- 20D6 -> 2190 +-- 20D7 -> 2192 + local type, next = type, next local max = math.max @@ -85,12 +88,12 @@ local function arrow(main,characters,id,size,unicode,arrow,minus,isleft) elseif isleft then chr.horiz_variants = { { extender = 0, glyph = arrow }, - { extender = 1, glyph = minus }, + { extender = 1, glyph = minus }, } else chr.horiz_variants = { - { extender = 0, glyph = minus }, - { extender = 1, glyph = arrow }, + { extender = 1, glyph = minus }, + { extender = 0, glyph = arrow }, } end end @@ -313,8 +316,8 @@ function vfmath.addmissing(main,id,size) dots (main,characters,id,size,0x22F1) -- ddots dots (main,characters,id,size,0x22F0) -- udots minus (main,characters,id,size,0xFF501) - arrow (main,characters,id,size,0x2190,0xFE190,0xFF501,true) -- left - arrow (main,characters,id,size,0x2192,0xFE192,0xFF501,false) -- right + arrow (main,characters,id,size,0x2190,0x2190,0xFF501,true) -- left + arrow (main,characters,id,size,0x2192,0x2192,0xFF501,false) -- right vertbar (main,characters,id,size,0x0007C,0.10,0xFF601) -- big : 0.85 bodyfontsize vertbar (main,characters,id,size,0xFF601,0.30,0xFF602) -- Big : 1.15 bodyfontsize vertbar (main,characters,id,size,0xFF602,0.30,0xFF603) -- bigg : 1.45 bodyfontsize @@ -341,6 +344,12 @@ function vfmath.addmissing(main,id,size) jointhree(main,characters,id,size,0x27FB,0x02190,joinrelfactor,0x0002D,0,0xFE324) -- \leftarrow\joinrel\relbar\mapsfromchar jointhree(main,characters,id,size,0x27FC,0xFE321,0,0x0002D,joinrelfactor,0x02192) -- \mapstochar\relbar\joinrel\rightarrow jointwo (main,characters,id,size,0x2254,0x03A,0,0x03D) -- := (≔) + + -- there are more (needs discussion first): + + -- characters[0x20D6] = characters[0x2190] + -- characters[0x20D7] = characters[0x2192] + end local unique = 0 -- testcase: \startTEXpage \math{!\text{-}\text{-}\text{-}} \stopTEXpage @@ -724,6 +733,7 @@ function vfmath.define(specification,set,goodies) fonts.constructors.assignmathparameters(main,main) -- main.MathConstants = main.mathparameters -- we directly pass it to TeX (bypasses the scaler) so this is needed +--~ inspect(main.MathConstants) -- if trace_virtual or trace_timings then report_virtual("loading and virtualizing font %s at size %s took %0.3f seconds",name,size,os.clock()-start) diff --git a/tex/context/base/mult-de.mkii b/tex/context/base/mult-de.mkii index 7a7ba6be2..eaf23937c 100644 --- a/tex/context/base/mult-de.mkii +++ b/tex/context/base/mult-de.mkii @@ -603,6 +603,7 @@ \setinterfaceconstant{commandafter}{zumbefehl} \setinterfaceconstant{commandbefore}{vorbefehl} \setinterfaceconstant{commands}{befehle} +\setinterfaceconstant{compact}{compact} \setinterfaceconstant{component}{component} \setinterfaceconstant{compoundhyphen}{compoundhyphen} \setinterfaceconstant{compress}{compress} diff --git a/tex/context/base/mult-def.lua b/tex/context/base/mult-def.lua index a26c8c0cf..9fa64d1f5 100644 --- a/tex/context/base/mult-def.lua +++ b/tex/context/base/mult-def.lua @@ -6484,6 +6484,11 @@ return { }, ["order"]={ ["en"]="order", + ["nl"]="volgorde", + }, + ["compact"]={ + ["en"]="compact", + ["nl"]="compact", }, ["headalign"]={ ["en"]="headalign", diff --git a/tex/context/base/mult-en.mkii b/tex/context/base/mult-en.mkii index f07fb688c..04ae75f82 100644 --- a/tex/context/base/mult-en.mkii +++ b/tex/context/base/mult-en.mkii @@ -603,6 +603,7 @@ \setinterfaceconstant{commandafter}{commandafter} \setinterfaceconstant{commandbefore}{commandbefore} \setinterfaceconstant{commands}{commands} +\setinterfaceconstant{compact}{compact} 
\setinterfaceconstant{component}{component} \setinterfaceconstant{compoundhyphen}{compoundhyphen} \setinterfaceconstant{compress}{compress} diff --git a/tex/context/base/mult-fr.mkii b/tex/context/base/mult-fr.mkii index 8cf9ff3b1..5f682f1fc 100644 --- a/tex/context/base/mult-fr.mkii +++ b/tex/context/base/mult-fr.mkii @@ -603,6 +603,7 @@ \setinterfaceconstant{commandafter}{commandeapres} \setinterfaceconstant{commandbefore}{commandeavant} \setinterfaceconstant{commands}{commandes} +\setinterfaceconstant{compact}{compact} \setinterfaceconstant{component}{composant} \setinterfaceconstant{compoundhyphen}{compoundhyphen} \setinterfaceconstant{compress}{compress} diff --git a/tex/context/base/mult-ini.mkiv b/tex/context/base/mult-ini.mkiv index f7b5ef247..5d45353f6 100644 --- a/tex/context/base/mult-ini.mkiv +++ b/tex/context/base/mult-ini.mkiv @@ -792,6 +792,10 @@ %D macro's can be enhanced with more testing facilities, but %D for the moment they suffice. + +\ifdefined\zwnj \else \edef\zwnj{\directlua{utf.char(\number"200C)}} \fi % needed for cont-pe +\ifdefined\zwj \else \edef\zwj {\directlua{utf.char(\number"200D)}} \fi % needed for cont-pe + %D Out of convenience we define the banners here. This might move %D to the Lua end. diff --git a/tex/context/base/mult-it.mkii b/tex/context/base/mult-it.mkii index 6692e7c06..754b7ae34 100644 --- a/tex/context/base/mult-it.mkii +++ b/tex/context/base/mult-it.mkii @@ -603,6 +603,7 @@ \setinterfaceconstant{commandafter}{comdandodopo} \setinterfaceconstant{commandbefore}{comandoprima} \setinterfaceconstant{commands}{comandi} +\setinterfaceconstant{compact}{compact} \setinterfaceconstant{component}{component} \setinterfaceconstant{compoundhyphen}{compoundhyphen} \setinterfaceconstant{compress}{compress} diff --git a/tex/context/base/mult-nl.mkii b/tex/context/base/mult-nl.mkii index 732b467a0..a479ab53b 100644 --- a/tex/context/base/mult-nl.mkii +++ b/tex/context/base/mult-nl.mkii @@ -603,6 +603,7 @@ \setinterfaceconstant{commandafter}{nacommando} \setinterfaceconstant{commandbefore}{voorcommando} \setinterfaceconstant{commands}{commandos} +\setinterfaceconstant{compact}{compact} \setinterfaceconstant{component}{component} \setinterfaceconstant{compoundhyphen}{koppelteken} \setinterfaceconstant{compress}{comprimeren} @@ -822,7 +823,7 @@ \setinterfaceconstant{openpage}{openpagina} \setinterfaceconstant{openpageaction}{openpaginaactie} \setinterfaceconstant{option}{optie} -\setinterfaceconstant{order}{order} +\setinterfaceconstant{order}{volgorde} \setinterfaceconstant{orientation}{orientatie} \setinterfaceconstant{otherstext}{otherstext} \setinterfaceconstant{outermargin}{buitenmarge} diff --git a/tex/context/base/mult-pe.mkii b/tex/context/base/mult-pe.mkii index c08fd3cc0..22416cce3 100644 --- a/tex/context/base/mult-pe.mkii +++ b/tex/context/base/mult-pe.mkii @@ -603,6 +603,7 @@ \setinterfaceconstant{commandafter}{فرمانبعداز} \setinterfaceconstant{commandbefore}{فرمانقبلاز} \setinterfaceconstant{commands}{فرمانها} +\setinterfaceconstant{compact}{compact} \setinterfaceconstant{component}{مولفه} \setinterfaceconstant{compoundhyphen}{compoundhyphen} \setinterfaceconstant{compress}{فشردن} diff --git a/tex/context/base/mult-ro.mkii b/tex/context/base/mult-ro.mkii index 1fa93dd98..d9d6e3b62 100644 --- a/tex/context/base/mult-ro.mkii +++ b/tex/context/base/mult-ro.mkii @@ -603,6 +603,7 @@ \setinterfaceconstant{commandafter}{comandadupa} \setinterfaceconstant{commandbefore}{comandainainte} \setinterfaceconstant{commands}{comenzi} 
+\setinterfaceconstant{compact}{compact} \setinterfaceconstant{component}{component} \setinterfaceconstant{compoundhyphen}{compoundhyphen} \setinterfaceconstant{compress}{compress} diff --git a/tex/context/base/mult-sys.mkiv b/tex/context/base/mult-sys.mkiv index b65d15b45..06a4912a3 100644 --- a/tex/context/base/mult-sys.mkiv +++ b/tex/context/base/mult-sys.mkiv @@ -114,6 +114,8 @@ \definesystemconstant {name} \definesystemconstant {spec} +\definesystemconstant {both} + \definesystemconstant {attribute} \definesystemconstant {serif} @@ -245,6 +247,7 @@ \definesystemconstant {rscale} \definesystemconstant {handling} \definesystemconstant {features} +\definesystemconstant {direction} \definesystemconstant {fallbacks} \definesystemconstant {goodies} \definesystemconstant {background} @@ -316,6 +319,7 @@ \definesystemconstant {itemcount} \definesystemconstant {number} +\definesystemconstant {symbol} \definesystemconstant {references} \definesystemconstant {between} \definesystemconstant {format} @@ -687,6 +691,7 @@ \definesystemvariable {ty} % TYpe \definesystemvariable {uc} % Unicode \definesystemvariable {ui} % UItvoer +\definesystemvariable {un} % UNits \definesystemvariable {ur} % URl \definesystemvariable {up} % Utility Program \definesystemvariable {va} % VspAce @@ -710,7 +715,7 @@ \definesystemvariable {za} % ZetspiegelAanpassing %D Next we define some language independant one letter -%D variables and keywords. +%D variables and keywords. (should become s!) \defineinterfaceconstant {x} {x} % x offset \defineinterfaceconstant {y} {y} % y offset @@ -853,29 +858,31 @@ \defineinterfaceconstant {sl} {sl} \defineinterfaceconstant {it} {it} \defineinterfaceconstant {sc} {sc} + \defineinterfaceconstant {rm} {rm} \defineinterfaceconstant {ss} {ss} \defineinterfaceconstant {tt} {tt} \defineinterfaceconstant {hw} {hw} \defineinterfaceconstant {cg} {cg} -\defineinterfaceconstant {os} {os} \defineinterfaceconstant {mm} {mm} -\defineinterfaceconstant {i} {i} -\defineinterfaceconstant {nn} {nn} + +%defineinterfaceconstant {os} {os} +\defineinterfaceconstant {i} {i} % still used ? 
+%defineinterfaceconstant {nn} {nn} \defineinterfaceconstant {x} {x} \defineinterfaceconstant {xx} {xx} \defineinterfaceconstant {em} {em} -\defineinterfaceconstant {mi} {mi} -\defineinterfaceconstant {sy} {sy} -\defineinterfaceconstant {ex} {ex} \defineinterfaceconstant {mr} {mr} - -\defineinterfaceconstant {ma} {ma} \defineinterfaceconstant {mb} {mb} -\defineinterfaceconstant {mc} {mc} + +\defineinterfaceconstant {mrlr} {mrlr} +\defineinterfaceconstant {mrrl} {mrrl} + +\defineinterfaceconstant {mblr} {mblr} +\defineinterfaceconstant {mbrl} {mbrl} %D For figure inclusion we need: diff --git a/tex/context/base/page-sid.mkiv b/tex/context/base/page-sid.mkiv index 7ee7e2fed..8900608ab 100644 --- a/tex/context/base/page-sid.mkiv +++ b/tex/context/base/page-sid.mkiv @@ -614,6 +614,7 @@ \setsidefloatskips \logsidefloat \relax +\lefttoright % \textdir TLT\relax % or \ifconditional\displaylefttoright below (more work) \ifcase\sidefloattype % invalid \or % backspace diff --git a/tex/context/base/phys-dim.lua b/tex/context/base/phys-dim.lua index e27d48ffa..9fb2d031f 100644 --- a/tex/context/base/phys-dim.lua +++ b/tex/context/base/phys-dim.lua @@ -24,37 +24,31 @@ physics.patterns = physics.patterns or { } -- digits parser (todo : use patterns) -local done = false -local swap = false - -local digit = R("09") -local sign = S("+-") -local power = S("^e") -local digitspace = S("~@_") -local digitspacex = digitspace + P(" ") -local comma = P(",") -local period = P(".") -local signspace = P("/") -local positive = S("p") -local negative = S("n") -local highspace = P("s") -local padding = P("=") -local plus = P("+") -local minus = P("-") - --- rename context.digitsspace -> digitsS --- also have digitsN - - --- move done to tex end +--~ local done = false +--~ local mode = 0 + +local digit = R("09") +local sign = S("+-") +local power = S("^e") +local digitspace = S("~@_") +local digitspacex = digitspace + P(" ") +local comma = P(",") +local period = P(".") +local signspace = P("/") +local positive = S("p") +local negative = S("n") +local highspace = P("s") +local padding = P("=") +local plus = P("+") +local minus = P("-") local digits = (digit^1) local ddigitspacex = digitspacex / "" / context.digitsspace local ddigitspace = digitspace / "" / context.digitsspace -local ddigit = digits / function(s) done = true context(s) end -local dseparator = comma / "" / function() if not done then context.digitsseparatorspace() elseif swap then context(".") else context(",") end end - + period / "" / function() if not done then context.digitsseparatorspace() elseif swap then context(",") else context(".") end end +local ddigit = digits / context.digitsdigit +local dseparator = comma / "" / context.digitscomma + + period / "" / context.digitsperiod local dsignspace = signspace / "" / context.digitssignspace local dpositive = positive / "" / context.digitspositive local dnegative = negative / "" / context.digitsnegative @@ -68,7 +62,7 @@ local dpower = power / "" * ( ) local dpadding = padding / "" / context.digitszeropadding -- todo -local digitparsernospace = +local digitparserspace = (dsomesign + dsignspace + dpositive + dnegative + dhighspace)^0 * (dseparator^0 * (ddigitspacex + ddigit)^1)^1 * dpower^0 @@ -82,9 +76,8 @@ physics.patterns.digitparserspace = digitparserspace physics.patterns.digitparser = digitparser function commands.digits(str) - done = false - -- swap = true - matchlpeg(digitparserspace,str) -- also space +--~ done = false + matchlpeg(digitparserspace,str) end -- units parser diff --git 
a/tex/context/base/phys-dim.mkiv b/tex/context/base/phys-dim.mkiv index 0c56ed1b3..1e900bdb6 100644 --- a/tex/context/base/phys-dim.mkiv +++ b/tex/context/base/phys-dim.mkiv @@ -15,20 +15,94 @@ \unprotect -% We will have -% -% \setupunits -% [alternative=text, % maybe no longer -% grid=yes, % snapper -% style=..., % -% space=...] % small medium big - -% digits code: - +%D \macros +%D {digits, setdigitmode, setdigitsign, setdigitorder} +%D +%D This is an update of the \MKII\ digits mechanism. Beware, +%D space delimited mode is now resticted! +%D +%D Depending on the digit mode the command \type {\digits} +%D normalizes number patterns depending on the language set. +%D +%D \starttyping +%D This will never be a \digits{1.000.000} seller. +%D \stoptyping +%D +%D We still support the space delimited case but this is only for special +%D purposes. When used in the text, you'd better use the argument variant. +%D +%D \startbuffer +%D 1 \setdigitmode {1} \setdigitorder{0} \digits {12.345,90} +%D 2 \setdigitmode {2} \setdigitorder{0} \digits {12.345,90} +%D 3 \setdigitmode {3} \setdigitorder{0} \digits {12.345,90} +%D 4 \setdigitmode {4} \setdigitorder{0} \digits {12.345,90} +%D 5 \setdigitmode {5} \setdigitorder{0} \digits {12.345,90} +%D 6 \setdigitmode {6} \setdigitorder{0} \digits {12.345,90} +%D 1 \setdigitmode {1} \setdigitorder{1} \digits {12.345,90} +%D 2 \setdigitmode {2} \setdigitorder{1} \digits {12.345,90} +%D 3 \setdigitmode {3} \setdigitorder{1} \digits {12.345,90} +%D 4 \setdigitmode {4} \setdigitorder{1} \digits {12.345,90} +%D 5 \setdigitmode {5} \setdigitorder{1} \digits {12.345,90} +%D 6 \setdigitmode {6} \setdigitorder{1} \digits {12.345,90} +%D \stopbuffer +%D +%D \typebuffer +%D +%D This is typeset as: +%D +%D \startlines \getbuffer \stoplines +%D +%D The sign can be typeset as is or within the space of a +%D digit. +%D +%D \startbuffer +%D \setdigitsign 0 \digits {+12.345,90} +%D \setdigitsign 1 \digits {+12.345,90} +%D \stopbuffer +%D +%D \typebuffer +%D +%D This is typset as: +%D +%D \startlines +%D \getbuffer +%D \stoplines +%D +%D The digit modes are: +%D +%D \startitemize[n,packed] +%D \item periods/comma +%D \item commas/period +%D \item thinmuskips/comma +%D \item thinmuskips/period +%D \item thickmuskips/comma +%D \item thickmuskips/period +%D \stopitemize +%D +%D The digit parser handles a bunch of special characters as +%D well as different formats. We strongly suggest you to use +%D the grouped call. +%D +%D \starttabulate[|l|l|l|] +%D \NC \type{.} \NC , . \NC comma or period \NC \NR +%D \NC \type{,} \NC , . \NC comma or period \NC \NR +%D \NC \type{@} \NC \NC invisible space \NC \NR +%D \NC \type{_} \NC \NC invisible space \NC \NR +%D \NC \type{/} \NC \NC invisible sign \NC \NR +%D \NC \type{-} \NC $-$ \NC minus sign \NC \NR +%D \NC \type{+} \NC $+$ \NC plus sign \NC \NR +%D \NC \type{s} \NC \NC invisible high sign \NC \NR +%D \NC \type{p} \NC $\positive$ \NC high plus sign \NC \NR +%D \NC \type{m} \NC $\negative$ \NC high minus sign \NC \NR +%D \NC \type{n} \NC $\negative$ \NC high minus (negative) sign \NC \NR +%D \NC \type{=} \NC $\zeroamount$ \NC zero padding \NC \NR +%D \stoptabulate +%D +%D These triggers are used in the following examples. 
+%D %D \startbuffer %D \digits{12} %D \digits{~~~.~~~.~~~.68.712,34} -%D \digits{~~~.~~~.~~~.68.712,34} %D \digits{___.___.111.68.712,34} %D \digits{111.111.111.68.712,34} %D \digits{12.345,90} @@ -40,47 +114,125 @@ %D \digits{123.222,==} %D \digits{123.222,00^10} %D \digits{123.222,00e10} -%D \digits{-123.222,00e-12} %D \digits{/123.222,00e-12} +%D \digits{-123.222,00e-12} %D \digits{+123.222,00e-12} %D \digits{n123.222,00e-12} %D \digits{s123.222,00e-12} -%D \digits{p123.222,00e/12} -%D %D \stopbuffer +%D \digits{p123.222,00e-12} +%D \stopbuffer %D %D \typebuffer %D -%D Result: \getbuffer +%D \startlines +%D \getbuffer +%D \stoplines + +% todo: smaller adn raised signs + +\newconditional\c_digits_seen +\newconstant \c_digits_mode +\newconstant \c_digits_sign % we has sized (text script scriptscript) +\newconstant \c_digits_order % 0=period comma 1=comma period + +\def\setdigitmode #1{\c_digits_mode #1\relax} +\def\setdigitsign #1{\c_digits_sign #1\relax} +\def\setdigitorder#1{\c_digits_order#1\relax} + +\def\digits_normalized % we could calculate once and remember + {\ifcase\c_digits_sign + \expandafter\secondoftwoarguments + \else\ifmmode + \expandafter\expandafter\expandafter\digits_normalized_math + \else + \expandafter\expandafter\expandafter\digits_normalized_text + \fi\fi} -% todo: ifmmode +\def\digits_normalized_math#1#2% + {\setbox\scratchbox\hbox{$\Ustack{#1}$}% + \hbox to \wd\scratchbox{\hss{$\Ustack{#2}$}\hss}} -\def\digitsnormalized#1#2{{\setbox\scratchbox\hbox{#1}\hbox to \wd\scratchbox{\hss#2\hss}}} +\def\digits_normalized_text#1#2% + {\setbox\scratchbox\hbox{#1}% + \hbox to \wd\scratchbox{\hss#2\hss}} -\def\digitsraised {\ifmmode\expandafter\normalsuperscript\else\expandafter\high\fi} +\def\digits_raised + {\ifmmode + \expandafter\normalsuperscript + \else + \expandafter\high + \fi} \def\digitszeropadding {\zeroamount} -\def\digitsnegative {\digitsnormalized\zeroamount{\digitsraised{\textminus}}} % \mathematics \negative -\def\digitspositive {\digitsnormalized\zeroamount{\digitsraised{\textplus }}} % \mathematics \positive -\def\digitsnegative {\mathematics\negative} -\def\digitspositive {\mathematics\positive} +\def\digitsnegative {\digits_normalized{0}{\digits_raised{\textminus}}} +\def\digitspositive {\digits_normalized{0}{\digits_raised{\textplus}}} +\def\digitsnegative {\digits_normalized{0}{\mathematics{\negative}}} +\def\digitspositive {\digits_normalized{0}{\mathematics{\positive}}} +\def\digitsminus {\digits_normalized{0}{\mathematics{-}}} +\def\digitsplus {\digits_normalized{0}{\mathematics{+}}} \def\digitsspace {\hphantom{0}} \def\digitsseparatorspace{\hphantom{.}} \def\digitssignspace {\hphantom{\digitsminus}} \def\digitshighspace {\hphantom{\digitspositive}} -\def\digitspower {\digitsraised} -\def\digitspowerplus #1{\digitsraised{\digitsplus #1}} -\def\digitspowerminus #1{\digitsraised{\digitsminus#1}} -\def\digitsminus {\mathematics-} -\def\digitsplus {\mathematics+} +\def\digitspower {\digits_raised} +\def\digitspowerplus #1{\digits_raised{\digitsplus#1}} +\def\digitspowerminus #1{\digits_raised{\digitsminus#1}} + +% we can move the logic to lua -% First I need to check the old supp-num code for compatibility. 
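-- editor's sketch, not part of the patch: the dispatch idea behind the lpeg
-- digit parser in the phys-dim.lua hunk above. Every character class in the
-- input triggers its own action; here the actions just collect tags in a
-- table instead of calling ConTeXt's context.* commands. The names emit and
-- result are illustrative only.

local lpeg = require("lpeg")
local P, R, S = lpeg.P, lpeg.R, lpeg.S

local result = { }

local function emit(tag)
    return function(s) result[#result+1] = tag .. "(" .. s .. ")" end
end

local digits = R("09")^1 / emit("digit")
local comma  = P(",")    / emit("comma")
local period = P(".")    / emit("period")
local space  = S("~@_")  / emit("space")
local sign   = S("+-")   / emit("sign")

local parser = (digits + comma + period + space + sign)^1

lpeg.match(parser, "12.345,90")
print(table.concat(result, " ")) -- digit(12) period(.) digit(345) comma(,) digit(90)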
+\def\digitsdigit #1{\ifconditional\c_digits_seen#1\else\ifnum#1=\zerocount\digitsspace\else\settrue\c_digits_seen#1\fi\fi} +\def\digitscomma {\ifconditional\c_digits_seen\ifconditional\c_digits_order\digitscommasymbol \else\digitsperiodsymbol\fi\else\digitsseparatorspace\fi} +\def\digitsperiod {\ifconditional\c_digits_seen\ifconditional\c_digits_order\digitsperiodsymbol\else\digitscommasymbol \fi\else\digitsseparatorspace\fi} + +\def\normaldigitscommasymbol {,} +\def\normaldigitsperiodsymbol{.} + +\let\digitscommasymbol \normaldigitscommasymbol +\let\digitsperiodsymbol\normaldigitsperiodsymbol \unexpanded\def\tempdigits#1% for testing {\dontleavehmode - \ctxcommand{digits(\!!bs\detokenize{#1}\!!es)}} + \begingroup + \setfalse\c_digits_seen + \ifcase\c_digits_mode + % keep as-is + \or + \let\digitscommasymbol \normaldigitsperiodsymbol + \let\digitsperiodsymbol \normaldigitscommasymbol + \or + \let\digitscommasymbol \normaldigitscommasymbol + \let\digitsperiodsymbol \normaldigitsperiodsymbol + \or + \let\digitsperiodsymbol \thinspace + \let\digitscommasymbol \normaldigitscommasymbol + \let\digitsseparatorspace\thinspace + \or + \let\digitsperiodsymbol \thinspace + \let\digitscommasymbol \normaldigitsperiodsymbol + \let\digitsseparatorspace\thinspace + \or + \let\digitsperiodsymbol \thickspace + \let\digitscommasymbol \normaldigitscommasymbol + \let\digitsseparatorspace\thickspace + \or + \let\digitsperiodsymbol \thickspace + \let\digitscommasymbol \normaldigitsperiodsymbol + \let\digitsseparatorspace\thickspace + \fi + \ctxcommand{digits(\!!bs\detokenize{#1}\!!es)}% + \endgroup} + +\unexpanded\def\digits + {\doifnextbgroupelse\digits_argument\digits_spaced} + +\def\digits_argument#1% + {\tempdigits{#1}} + +\def\digits_spaced#1 % + {\tempdigits{#1}} %D \macros -%D {su} +%D {unit} %D %D We have been using the units module (and its predecessor) for over a decade %D now but when we moved on to \LUATEX\ a variant was prototyped that permits a @@ -89,94 +241,127 @@ %D test exporting.) 
%D %D \startbuffer -%D 01: $10\su{km/h}$ -%D 02: $\su{10 km/h}$ -%D 03: \su{km/h} -%D 04: \su{10 km/h} -%D 05: \su{10 km/h} -%D 06: \su{~1 km/h} -%D 07: 10\su{km/h} -%D 08: 10 \su{km/h} -%D 09: $10 \su{km/h}$ -%D 10: 10 \su{KiloMeter/Hour} -%D 11: 10 \su{kilometer/hour} -%D 12: 10 \su{km/h} -%D 13: 10 \su{kilometer per hour} -%D 14: 10 \su{km / h} -%D 15: 10 \su{ km / h } -%D 16: 10 \su{km/ms2} -%D 17: 10 \su{meter per second} -%D 18: 10 \su{cubic meter} -%D 19: 10 \su{cubic meter per second} -%D 21: 10 \su{cubic meter / second} -%D 22: $10 \su{cubic meter / second}$ -%D 23: 30 \su{kilo pascal } -%D 24: 30 \su{kilo pascal square meter / second} -%D 25: 30 \su{kilo pascal square meter / kelvin second} -%D 26: \su{30 kilo pascal square meter / kelvin second} -%D 27: $30 \su{kilo pascal square meter / kelvin second }$ -%D 28: 30 \su{crap} -%D 29: 30 \su{AC} -%D 30: $\frac{10 \su{m/s}}{20 \su{m/s}} $ -%D 31: {\ss 30 \su{kilo pascal square meter / second kelvin}} -%D 32: \su{123.22^-3 km/s} -%D 33: \su{123.22e-3 km/s} +%D 01: $10\unit{km/h}$ +%D 02: $\unit{10 km/h}$ +%D 03: \unit{km/h} +%D 04: \unit{10 km/h} +%D 05: \unit{10 km/h} +%D 06: \unit{~1 km/h} +%D 07: 10\unit{km/h} +%D 08: 10 \unit{km/h} +%D 09: $10 \unit{km/h}$ +%D 10: 10 \unit{KiloMeter/Hour} +%D 11: 10 \unit{kilometer/hour} +%D 12: 10 \unit{km/h} +%D 13: 10 \unit{kilometer per hour} +%D 14: 10 \unit{km / h} +%D 15: 10 \unit{ km / h } +%D 16: 10 \unit{km/ms2} +%D 17: 10 \unit{meter per second} +%D 18: 10 \unit{cubic meter} +%D 19: 10 \unit{cubic meter per second} +%D 21: 10 \unit{cubic meter / second} +%D 22: $10 \unit{cubic meter / second}$ +%D 23: 30 \unit{kilo pascal } +%D 24: 30 \unit{kilo pascal square meter / second} +%D 25: 30 \unit{kilo pascal square meter / kelvin second} +%D 26: \unit{30 kilo pascal square meter / kelvin second} +%D 27: $30 \unit{kilo pascal square meter / kelvin second }$ +%D 28: 30 \unit{crap} +%D 29: 30 \unit{AC} +%D 30: $\frac{10 \unit{m/s}}{20 \unit{m/s}} $ +%D 31: {\ss 30 \unit{kilo pascal square meter / second kelvin}} +%D 32: \unit{123.22^-3 km/s} +%D 33: \unit{123.22e-3 km/s} %D \stopbuffer %D %D \typebuffer %D -%D Result: \getbuffer +%D Result: \startlines \getbuffer \stoplines +%D +%D Depending on needs we can add more tweaks (also depends on to what +%D extent we need to be compatible with \MKII. 
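The implementation that follows passes the argument of \unit to Lua (the \ctxcommand{unit(...)} call further down) and the Lua side calls back into TeX via \unitsN, \unitsP, \unitsU, \unitsO and \unitsS for numbers, prefixes, units, operators and suffixes. A minimal sketch of such a split, with tiny demo tables and a hypothetical helper name; the real parser handles long names like "kilo pascal square meter" and is not part of this patch:

-- sketch only: tables and helper are assumptions for illustration

local prefixes = { k = "kilo" }                  -- demo table, the real one is far larger
local units    = { m = "meter", h = "hour" }     -- idem

local function sketch_unit(str)
    local result = { }
    str = string.gsub(str,"/"," / ")             -- isolate the operator
    for token in string.gmatch(str,"%S+") do
        if token == "/" then
            result[#result+1] = { "O", "/" }                       -- operator  -> \unitsO
        elseif tonumber(token) then
            result[#result+1] = { "N", token }                     -- number    -> \unitsN
        elseif units[token] then
            result[#result+1] = { "U", units[token] }              -- unit      -> \unitsU
        elseif prefixes[token:sub(1,1)] and units[token:sub(2)] then
            result[#result+1] = { "P", prefixes[token:sub(1,1)] }  -- prefix    -> \unitsP
            result[#result+1] = { "U", units[token:sub(2)] }       -- unit      -> \unitsU
        else
            result[#result+1] = { "U", token }                     -- unknown: pass through
        end
    end
    return result
end

-- sketch_unit("10 km/h") --> { {"N","10"}, {"P","kilo"}, {"U","meter"}, {"O","/"}, {"U","hour"} }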
-\newconstant \c_units_mode % 0=text 1=math +\installcommandhandler \??un {units} \??un + +\setupunits + [alternative=, % done: text + %grid=yes, % (maybe) + %style=..., % done + %color=..., % done + %space=..., % (maybe) small medium big + ] + +\newconstant \c_units_mode % 0=text 1=math 2=textinmath \newconstant \c_units_state % 0=start 1=suffix 2=operator 3=unit 4=prefix 5=number \newconditional\c_units_quantity \newconditional\c_units_number -% tags and export -% smash == snapper -% hbox ook in mmode - \def\unitshalfspace{\thinspace} \def\unitsbackspace{\negthinspace} \newtoks \everyunits % we keep the old \units command so we need a longer one -\unexpanded\def\unit#1% +\appendtoks + \setuevalue\currentunits{\units_direct{\currentunits}} +\to \everydefineunits + +\unexpanded\def\units_direct#1% {\begingroup + \edef\currentunits{#1}% \the\everyunits %\removeunwantedspaces % now ok yet + \dosetunitsattributes\c!style\c!color \ifmmode - \c_units_mode\plusone - \rm\tf - \mathtf - \fi + \edef\currentunitsalternative{\unitsparameter\c!alternative}% + \ifx\currentunitsalternative\v!text + \expandafter\expandafter\expandafter\units_direct_text_in_math + \else + \expandafter\expandafter\expandafter\units_direct_math + \fi + \else + \expandafter\units_direct_text + \fi} + +\unexpanded\def\units_direct_text_in_math#1% + {\mathtext{% + \c_units_mode\plustwo + \units_indeed{#1}% + \units_finish + }% + \endgroup} + +\unexpanded\def\units_direct_math#1% + {\c_units_mode\plusone + \rm\tf + \mathtf \units_indeed{#1}% \units_finish \endgroup} -\appendtoks - \let\unit\units_indeed -\to \everyunits +\unexpanded\def\units_direct_text#1% + {\units_indeed{#1}% + \units_finish + \endgroup} -% bonus ... we might go for \un instead or maybe du (digit+unit) +\unexpanded\def\units_direct_nested#1#2% + {\units_indeed{#2}} -% \let\su\unit -% -% \appendtoks -% \let\su\units_indeed -% \to \everyunits +\appendtoks + \let\units_direct\units_direct_nested +\to \everyunits \unexpanded\def\units_indeed#1{\ctxcommand{unit(\!!bs\detokenize{#1}\!!es)}} \unexpanded\def\unitsPUS#1#2#3{\units_next#1#2\unitsraise{#3}\c_units_state\plusone} % suffix -\unexpanded\def\unitsPU #1#2{\units_next#1#2\c_units_state \plusthree} % unit +\unexpanded\def\unitsPU #1#2{\units_next#1#2\c_units_state \plusthree} % unit \unexpanded\def\unitsPS #1#2{\units_next#1\unitsraise{#2}\c_units_state \plusone} % suffix \unexpanded\def\unitsUS #1#2{\units_next#1\unitsraise{#2}\c_units_state \plusone} % suffix -\unexpanded\def\unitsP #1{\units_next#1\c_units_state \plusfour} % prefix -\unexpanded\def\unitsU #1{\units_next#1\c_units_state \plusthree} % unit +\unexpanded\def\unitsP #1{\units_next#1\c_units_state \plusfour} % prefix +\unexpanded\def\unitsU #1{\units_next#1\c_units_state \plusthree} % unit \unexpanded\def\unitsS #1{\units_start{}\unitsraise{#1}\c_units_state \plusone} % suffix -\unexpanded\def\unitsO #1{\units_start#1\c_units_state \plustwo} % operator -\unexpanded\def\unitsN #1{\units_start#1\c_units_state \plusfive} % number +\unexpanded\def\unitsO #1{\units_start#1\c_units_state \plustwo} % operator +\unexpanded\def\unitsN #1{\units_start#1\c_units_state \plusfive} % number \setelementnature[unit] [mixed] \setelementnature[quantity][mixed] @@ -280,4 +465,9 @@ {/}% }%\unitsbackspace} +%D Now we define the standard units command: + +\defineunits + [unit] + \protect \endinput diff --git a/tex/context/base/regi-ini.lua b/tex/context/base/regi-ini.lua index a440b6577..d94c7b0dc 100644 --- a/tex/context/base/regi-ini.lua +++ 
b/tex/context/base/regi-ini.lua @@ -145,8 +145,8 @@ regimes.disable = disable local level = 0 -function regimes.process(str) - if level == 0 then +function regimes.process(str,filename,currentline,noflines,coding) + if level == 0 and coding ~= "utf-8" then str = translate(str,currentregime) if trace_translating then report_translating("utf: %s",str) diff --git a/tex/context/base/scrp-ini.lua b/tex/context/base/scrp-ini.lua index ef036cdf6..b0f39c751 100644 --- a/tex/context/base/scrp-ini.lua +++ b/tex/context/base/scrp-ini.lua @@ -173,8 +173,11 @@ if not next(hash) then for i=0x01160,0x011A7 do if not hash[i] then hash[i] = "jamo_medial" end end for i=0x011A8,0x011FF do if not hash[i] then hash[i] = "jamo_final" end end +-- for i=0x03041,0x030FF do if not hash[i] then hash[i] = "japanese" end end + for i=0x01200,0x0139F do hash[i] = "ethiopic_syllable" end + hash[0x01361] = "ethiopic_word" hash[0x01362] = "ethiopic_sentence" diff --git a/tex/context/base/spac-ali.mkiv b/tex/context/base/spac-ali.mkiv index 7285d9eea..686bcfb83 100644 --- a/tex/context/base/spac-ali.mkiv +++ b/tex/context/base/spac-ali.mkiv @@ -74,6 +74,43 @@ \ifconditional\displaylefttoright \else ddir="r2l", \fi} + +\def\synchronizelayoutdirection + {\ifconditional\layoutlefttoright + \settrue\inlinelefttoright + \textdir TLT\relax + \pardir TLT\relax + \else + \setfalse\inlinelefttoright + \textdir TRT\relax + \pardir TRT\relax + \fi} + +\def\synchronizedisplaydirection + {\ifconditional\displaylefttoright + \settrue\inlinelefttoright + \textdir TLT\relax + \pardir TLT\relax + \else + \setfalse\inlinelefttoright + \textdir TRT\relax + \pardir TRT\relax + \fi} + +\def\synchronizeinlinedirection + {\ifconditional\inlinelefttoright + \textdir TLT\relax + \else + \textdir TRT\relax + \fi} + +\def\showdirections + {\dontleavehmode + \begingroup\infofont\textdir TLT[\space + layout:\ifconditional \layoutlefttoright l2r\else r2l\fi\space + display:\ifconditional\displaylefttoright l2r\else r2l\fi\space + inline:\ifconditional \inlinelefttoright l2r\else r2l\fi\space + ]\endgroup} \def\dodefinehbox[#1][#2]% {\setvalue{hbox#1}##1{\hbox to #2{\begstrut##1\endstrut\hss}}} diff --git a/tex/context/base/spac-chr.mkiv b/tex/context/base/spac-chr.mkiv index fb1203b3e..1e7943ea1 100644 --- a/tex/context/base/spac-chr.mkiv +++ b/tex/context/base/spac-chr.mkiv @@ -32,6 +32,8 @@ \edef\breakablethinspace {\utfchar{"2009}} \edef\hairspace {\utfchar{"200A}} \edef\zerowidthspace {\utfchar{"200B}} +\edef\zwnj {\utfchar{"200C}} +\edef\zwj {\utfchar{"200D}} \edef\narrownobreakspace {\utfchar{"202F}} @@ -41,7 +43,9 @@ % Shortcuts: -\let~\nobreakspace +% unexpanded as otherwise we need to intercept / cleanup a lot + +\unexpanded\edef~{\utfchar{"00A0}} % Goodies: diff --git a/tex/context/base/spac-hor.mkiv b/tex/context/base/spac-hor.mkiv index 9afb0039b..e121a1d52 100644 --- a/tex/context/base/spac-hor.mkiv +++ b/tex/context/base/spac-hor.mkiv @@ -915,8 +915,8 @@ %D node insertions. We might even expand it to utf then as it then %D can be used in string comparison (not that much needed anyway). 
-\chardef\zwnj="200C -\chardef\zwj ="200D +% \chardef\zwnj="200C +% \chardef\zwj ="200D % TODO (but used in languages): diff --git a/tex/context/base/status-files.pdf b/tex/context/base/status-files.pdf Binary files differindex b9e7254c3..94506a086 100644 --- a/tex/context/base/status-files.pdf +++ b/tex/context/base/status-files.pdf diff --git a/tex/context/base/status-lua.pdf b/tex/context/base/status-lua.pdf Binary files differindex 608937f8e..f969e272d 100644 --- a/tex/context/base/status-lua.pdf +++ b/tex/context/base/status-lua.pdf diff --git a/tex/context/base/strc-des.mkiv b/tex/context/base/strc-des.mkiv index d67525ae8..f1ff7df69 100644 --- a/tex/context/base/strc-des.mkiv +++ b/tex/context/base/strc-des.mkiv @@ -507,6 +507,7 @@ \def\dodescriptiontext {\csname\??dd:\descriptionparameter\s!handler:\s!handler:\s!text \endcsname} \def\dodescriptionnumber {\csname\??dd:\descriptionparameter\s!handler:\s!handler:\s!number\endcsname} +\def\dodescriptionsymbol {\csname\??dd:\descriptionparameter\s!handler:\s!handler:\s!symbol\endcsname} \def\dodescriptionhandler {\csname\??dd:\descriptionparameter\s!handler:\s!handler \endcsname} \def\dohandledescriptiondo {\csname\??dd:\descriptionparameter\s!handler:\s!handler:\s!do \endcsname} \def\dohandledescriptionstart{\csname\??dd:\descriptionparameter\s!handler:\s!handler:\s!start \endcsname} @@ -645,13 +646,15 @@ % implementation -\newtoks \everyenumeration -\newconditional\enumerationnumberenabled -\def \enumerationdisablenumbersignal {-} +\newtoks \everyenumeration +\newconstant\enumerationnumberstate % 1 == enabled +\def \enumerationsignalskip {-} % 0 == disabled +\def \enumerationsignalsymbol{+} % 2 == disabled but symbol \appendtoks \disablepseudocaps \to \everyenumeration % sorry, uppercase causes troubles \setvalue{\??dd:\v!enumeration:\s!handler:\s!number}{\@@doenumerationnumber} +\setvalue{\??dd:\v!enumeration:\s!handler:\s!symbol}{\@@doenumerationsymbol} \setvalue{\??dd:\v!enumeration:\s!handler:\s!text }{\@@doenumerationtext} \setvalue{\??dd:\v!enumeration:\s!handler }{\@@doenumerationhandler} \setvalue{\??dd:\v!enumeration:\s!handler:\s!do }{\@@somedescription} @@ -665,6 +668,9 @@ \def\@@doenumerationnumber {\ctxlua{structures.lists.savedprefixednumber("\currentdescriptionmain",\currentdescriptionnumberentry)}} +\def\@@doenumerationsymbol + {\descriptionparameter\c!symbol} + \def\@@dodoenumerationtext {\ctxlua{structures.lists.savedtitle("\currentdescriptionmain",\currentdescriptionnumberentry)}} @@ -675,8 +681,15 @@ \descriptionparameter\c!stopper \descriptionparameter\c!right} +\def\doenumerationsymbol + {\descriptionparameter\c!left + \dodescriptionsymbol + \descriptionparameter\c!right} + \def\@@doenumerationtext - {\ifconditional\enumerationnumberenabled + {\ifcase\enumerationnumberstate + \doenumerationfullnumbernop + \or \iftrialtypesetting \doenumerationfullnumberyes \doenumerationcouplingsymbol @@ -685,29 +698,19 @@ \doenumerationfullnumberyes \doenumerationcouplingsymbol \fi - \else - \doenumerationfullnumbernop + \or + \doenumerationfullnumbersymbol \fi} -\def\doenumerationfullnumberyes % text, title - {\begingroup - \dosetdescriptionattributes\c!headstyle\c!headcolor - \the\everyenumeration - \descriptionparameter\c!command{\strut\showdntext\doenumerationnumber\doenumerationextratext}% - \endgroup} - -\def\doenumerationfullnumbernop % text, title - {\begingroup - \dosetdescriptionattributes\c!headstyle\c!headcolor - \the\everyenumeration - \descriptionparameter\c!command{\strut\showdnpuretext\doenumerationextratext}% - 
\endgroup} +\def\doenumerationfullnumberyes {\doenumerationfullnumber{\showdntext \doenumerationnumber\doenumerationextratext}} +\def\doenumerationfullnumbernop {\doenumerationfullnumber{\showdnpuretext \doenumerationextratext}} +\def\doenumerationfullnumbersymbol{\doenumerationfullnumber{\showdntext \doenumerationsymbol\doenumerationextratext}} \def\doenumerationfullnumber#1% text, title (used in notes) .. todo {\begingroup \dosetdescriptionattributes\c!headstyle\c!headcolor \the\everyenumeration - \descriptionparameter\c!command{\strut#1\doenumerationnumber\doenumerationextratext}% + \descriptionparameter\c!command{\strut#1}% \endgroup} \def\doenumerationextratext @@ -734,10 +737,14 @@ \def\doenumerationcheckconditions {\doifelse{\descriptionparameter\c!number}\v!yes - {\ifx\currentdescriptionreference\enumerationdisablenumbersignal - \setfalse\enumerationnumberenabled \else \settrue\enumerationnumberenabled - \fi}% - {\setfalse\enumerationnumberenabled}% + {\ifx\currentdescriptionreference\enumerationsignalsymbol + \enumerationnumberstate\plustwo % symbol + \else\ifx\currentdescriptionreference\enumerationsignalskip + \enumerationnumberstate\zerocount % disabled + \else + \enumerationnumberstate\plusone % enabled + \fi\fi}% + {\enumerationnumberstate\zerocount}% % disabled \edef\currentenumerationcoupling{\descriptionparameter\c!coupling}} \def\doenumerationregistercoupling @@ -842,8 +849,12 @@ \global\nodescriptioncaptiontrue\global\nodescriptionnumbertrue \fi % - \ifconditional\enumerationnumberenabled + \ifcase\enumerationnumberstate + % disabled + \or \doifelsedescriptioncomponent\donothing\doenumerationincrementcounter + \or + % symbol \fi % \ifnodescriptioncaption @@ -879,7 +890,9 @@ list = \!!bs\detokenize\expandafter{\currentdescriptionlist }\!!es, \fi % \fi }, - \ifconditional\enumerationnumberenabled + \ifcase\enumerationnumberstate + % disabled + \or prefixdata = { prefix = "\descriptionparameter\c!prefix", separatorset = "\descriptionparameter\c!prefixseparatorset", @@ -898,6 +911,8 @@ stopper = \!!bs\descriptionparameter\c!numberstopper\!!es, segments = "\descriptionparameter\c!numbersegments", }, + \or + % symbol \fi userdata = structures.helpers.touserdata(\!!bs\detokenize{#2}\!!es) } diff --git a/tex/context/base/strc-doc.lua b/tex/context/base/strc-doc.lua index d50d49b92..36c2578c8 100644 --- a/tex/context/base/strc-doc.lua +++ b/tex/context/base/strc-doc.lua @@ -49,37 +49,36 @@ local data function documents.initialize() data = { - numbers = { }, - forced = { }, + numbers = { }, + forced = { }, ownnumbers = { }, - status = { }, - checkers = { }, - depth = 0, - blocks = { }, - block = "", + status = { }, + checkers = { }, + depth = 0, + blocks = { }, + block = "", } documents.data = data end function documents.reset() - data.numbers = { } - data.forced = { } + data.numbers = { } + data.forced = { } data.ownnumbers = { } - data.status = { } ---~ data.checkers = { } - data.depth = 0 + data.status = { } + -- data.checkers = { } + data.depth = 0 end documents.initialize() -- -- -- sections -- -- -- +local collected = allocate() +local tobesaved = allocate() -local collected = allocate() -local tobesaved = allocate() - -sections.collected = collected -sections.tobesaved = tobesaved +sections.collected = collected +sections.tobesaved = tobesaved --~ local function initializer() --~ collected = sections.collected @@ -88,6 +87,15 @@ sections.tobesaved = tobesaved --~ job.register('structures.sections.collected', tobesaved, initializer) +sections.registered = sections.registered 
or allocate() +local registered = sections.registered + +storage.register("structures/sections/registered", registered, "structures.sections.registered") + +function sections.register(name,specification) + registered[name] = specification +end + function sections.currentid() return #tobesaved end @@ -691,29 +699,36 @@ function sections.title() end end -function sections.findnumber(depth,what) +function sections.findnumber(depth,what) -- needs checking (looks wrong and slow too) local data = data.status[depth or data.depth] if data then local index = data.references.section local collected = sections.collected local sectiondata = collected[index] if sectiondata and sectiondata.hidenumber ~= true then -- can be nil - if what == variables.first then + local quit = what == variables.previous or what == variables.next + if what == variables.first or what == variables.previous then for i=index,1,-1 do local s = collected[i] local n = s.numbers if #n == depth and n[depth] and n[depth] ~= 0 then sectiondata = s + if quit then + break + end elseif #n < depth then break end end - elseif what == variables.last then + elseif what == variables.last or what == variables.next then for i=index,#collected do local s = collected[i] local n = s.numbers if #n == depth and n[depth] and n[depth] ~= 0 then sectiondata = s + if quit then + break + end elseif #n < depth then break end @@ -724,6 +739,62 @@ function sections.findnumber(depth,what) end end +function sections.finddata(depth,what) + local data = data.status[depth or data.depth] + if data then + -- if sectiondata and sectiondata.hidenumber ~= true then -- can be nil + local index = data.references.listindex + if index then + local collected = structures.lists.collected + local quit = what == variables.previous or what == variables.next + if what == variables.first or what == variables.previous then + for i=index-1,1,-1 do + local s = collected[i] + if not s then + break + elseif s.metadata.kind == "section" then -- maybe check on name + local n = s.numberdata.numbers + if #n == depth and n[depth] and n[depth] ~= 0 then + data = s + if quit then + break + end + elseif #n < depth then + break + end + end + end + elseif what == variables.last or what == variables.next then + for i=index+1,#collected do + local s = collected[i] + if not s then + break + elseif s.metadata.kind == "section" then -- maybe check on name + local n = s.numberdata.numbers + if #n == depth and n[depth] and n[depth] ~= 0 then + data = s + if quit then + break + end + elseif #n < depth then + break + end + end + end + end + end + return data + end +end + +function sections.internalreference(sectionname,what) -- to be used in pagebuilder (no marks used) + local r = type(sectionname) == "number" and sectionname or registered[sectionname] + if r then + local data = sections.finddata(r.level,what) + return data and data.references and data.references.internal + end +end + function sections.fullnumber(depth,what) local sectiondata = sections.findnumber(depth,what) if sectiondata then diff --git a/tex/context/base/strc-flt.mkii b/tex/context/base/strc-flt.mkii index f028089e3..6e8176fe0 100644 --- a/tex/context/base/strc-flt.mkii +++ b/tex/context/base/strc-flt.mkii @@ -1436,18 +1436,6 @@ \fi \fi} -\def\dopreparesidecaption#1#2#3% - {\scratchdimen\dimexpr\hsize-\wd\tempfloatbox-\@@bkmargin\relax % was \tfskipsize\relax - \ifdim\wd\tempcaptionbox>\scratchdimen - \ifdim\wd\tempcaptionbox<1.3\scratchdimen - \scratchdimen0.8\scratchdimen - \fi - \fi - \dosettempcaptionbox % 
\setbox\tempcaptionbox\vbox - {\hsize\scratchdimen - \doifnothing{\floatcaptionparameter\c!align}\raggedright % on purpose overloads align ! - \putcompletecaption{#2}{#3}}} - \newdimen\tempfloatheight \newdimen\tempfloatwidth diff --git a/tex/context/base/strc-flt.mkiv b/tex/context/base/strc-flt.mkiv index ee6228afd..d9486c569 100644 --- a/tex/context/base/strc-flt.mkiv +++ b/tex/context/base/strc-flt.mkiv @@ -414,6 +414,7 @@ \long\def\putcompletecaption#1#2% {\doifsomething{\floatcaptionparameter\c!spacebefore}{\blank[\floatcaptionparameter\c!spacebefore]}% %\floatcaptionparameter\c!before % test for side effects first +\synchronizedisplaydirection % temp hack, till we have a proper model \noindent \gdef\lastcaptiontag{\strut#1}% was xdef \begingroup @@ -480,7 +481,7 @@ \def\dofakecaptionthings {\hbox{\dosetcaptionthings\hskip\leftskip\hskip\rightskip}} -\long\def\docheckcaptioncontent#1#2% +\long\def\docheckcaptioncontent#1#2% side floats {\ifnofloatcaption \else \setbox\tempcaptionbox\hbox {\settrialtypesetting @@ -1280,7 +1281,7 @@ \doglobal\addlocalbackgroundtobox\floatbox \else % todo: installable maken, variant/method=auto vs macro - \dopreparedocaption{#1}{#2}{#3}% + \dopreparepagecaption{#1}{#2}{#3}% \settracedcaptionbox \edef\width{\the\wd\tempfloatbox}% \addlocalbackgroundtobox\tempfloatbox @@ -1313,7 +1314,7 @@ \fi\fi \locatefloat{\copy\tempfloatbox}}} -\def\dopreparedocaption#1#2#3% +\def\dopreparepagecaption#1#2#3% {\dostarttagged\t!floatcaption\empty \doifinsetelse{\floatcaptionparameter\c!location}{\v!top,\v!bottom} {\doifinsetelse{\floatcaptionparameter\c!width}{\v!fit,\v!max} @@ -1443,18 +1444,6 @@ \fi \fi} -\def\dopreparesidecaption#1#2#3% - {\scratchdimen\dimexpr\hsize-\wd\tempfloatbox-\floatparameter\c!margin\relax % was \tfskipsize\relax - \ifdim\wd\tempcaptionbox>\scratchdimen - \ifdim\wd\tempcaptionbox<1.3\scratchdimen - \scratchdimen0.8\scratchdimen - \fi - \fi - \dosettempcaptionbox % \setbox\tempcaptionbox\vbox - {\hsize\scratchdimen - \doifnothing{\floatcaptionparameter\c!align}\raggedright % on purpose overloads align ! 
- \putcompletecaption{#2}{#3}}} - \newdimen\tempfloatheight \newdimen\tempfloatwidth @@ -1804,11 +1793,11 @@ %\showcomposition \setbox\tempfloatbox\vbox{\borderedfloatbox}% \addlocalbackgroundtobox\tempfloatbox % no \doglobal - \docheckcaptioncontent{#2}{#3}% \ifnofloatcaption \global\setbox\floatbox\vbox{\box\tempfloatbox}% \else - \dopreparedosidecaption{#1}{#2}{#3}% + \docheckcaptioncontent{#2}{#3}% + \dopreparesidecaption{#1}{#2}{#3}% \settracedcaptionbox \setbox\tempcaptionbox\hbox{\floatcaptionparameter\c!command{\box\tempcaptionbox}}% \moveboxontogrid\tempcaptionbox{\floatcaptionparameter\c!grid}\lastcaptionht @@ -1817,7 +1806,7 @@ \fi \egroup} -\def\dopreparedosidecaption#1#2#3% will be enhanced +\def\dopreparesidecaption#1#2#3% will be enhanced {\doifelse{\floatcaptionparameter\c!width}\v!max {\dosettempcaptionbox {\hsize\wd\tempfloatbox diff --git a/tex/context/base/strc-lev.lua b/tex/context/base/strc-lev.lua index ff30c3f91..2c4743dc4 100644 --- a/tex/context/base/strc-lev.lua +++ b/tex/context/base/strc-lev.lua @@ -27,9 +27,9 @@ function sections.startautolevel(category) level = level + 1 local lc = levels[category] if not lc or level > #lc then - context.nostartstructurehead { format("%s:%s",category,level) } + context.nostarthead { format("%s:%s",category,level) } else - context.dostartstructurehead { lc[level] } + context.dostarthead { lc[level] } end insert(categories,category) end @@ -38,9 +38,9 @@ function sections.stopautolevel() local category = remove(categories) local lc = levels[category] if not lc or level > #lc then - context.nostopstructurehead { format("%s:%s",category,level) } + context.nostophead { format("%s:%s",category,level) } else - context.dostopstructurehead { lc[level] } + context.dostophead { lc[level] } end level = level - 1 end diff --git a/tex/context/base/strc-lst.lua b/tex/context/base/strc-lst.lua index 21015e43a..930ff2d0a 100644 --- a/tex/context/base/strc-lst.lua +++ b/tex/context/base/strc-lst.lua @@ -104,6 +104,9 @@ local function initializer() end end end + if r then + r.listindex = i -- handy to have + end end end @@ -118,6 +121,7 @@ function lists.push(t) p = #cached + 1 cached[p] = helpers.simplify(t) pushed[i] = p + r.listindex = p end texwrite(p) end @@ -144,7 +148,12 @@ function lists.enhance(n) local kind = metadata.kind local name = metadata.name if references then - references.tag = tags.getid(kind,name) + -- is this used ? + local tag = tags.getid(kind,name) + if tag and tag ~= "?" 
then + references.tag = tag + end + --~ references.listindex = n end -- specific enhancer (kind of obsolete) local enhancer = kind and lists.enhancers[kind] diff --git a/tex/context/base/strc-not.mkiv b/tex/context/base/strc-not.mkiv index 09a834c46..f75ed118c 100644 --- a/tex/context/base/strc-not.mkiv +++ b/tex/context/base/strc-not.mkiv @@ -188,6 +188,7 @@ %\c!continue=\v!no, \c!paragraph=\v!no, \c!inbetween=\hskip1em, + \c!symbol=\mathematics{*}, \c!n=1] \setupnotes @@ -633,6 +634,7 @@ \setvalue{\??dd:\v!note:\s!handler:\s!text }{\@@donotetext} \setvalue{\??dd:\v!note:\s!handler:\s!number}{\@@donotenumber} +\setvalue{\??dd:\v!note:\s!handler:\s!symbol}{\@@donotesymbol} \setvalue{\??dd:\v!note:\s!handler }{\@@donotehandler} \setvalue{\??dd:\v!note:\s!handler:\s!do }{\@@somenotedescription} \setvalue{\??dd:\v!note:\s!handler:\s!start }{\@@startsomenotedescription} @@ -725,21 +727,22 @@ \def\currentnotedescriptiontext % todo: can be other number {\ctxlua{structures.notes.title("\currentnote",\currentdescriptionnumberentry)}} -\def\@@donotetext - {\ifconditional\enumerationnumberenabled +\def\@@donotetext % same as \@@doenumerationtext + {\ifcase\enumerationnumberstate + \doenumerationfullnumbernop + \or \iftrialtypesetting - \doenumerationfullnumber\showdntext + \doenumerationfullnumberyes \doenumerationcouplingsymbol \else \doenumerationregistercoupling - \doenumerationfullnumber\showdntext + \doenumerationfullnumberyes \doenumerationcouplingsymbol \fi - \else - \doenumerationfullnumber\showdnpuretext + \or + \doenumerationfullnumbersymbol \fi} -% \def\currentnoteenumerationfullnumber \def\@@donotenumber {\doifelse{\noteparameter\c!interaction}\v!no {\docurrentnoteenumerationfullnumber}% @@ -747,12 +750,25 @@ {\docurrentnoteenumerationfullnumber}% [page(\ctxlua{structures.notes.getsymbolpage("\currentnote",\currentdescriptionnumberentry)})]}} +\def\@@donotesymbol + {\doifelse{\noteparameter\c!interaction}\v!no + {\docurrentnoteenumerationsymbol}% + {\directgotobox + {\docurrentnoteenumerationsymbol}% + [page(\ctxlua{structures.notes.getsymbolpage("\currentnote",\currentdescriptionnumberentry)})]}} + \def\docurrentnoteenumerationfullnumber {\noteparameter\c!numbercommand {\dosetnoteattributes\c!numberstyle\c!numbercolor \ctxlua{structures.notes.number("\currentnote",\currentdescriptionnumberentry)}% \domovednote\currentdescription\currentdescriptionnumberentry\v!nextpage\v!previouspage}} +\def\docurrentnoteenumerationsymbol + {\noteparameter\c!numbercommand + {\dosetnoteattributes\c!numberstyle\c!numbercolor + \descriptionparameter\c!symbol + \domovednote\currentdescription\currentdescriptionnumberentry\v!nextpage\v!previouspage}} + \def\synchronizesomenotesymbol#1#2#3% called more often than needed {\iftrialtypesetting\else \normalexpanded{\noexpand\ctxlatelua{structures.notes.setsymbolpage("#1",#2,#3)}}% diff --git a/tex/context/base/strc-ref.lua b/tex/context/base/strc-ref.lua index 849cedbe7..e8f2e89e3 100644 --- a/tex/context/base/strc-ref.lua +++ b/tex/context/base/strc-ref.lua @@ -7,15 +7,16 @@ if not modules then modules = { } end modules ['strc-ref'] = { } local format, find, gmatch, match, concat = string.format, string.find, string.gmatch, string.match, table.concat -local lpegmatch, lpegP, lpegCs = lpeg.match, lpeg.P, lpeg.Cs local texcount, texsetcount = tex.count, tex.setcount -local rawget = rawget +local rawget, tonumber = rawget, tonumber +local lpegmatch, lpegP, lpegS, lpegCs, lpegCt, lpegCf, lpegCc, lpegC, lpegCg = lpeg.match, lpeg.P, lpeg.S, lpeg.Cs, lpeg.Ct, lpeg.Cf, 
lpeg.Cc, lpeg.C, lpeg.Cg local allocate = utilities.storage.allocate local mark = utilities.storage.mark local setmetatableindex = table.setmetatableindex -local trace_referencing = false trackers.register("structures.referencing", function(v) trace_referencing = v end) +local trace_referencing = false trackers.register("structures.referencing", function(v) trace_referencing = v end) +local trace_analyzing = false trackers.register("structures.referencing.analyzing", function(v) trace_analyzing = v end) local report_references = logs.reporter("structure","references") @@ -210,56 +211,149 @@ end -- this reference parser is just an lpeg version of the tex based one -local result = { } - -local lparent, rparent, lbrace, rbrace, dcolon, backslash = lpegP("("), lpegP(")"), lpegP("{"), lpegP("}"), lpegP("::"), lpegP("\\") - -local reset = lpegP("") / function() result = { } end -local b_token = backslash / function(s) result.has_tex = true return s end - -local o_token = 1 - rparent - rbrace - lparent - lbrace -local a_token = 1 - rbrace -local s_token = 1 - lparent - lbrace - lparent - lbrace -local i_token = 1 - lparent - lbrace -local f_token = 1 - lparent - lbrace - dcolon - -local outer = (f_token )^1 / function (s) result.outer = s end -local operation = lpegCs((b_token + o_token)^1) / function (s) result.operation = s end -local arguments = lpegCs((b_token + a_token)^0) / function (s) result.arguments = s end -local special = (s_token )^1 / function (s) result.special = s end -local inner = (i_token )^1 / function (s) result.inner = s end - -local outer_reference = (outer * dcolon)^0 - -operation = outer_reference * operation -- special case: page(file::1) and file::page(1) - -local optional_arguments = (lbrace * arguments * rbrace)^0 -local inner_reference = inner * optional_arguments -local special_reference = special * lparent * (operation * optional_arguments + operation^0) * rparent - -local scanner = (reset * outer_reference * (special_reference + inner_reference)^-1 * -1) / function() return result end - ---~ function references.analyze(str) -- overloaded ---~ return lpegmatch(scanner,str) ---~ end - -function references.split(str) - return lpegmatch(scanner,str or "") +-- local result = { } +-- +-- local lparent = lpegP("(") +-- local rparent = lpegP(")") +-- local lbrace = lpegP("{") +-- local rbrace = lpegP("}") +-- local dcolon = lpegP("::") +-- local backslash = lpegP("\\") +-- +-- local reset = lpegP("") / function() result = { } end +-- local b_token = backslash / function(s) result.has_tex = true return s end +-- +-- local o_token = 1 - rparent - rbrace - lparent - lbrace +-- local a_token = 1 - rbrace +-- local s_token = 1 - lparent - lbrace +-- local i_token = 1 - lparent - lbrace +-- local f_token = 1 - lparent - lbrace - dcolon +-- +-- local outer = (f_token )^1 / function (s) result.outer = s end +-- local operation = lpegCs((b_token + o_token)^1) / function (s) result.operation = s end +-- local arguments = lpegCs((b_token + a_token)^0) / function (s) result.arguments = s end +-- local special = (s_token )^1 / function (s) result.special = s end +-- local inner = (i_token )^1 / function (s) result.inner = s end +-- +-- local outer_reference = (outer * dcolon)^0 +-- +-- operation = outer_reference * operation -- special case: page(file::1) and file::page(1) +-- +-- local optional_arguments = (lbrace * arguments * rbrace)^0 +-- local inner_reference = inner * optional_arguments +-- local special_reference = special * lparent * (operation * optional_arguments + 
operation^0) * rparent +-- +-- local scanner = (reset * outer_reference * (special_reference + inner_reference)^-1 * -1) / function() return result end +-- +-- function references.split(str) +-- return lpegmatch(scanner,str or "") +-- end + +-- the scanner accepts nested outer, but we don't care too much, maybe some day we will +-- have both but currently the innermost wins + +local spaces = lpegP(" ")^0 +local lparent = lpegP("(") +local rparent = lpegP(")") +local lbrace = lpegP("{") +local rbrace = lpegP("}") +local dcolon = lpegP("::") +local backslash = lpegP("\\") + + lparent = spaces * lparent * spaces + rparent = spaces * rparent * spaces + lbrace = spaces * lbrace * spaces + rbrace = spaces * rbrace * spaces + dcolon = spaces * dcolon * spaces + +local endofall = spaces * lpegP(-1) + +local o_token = 1 - rparent - rbrace - lparent - lbrace -- can be made more efficient +local a_token = 1 - rbrace +local s_token = 1 - lparent - lbrace +local i_token = 1 - lparent - lbrace - endofall +local f_token = 1 - lparent - lbrace - dcolon + +local hastexcode = lpegCg(lpegCc("has_tex") * lpegCc(true)) -- cannot be made to work +local outer = lpegCg(lpegCc("outer") * lpegCs(f_token^1)) +local operation = lpegCg(lpegCc("operation") * lpegCs(o_token^1)) +local arguments = lpegCg(lpegCc("arguments") * lpegCs(a_token^0)) +local special = lpegCg(lpegCc("special") * lpegCs(s_token^1)) +local inner = lpegCg(lpegCc("inner") * lpegCs(i_token^1)) + + arguments = (lbrace * arguments * rbrace)^-1 + outer = (outer * dcolon)^-1 + operation = outer * operation -- special case: page(file::1) and file::page(1) + inner = inner * arguments + special = special * lparent * (operation * arguments)^-1 * rparent + +local scanner = spaces * lpegCf (lpegCt("") * outer * (special + inner)^-1 * endofall, rawset) + +local function splitreference(str) + if str and str ~= "" then + local t = lpegmatch(scanner,str) + if t then + local a = t.arguments + if a and find(a,"\\") then + t.has_tex = true + else + local o = t.arguments + if o and find(o,"\\") then + t.has_tex = true + end + end + return t + end + end end ---~ print(table.serialize(references.analyze(""))) ---~ print(table.serialize(references.analyze("inner"))) ---~ print(table.serialize(references.analyze("special(operation{argument,argument})"))) ---~ print(table.serialize(references.analyze("special(operation)"))) ---~ print(table.serialize(references.analyze("special()"))) ---~ print(table.serialize(references.analyze("inner{argument}"))) ---~ print(table.serialize(references.analyze("outer::"))) ---~ print(table.serialize(references.analyze("outer::inner"))) ---~ print(table.serialize(references.analyze("outer::special(operation{argument,argument})"))) ---~ print(table.serialize(references.analyze("outer::special(operation)"))) ---~ print(table.serialize(references.analyze("outer::special()"))) ---~ print(table.serialize(references.analyze("outer::inner{argument}"))) ---~ print(table.serialize(references.analyze("special(outer::operation)"))) +references.split = splitreference + +--~ inspect(splitreference([[ ]])) +--~ inspect(splitreference([[ inner ]])) +--~ inspect(splitreference([[ special ( operation { argument, argument } ) ]])) +--~ inspect(splitreference([[ special ( operation { argument } ) ]])) +--~ inspect(splitreference([[ special ( operation { argument, \argument } ) ]])) +--~ inspect(splitreference([[ special ( operation { \argument } ) ]])) +--~ inspect(splitreference([[ special ( operation ) ]])) +--~ inspect(splitreference([[ special ( 
\operation ) ]])) +--~ inspect(splitreference([[ special ( o\peration ) ]])) +--~ inspect(splitreference([[ special ( ) ]])) +--~ inspect(splitreference([[ inner { argument } ]])) +--~ inspect(splitreference([[ inner { \argument } ]])) +--~ inspect(splitreference([[ inner { ar\gument } ]])) +--~ inspect(splitreference([[inner{a\rgument}]])) +--~ inspect(splitreference([[ inner { argument, argument } ]])) +--~ inspect(splitreference([[ inner { argument, \argument } ]])) -- fails: bug in lpeg? +--~ inspect(splitreference([[ inner { \argument, \argument } ]])) +--~ inspect(splitreference([[ outer :: ]])) +--~ inspect(splitreference([[ outer :: inner]])) +--~ inspect(splitreference([[ outer :: special (operation { argument,argument } ) ]])) +--~ inspect(splitreference([[ outer :: special (operation { } )]])) +--~ inspect(splitreference([[ outer :: special ( operation { argument, \argument } ) ]])) +--~ inspect(splitreference([[ outer :: special ( operation ) ]])) +--~ inspect(splitreference([[ outer :: special ( \operation ) ]])) +--~ inspect(splitreference([[ outer :: special ( ) ]])) +--~ inspect(splitreference([[ outer :: inner { argument } ]])) +--~ inspect(splitreference([[ special ( outer :: operation ) ]])) + +--~ inspect(splitreference([[]])) +--~ inspect(splitreference([[inner]])) +--~ inspect(splitreference([[special(operation{argument,argument})]])) +--~ inspect(splitreference([[special(operation)]])) +--~ inspect(splitreference([[special(\operation)]])) +--~ inspect(splitreference([[special()]])) +--~ inspect(splitreference([[inner{argument}]])) +--~ inspect(splitreference([[inner{\argument}]])) +--~ inspect(splitreference([[outer::]])) +--~ inspect(splitreference([[outer::inner]])) +--~ inspect(splitreference([[outer::special(operation{argument,argument})]])) +--~ inspect(splitreference([[outer::special(operation{argument,\argument})]])) +--~ inspect(splitreference([[outer::special(operation)]])) +--~ inspect(splitreference([[outer::special(\operation)]])) +--~ inspect(splitreference([[outer::special()]])) +--~ inspect(splitreference([[outer::inner{argument}]])) +--~ inspect(splitreference([[special(outer::operation)]])) -- -- -- related to strc-ini.lua -- -- -- @@ -719,7 +813,7 @@ local function resolve(prefix,reference,args,set) -- we start with prefix,refere if d then resolve(prefix,d[2],nil,set) else - local var = lpegmatch(scanner,ri) + local var = splitreference(ri) if var then var.reference = ri local vo, vi = var.outer, var.inner @@ -758,7 +852,6 @@ local function resolve(prefix,reference,args,set) -- we start with prefix,refere if set.has_tex then texcount.referencehastexstate = 1 end ---~ table.print(set) return set else return { } @@ -1047,8 +1140,6 @@ set.n = n var.p = p else -- these are taken from other data structures (like lists) ---~ print("!!!!!!!!!!!!!!",splitprefix,splitinner) ---~ table.print(derived) if splitprefix and splitinner then if splitprefix == "-" then i = derived[""] @@ -1109,7 +1200,9 @@ set.n = n set[i] = var end references.currentset = mark(set) -- mark, else in api doc ---~ table.print(set,tostring(bug)) + if trace_analyzing then + report_references(table.serialize(set,reference)) + end return set, bug end @@ -1488,7 +1581,8 @@ end runners["special operation"] = runners["special"] runners["special operation with arguments"] = runners["special"] --- weird, why is this code here and in lpdf-ano +-- These are the testspecials not the real ones. They are used to +-- check the validity. 
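For reference, the table that the scanner above produces for a section reference like the one documented later in strc-ref.mkiv; the field names come from the lpegCg/lpegCc captures, and the output is sketched from the grammar rather than taken from a run:

-- inspect(splitreference("section(first {chapter})"))
--
-- {
--  arguments = "chapter", -- head name, registered via structures.sections.register
--  operation = "first",   -- first/previous/next/last, resolved by sections.finddata
--  special   = "section", -- dispatched to specials.section further down
-- }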
function specials.internal(var,actions) local v = references.internals[tonumber(var.operation)] @@ -1529,3 +1623,15 @@ function specials.userpage(var,actions) actions.realpage = actions.realpage or p -- first wins end end + +function specials.section(var,actions) + local sectionname = var.arguments + local destination = var.operation + local internal = structures.sections.internalreference(sectionname,destination) + if internal then + var.special = "internal" + var.operation = internal + var.arguments = nil + specials.internal(var,actions) + end +end diff --git a/tex/context/base/strc-ref.mkiv b/tex/context/base/strc-ref.mkiv index 9de6b923a..e7728a7b9 100644 --- a/tex/context/base/strc-ref.mkiv +++ b/tex/context/base/strc-ref.mkiv @@ -1803,6 +1803,22 @@ \def\currentreferencedefault {\ctxlua{structures.references.filter("default",\ctxlua{structures.references.getcurrentprefixspec("\v!default")})}} +%D Not all support is visible by looking at the \TEX\ code; here is one of those:^ +%D +%D \starttyping +%D \startinteractionmenu[right] +%D \startbut [section(first {chapter})] first chapter \stopbut +%D \startbut [section(previous{chapter})] previous chapter \stopbut +%D \startbut [section(next {chapter})] next chapter \stopbut +%D \startbut [section(last {chapter})] last chapter \stopbut +%D \blank[2*big] +%D \startbut [section(first {section})] first section \stopbut +%D \startbut [section(previous{section})] previous section \stopbut +%D \startbut [section(next {section})] next section \stopbut +%D \startbut [section(last {section})] last section \stopbut +%D \stopinteractionmenu +%D \stoptyping + \protect \endinput % tricky: diff --git a/tex/context/base/strc-sec.mkiv b/tex/context/base/strc-sec.mkiv index 94f21a12b..f26ed8e7e 100644 --- a/tex/context/base/strc-sec.mkiv +++ b/tex/context/base/strc-sec.mkiv @@ -112,6 +112,20 @@ \to \everydefinehead \appendtoks + % beware, this is a global register + \begingroup + \edef\currentsectionheadcoupling{\sectionheadcoupling\currenthead}% + \edef\currentsectionheadsection {\sectionheadsection \currentsectionheadcoupling}% + \edef\currentsectionlevel {\sectionlevel \currentsectionheadsection}% + \ctxlua{structures.sections.register("\currenthead",{ + coupling = "\currentsectionheadcoupling", + section = "\currentsectionheadsection", + level = \currentsectionlevel, + })}% + \endgroup +\to \everydefinehead + +\appendtoks % \setevalue{\e!next \currenthead}{\donexthead [\currenthead]}% \setevalue{\e!start\currenthead}{\dostarthead[\currenthead]}% \setevalue{\e!stop \currenthead}{\dostophead[\currenthead]}% diff --git a/tex/context/base/strc-tag.lua b/tex/context/base/strc-tag.lua index 886f00d36..20da7782d 100644 --- a/tex/context/base/strc-tag.lua +++ b/tex/context/base/strc-tag.lua @@ -267,6 +267,16 @@ function tags.start(tag,specification) return t end +function tags.restart(completetag) + local t = #taglist + 1 + nstack = nstack + 1 + chain[nstack] = completetag + stack[nstack] = t + taglist[t] = { unpack(chain,1,nstack) } + texattribute[a_tagged] = t + return t +end + function tags.stop() if nstack > 0 then nstack = nstack -1 diff --git a/tex/context/base/supp-mat.mkiv b/tex/context/base/supp-mat.mkiv index 944d1a9e0..9049472bd 100644 --- a/tex/context/base/supp-mat.mkiv +++ b/tex/context/base/supp-mat.mkiv @@ -116,6 +116,8 @@ %D such a way that spacing is acceptable. I won't spend much %D words on these macros, because they will be overloaded in %D the units module. 
+%D +%D REPLACED (keep commented): \newsignal\dimensionsignal diff --git a/tex/context/base/supp-num.mkiv b/tex/context/base/supp-num.mkiv index 43b7fce9b..a55ee1fb3 100644 --- a/tex/context/base/supp-num.mkiv +++ b/tex/context/base/supp-num.mkiv @@ -57,9 +57,9 @@ %D %D \startbuffer %D \setdigitsign 0 \digits +12.345,90 -%D \setdigitmode 1 \digits +12.345,90 -%D \setdigitmode 2 \digits +12.345,90 -%D \setdigitmode 3 \digits +12.345,90 +%D \setdigitsign 1 \digits +12.345,90 +%D \setdigitsign 2 \digits +12.345,90 +%D \setdigitsign 3 \digits +12.345,90 %D \stopbuffer %D %D \typebuffer @@ -97,9 +97,6 @@ \ifx\mbox\undefined \let\mbox\normalhbox \fi -% \unexpanded\def\digits -% {\bgroup\let~@\doifnextbgroupelse\dodigits\grabdigit} - \unexpanded\def\digits {\bgroup \let~@% diff --git a/tex/context/base/symb-cow.mkiv b/tex/context/base/symb-imp-cow.mkiv index 59eabae28..59eabae28 100644 --- a/tex/context/base/symb-cow.mkiv +++ b/tex/context/base/symb-imp-cow.mkiv diff --git a/tex/context/base/symb-eur.mkiv b/tex/context/base/symb-imp-eur.mkiv index 19a88c8fa..19a88c8fa 100644 --- a/tex/context/base/symb-eur.mkiv +++ b/tex/context/base/symb-imp-eur.mkiv diff --git a/tex/context/base/symb-jmn.mkiv b/tex/context/base/symb-imp-jmn.mkiv index d66e0a332..d66e0a332 100644 --- a/tex/context/base/symb-jmn.mkiv +++ b/tex/context/base/symb-imp-jmn.mkiv diff --git a/tex/context/base/symb-mis.mkiv b/tex/context/base/symb-imp-mis.mkiv index 1037ba542..1037ba542 100644 --- a/tex/context/base/symb-mis.mkiv +++ b/tex/context/base/symb-imp-mis.mkiv diff --git a/tex/context/base/symb-mvs.mkiv b/tex/context/base/symb-imp-mvs.mkiv index 9902fc9e8..9902fc9e8 100644 --- a/tex/context/base/symb-mvs.mkiv +++ b/tex/context/base/symb-imp-mvs.mkiv diff --git a/tex/context/base/symb-nav.mkiv b/tex/context/base/symb-imp-nav.mkiv index 6e2f2c1f0..6e2f2c1f0 100644 --- a/tex/context/base/symb-nav.mkiv +++ b/tex/context/base/symb-imp-nav.mkiv diff --git a/tex/context/base/task-ini.lua b/tex/context/base/task-ini.lua index 730b5c4dc..d5bc4cb56 100644 --- a/tex/context/base/task-ini.lua +++ b/tex/context/base/task-ini.lua @@ -62,6 +62,7 @@ appendaction("shipouts", "finishers", "attributes.effects.handler") appendaction("shipouts", "finishers", "attributes.viewerlayers.handler") -- disabled appendaction("math", "normalizers", "noads.handlers.unscript", nil, "nohead") -- always on (maybe disabled) +appendaction("math", "normalizers", "noads.handlers.families", nil, "nohead") -- always on appendaction("math", "normalizers", "noads.handlers.relocate", nil, "nohead") -- always on appendaction("math", "normalizers", "noads.handlers.render", nil, "nohead") -- always on appendaction("math", "normalizers", "noads.handlers.collapse", nil, "nohead") -- always on diff --git a/tex/context/base/type-ini.mkiv b/tex/context/base/type-ini.mkiv index 389ca3980..d901f2052 100644 --- a/tex/context/base/type-ini.mkiv +++ b/tex/context/base/type-ini.mkiv @@ -371,12 +371,12 @@ \let\typefaceencoding\s!default % obsolete \def\dostarttypefacedefining#1#2#3% - {\geteparameters[\??ts][\s!rscale=\plusone,\s!features=,\s!fallbacks=,\s!goodies=,#3]% + {\geteparameters[\??ts][\s!rscale=\plusone,\s!features=,\s!fallbacks=,\s!goodies=,\s!direction=,#3]% \pushmacro\fontclass \setcurrentfontclass{#1}% \pushmacro\relativefontsize \let\relativefontsize\@@tsrscale % still needed ? 
- \savefontclassparameters{#2}\@@tsrscale\@@tsfeatures\@@tsfallbacks\@@tsgoodies} + \savefontclassparameters{#2}\@@tsrscale\@@tsfeatures\@@tsfallbacks\@@tsgoodies\@@tsdirection} \def\dostoptypefacedefining {\popmacro\relativefontsize diff --git a/tex/context/base/type-one.mkiv b/tex/context/base/type-one.mkiv index 8e8e82c5b..12b9e14ad 100644 --- a/tex/context/base/type-one.mkiv +++ b/tex/context/base/type-one.mkiv @@ -15,99 +15,4 @@ % one fonts but there are not that many useful left that only % come in type one. -\starttypescriptcollection[allkindofafm] - - % \starttypescript [serif] [bookman] [name] - % \definefontsynonym [Serif] [Bookman-Light] - % \definefontsynonym [SerifItalic] [Bookman-LightItalic] - % \definefontsynonym [SerifBold] [Bookman-DemiBold] - % \definefontsynonym [SerifBoldItalic] [Bookman-DemiBoldItalic] - % \definefontsynonym [SerifSlanted] [Bookman-LightSlanted] - % \definefontsynonym [SerifBoldSlanted] [Bookman-DemiBoldSlanted] - % \definefontsynonym [SerifCaps] [Bookman-Light-Caps] - % \stoptypescript - - % \starttypescript [calligraphy] [chancery] [name] - % \definefontsynonym [Calligraphy] [Chancery] - % \stoptypescript - - % \starttypescript [serif] [schoolbook] [name] - % \definefontsynonym [Serif] [Schoolbook-Roman] - % \definefontsynonym [SerifItalic] [Schoolbook-Italic] - % \definefontsynonym [SerifBold] [Schoolbook-Bold] - % \definefontsynonym [SerifBoldItalic] [Schoolbook-BoldItalic] - % \definefontsynonym [SerifSlanted] [Schoolbook-RomanSlanted] - % \definefontsynonym [SerifBoldSlanted] [Schoolbook-BoldSlanted] - % \definefontsynonym [SerifCaps] [Schoolbook-Roman-Caps] - % \stoptypescript - - % \starttypescript [serif] [utopia] [name] - % \definefontsynonym [Serif] [Utopia-Regular] - % \definefontsynonym [SerifItalic] [Utopia-Italic] - % \definefontsynonym [SerifBold] [Utopia-Bold] - % \definefontsynonym [SerifBoldItalic] [Utopia-BoldItalic] - % \definefontsynonym [SerifSlanted] [Utopia-Slanted] - % \definefontsynonym [SerifBoldSlanted] [Utopia-BoldSlanted] - % \definefontsynonym [SerifCaps] [Utopia-Regular-Caps] - % \stoptypescript - - % \starttypescript [serif] [charter] [name] - % \definefontsynonym [Serif] [Charter-Roman] - % \definefontsynonym [SerifItalic] [Charter-Italic] - % \definefontsynonym [SerifBold] [Charter-Bold] - % \definefontsynonym [SerifBoldItalic] [Charter-BoldItalic] - % \definefontsynonym [SerifSlanted] [Charter-Slanted] - % \definefontsynonym [SerifBoldSlanted] [Charter-BoldSlanted] - % \definefontsynonym [SerifCaps] [Charter-Roman-Caps] - % \stoptypescript - - % \starttypescript [serif] [times] [name] - % \definefontsynonym [Serif] [Times-Roman] - % \definefontsynonym [SerifBold] [Times-Bold] - % \definefontsynonym [SerifItalic] [Times-Italic] - % \definefontsynonym [SerifSlanted] [Times-Italic] - % \definefontsynonym [SerifBoldItalic] [Times-BoldItalic] - % \definefontsynonym [SerifBoldSlanted] [Times-BoldItalic] - % \definefontsynonym [SerifCaps] [Times-Caps] - % \stoptypescript - - % \starttypescript [sans] [helvetica] [name] - % \definefontsynonym [Sans] [Helvetica] - % \definefontsynonym [SansBold] [Helvetica-Bold] - % \definefontsynonym [SansItalic] [Helvetica-Oblique] - % \definefontsynonym [SansSlanted] [Helvetica-Oblique] - % \definefontsynonym [SansBoldItalic] [Helvetica-BoldOblique] - % \definefontsynonym [SansBoldSlanted] [Helvetica-BoldOblique] - % \definefontsynonym [SansCaps] [Helvetica-Caps] - % \stoptypescript - - % \starttypescript [math] [helvetica] [name] - % \definefontsynonym [MathRoman] [Helvetica-MathRoman] 
- % \definefontsynonym [MathExtension] [Helvetica-MathExtendedSymbols] - % \definefontsynonym [MathItalic] [Helvetica-MathItalics] - % \definefontsynonym [MathSymbol] [Helvetica-MathSymbols] - % \stoptypescript - - % \starttypescript [mono] [courier] [name] - % \definefontsynonym [Mono] [Courier] - % \definefontsynonym [MonoBold] [Courier-Bold] - % \definefontsynonym [MonoItalic] [Courier-Oblique] - % \definefontsynonym [MonoSlanted] [Courier-Oblique] - % \definefontsynonym [MonoBoldItalic] [Courier-BoldOblique] - % \definefontsynonym [MonoBoldSlanted] [Courier-BoldOblique] - % \definefontsynonym [MonoCaps] [Courier] - % \stoptypescript - - % \starttypescript [serif] [palatino] [name] - % \definefontsynonym [Serif] [Palatino] - % \definefontsynonym [SerifBold] [Palatino-Bold] - % \definefontsynonym [SerifItalic] [Palatino-Italic] - % \definefontsynonym [SerifSlanted] [Palatino-Slanted] - % \definefontsynonym [SerifBoldItalic] [Palatino-BoldItalic] - % \definefontsynonym [SerifBoldSlanted] [Palatino-BoldSlanted] - % \definefontsynonym [SerifCaps] [Palatino-Caps] - % \stoptypescript - -\stoptypescriptcollection - \endinput diff --git a/tex/context/base/type-otf.mkiv b/tex/context/base/type-otf.mkiv index 0ef85ad88..0685ec9ef 100644 --- a/tex/context/base/type-otf.mkiv +++ b/tex/context/base/type-otf.mkiv @@ -64,11 +64,6 @@ % \definefontsynonym [LMRoman10-DemiOblique] [\s!name:LMRoman10-DemiOblique] [\s!features=\s!default] % \definefontsynonym [LMRoman10-DemiOblique] [lmroman10-demioblique] [\s!features=\s!default] -% \starttypescript [math] [modern,computer-modern,latin-modern] -% \definefontsynonym[MathSymbol][MathSymbol-Regular@symbol-math] -% \loadfontgoodies[symbol-math] -% \stoptypescript - \starttypescriptcollection[latinmodern] %D The names have changed (again) ... but I will not change the symbolic names @@ -226,9 +221,6 @@ \starttypescript [math] [modern,latin-modern,computer-modern,default] [name] \definefontsynonym [MathRoman] [LMMathRoman-Regular] \definefontsynonym [MathRomanBold] [LMMathRoman-Bold] - %definefontsynonym [MathExtension] [LMMathExtension-Regular] - %definefontsynonym [MathItalic] [LMMathItalic-Italic] - %definefontsynonym [MathSymbol] [LMMathSymbols-Italic] \stoptypescript \starttypescript [serif] [modern-variable,latin-modern-variable,default] [name] @@ -1814,43 +1806,48 @@ \stoptypescriptcollection -\starttypescriptcollection[stix] - - % This typescript is only provided to keep an eye on developments of this font - % but currenty these are not proper opentype math fonts (for instance they have - % no math table yet). We will not make a virtual font for this as eventually - % there will be a decent version. Beware, we force an otf suffix as there happen - % to be ttf files as well. BTW, why 'italic' infull and 'bol' without 'd'? 
-
-    \starttypescript [math] [stix] [name]
-        \definefontsynonym[MathRoman][\s!file:stixgeneral.otf] [\s!features=\s!math]
-    \stoptypescript
-
-    \starttypescript [serif] [stix] [name]
-        \setups[\s!font:\s!fallback:\s!serif]
-        \definefontsynonym[\s!Serif]          [\s!file:stixgeneral.otf]       [\s!features=\s!default]
-        \definefontsynonym[\s!SerifBold]      [\s!file:stixgeneralbol.otf]    [\s!features=\s!default]
-        \definefontsynonym[\s!SerifItalic]    [\s!file:stixgeneralitalic.otf] [\s!features=\s!default]
-        \definefontsynonym[\s!SerifBoldItalic][\s!file:stixgeneralbolita.otf] [\s!features=\s!default]
-    \stoptypescript
-
-    \starttypescript[stix]
-        \definetypeface [stix] [rm] [\s!serif] [stix] [\s!default]
-        \definetypeface [stix] [mm] [\s!math]  [stix] [\s!default]
-    \stoptypescript
+% \starttypescriptcollection[stix]
+%
+%     % This typescript is only provided to keep an eye on developments of this font
+%     % but currenty these are not proper opentype math fonts (for instance they have
+%     % no math table yet). We will not make a virtual font for this as eventually
+%     % there will be a decent version. Beware, we force an otf suffix as there happen
+%     % to be ttf files as well. BTW, why 'italic' infull and 'bol' without 'd'?
+%
+%     % As xits keeps evolving and also is bidirectional, it makes no sense to keep
+%     % stix support around.
+%
+%     \starttypescript [math] [stix] [name]
+%         \definefontsynonym[MathRoman][\s!file:stixgeneral.otf] [\s!features=\s!math]
+%     \stoptypescript
+%
+%     \starttypescript [serif] [stix] [name]
+%         \setups[\s!font:\s!fallback:\s!serif]
+%         \definefontsynonym[\s!Serif]          [\s!file:stixgeneral.otf]       [\s!features=\s!default]
+%         \definefontsynonym[\s!SerifBold]      [\s!file:stixgeneralbol.otf]    [\s!features=\s!default]
+%         \definefontsynonym[\s!SerifItalic]    [\s!file:stixgeneralitalic.otf] [\s!features=\s!default]
+%         \definefontsynonym[\s!SerifBoldItalic][\s!file:stixgeneralbolita.otf] [\s!features=\s!default]
+%     \stoptypescript
+%
+%     \starttypescript[stix]
+%         \definetypeface [stix] [rm] [\s!serif] [stix] [\s!default]
+%         \definetypeface [stix] [mm] [\s!math]  [stix] [\s!default]
+%     \stoptypescript
+%
+% \stoptypescriptcollection
 
-\stoptypescriptcollection
+% This one makes more sense. Xits uses the glyph collection from stix but packages
+% it in a proper OpenType Math font. From the Stix site: Version 1.1, which will
+% include fonts packaged for use with Microsoft Office applications, is scheduled
+% for release by the end of 2010. Version 1.2, which will include Type 1 fonts for
+% use with LaTeX, will follow in 2011. So, we are on our own anyway.
 
 \starttypescriptcollection[xits]
 
-    % This one makes more sense. Xits uses the glyph collection from stix but packages
-    % it in a proper OpenType Math font. From the Stix site: Version 1.1, which will
-    % include fonts packaged for use with Microsoft Office applications, is scheduled
-    % for release by the end of 2010. Version 1.2, which will include Type 1 fonts for
-    % use with LaTeX, will follow in 2011. So, we are on our own anyway.
-
-    \starttypescript [math] [xits] [name]
-        \definefontsynonym[MathRoman][\s!file:xits-math.otf][\s!features=\s!math\mathsizesuffix,\s!goodies=xits-math]
+    \starttypescript [math] [xits,xitsbidi] [name]
+        \definefontsynonym[MathRoman]   [\s!file:xits-math.otf][\s!features=\s!math\mathsizesuffix,\s!goodies=xits-math]
+        \definefontsynonym[MathRomanL2R][\s!file:xits-math.otf][\s!features=\s!math\mathsizesuffix-l2r,\s!goodies=xits-math]
+        \definefontsynonym[MathRomanR2L][\s!file:xits-math.otf][\s!features=\s!math\mathsizesuffix-r2l,\s!goodies=xits-math]
     \stoptypescript
 
     \starttypescript [serif] [xits] [name]
@@ -1868,6 +1865,16 @@
         \definetypeface [xits] [mm] [\s!math]  [xits] [\s!default]
     \stoptypescript
 
+    % for the moment
+
+    \starttypescript[xitsbidi]
+        \definetypeface [xitsbidi] [rm] [\s!serif] [xits]     [\s!default]
+        \definetypeface [xitsbidi] [ss] [\s!sans]  [heros]    [\s!default] [\s!rscale=0.9]
+        \definetypeface [xitsbidi] [tt] [\s!mono]  [modern]   [\s!default] [\s!rscale=1.05]
+        %definetypeface [xitsbidi] [mm] [\s!math]  [xitsbidi] [bidi]       [\s!direction=\s!both]
+        \definetypeface [xitsbidi] [mm] [\s!math]  [xitsbidi] [\s!default] [\s!direction=\s!both]
+    \stoptypescript
+
 \stoptypescriptcollection
 
 \starttypescriptcollection[punk]
diff --git a/tex/context/base/type-siz.mkiv b/tex/context/base/type-siz.mkiv
index 040579675..ec51bc77d 100644
--- a/tex/context/base/type-siz.mkiv
+++ b/tex/context/base/type-siz.mkiv
@@ -43,6 +43,12 @@
     [mm] [bfmath]
 \stoptypescript
 
+% \starttypescript [math] [bidi] [size]
+%   \definebodyfont
+%     [4pt,5pt,6pt,7pt,8pt,9pt,10pt,11pt,12pt,14.4pt,17.3pt]
+%     [mm] [bidi]
+% \stoptypescript
+
 \starttypescript [handwriting] [default] [size]
   \definebodyfont
     [4pt,5pt,6pt,7pt,8pt,9pt,10pt,11pt,12pt,14.4pt,17.3pt]
@@ -375,8 +381,6 @@
 
 \stoptypescript
 
-% ms=MathSymbol sa 1
-
 \starttypescript [math] [modern,computer-modern,latin-modern] [size]
   \definebodyfont [12pt] [mm]
     [mr=LMMathRoman12-Regular sa 1, mb=LMMathRoman12-Bold sa 1]
diff --git a/tex/context/base/util-deb.lua b/tex/context/base/util-deb.lua
index be0c244ff..fa2d96e41 100644
--- a/tex/context/base/util-deb.lua
+++ b/tex/context/base/util-deb.lua
@@ -158,3 +158,20 @@ function inspect(i) -- global function
         print(tostring(i))
     end
 end
+
+-- from the lua book:
+
+function traceback()
+    local level = 1
+    while true do
+        local info = debug.getinfo(level, "Sl")
+        if not info then
+            break
+        elseif info.what == "C" then
+            print(format("%3i : C function",level))
+        else
+            print(format("%3i : [%s]:%d",level,info.short_src,info.currentline))
+        end
+        level = level + 1
+    end
+end
diff --git a/tex/context/interface/cont-nl.xml b/tex/context/interface/cont-nl.xml
index 2d53444fb..dbe1b0223 100644
--- a/tex/context/interface/cont-nl.xml
+++ b/tex/context/interface/cont-nl.xml
@@ -5175,7 +5175,7 @@
     <cd:parameter name="maxbreedte">
       <cd:constant type="cd:dimension"/>
     </cd:parameter>
-    <cd:parameter name="order">
+    <cd:parameter name="volgorde">
       <cd:constant type="commando"/>
       <cd:constant type="alles"/>
       <cd:constant type="normaal"/>
diff --git a/tex/context/interface/keys-cs.xml b/tex/context/interface/keys-cs.xml
index 6fb9bae2f..ce046db2c 100644
--- a/tex/context/interface/keys-cs.xml
+++ b/tex/context/interface/keys-cs.xml
@@ -609,6 +609,7 @@
     <cd:constant name='commandafter' value='prikazpo'/>
     <cd:constant name='commandbefore' value='predchoziprikaz'/>
     <cd:constant name='commands' value='prikazy'/>
+    <cd:constant name='compact' value='compact'/>
     <cd:constant name='component' value='component'/>
     <cd:constant name='compoundhyphen' value='compoundhyphen'/>
     <cd:constant name='compress' value='compress'/>
diff --git a/tex/context/interface/keys-de.xml b/tex/context/interface/keys-de.xml
index d8f42786a..a68e8ca0e 100644
--- a/tex/context/interface/keys-de.xml
+++ b/tex/context/interface/keys-de.xml
@@ -609,6 +609,7 @@
     <cd:constant name='commandafter' value='zumbefehl'/>
     <cd:constant name='commandbefore' value='vorbefehl'/>
     <cd:constant name='commands' value='befehle'/>
+    <cd:constant name='compact' value='compact'/>
     <cd:constant name='component' value='component'/>
     <cd:constant name='compoundhyphen' value='compoundhyphen'/>
     <cd:constant name='compress' value='compress'/>
diff --git a/tex/context/interface/keys-en.xml b/tex/context/interface/keys-en.xml
index 09f44dbf3..70e6ae219 100644
--- a/tex/context/interface/keys-en.xml
+++ b/tex/context/interface/keys-en.xml
@@ -609,6 +609,7 @@
     <cd:constant name='commandafter' value='commandafter'/>
     <cd:constant name='commandbefore' value='commandbefore'/>
     <cd:constant name='commands' value='commands'/>
+    <cd:constant name='compact' value='compact'/>
     <cd:constant name='component' value='component'/>
     <cd:constant name='compoundhyphen' value='compoundhyphen'/>
     <cd:constant name='compress' value='compress'/>
diff --git a/tex/context/interface/keys-fr.xml b/tex/context/interface/keys-fr.xml
index 41ce0450f..7bd22aa1e 100644
--- a/tex/context/interface/keys-fr.xml
+++ b/tex/context/interface/keys-fr.xml
@@ -609,6 +609,7 @@
     <cd:constant name='commandafter' value='commandeapres'/>
     <cd:constant name='commandbefore' value='commandeavant'/>
     <cd:constant name='commands' value='commandes'/>
+    <cd:constant name='compact' value='compact'/>
     <cd:constant name='component' value='composant'/>
     <cd:constant name='compoundhyphen' value='compoundhyphen'/>
     <cd:constant name='compress' value='compress'/>
diff --git a/tex/context/interface/keys-it.xml b/tex/context/interface/keys-it.xml
index e1ecdbed8..2adfd46e8 100644
--- a/tex/context/interface/keys-it.xml
+++ b/tex/context/interface/keys-it.xml
@@ -609,6 +609,7 @@
     <cd:constant name='commandafter' value='comdandodopo'/>
     <cd:constant name='commandbefore' value='comandoprima'/>
     <cd:constant name='commands' value='comandi'/>
+    <cd:constant name='compact' value='compact'/>
     <cd:constant name='component' value='component'/>
     <cd:constant name='compoundhyphen' value='compoundhyphen'/>
     <cd:constant name='compress' value='compress'/>
diff --git a/tex/context/interface/keys-nl.xml b/tex/context/interface/keys-nl.xml
index a28da1f85..eda637059 100644
--- a/tex/context/interface/keys-nl.xml
+++ b/tex/context/interface/keys-nl.xml
@@ -609,6 +609,7 @@
     <cd:constant name='commandafter' value='nacommando'/>
     <cd:constant name='commandbefore' value='voorcommando'/>
     <cd:constant name='commands' value='commandos'/>
+    <cd:constant name='compact' value='compact'/>
     <cd:constant name='component' value='component'/>
     <cd:constant name='compoundhyphen' value='koppelteken'/>
     <cd:constant name='compress' value='comprimeren'/>
@@ -828,7 +829,7 @@
     <cd:constant name='openpage' value='openpagina'/>
     <cd:constant name='openpageaction' value='openpaginaactie'/>
     <cd:constant name='option' value='optie'/>
-    <cd:constant name='order' value='order'/>
+    <cd:constant name='order' value='volgorde'/>
     <cd:constant name='orientation' value='orientatie'/>
     <cd:constant name='otherstext' value='otherstext'/>
     <cd:constant name='outermargin' value='buitenmarge'/>
diff --git a/tex/context/interface/keys-pe.xml b/tex/context/interface/keys-pe.xml
index 89307f7fa..71137281f 100644
--- a/tex/context/interface/keys-pe.xml
+++ b/tex/context/interface/keys-pe.xml
@@ -609,6 +609,7 @@
     <cd:constant name='commandafter' value='فرمانبعداز'/>
     <cd:constant name='commandbefore' value='فرمانقبلاز'/>
     <cd:constant name='commands' value='فرمانها'/>
+    <cd:constant name='compact' value='compact'/>
     <cd:constant name='component' value='مولفه'/>
     <cd:constant name='compoundhyphen' value='compoundhyphen'/>
     <cd:constant name='compress' value='فشردن'/>
diff --git a/tex/context/interface/keys-ro.xml b/tex/context/interface/keys-ro.xml
index ef715b83e..03c1d7377 100644
--- a/tex/context/interface/keys-ro.xml
+++ b/tex/context/interface/keys-ro.xml
@@ -609,6 +609,7 @@
     <cd:constant name='commandafter' value='comandadupa'/>
     <cd:constant name='commandbefore' value='comandainainte'/>
     <cd:constant name='commands' value='comenzi'/>
+    <cd:constant name='compact' value='compact'/>
     <cd:constant name='component' value='component'/>
     <cd:constant name='compoundhyphen' value='compoundhyphen'/>
     <cd:constant name='compress' value='compress'/>
diff --git a/tex/generic/context/luatex-fonts-merged.lua b/tex/generic/context/luatex-fonts-merged.lua
index f9234d8fd..455d8ba41 100644
--- a/tex/generic/context/luatex-fonts-merged.lua
+++ b/tex/generic/context/luatex-fonts-merged.lua
@@ -1,6 +1,6 @@
 -- merged file : luatex-fonts-merged.lua
 -- parent file : luatex-fonts.lua
--- merge date : 06/19/11 14:17:51
+-- merge date : 06/23/11 19:25:18
 
 do -- begin closure to overcome local limits and interference
 
@@ -1100,6 +1100,44 @@ if not modules then modules = { } end modules ['l-lpeg'] = {
 
 local lpeg = require("lpeg")
 
+-- tracing (only used when we encounter a problem in integration of lpeg in luatex)
+
+local report = texio and texio.write_nl or print
+
+--~ local lpmatch = lpeg.match
+--~ local lpprint = lpeg.print
+--~ local lpp     = lpeg.P
+--~ local lpr     = lpeg.R
+--~ local lps     = lpeg.S
+--~ local lpc     = lpeg.C
+--~ local lpb     = lpeg.B
+--~ local lpv     = lpeg.V
+--~ local lpcf    = lpeg.Cf
+--~ local lpcb    = lpeg.Cb
+--~ local lpcg    = lpeg.Cg
+--~ local lpct    = lpeg.Ct
+--~ local lpcs    = lpeg.Cs
+--~ local lpcc    = lpeg.Cc
+--~ local lpcmt   = lpeg.Cmt
+--~ local lpcarg  = lpeg.Carg
+
+--~ function lpeg.match(l,...) report("LPEG MATCH") lpprint(l) return lpmatch(l,...) end
+
+--~ function lpeg.P   (l) local p = lpp   (l) report("LPEG P =")    lpprint(l) return p end
+--~ function lpeg.R   (l) local p = lpr   (l) report("LPEG R =")    lpprint(l) return p end
+--~ function lpeg.S   (l) local p = lps   (l) report("LPEG S =")    lpprint(l) return p end
+--~ function lpeg.C   (l) local p = lpc   (l) report("LPEG C =")    lpprint(l) return p end
+--~ function lpeg.B   (l) local p = lpb   (l) report("LPEG B =")    lpprint(l) return p end
+--~ function lpeg.V   (l) local p = lpv   (l) report("LPEG V =")    lpprint(l) return p end
+--~ function lpeg.Cf  (l) local p = lpcf  (l) report("LPEG Cf =")   lpprint(l) return p end
+--~ function lpeg.Cb  (l) local p = lpcb  (l) report("LPEG Cb =")   lpprint(l) return p end
+--~ function lpeg.Cg  (l) local p = lpcg  (l) report("LPEG Cg =")   lpprint(l) return p end
+--~ function lpeg.Ct  (l) local p = lpct  (l) report("LPEG Ct =")   lpprint(l) return p end
+--~ function lpeg.Cs  (l) local p = lpcs  (l) report("LPEG Cs =")   lpprint(l) return p end
+--~ function lpeg.Cc  (l) local p = lpcc  (l) report("LPEG Cc =")   lpprint(l) return p end
+--~ function lpeg.Cmt (l) local p = lpcmt (l) report("LPEG Cmt =")  lpprint(l) return p end
+--~ function lpeg.Carg(l) local p = lpcarg(l) report("LPEG Carg =") lpprint(l) return p end
+
 local type = type
 
 local byte, char = string.byte, string.char
@@ -1208,17 +1246,17 @@ patterns.unspacer = ((patterns.spacer^1)/"")^0
 patterns.somecontent = (anything - newline - space)^1 -- (utf8char - newline - space)^1
 patterns.beginline = #(1-newline)
 
-local unquoted = Cs(patterns.unquoted * endofstring) -- not C
-
-function string.unquoted(str)
-    return match(unquoted,str) or str
-end
-
--- more efficient:
+-- local unquoted = Cs(patterns.unquoted * endofstring) -- not C
+--
+-- function string.unquoted(str)
+--     return match(unquoted,str) or str
+-- end
+--
+-- more efficient on long strings:
 
 local unquoted = (
-    squote * Cs(1 - P(-2)) * squote
-  + dquote * Cs(1 - P(-2)) * dquote
+    squote * Cs((1 - P(-2))^0) * squote
+  + dquote * Cs((1 - P(-2))^0) * dquote
 )
 
 function string.unquoted(str)
@@ -1227,10 +1265,12 @@ end
 
 patterns.unquoted = unquoted
 
---~ print(string.unquoted("test"))
---~ print(string.unquoted([["t\"est"]]))
---~ print(string.unquoted([["t\"est"x]]))
---~ print(string.unquoted("\'test\'"))
+-- print(string.unquoted("test"))
+-- print(string.unquoted([["t\"est"]]))
+-- print(string.unquoted([["t\"est"x]]))
+-- print(string.unquoted("\'test\'"))
+-- print(string.unquoted('"test"'))
+-- print(string.unquoted('"test"'))
 
 function lpeg.anywhere(pattern) --slightly adapted from website
     return P { P(pattern) + 1 * V(1) } -- why so complex?
@@ -1696,6 +1736,12 @@ end
 
 --~ Cf(Ct("") * (Cg(C(...) * "=" * Cs(...)))^0, rawset)
 
+--~ for k, v in next, patterns do
+--~     if type(v) ~= "table" then
+--~         lpeg.print(v)
+--~     end
+--~ end
+
 end -- closure
 
 do -- begin closure to overcome local limits and interference
@@ -3178,14 +3224,16 @@ function constructors.calculatescale(tfmdata,scaledpoints)
     return scaledpoints, scaledpoints / (parameters.units or 1000) -- delta
 end
 
-function constructors.assignmathparameters(target,tfmdata)
+function constructors.assignmathparameters(target,original) -- dumb version, not used in context
     -- when a tfm file is loaded, it has already been scaled
-    -- and it never enters the scaled so this is otf only
+    -- and it never enters the scaled so this is otf only and
+    -- even then we do some extra in the context math plugins
     local mathparameters = original.mathparameters
     if mathparameters and next(mathparameters) then
         local targetparameters     = target.parameters
+        local targetproperties     = target.properties
         local targetmathparameters = { }
-        local factor = targetparameters.factor
+        local factor = targetproperties.math_is_scaled and 1 or targetparameters.factor
         for name, value in next, mathparameters do
             if name == "RadicalDegreeBottomRaisePercent" then
                 targetmathparameters[name] = value
@@ -3193,15 +3241,12 @@ function constructors.assignmathparameters(target,tfmdata)
                 targetmathparameters[name] = value * factor
             end
         end
-        if not targetmathparameters.AccentBaseHeight then
-            targetmathparameters.AccentBaseHeight = nil -- safeguard, still needed?
-        end
-        if not targetmathparameters.FractionDelimiterSize then
-            targetmathparameters.FractionDelimiterSize = 0
-        end
-        if not mathparameters.FractionDelimiterDisplayStyleSize then
-            targetmathparameters.FractionDelimiterDisplayStyleSize = 0
-        end
+        -- if not targetmathparameters.FractionDelimiterSize then
+        --     targetmathparameters.FractionDelimiterSize = 0
+        -- end
+        -- if not mathparameters.FractionDelimiterDisplayStyleSize then
+        --     targetmathparameters.FractionDelimiterDisplayStyleSize = 0
+        -- end
         target.mathparameters = targetmathparameters
     end
 end
@@ -3210,7 +3255,7 @@ function constructors.scale(tfmdata,specification)
     local target = { } -- the new table
     --
    if tonumber(specification) then
-        specification = { size = specification }
+        specification = { size = specification }
     end
     --
     local scaledpoints = specification.size