summaryrefslogtreecommitdiff
path: root/tex/context/base/mkiv
diff options
context:
space:
mode:
authorHans Hagen <pragma@wxs.nl>2017-05-25 13:21:58 +0200
committerContext Git Mirror Bot <phg42.2a@gmail.com>2017-05-25 13:21:58 +0200
commit82aed3e7e8af29f359ebef4f93684d20e98107e6 (patch)
tree2b92c44d14566481aad5635f479b1b106d4e2347 /tex/context/base/mkiv
parentaceba29d651766f5621b9812d4c40e28029bc4ea (diff)
downloadcontext-82aed3e7e8af29f359ebef4f93684d20e98107e6.tar.gz
2017-05-25 12:56:00
Diffstat (limited to 'tex/context/base/mkiv')
-rw-r--r--tex/context/base/mkiv/char-fio.lua14
-rw-r--r--tex/context/base/mkiv/char-utf.lua3
-rw-r--r--tex/context/base/mkiv/cont-log.mkiv22
-rw-r--r--tex/context/base/mkiv/cont-new.mkiv2
-rw-r--r--tex/context/base/mkiv/cont-run.mkiv22
-rw-r--r--tex/context/base/mkiv/context.mkiv2
-rw-r--r--tex/context/base/mkiv/data-lst.lua56
-rw-r--r--tex/context/base/mkiv/data-res.lua181
-rw-r--r--tex/context/base/mkiv/data-tmp.lua2
-rw-r--r--tex/context/base/mkiv/data-use.lua4
-rw-r--r--tex/context/base/mkiv/data-zip.lua7
-rw-r--r--tex/context/base/mkiv/file-job.lua9
-rw-r--r--tex/context/base/mkiv/file-res.lua2
-rw-r--r--tex/context/base/mkiv/font-col.mkvi2
-rw-r--r--tex/context/base/mkiv/font-def.lua1
-rw-r--r--tex/context/base/mkiv/font-ext.lua49
-rw-r--r--tex/context/base/mkiv/font-gbn.lua12
-rw-r--r--tex/context/base/mkiv/font-map.lua33
-rw-r--r--tex/context/base/mkiv/font-ocl.lua2
-rw-r--r--tex/context/base/mkiv/font-otc.lua11
-rw-r--r--tex/context/base/mkiv/font-otj.lua35
-rw-r--r--tex/context/base/mkiv/font-otl.lua1
-rw-r--r--tex/context/base/mkiv/font-otn.lua14
-rw-r--r--tex/context/base/mkiv/font-otr.lua7
-rw-r--r--tex/context/base/mkiv/font-ots.lua462
-rw-r--r--tex/context/base/mkiv/font-ttf.lua43
-rw-r--r--tex/context/base/mkiv/lang-exc.lua16
-rw-r--r--tex/context/base/mkiv/lang-ini.lua20
-rw-r--r--tex/context/base/mkiv/lang-wrd.lua6
-rw-r--r--tex/context/base/mkiv/lpdf-ini.lua2
-rw-r--r--tex/context/base/mkiv/luat-fio.lua6
-rw-r--r--tex/context/base/mkiv/lxml-aux.lua9
-rw-r--r--tex/context/base/mkiv/lxml-tab.lua355
-rw-r--r--tex/context/base/mkiv/lxml-tex.lua113
-rw-r--r--tex/context/base/mkiv/math-act.lua17
-rw-r--r--tex/context/base/mkiv/math-fbk.lua62
-rw-r--r--tex/context/base/mkiv/math-noa.lua87
-rw-r--r--tex/context/base/mkiv/node-fnt.lua4
-rw-r--r--tex/context/base/mkiv/node-nut.lua54
-rw-r--r--tex/context/base/mkiv/node-syn.lua281
-rw-r--r--tex/context/base/mkiv/page-one.mkiv1
-rw-r--r--tex/context/base/mkiv/publ-aut.lua18
-rw-r--r--tex/context/base/mkiv/publ-ini.mkiv1
-rw-r--r--tex/context/base/mkiv/status-files.pdfbin25632 -> 25616 bytes
-rw-r--r--tex/context/base/mkiv/status-lua.pdfbin424884 -> 424658 bytes
-rw-r--r--tex/context/base/mkiv/tabl-frm.mkiv115
-rw-r--r--tex/context/base/mkiv/trac-set.lua27
-rw-r--r--tex/context/base/mkiv/trac-tex.lua2
48 files changed, 1456 insertions, 738 deletions
diff --git a/tex/context/base/mkiv/char-fio.lua b/tex/context/base/mkiv/char-fio.lua
index fa69d9356..9939bf041 100644
--- a/tex/context/base/mkiv/char-fio.lua
+++ b/tex/context/base/mkiv/char-fio.lua
@@ -29,10 +29,10 @@ disableaction(textfileactions, "characters.filters.utf.reorder")
appendaction (textlineactions,"system","characters.filters.utf.reorder")
disableaction(textlineactions, "characters.filters.utf.reorder")
-appendaction (textfileactions,"system","characters.filters.utf.collapse")
+appendaction (textfileactions,"system","characters.filters.utf.collapse") -- not per line
disableaction(textfileactions, "characters.filters.utf.collapse")
-appendaction (textfileactions,"system","characters.filters.utf.decompose")
+appendaction (textfileactions,"system","characters.filters.utf.decompose") -- not per line
disableaction(textfileactions, "characters.filters.utf.decompose")
local report = logs.reporter("unicode filter")
@@ -65,12 +65,14 @@ function utffilters.enable()
end
local function configure(what,v)
- if v == "line" then
+ if v == "" then
+ report("%a unset",what)
+ elseif v == "line" then
disableaction(textfileactions,what)
enableaction (textlineactions,what)
elseif not toboolean(v) then
if reporting ~= "never" then
- report("%a disabled",k)
+ report("%a disabled",what)
reporting = "yes"
end
enforced[what] = false
@@ -82,6 +84,10 @@ local function configure(what,v)
end
end
+-- first line:
+--
+-- % directives="filters.utf.collapse=true"
+
directives.register("filters.utf.reorder", function(v) configure("characters.filters.utf.reorder", v) end)
directives.register("filters.utf.collapse", function(v) configure("characters.filters.utf.collapse", v) end)
directives.register("filters.utf.decompose", function(v) configure("characters.filters.utf.decompose",v) end)
diff --git a/tex/context/base/mkiv/char-utf.lua b/tex/context/base/mkiv/char-utf.lua
index 5702f2087..5b677dbac 100644
--- a/tex/context/base/mkiv/char-utf.lua
+++ b/tex/context/base/mkiv/char-utf.lua
@@ -183,7 +183,8 @@ local p_collapse = nil -- so we can reset if needed
local function prepare()
local tree = utfchartabletopattern(collapsed)
- p_collapse = Cs((tree/collapsed + p_utf8character)^0 * P(-1)) -- the P(1) is needed in order to accept non utf
+ -- p_collapse = Cs((tree/collapsed + p_utf8character)^0 * P(-1))
+ p_collapse = Cs((tree/collapsed + p_utf8character)^0)
end
function utffilters.collapse(str,filename)
diff --git a/tex/context/base/mkiv/cont-log.mkiv b/tex/context/base/mkiv/cont-log.mkiv
index 11cdfb9b8..8b4660f3a 100644
--- a/tex/context/base/mkiv/cont-log.mkiv
+++ b/tex/context/base/mkiv/cont-log.mkiv
@@ -22,23 +22,26 @@
\unexpanded\def\TeX
{\dontleavehmode
\begingroup
- \setbox\scratchbox\hbox{M}%
T%
+ \setbox\scratchbox\hbox{M}%
\kern-.1667\wd\scratchbox
\lower.5\exheight\hbox{E}%
\kern-.125\wd\scratchbox
X%
\endgroup}
+\unexpanded\def\TeXsuffix{\wordboundary\TeX}
+\unexpanded\def\TeXprefix{\TeX\wordboundary}
+
\unexpanded\def\ConTeXt
{\dontleavehmode
\begingroup
Con%
+ \wordboundary
\setbox\scratchboxone\hbox{T\kern\zeropoint e}%
\setbox\scratchboxtwo\hbox{Te}%
\discretionary{-}{}{\kern\dimexpr\wd\scratchboxtwo-\wd\scratchboxone\relax}%
- \TeX
- t%
+ \TeX t%
\endgroup}
\unexpanded\def\PPCHTeX{ppch\TeX}
@@ -196,12 +199,12 @@
%D Some placeholders:
\unexpanded\def\eTeX {\mathematics{\varepsilon}-\TeX}
-\unexpanded\def\pdfTeX {pdf\TeX}
-\unexpanded\def\pdfeTeX {pdfe-\TeX}
-\unexpanded\def\luaTeX {lua\TeX}
+\unexpanded\def\pdfTeX {pdf\wordboundary\TeX}
+\unexpanded\def\pdfeTeX {pdfe-\wordboundary\TeX}
+\unexpanded\def\luaTeX {lua\wordboundary\TeX}
\unexpanded\def\Lua {Lua}
-\unexpanded\def\luajitTeX{luajit\TeX}
-\unexpanded\def\metaTeX {meta\TeX}
+\unexpanded\def\luajitTeX{luajit\wordboundary\TeX}
+\unexpanded\def\metaTeX {meta\wordboundary\TeX}
\unexpanded\def\XeTeX {X\lower.5\exheight\hbox{\kern-.15\emwidth\mirror{E}}\kern-.1667\emwidth\TeX}
% Adapted from a patch by Mojca:
@@ -320,8 +323,11 @@
\def\MetaFont {MetaFont}%
\def\MetaFun {MetaFun}%
\def\TeX {TeX}%
+ \def\TeXsuffix{TeX}%
+ \def\TeXprefix{TeX}%
\def\LuaTeX {LuaTeX}%
\def\LuajitTeX{LuajitTeX}%
+ \let\wordboundary\empty
\to \everysimplifycommands
\protect \endinput
diff --git a/tex/context/base/mkiv/cont-new.mkiv b/tex/context/base/mkiv/cont-new.mkiv
index 3aacc978c..a80d41e5e 100644
--- a/tex/context/base/mkiv/cont-new.mkiv
+++ b/tex/context/base/mkiv/cont-new.mkiv
@@ -11,7 +11,7 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-\newcontextversion{2017.05.12 22:40}
+\newcontextversion{2017.05.25 12:50}
%D This file is loaded at runtime, thereby providing an excellent place for
%D hacks, patches, extensions and new features.
diff --git a/tex/context/base/mkiv/cont-run.mkiv b/tex/context/base/mkiv/cont-run.mkiv
index 490c6bee2..68b2f635f 100644
--- a/tex/context/base/mkiv/cont-run.mkiv
+++ b/tex/context/base/mkiv/cont-run.mkiv
@@ -13,9 +13,27 @@
\writestatus{loading}{ConTeXt Core Macros / Runner}
-\unprotect
-
\registerctxluafile{node-syn}{1.001}
\registerctxluafile{cont-run}{1.001}
+% \enabletrackers[system.synctex.visualize]
+% \enabletrackers[system.synctex.xml]
+% \enabledirectives[system.synctex.details]
+% \setupsynctex[state=start]
+
+\unprotect
+
+\let\synctexsetfilename \clf_synctexsetfilename
+\let\synctexresetfilename\clf_synctexresetfilename
+\let\synctexblockfilename\clf_synctexblockfilename
+
+\unexpanded\def\setupsynctex[#1]%
+ {\begingroup
+ \getdummyparameters[\c!state=,#1]%
+ \doifelse{\dummyparameter\c!state}\v!start\clf_synctexenable\clf_synctexdisable
+ \endgroup}
+
+\unexpanded\def\blocksynctexfile[#1]%
+ {\processcommacommand{#1}\synctexblockfilename}
+
\protect \endinput
diff --git a/tex/context/base/mkiv/context.mkiv b/tex/context/base/mkiv/context.mkiv
index 6d946e0bc..e7037678d 100644
--- a/tex/context/base/mkiv/context.mkiv
+++ b/tex/context/base/mkiv/context.mkiv
@@ -39,7 +39,7 @@
%D up and the dependencies are more consistent.
\edef\contextformat {\jobname}
-\edef\contextversion{2017.05.12 22:40}
+\edef\contextversion{2017.05.25 12:50}
\edef\contextkind {beta}
%D For those who want to use this:
diff --git a/tex/context/base/mkiv/data-lst.lua b/tex/context/base/mkiv/data-lst.lua
index e4621a6e1..f061393e5 100644
--- a/tex/context/base/mkiv/data-lst.lua
+++ b/tex/context/base/mkiv/data-lst.lua
@@ -8,18 +8,17 @@ if not modules then modules = { } end modules ['data-lst'] = {
-- used in mtxrun, can be loaded later .. todo
-local rawget, type, next = rawget, type, next
+local type = type
+local concat, sortedhash = table.concat,table.sortedhash
-local find, concat, upper = string.find, table.concat, string.upper
-local fastcopy, sortedpairs = table.fastcopy, table.sortedpairs
+local resolvers = resolvers
+local listers = resolvers.listers or { }
+resolvers.listers = listers
-local resolvers = resolvers
-local listers = resolvers.listers or { }
-resolvers.listers = listers
+local resolveprefix = resolvers.resolve
-local resolveprefix = resolvers.resolve
-
-local report_lists = logs.reporter("resolvers","lists")
+local report_lists = logs.reporter("resolvers","lists")
+local report_resolved = logs.reporter("system","resolved")
local function tabstr(str)
if type(str) == 'table' then
@@ -30,41 +29,18 @@ local function tabstr(str)
end
function listers.variables(pattern)
- local instance = resolvers.instance
- local environment = instance.environment
- local variables = instance.variables
- local expansions = instance.expansions
- local pattern = upper(pattern or "")
- local configured = { }
- local order = instance.order
- for i=1,#order do
- for k, v in next, order[i] do
- if v ~= nil and configured[k] == nil then
- configured[k] = v
- end
- end
+ local result = resolvers.knownvariables(pattern)
+ for key, value in sortedhash(result) do
+ report_lists(key)
+ report_lists(" env: %s",tabstr(value.environment or "unset"))
+ report_lists(" var: %s",tabstr(value.variable or "unset"))
+ report_lists(" exp: %s",tabstr(value.expansion or "unset"))
+ report_lists(" res: %s",tabstr(value.resolved or "unset"))
end
- local env = fastcopy(environment)
- local var = fastcopy(variables)
- local exp = fastcopy(expansions)
- for key, value in sortedpairs(configured) do
- if key ~= "" and (pattern == "" or find(upper(key),pattern)) then
- report_lists(key)
- report_lists(" env: %s",tabstr(rawget(environment,key)) or "unset")
- report_lists(" var: %s",tabstr(configured[key]) or "unset")
- report_lists(" exp: %s",tabstr(expansions[key]) or "unset")
- report_lists(" res: %s",tabstr(resolveprefix(expansions[key])) or "unset")
- end
- end
- instance.environment = fastcopy(env)
- instance.variables = fastcopy(var)
- instance.expansions = fastcopy(exp)
end
-local report_resolved = logs.reporter("system","resolved")
-
function listers.configurations()
- local configurations = resolvers.instance.specification
+ local configurations = resolvers.configurationfiles()
for i=1,#configurations do
report_resolved("file : %s",resolveprefix(configurations[i]))
end
diff --git a/tex/context/base/mkiv/data-res.lua b/tex/context/base/mkiv/data-res.lua
index 4f171c445..d826d0987 100644
--- a/tex/context/base/mkiv/data-res.lua
+++ b/tex/context/base/mkiv/data-res.lua
@@ -6,15 +6,6 @@ if not modules then modules = { } end modules ['data-res'] = {
license = "see context related readme files",
}
--- In practice we will work within one tds tree, but i want to keep
--- the option open to build tools that look at multiple trees, which is
--- why we keep the tree specific data in a table. We used to pass the
--- instance but for practical purposes we now avoid this and use a
--- instance variable. We always have one instance active (sort of global).
-
--- I will reimplement this module ... way too fuzzy now and we can work
--- with some sensible constraints as it is only is used for context.
-
-- todo: cache:/// home:/// selfautoparent:/// (sometime end 2012)
local gsub, find, lower, upper, match, gmatch = string.gsub, string.find, string.lower, string.upper, string.match, string.gmatch
@@ -154,8 +145,7 @@ local suffixmap = resolvers.suffixmap
resolvers.defaultsuffixes = { "tex" } -- "mkiv", "cld" -- too tricky
-resolvers.instance = resolvers.instance or nil -- the current one (slow access)
-local instance = resolvers.instance or nil -- the current one (fast access)
+local instance = nil -- the current one (fast access)
-- An instance has an environment (coming from the outside, kept raw), variables
-- (coming from the configuration file), and expansions (variables with nested
@@ -218,7 +208,7 @@ local function expandedvariable(var)
return lpegmatch(variableexpander,var) or var
end
-function resolvers.newinstance() -- todo: all vars will become lowercase and alphanum only
+function resolvers.reset()
-- normally we only need one instance but for special cases we can (re)load one so
-- we stick to this model.
@@ -227,24 +217,27 @@ function resolvers.newinstance() -- todo: all vars will become lowercase and alp
report_resolving("creating instance")
end
- local environment, variables, expansions, order = allocate(), allocate(), allocate(), allocate()
+ local environment = { }
+ local variables = { }
+ local expansions = { }
+ local order = { }
- local newinstance = {
+ instance = {
environment = environment,
variables = variables,
expansions = expansions,
order = order,
- files = allocate(),
- setups = allocate(),
- found = allocate(),
- foundintrees = allocate(),
- hashes = allocate(),
- hashed = allocate(),
+ files = { },
+ setups = { },
+ found = { },
+ foundintrees = { },
+ hashes = { },
+ hashed = { },
pathlists = false,-- delayed
- specification = allocate(),
- lists = allocate(),
- data = allocate(), -- only for loading
- fakepaths = allocate(),
+ specification = { },
+ lists = { },
+ data = { }, -- only for loading
+ fakepaths = { },
remember = true,
diskcache = true,
renewcache = false,
@@ -295,18 +288,10 @@ function resolvers.newinstance() -- todo: all vars will become lowercase and alp
return v
end)
- return newinstance
-
-end
-
-function resolvers.setinstance(someinstance) -- only one instance is active
- instance = someinstance
- resolvers.instance = someinstance
- return someinstance
end
-function resolvers.reset()
- return resolvers.setinstance(resolvers.newinstance())
+function resolvers.initialized()
+ return instance ~= nil
end
local function reset_hashes()
@@ -501,10 +486,15 @@ local function load_configuration_files()
end
end
+function resolvers.configurationfiles()
+ return instance.specification or { }
+end
+
-- scheme magic ... database loading
local function load_file_databases()
- instance.loaderror, instance.files = false, allocate()
+ instance.loaderror = false
+ instance.files = { }
if not instance.renewcache then
local hashes = instance.hashes
for k=1,#hashes do
@@ -693,13 +683,13 @@ function resolvers.datastate()
end
function resolvers.variable(name)
- local name = name and lpegmatch(dollarstripper,name)
+ local name = name and lpegmatch(dollarstripper,name)
local result = name and instance.variables[name]
return result ~= nil and result or ""
end
function resolvers.expansion(name)
- local name = name and lpegmatch(dollarstripper,name)
+ local name = name and lpegmatch(dollarstripper,name)
local result = name and instance.expansions[name]
return result ~= nil and result or ""
end
@@ -745,7 +735,7 @@ end
local done = { }
-function resolvers.resetextrapath()
+function resolvers.resetextrapaths()
local ep = instance.extra_paths
if not ep then
done = { }
@@ -756,6 +746,10 @@ function resolvers.resetextrapath()
end
end
+function resolvers.getextrapaths()
+ return instance.extra_paths or { }
+end
+
function resolvers.registerextrapath(paths,subpaths)
if not subpaths or subpaths == "" then
if not paths or path == "" then
@@ -932,11 +926,7 @@ function resolvers.cleanedpathlist(v) -- can be cached if needed
end
function resolvers.expandbraces(str) -- output variable and brace expansion of STRING
--- local ori = resolvers.variable(str)
--- if ori == "" then
- local ori = str
--- end
- local pth = expandedpathfromlist(resolvers.splitpath(ori))
+ local pth = expandedpathfromlist(resolvers.splitpath(str))
return joinpath(pth)
end
@@ -951,6 +941,20 @@ function resolvers.registerfilehash(name,content,someerror)
end
end
+function resolvers.getfilehashes()
+ return instance and instance.files or { }
+end
+
+function resolvers.gethashes()
+ return instance and instance.hashes or { }
+end
+
+function resolvers.renewcache()
+ if instance then
+ instance.renewcache = true
+ end
+end
+
local function isreadable(name)
local readable = isfile(name) -- not file.is_readable(name) asit can be a dir
if trace_detail then
@@ -1029,18 +1033,29 @@ function resolvers.registerintrees(filename,format,filetype,usedmethod,foundname
if usedmethod == "direct" and filename == foundname and fit[foundname] then
-- just an extra lookup after a test on presence
else
+ local collapsed = collapsepath(foundname,true)
local t = {
filename = filename,
- format = format ~= "" and format or nil,
+ format = format ~= "" and format or nil,
filetype = filetype ~= "" and filetype or nil,
usedmethod = usedmethod,
foundname = foundname,
+ fullname = collapsed,
}
fit[foundname] = t
foundintrees[#foundintrees+1] = t
end
end
+function resolvers.foundintrees()
+ return instance.foundintrees or { }
+end
+
+function resolvers.foundintree(fullname)
+ local f = fit[fullname]
+ return f and f.usedmethod == "database"
+end
+
-- split the next one up for readability (but this module needs a cleanup anyway)
local function can_be_dir(name) -- can become local
@@ -1062,15 +1077,17 @@ local preparetreepattern = Cs((P(".")/"%%." + P("-")/"%%-" + P(1))^0 * Cc("$"))
local collect_instance_files
local function find_analyze(filename,askedformat,allresults)
- local filetype, wantedfiles, ext = '', { }, suffixonly(filename)
+ local filetype = ''
+ local filesuffix = suffixonly(filename)
+ local wantedfiles = { }
-- too tricky as filename can be bla.1.2.3:
--
- -- if not suffixmap[ext] then
+ -- if not suffixmap[filesuffix] then
-- wantedfiles[#wantedfiles+1] = filename
-- end
wantedfiles[#wantedfiles+1] = filename
if askedformat == "" then
- if ext == "" or not suffixmap[ext] then
+ if filesuffix == "" or not suffixmap[filesuffix] then
local defaultsuffixes = resolvers.defaultsuffixes
local formatofsuffix = resolvers.formatofsuffix
for i=1,#defaultsuffixes do
@@ -1088,7 +1105,7 @@ local function find_analyze(filename,askedformat,allresults)
end
end
else
- if ext == "" or not suffixmap[ext] then
+ if filesuffix == "" or not suffixmap[filesuffix] then
local format_suffixes = suffixes[askedformat]
if format_suffixes then
for i=1,#format_suffixes do
@@ -1160,11 +1177,10 @@ local function find_qualified(filename,allresults,askedformat,alsostripped) -- t
if alsostripped and suffix and suffix ~= "" then
-- try to find in tree (no suffix manipulation), here we search for the
-- matching last part of the name
- local basename = filebasename(filename)
- local pattern = lpegmatch(preparetreepattern,filename)
- -- messy .. to be sorted out
+ local basename = filebasename(filename)
+ local pattern = lpegmatch(preparetreepattern,filename)
local savedformat = askedformat
- local format = savedformat or ""
+ local format = savedformat or ""
if format == "" then
askedformat = resolvers.formatofsuffix(suffix)
end
@@ -1269,7 +1285,7 @@ end
local function find_intree(filename,filetype,wantedfiles,allresults)
local pathlists = instance.pathlists
if not pathlists then
- pathlists = setmetatableindex(allocate(),makepathlist)
+ pathlists = setmetatableindex({ },makepathlist)
instance.pathlists = pathlists
end
local pathlist = pathlists[filetype]
@@ -1588,9 +1604,9 @@ function resolvers.findpath(filename,filetype)
end
local function findgivenfiles(filename,allresults)
- local base = filebasename(filename)
- local result = { }
- local hashes = instance.hashes
+ local base = filebasename(filename)
+ local result = { }
+ local hashes = instance.hashes
--
local function okay(hash,path,name)
local found = methodhandler('concatinators',hash.type,hash.name,path,name)
@@ -1645,12 +1661,12 @@ end
-- why bother
local function findwildcardfiles(filename,allresults,result)
- local result = result or { }
- local base = filebasename(filename)
- local dirn = filedirname(filename)
- local path = lower(lpegmatch(makewildcard,dirn) or dirn)
- local name = lower(lpegmatch(makewildcard,base) or base)
- local files = instance.files
+ local result = result or { }
+ local base = filebasename(filename)
+ local dirn = filedirname(filename)
+ local path = lower(lpegmatch(makewildcard,dirn) or dirn)
+ local name = lower(lpegmatch(makewildcard,base) or base)
+ local files = instance.files
--
if find(name,"*",1,true) then
local hashes = instance.hashes
@@ -1736,15 +1752,23 @@ function resolvers.automount()
-- implemented later
end
-function resolvers.load(option)
+function resolvers.starttiming()
statistics.starttiming(instance)
+end
+
+function resolvers.stoptiming()
+ statistics.stoptiming(instance)
+end
+
+function resolvers.load(option)
+ resolvers.starttiming()
identify_configuration_files()
load_configuration_files()
if option ~= "nofiles" then
load_databases()
resolvers.automount()
end
- statistics.stoptiming(instance)
+ resolvers.stoptiming()
local files = instance.files
return files and next(files) and true
end
@@ -1851,7 +1875,6 @@ function resolvers.booleanvariable(str,default)
end
function resolvers.dowithfilesintree(pattern,handle,before,after) -- will move, can be a nice iterator instead
- local instance = resolvers.instance
local hashes = instance.hashes
for i=1,#hashes do
local hash = hashes[i]
@@ -1891,3 +1914,31 @@ resolvers.obsolete = obsolete
resolvers.find_file = resolvers.findfile obsolete.find_file = resolvers.findfile
resolvers.find_files = resolvers.findfiles obsolete.find_files = resolvers.findfiles
+
+-- moved here
+
+function resolvers.knownvariables(pattern)
+ if instance then
+ local environment = instance.environment
+ local variables = instance.variables
+ local expansions = instance.expansions
+ local order = instance.order
+ local pattern = upper(pattern or "")
+ local result = { }
+ for i=1,#order do
+ for key in next, order[i] do
+ if result[key] == nil and key ~= "" and (pattern == "" or find(upper(key),pattern)) then
+ result[key] = {
+ environment = rawget(environment,key),
+ variable = key,
+ expansion = expansions[key],
+ resolved = resolveprefix(expansions[key]),
+ }
+ end
+ end
+ end
+ return result
+ else
+ return { }
+ end
+end
diff --git a/tex/context/base/mkiv/data-tmp.lua b/tex/context/base/mkiv/data-tmp.lua
index eabfce96e..e1903fd82 100644
--- a/tex/context/base/mkiv/data-tmp.lua
+++ b/tex/context/base/mkiv/data-tmp.lua
@@ -190,7 +190,7 @@ function caches.usedpaths(separator)
end
function caches.configfiles()
- return concat(resolvers.instance.specification,";")
+ return concat(resolvers.configurationfiles(),";")
end
function caches.hashed(tree)
diff --git a/tex/context/base/mkiv/data-use.lua b/tex/context/base/mkiv/data-use.lua
index 930c5739f..65fcc5dc3 100644
--- a/tex/context/base/mkiv/data-use.lua
+++ b/tex/context/base/mkiv/data-use.lua
@@ -24,7 +24,7 @@ function resolvers.automount(usecache)
mountpaths = caches.getreadablepaths("mount")
end
if mountpaths and #mountpaths > 0 then
- statistics.starttiming(resolvers.instance)
+ resolvers.starttiming()
for k=1,#mountpaths do
local root = mountpaths[k]
local f = io.open(root.."/url.tmi")
@@ -45,7 +45,7 @@ function resolvers.automount(usecache)
f:close()
end
end
- statistics.stoptiming(resolvers.instance)
+ resolvers.stoptiming()
end
end
diff --git a/tex/context/base/mkiv/data-zip.lua b/tex/context/base/mkiv/data-zip.lua
index 2be88e0fc..32666bef2 100644
--- a/tex/context/base/mkiv/data-zip.lua
+++ b/tex/context/base/mkiv/data-zip.lua
@@ -203,17 +203,16 @@ function resolvers.usezipfile(archive)
if archive and not registeredfiles[archive] then
local z = zip.openarchive(archive)
if z then
- local instance = resolvers.instance
local tree = url.query(specification.query).tree or ""
if trace_locating then
report_zip("registering: archive %a",archive)
end
- statistics.starttiming(instance)
+ resolvers.starttiming()
resolvers.prependhash('zip',archive)
resolvers.extendtexmfvariable(archive) -- resets hashes too
registeredfiles[archive] = z
- instance.files[archive] = resolvers.registerzipfile(z,tree)
- statistics.stoptiming(instance)
+ resolvers.registerfilehash(archive,resolvers.registerzipfile(z,tree))
+ resolvers.stoptiming()
elseif trace_locating then
report_zip("registering: unknown archive %a",archive)
end
diff --git a/tex/context/base/mkiv/file-job.lua b/tex/context/base/mkiv/file-job.lua
index c7c36a03f..81f0753a0 100644
--- a/tex/context/base/mkiv/file-job.lua
+++ b/tex/context/base/mkiv/file-job.lua
@@ -55,7 +55,8 @@ local hasscheme = url.hasscheme
local jobresolvers = resolvers.jobs
local registerextrapath = resolvers.registerextrapath
-local resetextrapath = resolvers.resetextrapath
+local resetextrapaths = resolvers.resetextrapaths
+local getextrapaths = resolvers.getextrapath
local pushextrapath = resolvers.pushextrapath
local popextrapath = resolvers.popextrapath
@@ -147,14 +148,14 @@ implement {
name = "resetpath",
actions = function()
report_jobfiles("resetting path")
- resetextrapath()
+ resetextrapaths()
end
}
implement {
name = "allinputpaths",
actions = function()
- context(concat(resolvers.instance.extra_paths or { },","))
+ context(concat(getextrapaths(),","))
end
}
@@ -1100,7 +1101,7 @@ local report_file = logs.reporter("used file")
local report_option = logs.reporter("used option")
luatex.registerstopactions(function()
- local foundintrees = resolvers.instance.foundintrees
+ local foundintrees = resolvers.foundintrees()
if #foundintrees > 0 then
logspushtarget("logfile")
logsnewline()
diff --git a/tex/context/base/mkiv/file-res.lua b/tex/context/base/mkiv/file-res.lua
index 44117ed46..531d365da 100644
--- a/tex/context/base/mkiv/file-res.lua
+++ b/tex/context/base/mkiv/file-res.lua
@@ -78,7 +78,7 @@ local function readfilename(specification,backtrack,treetoo)
end
end
if not fnd then
- local paths = resolvers.instance.extra_paths
+ local paths = resolvers.getextrapaths()
if paths then
for i=1,#paths do
for i=1,#names do
diff --git a/tex/context/base/mkiv/font-col.mkvi b/tex/context/base/mkiv/font-col.mkvi
index 88a3ff941..b13047e50 100644
--- a/tex/context/base/mkiv/font-col.mkvi
+++ b/tex/context/base/mkiv/font-col.mkvi
@@ -100,7 +100,7 @@
{\doifelsenothing{#3}%
{\definedfont[#2 at #4sp]}%
{\definedfont[#2*#3\space at #4\scaledpoint]}%
- \ctxlua{mathematics.registerfallbackid(#1,\fontid\font)}}
+ \clf_registerfontfallbackid#1\space\fontid\font\space{#2}}
% \def\font_fallbacks_finish_math
% {\ctxlua{mathematics.finishfallbacks()}}
diff --git a/tex/context/base/mkiv/font-def.lua b/tex/context/base/mkiv/font-def.lua
index c8394badf..a362d8967 100644
--- a/tex/context/base/mkiv/font-def.lua
+++ b/tex/context/base/mkiv/font-def.lua
@@ -21,7 +21,6 @@ local trace_defining = false trackers .register("fonts.defining", function
local directive_embedall = false directives.register("fonts.embedall", function(v) directive_embedall = v end)
trackers.register("fonts.loading", "fonts.defining", "otf.loading", "afm.loading", "tfm.loading")
-trackers.register("fonts.all", "fonts.*", "otf.*", "afm.*", "tfm.*")
local report_defining = logs.reporter("fonts","defining")
diff --git a/tex/context/base/mkiv/font-ext.lua b/tex/context/base/mkiv/font-ext.lua
index 6edfe7025..965b6e6dc 100644
--- a/tex/context/base/mkiv/font-ext.lua
+++ b/tex/context/base/mkiv/font-ext.lua
@@ -1296,3 +1296,52 @@ do -- another hack for a crappy font
}
end
+
+do
+
+ local tounicode = fonts.mappings.tounicode
+
+ local function check(tfmdata,key,value)
+ if value == "ligatures" then
+ local private = fonts.constructors and fonts.constructors.privateoffset or 0xF0000
+ local collected = fonts.handlers.otf.readers.getcomponents(tfmdata.shared.rawdata)
+ if collected and next(collected)then
+ for unicode, char in next, tfmdata.characters do
+ if true then -- if unicode >= private or (unicode >= 0xE000 and unicode <= 0xF8FF) then
+ local u = collected[unicode]
+ if u then
+ local n = #u
+ for i=1,n do
+ if u[i] > private then
+ n = 0
+ break
+ end
+ end
+ if n > 0 then
+ if n == 1 then
+ u = u[1]
+ end
+ char.unicode = u
+ char.tounicode = tounicode(u)
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+
+ -- forceunicodes=ligatures : aggressive lig resolving (e.g. for emoji)
+ --
+ -- kind of like: \enabletrackers[fonts.mapping.forceligatures]
+
+ registerotffeature {
+ name = "forceunicodes",
+ description = "forceunicodes",
+ manipulators = {
+ base = check,
+ node = check,
+ }
+ }
+
+end
diff --git a/tex/context/base/mkiv/font-gbn.lua b/tex/context/base/mkiv/font-gbn.lua
index 1f8df642c..6742f90fb 100644
--- a/tex/context/base/mkiv/font-gbn.lua
+++ b/tex/context/base/mkiv/font-gbn.lua
@@ -71,7 +71,8 @@ function nodes.handlers.setbasemodepass(v)
basemodepass = v
end
-function nodes.handlers.nodepass(head)
+-------- nodes.handlers.nodepass(head)
+function nodes.handlers.nodepass(head,groupcode,size,packtype,direction)
local fontdata = fonts.hashes.identifiers
if fontdata then
local nuthead = tonut(head)
@@ -81,6 +82,7 @@ function nodes.handlers.nodepass(head)
local basefont = nil
local variants = nil
local redundant = nil
+ local nofused = 0
for n in traverse_id(glyph_code,nuthead) do
local font = getfont(n)
if font ~= prevfont then
@@ -97,6 +99,7 @@ function nodes.handlers.nodepass(head)
local processors = shared.processes
if processors and #processors > 0 then
usedfonts[font] = processors
+ nofused = nofused + 1
elseif basemodepass then
basefont = { n, nil }
basefonts[#basefonts+1] = basefont
@@ -178,6 +181,7 @@ function nodes.handlers.nodepass(head)
local processors = shared.processes
if processors and #processors > 0 then
usedfonts[font] = processors
+ nofused = nofused + 1
end
end
end
@@ -189,7 +193,7 @@ function nodes.handlers.nodepass(head)
if next(usedfonts) then
for font, processors in next, usedfonts do
for i=1,#processors do
- head = processors[i](head,font,0) or head
+ head = processors[i](head,font,0,direction,nofused) or head
end
end
end
@@ -241,9 +245,9 @@ local basepass = nodes.handlers.basepass
local injectpass = nodes.injections.handler
local protectpass = nodes.handlers.protectglyphs
-function nodes.simple_font_handler(head)
+function nodes.simple_font_handler(head,groupcode,size,packtype,direction)
if head then
- head = nodepass(head)
+ head = nodepass(head,groupcode,size,packtype,direction)
head = injectpass(head)
if not basemodepass then
head = basepass(head)
diff --git a/tex/context/base/mkiv/font-map.lua b/tex/context/base/mkiv/font-map.lua
index cf369708c..706c5b709 100644
--- a/tex/context/base/mkiv/font-map.lua
+++ b/tex/context/base/mkiv/font-map.lua
@@ -19,9 +19,9 @@ local trace_mapping = false trackers.register("fonts.mapping", function(v) trac
local report_fonts = logs.reporter("fonts","loading") -- not otf only
--- force_ligatures is true per 2017-04-20 so that these emoji's with bad names work too
+-- force_ligatures was true for a while so that these emojis with bad names work too
-local force_ligatures = true directives.register("fonts.mapping.forceligatures",function(v) force_ligatures = v end)
+local force_ligatures = false directives.register("fonts.mapping.forceligatures",function(v) force_ligatures = v end)
local fonts = fonts or { }
local mappings = fonts.mappings or { }
@@ -279,6 +279,9 @@ do
ffl = { name = "f_f_l", unicode = { 0x66, 0x66, 0x6C }, mess = 0xFB04 },
fj = { name = "f_j", unicode = { 0x66, 0x6A } },
fk = { name = "f_k", unicode = { 0x66, 0x6B } },
+
+ -- endash = { name = "endash", unicode = 0x2013, mess = 0x2013 },
+ -- emdash = { name = "emdash", unicode = 0x2014, mess = 0x2014 },
}
local o = { }
@@ -299,7 +302,7 @@ do
end
-function mappings.addtounicode(data,filename,checklookups)
+function mappings.addtounicode(data,filename,checklookups,forceligatures)
local resources = data.resources
local unicodes = resources.unicodes
if not unicodes then
@@ -517,22 +520,26 @@ function mappings.addtounicode(data,filename,checklookups)
if not collected then
-- move on
- elseif force_ligatures then
+ elseif forceligatures or force_ligatures then
for i=1,#dlist do
local du = dlist[i]
- local u = collected[du] -- always tables
- if u then
- resolve(descriptions[du],u)
+ if du >= private or (du >= 0xE000 and du <= 0xF8FF) then
+ local u = collected[du] -- always tables
+ if u then
+ resolve(descriptions[du],u)
+ end
end
end
else
for i=1,#dlist do
- local du = dlist[i]
- local glyph = descriptions[du]
- if glyph.class == "ligature" and not glyph.unicode then
- local u = collected[du] -- always tables
- if u then
- resolve(glyph,u)
+ local du = dlist[i]
+ if du >= private or (du >= 0xE000 and du <= 0xF8FF) then
+ local glyph = descriptions[du]
+ if glyph.class == "ligature" and not glyph.unicode then
+ local u = collected[du] -- always tables
+ if u then
+ resolve(glyph,u)
+ end
end
end
end
diff --git a/tex/context/base/mkiv/font-ocl.lua b/tex/context/base/mkiv/font-ocl.lua
index c166f5ce4..2ecf1ba42 100644
--- a/tex/context/base/mkiv/font-ocl.lua
+++ b/tex/context/base/mkiv/font-ocl.lua
@@ -40,7 +40,7 @@ else
function otf.getactualtext(s)
return
- "/Span << /ActualText <feff" .. n .. "> >> BDC",
+ "/Span << /ActualText <feff" .. s .. "> >> BDC",
"EMC"
end
diff --git a/tex/context/base/mkiv/font-otc.lua b/tex/context/base/mkiv/font-otc.lua
index 5d879ec1d..034cba613 100644
--- a/tex/context/base/mkiv/font-otc.lua
+++ b/tex/context/base/mkiv/font-otc.lua
@@ -29,6 +29,7 @@ local normalized = {
multiple = "multiple",
kern = "kern",
pair = "pair",
+ single = "single",
chainsubstitution = "chainsubstitution",
chainposition = "chainposition",
}
@@ -40,6 +41,7 @@ local types = {
multiple = "gsub_multiple",
kern = "gpos_pair",
pair = "gpos_pair",
+ single = "gpos_single",
chainsubstitution = "gsub_contextchain",
chainposition = "gpos_contextchain",
}
@@ -403,6 +405,8 @@ local function addfeature(data,feature,specifications)
return coverage
end
+ local prepare_single = prepare_pair
+
local function prepare_chain(list,featuretype,sublookups)
-- todo: coveractions
local rules = list.rules
@@ -627,6 +631,9 @@ local function addfeature(data,feature,specifications)
elseif featuretype == "pair" then
format = "pair"
coverage = prepare_pair(list,featuretype)
+ elseif featuretype == "single" then
+ format = "single"
+ coverage = prepare_single(list,featuretype)
end
if coverage and next(coverage) then
nofsteps = nofsteps + 1
@@ -666,6 +673,10 @@ local function addfeature(data,feature,specifications)
category = "gpos"
format = "pair"
coverage = prepare_pair(list,featuretype)
+ elseif featuretype == "single" then
+ category = "gpos"
+ format = "single"
+ coverage = prepare_single(list,featuretype)
elseif featuretype == "chainsubstitution" then
category = "gsub"
coverage = prepare_chain(list,featuretype,sublookups)
diff --git a/tex/context/base/mkiv/font-otj.lua b/tex/context/base/mkiv/font-otj.lua
index 634f8a83c..2c79500e7 100644
--- a/tex/context/base/mkiv/font-otj.lua
+++ b/tex/context/base/mkiv/font-otj.lua
@@ -1434,6 +1434,35 @@ function injections.isspace(n,threshold,id)
end
end
+-- We have a plugin so that Kai can use the next in plain. Such a plugin is rather application
+-- specific.
+--
+-- local getboth = nodes.direct.getboth
+-- local getid = nodes.direct.getid
+-- local getprev = nodes.direct.getprev
+-- local getnext = nodes.direct.getnext
+--
+-- local whatsit_code = nodes.nodecodes.whatsit
+-- local glyph_code = nodes.nodecodes.glyph
+--
-- local function getspaceboth(n) -- fragile: what if prev/next has no width field
+-- local prev, next = getboth(n)
+-- while prev and (getid(prev) == whatsit_code or (getwidth(prev) == 0 and getid(prev) ~= glyph_code)) do
+-- prev = getprev(prev)
+-- end
+-- while next and (getid(next) == whatsit_code or (getwidth(next) == 0 and getid(next) ~= glyph_code)) do
+-- next = getnext(next)
+-- end
+-- end
+--
+-- injections.installgetspaceboth(getspaceboth)
+
+local getspaceboth = getboth
+
+function injections.installgetspaceboth(gb)
+ getspaceboth = gb or getboth
+end
+
local function injectspaces(head)
if not triggers then
@@ -1458,9 +1487,9 @@ local function injectspaces(head)
end
for n in traverse_id(glue_code,tonut(head)) do
- local prev, next = getboth(n)
- local prevchar = ischar(prev)
- local nextchar = ischar(next)
+ local prev, next = getspaceboth(n)
+ local prevchar = prev and ischar(prev)
+ local nextchar = next and ischar(next)
if nextchar then
local font = getfont(next)
local trig = triggers[font]
diff --git a/tex/context/base/mkiv/font-otl.lua b/tex/context/base/mkiv/font-otl.lua
index 9400096a0..bbe05304c 100644
--- a/tex/context/base/mkiv/font-otl.lua
+++ b/tex/context/base/mkiv/font-otl.lua
@@ -768,6 +768,7 @@ otf.coverup = {
multiple = justset,
kern = justset,
pair = justset,
+ single = justset,
ligature = function(coverage,unicode,ligature)
local first = ligature[1]
local tree = coverage[first]
diff --git a/tex/context/base/mkiv/font-otn.lua b/tex/context/base/mkiv/font-otn.lua
index ace7bf12b..d48021347 100644
--- a/tex/context/base/mkiv/font-otn.lua
+++ b/tex/context/base/mkiv/font-otn.lua
@@ -194,15 +194,13 @@ local report_process = logs.reporter("fonts","otf process")
local report_prepare = logs.reporter("fonts","otf prepare")
local report_run = logs.reporter("fonts","otf run")
-registertracker("otf.verbose_chain", function(v) otf.setcontextchain(v and "verbose") end)
-registertracker("otf.normal_chain", function(v) otf.setcontextchain(v and "normal") end)
+registertracker("otf.substitutions", "otf.singles","otf.multiples","otf.alternatives","otf.ligatures")
+registertracker("otf.positions", "otf.marks","otf.kerns","otf.cursive")
+registertracker("otf.actions", "otf.substitutions","otf.positions")
+registertracker("otf.sample", "otf.steps","otf.substitutions","otf.positions","otf.analyzing")
-registertracker("otf.replacements", "otf.singles,otf.multiples,otf.alternatives,otf.ligatures")
-registertracker("otf.positions","otf.marks,otf.kerns,otf.cursive")
-registertracker("otf.actions","otf.replacements,otf.positions")
-registertracker("otf.injections","nodes.injections")
-
-registertracker("*otf.sample","otf.steps,otf.actions,otf.analyzing")
+registertracker("otf.chain.verbose", function(v) otf.setcontextchain(v and "verbose") end)
+registertracker("otf.chain.normal", function(v) otf.setcontextchain(v and "normal") end)
local nuts = nodes.nuts
local tonode = nuts.tonode
diff --git a/tex/context/base/mkiv/font-otr.lua b/tex/context/base/mkiv/font-otr.lua
index 3addf3324..4f93c5579 100644
--- a/tex/context/base/mkiv/font-otr.lua
+++ b/tex/context/base/mkiv/font-otr.lua
@@ -1130,9 +1130,10 @@ readers.hmtx = function(f,fontdata,specification)
if width ~= 0 then
glyph.width = width
end
- -- if leftsidebearing ~= 0 then
- -- glyph.lsb = leftsidebearing
- -- end
+-- for now
+-- if leftsidebearing ~= 0 then
+-- glyph.lsb = leftsidebearing
+-- end
end
-- The next can happen in for instance a monospace font or in a cjk font
-- with fixed widths.
diff --git a/tex/context/base/mkiv/font-ots.lua b/tex/context/base/mkiv/font-ots.lua
index 16c2ce735..1230475b3 100644
--- a/tex/context/base/mkiv/font-ots.lua
+++ b/tex/context/base/mkiv/font-ots.lua
@@ -155,11 +155,10 @@ local report_process = logs.reporter("fonts","otf process")
local report_warning = logs.reporter("fonts","otf warning")
local report_run = logs.reporter("fonts","otf run")
-registertracker("otf.replacements", "otf.singles,otf.multiples,otf.alternatives,otf.ligatures")
-registertracker("otf.positions","otf.marks,otf.kerns,otf.cursive")
-registertracker("otf.actions","otf.replacements,otf.positions")
-registertracker("otf.injections","nodes.injections")
-registertracker("otf.sample","otf.steps,otf.actions,otf.analyzing")
+registertracker("otf.substitutions", "otf.singles","otf.multiples","otf.alternatives","otf.ligatures")
+registertracker("otf.positions", "otf.marks","otf.kerns","otf.cursive")
+registertracker("otf.actions", "otf.substitutions","otf.positions")
+registertracker("otf.sample", "otf.steps","otf.substitutions","otf.positions","otf.analyzing")
local nuts = nodes.nuts
local tonode = nuts.tonode
@@ -192,6 +191,7 @@ local getdir = nuts.getdir
local getwidth = nuts.getwidth
local ischar = nuts.is_char
+local usesfont = nuts.uses_font
local insert_node_after = nuts.insert_after
local copy_node = nuts.copy
@@ -932,7 +932,8 @@ end
function handlers.gpos_single(head,start,dataset,sequence,kerns,rlmode,step,i,injection)
local startchar = getchar(start)
- if step.format == "pair" then
+-- if step.format == "pair" then
+ if step.format == "pair" or type(kerns) == "table" then
local dx, dy, w, h = setpair(start,factor,rlmode,sequence.flags[4],kerns,injection)
if trace_kerns then
logprocess("%s: shifting single %s by (%p,%p) and correction (%p,%p)",pref(dataset,sequence),gref(startchar),dx,dy,w,h)
@@ -3830,132 +3831,213 @@ otf.helpers.pardirstate = pardirstate
-- optimizations the principles of processing the features hasn't changed much since
-- the beginning.
-local function featuresprocessor(head,font,attr,direction)
+do
- local sequences = sequencelists[font] -- temp hack
+ -- experimental speedup (only with hyphenated text and multiple fonts per processing)
+ --
+ -- at some point this might become true by default
- if not sequencelists then
- return head, false
- end
+ local fastdisc = false directives.register("otf.fastdisc",function(v) fastdisc = v end)
- nesting = nesting + 1
+ function otf.featuresprocessor(head,font,attr,direction,n)
- if nesting == 1 then
- currentfont = font
- tfmdata = fontdata[font]
- descriptions = tfmdata.descriptions -- only needed in gref so we could pass node there instead
- characters = tfmdata.characters -- but this branch is not entered that often anyway
- local resources = tfmdata.resources
- marks = resources.marks
- classes = resources.classes
- threshold,
- factor = getthreshold(font)
- checkmarks = tfmdata.properties.checkmarks
+ local sequences = sequencelists[font] -- temp hack
- elseif currentfont ~= font then
+ if not sequencelists then
+ return head, false
+ end
- report_warning("nested call with a different font, level %s, quitting",nesting)
- nesting = nesting - 1
- return head, false
+ nesting = nesting + 1
- end
+ if nesting == 1 then
+ currentfont = font
+ tfmdata = fontdata[font]
+ descriptions = tfmdata.descriptions -- only needed in gref so we could pass node there instead
+ characters = tfmdata.characters -- but this branch is not entered that often anyway
+ local resources = tfmdata.resources
+ marks = resources.marks
+ classes = resources.classes
+ threshold,
+ factor = getthreshold(font)
+ checkmarks = tfmdata.properties.checkmarks
- -- some 10% faster when no dynamics but hardly measureable on real runs .. but: it only
- -- works when we have no other dynamics as otherwise the zero run will be applied to the
- -- whole stream for which we then need to pass another variable which we won't
+ elseif currentfont ~= font then
- -- if attr == 0 then
- -- attr = false
- -- end
+ report_warning("nested call with a different font, level %s, quitting",nesting)
+ nesting = nesting - 1
+ return head, false
- head = tonut(head)
+ end
- if trace_steps then
- checkstep(head)
- end
+ -- some 10% faster when no dynamics but hardly measurable on real runs .. but: it only
+ -- works when we have no other dynamics as otherwise the zero run will be applied to the
+ -- whole stream for which we then need to pass another variable which we won't
- local initialrl = direction == "TRT" and -1 or 0
+ -- if attr == 0 then
+ -- attr = false
+ -- end
- local done = false
- local datasets = otf.dataset(tfmdata,font,attr)
- local dirstack = { } -- could move outside function but we can have local runs
- sweephead = { }
-
- -- Keeping track of the headnode is needed for devanagari. (I generalized it a bit
- -- so that multiple cases are also covered.) We could prepend a temp node.
-
- -- We don't goto the next node when a disc node is created so that we can then treat
- -- the pre, post and replace. It's a bit of a hack but works out ok for most cases.
-
- for s=1,#datasets do
- local dataset = datasets[s]
- ----- featurevalue = dataset[1] -- todo: pass to function instead of using a global
- local attribute = dataset[2]
- local sequence = dataset[3] -- sequences[s] -- also dataset[5]
- local rlparmode = initialrl
- local topstack = 0
- local typ = sequence.type
- local gpossing = typ == "gpos_single" or typ == "gpos_pair" -- store in dataset
- local handler = handlers[typ]
- local steps = sequence.steps
- local nofsteps = sequence.nofsteps
- if not steps then
- -- this permits injection, watch the different arguments
- local h, d, ok = handler(head,head,dataset,sequence,nil,nil,nil,0,font,attr)
- if ok then
- done = true
- if h then
- head = h
- end
- end
- elseif typ == "gsub_reversecontextchain" then
- -- this is a limited case, no special treatments like 'init' etc
- local start = find_node_tail(head)
- local rlmode = 0 -- how important is this .. do we need to check for dir?
- while start do
- local char = ischar(start,font)
- if char then
- local a -- happens often so no assignment is faster
- if attr then
- a = getattr(start,0)
+ head = tonut(head)
+
+ if trace_steps then
+ checkstep(head)
+ end
+
+ local initialrl = direction == "TRT" and -1 or 0
+
+ local done = false
+ local datasets = otf.dataset(tfmdata,font,attr)
+ local dirstack = { } -- could move outside function but we can have local runs
+ sweephead = { }
+
+ -- Keeping track of the headnode is needed for devanagari. (I generalized it a bit
+ -- so that multiple cases are also covered.) We could prepend a temp node.
+
+ -- We don't goto the next node when a disc node is created so that we can then treat
+ -- the pre, post and replace. It's a bit of a hack but works out ok for most cases.
+
+ local discs = fastdisc and n and n > 1 and setmetatableindex(function(t,k)
+ local v = usesfont(k,font)
+ t[k] = v
+ return v
+ end)
+
+ for s=1,#datasets do
+ local dataset = datasets[s]
+ ----- featurevalue = dataset[1] -- todo: pass to function instead of using a global
+ local attribute = dataset[2]
+ local sequence = dataset[3] -- sequences[s] -- also dataset[5]
+ local rlparmode = initialrl
+ local topstack = 0
+ local typ = sequence.type
+ local gpossing = typ == "gpos_single" or typ == "gpos_pair" -- store in dataset
+ local handler = handlers[typ]
+ local steps = sequence.steps
+ local nofsteps = sequence.nofsteps
+ if not steps then
+ -- this permits injection, watch the different arguments
+ local h, d, ok = handler(head,head,dataset,sequence,nil,nil,nil,0,font,attr)
+ if ok then
+ done = true
+ if h then
+ head = h
end
- if not a or (a == attr) then
- for i=1,nofsteps do
- local step = steps[i]
- local lookupcache = step.coverage
- if lookupcache then
- local lookupmatch = lookupcache[char]
- if lookupmatch then
- -- todo: disc?
- local ok
- head, start, ok = handler(head,start,dataset,sequence,lookupmatch,rlmode,step,i)
- if ok then
- done = true
- break
+ end
+ elseif typ == "gsub_reversecontextchain" then
+ -- this is a limited case, no special treatments like 'init' etc
+ local start = find_node_tail(head)
+ local rlmode = 0 -- how important is this .. do we need to check for dir?
+ while start do
+ local char = ischar(start,font)
+ if char then
+ local a -- happens often so no assignment is faster
+ if attr then
+ a = getattr(start,0)
+ end
+ if not a or (a == attr) then
+ for i=1,nofsteps do
+ local step = steps[i]
+ local lookupcache = step.coverage
+ if lookupcache then
+ local lookupmatch = lookupcache[char]
+ if lookupmatch then
+ -- todo: disc?
+ local ok
+ head, start, ok = handler(head,start,dataset,sequence,lookupmatch,rlmode,step,i)
+ if ok then
+ done = true
+ break
+ end
end
+ else
+ report_missing_coverage(dataset,sequence)
end
- else
- report_missing_coverage(dataset,sequence)
end
- end
- if start then
+ if start then
+ start = getprev(start)
+ end
+ else
start = getprev(start)
end
else
start = getprev(start)
end
- else
- start = getprev(start)
end
- end
- else
- local start = head
- local rlmode = initialrl
- if nofsteps == 1 then -- happens often
- local step = steps[1]
- local lookupcache = step.coverage
- if not lookupcache then
- report_missing_coverage(dataset,sequence)
+ else
+ local start = head
+ local rlmode = initialrl
+ if nofsteps == 1 then -- happens often
+ local step = steps[1]
+ local lookupcache = step.coverage
+ if not lookupcache then
+ report_missing_coverage(dataset,sequence)
+ else
+ while start do
+ local char, id = ischar(start,font)
+ if char then
+ -- local a = attr and getattr(start,0)
+ -- if a then
+ -- a = (a == attr) and (not attribute or getprop(start,a_state) == attribute)
+ -- else
+ -- a = not attribute or getprop(start,a_state) == attribute
+ -- end
+ local a -- happens often so no assignment is faster
+ if attr then
+ if getattr(start,0) == attr and (not attribute or getprop(start,a_state) == attribute) then
+ a = true
+ end
+ elseif not attribute or getprop(start,a_state) == attribute then
+ a = true
+ end
+ if a then
+ local lookupmatch = lookupcache[char]
+ if lookupmatch then
+ local ok
+ head, start, ok = handler(head,start,dataset,sequence,lookupmatch,rlmode,step,1)
+ if ok then
+ done = true
+ end
+ end
+ if start then
+ start = getnext(start)
+ end
+ else
+ start = getnext(start)
+ end
+ elseif char == false then
+ -- whatever glyph
+ start = getnext(start)
+ elseif id == glue_code then
+ -- happens often
+ start = getnext(start)
+ elseif id == disc_code then
+ if not discs or discs[start] == true then
+ local ok
+ if gpossing then
+ start, ok = kernrun(start,k_run_single, font,attr,lookupcache,step,dataset,sequence,rlmode,handler)
+ elseif typ == "gsub_ligature" then
+ start, ok = testrun(start,t_run_single,c_run_single,font,attr,lookupcache,step,dataset,sequence,rlmode,handler)
+ else
+ start, ok = comprun(start,c_run_single, font,attr,lookupcache,step,dataset,sequence,rlmode,handler)
+ end
+ if ok then
+ done = true
+ end
+ else
+ start = getnext(start)
+ end
+ elseif id == math_code then
+ start = getnext(end_of_math(start))
+ elseif id == dir_code then
+ start, topstack, rlmode = txtdirstate(start,dirstack,topstack,rlparmode)
+ elseif id == localpar_code then
+ start, rlparmode, rlmode = pardirstate(start)
+ else
+ start = getnext(start)
+ end
+ end
+ end
+
else
while start do
local char, id = ischar(start,font)
@@ -3975,37 +4057,54 @@ local function featuresprocessor(head,font,attr,direction)
a = true
end
if a then
- local lookupmatch = lookupcache[char]
- if lookupmatch then
- local ok
- head, start, ok = handler(head,start,dataset,sequence,lookupmatch,rlmode,step,1)
- if ok then
- done = true
+ for i=1,nofsteps do
+ local step = steps[i]
+ local lookupcache = step.coverage
+ if lookupcache then
+ local lookupmatch = lookupcache[char]
+ if lookupmatch then
+ -- we could move all code inline but that makes things even more unreadable
+ local ok
+ head, start, ok = handler(head,start,dataset,sequence,lookupmatch,rlmode,step,i)
+ if ok then
+ done = true
+ break
+ elseif not start then
+ -- don't ask why ... shouldn't happen
+ break
+ end
+ end
+ else
+ report_missing_coverage(dataset,sequence)
end
end
if start then
start = getnext(start)
end
else
- start = getnext(start)
+ start = getnext(start)
end
elseif char == false then
-- whatever glyph
- start = getnext(start)
+ start = getnext(start)
elseif id == glue_code then
-- happens often
- start = getnext(start)
+ start = getnext(start)
elseif id == disc_code then
- local ok
- if gpossing then
- start, ok = kernrun(start,k_run_single, font,attr,lookupcache,step,dataset,sequence,rlmode,handler)
- elseif typ == "gsub_ligature" then
- start, ok = testrun(start,t_run_single,c_run_single,font,attr,lookupcache,step,dataset,sequence,rlmode,handler)
+ if not discs or discs[start] == true then
+ local ok
+ if gpossing then
+ start, ok = kernrun(start,k_run_multiple, font,attr,steps,nofsteps,dataset,sequence,rlmode,handler)
+ elseif typ == "gsub_ligature" then
+ start, ok = testrun(start,t_run_multiple,c_run_multiple,font,attr,steps,nofsteps,dataset,sequence,rlmode,handler)
+ else
+ start, ok = comprun(start,c_run_multiple, font,attr,steps,nofsteps,dataset,sequence,rlmode,handler)
+ end
+ if ok then
+ done = true
+ end
else
- start, ok = comprun(start,c_run_single, font,attr,lookupcache,step,dataset,sequence,rlmode,handler)
- end
- if ok then
- done = true
+ start = getnext(start)
end
elseif id == math_code then
start = getnext(end_of_math(start))
@@ -4018,94 +4117,20 @@ local function featuresprocessor(head,font,attr,direction)
end
end
end
+ end
- else
- while start do
- local char, id = ischar(start,font)
- if char then
- -- local a = attr and getattr(start,0)
- -- if a then
- -- a = (a == attr) and (not attribute or getprop(start,a_state) == attribute)
- -- else
- -- a = not attribute or getprop(start,a_state) == attribute
- -- end
- local a -- happens often so no assignment is faster
- if attr then
- if getattr(start,0) == attr and (not attribute or getprop(start,a_state) == attribute) then
- a = true
- end
- elseif not attribute or getprop(start,a_state) == attribute then
- a = true
- end
- if a then
- for i=1,nofsteps do
- local step = steps[i]
- local lookupcache = step.coverage
- if lookupcache then
- local lookupmatch = lookupcache[char]
- if lookupmatch then
- -- we could move all code inline but that makes things even more unreadable
- local ok
- head, start, ok = handler(head,start,dataset,sequence,lookupmatch,rlmode,step,i)
- if ok then
- done = true
- break
- elseif not start then
- -- don't ask why ... shouldn't happen
- break
- end
- end
- else
- report_missing_coverage(dataset,sequence)
- end
- end
- if start then
- start = getnext(start)
- end
- else
- start = getnext(start)
- end
- elseif char == false then
- -- whatever glyph
- start = getnext(start)
- elseif id == glue_code then
- -- happens often
- start = getnext(start)
- elseif id == disc_code then
- local ok
- if gpossing then
- start, ok = kernrun(start,k_run_multiple, font,attr,steps,nofsteps,dataset,sequence,rlmode,handler)
- elseif typ == "gsub_ligature" then
- start, ok = testrun(start,t_run_multiple,c_run_multiple,font,attr,steps,nofsteps,dataset,sequence,rlmode,handler)
- else
- start, ok = comprun(start,c_run_multiple, font,attr,steps,nofsteps,dataset,sequence,rlmode,handler)
- end
- if ok then
- done = true
- end
- elseif id == math_code then
- start = getnext(end_of_math(start))
- elseif id == dir_code then
- start, topstack, rlmode = txtdirstate(start,dirstack,topstack,rlparmode)
- elseif id == localpar_code then
- start, rlparmode, rlmode = pardirstate(start)
- else
- start = getnext(start)
- end
- end
+ if trace_steps then -- ?
+ registerstep(head)
end
- end
- if trace_steps then -- ?
- registerstep(head)
end
- end
+ nesting = nesting - 1
+ head = tonode(head)
- nesting = nesting - 1
- head = tonode(head)
+ return head, done
+ end
- return head, done
end
-- so far
@@ -4119,13 +4144,13 @@ function otf.registerplugin(name,f)
end
end
-local function plugininitializer(tfmdata,value)
+function otf.plugininitializer(tfmdata,value)
if type(value) == "string" then
tfmdata.shared.plugin = plugins[value]
end
end
-local function pluginprocessor(head,font)
+function otf.pluginprocessor(head,font)
local s = fontdata[font].shared
local p = s and s.plugin
if p then
@@ -4138,7 +4163,7 @@ local function pluginprocessor(head,font)
end
end
-local function featuresinitializer(tfmdata,value)
+function otf.featuresinitializer(tfmdata,value)
-- nothing done here any more
end
@@ -4148,18 +4173,15 @@ registerotffeature {
default = true,
initializers = {
position = 1,
- node = featuresinitializer,
- plug = plugininitializer,
+ node = otf.featuresinitializer,
+ plug = otf.plugininitializer,
},
processors = {
- node = featuresprocessor,
- plug = pluginprocessor,
+ node = otf.featuresprocessor,
+ plug = otf.pluginprocessor,
}
}
-otf.nodemodeinitializer = featuresinitializer
-otf.featuresprocessor = featuresprocessor
-
-- This can be used for extra handlers, but should be used with care!
otf.handlers = handlers -- used in devanagari
@@ -4289,9 +4311,9 @@ local function spaceinitializer(tfmdata,value) -- attr
if type(kern) ~= "table" then
left[k] = kern
elseif single then
- left[k] = v[3]
+ left[k] = kern[3]
else
- local one = v[1]
+ local one = kern[1]
if one then
left[k] = one[3]
end
diff --git a/tex/context/base/mkiv/font-ttf.lua b/tex/context/base/mkiv/font-ttf.lua
index 37b30f314..339764d4a 100644
--- a/tex/context/base/mkiv/font-ttf.lua
+++ b/tex/context/base/mkiv/font-ttf.lua
@@ -160,11 +160,12 @@ local function applyaxis(glyph,shape,deltas,dowidth)
local points = shape.points
if points then
local nofpoints = #points
- local h = nofpoints + 1 -- weird, the example font seems to have left first
- ----- l = nofpoints + 2
+ local h = nofpoints + 2 -- weird, the example font seems to have left first
+ local l = nofpoints + 1
----- v = nofpoints + 3
----- t = nofpoints + 4
- local width = dowidth and glyph.width -- what if hvar
+ local dw = 0
+ local dl = 0
for i=1,#deltas do
local deltaset = deltas[i]
local xvalues = deltaset.xvalues
@@ -190,10 +191,22 @@ local function applyaxis(glyph,shape,deltas,dowidth)
p[2] = p[2] + factor * y
end
end
- elseif width then -- and p == h then
- local x = xvalues[d+1]
- if x then
- width = width + factor * x
+ elseif dowidth then
+ -- we've now run into phantom points which is a bit fuzzy because:
+ -- are there gaps in there?
+ --
+ -- todo: move this outside the loop (when we can be sure of all 4 being there)
+ if d == h then
+ -- we have a phantom point hadvance
+ local x = xvalues[i]
+ if x then
+ dw = dw + factor * x
+ end
+ elseif d == l then
+ local x = xvalues[i]
+ if x then
+ dl = dl + factor * x
+ end
end
end
end
@@ -213,10 +226,14 @@ local function applyaxis(glyph,shape,deltas,dowidth)
end
end
end
- if width then
+ if dowidth then
local x = xvalues[h]
if x then
- width = width + factor * x
+ dw = dw + factor * x
+ end
+ local x = xvalues[l]
+ if x then
+ dl = dl + factor * x
end
end
end
@@ -226,8 +243,10 @@ local function applyaxis(glyph,shape,deltas,dowidth)
-- p[1] = round(p[1])
-- p[2] = round(p[2])
-- end
- if width then
- glyph.width = width
+ if dowidth then
+ local width = glyph.width or 0
+ -- local lsb = glyph.lsb or 0
+ glyph.width = width + dw - dl
end
else
report("no points for glyph %a",glyph.name)
@@ -1066,7 +1085,7 @@ function readers.gvar(f,fontdata,specification,glyphdata,shapedata)
local data = { }
local tuples = { }
local glyphdata = fontdata.glyphs
- local dowidth = fontdata.variabledata.hvarwidths
+ local dowidth = not fontdata.variabledata.hvarwidths
-- there is one more offset (so that one can calculate the size i suppose)
-- so we could test for overflows but we simply assume sane font files
if bittest(flags,0x0001) then
diff --git a/tex/context/base/mkiv/lang-exc.lua b/tex/context/base/mkiv/lang-exc.lua
new file mode 100644
index 000000000..bed328da8
--- /dev/null
+++ b/tex/context/base/mkiv/lang-exc.lua
@@ -0,0 +1,16 @@
+if not modules then modules = { } end modules ['lang-exc'] = {
+ version = 1.001,
+ comment = "companion to lang-ini.mkiv",
+ author = "Hans Hagen",
+ copyright = "ConTeXt Development Team",
+ license = "see context related readme files",
+ dataonly = true,
+}
+
+-- Here we add common exceptions. This file can grow. For now we keep it
+-- in the main base tree. We actually need a generic (shared) pattern or
+-- exception file I guess.
+
+return {
+ "lua-jit",
+}
diff --git a/tex/context/base/mkiv/lang-ini.lua b/tex/context/base/mkiv/lang-ini.lua
index 6de951998..e68938e35 100644
--- a/tex/context/base/mkiv/lang-ini.lua
+++ b/tex/context/base/mkiv/lang-ini.lua
@@ -270,6 +270,8 @@ local function unique(tag,requested,loaded)
end
end
+local shared = false
+
local function loaddefinitions(tag,specification)
statistics.starttiming(languages)
local data, instance = resolve(tag)
@@ -295,6 +297,19 @@ local function loaddefinitions(tag,specification)
local ok = false
local resources = data.resources or { }
data.resources = resources
+ if not shared then
+ local found = resolvers.findfile("lang-exc.lua")
+ if found then
+ shared = dofile(found)
+ if type(shared) == "table" then
+ shared = concat(shared," ")
+ else
+ shared = true
+ end
+ else
+ shared = true
+ end
+ end
for i=1,#definitions do
local definition = definitions[i]
if definition == "" then
@@ -344,13 +359,18 @@ local function loaddefinitions(tag,specification)
end
end
if #ploaded > 0 then
+ -- why not always clear
instance:clear_patterns()
instance:patterns(unique(tag,requested,ploaded))
end
if #eloaded > 0 then
+ -- why not always clear
instance:clear_hyphenation()
instance:hyphenation(concat(eloaded," "))
end
+ if type(shared) == "string" then
+ instance:hyphenation(shared)
+ end
return ok
elseif trace_patterns then
report_initialization("no definitions for language %a",tag)
diff --git a/tex/context/base/mkiv/lang-wrd.lua b/tex/context/base/mkiv/lang-wrd.lua
index 8b6e48401..666f39338 100644
--- a/tex/context/base/mkiv/lang-wrd.lua
+++ b/tex/context/base/mkiv/lang-wrd.lua
@@ -187,6 +187,10 @@ local function mark_words(head,whenfound) -- can be optimized and shared
if n > 0 then
local r = getfield(current,"replace")
if r then
+ -- also disc itself
+ n = n + 1
+ nds[n] = current
+ --
for current in traverse_ids(glyph_code,r) do
local code = getchar(current)
n = n + 1
@@ -195,7 +199,7 @@ local function mark_words(head,whenfound) -- can be optimized and shared
str[s] = utfchar(code)
end
end
- end
+ end
elseif id == kern_code and getsubtype(current) == kerning_code and s > 0 then
-- ok
elseif s > 0 then
diff --git a/tex/context/base/mkiv/lpdf-ini.lua b/tex/context/base/mkiv/lpdf-ini.lua
index 1b24269a6..635f365a9 100644
--- a/tex/context/base/mkiv/lpdf-ini.lua
+++ b/tex/context/base/mkiv/lpdf-ini.lua
@@ -543,6 +543,8 @@ function lpdf.escaped(str)
return lpegmatch(escaped,str) or str
end
+local pdfnull, pdfboolean, pdfreference, pdfverbose
+
do
local p_null = { } setmetatable(p_null, mt_z)
diff --git a/tex/context/base/mkiv/luat-fio.lua b/tex/context/base/mkiv/luat-fio.lua
index 302d17a66..a215d1b16 100644
--- a/tex/context/base/mkiv/luat-fio.lua
+++ b/tex/context/base/mkiv/luat-fio.lua
@@ -15,12 +15,10 @@ texconfig.shell_escape = 't'
texconfig.max_print_line = 100000
texconfig.max_in_open = 1000
-if not resolvers.instance then
+if not resolvers.initialized() then
resolvers.reset()
- resolvers.instance.validfile = resolvers.validctxfile
-
-- we now load the file database as we might need files other than
-- tex and lua file on the given path
@@ -128,7 +126,7 @@ statistics.register("resource resolver", function()
scandata.n,
scandata.time,
scandata.shared,
- #resolvers.instance.foundintrees,
+ #resolvers.foundintrees(),
#scandata.paths > 0 and concat(scandata.paths," ") or "<none>"
)
end)
diff --git a/tex/context/base/mkiv/lxml-aux.lua b/tex/context/base/mkiv/lxml-aux.lua
index ee0909cbf..3c0121448 100644
--- a/tex/context/base/mkiv/lxml-aux.lua
+++ b/tex/context/base/mkiv/lxml-aux.lua
@@ -412,7 +412,9 @@ local function include(xmldata,pattern,attribute,recursive,loaddata,level)
end
local data = nil
if name and name ~= "" then
- data = loaddata(name) or ""
+ local d, n = loaddata(name)
+ data = d or ""
+ name = n or name
if trace_inclusions then
report_xml("including %s bytes from %a at level %s by pattern %a and attribute %a (%srecursing)",#data,name,level,pattern,attribute or "",recursive and "" or "not ")
end
@@ -423,6 +425,9 @@ local function include(xmldata,pattern,attribute,recursive,loaddata,level)
-- for the moment hard coded
epdt[ek.ni] = xml.escaped(data) -- d[k] = xml.escaped(data)
else
+local settings = xmldata.settings
+local savedresource = settings.currentresource
+settings.currentresource = name
local xi = xmlinheritedconvert(data,xmldata)
if not xi then
epdt[ek.ni] = "" -- xml.empty(d,k)
@@ -433,6 +438,7 @@ local function include(xmldata,pattern,attribute,recursive,loaddata,level)
local child = xml.body(xi) -- xml.assign(d,k,xi)
child.__p__ = ekrt
child.__f__ = name -- handy for tracing
+child.cf = name
epdt[ek.ni] = child
local settings = xmldata.settings
local inclusions = settings and settings.inclusions
@@ -453,6 +459,7 @@ local function include(xmldata,pattern,attribute,recursive,loaddata,level)
end
end
end
+settings.currentresource = savedresource
end
end
end
diff --git a/tex/context/base/mkiv/lxml-tab.lua b/tex/context/base/mkiv/lxml-tab.lua
index 02228c7c5..a4234ca6e 100644
--- a/tex/context/base/mkiv/lxml-tab.lua
+++ b/tex/context/base/mkiv/lxml-tab.lua
@@ -160,9 +160,20 @@ local entities, parameters
local strip, utfize, resolve, cleanup, resolve_predefined, unify_predefined
local dcache, hcache, acache
local mt, dt, nt
+local currentfilename, currentline, linenumbers
+
+local grammar_parsed_text_one
+local grammar_parsed_text_two
+local grammar_unparsed_text
+
+local handle_hex_entity
+local handle_dec_entity
+local handle_any_entity_dtd
+local handle_any_entity_text
local function preparexmlstate(settings)
if settings then
+ linenumbers = settings.linenumbers
stack = { }
level = 0
top = { }
@@ -179,6 +190,8 @@ local function preparexmlstate(settings)
unify_predefined = settings.unify_predefined_entities -- &#038; -> &amp;
cleanup = settings.text_cleanup
entities = settings.entities or { }
+ currentfilename = settings.currentresource
+ currentline = 1
parameters = { }
reported_at_errors = { }
dcache = { }
@@ -193,6 +206,7 @@ local function preparexmlstate(settings)
resolve_predefined = true
end
else
+ linenumbers = false
stack = nil
level = nil
top = nil
@@ -214,6 +228,8 @@ local function preparexmlstate(settings)
dcache = nil
hcache = nil
acache = nil
+ currentfilename = nil
+ currentline = 1
end
end
@@ -258,14 +274,24 @@ local function add_empty(spacing, namespace, tag)
top = stack[level]
dt = top.dt
nt = #dt + 1
- local t = {
+ local t = linenumbers and {
+ ns = namespace or "",
+ rn = resolved,
+ tg = tag,
+ at = at,
+ dt = { },
+ ni = nt, -- set slot, needed for css filtering
+ cf = currentfilename,
+ cl = currentline,
+ __p__ = top,
+ } or {
ns = namespace or "",
rn = resolved,
tg = tag,
at = at,
dt = { },
ni = nt, -- set slot, needed for css filtering
- __p__ = top
+ __p__ = top,
}
dt[nt] = t
setmetatable(t, mt)
@@ -281,18 +307,28 @@ local function add_begin(spacing, namespace, tag)
dt[nt] = spacing
end
local resolved = namespace == "" and xmlns[#xmlns] or nsremap[namespace] or namespace
- top = {
+ dt = { }
+ top = linenumbers and {
ns = namespace or "",
rn = resolved,
tg = tag,
at = at,
- dt = { },
+ dt = dt,
+ ni = nil, -- preset slot, needed for css filtering
+ cf = currentfilename,
+ cl = currentline,
+ __p__ = stack[level],
+ } or {
+ ns = namespace or "",
+ rn = resolved,
+ tg = tag,
+ at = at,
+ dt = dt,
ni = nil, -- preset slot, needed for css filtering
- __p__ = stack[level]
+ __p__ = stack[level],
}
setmetatable(top, mt)
- dt = top.dt
- nt = #dt
+ nt = 0
level = level + 1
stack[level] = top
at = { }
@@ -372,7 +408,15 @@ local function add_special(what, spacing, text)
-- forget it
else
nt = nt + 1
- dt[nt] = {
+ dt[nt] = linenumbers and {
+ special = true,
+ ns = "",
+ tg = what,
+ ni = nil, -- preset slot
+ dt = { text },
+ cf = currentfilename,
+ cl = currentline,
+ } or {
special = true,
ns = "",
tg = what,
@@ -406,14 +450,6 @@ end
-- these will be set later
-local grammar_parsed_text_one
-local grammar_parsed_text_two
-
-local handle_hex_entity
-local handle_dec_entity
-local handle_any_entity_dtd
-local handle_any_entity_text
-
-- in order to overcome lua limitations we wrap entity stuff in a
-- closure
@@ -880,7 +916,14 @@ local function handle_crap_error(chr)
return chr
end
+local function handlenewline()
+ currentline = currentline + 1
+end
+
+local spacetab = S(' \t')
local space = S(' \r\n\t')
+local newline = lpegpatterns.newline / handlenewline
+local anything = P(1)
local open = P('<')
local close = P('>')
local squote = S("'")
@@ -897,67 +940,9 @@ local name = name_yes + name_nop
local utfbom = lpegpatterns.utfbom -- no capture
local spacing = C(space^0)
------ entitycontent = (1-open-semicolon)^0
-local anyentitycontent = (1-open-semicolon-space-close-ampersand)^0
-local hexentitycontent = R("AF","af","09")^1
-local decentitycontent = R("09")^1
-local parsedentity = P("#")/"" * (
- P("x")/"" * (hexentitycontent/handle_hex_entity) +
- (decentitycontent/handle_dec_entity)
- ) + (anyentitycontent/handle_any_entity_dtd) -- can be Cc(true)
-local parsedentity_text= P("#")/"" * (
- P("x")/"" * (hexentitycontent/handle_hex_entity) +
- (decentitycontent/handle_dec_entity)
- ) + (anyentitycontent/handle_any_entity_text) -- can be Cc(false)
------ entity = ampersand/"" * parsedentity * ( (semicolon/"") + #(P(1)/handle_end_entity))
-local entity = (ampersand/"") * parsedentity * (semicolon/"")
- + ampersand * (anyentitycontent / handle_end_entity)
-local entity_text = (ampersand/"") * parsedentity_text * (semicolon/"")
- + ampersand * (anyentitycontent / handle_end_entity)
-
-local text_unparsed = C((1-open)^1)
-local text_parsed = (Cs((1-open-ampersand)^1)/add_text + Cs(entity_text)/add_text)^1
-
-local somespace = space^1
-local optionalspace = space^0
-
------ value = (squote * C((1 - squote)^0) * squote) + (dquote * C((1 - dquote)^0) * dquote) -- ampersand and < also invalid in value
-local value = (squote * Cs((entity + (1 - squote))^0) * squote) + (dquote * Cs((entity + (1 - dquote))^0) * dquote) -- ampersand and < also invalid in value
-
-local endofattributes = slash * close + close -- recovery of flacky html
-local whatever = space * name * optionalspace * equal
------ wrongvalue = C(P(1-whatever-close)^1 + P(1-close)^1) / attribute_value_error
------ wrongvalue = C(P(1-whatever-endofattributes)^1 + P(1-endofattributes)^1) / attribute_value_error
------ wrongvalue = C(P(1-space-endofattributes)^1) / attribute_value_error
-local wrongvalue = Cs(P(entity + (1-space-endofattributes))^1) / attribute_value_error
-
-local attributevalue = value + wrongvalue
-
-local attribute = (somespace * name * optionalspace * equal * optionalspace * attributevalue) / add_attribute
------ attributes = (attribute)^0
-
-local attributes = (attribute + somespace^-1 * (((1-endofattributes)^1)/attribute_specification_error))^0
-
-local parsedtext = text_parsed -- / add_text
-local unparsedtext = text_unparsed / add_text
-local balanced = P { "[" * ((1 - S"[]") + V(1))^0 * "]" } -- taken from lpeg manual, () example
-
-local emptyelement = (spacing * open * name * attributes * optionalspace * slash * close) / add_empty
-local beginelement = (spacing * open * name * attributes * optionalspace * close) / add_begin
-local endelement = (spacing * open * slash * name * optionalspace * close) / add_end
-
--- todo: combine the opens in:
-
-local begincomment = open * P("!--")
-local endcomment = P("--") * close
-local begininstruction = open * P("?")
-local endinstruction = P("?") * close
-local begincdata = open * P("![CDATA[")
-local endcdata = P("]]") * close
-
-local someinstruction = C((1 - endinstruction)^0)
-local somecomment = C((1 - endcomment )^0)
-local somecdata = C((1 - endcdata )^0)
+local space_nl = spacetab + newline
+local spacing_nl = Cs((space_nl)^0)
+local anything_nl = newline + P(1)
local function weirdentity(k,v)
if trace_entities then
@@ -984,97 +969,177 @@ local function publicentity(k,v,n)
entities[k] = v
end
--- todo: separate dtd parser
+local function install(spacenewline,spacing,anything)
-local begindoctype = open * P("!DOCTYPE")
-local enddoctype = close
-local beginset = P("[")
-local endset = P("]")
-local wrdtypename = C((1-somespace-P(";"))^1)
-local doctypename = C((1-somespace-close)^0)
-local elementdoctype = optionalspace * P("<!ELEMENT") * (1-close)^0 * close
+ local anyentitycontent = (1-open-semicolon-space-close-ampersand)^0
+ local hexentitycontent = R("AF","af","09")^1
+ local decentitycontent = R("09")^1
+ local parsedentity = P("#")/"" * (
+ P("x")/"" * (hexentitycontent/handle_hex_entity) +
+ (decentitycontent/handle_dec_entity)
+ ) + (anyentitycontent/handle_any_entity_dtd) -- can be Cc(true)
+ local parsedentity_text= P("#")/"" * (
+ P("x")/"" * (hexentitycontent/handle_hex_entity) +
+ (decentitycontent/handle_dec_entity)
+ ) + (anyentitycontent/handle_any_entity_text) -- can be Cc(false)
+ local entity = (ampersand/"") * parsedentity * (semicolon/"")
+ + ampersand * (anyentitycontent / handle_end_entity)
+ local entity_text = (ampersand/"") * parsedentity_text * (semicolon/"")
+ + ampersand * (anyentitycontent / handle_end_entity)
-local basiccomment = begincomment * ((1 - endcomment)^0) * endcomment
+ local text_unparsed = Cs((anything-open)^1)
+ local text_parsed = (Cs((anything-open-ampersand)^1)/add_text + Cs(entity_text)/add_text)^1
-local weirdentitytype = P("%") * (somespace * doctypename * somespace * value) / weirdentity
-local normalentitytype = (doctypename * somespace * value) / normalentity
-local publicentitytype = (doctypename * somespace * P("PUBLIC") * somespace * value)/publicentity
-local systementitytype = (doctypename * somespace * P("SYSTEM") * somespace * value * somespace * P("NDATA") * somespace * doctypename)/systementity
-local entitydoctype = optionalspace * P("<!ENTITY") * somespace * (systementitytype + publicentitytype + normalentitytype + weirdentitytype) * optionalspace * close
+ local somespace = (spacenewline)^1
+ local optionalspace = (spacenewline)^0
-local function weirdresolve(s)
- lpegmatch(entitydoctype,parameters[s])
-end
+ local value = (squote * Cs((entity + (anything - squote))^0) * squote) + (dquote * Cs((entity + (anything - dquote))^0) * dquote) -- ampersand and < also invalid in value
-local function normalresolve(s)
- lpegmatch(entitydoctype,entities[s])
-end
+ local endofattributes = slash * close + close -- recovery of flacky html
+ local whatever = space * name * optionalspace * equal
+ local wrongvalue = Cs(P(entity + (1-space-endofattributes))^1) / attribute_value_error
-local entityresolve = P("%") * (wrdtypename/weirdresolve ) * P(";")
- + P("&") * (wrdtypename/normalresolve) * P(";")
+ local attributevalue = value + wrongvalue
-entitydoctype = entitydoctype + entityresolve
+ local attribute = (somespace * name * optionalspace * equal * optionalspace * attributevalue) / add_attribute
--- we accept comments in doctypes
+-- local attributes = (attribute + somespace^-1 * (((1-endofattributes)^1)/attribute_specification_error))^0
+ local attributes = (attribute + somespace^-1 * (((anything-endofattributes)^1)/attribute_specification_error))^0
-local doctypeset = beginset * optionalspace * P(elementdoctype + entitydoctype + entityresolve + basiccomment + space)^0 * optionalspace * endset
-local definitiondoctype= doctypename * somespace * doctypeset
-local publicdoctype = doctypename * somespace * P("PUBLIC") * somespace * value * somespace * value * somespace * doctypeset
-local systemdoctype = doctypename * somespace * P("SYSTEM") * somespace * value * somespace * doctypeset
-local simpledoctype = (1-close)^1 -- * balanced^0
-local somedoctype = C((somespace * (publicdoctype + systemdoctype + definitiondoctype + simpledoctype) * optionalspace)^0)
+ local parsedtext = text_parsed -- / add_text
+ local unparsedtext = text_unparsed / add_text
+ local balanced = P { "[" * ((anything - S"[]") + V(1))^0 * "]" } -- taken from lpeg manual, () example
-local instruction = (spacing * begininstruction * someinstruction * endinstruction) / function(...) add_special("@pi@",...) end
-local comment = (spacing * begincomment * somecomment * endcomment ) / function(...) add_special("@cm@",...) end
-local cdata = (spacing * begincdata * somecdata * endcdata ) / function(...) add_special("@cd@",...) end
-local doctype = (spacing * begindoctype * somedoctype * enddoctype ) / function(...) add_special("@dt@",...) end
+ local emptyelement = (spacing * open * name * attributes * optionalspace * slash * close) / add_empty
+ local beginelement = (spacing * open * name * attributes * optionalspace * close) / add_begin
+ local endelement = (spacing * open * slash * name * optionalspace * close) / add_end
-local crap_parsed = 1 - beginelement - endelement - emptyelement - begininstruction - begincomment - begincdata - ampersand
-local crap_unparsed = 1 - beginelement - endelement - emptyelement - begininstruction - begincomment - begincdata
-local parsedcrap = Cs((crap_parsed^1 + entity_text)^1) / handle_crap_error
-local parsedcrap = Cs((crap_parsed^1 + entity_text)^1) / handle_crap_error
-local unparsedcrap = Cs((crap_unparsed )^1) / handle_crap_error
+ -- todo: combine the opens in:
--- nicer but slower:
---
--- local instruction = (Cc("@pi@") * spacing * begininstruction * someinstruction * endinstruction) / add_special
--- local comment = (Cc("@cm@") * spacing * begincomment * somecomment * endcomment ) / add_special
--- local cdata = (Cc("@cd@") * spacing * begincdata * somecdata * endcdata ) / add_special
--- local doctype = (Cc("@dt@") * spacing * begindoctype * somedoctype * enddoctype ) / add_special
+ local begincomment = open * P("!--")
+ local endcomment = P("--") * close
+ local begininstruction = open * P("?")
+ local endinstruction = P("?") * close
+ local begincdata = open * P("![CDATA[")
+ local endcdata = P("]]") * close
-local trailer = space^0 * (text_unparsed/set_message)^0
+ local someinstruction = C((anything - endinstruction)^0)
+ local somecomment = C((anything - endcomment )^0)
+ local somecdata = C((anything - endcdata )^0)
--- comment + emptyelement + text + cdata + instruction + V("parent"), -- 6.5 seconds on 40 MB database file
--- text + comment + emptyelement + cdata + instruction + V("parent"), -- 5.8
--- text + V("parent") + emptyelement + comment + cdata + instruction, -- 5.5
+ -- todo: separate dtd parser
--- local grammar_parsed_text = P { "preamble",
--- preamble = utfbom^0 * instruction^0 * (doctype + comment + instruction)^0 * V("parent") * trailer,
--- parent = beginelement * V("children")^0 * endelement,
--- children = parsedtext + V("parent") + emptyelement + comment + cdata + instruction + parsedcrap,
--- }
+ local begindoctype = open * P("!DOCTYPE")
+ local enddoctype = close
+ local beginset = P("[")
+ local endset = P("]")
+ local wrdtypename = C((anything-somespace-P(";"))^1)
+ local doctypename = C((anything-somespace-close)^0)
+ local elementdoctype = optionalspace * P("<!ELEMENT") * (anything-close)^0 * close
-grammar_parsed_text_one = P { "preamble",
- preamble = utfbom^0 * instruction^0 * (doctype + comment + instruction)^0,
-}
+ local basiccomment = begincomment * ((anything - endcomment)^0) * endcomment
-grammar_parsed_text_two = P { "followup",
- followup = V("parent") * trailer,
- parent = beginelement * V("children")^0 * endelement,
- children = parsedtext + V("parent") + emptyelement + comment + cdata + instruction + parsedcrap,
-}
+ local weirdentitytype = P("%") * (somespace * doctypename * somespace * value) / weirdentity
+ local normalentitytype = (doctypename * somespace * value) / normalentity
+ local publicentitytype = (doctypename * somespace * P("PUBLIC") * somespace * value)/publicentity
+ local systementitytype = (doctypename * somespace * P("SYSTEM") * somespace * value * somespace * P("NDATA") * somespace * doctypename)/systementity
+ local entitydoctype = optionalspace * P("<!ENTITY") * somespace * (systementitytype + publicentitytype + normalentitytype + weirdentitytype) * optionalspace * close
-local grammar_unparsed_text = P { "preamble",
- preamble = utfbom^0 * instruction^0 * (doctype + comment + instruction)^0 * V("parent") * trailer,
- parent = beginelement * V("children")^0 * endelement,
- children = unparsedtext + V("parent") + emptyelement + comment + cdata + instruction + unparsedcrap,
-}
+ local function weirdresolve(s)
+ lpegmatch(entitydoctype,parameters[s])
+ end
+
+ local function normalresolve(s)
+ lpegmatch(entitydoctype,entities[s])
+ end
+
+ local entityresolve = P("%") * (wrdtypename/weirdresolve ) * P(";")
+ + P("&") * (wrdtypename/normalresolve) * P(";")
+
+ entitydoctype = entitydoctype + entityresolve
+
+ -- we accept comments in doctypes
+
+ local doctypeset = beginset * optionalspace * P(elementdoctype + entitydoctype + entityresolve + basiccomment + space)^0 * optionalspace * endset
+ local definitiondoctype= doctypename * somespace * doctypeset
+ local publicdoctype = doctypename * somespace * P("PUBLIC") * somespace * value * somespace * value * somespace * doctypeset
+ local systemdoctype = doctypename * somespace * P("SYSTEM") * somespace * value * somespace * doctypeset
+ local simpledoctype = (anything-close)^1 -- * balanced^0
+ local somedoctype = C((somespace * (publicdoctype + systemdoctype + definitiondoctype + simpledoctype) * optionalspace)^0)
+
+ local instruction = (spacing * begininstruction * someinstruction * endinstruction) / function(...) add_special("@pi@",...) end
+ local comment = (spacing * begincomment * somecomment * endcomment ) / function(...) add_special("@cm@",...) end
+ local cdata = (spacing * begincdata * somecdata * endcdata ) / function(...) add_special("@cd@",...) end
+ local doctype = (spacing * begindoctype * somedoctype * enddoctype ) / function(...) add_special("@dt@",...) end
+
+ local crap_parsed = anything - beginelement - endelement - emptyelement - begininstruction - begincomment - begincdata - ampersand
+ local crap_unparsed = anything - beginelement - endelement - emptyelement - begininstruction - begincomment - begincdata
+
+ local parsedcrap = Cs((crap_parsed^1 + entity_text)^1) / handle_crap_error
+ local parsedcrap = Cs((crap_parsed^1 + entity_text)^1) / handle_crap_error
+ local unparsedcrap = Cs((crap_unparsed )^1) / handle_crap_error
+
+ -- nicer but slower:
+ --
+ -- local instruction = (Cc("@pi@") * spacing * begininstruction * someinstruction * endinstruction) / add_special
+ -- local comment = (Cc("@cm@") * spacing * begincomment * somecomment * endcomment ) / add_special
+ -- local cdata = (Cc("@cd@") * spacing * begincdata * somecdata * endcdata ) / add_special
+ -- local doctype = (Cc("@dt@") * spacing * begindoctype * somedoctype * enddoctype ) / add_special
+
+ local trailer = space^0 * (text_unparsed/set_message)^0
+
+ -- comment + emptyelement + text + cdata + instruction + V("parent"), -- 6.5 seconds on 40 MB database file
+ -- text + comment + emptyelement + cdata + instruction + V("parent"), -- 5.8
+ -- text + V("parent") + emptyelement + comment + cdata + instruction, -- 5.5
+
+ -- local grammar_parsed_text = P { "preamble",
+ -- preamble = utfbom^0 * instruction^0 * (doctype + comment + instruction)^0 * V("parent") * trailer,
+ -- parent = beginelement * V("children")^0 * endelement,
+ -- children = parsedtext + V("parent") + emptyelement + comment + cdata + instruction + parsedcrap,
+ -- }
+
+ local grammar_parsed_text_one = P { "preamble",
+ preamble = utfbom^0 * instruction^0 * (doctype + comment + instruction)^0,
+ }
+
+ local grammar_parsed_text_two = P { "followup",
+ followup = V("parent") * trailer,
+ parent = beginelement * V("children")^0 * endelement,
+ children = parsedtext + V("parent") + emptyelement + comment + cdata + instruction + parsedcrap,
+ }
+
+ local grammar_unparsed_text = P { "preamble",
+ preamble = utfbom^0 * instruction^0 * (doctype + comment + instruction)^0 * V("parent") * trailer,
+ parent = beginelement * V("children")^0 * endelement,
+ children = unparsedtext + V("parent") + emptyelement + comment + cdata + instruction + unparsedcrap,
+ }
+
+ return grammar_parsed_text_one, grammar_parsed_text_two, grammar_unparsed_text
+
+end
+
+grammar_parsed_text_one_nop ,
+grammar_parsed_text_two_nop ,
+grammar_unparsed_text_nop = install(space, spacing, anything)
+
+grammar_parsed_text_one_yes ,
+grammar_parsed_text_two_yes ,
+grammar_unparsed_text_yes = install(space_nl, spacing_nl, anything_nl)
-- maybe we will add settings to result as well
local function _xmlconvert_(data,settings)
settings = settings or { } -- no_root strip_cm_and_dt given_entities parent_root error_handler
preparexmlstate(settings)
+ if settings.linenumbers then
+ grammar_parsed_text_one = grammar_parsed_text_one_yes
+ grammar_parsed_text_two = grammar_parsed_text_two_yes
+ grammar_unparsed_text = grammar_unparsed_text_yes
+ else
+ grammar_parsed_text_one = grammar_parsed_text_one_nop
+ grammar_parsed_text_two = grammar_parsed_text_two_nop
+ grammar_unparsed_text = grammar_unparsed_text_nop
+ end
local preprocessor = settings.preprocessor
if data and data ~= "" and type(preprocessor) == "function" then
data = preprocessor(data,settings) or data -- settings.currentresource
@@ -1113,7 +1178,7 @@ local function _xmlconvert_(data,settings)
end
local result
if errorstr and errorstr ~= "" then
- result = { dt = { { ns = "", tg = "error", dt = { errorstr }, at={ }, er = true } } }
+ result = { dt = { { ns = "", tg = "error", dt = { errorstr }, at = { }, er = true } } }
setmetatable(result, mt)
setmetatable(result.dt[1], mt)
setmetatable(stack, mt)
diff --git a/tex/context/base/mkiv/lxml-tex.lua b/tex/context/base/mkiv/lxml-tex.lua
index 582185ba8..f79aa708f 100644
--- a/tex/context/base/mkiv/lxml-tex.lua
+++ b/tex/context/base/mkiv/lxml-tex.lua
@@ -17,6 +17,7 @@ local lpegmatch = lpeg.match
local P, S, C, Cc, Cs = lpeg.P, lpeg.S, lpeg.C, lpeg.Cc, lpeg.Cs
local patterns = lpeg.patterns
local setmetatableindex = table.setmetatableindex
+local formatters = string.formatters
local tex, xml = tex, xml
local lowerchars, upperchars, lettered = characters.lower, characters.upper, characters.lettered
@@ -481,13 +482,24 @@ end
xml.originalload = xml.originalload or xml.load
-local noffiles, nofconverted = 0, 0
+local noffiles = 0
+local nofconverted = 0
+local linenumbers = false
+
+-- directives.register("lxml.linenumbers", function(v)
+-- linenumbers = v
+-- end)
+
+directives.register("system.synctex.xml",function(v)
+ linenumbers = v
+end)
function xml.load(filename,settings)
noffiles, nofconverted = noffiles + 1, nofconverted + 1
starttiming(xml)
local ok, data = resolvers.loadbinfile(filename)
settings = settings or { }
+ settings.linenumbers = linenumbers
settings.currentresource = filename
local xmltable = xml.convert((ok and data) or "",settings)
settings.currentresource = nil
@@ -528,6 +540,7 @@ local function lxmlconvert(id,data,compress,currentresource)
resolve_entities = function(str,ent) return entityconverter(id,str,ent) end,
currentresource = tostring(currentresource or id),
preprocessor = lxml.preprocessor,
+ linenumbers = linenumbers,
}
if compress and compress == variables.yes then
settings.strip_cm_and_dt = true
@@ -545,7 +558,8 @@ function lxml.load(id,filename,compress)
noffiles, nofconverted = noffiles + 1, nofconverted + 1
starttiming(xml)
local ok, data = resolvers.loadbinfile(filename)
- local xmltable = lxmlconvert(id,(ok and data) or "",compress,format("id: %s, file: %s",id,filename))
+-- local xmltable = lxmlconvert(id,(ok and data) or "",compress,formatters["id: %s, file: %s"](id,filename))
+local xmltable = lxmlconvert(id,(ok and data) or "",compress,filename)
stoptiming(xml)
lxml.store(id,xmltable,filename)
return xmltable, filename
@@ -599,7 +613,8 @@ function lxml.include(id,pattern,attribute,options)
report_lxml("including file %a",filename)
end
noffiles, nofconverted = noffiles + 1, nofconverted + 1
- return resolvers.loadtexfile(filename) or ""
+ return resolvers.loadtexfile(filename) or "",
+ resolvers.findtexfile(filename) or ""
else
return ""
end
@@ -694,7 +709,37 @@ local default_element_handler = xml.gethandlers("verbose").functions["@el@"]
-- return v
-- end)
+local setfilename = false
+local trace_name = false
+local report_name = logs.reporter("lxml")
+
+directives.register("system.synctex.xml",function(v)
+ if v then
+ setfilename = luatex.synctex.setfilename
+ else
+ setfilename = false
+ end
+end)
+
+local function syncfilename(e,where)
+ local cf = e.cf
+ if cf then
+ local cl = e.cl or 1
+ if trace_name then
+ report_name("set filename, case %a, tag %a, file %a, line %a",where,e.tg,cf,cl)
+ end
+ setfilename(cf,cl);
+ end
+end
+
+trackers.register("system.synctex.xml",function(v)
+ trace_name = v
+end)
+
local function tex_element(e,handlers)
+ if setfilename then
+ syncfilename(e,"element")
+ end
local command = e.command
if command == nil then
default_element_handler(e,handlers)
@@ -895,11 +940,14 @@ function lxml.setaction(id,pattern,action)
end
end
-local function sprint(root) -- check rawroot usage
+local function sprint(root,p) -- check rawroot usage
if root then
local tr = type(root)
if tr == "string" then -- can also be result of lpath
-- rawroot = false -- ?
+ if setfilename and p then
+ syncfilename(p,"sprint s")
+ end
root = xmlunspecialized(root)
lpegmatch(xmltextcapture,root)
elseif tr == "table" then
@@ -910,12 +958,32 @@ local function sprint(root) -- check rawroot usage
root = xmldespecialized(xmltostring(root))
lpegmatch(xmltextcapture,root) -- goes to toc
else
+if setfilename and p then -- and not root.cl
+ syncfilename(p,"sprint t")
+end
xmlserialize(root,xmltexhandler)
end
end
end
end
+-- local function tprint(root) -- we can move sprint inline
+-- local tr = type(root)
+-- if tr == "table" then
+-- local n = #root
+-- if n == 0 then
+-- -- skip
+-- else
+-- for i=1,n do
+-- sprint(root[i])
+-- end
+-- end
+-- elseif tr == "string" then
+-- root = xmlunspecialized(root)
+-- lpegmatch(xmltextcapture,root)
+-- end
+-- end
+
local function tprint(root) -- we can move sprint inline
local tr = type(root)
if tr == "table" then
@@ -924,7 +992,24 @@ local function tprint(root) -- we can move sprint inline
-- skip
else
for i=1,n do
- sprint(root[i])
+ -- sprint(root[i]) -- inlined because of filename:
+ local ri = root[i]
+ local tr = type(ri)
+ if tr == "string" then -- can also be result of lpath
+ if setfilename then
+ syncfilename(ri,"tprint")
+ end
+ root = xmlunspecialized(ri)
+ lpegmatch(xmltextcapture,ri)
+ elseif tr == "table" then
+ if forceraw then
+ rawroot = ri
+ root = xmldespecialized(xmltostring(ri))
+ lpegmatch(xmltextcapture,ri) -- goes to toc
+ else
+ xmlserialize(ri,xmltexhandler)
+ end
+ end
end
end
elseif tr == "string" then
@@ -942,6 +1027,9 @@ local function cprint(root) -- content
root = xmlunspecialized(root)
lpegmatch(xmltextcapture,root)
else
+ if setfilename then
+ syncfilename(root,"cprint")
+ end
local rootdt = root.dt
if forceraw then
rawroot = root
@@ -961,7 +1049,11 @@ xml.cprint = cprint local xmlcprint = cprint -- calls ct mathml -> will be re
-- now we can flush
function lxml.main(id)
- xmlserialize(getid(id),xmltexhandler) -- the real root (@rt@)
+ local root = getid(id)
+-- if setfilename then
+-- syncfilename(root,"main")
+-- end
+ xmlserialize(root,xmltexhandler) -- the real root (@rt@)
end
-- -- lines (untested)
@@ -1932,7 +2024,7 @@ function lxml.flush(id)
if e then
local dt = e.dt
if dt then
- xmlsprint(dt)
+ xmlsprint(dt,e)
end
end
end
@@ -1954,7 +2046,7 @@ function lxml.snippet(id,i)
if dt then
local dti = dt[i]
if dti then
- xmlsprint(dti)
+ xmlsprint(dti,e)
end
end
end
@@ -2086,12 +2178,13 @@ function lxml.strip(id,pattern,nolines,anywhere)
end
function lxml.stripped(id,pattern,nolines)
- local str = xmltext(getid(id),pattern) or ""
+ local root = getid(id)
+ local str = xmltext(root,pattern) or ""
str = gsub(str,"^%s*(.-)%s*$","%1")
if nolines then
str = gsub(str,"%s+"," ")
end
- xmlsprint(str)
+ xmlsprint(str,root)
end
function lxml.delete(id,pattern)
diff --git a/tex/context/base/mkiv/math-act.lua b/tex/context/base/mkiv/math-act.lua
index 4fcac17d8..ddc7510b1 100644
--- a/tex/context/base/mkiv/math-act.lua
+++ b/tex/context/base/mkiv/math-act.lua
@@ -28,6 +28,7 @@ local appendgroup = sequencers.appendgroup
local appendaction = sequencers.appendaction
local fontchars = fonts.hashes.characters
+local fontproperties = fonts.hashes.properties
local mathfontparameteractions = sequencers.new {
name = "mathparameters",
@@ -664,10 +665,20 @@ blocks["uppercasedoublestruck"].gaps = {
local stack = { }
-function mathematics.registerfallbackid(n,id)
+function mathematics.registerfallbackid(n,id,name)
+ if trace_collecting then
+ report_math("resolved fallback font %i, name %a, id %a, used %a",
+ n,name,id,fontproperties[id].fontname)
+ end
stack[#stack][n] = id
end
+interfaces.implement { -- will be shared with text
+ name = "registerfontfallbackid",
+ arguments = { "integer", "integer", "string" },
+ actions = mathematics.registerfallbackid,
+}
+
function mathematics.resolvefallbacks(target,specification,fallbacks)
local definitions = fonts.collections.definitions[fallbacks]
if definitions then
@@ -733,8 +744,8 @@ function mathematics.finishfallbacks(target,specification,fallbacks)
end
if force or (not done[unic] and not characters[unic]) then
if trace_collecting then
- report_math("replacing math character %C by %C using vector %a and font %a named %a%s%s",
- unic,unicode,fallbacks,id,name,check and ", checked",gap and ", gap plugged")
+ report_math("replacing math character %C by %C using vector %a and font id %a for %a%s%s",
+ unic,unicode,fallbacks,id,fontproperties[id].fontname,check and ", checked",gap and ", gap plugged")
end
characters[unic] = copiedglyph(target,characters,chars,unicode,index)
done[unic] = true
diff --git a/tex/context/base/mkiv/math-fbk.lua b/tex/context/base/mkiv/math-fbk.lua
index 564ece8d7..7621b6525 100644
--- a/tex/context/base/mkiv/math-fbk.lua
+++ b/tex/context/base/mkiv/math-fbk.lua
@@ -519,45 +519,19 @@ virtualcharacters[0xFE303] = function(data) return smashed(data,0x0303,0xFE303)
-- these primes in fonts are a real mess .. kind of a dead end, so don't wonder about
-- the values below
--- todo: check tounicodes
-
-local function smashed(data,unicode,optional)
- local oldchar = data.characters[unicode]
- if oldchar then
- local xheight = data.target.parameters.xheight
- local height = 1.2 * xheight
- local shift = oldchar.height - height
- local newchar = {
- commands = {
- { "down", shift },
- { "char", unicode },
- },
- height = height,
- width = oldchar.width,
- }
- return newchar
- elseif not optional then
- report_fallbacks("missing %U prime in font %a",unicode,data.target.properties.fullname)
- end
-end
-
--- -- relocate all but less flexible so not used .. instead some noad hackery plus
--- -- the above
---
-- local function smashed(data,unicode,optional)
-- local oldchar = data.characters[unicode]
-- if oldchar then
-- local xheight = data.target.parameters.xheight
--- local height = oldchar.height
--- local shift = oldchar.height < 1.5*xheight and -(1.8*xheight-height) or 0
+-- local height = 1.25 * xheight
+-- local shift = oldchar.height - height
-- local newchar = {
-- commands = {
-- { "down", shift },
-- { "char", unicode },
-- },
--- unicode = unicode,
--- height = height,
--- width = oldchar.width,
+-- height = height,
+-- width = oldchar.width,
-- }
-- return newchar
-- elseif not optional then
@@ -565,23 +539,23 @@ end
-- end
-- end
-addextra(0xFE932, { description="SMASHED PRIME 0x02032", unicodeslot=0xFE932 } )
-addextra(0xFE933, { description="SMASHED PRIME 0x02033", unicodeslot=0xFE933 } )
-addextra(0xFE934, { description="SMASHED PRIME 0x02034", unicodeslot=0xFE934 } )
-addextra(0xFE957, { description="SMASHED PRIME 0x02057", unicodeslot=0xFE957 } )
+-- addextra(0xFE932, { description = "SMASHED PRIME 0x02032", unicodeslot = 0xFE932 } )
+-- addextra(0xFE933, { description = "SMASHED PRIME 0x02033", unicodeslot = 0xFE933 } )
+-- addextra(0xFE934, { description = "SMASHED PRIME 0x02034", unicodeslot = 0xFE934 } )
+-- addextra(0xFE957, { description = "SMASHED PRIME 0x02057", unicodeslot = 0xFE957 } )
-addextra(0xFE935, { description="SMASHED BACKWARD PRIME 0x02035", unicodeslot=0xFE935 } )
-addextra(0xFE936, { description="SMASHED BACKWARD PRIME 0x02036", unicodeslot=0xFE936 } )
-addextra(0xFE937, { description="SMASHED BACKWARD PRIME 0x02037", unicodeslot=0xFE937 } )
+-- addextra(0xFE935, { description = "SMASHED BACKWARD PRIME 0x02035", unicodeslot = 0xFE935 } )
+-- addextra(0xFE936, { description = "SMASHED BACKWARD PRIME 0x02036", unicodeslot = 0xFE936 } )
+-- addextra(0xFE937, { description = "SMASHED BACKWARD PRIME 0x02037", unicodeslot = 0xFE937 } )
-virtualcharacters[0xFE932] = function(data) return smashed(data,0x02032) end
-virtualcharacters[0xFE933] = function(data) return smashed(data,0x02033) end
-virtualcharacters[0xFE934] = function(data) return smashed(data,0x02034) end
-virtualcharacters[0xFE957] = function(data) return smashed(data,0x02057) end
+-- virtualcharacters[0xFE932] = function(data) return smashed(data,0x02032) end
+-- virtualcharacters[0xFE933] = function(data) return smashed(data,0x02033) end
+-- virtualcharacters[0xFE934] = function(data) return smashed(data,0x02034) end
+-- virtualcharacters[0xFE957] = function(data) return smashed(data,0x02057) end
-virtualcharacters[0xFE935] = function(data) return smashed(data,0x02035,true) end
-virtualcharacters[0xFE936] = function(data) return smashed(data,0x02036,true) end
-virtualcharacters[0xFE937] = function(data) return smashed(data,0x02037,true) end
+-- virtualcharacters[0xFE935] = function(data) return smashed(data,0x02035,true) end
+-- virtualcharacters[0xFE936] = function(data) return smashed(data,0x02036,true) end
+-- virtualcharacters[0xFE937] = function(data) return smashed(data,0x02037,true) end
-- actuarian (beware: xits has an ugly one)
diff --git a/tex/context/base/mkiv/math-noa.lua b/tex/context/base/mkiv/math-noa.lua
index f9e8c9f70..2e94f0d71 100644
--- a/tex/context/base/mkiv/math-noa.lua
+++ b/tex/context/base/mkiv/math-noa.lua
@@ -1510,6 +1510,8 @@ local collapse = { } processors.collapse = collapse
local mathpairs = characters.mathpairs -- next will move to char-def
+-- 0x02B9 modifier
+
mathpairs[0x2032] = { [0x2032] = 0x2033, [0x2033] = 0x2034, [0x2034] = 0x2057 } -- (prime,prime) (prime,doubleprime) (prime,tripleprime)
mathpairs[0x2033] = { [0x2032] = 0x2034, [0x2033] = 0x2057 } -- (doubleprime,prime) (doubleprime,doubleprime)
mathpairs[0x2034] = { [0x2032] = 0x2057 } -- (tripleprime,prime)
@@ -1521,24 +1523,40 @@ mathpairs[0x222B] = { [0x222B] = 0x222C, [0x222C] = 0x222D }
mathpairs[0x222C] = { [0x222B] = 0x222D }
mathpairs[0x007C] = { [0x007C] = 0x2016, [0x2016] = 0x2980 } -- bar+bar=double bar+double=triple
-mathpairs[0x2016] = { [0x007C] = 0x2980 } -- double+bar=triple
+mathpairs[0x2016] = { [0x007C] = 0x2980, [0x02B9] = 0x2016 } -- double+bar=triple
+
+-- local movesub = {
+-- -- primes
+-- [0x2032] = 0xFE932,
+-- [0x2033] = 0xFE933,
+-- [0x2034] = 0xFE934,
+-- [0x2057] = 0xFE957,
+-- -- reverse primes
+-- [0x2035] = 0xFE935,
+-- [0x2036] = 0xFE936,
+-- [0x2037] = 0xFE937,
+-- }
local movesub = {
-- primes
- [0x2032] = 0xFE932,
- [0x2033] = 0xFE933,
- [0x2034] = 0xFE934,
- [0x2057] = 0xFE957,
+ [0x2032] = 0x2032,
+ [0x2033] = 0x2033,
+ [0x2034] = 0x2034,
+ [0x2057] = 0x2057,
-- reverse primes
- [0x2035] = 0xFE935,
- [0x2036] = 0xFE936,
- [0x2037] = 0xFE937,
+ [0x2035] = 0x2035,
+ [0x2036] = 0x2036,
+ [0x2037] = 0x2037,
}
+-- inner under over vcenter
+
local validpair = {
- [noad_rel] = true,
[noad_ord] = true,
+ [noad_rel] = true,
[noad_bin] = true, -- new
+ [noad_open] = true, -- new
+ [noad_close] = true, -- new
[noad_punct] = true, -- new
[noad_opdisplaylimits] = true,
[noad_oplimits] = true,
@@ -1548,9 +1566,11 @@ local validpair = {
local function movesubscript(parent,current_nucleus,current_char)
local prev = getprev(parent)
if prev and getid(prev) == math_noad then
- if not getsup(prev) and not getsub(prev) then
+ local psup = getsup(prev)
+ local psub = getsub(prev)
+ if not psup and not psub then
-- {f} {'}_n => f_n^'
- setchar(current_nucleus,movesub[current_char or getchar(current_nucleus)])
+ -- setchar(current_nucleus,movesub[current_char or getchar(current_nucleus)])
local nucleus = getnucleus(parent)
local sub = getsub(parent)
local sup = getsup(parent)
@@ -1563,9 +1583,9 @@ local function movesubscript(parent,current_nucleus,current_char)
if trace_collapsing then
report_collapsing("fixing subscript")
end
- elseif not getsup(prev) then
+ elseif not psup then
-- {f} {'}_n => f_n^'
- setchar(current_nucleus,movesub[current_char or getchar(current_nucleus)])
+ -- setchar(current_nucleus,movesub[current_char or getchar(current_nucleus)])
local nucleus = getnucleus(parent)
local sup = getsup(parent)
setsup(prev,nucleus)
@@ -1592,9 +1612,9 @@ local function collapsepair(pointer,what,n,parent,nested) -- todo: switch to tur
if next_noad and getid(next_noad) == math_noad then
if validpair[getsubtype(next_noad)] then
local next_nucleus = getnucleus(next_noad)
- local next_char = getchar(next_nucleus)
if getid(next_nucleus) == math_char then
- local newchar = mathpair[next_char]
+ local next_char = getchar(next_nucleus)
+ local newchar = mathpair[next_char]
if newchar then
local id = getfont(current_nucleus)
local characters = fontcharacters[id]
@@ -1609,30 +1629,37 @@ local function collapsepair(pointer,what,n,parent,nested) -- todo: switch to tur
else
setnext(parent)
end
- setsup(parent,getsup(next_noad))
- setsub(parent,getsub(next_noad))
- setsup(next_noad)
- setsub(next_noad)
+ local nsup = getsup(next_noad)
+ local nsub = getsub(next_noad)
+ if nsup then
+ setsup(parent,nsup)
+ setsup(next_noad)
+ end
+ if nsub then
+ setsub(parent,nsub)
+ setsub(next_noad)
+ end
flush_node(next_noad)
collapsepair(pointer,what,n,parent,true)
- -- if not nested and movesub[current_char] then
- -- movesubscript(parent,current_nucleus,current_char)
- -- end
end
- elseif not nested and movesub[current_char] then
- movesubscript(parent,current_nucleus,current_char)
+-- elseif not nested and movesub[current_char] then
+-- movesubscript(parent,current_nucleus,current_char)
end
end
end
- elseif not nested and movesub[current_char] then
- movesubscript(parent,current_nucleus,current_char)
+-- elseif not nested and movesub[current_char] then
+-- movesubscript(parent,current_nucleus,current_char)
end
- elseif not nested and movesub[current_char] then
- movesubscript(parent,current_nucleus,current_char)
+-- elseif not nested and movesub[current_char] then
+-- movesubscript(parent,current_nucleus,current_char)
end
- elseif not nested and movesub[current_char] then
- movesubscript(parent,current_nucleus,current_char)
+-- elseif not nested and movesub[current_char] then
+-- movesubscript(parent,current_nucleus,current_char)
end
+local current_char = getchar(current_nucleus)
+if movesub[current_char] then
+ movesubscript(parent,current_nucleus,current_char)
+end
end
end
end
diff --git a/tex/context/base/mkiv/node-fnt.lua b/tex/context/base/mkiv/node-fnt.lua
index 8aa088f88..0f89e581f 100644
--- a/tex/context/base/mkiv/node-fnt.lua
+++ b/tex/context/base/mkiv/node-fnt.lua
@@ -449,7 +449,7 @@ function handlers.characters(head,groupcode,size,packtype,direction)
local attr = a > 0 and 0 or false -- 0 is the savest way
for font, processors in next, usedfonts do -- unordered
for i=1,#processors do
- local h, d = processors[i](head,font,attr,direction)
+ local h, d = processors[i](head,font,attr,direction,u)
if d then
if h then
head = h
@@ -478,7 +478,7 @@ function handlers.characters(head,groupcode,size,packtype,direction)
for font, dynamics in next, attrfonts do
for attribute, processors in next, dynamics do -- unordered, attr can switch in between
for i=1,#processors do
- local h, d = processors[i](head,font,attribute,direction)
+ local h, d = processors[i](head,font,attribute,direction,a)
if d then
if h then
head = h
diff --git a/tex/context/base/mkiv/node-nut.lua b/tex/context/base/mkiv/node-nut.lua
index b23709ff0..cbaeb8977 100644
--- a/tex/context/base/mkiv/node-nut.lua
+++ b/tex/context/base/mkiv/node-nut.lua
@@ -205,7 +205,9 @@ end
if LUATEXVERSION < 1.005 then
local getfield = direct.getfield
- function direct.getsup(n) return getfield(n,"sup") end
+ function direct.getnucleus(n) return getfield(n,"nucleus") end
+ function direct.getsub (n) return getfield(n,"sub") end
+ function direct.getsup (n) return getfield(n,"sup") end
end
-- if LUATEXVERSION < 1.004 then
@@ -856,8 +858,52 @@ end
-- here:
-nodes.set_synctex_line = node.set_synctex_line
-nodes.set_synctex_tag = node.set_synctex_tag
-
nuts.get_synctex_fields = direct.get_synctex_fields
nuts.set_synctex_fields = direct.set_synctex_fields
+
+-- for now
+
+nodes.uses_font = nodes.uses_font
+nuts.uses_font = direct.uses_font
+
+if not nuts.uses_font then
+
+ local glyph_code = nodecodes.glyph
+ local getdisc = nuts.getdisc
+ local getfont = nuts.getfont
+ local traverse_id = nuts.traverse_id
+ local tonut = nodes.tonut
+
+ function nuts.uses_font(n,font)
+ local pre, post, replace = getdisc(n)
+ if pre then
+ -- traverse_char
+ for n in traverse_id(glyph_code,pre) do
+ if getfont(n) == font then
+ return true
+ end
+ end
+ end
+ if post then
+ for n in traverse_id(glyph_code,post) do
+ if getfont(n) == font then
+ return true
+ end
+ end
+ end
+ if replace then
+ for n in traverse_id(glyph_code,replace) do
+ if getfont(n) == font then
+ return true
+ end
+ end
+ end
+ return false
+ end
+
+ function nodes.uses_font(n,font)
+ return nuts.uses_font(tonut(n),font)
+ end
+
+end
+
diff --git a/tex/context/base/mkiv/node-syn.lua b/tex/context/base/mkiv/node-syn.lua
index 1b8e07382..a196f0b95 100644
--- a/tex/context/base/mkiv/node-syn.lua
+++ b/tex/context/base/mkiv/node-syn.lua
@@ -18,11 +18,13 @@ if not modules then modules = { } end modules ['node-syn'] = {
--
-- Possible optimizations: pack whole lines.
+-- InverseSearchCmdLine = mtxrun.exe --script synctex --edit --name="%f" --line="%l" $
+
local type, rawset = type, rawset
local concat = table.concat
local formatters = string.formatters
-local trace = false trackers.register("system.syntex.visualize", function(v) trace = v end)
+local trace = false trackers.register("system.synctex.visualize", function(v) trace = v end)
local nuts = nodes.nuts
local tonut = nuts.tonut
@@ -65,7 +67,12 @@ local a_fontkern = attributes.private("fontkern")
local get_synctex_fields = nuts.get_synctex_fields
local set_synctex_fields = nuts.set_synctex_fields
-local set_syntex_tag = nodes.set_synctex_tag
+local set_synctex_line = tex.set_synctex_line
+local set_synctex_tag = tex.set_synctex_tag
+local force_synctex_tag = tex.force_synctex_tag
+local force_synctex_line = tex.force_synctex_line
+----- get_synctex_tag = tex.get_synctex_tag
+----- get_synctex_line = tex.get_synctex_line
local getcount = tex.getcount
local setcount = tex.setcount
@@ -75,6 +82,9 @@ local getpos = function()
return getpos()
end
+
+local eol = "\010"
+
local f_glue = formatters["g%i,%i:%i,%i"]
local f_glyph = formatters["x%i,%i:%i,%i"]
local f_kern = formatters["k%i,%i:%i,%i:%i"]
@@ -88,6 +98,10 @@ local f_vvoid = formatters["v%i,%i:%i,%i:%i,%i,%i"]
local characters = fonts.hashes.characters
+local foundintree = resolvers.foundintree
+local suffixonly = file.suffix
+local nameonly = file.nameonly
+
local synctex = { }
luatex.synctex = synctex
@@ -95,25 +109,56 @@ luatex.synctex = synctex
local noftags = 0
local stnums = { }
-local sttags = table.setmetatableindex(function(t,name)
- noftags = noftags + 1
- t[name] = noftags
- stnums[noftags] = name
- return noftags
+local nofblocked = 0
+local blockedfilenames = { }
+local blockedsuffixes = {
+ mkii = true,
+ mkiv = true,
+ mkvi = true,
+ mkix = true,
+ mkxi = true,
+ -- lfg = true,
+}
+
+
+local sttags = table.setmetatableindex(function(t,name)
+ if blockedsuffixes[suffixonly(name)] then
+ -- Just so that I don't get the ones on my development tree.
+ nofblocked = nofblocked + 1
+ return 0
+ elseif blockedfilenames[nameonly(name)] then
+ -- So we can block specific files.
+ nofblocked = nofblocked + 1
+ return 0
+ elseif foundintree(name) then
+ -- One shouldn't edit styles etc this way.
+ nofblocked = nofblocked + 1
+ return 0
+ else
+ noftags = noftags + 1
+ t[name] = noftags
+ stnums[noftags] = name
+ return noftags
+ end
end)
-function synctex.setfilename(name)
- if set_syntex_tag and name then
- set_syntex_tag(sttags[name])
+function synctex.blockfilename(name)
+ blockedfilenames[nameonly(name)] = name
+end
+
+function synctex.setfilename(name,line)
+ if force_synctex_tag and name then
+ force_synctex_tag(sttags[name])
+ if line then
+ force_synctex_line(line)
+ end
end
end
function synctex.resetfilename()
- if set_syntex_tag then
- local name = luatex.currentfile()
- if name then
- set_syntex_tag(name)
- end
+ if force_synctex_tag then
+ force_synctex_tag(0)
+ force_synctex_line(0)
end
end
@@ -128,10 +173,14 @@ local last = 0
local filesdone = 0
local enabled = false
local compact = true
+-- local compact = false
+local fulltrace = false
+-- local fulltrace = true
+local logfile = false
local function writeanchor()
local size = f:seek("end")
- f:write("!" .. (size-last) .. "\n")
+ f:write("!" .. (size-last) ..eol)
last = size
end
@@ -139,34 +188,35 @@ local function writefiles()
local total = #stnums
if filesdone < total then
for i=filesdone+1,total do
- f:write("Input:"..i..":"..stnums[i].."\n")
+ f:write("Input:"..i..":"..stnums[i]..eol)
end
filesdone = total
end
end
local function flushpreamble()
- local jobname = tex.jobname
- stnums[0] = jobname
- f = io.open(file.replacesuffix(jobname,"syncctx"),"w")
- f:write("SyncTeX Version:1\n")
- f:write("Input:0:"..jobname.."\n")
+ logfile = file.replacesuffix(tex.jobname,"syncctx")
+ f = io.open(logfile,"wb")
+ f:write("SyncTeX Version:1"..eol)
writefiles()
- f:write("Output:pdf\n")
- f:write("Magnification:1000\n")
- f:write("Unit:1\n")
- f:write("X Offset:0\n")
- f:write("Y Offset:0\n")
- f:write("Content:\n")
+ f:write("Output:pdf"..eol)
+ f:write("Magnification:1000"..eol)
+ f:write("Unit:1"..eol)
+ f:write("X Offset:0"..eol)
+ f:write("Y Offset:0"..eol)
+ f:write("Content:"..eol)
flushpreamble = writefiles
end
local function flushpostamble()
+ if not f then
+ return
+ end
writeanchor()
- f:write("Postamble:\n")
- f:write("Count:"..nofobjects.."\n")
+ f:write("Postamble:"..eol)
+ f:write("Count:"..nofobjects..eol)
writeanchor()
- f:write("Post scriptum:\n")
+ f:write("Post scriptum:"..eol)
f:close()
enabled = false
end
@@ -261,7 +311,14 @@ end
-- end))
-- end
-local function collect(head,t,l)
+-- todo: why not only lines
+-- todo: larger ranges
+
+-- color is already handled so no colors
+
+-- we can have ranges .. more efficient but a bit more complex to analyze ... some day
+
+local function collect(head,t,l,dp,ht)
local current = head
while current do
local id = getid(current)
@@ -270,9 +327,20 @@ local function collect(head,t,l)
local last = current
while true do
id = getid(current)
+ -- traditionally glyphs have no synctex code which works sort of ok
+ -- but not when we don't leave hmode cq. have no par
+ --
if id == glyph_code or id == disc_code then
+ local tc, lc = get_synctex_fields(current)
+ if tc and tc > 0 then
+ t, l = tc, lc
+ end
last = current
elseif id == kern_code and (getsubtype(current) == fontkern_code or getattr(current,a_fontkern)) then
+ local tc, lc = get_synctex_fields(current)
+ if tc and tc > 0 then
+ t, l = tc, lc
+ end
last = current
else
if id == glue_code then
@@ -285,16 +353,13 @@ local function collect(head,t,l)
end
local w, h, d = getdimensions(first,getnext(last))
-- local w, h, d = getrangedimensions(head,first,getnext(last))
+ if dp and d < dp then d = dp end
+ if ht and h < ht then h = ht end
+ if h < 655360 then h = 655360 end
+ if d < 327680 then d = 327680 end
if trace then
- -- color is already handled so no colors
- head = insert_before(head,first,new_hlist(new_rule(w,32768,32768)))
+ head = insert_before(head,first,new_hlist(new_rule(w,fulltrace and h or 32768,fulltrace and d or 32768)))
end
-if h < 655360 then
- h = 655360
-end
-if d < 327680 then
- d = 327680
-end
head = x_hlist(head,first,t,l,w,h,d)
break
end
@@ -302,16 +367,13 @@ end
if not current then
local w, h, d = getdimensions(first,getnext(last))
-- local w, h, d = getrangedimensions(head,first,getnext(last))
+ if dp and d < dp then d = dp end
+ if ht and h < ht then h = ht end
+ if h < 655360 then h = 655360 end
+ if d < 327680 then d = 327680 end
if trace then
- -- color is already handled so no colors
- head = insert_before(head,first,new_hlist(new_rule(w,32768,32768)))
+ head = insert_before(head,first,new_hlist(new_rule(w,fulltrace and h or 32768,fulltrace and d or 32768)))
end
-if h < 655360 then
- h = 655360
-end
-if d < 327680 then
- d = 327680
-end
head = x_hlist(head,first,t,l,w,h,d)
return head
end
@@ -341,15 +403,15 @@ end
end
elseif list then
-- head = b_hlist(head,current,t,l,w,h,d)
- head = b_hlist(head,current,0,0,w,h,d)
- local l = collect(list,t,l)
+ head = b_hlist(head,current,0,0,w,h,d) -- todo: only d h when line
+ local l = collect(list,t,l,d,h)
if l ~= list then
setlist(current,l)
end
head, current = e_hlist(head,current)
else
-- head = x_hlist(head,current,t,l,w,h,d)
- head = x_hlist(head,current,0,0,w,h,d)
+ head = x_hlist(head,current,0,0,w,h,d) -- todo: only d h when line
end
end
elseif id == vlist_code then
@@ -436,37 +498,71 @@ function synctex.flush()
nofsheets = nofsheets + 1 -- could be realpageno
flushpreamble()
writeanchor()
- f:write("{"..nofsheets.."\n")
+ f:write("{"..nofsheets..eol)
if compact then
- f:write(f_vlist(0,0,0,0,tex.pagewidth,tex.pageheight,0))
- f:write("\n")
+ -- f:write(f_vlist(0,0,0,0,tex.pagewidth,tex.pageheight,0))
+ f:write(f_hlist(0,0,0,0,0,0,0))
+ f:write(eol)
+ f:write(f_vlist(0,0,0,0,0,0,0))
+ f:write(eol)
end
- f:write(concat(result,"\n"))
+ f:write(concat(result,eol))
if compact then
- f:write("\n")
+ f:write(eol)
f:write(s_vlist)
+ f:write(eol)
+ f:write(s_hlist)
end
- f:write("\n")
+ f:write(eol)
writeanchor()
- f:write("}"..nofsheets.."\n")
+ f:write("}"..nofsheets..eol)
nofobjects = nofobjects + 2
result, r = { }, 0
end
end
-function synctex.enable()
- if not enabled and node.set_synctex_mode then
- enabled = true
- node.set_synctex_mode(1)
- tex.normalsynctex = 0
- nodes.tasks.appendaction("shipouts", "after", "nodes.synctex.collect")
+local details = 1
+local state = 0
+
+directives.register("system.synctex.details",function(v)
+ details = tonumber(v) or 1
+end)
+
+local set_synctex_mode = tex.set_synctex_mode
+
+if set_synctex_mode then
+
+ function synctex.enable()
+ if not enabled then
+ enabled = true
+ state = details or 1
+ set_synctex_mode(state)
+ tex.normalsynctex = 0
+ directives.enable("system.synctex.xml")
+ nodes.tasks.appendaction("shipouts", "after", "nodes.synctex.collect")
+ elseif state > 0 then
+ set_synctex_mode(state)
+ end
end
-end
-function synctex.finish()
- if enabled then
- flushpostamble()
+ function synctex.disable()
+ if enabled then
+ set_synctex_mode(0)
+ end
end
+
+ function synctex.finish()
+ if enabled then
+ flushpostamble()
+ end
+ end
+
+else
+
+ function synctex.enable () end
+ function synctex.disable() end
+ function synctex.finish () end
+
end
-- not the best place
@@ -478,27 +574,60 @@ nodes.tasks.appendaction("shipouts", "after", "luatex.synctex.collect")
-- moved here
local report_system = logs.reporter("system")
-local synctex = false
+local synctex_used = false
-directives.register("system.synctex", function(v)
+local function setsynctex(v)
if v == "context" then
- luatex.synctex.enable()
+ synctex.enable()
setcount("normalsynctex",0)
- synctex = true
+ synctex_used = true
else
v = tonumber(v) or (toboolean(v,true) and 1) or (v == "zipped" and 1) or (v == "unzipped" and -1) or 0
setcount("normalsynctex",v)
- synctex = v ~= 0
+ synctex_used = v ~= 0
end
- if synctex then
+ if synctex_used then
report_system("synctex functionality is enabled (%s), expect runtime overhead!",tostring(v))
else
report_system("synctex functionality is disabled!")
end
+end
+
+directives.register("system.synctex", setsynctex) -- 0|1|false|true|zipped|unzipped|context
+
+directives.register("system.synctex.context", function(v)
+ setsynctex(v and "context" or false)
end)
statistics.register("synctex tracing",function()
- if synctex or getcount("normalsynctex") ~= 0 then
- return "synctex has been enabled (extra log file generated)"
+ if synctex_used or getcount("normalsynctex") ~= 0 then
+ return string.format("%i referenced files, %i files ignored, logfile: %s",noftags,nofblocked,logfile)
end
end)
+
+interfaces.implement {
+ name = "synctexblockfilename",
+ arguments = "string",
+ actions = synctex.blockfilename,
+}
+
+interfaces.implement {
+ name = "synctexsetfilename",
+ arguments = "string",
+ actions = synctex.setfilename,
+}
+
+interfaces.implement {
+ name = "synctexresetfilename",
+ actions = synctex.resetfilename,
+}
+
+interfaces.implement {
+ name = "synctexenable",
+ actions = synctex.enable,
+}
+
+interfaces.implement {
+ name = "synctexdisable",
+ actions = synctex.disable,
+}
diff --git a/tex/context/base/mkiv/page-one.mkiv b/tex/context/base/mkiv/page-one.mkiv
index 9bd6951f3..59efbd204 100644
--- a/tex/context/base/mkiv/page-one.mkiv
+++ b/tex/context/base/mkiv/page-one.mkiv
@@ -456,7 +456,6 @@
\fi\fi
\fi}
-
\def\page_one_place_float_here_indeed
{\ifgridsnapping
% otherwise real bad outcome
diff --git a/tex/context/base/mkiv/publ-aut.lua b/tex/context/base/mkiv/publ-aut.lua
index 6ad8b1296..6a8e52cb8 100644
--- a/tex/context/base/mkiv/publ-aut.lua
+++ b/tex/context/base/mkiv/publ-aut.lua
@@ -313,12 +313,12 @@ local function the_initials(initials,symbol,connector)
s = s + 1 ; set[s] = connector
end
s = s + 1 ; set[s] = initial[i]
- s = s + 1 ; set[s] = symbol
end
r = r + 1 ; result[r] = concat(set)
else
- r = r + 1 ; result[r] = initial .. symbol
+ r = r + 1 ; result[r] = initial
end
+ r = r + 1 ; result[r] = symbol
end
return result
end
@@ -339,8 +339,9 @@ local ctx_btxstopauthor = context.btxstopauthor
local concatstate = publications.concatstate
local f_invalid = formatters["<invalid %s: %s>"]
-local currentauthordata = nil
-local currentauthorsymbol = nil
+local currentauthordata = nil
+local currentauthorsymbol = nil
+local currentauthorconnector = nil
local manipulators = typesetters.manipulators
local splitmanipulation = manipulators.splitspecification
@@ -359,7 +360,7 @@ local function value(i,field)
end
implement { name = "btxcurrentfirstnames", arguments = "integer", actions = function(i) local v = value(i,"firstnames") if v then context(concat(v," ")) end end }
-implement { name = "btxcurrentinitials", arguments = "integer", actions = function(i) local v = value(i,"initials") if v then context(concat(the_initials(v,currentauthorsymbol))) end end }
+implement { name = "btxcurrentinitials", arguments = "integer", actions = function(i) local v = value(i,"initials") if v then context(concat(the_initials(v,currentauthorsymbol,currentauthorconnector))) end end }
implement { name = "btxcurrentjuniors", arguments = "integer", actions = function(i) local v = value(i,"juniors") if v then context(concat(v," ")) end end }
implement { name = "btxcurrentsurnames", arguments = "integer", actions = function(i) local v = value(i,"surnames") if v then context(concat(v," ")) end end }
implement { name = "btxcurrentvons", arguments = "integer", actions = function(i) local v = value(i,"vons") if v then context(concat(v," ")) end end }
@@ -408,6 +409,7 @@ local function btxauthor(dataset,tag,field,settings)
local etallast = etaloption[v_last]
local combiner = settings.combiner
local symbol = settings.symbol
+ local connector = settings.connector
local index = settings.index
if not combiner or combiner == "" then
combiner = "normal"
@@ -421,8 +423,9 @@ local function btxauthor(dataset,tag,field,settings)
else
etallast = false
end
- currentauthordata = split
- currentauthorsymbol = symbol
+ currentauthordata = split
+ currentauthorsymbol = symbol
+ currentauthorconnector = connector
local function oneauthor(i,last,justone)
local author = split[i]
@@ -508,6 +511,7 @@ implement {
{ "etaldisplay" },
{ "etaloption" },
{ "symbol" },
+ { "connector" },
}
}
}
diff --git a/tex/context/base/mkiv/publ-ini.mkiv b/tex/context/base/mkiv/publ-ini.mkiv
index 5f9aaa692..1a45ec587 100644
--- a/tex/context/base/mkiv/publ-ini.mkiv
+++ b/tex/context/base/mkiv/publ-ini.mkiv
@@ -1084,6 +1084,7 @@
etaldisplay {\btxparameter\c!etaldisplay}%
etaloption {\btxparameter\c!etaloption}%
symbol {\btxparameter{\c!stopper:initials}}%
+ connector {\btxparameter{\c!connector:initials}}%
}%
\relax
\endgroup}
diff --git a/tex/context/base/mkiv/status-files.pdf b/tex/context/base/mkiv/status-files.pdf
index ac5183783..a38925207 100644
--- a/tex/context/base/mkiv/status-files.pdf
+++ b/tex/context/base/mkiv/status-files.pdf
Binary files differ
diff --git a/tex/context/base/mkiv/status-lua.pdf b/tex/context/base/mkiv/status-lua.pdf
index 7e492a7e7..8e140db0c 100644
--- a/tex/context/base/mkiv/status-lua.pdf
+++ b/tex/context/base/mkiv/status-lua.pdf
Binary files differ
diff --git a/tex/context/base/mkiv/tabl-frm.mkiv b/tex/context/base/mkiv/tabl-frm.mkiv
index 639d6f06d..c0f4b7cd1 100644
--- a/tex/context/base/mkiv/tabl-frm.mkiv
+++ b/tex/context/base/mkiv/tabl-frm.mkiv
@@ -134,6 +134,56 @@
{\framedparameter\c!after
\endgroup}
+% \unexpanded\def\startframedrow
+% {\advance\c_tabl_framed_r\plusone
+% \c_tabl_framed_c\zerocount
+% \d_tabl_framed_h\zeropoint
+% \bgroup
+% \edef\currentframed{\number\c_tabl_framed_r}%
+% \edef\currentframed
+% {\??framedtablerow\currentframedtable
+% \ifcsname\??framedtablerow\currentframedtable:\currentframed\endcsname
+% :\currentframed
+% \else\ifcsname\??framedtablerow\currentframedtable:\v!each\endcsname
+% :\v!each
+% \fi\fi}%
+% \dosingleempty\pack_framed_start_framed_nop_indeed}
+%
+% \unexpanded\def\stopframedrow
+% {\dofastloopcs\c_tabl_framed_c\tabl_framed_flush_row
+% \stopframed
+% \nointerlineskip
+% \vskip\zeropoint\relax
+% \framedparameter\c!inbetween}
+%
+% \unexpanded\def\tabl_framed_flush_row
+% {\vpack to \d_tabl_framed_h{\flushbox\??framedtable{\number\fastloopindex}\vfill}%
+% \ifdim\d_tabl_framed_d=\zeropoint\else\kern\d_tabl_framed_d\fi}
+%
+% \unexpanded\def\startframedcell
+% {\advance\c_tabl_framed_c\plusone
+% \setbox\b_tabl_framed\hpack\bgroup
+% %\bgroup
+% \edef\currentframed{\number\c_tabl_framed_c}%
+% \edef\currentframed
+% {\??framedtablecolumn\currentframedtable
+% \ifcsname\??framedtablecolumn\currentframedtable:\currentframed\endcsname
+% :\currentframed
+% \else\ifcsname\??framedtablecolumn\currentframedtable:\v!each\endcsname
+% :\v!each
+% \fi\fi}%
+% \dosingleempty\pack_framed_start_framed_nop_indeed}
+%
+% \unexpanded\def\stopframedcell
+% {\stopframed
+% %\egroup
+% \ifdim\ht\b_tabl_framed>\d_tabl_framed_h
+% \d_tabl_framed_h\ht\b_tabl_framed
+% \fi
+% \savebox\??framedtable{\number\c_tabl_framed_c}{\box\b_tabl_framed}}
+
+% a two pass variant that deals with the height .. so no catcode changes here
+
\unexpanded\def\startframedrow
{\advance\c_tabl_framed_r\plusone
\c_tabl_framed_c\zerocount
@@ -160,6 +210,52 @@
{\vpack to \d_tabl_framed_h{\flushbox\??framedtable{\number\fastloopindex}\vfill}%
\ifdim\d_tabl_framed_d=\zeropoint\else\kern\d_tabl_framed_d\fi}
+\newcount\c_tabl_framed_pass
+
+\let\stopframedrow\relax
+
+\unexpanded\def\startframedrow#1\stopframedrow
+ {\advance\c_tabl_framed_r\plusone
+ \startframedrow_one#1\stopframedrow_one
+ \startframedrow_two#1\stopframedrow_two}
+
+\def\startframedrow_one
+ {\bgroup
+ \c_tabl_framed_pass\plusone
+ \c_tabl_framed_c\zerocount
+ \d_tabl_framed_h\zeropoint
+ \settrialtypesetting
+ \gobblesingleempty}
+
+\unexpanded\def\stopframedrow_one
+ {\normalexpanded{\egroup\d_tabl_framed_h\the\d_tabl_framed_h\relax}}
+
+\def\startframedrow_two
+ {\bgroup
+ \c_tabl_framed_c\zerocount
+ \c_tabl_framed_pass\plustwo
+ \edef\currentframed{\number\c_tabl_framed_r}%
+ \edef\currentframed
+ {\??framedtablerow\currentframedtable
+ \ifcsname\??framedtablerow\currentframedtable:\currentframed\endcsname
+ :\currentframed
+ \else\ifcsname\??framedtablerow\currentframedtable:\v!each\endcsname
+ :\v!each
+ \fi\fi}%
+ \dosingleempty\pack_framed_start_framed_nop_indeed}
+
+\unexpanded\def\stopframedrow_two
+ {\dofastloopcs\c_tabl_framed_c\tabl_framed_flush_row
+ \stopframed
+ \nointerlineskip
+ \vskip\zeropoint\relax
+ \framedparameter\c!inbetween}
+
+\unexpanded\def\tabl_framed_flush_row_two
+ {\vpack to \d_tabl_framed_h{\flushbox\??framedtable{\number\fastloopindex}\vfill}%
+ \ifdim\d_tabl_framed_d=\zeropoint\else\kern\d_tabl_framed_d\fi}
+
+
\unexpanded\def\startframedcell
{\advance\c_tabl_framed_c\plusone
\setbox\b_tabl_framed\hpack\bgroup
@@ -172,15 +268,26 @@
\else\ifcsname\??framedtablecolumn\currentframedtable:\v!each\endcsname
:\v!each
\fi\fi}%
+ \ifcase\c_tabl_framed_pass
+ \or
+ \letframedparameter\c!background\empty
+ \letframedparameter\c!frame\v!off
+ \or
+ \letframedparameter\c!height\d_tabl_framed_h
+ \fi
\dosingleempty\pack_framed_start_framed_nop_indeed}
\unexpanded\def\stopframedcell
{\stopframed
%\egroup
- \ifdim\ht\b_tabl_framed>\d_tabl_framed_h
- \d_tabl_framed_h\ht\b_tabl_framed
- \fi
- \savebox\??framedtable{\number\c_tabl_framed_c}{\box\b_tabl_framed}}
+ \ifcase\c_tabl_framed_pass
+ \or
+ \ifdim\ht\b_tabl_framed>\d_tabl_framed_h
+ \d_tabl_framed_h\ht\b_tabl_framed
+ \fi
+ \else
+ \savebox\??framedtable{\number\c_tabl_framed_c}{\box\b_tabl_framed}%
+ \fi}
\protect \endinput
diff --git a/tex/context/base/mkiv/trac-set.lua b/tex/context/base/mkiv/trac-set.lua
index d0047650f..77de85e8e 100644
--- a/tex/context/base/mkiv/trac-set.lua
+++ b/tex/context/base/mkiv/trac-set.lua
@@ -219,13 +219,30 @@ function setters.show(t)
local name = list[k]
local functions = t.data[name]
if functions then
- local value, default, modules = functions.value, functions.default, #functions
- value = value == nil and "unset" or tostring(value)
- default = default == nil and "unset" or tostring(default)
- t.report("%-50s modules: %2i default: %-12s value: %-12s",name,modules,default,value)
+ local value = functions.value
+ local default = functions.default
+ local modules = #functions
+ if default == nil then
+ default = "unset"
+ elseif type(default) == "table" then
+ default = concat(default,"|")
+ else
+ default = tostring(default)
+ end
+ if value == nil then
+ value = "unset"
+ elseif type(value) == "table" then
+ value = concat(value,"|")
+ else
+ value = tostring(value)
+ end
+ t.report(name)
+ t.report(" modules : %i",modules)
+ t.report(" default : %s",default)
+ t.report(" value : %s",value)
+ t.report()
end
end
- t.report()
end
-- we could have used a bit of oo and the trackers:enable syntax but
diff --git a/tex/context/base/mkiv/trac-tex.lua b/tex/context/base/mkiv/trac-tex.lua
index 66cdc2c91..38035a044 100644
--- a/tex/context/base/mkiv/trac-tex.lua
+++ b/tex/context/base/mkiv/trac-tex.lua
@@ -84,7 +84,7 @@ local function saveusedfilesintrees(format)
jobname = environment.jobname or "?",
version = environment.version or "?",
kind = environment.kind or "?",
- files = resolvers.instance.foundintrees
+ files = resolvers.foundintrees()
}
local filename = file.replacesuffix(environment.jobname or "context-job",'jlg')
if format == "lua" then