Diffstat (limited to 'tex/context')
-rw-r--r--  tex/context/base/back-exp.lua       |  96
-rw-r--r--  tex/context/base/back-exp.mkiv      |   2
-rw-r--r--  tex/context/base/cont-new.mkii      |   2
-rw-r--r--  tex/context/base/cont-new.mkiv      |   2
-rw-r--r--  tex/context/base/context.mkii       |   2
-rw-r--r--  tex/context/base/context.mkiv       |   2
-rw-r--r--  tex/context/base/core-con.mkiv      |   2
-rw-r--r--  tex/context/base/data-tmp.lua       |   2
-rw-r--r--  tex/context/base/font-ini.mkiv      |   2
-rw-r--r--  tex/context/base/font-mis.lua       |   2
-rw-r--r--  tex/context/base/font-ota.lua       |   9
-rw-r--r--  tex/context/base/font-otd.lua       |   9
-rw-r--r--  tex/context/base/font-otf.lua       |  95
-rw-r--r--  tex/context/base/font-otn.lua       | 389
-rw-r--r--  tex/context/base/font-otp.lua       |  24
-rw-r--r--  tex/context/base/lpdf-epd.lua       |   5
-rw-r--r--  tex/context/base/node-ini.lua       |   2
-rw-r--r--  tex/context/base/s-abr-01.tex       |   1
-rw-r--r--  tex/context/base/scrn-pag.mkvi      | 125
-rw-r--r--  tex/context/base/status-files.pdf   | bin 23581 -> 23665 bytes
-rw-r--r--  tex/context/base/status-lua.pdf     | bin 154910 -> 154918 bytes
-rw-r--r--  tex/context/base/trac-set.lua       |   6
-rw-r--r--  tex/context/base/util-deb.lua       |   2
23 files changed, 526 insertions(+), 255 deletions(-)
diff --git a/tex/context/base/back-exp.lua b/tex/context/base/back-exp.lua
index 8450b36f1..0d5b7cede 100644
--- a/tex/context/base/back-exp.lua
+++ b/tex/context/base/back-exp.lua
@@ -18,6 +18,8 @@ if not modules then modules = { } end modules ['back-exp'] = {
-- todo: less attributes e.g. internal only first node
-- todo: build xml tree in mem (handy for cleaning)
+-- delimited: left/right string (needs marking)
+
local nodecodes = nodes.nodecodes
local traverse_nodes = node.traverse
local hlist_code = nodecodes.hlist
@@ -59,6 +61,8 @@ local nodes = nodes
local attributes = attributes
local variables = interfaces.variables
+local settings_to_array = utilities.parsers.settings_to_array
+
local setmetatableindex = table.setmetatableindex
local tasks = nodes.tasks
local fontchar = fonts.hashes.characters
@@ -136,22 +140,59 @@ local extras = { }
local nofbreaks = 0
local used = { }
local exporting = false
-
-setmetatableindex(used, function(t,k) if k then local v = { } t[k] = v return v end end)
-
local last = nil
local lastpar = nil
-local joiner_1 = " "
-local joiner_2 = " " -- todo: test if this one can always be ""
-local joiner_3 = " "
-local joiner_4 = " "
-local joiner_5 = " "
-local joiner_6 = " "
-local joiner_7 = "\n"
-local joiner_8 = " "
-local joiner_9 = " "
-local joiner_0 = " "
+setmetatableindex(used, function(t,k)
+ if k then
+ local v = { }
+ t[k] = v
+ return v
+ end
+end)
+
+local joiner_1 = " "
+local joiner_2 = " " -- todo: test if this one can always be ""
+local joiner_3 = " "
+local joiner_4 = " "
+local joiner_5 = " "
+local joiner_6 = " "
+local joiner_7 = "\n"
+local joiner_8 = " "
+local joiner_9 = " "
+local joiner_0 = " "
+
+local namespaced = {
+ -- filled on
+}
+
+local namespaces = {
+ msubsup = "m",
+ msub = "m",
+ msup = "m",
+ mn = "m",
+ mi = "m",
+ ms = "m",
+ mo = "m",
+ mtext = "m",
+ mrow = "m",
+ mfrac = "m",
+ mroot = "m",
+ msqrt = "m",
+ munderover = "m",
+ munder = "m",
+ mover = "m",
+ merror = "m",
+ math = "m",
+ mrow = "m",
+}
+
+setmetatableindex(namespaced, function(t,k)
+ local namespace = namespaces[k]
+ local v = namespace and namespace .. ":" .. k or k
+ t[k] = v
+ return v
+end)
-- local P, C, Cc = lpeg.P, lpeg.C, lpeg.Cc
--
@@ -218,6 +259,7 @@ function extras.document(handle,element,detail,n,fulltag,hash)
handle:write(format(" date=%q",os.date()))
handle:write(format(" context=%q",environment.version))
handle:write(format(" version=%q",version))
+ handle:write(format(" xmlns:m=%q","http://www.w3.org/1998/Math/MathML"))
local identity = interactions.general.getidentity()
for i=1,#fields do
local key = fields[i]
@@ -588,7 +630,7 @@ function extras.tabulatecell(handle,element,detail,n,fulltag,di)
end
local function emptytag(handle,element,nature,depth)
- handle:write("\n",spaces[depth],"<",element,"/>\n")
+ handle:write("\n",spaces[depth],"<",namespaced[element],"/>\n")
end
local function begintag(handle,element,nature,depth,di,empty)
@@ -614,7 +656,7 @@ local function begintag(handle,element,nature,depth,di,empty)
linedone = false
end
end
- handle:write("<",element)
+ handle:write("<",namespaced[element])
if detail then
handle:write(" detail='",detail,"'")
end
@@ -651,14 +693,14 @@ local function endtag(handle,element,nature,depth,empty)
if not linedone then
handle:write("\n")
end
- handle:write(spaces[depth],"</",element,">\n")
+ handle:write(spaces[depth],"</",namespaced[element],">\n")
end
linedone = true
else
if empty then
handle:write("/>")
else
- handle:write("</",element,">")
+ handle:write("</",namespaced[element],">")
end
end
else
@@ -666,7 +708,7 @@ local function endtag(handle,element,nature,depth,empty)
if empty then
handle:write("/>")
else
- handle:write("</",element,">")
+ handle:write("</",namespaced[element],">")
end
linedone = false
end
@@ -1025,13 +1067,17 @@ local function stopexport(v)
report_export("saving xml data in '%s",xmlfile)
handle:write(format(xmlpreamble,tex.jobname,os.date(),environment.version,version))
if cssfile then
- if type(v) ~= "string" or cssfile == variables.yes or cssfile == "" or cssfile == xmlfile then
- cssfile = file.replacesuffix(xmlfile,"css")
- else
- cssfile = file.addsuffix(cssfile,"css")
+ local cssfiles = settings_to_array(cssfile)
+ for i=1,#cssfiles do
+ local cssfile = cssfiles[i]
+ if type(cssfile) ~= "string" or cssfile == variables.yes or cssfile == "" or cssfile == xmlfile then
+ cssfile = file.replacesuffix(xmlfile,"css")
+ else
+ cssfile = file.addsuffix(cssfile,"css")
+ end
+ report_export("adding css reference '%s",cssfile)
+ handle:write(format(csspreamble,cssfile))
end
- report_export("adding css reference '%s",cssfile)
- handle:write(format(csspreamble,cssfile))
end
flushtree(handle,tree.data)
handle:close()
@@ -1104,7 +1150,7 @@ local function startexport(v)
end
end
-directives.register("backend.export",startexport)
+directives.register("backend.export",startexport) -- maybe .name
local function injectbreak()
flushresult(entry)
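
Note on the back-exp.lua changes above: MathML element names are now prefixed lazily through the namespaced/namespaces pair, and the backend.export.css directive accepts a comma-separated list of stylesheets via settings_to_array. A minimal standalone sketch of the lazy prefix lookup follows; it uses a plain setmetatable __index instead of ConTeXt's table.setmetatableindex helper, and the table contents are illustrative.

    -- Element names listed in 'namespaces' are rewritten to "m:<name>" on
    -- first use and cached; unlisted names pass through unchanged.
    local namespaces = { math = "m", mrow = "m", mi = "m", mo = "m", mn = "m" }

    local namespaced = setmetatable({ }, {
        __index = function(t,k)
            local prefix = namespaces[k]
            local v = prefix and (prefix .. ":" .. k) or k
            t[k] = v -- cache, so the concatenation happens only once per element
            return v
        end
    })

    print(namespaced.mrow)    --> m:mrow
    print(namespaced.section) --> section (no prefix, but still cached)
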
diff --git a/tex/context/base/back-exp.mkiv b/tex/context/base/back-exp.mkiv
index 2da163a7e..09eaf0109 100644
--- a/tex/context/base/back-exp.mkiv
+++ b/tex/context/base/back-exp.mkiv
@@ -116,7 +116,7 @@
\enabledirectives
[backend.export=\backendparameter\c!export,%
backend.export.xhtml=\backendparameter\c!xhtml,%
- backend.export.css=\backendparameter\c!css]}%
+ backend.export.css={\backendparameter\c!css}]}%
\to \everysetupbackend
\protect \endinput
diff --git a/tex/context/base/cont-new.mkii b/tex/context/base/cont-new.mkii
index 7c6708b07..02109ffe8 100644
--- a/tex/context/base/cont-new.mkii
+++ b/tex/context/base/cont-new.mkii
@@ -11,7 +11,7 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-\newcontextversion{2011.04.11 18:55}
+\newcontextversion{2011.04.13 09:23}
%D This file is loaded at runtime, thereby providing an
%D excellent place for hacks, patches, extensions and new
diff --git a/tex/context/base/cont-new.mkiv b/tex/context/base/cont-new.mkiv
index 952be95dc..0953026d0 100644
--- a/tex/context/base/cont-new.mkiv
+++ b/tex/context/base/cont-new.mkiv
@@ -11,7 +11,7 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-\newcontextversion{2011.04.11 18:55}
+\newcontextversion{2011.04.13 09:23}
%D This file is loaded at runtime, thereby providing an
%D excellent place for hacks, patches, extensions and new
diff --git a/tex/context/base/context.mkii b/tex/context/base/context.mkii
index 78ab41d79..5101aaa80 100644
--- a/tex/context/base/context.mkii
+++ b/tex/context/base/context.mkii
@@ -20,7 +20,7 @@
%D your styles an modules.
\edef\contextformat {\jobname}
-\edef\contextversion{2011.04.11 18:55}
+\edef\contextversion{2011.04.13 09:23}
%D For those who want to use this:
diff --git a/tex/context/base/context.mkiv b/tex/context/base/context.mkiv
index 4028bc61b..070b2c3e8 100644
--- a/tex/context/base/context.mkiv
+++ b/tex/context/base/context.mkiv
@@ -20,7 +20,7 @@
%D your styles an modules.
\edef\contextformat {\jobname}
-\edef\contextversion{2011.04.11 18:55}
+\edef\contextversion{2011.04.13 09:23}
%D For those who want to use this:
diff --git a/tex/context/base/core-con.mkiv b/tex/context/base/core-con.mkiv
index 75f2b6acd..e43e4ecda 100644
--- a/tex/context/base/core-con.mkiv
+++ b/tex/context/base/core-con.mkiv
@@ -519,7 +519,7 @@
\normalmonth\@@dam\relax
\normalyear \@@day\relax
\fi
- \docurrentdate{#2}%
+ \docurrentdate[#2]%
\endgroup}
%D \macros
diff --git a/tex/context/base/data-tmp.lua b/tex/context/base/data-tmp.lua
index 55986d727..ec6f91e24 100644
--- a/tex/context/base/data-tmp.lua
+++ b/tex/context/base/data-tmp.lua
@@ -290,7 +290,7 @@ function caches.is_writable(filepath,filename)
return file.is_writable(tmaname)
end
-local saveoptions = { reduce = true }
+local saveoptions = { compact = true }
function caches.savedata(filepath,filename,data,raw)
local tmaname, tmcname = caches.setluanames(filepath,filename)
diff --git a/tex/context/base/font-ini.mkiv b/tex/context/base/font-ini.mkiv
index b179aae35..b0efeaee1 100644
--- a/tex/context/base/font-ini.mkiv
+++ b/tex/context/base/font-ini.mkiv
@@ -991,11 +991,13 @@
\def\dododefinefontsynonymnop[#1]%
{\let\@@ff@@features \undefined
\let\@@ff@@fallbacks\undefined
+ \let\@@ff@@goodies \undefined
\expandafter\dogetfontparameternop#1,]=,}
\def\dododefinefontsynonymyes[#1]%
{\let\@@ff@@features \undefined
\let\@@ff@@fallbacks\undefined
+ \let\@@ff@@goodies \undefined
\expandafter\dogetfontparameteryes#1,]=,}
\def\dogetfontparameternop#1=#2,%
diff --git a/tex/context/base/font-mis.lua b/tex/context/base/font-mis.lua
index 3de1cd30d..6d67e70f6 100644
--- a/tex/context/base/font-mis.lua
+++ b/tex/context/base/font-mis.lua
@@ -22,7 +22,7 @@ local handlers = fonts.handlers
handlers.otf = handlers.otf or { }
local otf = handlers.otf
-otf.version = otf.version or 2.722
+otf.version = otf.version or 2.727
otf.cache = otf.cache or containers.define("fonts", "otf", otf.version, true)
function otf.loadcached(filename,format,sub)
diff --git a/tex/context/base/font-ota.lua b/tex/context/base/font-ota.lua
index cb41194ee..1bf736531 100644
--- a/tex/context/base/font-ota.lua
+++ b/tex/context/base/font-ota.lua
@@ -56,8 +56,6 @@ process features right.</p>
-- todo: analyzers per script/lang, cross font, so we need an font id hash -> script
-- e.g. latin -> hyphenate, arab -> 1/2/3 analyze -- its own namespace
--- an example analyzer (should move to font-ota.lua)
-
local state = attributes.private('state')
function analyzers.setstate(head,font)
@@ -165,7 +163,8 @@ registerotffeature {
methods.latn = analyzers.setstate
--- this info eventually will go into char-def
+-- this info eventually will go into char-def and we will have a state
+-- table for generic then
local zwnj = 0x200C
local zwj = 0x200D
@@ -351,3 +350,7 @@ function methods.arab(head,font,attr) -- maybe make a special version with no tr
first, last = finish(first,last)
return head, done
end
+
+directives.register("otf.analyze.useunicodemarks",function(v)
+ analyzers.useunicodemarks = v
+end)
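
Note on the font-ota.lua change above: the new directive makes analyzers.useunicodemarks switchable at runtime. A hedged usage sketch, assuming the enable/disable functions that the setters framework in trac-set.lua generates for directives:

    -- flips analyzers.useunicodemarks on or off for subsequent analysis
    directives.enable ("otf.analyze.useunicodemarks")
    directives.disable("otf.analyze.useunicodemarks")
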
diff --git a/tex/context/base/font-otd.lua b/tex/context/base/font-otd.lua
index 84811f0e1..b22889217 100644
--- a/tex/context/base/font-otd.lua
+++ b/tex/context/base/font-otd.lua
@@ -77,6 +77,7 @@ function otf.setdynamics(font,attribute)
shared.features = { }
-- end of save
local set = constructors.checkedfeatures("otf",features)
+set.mode = "node" -- really needed
dsla = otf.setfeatures(tfmdata,set)
if trace_dynamics then
report_otf("setting dynamics %s: attribute %s, script %s, language %s, set: %s",contextnumbers[attribute],attribute,script,language,table.sequenced(set))
@@ -117,7 +118,7 @@ local resolved = { } -- we only resolve a font,script,language,attribute pair on
local wildcard = "*"
local default = "dflt"
-local function initialize(sequence,script,language,s_enabled,a_enabled,attr,dynamic)
+local function initialize(sequence,script,language,s_enabled,a_enabled,font,attr,dynamic)
local features = sequence.features
if features then
for kind, scripts in next, features do
@@ -149,8 +150,8 @@ local function initialize(sequence,script,language,s_enabled,a_enabled,attr,dyna
if trace_applied then
local typ, action = match(sequence.type,"(.*)_(.*)") -- brrr
report_process(
- "%s font: %03i, dynamic: %03i, kind: %s, lookup: %3i, script: %-4s, language: %-4s (%-4s), type: %s, action: %s, name: %s",
- (valid and "+") or "-",font,attr or 0,kind,s,script,language,what,typ,action,sequence.name)
+ "%s font: %03i, dynamic: %03i, kind: %s, script: %-4s, language: %-4s (%-4s), type: %s, action: %s, name: %s",
+ (valid and "+") or "-",font,attr or 0,kind,script,language,what,typ,action,sequence.name)
end
return { valid, attribute, sequence.chain or 0, kind }
end
@@ -207,7 +208,7 @@ function otf.dataset(tfmdata,sequences,font,attr)
ra = { }
rl[attr] = ra
setmetatableindex(ra, function(t,k)
- local v = initialize(sequences[k],script,language,s_enabled,a_enabled,attr,dynamic)
+ local v = initialize(sequences[k],script,language,s_enabled,a_enabled,font,attr,dynamic)
t[k] = v
return v
end)
diff --git a/tex/context/base/font-otf.lua b/tex/context/base/font-otf.lua
index e66e3c01b..8faa88b64 100644
--- a/tex/context/base/font-otf.lua
+++ b/tex/context/base/font-otf.lua
@@ -22,7 +22,7 @@ local getn = table.getn
local lpegmatch = lpeg.match
local reversed, concat, remove = table.reversed, table.concat, table.remove
local ioflush = io.flush
-local fastcopy = table.fastcopy
+local fastcopy, tohash = table.fastcopy, table.tohash
local allocate = utilities.storage.allocate
local registertracker = trackers.register
@@ -47,7 +47,7 @@ local otf = fonts.handlers.otf
otf.glists = { "gsub", "gpos" }
-otf.version = 2.722 -- beware: also sync font-mis.lua
+otf.version = 2.727 -- beware: also sync font-mis.lua
otf.cache = containers.define("fonts", "otf", otf.version, true)
local fontdata = fonts.hashes.identifiers
@@ -1017,6 +1017,11 @@ actions["prepare lookups"] = function(data,filename,raw)
end
end
+-- The reverse handler does a bit redundant splitting but it's seldom
+-- seen so we don' tbother too much. We could store the replacement
+-- in the current list (value instead of true) but it makes other code
+-- uglier. Maybe some day.
+
local function t_uncover(splitter,cache,covers)
local result = { }
for n=1,#covers do
@@ -1031,6 +1036,26 @@ local function t_uncover(splitter,cache,covers)
return result
end
+local function t_hashed(t,cache)
+ if t then
+ local h = { }
+ for i=1,#t do
+ local ti = t[i]
+ local h = cache[ti]
+ if not h then
+ h = { }
+ for i=1,#ti do
+ h[ti] = true
+ end
+ end
+ cache[ti] = h
+ end
+ return h
+ else
+ return nil
+ end
+end
+
local function s_uncover(splitter,cache,cover)
if cover == "" then
return nil
@@ -1038,17 +1063,44 @@ local function s_uncover(splitter,cache,cover)
local uncovered = cache[cover]
if not uncovered then
uncovered = lpegmatch(splitter,cover)
+ for i=1,#uncovered do
+ uncovered[i] = { [uncovered[i]] = true }
+ end
cache[cover] = uncovered
end
return uncovered
end
end
+local s_hashed = t_hashed
+
+local function r_uncover(splitter,cache,cover,replacements)
+ if cover == "" then
+ return nil
+ else
+ -- we always have current as { } even in the case of one
+ local uncovered = cover[1]
+ local replaced = cache[replacements]
+ if not replaced then
+ replaced = lpegmatch(splitter,replacements)
+ cache[replacements] = replaced
+ end
+ local nu, nr = #uncovered, #replaced
+ local r = { }
+ if nu == nr then
+ for i=1,nu do
+ r[uncovered[i]] = replaced[i]
+ end
+ end
+ return r
+ end
+end
+
actions["reorganize lookups"] = function(data,filename,raw)
-- we prefer the before lookups in a normal order
if data.lookups then
local splitter = data.helpers.tounicodetable
- local cache = { }
+ local cache, h_cache = { }, { }
for _, lookup in next, data.lookups do
local rules = lookup.rules
if rules then
@@ -1074,7 +1126,7 @@ actions["reorganize lookups"] = function(data,filename,raw)
for i=1,#before do
before[i] = before_class[before[i]] or { }
end
- rule.before = before
+ rule.before = t_hashed(before,h_cache)
end
local current = class.current
local lookups = rule.lookups
@@ -1085,14 +1137,14 @@ actions["reorganize lookups"] = function(data,filename,raw)
lookups[i] = false
end
end
- rule.current = current
+ rule.current = t_hashed(current,h_cache)
end
local after = class.after
if after then
for i=1,#after do
after[i] = after_class[after[i]] or { }
end
- rule.after = after
+ rule.after = t_hashed(after,h_cache)
end
rule.class = nil
end
@@ -1107,39 +1159,45 @@ actions["reorganize lookups"] = function(data,filename,raw)
if coverage then
local before = coverage.before
if before then
- rule.before = t_uncover(splitter,cache,reversed(before))
+ before = t_uncover(splitter,cache,reversed(before))
+ rule.before = t_hashed(before,h_cache)
end
local current = coverage.current
if current then
- rule.current = t_uncover(splitter,cache,current)
+ current = t_uncover(splitter,cache,current)
+ rule.current = t_hashed(current,h_cache)
end
local after = coverage.after
if after then
- rule.after = t_uncover(splitter,cache,after)
+ after = t_uncover(splitter,cache,after)
+ rule.after = t_hashed(after,h_cache)
end
rule.coverage = nil
end
end
- elseif format == "reversecoverage" then
+ elseif format == "reversecoverage" then -- special case, single substitution only
for i=1,#rules do
local rule = rules[i]
local reversecoverage = rule.reversecoverage
if reversecoverage then
local before = reversecoverage.before
if before then
- rule.before = t_uncover(splitter,cache,reversed(before))
+ before = t_uncover(splitter,cache,reversed(before))
+ rule.before = t_hashed(before,h_cache)
end
local current = reversecoverage.current
if current then
- rule.current = t_uncover(splitter,cache,current)
+ current = t_uncover(splitter,cache,current)
+ rule.current = t_hashed(current,h_cache)
end
local after = reversecoverage.after
if after then
- rule.after = t_uncover(splitter,cache,after)
+ after = t_uncover(splitter,cache,after)
+ rule.after = t_hashed(after,h_cache)
end
local replacements = reversecoverage.replacements
if replacements then
- rule.replacements = s_uncover(splitter,cache,replacements)
+ rule.replacements = r_uncover(splitter,cache,current,replacements)
end
rule.reversecoverage = nil
end
@@ -1151,15 +1209,18 @@ actions["reorganize lookups"] = function(data,filename,raw)
if glyphs then
local fore = glyphs.fore
if fore then
- rule.fore = s_uncover(splitter,cache,fore)
+ fore = s_uncover(splitter,cache,fore)
+ rule.before = s_hashed(fore,h_cache)
end
local back = glyphs.back
if back then
- rule.back = s_uncover(splitter,cache,back)
+ back = s_uncover(splitter,cache,back)
+ rule.after = s_hashed(back,h_cache)
end
local names = glyphs.names
if names then
- rule.names = s_uncover(splitter,cache,names)
+ names = s_uncover(splitter,cache,names)
+ rule.current = s_hashed(names,h_cache)
end
rule.glyphs = nil
end
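
Note on the font-otf.lua changes above: the reorganize-lookups pass now turns the uncovered before/current/after lists into hash sets (t_hashed, s_hashed, r_uncover), so membership tests during contextual matching become single indexed lookups, and covers seen before reuse their hash through a cache. A standalone sketch of that conversion, with illustrative names rather than the exact helpers from the patch:

    -- Each cover (an array of glyph codes) becomes a set; a cover table that
    -- was converted before reuses its hash via the cache.
    local function covers_to_hashes(covers, cache)
        if not covers then
            return nil
        end
        local result = { }
        for i=1,#covers do
            local cover = covers[i]
            local hash  = cache[cover]
            if not hash then
                hash = { }
                for j=1,#cover do
                    hash[cover[j]] = true
                end
                cache[cover] = hash
            end
            result[i] = hash
        end
        return result
    end

    local cache  = { }
    local hashed = covers_to_hashes({ { 0x0041, 0x0042 }, { 0x0043 } }, cache)
    print(hashed[1][0x0041]) --> true: membership is one table lookup
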
diff --git a/tex/context/base/font-otn.lua b/tex/context/base/font-otn.lua
index 17c1a92e9..81b3fd267 100644
--- a/tex/context/base/font-otn.lua
+++ b/tex/context/base/font-otn.lua
@@ -259,9 +259,9 @@ local function gref(n)
local description = descriptions[n]
local name = description and description.name
if name then
- return format("U+%04X (%s)",n,name)
+ return format("U+%05X (%s)",n,name)
else
- return format("U+%04X",n)
+ return format("U+%05X",n)
end
elseif not n then
return "<error in tracing>"
@@ -269,9 +269,9 @@ local function gref(n)
local num, nam = { }, { }
for i=1,#n do
local ni = n[i]
- if tonumber(di) then -- later we will start at 2
+ if tonumber(ni) then -- later we will start at 2
local di = descriptions[ni]
- num[i] = format("U+%04X",ni)
+ num[i] = format("U+%05X",ni)
nam[i] = di and di.name or "?"
end
end
@@ -444,6 +444,9 @@ local function multiple_glyphs(start,multiple)
end
return start, true
else
+ if trace_multiples then
+ logprocess("no multiple for %s",gref(start.char))
+ end
return start, false
end
end
@@ -958,11 +961,10 @@ as less as needed but that would also mke the code even more messy.</p>
local function delete_till_stop(start,stop,ignoremarks)
if start ~= stop then
-- todo keep marks
- local done = false
- while not done do
- done = start == stop
- delete_node(start,start.next)
- end
+ repeat
+ local next = start.next
+ delete_node(start,next)
+ until next == stop
end
end
@@ -973,18 +975,19 @@ match.</p>
function chainprocs.gsub_single(start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex)
-- todo: marks ?
- if not chainindex then
- delete_till_stop(start,stop) -- ,currentlookup.flags[1]
- end
+--~ if not chainindex then
+--~ delete_till_stop(start,stop) -- ,currentlookup.flags[1]
+--~ stop = start
+--~ end
local current = start
local subtables = currentlookup.subtables
-if #subtables > 1 then
- log_warning("todo: check if we need to loop over the replacements: %s",concat(subtables," "))
-end
+ if #subtables > 1 then
+ logwarning("todo: check if we need to loop over the replacements: %s",concat(subtables," "))
+ end
while current do
if current.id == glyph_code then
local currentchar = current.char
- local lookupname = subtables[1]
+ local lookupname = subtables[1] -- only 1
local replacement = lookuphash[lookupname]
if not replacement then
if trace_bugs then
@@ -1548,7 +1551,8 @@ local function normal_handle_contextchain(start,kind,chainname,contexts,sequence
else
-- todo: better space check (maybe check for glue)
local f, l = ck[4], ck[5]
- if f == l then
+ -- current match
+ if f == 1 and f == l then
-- already a hit
match = true
else
@@ -1600,8 +1604,8 @@ local function normal_handle_contextchain(start,kind,chainname,contexts,sequence
end
-- end
end
+ -- before
if match and f > 1 then
- -- before
local prev = start.prev
if prev then
local n = f-1
@@ -1638,7 +1642,7 @@ local function normal_handle_contextchain(start,kind,chainname,contexts,sequence
match = false break
end
prev = prev.prev
- elseif seq[n][32] then
+ elseif seq[n][32] then -- somewhat special, as zapfino can have many preceding spaces
n = n -1
else
match = false break
@@ -1654,9 +1658,9 @@ local function normal_handle_contextchain(start,kind,chainname,contexts,sequence
end
end
end
+ -- after
if match and s > l then
- -- after
- local current = last.next
+ local current = last and last.next
if current then
-- removed optimization for s-l == 1, we have to deal with marks anyway
local n = l + 1
@@ -1716,9 +1720,11 @@ local function normal_handle_contextchain(start,kind,chainname,contexts,sequence
local rule, lookuptype, f, l = ck[1], ck[2], ck[4], ck[5]
local char = start.char
if ck[9] then
- logwarning("%s: rule %s matches at char %s for (%s,%s,%s) chars, lookuptype %s (%s=>%s)",cref(kind,chainname),rule,gref(char),f-1,l-f+1,s-l,lookuptype,ck[9],ck[10])
+ logwarning("%s: rule %s matches at char %s for (%s,%s,%s) chars, lookuptype %s (%s=>%s)",
+ cref(kind,chainname),rule,gref(char),f-1,l-f+1,s-l,lookuptype,ck[9],ck[10])
else
- logwarning("%s: rule %s matches at char %s for (%s,%s,%s) chars, lookuptype %s",cref(kind,chainname),rule,gref(char),f-1,l-f+1,s-l,lookuptype)
+ logwarning("%s: rule %s matches at char %s for (%s,%s,%s) chars, lookuptype %s",
+ cref(kind,chainname),rule,gref(char),f-1,l-f+1,s-l,lookuptype)
end
end
local chainlookups = ck[6]
@@ -1773,7 +1779,6 @@ local function normal_handle_contextchain(start,kind,chainname,contexts,sequence
end
start = start.next
until i > nofchainlookups
-
end
else
local replacements = ck[7]
@@ -1952,6 +1957,8 @@ local function featuresprocessor(head,font,attr)
featurevalue = dataset and dataset[1] -- todo: pass to function instead of using a global
if featurevalue then
local attribute, chain, typ, subtables = dataset[2], dataset[3], sequence.type, sequence.subtables
+--~ print(typ)
+--~ table.print(table.keys(sequence))
if chain < 0 then
-- this is a limited case, no special treatments like 'init' etc
local handler = handlers[typ]
@@ -2328,13 +2335,177 @@ local function split(replacement,original)
return result
end
-local function uncover(covers,result) -- will change (we can store this in the raw table)
- local nofresults = #result
- for n=1,#covers do
- nofresults = nofresults + 1
- result[nofresults] = covers[n]
- end
-end
+-- not shared as we hook into lookups now
+
+--~ local function uncover_1(covers,result) -- multiple covers
+--~ local nofresults = #result
+--~ for n=1,#covers do
+--~ nofresults = nofresults + 1
+--~ local u = { }
+--~ local c = covers[n]
+--~ for i=1,#c do
+--~ u[c[i]] = true
+--~ end
+--~ result[nofresults] = u
+--~ end
+--~ end
+
+--~ local function uncover_2(covers,result) -- single covers (turned into multiple with n=1)
+--~ local nofresults = #result
+--~ for n=1,#covers do
+--~ nofresults = nofresults + 1
+--~ result[nofresults] = { [covers[n]] = true }
+--~ end
+--~ end
+
+--~ local function uncover_1(covers,result) -- multiple covers
+--~ local nofresults = #result
+--~ for n=1,#covers do
+--~ nofresults = nofresults + 1
+--~ result[nofresults] = covers[n]
+--~ end
+--~ end
+
+--~ local function prepare_contextchains(tfmdata)
+--~ local rawdata = tfmdata.shared.rawdata
+--~ local resources = rawdata.resources
+--~ local lookuphash = resources.lookuphash
+--~ local lookups = rawdata.lookups
+--~ if lookups then
+--~ for lookupname, lookupdata in next, rawdata.lookups do
+--~ local lookuptype = lookupdata.type
+--~ if not lookuptype then
+--~ report_prepare("missing lookuptype for %s",lookupname)
+--~ else -- => lookuphash[lookupname][unicode]
+--~ local rules = lookupdata.rules
+--~ if rules then
+--~ local fmt = lookupdata.format
+--~ -- if fmt == "coverage" then
+--~ if fmt == "coverage" or fmt == "glyphs" then
+--~ if lookuptype ~= "chainsub" and lookuptype ~= "chainpos" then
+--~ -- todo: dejavu-serif has one (but i need to see what use it has)
+--~ report_prepare("unsupported coverage %s for %s",lookuptype,lookupname)
+--~ else
+--~ local contexts = lookuphash[lookupname]
+--~ if not contexts then
+--~ contexts = { }
+--~ lookuphash[lookupname] = contexts
+--~ end
+--~ local t, nt = { }, 0
+--~ for nofrules=1,#rules do -- does #rules>1 happen often?
+--~ local rule = rules[nofrules]
+--~ local current = rule.current
+--~ local before = rule.before
+--~ local after = rule.after
+--~ local sequence = { }
+--~ if before then
+--~ uncover_1(before,sequence)
+--~ end
+--~ local start = #sequence + 1
+--~ uncover_1(current,sequence)
+--~ local stop = #sequence
+--~ if after then
+--~ uncover_1(after,sequence)
+--~ end
+--~ if sequence[1] then
+--~ nt = nt + 1
+--~ t[nt] = { nofrules, lookuptype, sequence, start, stop, rule.lookups }
+--~ for unic, _ in next, sequence[start] do
+--~ local cu = contexts[unic]
+--~ if not cu then
+--~ contexts[unic] = t
+--~ end
+--~ end
+--~ end
+--~ end
+--~ end
+--~ elseif fmt == "reversecoverage" then -- we could combine both branches (only dufference is replacements)
+--~ if lookuptype ~= "reversesub" then
+--~ report_prepare("unsupported reverse coverage %s for %s",lookuptype,lookupname)
+--~ else
+--~ local contexts = lookuphash[lookupname]
+--~ if not contexts then
+--~ contexts = { }
+--~ lookuphash[lookupname] = contexts
+--~ end
+--~ local t, nt = { }, 0
+--~ for nofrules=1,#rules do
+--~ local rule = rules[nofrules]
+--~ local current = rule.current
+--~ local before = rule.before
+--~ local after = rule.after
+--~ local replacements = rule.replacements
+--~ local sequence = { }
+--~ if before then
+--~ uncover_1(before,sequence)
+--~ end
+--~ local start = #sequence + 1
+--~ uncover_1(current,sequence)
+--~ local stop = #sequence
+--~ if after then
+--~ uncover_1(after,sequence)
+--~ end
+--~ if sequence[1] then
+--~ nt = nt + 1
+--~ t[nt] = { nofrules, lookuptype, sequence, start, stop, rule.lookups, replacements }
+--~ for unic, _ in next, sequence[start] do
+--~ local cu = contexts[unic]
+--~ if not cu then
+--~ contexts[unic] = t
+--~ end
+--~ end
+--~ end
+--~ end
+--~ end
+--~ -- elseif fmt == "glyphs" then --maybe just make then before = { fore } and share with coverage
+--~ -- if lookuptype ~= "chainsub" and lookuptype ~= "chainpos" then
+--~ -- report_prepare("unsupported coverage %s for %s",lookuptype,lookupname)
+--~ -- else
+--~ -- local contexts = lookuphash[lookupname]
+--~ -- if not contexts then
+--~ -- contexts = { }
+--~ -- lookuphash[lookupname] = contexts
+--~ -- end
+--~ -- local t, nt = { }, 0
+--~ -- for nofrules=1,#rules do -- we can make glyphs a special case (less tables)
+--~ -- local rule = rules[nofrules]
+--~ -- local current = rule.names
+--~ -- local before = rule.fore
+--~ -- local after = rule.back
+--~ -- local sequence = { }
+--~ -- if before then
+--~ -- uncover_1(before,sequence)
+--~ -- end
+--~ -- local start = #sequence + 1
+--~ -- uncover_1(current,sequence)
+--~ -- local stop = #sequence
+--~ -- if after then
+--~ -- uncover_1(after,sequence)
+--~ -- end
+--~ -- if sequence then
+--~ -- nt = nt + 1
+--~ -- t[nt] = { nofrules, lookuptype, sequence, start, stop, rule.lookups }
+--~ -- for unic, _ in next, sequence[start] do
+--~ -- local cu = contexts[unic]
+--~ -- if not cu then
+--~ -- contexts[unic] = t
+--~ -- end
+--~ -- end
+--~ -- end
+--~ -- end
+--~ -- end
+--~ end
+--~ end
+--~ end
+--~ end
+--~ end
+--~ end
+
+local valid = {
+ coverage = { chainsub = true, chainpos = true },
+ reversecoverage = { reversesub = true },
+ glyphs = { chainsub = true, chainpos = true },
+}
local function prepare_contextchains(tfmdata)
local rawdata = tfmdata.shared.rawdata
@@ -2344,122 +2515,72 @@ local function prepare_contextchains(tfmdata)
if lookups then
for lookupname, lookupdata in next, rawdata.lookups do
local lookuptype = lookupdata.type
- if not lookuptype then
- report_prepare("missing lookuptype for %s",lookupname)
- else
+ if lookuptype then
local rules = lookupdata.rules
if rules then
- local fmt = lookupdata.format
- -- lookuphash[lookupname][unicode]
- if fmt == "coverage" then -- or fmt == "class" (converted into "coverage")
- if lookuptype ~= "chainsub" and lookuptype ~= "chainpos" then
- -- todo: dejavu-serif has one (but i need to see what use it has)
- report_prepare("unsupported coverage %s for %s",lookuptype,lookupname)
- else
- local contexts = lookuphash[lookupname]
- if not contexts then
- contexts = { }
- lookuphash[lookupname] = contexts
- end
- local t, nt = { }, 0
- for nofrules=1,#rules do -- does #rules>1 happen often?
- local rule = rules[nofrules]
- local current, before, after, sequence = rule.current, rule.before, rule.after, { }
- if before then
- uncover(before,sequence)
- end
- local start = #sequence + 1
- uncover(current,sequence)
- local stop = #sequence
- if after then
- uncover(after,sequence)
- end
- if sequence[1] then
- nt = nt + 1
- t[nt] = { nofrules, lookuptype, sequence, start, stop, rule.lookups }
- for unic, _ in next, sequence[start] do
- local cu = contexts[unic]
- if not cu then
- contexts[unic] = t
- end
- end
- end
- end
+ local format = lookupdata.format
+ local validformat = valid[format]
+ if not validformat then
+ report_prepare("unsupported format %s",format)
+ elseif not validformat[lookuptype] then
+ -- todo: dejavu-serif has one (but i need to see what use it has)
+ report_prepare("unsupported %s %s for %s",format,lookuptype,lookupname)
+ else
+ local contexts = lookuphash[lookupname]
+ if not contexts then
+ contexts = { }
+ lookuphash[lookupname] = contexts
end
- elseif fmt == "reversecoverage" then
- if lookuptype ~= "reversesub" then
- report_prepare("unsupported reverse coverage %s for %s",lookuptype,lookupname)
- else
- local contexts = lookuphash[lookupname]
- if not contexts then
- contexts = { }
- lookuphash[lookupname] = contexts
- end
- local t, nt = { }, 0
- for nofrules=1,#rules do
- local rule = rules[nofrules]
- local current, before, after, replacements, sequence = rule.current, rule.before, rule.after, rule.replacements, { }
- if before then
- uncover(before,sequence)
- end
- local start = #sequence + 1
- uncover(current,sequence)
- local stop = #sequence
- if after then
- uncover(after,sequence)
- end
- if replacements then
- replacements = split(replacements,current[1])
- end
- if sequence[1] then
- -- this is different from normal coverage, we assume only replacements
- nt = nt + 1
- t[nt] = { nofrules, lookuptype, sequence, start, stop, rule.lookups, replacements }
- for unic, _ in next, sequence[start] do
- local cu = contexts[unic]
- if not cu then
- contexts[unic] = t
- end
- end
+ local t, nt = { }, 0
+ for nofrules=1,#rules do
+ local rule = rules[nofrules]
+ local current = rule.current
+ local before = rule.before
+ local after = rule.after
+ local replacements = rule.replacements
+ local sequence = { }
+ local nofsequences = 0
+ -- Eventually we can store start, stop and sequence in the cached file
+ -- but then less sharing takes place so best not do that without a lot
+ -- of profiling so let's forget about it.
+ if before then
+ for n=1,#before do
+ nofsequences = nofsequences + 1
+ sequence[nofsequences] = before[n]
end
end
- end
- elseif fmt == "glyphs" then --maybe just make then before = { fore } and share with coverage
- if lookuptype ~= "chainsub" and lookuptype ~= "chainpos" then
- report_prepare("unsupported coverage %s for %s",lookuptype,lookupname)
- else
- local contexts = lookuphash[lookupname]
- if not contexts then
- contexts = { }
- lookuphash[lookupname] = contexts
+ local start = nofsequences + 1
+ for n=1,#current do
+ nofsequences = nofsequences + 1
+ sequence[nofsequences] = current[n]
end
- local t, nt = { }, 0
- for nofrules=1,#rules do
- local rule = rules[nofrules]
- local current, before, after, sequence = rule.names, rule.fore, rule.back, { }
- if before then
- uncover(before,sequence)
- end
- local start = #sequence + 1
- uncover(current,sequence)
- local stop = #sequence
- if after then
- uncover(after,sequence)
+ local stop = nofsequences
+ if after then
+ for n=1,#after do
+ nofsequences = nofsequences + 1
+ sequence[nofsequences] = after[n]
end
- if sequence[1] then
- nt = nt + 1
- t[nt] = { nofrules, lookuptype, sequence, start, stop, rule.lookups }
- for unic, _ in next, sequence[start] do
- local cu = contexts[unic]
- if not cu then
- contexts[unic] = t
- end
+ end
+ if sequence[1] then
+ -- Replacements only happen with reverse lookups as they are single only. We
+ -- could pack them into current (replacement value instead of true) and then
+ -- use sequence[start] instead but it's somewhat ugly.
+ nt = nt + 1
+ t[nt] = { nofrules, lookuptype, sequence, start, stop, rule.lookups, replacements }
+ for unic, _ in next, sequence[start] do
+ local cu = contexts[unic]
+ if not cu then
+ contexts[unic] = t
end
end
end
end
end
+ else
+ -- no rules
end
+ else
+ report_prepare("missing lookuptype for %s",lookupname)
end
end
end
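
Note on the font-otn.lua changes above: the rewritten prepare_contextchains folds the coverage, reversecoverage and glyphs formats into one code path. Each rule's before/current/after lists are flattened into a single sequence, and the start/stop offsets of the current part are kept so the matcher can walk backwards from start-1 and forwards from stop+1. A standalone sketch of that flattening, with illustrative glyph sets (not the actual lookuphash layout):

    local function flatten_rule(before, current, after)
        local sequence, n = { }, 0
        if before then
            for i=1,#before do n = n + 1 ; sequence[n] = before[i] end
        end
        local start = n + 1
        for i=1,#current do n = n + 1 ; sequence[n] = current[i] end
        local stop = n
        if after then
            for i=1,#after do n = n + 1 ; sequence[n] = after[i] end
        end
        return sequence, start, stop
    end

    local sequence, start, stop = flatten_rule(
        { { [0x66] = true } },                 -- before : 'f'
        { { [0x69] = true } },                 -- current: 'i'
        { { [0x6E] = true, [0x6C] = true } }   -- after  : 'n' or 'l'
    )
    print(start, stop) --> 2  2
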
diff --git a/tex/context/base/font-otp.lua b/tex/context/base/font-otp.lua
index 55ddd539e..f019ade7f 100644
--- a/tex/context/base/font-otp.lua
+++ b/tex/context/base/font-otp.lua
@@ -202,14 +202,22 @@ local function packdata(data)
if rules then
for i=1,#rules do -- was next loop
local rule = rules[i]
- local r = rule.before if r then for i=1,#r do r[i] = pack(r[i],true) end end
- local r = rule.after if r then for i=1,#r do r[i] = pack(r[i],true) end end
- local r = rule.current if r then for i=1,#r do r[i] = pack(r[i],true) end end
- local r = rule.replacements if r then rule.replacements = pack(r, true) end
- local r = rule.fore if r then rule.fore = pack(r, true) end
- local r = rule.back if r then rule.back = pack(r, true) end
- local r = rule.names if r then rule.names = pack(r, true) end
- local r = rule.lookups if r then rule.lookups = pack(r) end
+--~ local r = rule.before if r then for i=1,#r do r[i] = pack(r[i],true) end end
+--~ local r = rule.after if r then for i=1,#r do r[i] = pack(r[i],true) end end
+--~ local r = rule.current if r then for i=1,#r do r[i] = pack(r[i],true) end end
+--~ local r = rule.replacements if r then rule.replacements = pack(r, true) end
+--~ local r = rule.fore if r then rule.fore = pack(r, true) end
+--~ local r = rule.back if r then rule.back = pack(r, true) end
+--~ local r = rule.names if r then rule.names = pack(r, true) end
+--~ local r = rule.lookups if r then rule.lookups = pack(r) end
+ local r = rule.before if r then for i=1,#r do r[i] = pack(r[i]) end end
+ local r = rule.after if r then for i=1,#r do r[i] = pack(r[i]) end end
+ local r = rule.current if r then for i=1,#r do r[i] = pack(r[i]) end end
+ local r = rule.replacements if r then rule.replacements = pack(r) end
+ -- local r = rule.fore if r then rule.fore = pack(r) end
+ -- local r = rule.back if r then rule.back = pack(r) end
+ -- local r = rule.names if r then rule.names = pack(r) end
+ local r = rule.lookups if r then rule.lookups = pack(r) end
end
end
end
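
Note on the font-otp.lua change above: the rule fields are now packed in their new hash-set form (the second argument to pack is dropped for before/current/after and replacements, and the fore/back/names fields are gone). The purpose of packing is sharing: structurally identical subtables are stored once in the cached font data. A generic sketch of that sharing idea, illustrative only and not the actual pack() implementation:

    local function make_packer()
        local seen = { }
        return function(t)
            -- build a canonical signature of the table's contents
            local keys = { }
            for k, v in next, t do
                keys[#keys+1] = tostring(k) .. "=" .. tostring(v)
            end
            table.sort(keys)
            local signature = table.concat(keys, ",")
            local shared = seen[signature]
            if shared then
                return shared -- reuse the previously seen identical table
            end
            seen[signature] = t
            return t
        end
    end

    local pack = make_packer()
    local a = pack { kern = 10 }
    local b = pack { kern = 10 }
    print(a == b) --> true: both rules now point at the same table
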
diff --git a/tex/context/base/lpdf-epd.lua b/tex/context/base/lpdf-epd.lua
index ae84a29c7..7cd46f962 100644
--- a/tex/context/base/lpdf-epd.lua
+++ b/tex/context/base/lpdf-epd.lua
@@ -26,6 +26,8 @@ local setmetatable, rawset = setmetatable, rawset
-- add accessor methods to the resource dict
-- a function to mark objects as to be included
+lpdf = lpdf or { }
+
local lpdf = lpdf
-- -- -- helpers -- -- --
@@ -225,6 +227,9 @@ local catalog_access = {
}
-- rawset(t,k,p)
return p
+ else
+ print(c:dictLookup(k))
+--~ return checked_access(t,k,t:dictLookup(k))
end
end
}
diff --git a/tex/context/base/node-ini.lua b/tex/context/base/node-ini.lua
index 474edfc1d..eb70fa6e6 100644
--- a/tex/context/base/node-ini.lua
+++ b/tex/context/base/node-ini.lua
@@ -211,8 +211,6 @@ function nodes.showcodes()
end
end
--- pseudoline and shape crash on node.new
-
local whatsit_node = nodecodes.whatsit
local messyhack = table.tohash { -- temporary solution
diff --git a/tex/context/base/s-abr-01.tex b/tex/context/base/s-abr-01.tex
index dc5b0475b..45c5e4297 100644
--- a/tex/context/base/s-abr-01.tex
+++ b/tex/context/base/s-abr-01.tex
@@ -52,6 +52,7 @@
\logo [CID] {cid}
\logo [CJK] {cjk}
\logo [CMR] {cmr}
+\logo [CLD] {cld}
\logo [CMYK] {cmyk}
\logo [CODHOST] {CodHost}
\logo [CONTEXT] {\ConTeXt}
diff --git a/tex/context/base/scrn-pag.mkvi b/tex/context/base/scrn-pag.mkvi
index aa1bd6999..c982eb402 100644
--- a/tex/context/base/scrn-pag.mkvi
+++ b/tex/context/base/scrn-pag.mkvi
@@ -22,65 +22,52 @@
\installparameterhandler \??sc {interactionscreen}
\installsetuphandler \??sc {interactionscreen}
-\def\scrn_canvas_synchronize_simple % this will be done differently (or disappear)
- {\begingroup
- \ifx\@@ppleft \empty
- \ifx\@@ppright \empty
- \ifx\@@pptop \empty
- \ifx\@@ppbottom \empty
- \ifx\@@pcstate\v!start
- \locationfalse\fi\else
- \locationfalse\fi\else
- \locationfalse\fi\else
- \locationfalse\fi\else
- \locationfalse\fi
- \iflocation % without screen settings
- \ctxcommand{setupcanvas{
- paperwidth = \number\paperwidth,
- paperheight = \number\paperheight
- }}%
- \else
- \ctxcommand{setupcanvas{
- paperwidth = \number\printpaperwidth,
- paperheight = \number\printpaperheight
- }}%
- \fi
- \endgroup}
+\newdimen\canvaswidth
+\newdimen\canvasheight
+\newdimen\canvasbackoffset
+\newdimen\canvastopoffset
-\def\scrn_canvas_synchronize_complex
+\def\scrn_canvas_calculate
{\begingroup
\edef\currentinteractionscreenwidth {\interactionscreenparameter\c!width }%
\edef\currentinteractionscreenheight{\interactionscreenparameter\c!height}%
+ \canvasbackoffset\backspace
+ \canvastopoffset\topoffset
\ifx\currentinteractionscreenwidth\v!fit
- \!!widtha\leftcombitotal
- \ifdim\backspace>\!!widtha
+ \global\canvaswidth\leftcombitotal
+ \ifdim\backspace>\canvaswidth
\ifdim\backspace>\zeropoint\relax
- \advance\backspace -\!!widtha
+ \global\advance\canvasbackspace -\canvaswidth
\fi
\fi
- \advance\!!widtha\dimexpr
+ \global\advance\canvaswidth\dimexpr
\rightcombitotal
+ 2\dimexpr
\interactionscreenparameter\c!backspace
+ \interactionscreenparameter\c!horoffset
\relax
\relax
+ \donetrue
\else\ifx\currentinteractionscreenwidth\v!max
- \!!widtha\printpaperwidth
+ \global\canvaswidth\printpaperwidth
+ \donetrue
\else
- \!!widtha\currentinteractionscreenwidth
- \fi\fi
- \ifdim\!!widtha>\paperwidth\ifdim\!!widtha>\zeropoint
- \global\paperwidth\!!widtha
+ \global\canvaswidth\currentinteractionscreenwidth
+ \donefalse
\fi\fi
+ \ifdone
+ \ifdim\canvaswidth>\paperwidth\ifdim\canvaswidth>\zeropoint
+ \global\paperwidth\canvaswidth % kills location=middle
+ \fi\fi
+ \fi
\ifx\currentinteractionscreenheight\v!fit
- \!!heighta\dimexpr\topheight+\topdistance\relax
- \ifdim\topspace>\!!heighta
+ \global\canvasheight\dimexpr\topheight+\topdistance\relax
+ \ifdim\topspace>\canvasheight
\ifdim\topspace>\zeropoint\relax
- \advance\topspace -\!!heighta
+ \global\advance\canvastopspace -\canvasheight
\fi
\fi
- \advance\!!heighta\dimexpr
+ \global\advance\canvasheight\dimexpr
\makeupheight
+ \bottomdistance
+ \bottomheight
@@ -89,26 +76,64 @@
+ \interactionscreenparameter\c!veroffset
\relax
\relax
+ \donetrue
\else\ifx\currentinteractionscreenheight\v!max
- \!!heighta\printpaperheight
+ \global\canvasheight\printpaperheight
+ \donetrue
\else
- \!!heighta\currentinteractionscreenheight
- \fi\fi
- \ifdim\!!heighta>\paperheight\ifdim\!!heighta>\zeropoint
- \global\paperheight\!!heighta
+ \global\canvasheight\currentinteractionscreenheight
+ \donefalse
\fi\fi
- \ctxcommand{setupcanvas{
+ \ifdone
+ \ifdim\canvasheight>\paperheight\ifdim\canvasheight>\zeropoint
+ \global\paperheight\canvasheight % kills location=middle
+ \fi\fi
+ \fi
+ \endgroup}
+
+\appendtoks
+ \ifproductionrun
+ \scrn_canvas_calculate
+ \fi
+\to \everysetupinteractionscreen
+
+\def\scrn_canvas_synchronize_simple % this will be done differently (or disappear)
+ {\begingroup
+ \ifx\@@ppleft \empty
+ \ifx\@@ppright \empty
+ \ifx\@@pptop \empty
+ \ifx\@@ppbottom \empty
+ \ifx\@@pcstate\v!start
+ \locationfalse\fi\else
+ \locationfalse\fi\else
+ \locationfalse\fi\else
+ \locationfalse\fi\else
+ \locationfalse\fi
+ \iflocation % without screen settings
+ \ctxcommand{setupcanvas{
+ paperwidth = \number\paperwidth,
+ paperheight = \number\paperheight
+ }}%
+ \else
+ \ctxcommand{setupcanvas{
+ paperwidth = \number\printpaperwidth,
+ paperheight = \number\printpaperheight
+ }}%
+ \fi
+ \endgroup}
+
+\def\scrn_canvas_synchronize_complex
+ {\ctxcommand{setupcanvas{
mode = "\interactionscreenparameter\c!option",
singlesided = \ifsinglesided true\else false\fi,
doublesided = \ifdoublesided true\else false\fi,
- leftoffset = \number\dimexpr\backoffset\relax,
- topoffset = \number\dimexpr\topoffset \relax,
- width = \number\dimexpr\!!widtha \relax,
- height = \number\dimexpr\!!heighta \relax,
+ leftoffset = \number\dimexpr\canvasbackoffset\relax,
+ topoffset = \number\dimexpr\canvastopoffset\relax,
+ width = \number\dimexpr\canvaswidth\relax,
+ height = \number\dimexpr\canvasheight\relax,
paperwidth = \number\paperwidth,
paperheight = \number\paperheight
- }}%
- \endgroup}
+ }}}
\let\scrn_canvas_synchronize\scrn_canvas_synchronize_simple
diff --git a/tex/context/base/status-files.pdf b/tex/context/base/status-files.pdf
index 1020d10ae..77031a786 100644
--- a/tex/context/base/status-files.pdf
+++ b/tex/context/base/status-files.pdf
Binary files differ
diff --git a/tex/context/base/status-lua.pdf b/tex/context/base/status-lua.pdf
index 98d73a459..405fea2c5 100644
--- a/tex/context/base/status-lua.pdf
+++ b/tex/context/base/status-lua.pdf
Binary files differ
diff --git a/tex/context/base/trac-set.lua b/tex/context/base/trac-set.lua
index 6eeb2f1d5..27b5f17f2 100644
--- a/tex/context/base/trac-set.lua
+++ b/tex/context/base/trac-set.lua
@@ -82,7 +82,7 @@ local function set(t,what,newvalue)
else
value = is_boolean(value,value)
end
- w = escapedpattern(w,true)
+ w = "^" .. escapedpattern(w,true) .. "$" -- new: anchored
for name, functions in next, data do
if done[name] then
-- prevent recursion due to wildcards
@@ -297,11 +297,11 @@ local flags = environment and environment.engineflags
if flags then
if trackers and flags.trackers then
- setters.initialize("flags","trackers", utilities.parsers.settings_to_hash(flags.trackers))
+ setters.initialize("flags","trackers", settings_to_hash(flags.trackers))
-- t_enable(flags.trackers)
end
if directives and flags.directives then
- setters.initialize("flags","directives", utilities.parsers.settings_to_hash(flags.directives))
+ setters.initialize("flags","directives", settings_to_hash(flags.directives))
-- d_enable(flags.directives)
end
end
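
Note on the trac-set.lua change above: the escaped key is now anchored, so enabling a setter such as "backend.export" no longer also matches longer keys like "backend.export.css", while explicit wildcards keep working. A standalone sketch of the difference; it assumes, as the simple-escape helper does, that "*" survives as ".*":

    local function matches(key, wanted, anchored)
        local pattern = wanted:gsub("([%.%-])", "%%%1"):gsub("%*", ".*")
        if anchored then
            pattern = "^" .. pattern .. "$"
        end
        return key:find(pattern) ~= nil
    end

    print(matches("backend.export.css", "backend.export", false)) --> true  (old, too eager)
    print(matches("backend.export.css", "backend.export", true))  --> false (new, exact match)
    print(matches("backend.export.css", "backend.*",      true))  --> true  (wildcards still work)
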
diff --git a/tex/context/base/util-deb.lua b/tex/context/base/util-deb.lua
index 5eabbc8c4..ce55de5c7 100644
--- a/tex/context/base/util-deb.lua
+++ b/tex/context/base/util-deb.lua
@@ -150,7 +150,7 @@ function inspect(i) -- global function
if ti == "table" then
table.print(i,"table")
elseif is_node and is_node(i) then
- print(node.sequenced(i))
+ table.print(nodes.astable(i),tostring(i))
else
print(tostring(i))
end