] | {M}+[N]+[H]>]+[SM]+[(VD)]
- current = analyze_next_chars_two(current,font) -- not c !
- syllableend = current
- end
- end
- end
- if syllableend then
- syllabe = syllabe + 1
- local c = syllablestart
- local n = getnext(syllableend)
- while c ~= n do
- setprop(c,a_syllabe,syllabe)
- c = getnext(c)
- end
- end
- if syllableend and syllablestart ~= syllableend then
- head, current, nbspaces = dev2_reorder(head,syllablestart,syllableend,font,attr,nbspaces)
- end
- if not syllableend and getid(current) == glyph_code and getsubtype(current) < 256 and getfont(current) == font and not getprop(current,a_state) then
- local mark = mark_four[getchar(current)]
- if mark then
- head, current = inject_syntax_error(head,current,mark)
- end
- end
- start = false
- current = getnext(current)
- end
-
- if nbspaces > 0 then
- head = replace_all_nbsp(head)
- end
-
- head = tonode(head)
-
- return head, done
-end
-
-methods.mlym = methods.deva
-methods.mlm2 = methods.dev2
diff --git a/tex/context/base/mkiv/font-off.lua b/tex/context/base/mkiv/font-off.lua
index b8fadb634..7e509c2c3 100644
--- a/tex/context/base/mkiv/font-off.lua
+++ b/tex/context/base/mkiv/font-off.lua
@@ -6,6 +6,7 @@ if not modules then modules = { } end modules ['font-off'] = {
license = "see context related readme files"
}
+local tonumber = tonumber
local lower = string.lower
local round = math.round
local setmetatableindex = table.setmetatableindex
diff --git a/tex/context/base/mkiv/font-one.lua b/tex/context/base/mkiv/font-one.lua
index d9b9c65df..a3dc7b038 100644
--- a/tex/context/base/mkiv/font-one.lua
+++ b/tex/context/base/mkiv/font-one.lua
@@ -22,12 +22,11 @@ add features.
local fonts, logs, trackers, containers, resolvers = fonts, logs, trackers, containers, resolvers
local next, type, tonumber, rawget = next, type, tonumber, rawget
-local match, gmatch, lower, gsub, strip, find = string.match, string.gmatch, string.lower, string.gsub, string.strip, string.find
-local char, byte, sub = string.char, string.byte, string.sub
+local match, gsub = string.match, string.gsub
local abs = math.abs
-local bxor, rshift = bit32.bxor, bit32.rshift
local P, S, R, Cmt, C, Ct, Cs, Carg = lpeg.P, lpeg.S, lpeg.R, lpeg.Cmt, lpeg.C, lpeg.Ct, lpeg.Cs, lpeg.Carg
local lpegmatch, patterns = lpeg.match, lpeg.patterns
+local sortedhash = table.sortedhash
local trace_features = false trackers.register("afm.features", function(v) trace_features = v end)
local trace_indexing = false trackers.register("afm.indexing", function(v) trace_indexing = v end)
@@ -41,6 +40,8 @@ local derivetable = table.derive
local findbinfile = resolvers.findbinfile
+local privateoffset = fonts.constructors and fonts.constructors.privateoffset or 0xF0000 -- 0x10FFFF
+
local definers = fonts.definers
local readers = fonts.readers
local constructors = fonts.constructors
@@ -58,7 +59,7 @@ local registerafmfeature = afmfeatures.register
local afmenhancers = constructors.enhancers.afm
local registerafmenhancer = afmenhancers.register
-afm.version = 1.512 -- incrementing this number one up will force a re-cache
+afm.version = 1.513 -- incrementing this number one up will force a re-cache
afm.cache = containers.define("fonts", "one", afm.version, true)
afm.autoprefixed = true -- this will become false some day (catches texnansi-blabla.*)
@@ -139,9 +140,9 @@ local function enhance_unify_names(data, filename)
local unicodevector = fonts.encodings.agl.unicodes -- loaded runtime in context
local unicodes = { }
local names = { }
- local private = constructors.privateoffset
+ local private = data.private or privateoffset
local descriptions = data.descriptions
- for name, blob in next, data.characters do
+ for name, blob in sortedhash(data.characters) do -- sorting is nicer for privates
local code = unicodevector[name] -- or characters.name_to_unicode[name]
if not code then
code = lpegmatch(uparser,name)
@@ -179,13 +180,13 @@ local function enhance_unify_names(data, filename)
end
end
data.characters = nil
+ data.private = private
local resources = data.resources
- local filename = resources.filename or file.removesuffix(file.basename(filename))
+ local filename = resources.filename or file.removesuffix(file.basename(filename))
resources.filename = resolvers.unresolve(filename) -- no shortcut
resources.unicodes = unicodes -- name to unicode
- resources.marks = { } -- todo
- -- resources.names = names -- name to index
- resources.private = private
+ resources.marks = { } -- todo
+ -- resources.names = names -- name to index
end
local everywhere = { ["*"] = { ["*"] = true } } -- or: { ["*"] = { "*" } }
@@ -587,6 +588,7 @@ local function copytotfm(data)
properties.fullname = fullname
properties.psname = fullname
properties.name = filename or fullname or fontname
+ properties.private = properties.private or data.private or privateoffset
--
if next(characters) then
return {
diff --git a/tex/context/base/mkiv/font-onr.lua b/tex/context/base/mkiv/font-onr.lua
index 85d3604b7..26a782649 100644
--- a/tex/context/base/mkiv/font-onr.lua
+++ b/tex/context/base/mkiv/font-onr.lua
@@ -41,7 +41,7 @@ handlers.afm = afm
local readers = afm.readers or { }
afm.readers = readers
-afm.version = 1.512 -- incrementing this number one up will force a re-cache
+afm.version = 1.513 -- incrementing this number one up will force a re-cache
--[[ldx--
We start with the basic reader which we give a name similar to the built in
@@ -89,7 +89,7 @@ do
local dup = P("dup")
local put = P("put")
local array = P("array")
- local name = P("/") * C((R("az")+R("AZ")+R("09")+S("-_."))^1)
+ local name = P("/") * C((R("az","AZ","09")+S("-_."))^1)
local digits = R("09")^1
local cardinal = digits / tonumber
local spaces = P(" ")^1
@@ -103,32 +103,40 @@ do
return position + 1
end
- local setroutine = function(str,position,index,size)
+ local setroutine = function(str,position,index,size,filename)
local forward = position + tonumber(size)
local stream = decrypt(sub(str,position+1,forward),4330,4)
routines[index] = { byte(stream,1,#stream) }
return forward
end
- local setvector = function(str,position,name,size)
+ local setvector = function(str,position,name,size,filename)
local forward = position + tonumber(size)
if n >= m then
return #str
elseif forward < #str then
+ if n == 0 and name ~= ".notdef" then
+ report_pfb("reserving .notdef at index 0 in %a",filename) -- luatex needs that
+ n = n + 1
+ end
vector[n] = name
- n = n + 1 -- we compensate for notdef at the cff loader end
+ n = n + 1
return forward
else
return #str
end
end
- local setshapes = function(str,position,name,size)
+ local setshapes = function(str,position,name,size,filename)
local forward = position + tonumber(size)
local stream = sub(str,position+1,forward)
if n > m then
return #str
elseif forward < #str then
+ if n == 0 and name ~= ".notdef" then
+ report_pfb("reserving .notdef at index 0 in %a",filename) -- luatex needs that
+ n = n + 1
+ end
vector[n] = name
n = n + 1
chars [n] = decrypt(stream,4330,4)
@@ -144,15 +152,15 @@ do
local p_filterroutines = -- dup RD or -| NP or |
(1-subroutines)^0 * subroutines * spaces * Cmt(cardinal,initialize)
- * (Cmt(cardinal * spaces * cardinal * p_rd, setroutine) * p_np + P(1))^1
+ * (Cmt(cardinal * spaces * cardinal * p_rd * Carg(1), setroutine) * p_np + P(1))^1
local p_filtershapes = -- /foo RD ND
(1-charstrings)^0 * charstrings * spaces * Cmt(cardinal,initialize)
- * (Cmt(name * spaces * cardinal * p_rd, setshapes) * p_nd + P(1))^1
+ * (Cmt(name * spaces * cardinal * p_rd * Carg(1) , setshapes) * p_nd + P(1))^1
local p_filternames = Ct (
(1-charstrings)^0 * charstrings * spaces * Cmt(cardinal,initialize)
- * (Cmt(name * spaces * cardinal, setvector) + P(1))^1
+ * (Cmt(name * spaces * cardinal * Carg(1), setvector) + P(1))^1
)
-- /Encoding 256 array
@@ -177,7 +185,7 @@ do
return
end
- if not (find(data,"!PS%-AdobeFont%-") or find(data,"%%!FontType1")) then
+ if not (find(data,"!PS-AdobeFont-",1,true) or find(data,"%!FontType1",1,true)) then
report_pfb("no font in %a",filename)
return
end
@@ -196,10 +204,9 @@ do
local glyphs = { }
routines, vector, chars = { }, { }, { }
-
if shapestoo then
- lpegmatch(p_filterroutines,binary)
- lpegmatch(p_filtershapes,binary)
+ lpegmatch(p_filterroutines,binary,1,filename)
+ lpegmatch(p_filtershapes,binary,1,filename)
local data = {
dictionaries = {
{
@@ -211,7 +218,7 @@ do
}
fonts.handlers.otf.readers.parsecharstrings(false,data,glyphs,true,true)
else
- lpegmatch(p_filternames,binary)
+ lpegmatch(p_filternames,binary,1,filename)
end
names = vector
@@ -233,7 +240,7 @@ do
if trace_loading then
report_afm("getting index data from %a",pfbname)
end
- for index=1,#vector do
+ for index=0,#vector do -- hm, zero, often space or notdef
local name = vector[index]
local char = characters[name]
if char then
@@ -241,6 +248,10 @@ do
report_afm("glyph %a has index %a",name,index)
end
char.index = index
+ else
+ if trace_indexing then
+ report_afm("glyph %a has index %a but no data",name,index)
+ end
end
end
end
@@ -410,10 +421,6 @@ local fullparser = ( P("StartFontMetrics") * fontdata * name / start )
* ( p_charmetrics + p_kernpairs + p_parameters + (1-P("EndFontMetrics")) )^0
* ( P("EndFontMetrics") / stop )
-local fullparser = ( P("StartFontMetrics") * fontdata * name / start )
- * ( p_charmetrics + p_kernpairs + p_parameters + (1-P("EndFontMetrics")) )^0
- * ( P("EndFontMetrics") / stop )
-
local infoparser = ( P("StartFontMetrics") * fontdata * name / start )
* ( p_parameters + (1-P("EndFontMetrics")) )^0
* ( P("EndFontMetrics") / stop )
diff --git a/tex/context/base/mkiv/font-osd.lua b/tex/context/base/mkiv/font-osd.lua
index ca20f6782..04fbf88f5 100644
--- a/tex/context/base/mkiv/font-osd.lua
+++ b/tex/context/base/mkiv/font-osd.lua
@@ -6,13 +6,6 @@ if not modules then modules = { } end modules ['font-osd'] = { -- script devanag
license = "see context related readme files"
}
--- I'll optimize this one with ischar (much faster) when I see a reason (read: I need a
--- proper test case first).
-
--- This is a version of font-odv.lua adapted to the new font loader and more
--- direct hashing. The initialization code has been adapted (more efficient). One day
--- I'll speed this up ... char swapping and properties.
-
-- A few remarks:
--
-- This code is a partial rewrite of the code that deals with devanagari. The data and logic
@@ -29,7 +22,8 @@ if not modules then modules = { } end modules ['font-osd'] = { -- script devanag
-- much can get messed up in over a week work) it could be that I introduced bugs. There
-- is more to gain (esp in the functions applied to a range) but I'll do that when
-- everything works as expected. Kai's original code is kept in font-odk.lua as a reference
--- so blame me (HH) for bugs.
+-- so blame me (HH) for bugs. (We no longer ship that file as the code below has diverged
+-- too much and in the meantime has more than doubled in size.)
--
-- Interesting is that Kai managed to write this on top of the existing otf handler. Only a
-- few extensions were needed, like a few more analyzing states and dealing with changed
@@ -467,7 +461,6 @@ local sequence_reorder_matras = {
nofsteps = 1,
steps = {
{
- osdstep = true,
coverage = pre_mark,
}
}
@@ -482,7 +475,6 @@ local sequence_reorder_reph = {
nofsteps = 1,
steps = {
{
- osdstep = true,
coverage = { },
}
}
@@ -497,7 +489,6 @@ local sequence_reorder_pre_base_reordering_consonants = {
nofsteps = 1,
steps = {
{
- osdstep = true,
coverage = { },
}
}
@@ -511,7 +502,7 @@ local sequence_remove_joiners = {
type = "devanagari_remove_joiners",
nofsteps = 1,
steps = {
- { osdstep = true,
+ {
coverage = both_joiners_true,
},
}
@@ -639,35 +630,22 @@ local function initializedevanagi(tfmdata)
if coverage then
local reph = false
if kind == "rphf" then
- --
- -- KE: I don't understand the rationale behind osdstep. The original if
- -- statement checked whether coverage is contextual chaining.
- --
- -- HH: The osdstep signals that we deal with our own feature here, not
- -- one in the font itself so it was just a safeguard against us overloading
- -- something driven by the font.
- --
- -- if step.osdstep then -- selective
- if true then -- always
- -- rphf acts on consonant + halant
- for k, v in next, ra do
- local r = coverage[k]
- if r then
- local h = false
- for k, v in next, halant do
- local h = r[k]
- if h then
- reph = h.ligature or false
- break
- end
- end
- if reph then
+ -- rphf acts on consonant + halant
+ for k, v in next, ra do
+ local r = coverage[k]
+ if r then
+ local h = false
+ for k, v in next, halant do
+ local h = r[k]
+ if h then
+ reph = h.ligature or false
break
end
end
+ if reph then
+ break
+ end
end
- else
- -- rphf might be result of other handler/chainproc
end
end
seqsubset[#seqsubset+1] = { kind, coverage, reph }
@@ -1140,7 +1118,7 @@ function handlers.devanagari_reorder_matras(head,start) -- no leak
head = remove_node(head,start)
setlink(start,next)
setlink(current,start)
- -- setlink(current,start,next) -- maybe
+ -- setlink(current,start,next) -- maybe
start = startnext
break
end
@@ -1200,7 +1178,7 @@ function handlers.devanagari_reorder_reph(head,start)
head = remove_node(head,start)
setlink(start,next)
setlink(current,start)
- -- setlink(current,start,next) -- maybe
+ -- setlink(current,start,next) -- maybe
start = startnext
startattr = getprop(start,a_syllabe)
break
@@ -1220,7 +1198,7 @@ function handlers.devanagari_reorder_reph(head,start)
head = remove_node(head,start)
setlink(getprev(current),start)
setlink(start,current)
- -- setlink(getprev(current),start,current) -- maybe
+ -- setlink(getprev(current),start,current) -- maybe
start = startnext
startattr = getprop(start,a_syllabe)
break
@@ -1254,7 +1232,7 @@ function handlers.devanagari_reorder_reph(head,start)
head = remove_node(head,start)
setlink(getprev(c),start)
setlink(start,c)
- -- setlink(getprev(c),start,c) -- maybe
+ -- setlink(getprev(c),start,c) -- maybe
-- end
start = startnext
startattr = getprop(start,a_syllabe)
@@ -1278,7 +1256,7 @@ function handlers.devanagari_reorder_reph(head,start)
head = remove_node(head,start)
setlink(start,getnext(current))
setlink(current,start)
- -- setlink(current,start,getnext(current)) -- maybe
+ -- setlink(current,start,getnext(current)) -- maybe
start = startnext
end
end
@@ -1295,7 +1273,11 @@ end
-- 2 Try to find a target position the same way as for pre-base matra. If it is found, reorder pre-base consonant glyph.
-- 3 If position is not found, reorder immediately before main consonant.
--- UNTESTED: NOT CALLED IN EXAMPLE
+-- Here we implement a few handlers:
+--
+-- function(head,start,dataset,sequence,lookupmatch,rlmode,skiphash,step)
+-- return head, start, done
+-- end
function handlers.devanagari_reorder_pre_base_reordering_consonants(head,start)
local current = start
@@ -1322,7 +1304,7 @@ function handlers.devanagari_reorder_pre_base_reordering_consonants(head,start)
removenode(start,start)
setlink(start,next)
setlink(current,start)
- -- setlink(current,start,next) -- maybe
+ -- setlink(current,start,next) -- maybe
start = startnext
break
end
@@ -1355,32 +1337,6 @@ function handlers.devanagari_reorder_pre_base_reordering_consonants(head,start)
return head, start, true
end
--- function handlers.devanagari_remove_joiners(head,start,kind,lookupname,replacement)
--- local stop = getnext(start)
--- local font = getfont(start)
--- while stop do
--- local char = ischar(stop)
--- if char and (char == c_zwnj or char == c_zwj) then
--- stop = getnext(stop)
--- else
--- break
--- end
--- end
--- if stop then
--- setnext(getprev(stop))
--- setprev(stop,getprev(start))
--- end
--- local prev = getprev(start)
--- if prev then
--- setnext(prev,stop)
--- end
--- if head == start then
--- head = stop
--- end
--- flush_list(start)
--- return head, stop, true
--- end
-
function handlers.devanagari_remove_joiners(head,start,kind,lookupname,replacement)
local stop = getnext(start)
local font = getfont(start)
@@ -1592,7 +1548,7 @@ local function dev2_reorder(head,start,stop,font,attr,nbspaces) -- maybe do a pa
next = getnext(current)
local tmp = getnext(next)
local changestop = next == stop
- setnext(next,nil)
+ setnext(next)
setprop(current,a_state,s_pref)
current = processcharacters(current,font)
setprop(current,a_state,s_blwf)
diff --git a/tex/context/base/mkiv/font-ota.lua b/tex/context/base/mkiv/font-ota.lua
index 232c2586a..9c6ee7403 100644
--- a/tex/context/base/mkiv/font-ota.lua
+++ b/tex/context/base/mkiv/font-ota.lua
@@ -1,6 +1,6 @@
if not modules then modules = { } end modules ['font-ota'] = {
version = 1.001,
- comment = "companion to font-otf.lua (analysing)",
+ comment = "companion to font-ini.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
license = "see context related readme files"
@@ -71,7 +71,7 @@ local s_isol = 4 local s_blwf = 10
local s_mark = 5 local s_pstf = 11
local s_rest = 6
-local states = {
+local states = allocate {
init = s_init,
medi = s_medi,
med2 = s_medi,
@@ -88,7 +88,7 @@ local states = {
pstf = s_pstf,
}
-local features = {
+local features = allocate {
init = s_init,
medi = s_medi,
med2 = s_medi,
@@ -247,7 +247,7 @@ local function warning(current,what)
end
end
-local mappers = {
+local mappers = allocate {
l = s_init, -- left
d = s_medi, -- double
c = s_medi, -- joiner
diff --git a/tex/context/base/mkiv/font-otb.lua b/tex/context/base/mkiv/font-otb.lua
deleted file mode 100644
index a31079225..000000000
--- a/tex/context/base/mkiv/font-otb.lua
+++ /dev/null
@@ -1,705 +0,0 @@
-if not modules then modules = { } end modules ['font-otb'] = {
- version = 1.001,
- comment = "companion to font-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-local concat = table.concat
-local format, gmatch, gsub, find, match, lower, strip = string.format, string.gmatch, string.gsub, string.find, string.match, string.lower, string.strip
-local type, next, tonumber, tostring, rawget = type, next, tonumber, tostring, rawget
-
-local trace_baseinit = false trackers.register("otf.baseinit", function(v) trace_baseinit = v end)
-local trace_singles = false trackers.register("otf.singles", function(v) trace_singles = v end)
-local trace_multiples = false trackers.register("otf.multiples", function(v) trace_multiples = v end)
-local trace_alternatives = false trackers.register("otf.alternatives", function(v) trace_alternatives = v end)
-local trace_ligatures = false trackers.register("otf.ligatures", function(v) trace_ligatures = v end)
-local trace_ligatures_detail = false trackers.register("otf.ligatures.detail", function(v) trace_ligatures_detail = v end)
-local trace_kerns = false trackers.register("otf.kerns", function(v) trace_kerns = v end)
-local trace_preparing = false trackers.register("otf.preparing", function(v) trace_preparing = v end)
-
-local report_prepare = logs.reporter("fonts","otf prepare")
-
-local fonts = fonts
-local otf = fonts.handlers.otf
-
-local otffeatures = otf.features
-local registerotffeature = otffeatures.register
-
-otf.defaultbasealternate = "none" -- first last
-
-local wildcard = "*"
-local default = "dflt"
-
-local formatters = string.formatters
-local f_unicode = formatters["%U"]
-local f_uniname = formatters["%U (%s)"]
-local f_unilist = formatters["% t (% t)"]
-
-local function gref(descriptions,n)
- if type(n) == "number" then
- local name = descriptions[n].name
- if name then
- return f_uniname(n,name)
- else
- return f_unicode(n)
- end
- elseif n then
- local num, nam, j = { }, { }, 0
- for i=1,#n do
- local ni = n[i]
- if tonumber(ni) then -- first is likely a key
- j = j + 1
- local di = descriptions[ni]
- num[j] = f_unicode(ni)
- nam[j] = di and di.name or "-"
- end
- end
- return f_unilist(num,nam)
- else
- return ""
- end
-end
-
-local function cref(feature,lookuptags,lookupname)
- if lookupname then
- return formatters["feature %a, lookup %a"](feature,lookuptags[lookupname])
- else
- return formatters["feature %a"](feature)
- end
-end
-
-local function report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,comment)
- report_prepare("%s: base alternate %s => %s (%S => %S)",
- cref(feature,lookuptags,lookupname),
- gref(descriptions,unicode),
- replacement and gref(descriptions,replacement),
- value,
- comment)
-end
-
-local function report_substitution(feature,lookuptags,lookupname,descriptions,unicode,substitution)
- report_prepare("%s: base substitution %s => %S",
- cref(feature,lookuptags,lookupname),
- gref(descriptions,unicode),
- gref(descriptions,substitution))
-end
-
-local function report_ligature(feature,lookuptags,lookupname,descriptions,unicode,ligature)
- report_prepare("%s: base ligature %s => %S",
- cref(feature,lookuptags,lookupname),
- gref(descriptions,ligature),
- gref(descriptions,unicode))
-end
-
-local function report_kern(feature,lookuptags,lookupname,descriptions,unicode,otherunicode,value)
- report_prepare("%s: base kern %s + %s => %S",
- cref(feature,lookuptags,lookupname),
- gref(descriptions,unicode),
- gref(descriptions,otherunicode),
- value)
-end
-
-local basemethods = { }
-local basemethod = ""
-
-local function applybasemethod(what,...)
- local m = basemethods[basemethod][what]
- if m then
- return m(...)
- end
-end
-
--- We need to make sure that luatex sees the difference between
--- base fonts that have different glyphs in the same slots in fonts
--- that have the same fullname (or filename). LuaTeX will merge fonts
--- eventually (and subset later on). If needed we can use a more
--- verbose name as long as we don't use <()<>[]{}/%> and the length
--- is < 128.
-
-local basehash, basehashes, applied = { }, 1, { }
-
-local function registerbasehash(tfmdata)
- local properties = tfmdata.properties
- local hash = concat(applied," ")
- local base = basehash[hash]
- if not base then
- basehashes = basehashes + 1
- base = basehashes
- basehash[hash] = base
- end
- properties.basehash = base
- properties.fullname = properties.fullname .. "-" .. base
- -- report_prepare("fullname base hash '%a, featureset %a",tfmdata.properties.fullname,hash)
- applied = { }
-end
-
-local function registerbasefeature(feature,value)
- applied[#applied+1] = feature .. "=" .. tostring(value)
-end
-
--- The original basemode ligature builder used the names of components
--- and did some expression juggling to get the chain right. The current
--- variant starts with unicodes but still uses names to make the chain.
--- This is needed because we have to create intermediates when needed
--- but use predefined snippets when available. To some extent the
--- current builder is more stupid but I don't worry that much about it
--- as ligatures are rather predictable.
---
--- Personally I think that an ff + i == ffi rule as used in for instance
--- latin modern is pretty weird as no sane person will key that in and
--- expect a glyph for that ligature plus the following character. Anyhow,
--- as we need to deal with this, we do, but no guarantees are given.
---
--- latin modern dejavu
---
--- f+f 102 102 102 102
--- f+i 102 105 102 105
--- f+l 102 108 102 108
--- f+f+i 102 102 105
--- f+f+l 102 102 108 102 102 108
--- ff+i 64256 105 64256 105
--- ff+l 64256 108
---
--- As you can see here, latin modern is less complete than dejavu but
--- in practice one will not notice it.
---
--- The while loop is needed because we need to resolve for instance
--- pseudo names like hyphen_hyphen to endash so in practice we end
--- up with a bit too many definitions but the overhead is neglectable.
---
--- We can have changed[first] or changed[second] but it quickly becomes
--- messy if we need to take that into account.
-
-local trace = false
-
-local function finalize_ligatures(tfmdata,ligatures)
- local nofligatures = #ligatures
- if nofligatures > 0 then
- local characters = tfmdata.characters
- local descriptions = tfmdata.descriptions
- local resources = tfmdata.resources
- local unicodes = resources.unicodes -- we use rawget in order to avoid building the table
- local private = resources.private
- local alldone = false
- while not alldone do
- local done = 0
- for i=1,nofligatures do
- local ligature = ligatures[i]
- if ligature then
- local unicode, lookupdata = ligature[1], ligature[2]
- if trace_ligatures_detail then
- report_prepare("building % a into %a",lookupdata,unicode)
- end
- local size = #lookupdata
- local firstcode = lookupdata[1] -- [2]
- local firstdata = characters[firstcode]
- local okay = false
- if firstdata then
- local firstname = "ctx_" .. firstcode
- for i=1,size-1 do -- for i=2,size-1 do
- local firstdata = characters[firstcode]
- if not firstdata then
- firstcode = private
- if trace_ligatures_detail then
- report_prepare("defining %a as %a",firstname,firstcode)
- end
- unicodes[firstname] = firstcode
- firstdata = { intermediate = true, ligatures = { } }
- characters[firstcode] = firstdata
- descriptions[firstcode] = { name = firstname }
- private = private + 1
- end
- local target
- local secondcode = lookupdata[i+1]
- local secondname = firstname .. "_" .. secondcode
- if i == size - 1 then
- target = unicode
- if not rawget(unicodes,secondname) then
- unicodes[secondname] = unicode -- map final ligature onto intermediates
- end
- okay = true
- else
- target = rawget(unicodes,secondname)
- if not target then
- break
- end
- end
- if trace_ligatures_detail then
- report_prepare("codes (%a,%a) + (%a,%a) -> %a",firstname,firstcode,secondname,secondcode,target)
- end
- local firstligs = firstdata.ligatures
- if firstligs then
- firstligs[secondcode] = { char = target }
- else
- firstdata.ligatures = { [secondcode] = { char = target } }
- end
- firstcode = target
- firstname = secondname
- end
- elseif trace_ligatures_detail then
- report_prepare("no glyph (%a,%a) for building %a",firstname,firstcode,target)
- end
- if okay then
- ligatures[i] = false
- done = done + 1
- end
- end
- end
- alldone = done == 0
- end
- if trace_ligatures_detail then
- for k, v in table.sortedhash(characters) do
- if v.ligatures then
- table.print(v,k)
- end
- end
- end
- resources.private = private
- return true
- end
-end
-
-local function preparesubstitutions(tfmdata,feature,value,validlookups,lookuplist)
- local characters = tfmdata.characters
- local descriptions = tfmdata.descriptions
- local resources = tfmdata.resources
- local properties = tfmdata.properties
- local changed = tfmdata.changed
- local lookuphash = resources.lookuphash
- local lookuptypes = resources.lookuptypes
- local lookuptags = resources.lookuptags
-
- local ligatures = { }
- local alternate = tonumber(value) or true and 1
- local defaultalt = otf.defaultbasealternate
-
- local trace_singles = trace_baseinit and trace_singles
- local trace_alternatives = trace_baseinit and trace_alternatives
- local trace_ligatures = trace_baseinit and trace_ligatures
-
- local actions = {
- substitution = function(lookupdata,lookuptags,lookupname,description,unicode)
- if trace_singles then
- report_substitution(feature,lookuptags,lookupname,descriptions,unicode,lookupdata)
- end
- changed[unicode] = lookupdata
- end,
- alternate = function(lookupdata,lookuptags,lookupname,description,unicode)
- local replacement = lookupdata[alternate]
- if replacement then
- changed[unicode] = replacement
- if trace_alternatives then
- report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,"normal")
- end
- elseif defaultalt == "first" then
- replacement = lookupdata[1]
- changed[unicode] = replacement
- if trace_alternatives then
- report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,defaultalt)
- end
- elseif defaultalt == "last" then
- replacement = lookupdata[#data]
- if trace_alternatives then
- report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,defaultalt)
- end
- else
- if trace_alternatives then
- report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,"unknown")
- end
- end
- end,
- ligature = function(lookupdata,lookuptags,lookupname,description,unicode)
- if trace_ligatures then
- report_ligature(feature,lookuptags,lookupname,descriptions,unicode,lookupdata)
- end
- ligatures[#ligatures+1] = { unicode, lookupdata }
- end,
- }
-
- for unicode, character in next, characters do
- local description = descriptions[unicode]
- local lookups = description.slookups
- if lookups then
- for l=1,#lookuplist do
- local lookupname = lookuplist[l]
- local lookupdata = lookups[lookupname]
- if lookupdata then
- local lookuptype = lookuptypes[lookupname]
- local action = actions[lookuptype]
- if action then
- action(lookupdata,lookuptags,lookupname,description,unicode)
- end
- end
- end
- end
- local lookups = description.mlookups
- if lookups then
- for l=1,#lookuplist do
- local lookupname = lookuplist[l]
- local lookuplist = lookups[lookupname]
- if lookuplist then
- local lookuptype = lookuptypes[lookupname]
- local action = actions[lookuptype]
- if action then
- for i=1,#lookuplist do
- action(lookuplist[i],lookuptags,lookupname,description,unicode)
- end
- end
- end
- end
- end
- end
- properties.hasligatures = finalize_ligatures(tfmdata,ligatures)
-end
-
-local function preparepositionings(tfmdata,feature,value,validlookups,lookuplist) -- todo what kind of kerns, currently all
- local characters = tfmdata.characters
- local descriptions = tfmdata.descriptions
- local resources = tfmdata.resources
- local properties = tfmdata.properties
- local lookuptags = resources.lookuptags
- local sharedkerns = { }
- local traceindeed = trace_baseinit and trace_kerns
- local haskerns = false
- for unicode, character in next, characters do
- local description = descriptions[unicode]
- local rawkerns = description.kerns -- shared
- if rawkerns then
- local s = sharedkerns[rawkerns]
- if s == false then
- -- skip
- elseif s then
- character.kerns = s
- else
- local newkerns = character.kerns
- local done = false
- for l=1,#lookuplist do
- local lookup = lookuplist[l]
- local kerns = rawkerns[lookup]
- if kerns then
- for otherunicode, value in next, kerns do
- if value == 0 then
- -- maybe no 0 test here
- elseif not newkerns then
- newkerns = { [otherunicode] = value }
- done = true
- if traceindeed then
- report_kern(feature,lookuptags,lookup,descriptions,unicode,otherunicode,value)
- end
- elseif not newkerns[otherunicode] then -- first wins
- newkerns[otherunicode] = value
- done = true
- if traceindeed then
- report_kern(feature,lookuptags,lookup,descriptions,unicode,otherunicode,value)
- end
- end
- end
- end
- end
- if done then
- sharedkerns[rawkerns] = newkerns
- character.kerns = newkerns -- no empty assignments
- haskerns = true
- else
- sharedkerns[rawkerns] = false
- end
- end
- end
- end
- properties.haskerns = haskerns
-end
-
-basemethods.independent = {
- preparesubstitutions = preparesubstitutions,
- preparepositionings = preparepositionings,
-}
-
-local function makefake(tfmdata,name,present)
- local resources = tfmdata.resources
- local private = resources.private
- local character = { intermediate = true, ligatures = { } }
- resources.unicodes[name] = private
- tfmdata.characters[private] = character
- tfmdata.descriptions[private] = { name = name }
- resources.private = private + 1
- present[name] = private
- return character
-end
-
-local function make_1(present,tree,name)
- for k, v in next, tree do
- if k == "ligature" then
- present[name] = v
- else
- make_1(present,v,name .. "_" .. k)
- end
- end
-end
-
-local function make_2(present,tfmdata,characters,tree,name,preceding,unicode,done,lookuptags,lookupname)
- for k, v in next, tree do
- if k == "ligature" then
- local character = characters[preceding]
- if not character then
- if trace_baseinit then
- report_prepare("weird ligature in lookup %a, current %C, preceding %C",lookuptags[lookupname],v,preceding)
- end
- character = makefake(tfmdata,name,present)
- end
- local ligatures = character.ligatures
- if ligatures then
- ligatures[unicode] = { char = v }
- else
- character.ligatures = { [unicode] = { char = v } }
- end
- if done then
- local d = done[lookupname]
- if not d then
- done[lookupname] = { "dummy", v }
- else
- d[#d+1] = v
- end
- end
- else
- local code = present[name] or unicode
- local name = name .. "_" .. k
- make_2(present,tfmdata,characters,v,name,code,k,done,lookuptags,lookupname)
- end
- end
-end
-
-local function preparesubstitutions(tfmdata,feature,value,validlookups,lookuplist)
- local characters = tfmdata.characters
- local descriptions = tfmdata.descriptions
- local resources = tfmdata.resources
- local changed = tfmdata.changed
- local lookuphash = resources.lookuphash
- local lookuptypes = resources.lookuptypes
- local lookuptags = resources.lookuptags
-
- local ligatures = { }
- local alternate = tonumber(value) or true and 1
- local defaultalt = otf.defaultbasealternate
-
- local trace_singles = trace_baseinit and trace_singles
- local trace_alternatives = trace_baseinit and trace_alternatives
- local trace_ligatures = trace_baseinit and trace_ligatures
-
- for l=1,#lookuplist do
- local lookupname = lookuplist[l]
- local lookupdata = lookuphash[lookupname]
- local lookuptype = lookuptypes[lookupname]
- for unicode, data in next, lookupdata do
- if lookuptype == "substitution" then
- if trace_singles then
- report_substitution(feature,lookuptags,lookupname,descriptions,unicode,data)
- end
- changed[unicode] = data
- elseif lookuptype == "alternate" then
- local replacement = data[alternate]
- if replacement then
- changed[unicode] = replacement
- if trace_alternatives then
- report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,"normal")
- end
- elseif defaultalt == "first" then
- replacement = data[1]
- changed[unicode] = replacement
- if trace_alternatives then
- report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,defaultalt)
- end
- elseif defaultalt == "last" then
- replacement = data[#data]
- if trace_alternatives then
- report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,defaultalt)
- end
- else
- if trace_alternatives then
- report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,"unknown")
- end
- end
- elseif lookuptype == "ligature" then
- ligatures[#ligatures+1] = { unicode, data, lookupname }
- if trace_ligatures then
- report_ligature(feature,lookuptags,lookupname,descriptions,unicode,data)
- end
- end
- end
- end
-
- local nofligatures = #ligatures
-
- if nofligatures > 0 then
-
- local characters = tfmdata.characters
- local present = { }
- local done = trace_baseinit and trace_ligatures and { }
-
- for i=1,nofligatures do
- local ligature = ligatures[i]
- local unicode, tree = ligature[1], ligature[2]
- make_1(present,tree,"ctx_"..unicode)
- end
-
- for i=1,nofligatures do
- local ligature = ligatures[i]
- local unicode, tree, lookupname = ligature[1], ligature[2], ligature[3]
- make_2(present,tfmdata,characters,tree,"ctx_"..unicode,unicode,unicode,done,lookuptags,lookupname)
- end
-
- end
-
-end
-
-local function preparepositionings(tfmdata,feature,value,validlookups,lookuplist)
- local characters = tfmdata.characters
- local descriptions = tfmdata.descriptions
- local resources = tfmdata.resources
- local properties = tfmdata.properties
- local lookuphash = resources.lookuphash
- local lookuptags = resources.lookuptags
- local traceindeed = trace_baseinit and trace_kerns
- -- check out this sharedkerns trickery
- for l=1,#lookuplist do
- local lookupname = lookuplist[l]
- local lookupdata = lookuphash[lookupname]
- for unicode, data in next, lookupdata do
- local character = characters[unicode]
- local kerns = character.kerns
- if not kerns then
- kerns = { }
- character.kerns = kerns
- end
- if traceindeed then
- for otherunicode, kern in next, data do
- if not kerns[otherunicode] and kern ~= 0 then
- kerns[otherunicode] = kern
- report_kern(feature,lookuptags,lookup,descriptions,unicode,otherunicode,kern)
- end
- end
- else
- for otherunicode, kern in next, data do
- if not kerns[otherunicode] and kern ~= 0 then
- kerns[otherunicode] = kern
- end
- end
- end
- end
- end
-
-end
-
-local function initializehashes(tfmdata)
- nodeinitializers.features(tfmdata)
-end
-
-basemethods.shared = {
- initializehashes = initializehashes,
- preparesubstitutions = preparesubstitutions,
- preparepositionings = preparepositionings,
-}
-
-basemethod = "independent"
-
-local function featuresinitializer(tfmdata,value)
- if true then -- value then
- local starttime = trace_preparing and os.clock()
- local features = tfmdata.shared.features
- local fullname = tfmdata.properties.fullname or "?"
- if features then
- applybasemethod("initializehashes",tfmdata)
- local collectlookups = otf.collectlookups
- local rawdata = tfmdata.shared.rawdata
- local properties = tfmdata.properties
- local script = properties.script -- or "dflt" -- can be nil
- local language = properties.language -- or "dflt" -- can be nil
- local basesubstitutions = rawdata.resources.features.gsub
- local basepositionings = rawdata.resources.features.gpos
- --
- -- if basesubstitutions then
- -- for feature, data in next, basesubstitutions do
- -- local value = features[feature]
- -- if value then
- -- local validlookups, lookuplist = collectlookups(rawdata,feature,script,language)
- -- if validlookups then
- -- applybasemethod("preparesubstitutions",tfmdata,feature,value,validlookups,lookuplist)
- -- registerbasefeature(feature,value)
- -- end
- -- end
- -- end
- -- end
- -- if basepositionings then
- -- for feature, data in next, basepositionings do
- -- local value = features[feature]
- -- if value then
- -- local validlookups, lookuplist = collectlookups(rawdata,feature,script,language)
- -- if validlookups then
- -- applybasemethod("preparepositionings",tfmdata,feature,features[feature],validlookups,lookuplist)
- -- registerbasefeature(feature,value)
- -- end
- -- end
- -- end
- -- end
- --
- if basesubstitutions or basepositionings then
- local sequences = tfmdata.resources.sequences
- for s=1,#sequences do
- local sequence = sequences[s]
- local sfeatures = sequence.features
- if sfeatures then
- local order = sequence.order
- if order then
- for i=1,#order do --
- local feature = order[i]
- local value = features[feature]
- if value then
- local validlookups, lookuplist = collectlookups(rawdata,feature,script,language)
- if not validlookups then
- -- skip
- elseif basesubstitutions and basesubstitutions[feature] then
- if trace_preparing then
- report_prepare("filtering base %s feature %a for %a with value %a","sub",feature,fullname,value)
- end
- applybasemethod("preparesubstitutions",tfmdata,feature,value,validlookups,lookuplist)
- registerbasefeature(feature,value)
- elseif basepositionings and basepositionings[feature] then
- if trace_preparing then
- report_prepare("filtering base %a feature %a for %a with value %a","pos",feature,fullname,value)
- end
- applybasemethod("preparepositionings",tfmdata,feature,value,validlookups,lookuplist)
- registerbasefeature(feature,value)
- end
- end
- end
- end
- end
- end
- end
- --
- registerbasehash(tfmdata)
- end
- if trace_preparing then
- report_prepare("preparation time is %0.3f seconds for %a",os.clock()-starttime,fullname)
- end
- end
-end
-
-registerotffeature {
- name = "features",
- description = "features",
- default = true,
- initializers = {
- -- position = 1, -- after setscript (temp hack ... we need to force script / language to 1
- base = featuresinitializer,
- }
-}
-
--- independent : collect lookups independently (takes more runtime ... neglectable)
--- shared : shares lookups with node mode (takes more memory unless also a node mode variant is used ... noticeable)
-
-directives.register("fonts.otf.loader.basemethod", function(v)
- if basemethods[v] then
- basemethod = v
- end
-end)
diff --git a/tex/context/base/mkiv/font-otc.lua b/tex/context/base/mkiv/font-otc.lua
index 5d879ec1d..2bad62d60 100644
--- a/tex/context/base/mkiv/font-otc.lua
+++ b/tex/context/base/mkiv/font-otc.lua
@@ -1,6 +1,6 @@
if not modules then modules = { } end modules ['font-otc'] = {
version = 1.001,
- comment = "companion to font-otf.lua (context)",
+ comment = "companion to font-ini.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
license = "see context related readme files"
@@ -10,6 +10,8 @@ local format, insert, sortedkeys, tohash = string.format, table.insert, table.so
local type, next = type, next
local lpegmatch = lpeg.match
local utfbyte, utflen, utfsplit = utf.byte, utf.len, utf.split
+local match = string.match
+local sortedhash = table.sortedhash
-- we assume that the other otf stuff is loaded already
@@ -21,6 +23,10 @@ local otf = fonts.handlers.otf
local registerotffeature = otf.features.register
local setmetatableindex = table.setmetatableindex
+local checkmerge = fonts.helpers.checkmerge
+local checkflags = fonts.helpers.checkflags
+local checksteps = fonts.helpers.checksteps
+
local normalized = {
substitution = "substitution",
single = "substitution",
@@ -29,6 +35,7 @@ local normalized = {
multiple = "multiple",
kern = "kern",
pair = "pair",
+ single = "single",
chainsubstitution = "chainsubstitution",
chainposition = "chainposition",
}
@@ -40,6 +47,7 @@ local types = {
multiple = "gsub_multiple",
kern = "gpos_pair",
pair = "gpos_pair",
+ single = "gpos_single",
chainsubstitution = "gsub_contextchain",
chainposition = "gpos_contextchain",
}
@@ -403,16 +411,16 @@ local function addfeature(data,feature,specifications)
return coverage
end
+ local prepare_single = prepare_pair -- we could have a better test on the spec
+
local function prepare_chain(list,featuretype,sublookups)
-- todo: coveractions
local rules = list.rules
local coverage = { }
if rules then
- local rulehash = { }
- local rulesize = 0
- local sequence = { }
- local nofsequences = 0
- local lookuptype = types[featuretype]
+ local rulehash = { }
+ local rulesize = 0
+ local lookuptype = types[featuretype]
for nofrules=1,#rules do
local rule = rules[nofrules]
local current = rule.current
@@ -442,7 +450,7 @@ local function addfeature(data,feature,specifications)
local lookups = rule.lookups or false
local subtype = nil
if lookups and sublookups then
- for k, v in next, lookups do
+ for k, v in sortedhash(lookups) do
local t = type(v)
if t == "table" then
-- already ok
@@ -460,10 +468,10 @@ local function addfeature(data,feature,specifications)
subtype = lookup.type
end
else
- lookups[k] = false -- new
+ lookups[k] = false -- { false } -- new
end
else
- lookups[k] = false -- new
+ lookups[k] = false -- { false } -- new
end
end
end
@@ -494,14 +502,17 @@ local function addfeature(data,feature,specifications)
replacements, -- 7
subtype, -- 8
}
- for unic in next, sequence[start] do
+-- for unic in next, sequence[start] do
+ for unic in sortedhash(sequence[start]) do
local cu = coverage[unic]
if not cu then
coverage[unic] = rulehash -- can now be done cleaner i think
end
end
+ sequence.n = nofsequences
end
end
+ rulehash.n = rulesize
end
return coverage
end
@@ -542,9 +553,9 @@ local function addfeature(data,feature,specifications)
local s = sequences[i]
local f = s.features
if f then
- for k in next, f do
+ for k in sortedhash(f) do -- next, f do
if k == position then
- index = i
+ index = i
break
end
end
@@ -600,6 +611,9 @@ local function addfeature(data,feature,specifications)
local steps = { }
local sublookups = specification.lookups
local category = nil
+ --
+ checkflags(specification,resources)
+ --
if sublookups then
local s = { }
for i=1,#sublookups do
@@ -621,18 +635,25 @@ local function addfeature(data,feature,specifications)
coverage = prepare_alternate(list,featuretype,nocheck)
elseif featuretype == "multiple" then
coverage = prepare_multiple(list,featuretype,nocheck)
- elseif featuretype == "kern" then
- format = "kern"
+ elseif featuretype == "kern" or featuretype == "move" then
+ format = featuretype
coverage = prepare_kern(list,featuretype)
elseif featuretype == "pair" then
format = "pair"
coverage = prepare_pair(list,featuretype)
+ elseif featuretype == "single" then
+ format = "single"
+ coverage = prepare_single(list,featuretype)
end
if coverage and next(coverage) then
nofsteps = nofsteps + 1
steps[nofsteps] = register(coverage,featuretype,format,feature,nofsteps,descriptions,resources)
end
end
+ --
+ checkmerge(specification)
+ checksteps(specification)
+ --
s[i] = {
[stepkey] = steps,
nofsteps = nofsteps,
@@ -658,14 +679,18 @@ local function addfeature(data,feature,specifications)
elseif featuretype == "multiple" then
category = "gsub"
coverage = prepare_multiple(list,featuretype,nocheck)
- elseif featuretype == "kern" then
+ elseif featuretype == "kern" or featuretype == "move" then
category = "gpos"
- format = "kern"
+ format = featuretype
coverage = prepare_kern(list,featuretype)
elseif featuretype == "pair" then
category = "gpos"
format = "pair"
coverage = prepare_pair(list,featuretype)
+ elseif featuretype == "single" then
+ category = "gpos"
+ format = "single"
+ coverage = prepare_single(list,featuretype)
elseif featuretype == "chainsubstitution" then
category = "gsub"
coverage = prepare_chain(list,featuretype,sublookups)
@@ -688,6 +713,7 @@ local function addfeature(data,feature,specifications)
askedfeatures[k] = tohash(v)
end
end
+ --
if featureflags[1] then featureflags[1] = "mark" end
if featureflags[2] then featureflags[2] = "ligature" end
if featureflags[3] then featureflags[3] = "base" end
@@ -702,6 +728,10 @@ local function addfeature(data,feature,specifications)
nofsteps = nofsteps,
type = steptype,
}
+ --
+ checkflags(sequence,resources)
+ checkmerge(sequence)
+ checksteps(sequence)
-- position | prepend | append
local first, last = getrange(sequences,category)
inject(specification,sequences,sequence,first,last,category,feature)
@@ -1004,7 +1034,8 @@ registerotffeature {
local lookups = { }
local protect = { }
local revert = { }
-local zwj = { 0x200C }
+local zwjchar = 0x200C
+local zwj = { zwjchar }
otf.addfeature {
name = "blockligatures",
@@ -1048,31 +1079,59 @@ registerotffeature {
local settings_to_array = utilities.parsers and utilities.parsers.settings_to_array
or function(s) return string.split(s,",") end -- for generic
+local splitter = lpeg.splitat(":")
+
local function blockligatures(str)
local t = settings_to_array(str)
for i=1,#t do
- local ti = utfsplit(t[i])
- if #ti > 1 then
- local one = ti[1]
- local two = ti[2]
- lookups[one] = { one, 0x200C }
+ local ti = t[i]
+ local before, current, after = lpegmatch(splitter,ti)
+ if current and after then -- before is returned when no match
+ -- experimental joke
+ if before then
+ before = utfsplit(before)
+ for i=1,#before do
+ before[i] = { before[i] }
+ end
+ end
+ if current then
+ current = utfsplit(current)
+ end
+ if after then
+ after = utfsplit(after)
+ for i=1,#after do
+ after[i] = { after[i] }
+ end
+ end
+ else
+ before = nil
+ current = utfsplit(ti)
+ after = nil
+ end
+ if #current > 1 then
+ local one = current[1]
+ local two = current[2]
+ lookups[one] = { one, zwjchar }
local one = { one }
local two = { two }
local new = #protect + 1
protect[new] = {
+ before = before,
current = { one, two },
+ after = after,
lookups = { 1 }, -- not shared !
}
revert[new] = {
+ -- before = before,
current = { one, zwj },
+ -- after = { two, unpack(after) },
after = { two },
lookups = { 1 }, -- not shared !
}
end
end
-
end
-- blockligatures("\0\0")
@@ -1081,6 +1140,7 @@ otf.helpers.blockligatures = blockligatures
-- blockligatures("fi,ff")
-- blockligatures("fl")
+-- blockligatures("u:fl:age")
if context then
diff --git a/tex/context/base/mkiv/font-otd.lua b/tex/context/base/mkiv/font-otd.lua
index 64cb1bcb4..2633c6cae 100644
--- a/tex/context/base/mkiv/font-otd.lua
+++ b/tex/context/base/mkiv/font-otd.lua
@@ -142,6 +142,7 @@ local function initialize(sequence,script,language,s_enabled,a_enabled,font,attr
local order = sequence.order
if order then
local featuretype = featuretypes[sequence.type or "unknown"]
+ local lookupdone = false
for i=1,#order do --
local kind = order[i] --
local e_e
@@ -152,16 +153,14 @@ local function initialize(sequence,script,language,s_enabled,a_enabled,font,attr
e_e = s_enabled and s_enabled[kind] -- the value (font)
end
if e_e then
+ local usedattribute, usedscript, usedlanguage, usedlookup
local valid = type(e_e) == "string" and lpegmatch(pattern,e_e)
if valid then
-- we have hit always
- local attribute = autofeatures[kind] or false
- if trace_applied then
- report_process(
- "font %s, dynamic %a (%a), feature %a, script %a, language %a, lookup %a, value %a",
- font,attr or 0,dynamic,kind,"*","*",sequence.name,valid)
- end
- ra[#ra+1] = { valid, attribute, sequence, kind }
+ usedattribute = autofeatures[kind] or false
+ usedlanguage = "*"
+ usedscript = "*"
+ usedlookup = { valid, usedattribute, sequence, kind }
else
-- we already checked for e_e
local scripts = features[kind] --
@@ -170,7 +169,7 @@ local function initialize(sequence,script,language,s_enabled,a_enabled,font,attr
langages = defaultscript(featuretype,autoscript,scripts)
end
if languages then
- -- we need detailed control over default becase we want to trace
+ -- we need detailed control over default because we want to trace
-- only first attribute match check, so we assume simple fina's
-- local valid = false
if languages[language] then
@@ -182,14 +181,30 @@ local function initialize(sequence,script,language,s_enabled,a_enabled,font,attr
end
end
if valid then
- local attribute = autofeatures[kind] or false
- if trace_applied then
- report_process(
- "font %s, dynamic %a (%a), feature %a, script %a, language %a, lookup %a, value %a",
- font,attr or 0,dynamic,kind,script,language,sequence.name,valid)
- end
- ra[#ra+1] = { valid, attribute, sequence, kind }
+ usedattribute = autofeatures[kind] or false
+ usedlanguage = script
+ usedscript = language
+ usedlookup = { valid, usedattribute, sequence, kind }
+ end
+ end
+ if not usedlookup then
+ -- go on
+ elseif lookupdone then
+ if trace_applied then
+ report_process(
+ "font %s, dynamic %a (%a), feature %a, script %a, language %a, lookup %a, value %a, nofsteps %a, lookup already set by %a",
+ font,attr or 0,dynamic,kind,usedscript,usedlanguage,sequence.name,valid,sequence.nofsteps,ra[#ra][4])
+ end
+ else
+ ra[#ra+1] = usedlookup
+ if trace_applied then
+ report_process(
+ "font %s, dynamic %a (%a), feature %a, script %a, language %a, lookup %a, value %a, nofsteps %a",
+ font,attr or 0,dynamic,kind,usedscript,usedlanguage,sequence.name,valid,sequence.nofsteps)
+ else
+ return -- no need to look further
end
+ lookupdone = true
end
end
end
@@ -253,7 +268,7 @@ function otf.dataset(tfmdata,font,attr) -- attr only when explicit (as in specia
-- indexed but we can also add specific data by key in:
}
rl[attr] = ra
- local sequences = tfmdata.resources.sequences
+ local sequences = tfmdata.shared.reorderedsequences or tfmdata.resources.sequences
if sequences then
local autoscript = (s_enabled and s_enabled.autoscript ) or (a_enabled and a_enabled.autoscript )
local autolanguage = (s_enabled and s_enabled.autolanguage) or (a_enabled and a_enabled.autolanguage)
diff --git a/tex/context/base/mkiv/font-otf.lua b/tex/context/base/mkiv/font-otf.lua
deleted file mode 100644
index 1db80272e..000000000
--- a/tex/context/base/mkiv/font-otf.lua
+++ /dev/null
@@ -1,2968 +0,0 @@
-if not modules then modules = { } end modules ['font-otf'] = {
- version = 1.001,
- comment = "companion to font-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- langs -> languages enz
--- anchor_classes vs kernclasses
--- modification/creationtime in subfont is runtime dus zinloos
--- to_table -> totable
--- ascent descent
-
--- to be checked: combinations like:
---
--- current="ABCD" with [A]=nothing, [BC]=ligature, [D]=single (applied to result of BC so funny index)
---
--- unlikely but possible
-
--- more checking against low level calls of functions
-
-local gmatch, gsub, find, match, lower, strip = string.gmatch, string.gsub, string.find, string.match, string.lower, string.strip
-local type, next, tonumber, tostring = type, next, tonumber, tostring
-local abs = math.abs
-local reversed, concat, insert, remove, sortedkeys = table.reversed, table.concat, table.insert, table.remove, table.sortedkeys
-local fastcopy, tohash, derivetable, copy = table.fastcopy, table.tohash, table.derive, table.copy
-local formatters = string.formatters
-local P, R, S, C, Ct, lpegmatch = lpeg.P, lpeg.R, lpeg.S, lpeg.C, lpeg.Ct, lpeg.match
-
-local setmetatableindex = table.setmetatableindex
-local allocate = utilities.storage.allocate
-local registertracker = trackers.register
-local registerdirective = directives.register
-local starttiming = statistics.starttiming
-local stoptiming = statistics.stoptiming
-local elapsedtime = statistics.elapsedtime
-local findbinfile = resolvers.findbinfile
-
-local trace_private = false registertracker("otf.private", function(v) trace_private = v end)
-local trace_subfonts = false registertracker("otf.subfonts", function(v) trace_subfonts = v end)
-local trace_loading = false registertracker("otf.loading", function(v) trace_loading = v end)
-local trace_features = false registertracker("otf.features", function(v) trace_features = v end)
-local trace_dynamics = false registertracker("otf.dynamics", function(v) trace_dynamics = v end)
-local trace_sequences = false registertracker("otf.sequences", function(v) trace_sequences = v end)
-local trace_markwidth = false registertracker("otf.markwidth", function(v) trace_markwidth = v end)
-local trace_defining = false registertracker("fonts.defining", function(v) trace_defining = v end)
-
-local compact_lookups = true registertracker("otf.compactlookups", function(v) compact_lookups = v end)
-local purge_names = true registertracker("otf.purgenames", function(v) purge_names = v end)
-
-local report_otf = logs.reporter("fonts","otf loading")
-
-local fonts = fonts
-local otf = fonts.handlers.otf
-
-otf.glists = { "gsub", "gpos" }
-
-otf.version = 2.826 -- beware: also sync font-mis.lua and in mtx-fonts
-otf.cache = containers.define("fonts", "otf", otf.version, true)
-
-local hashes = fonts.hashes
-local definers = fonts.definers
-local readers = fonts.readers
-local constructors = fonts.constructors
-
-local fontdata = hashes and hashes.identifiers
-local chardata = characters and characters.data -- not used
-
-local otffeatures = constructors.features.otf
-local registerotffeature = otffeatures.register
-
-local otfenhancers = constructors.enhancers.otf
-local registerotfenhancer = otfenhancers.register
-
-local forceload = false
-local cleanup = 0 -- mk: 0=885M 1=765M 2=735M (regular run 730M)
-local packdata = true
-local syncspace = true
-local forcenotdef = false
-local includesubfonts = false
-local overloadkerns = false -- experiment
-
-local applyruntimefixes = fonts.treatments and fonts.treatments.applyfixes
-
-local wildcard = "*"
-local default = "dflt"
-
-local fontloader = fontloader
-local open_font = fontloader.open
-local close_font = fontloader.close
-local font_fields = fontloader.fields
-local apply_featurefile = fontloader.apply_featurefile
-
-local mainfields = nil
-local glyphfields = nil -- not used yet
-
-local formats = fonts.formats
-
-formats.otf = "opentype"
-formats.ttf = "truetype"
-formats.ttc = "truetype"
-formats.dfont = "truetype"
-
-registerdirective("fonts.otf.loader.cleanup", function(v) cleanup = tonumber(v) or (v and 1) or 0 end)
-registerdirective("fonts.otf.loader.force", function(v) forceload = v end)
-registerdirective("fonts.otf.loader.pack", function(v) packdata = v end)
-registerdirective("fonts.otf.loader.syncspace", function(v) syncspace = v end)
-registerdirective("fonts.otf.loader.forcenotdef", function(v) forcenotdef = v end)
-registerdirective("fonts.otf.loader.overloadkerns", function(v) overloadkerns = v end)
------------------("fonts.otf.loader.alldimensions", function(v) alldimensions = v end)
-
-function otf.fileformat(filename)
- local leader = lower(io.loadchunk(filename,4))
- local suffix = lower(file.suffix(filename))
- if leader == "otto" then
- return formats.otf, suffix == "otf"
- elseif leader == "ttcf" then
- return formats.ttc, suffix == "ttc"
- -- elseif leader == "true" then
- -- return formats.ttf, suffix == "ttf"
- elseif suffix == "ttc" then
- return formats.ttc, true
- elseif suffix == "dfont" then
- return formats.dfont, true
- else
- return formats.ttf, suffix == "ttf"
- end
-end
-
--- local function otf_format(filename)
--- -- return formats[lower(file.suffix(filename))]
--- end
-
-local function otf_format(filename)
- local format, okay = otf.fileformat(filename)
- if not okay then
- report_otf("font %a is actually an %a file",filename,format)
- end
- return format
-end
-
-local function load_featurefile(raw,featurefile)
- if featurefile and featurefile ~= "" then
- if trace_loading then
- report_otf("using featurefile %a", featurefile)
- end
- apply_featurefile(raw, featurefile)
- end
-end
-
-local function showfeatureorder(rawdata,filename)
- local sequences = rawdata.resources.sequences
- if sequences and #sequences > 0 then
- if trace_loading then
- report_otf("font %a has %s sequences",filename,#sequences)
- report_otf(" ")
- end
- for nos=1,#sequences do
- local sequence = sequences[nos]
- local typ = sequence.type or "no-type"
- local name = sequence.name or "no-name"
- local subtables = sequence.subtables or { "no-subtables" }
- local features = sequence.features
- if trace_loading then
- report_otf("%3i %-15s %-20s [% t]",nos,name,typ,subtables)
- end
- if features then
- for feature, scripts in next, features do
- local tt = { }
- if type(scripts) == "table" then
- for script, languages in next, scripts do
- local ttt = { }
- for language, _ in next, languages do
- ttt[#ttt+1] = language
- end
- tt[#tt+1] = formatters["[%s: % t]"](script,ttt)
- end
- if trace_loading then
- report_otf(" %s: % t",feature,tt)
- end
- else
- if trace_loading then
- report_otf(" %s: %S",feature,scripts)
- end
- end
- end
- end
- end
- if trace_loading then
- report_otf("\n")
- end
- elseif trace_loading then
- report_otf("font %a has no sequences",filename)
- end
-end
-
---[[ldx--
-We start with a lot of tables and related functions.
---ldx]]--
-
-local valid_fields = table.tohash {
- -- "anchor_classes",
- "ascent",
- -- "cache_version",
- "cidinfo",
- "copyright",
- -- "creationtime",
- "descent",
- "design_range_bottom",
- "design_range_top",
- "design_size",
- "encodingchanged",
- "extrema_bound",
- "familyname",
- "fontname",
- "fontstyle_id",
- "fontstyle_name",
- "fullname",
- -- "glyphs",
- "hasvmetrics",
- -- "head_optimized_for_cleartype",
- "horiz_base",
- "issans",
- "isserif",
- "italicangle",
- -- "kerns",
- -- "lookups",
- "macstyle",
- -- "modificationtime",
- "notdef_loc",
- "onlybitmaps",
- "origname",
- "os2_version",
- "pfminfo",
- -- "private",
- "serifcheck",
- "sfd_version",
- -- "size",
- "strokedfont",
- "strokewidth",
- -- "subfonts",
- "table_version",
- -- "tables",
- -- "ttf_tab_saved",
- "ttf_tables",
- "uni_interp",
- "uniqueid",
- "units_per_em",
- "upos",
- "use_typo_metrics",
- "uwidth",
- "validation_state",
- "version",
- "vert_base",
- "weight",
- "weight_width_slope_only",
- -- "xuid",
- -- "truetype", -- maybe as check
-}
-
-local function adddimensions(data,filename)
- -- todo: forget about the width if it's the defaultwidth (saves mem)
- -- we could also build the marks hash here (instead of storing it)
- if data then
- local descriptions = data.descriptions
- local resources = data.resources
- local defaultwidth = resources.defaultwidth or 0
- local defaultheight = resources.defaultheight or 0
- local defaultdepth = resources.defaultdepth or 0
- local basename = trace_markwidth and file.basename(filename)
- for _, d in next, descriptions do
- local bb, wd = d.boundingbox, d.width
- if not wd then
- -- or bb?
- d.width = defaultwidth
- elseif trace_markwidth and wd ~= 0 and d.class == "mark" then
- report_otf("mark %a with width %b found in %a",d.name or "",wd,basename)
- -- d.width = -wd
- end
- if bb then
- local ht = bb[4]
- local dp = -bb[2]
- -- if alldimensions then
- -- if ht ~= 0 then
- -- d.height = ht
- -- end
- -- if dp ~= 0 then
- -- d.depth = dp
- -- end
- -- else
- if ht == 0 or ht < 0 then
- -- not set
- else
- d.height = ht
- end
- if dp == 0 or dp < 0 then
- -- not set
- else
- d.depth = dp
- end
- -- end
- end
- end
- end
-end
-
-function otf.load(filename,sub,featurefile) -- second argument (format) is gone !
- local base = file.basename(file.removesuffix(filename))
- local name = file.removesuffix(base)
- local attr = lfs.attributes(filename)
- local size = attr and attr.size or 0
- local time = attr and attr.modification or 0
- if featurefile then
- name = name .. "@" .. file.removesuffix(file.basename(featurefile))
- end
- -- or: sub = tonumber(sub)
- if sub == "" then
- sub = false
- end
- local hash = name
- if sub then
- hash = hash .. "-" .. sub
- end
- hash = containers.cleanname(hash)
- local featurefiles
- if featurefile then
- featurefiles = { }
- for s in gmatch(featurefile,"[^,]+") do
- local name = resolvers.findfile(file.addsuffix(s,'fea'),'fea') or ""
- if name == "" then
- report_otf("loading error, no featurefile %a",s)
- else
- local attr = lfs.attributes(name)
- featurefiles[#featurefiles+1] = {
- name = name,
- size = attr and attr.size or 0,
- time = attr and attr.modification or 0,
- }
- end
- end
- if #featurefiles == 0 then
- featurefiles = nil
- end
- end
- local data = containers.read(otf.cache,hash)
- local reload = not data or data.size ~= size or data.time ~= time
- if forceload then
- report_otf("forced reload of %a due to hard coded flag",filename)
- reload = true
- end
- if not reload then
- local featuredata = data.featuredata
- if featurefiles then
- if not featuredata or #featuredata ~= #featurefiles then
- reload = true
- else
- for i=1,#featurefiles do
- local fi, fd = featurefiles[i], featuredata[i]
- if fi.name ~= fd.name or fi.size ~= fd.size or fi.time ~= fd.time then
- reload = true
- break
- end
- end
- end
- elseif featuredata then
- reload = true
- end
- if reload then
- report_otf("loading: forced reload due to changed featurefile specification %a",featurefile)
- end
- end
- if reload then
- starttiming("fontloader")
- report_otf("loading %a, hash %a",filename,hash)
- local fontdata, messages
- if sub then
- fontdata, messages = open_font(filename,sub)
- else
- fontdata, messages = open_font(filename)
- end
- if fontdata then
- mainfields = mainfields or (font_fields and font_fields(fontdata))
- end
- if trace_loading and messages and #messages > 0 then
- if type(messages) == "string" then
- report_otf("warning: %s",messages)
- else
- for m=1,#messages do
- report_otf("warning: %S",messages[m])
- end
- end
- else
- report_otf("loading done")
- end
- if fontdata then
- if featurefiles then
- for i=1,#featurefiles do
- load_featurefile(fontdata,featurefiles[i].name)
- end
- end
- local unicodes = {
- -- names to unicodes
- }
- local splitter = lpeg.splitter(" ",unicodes)
- data = {
- size = size,
- time = time,
- subfont = sub,
- format = otf_format(filename),
- featuredata = featurefiles,
- resources = {
- filename = resolvers.unresolve(filename), -- no shortcut
- version = otf.version,
- creator = "context mkiv",
- unicodes = unicodes,
- indices = {
- -- index to unicodes
- },
- duplicates = {
- -- alternative unicodes
- },
- variants = {
- -- alternative unicodes (variants)
- },
- lookuptypes = {
- },
- },
- warnings = {
- },
- metadata = {
- -- raw metadata, not to be used
- },
- properties = {
- -- normalized metadata
- },
- descriptions = {
- },
- goodies = {
- },
- helpers = { -- might go away
- tounicodelist = splitter,
- tounicodetable = Ct(splitter),
- },
- }
- report_otf("file size: %s", size)
- otfenhancers.apply(data,filename,fontdata)
- local packtime = { }
- if packdata then
- if cleanup > 0 then
- collectgarbage("collect")
- end
- starttiming(packtime)
- otf.packdata(data,filename,nil) -- implemented elsewhere
- stoptiming(packtime)
- end
- report_otf("saving %a in cache",filename)
- data = containers.write(otf.cache, hash, data)
- if cleanup > 1 then
- collectgarbage("collect")
- end
- stoptiming("fontloader")
- if elapsedtime then
- report_otf("loading, optimizing, packing and caching time %s, pack time %s",
- elapsedtime("fontloader"),packdata and elapsedtime(packtime) or 0)
- end
- close_font(fontdata) -- free memory
- if cleanup > 3 then
- collectgarbage("collect")
- end
-            data = containers.read(otf.cache, hash) -- this frees the old table and loads the sparse one
- if cleanup > 2 then
- collectgarbage("collect")
- end
- else
- stoptiming("fontloader")
- data = nil
- report_otf("loading failed due to read error")
- end
- end
- if data then
- if trace_defining then
- report_otf("loading from cache using hash %a",hash)
- end
- otf.unpackdata(data,filename,nil,false) -- implemented elsewhere
- --
- local resources = data.resources
- local lookuptags = resources.lookuptags
- local unicodes = resources.unicodes
- if not lookuptags then
- lookuptags = { }
- resources.lookuptags = lookuptags
- end
- setmetatableindex(lookuptags,function(t,k)
- local v = type(k) == "number" and ("lookup " .. k) or k
- t[k] = v
- return v
- end)
- if not unicodes then
- unicodes = { }
- resources.unicodes = unicodes
- setmetatableindex(unicodes,function(t,k)
- -- use rawget when no table has to be built
- setmetatableindex(unicodes,nil)
- for u, d in next, data.descriptions do
- local n = d.name
- if n then
- t[n] = u
- -- report_otf("accessing known name %a",k)
- else
- -- report_otf("accessing unknown name %a",k)
- end
- end
- return rawget(t,k)
- end)
- end
- constructors.addcoreunicodes(unicodes) -- do we really need this?
- --
- if applyruntimefixes then
- applyruntimefixes(filename,data)
- end
- adddimensions(data,filename,nil,false)
- if trace_sequences then
- showfeatureorder(data,filename)
- end
- end
- return data
-end
-
-local mt = {
- __index = function(t,k) -- maybe set it
- if k == "height" then
- local ht = t.boundingbox[4]
- return ht < 0 and 0 or ht
- elseif k == "depth" then
- local dp = -t.boundingbox[2]
- return dp < 0 and 0 or dp
- elseif k == "width" then
- return 0
- elseif k == "name" then -- or maybe uni*
- return forcenotdef and ".notdef"
- end
- end
-}
-
-local function enhance_prepare_tables(data,filename,raw)
- data.properties.hasitalics = false
-end
-
-local function somecopy(old) -- fast one
- if old then
- local new = { }
- if type(old) == "table" then
- for k, v in next, old do
- if k == "glyphs" then
- -- skip
- elseif type(v) == "table" then
- new[k] = somecopy(v)
- else
- new[k] = v
- end
- end
- else
- for i=1,#mainfields do
- local k = mainfields[i]
- local v = old[k]
- if k == "glyphs" then
- -- skip
- elseif type(v) == "table" then
- new[k] = somecopy(v)
- else
- new[k] = v
- end
- end
- end
- return new
- else
- return { }
- end
-end
-
--- not setting hasitalics and class (when nil) during table construction can save some mem
-
-local function enhance_prepare_glyphs(data,filename,raw)
- local rawglyphs = raw.glyphs
- local rawsubfonts = raw.subfonts
- local rawcidinfo = raw.cidinfo
- local criterium = constructors.privateoffset
- local private = criterium
- local resources = data.resources
- local metadata = data.metadata
- local properties = data.properties
- local descriptions = data.descriptions
- local unicodes = resources.unicodes -- name to unicode
- local indices = resources.indices -- index to unicode
- local duplicates = resources.duplicates
- local variants = resources.variants
-    local notdeffound   = -1
-
- if rawsubfonts then
-
- metadata.subfonts = includesubfonts and { }
- properties.cidinfo = rawcidinfo
-
- if rawcidinfo.registry then
- local cidmap = fonts.cid.getmap(rawcidinfo)
- if cidmap then
- rawcidinfo.usedname = cidmap.usedname
- local nofnames = 0
- local nofunicodes = 0
- local cidunicodes = cidmap.unicodes
- local cidnames = cidmap.names
- local cidtotal = 0
- local unique = trace_subfonts and { }
- for cidindex=1,#rawsubfonts do
- local subfont = rawsubfonts[cidindex]
- local cidglyphs = subfont.glyphs
- if includesubfonts then
- metadata.subfonts[cidindex] = somecopy(subfont)
- end
- local cidcnt = subfont.glyphcnt
- local cidmin = subfont.glyphmin
- local cidmax = subfont.glyphmax
- local notdef = (tonumber(raw.table_version) or 0) > 0.4 and subfont.notdef_loc or -1
- if notdeffound == -1 and notdef >= 0 then
- notdeffound = notdef
- end
- if trace_subfonts then
- local cidtot = cidmax - cidmin + 1
- cidtotal = cidtotal + cidtot
- report_otf("subfont: %i, min: %i, max: %i, cnt: %i, n: %i",cidindex,cidmin,cidmax,cidtot,cidcnt)
- end
- if cidcnt > 0 then
- for index=cidmin,cidmax do
- local glyph = cidglyphs[index]
- if glyph then
- if trace_subfonts then
- unique[index] = true
- end
- local unicode = glyph.unicode
- if unicode >= 0x00E000 and unicode <= 0x00F8FF then
- unicode = -1
- elseif unicode >= 0x0F0000 and unicode <= 0x0FFFFD then
- unicode = -1
- elseif unicode >= 0x100000 and unicode <= 0x10FFFD then
- unicode = -1
- end
- local name = glyph.name or cidnames[index]
- if not unicode or unicode == -1 then -- or unicode >= criterium then
- unicode = cidunicodes[index]
- end
- if unicode and descriptions[unicode] then
- if trace_private then
- report_otf("preventing glyph %a at index %H to overload unicode %U",name or "noname",index,unicode)
- end
- unicode = -1
- end
- if not unicode or unicode == -1 then -- or unicode >= criterium then
- if not name then
- name = formatters["u%06X.ctx"](private)
- end
- unicode = private
- unicodes[name] = private
- if trace_private then
- report_otf("glyph %a at index %H is moved to private unicode slot %U",name,index,private)
- end
- private = private + 1
- nofnames = nofnames + 1
- else
- -- if unicode > criterium then
- -- local taken = descriptions[unicode]
- -- if taken then
- -- private = private + 1
- -- descriptions[private] = taken
- -- unicodes[taken.name] = private
- -- indices[taken.index] = private
- -- if trace_private then
- -- report_otf("slot %U is moved to %U due to private in font",unicode)
- -- end
- -- end
- -- end
- if not name then
- name = formatters["u%06X.ctx"](unicode)
- end
- unicodes[name] = unicode
- nofunicodes = nofunicodes + 1
- end
- indices[index] = unicode -- each index is unique (at least now)
- local description = {
- -- width = glyph.width,
- boundingbox = glyph.boundingbox,
- -- name = glyph.name or name or "unknown", -- uniXXXX
- name = name or "unknown", -- uniXXXX
- -- cidindex = cidindex,
- index = index,
- glyph = glyph,
- }
- descriptions[unicode] = description
- local altuni = glyph.altuni
- if altuni then
- -- local d
- for i=1,#altuni do
- local a = altuni[i]
- local u = a.unicode
- if u ~= unicode then
- local v = a.variant
- if v then
- -- tricky: no addition to d? needs checking but in practice such dups are either very simple
-                                                -- shapes or e.g. cjk with not that many features
- local vv = variants[v]
- if vv then
- vv[u] = unicode
- else -- xits-math has some:
- vv = { [u] = unicode }
- variants[v] = vv
- end
- -- elseif d then
- -- d[#d+1] = u
- -- else
- -- d = { u }
- end
- end
- end
- -- if d then
- -- duplicates[unicode] = d -- is this needed ?
- -- end
- end
- end
- end
- else
- report_otf("potential problem: no glyphs found in subfont %i",cidindex)
- end
- end
- if trace_subfonts then
- report_otf("nofglyphs: %i, unique: %i",cidtotal,table.count(unique))
- end
- if trace_loading then
- report_otf("cid font remapped, %s unicode points, %s symbolic names, %s glyphs",nofunicodes, nofnames, nofunicodes+nofnames)
- end
- elseif trace_loading then
- report_otf("unable to remap cid font, missing cid file for %a",filename)
- end
- elseif trace_loading then
- report_otf("font %a has no glyphs",filename)
- end
-
- else
-
- local cnt = raw.glyphcnt or 0
- local min = raw.glyphmin or 0
- local max = raw.glyphmax or (raw.glyphcnt - 1)
- notdeffound = (tonumber(raw.table_version) or 0) > 0.4 and raw.notdef_loc or -1
- if cnt > 0 then
- for index=min,max do
- local glyph = rawglyphs[index]
- if glyph then
- local unicode = glyph.unicode
- local name = glyph.name
- if not unicode or unicode == -1 then -- or unicode >= criterium then
- unicode = private
- unicodes[name] = private
- if trace_private then
- report_otf("glyph %a at index %H is moved to private unicode slot %U",name,index,private)
- end
- private = private + 1
- else
- -- We have a font that uses and exposes the private area. As this is rather unreliable it's
-                        -- advised not to trust slots here (better use glyphnames). Anyway, we need a double check:
- -- we need to move already moved entries and we also need to bump the next private to after
- -- the (currently) last slot. This could leave us with a hole but we have holes anyway.
- if unicode > criterium then
- -- \definedfont[file:HANBatang-LVT.ttf] \fontchar{uF0135} \char"F0135
- local taken = descriptions[unicode]
- if taken then
- if unicode >= private then
- private = unicode + 1 -- restart private (so we can have mixed now)
- else
- private = private + 1 -- move on
- end
- descriptions[private] = taken
- unicodes[taken.name] = private
- indices[taken.index] = private
- if trace_private then
-                                    report_otf("slot %U is moved to %U due to private in font",unicode,private)
- end
- else
- if unicode >= private then
- private = unicode + 1 -- restart (so we can have mixed now)
- end
- end
- end
- unicodes[name] = unicode
- end
- indices[index] = unicode
- -- if not name then
- -- name = formatters["u%06X"](unicode) -- u%06X.ctx
- -- end
- descriptions[unicode] = {
- -- width = glyph.width,
- boundingbox = glyph.boundingbox,
- name = name,
- index = index,
- glyph = glyph,
- }
- local altuni = glyph.altuni
- if altuni then
- -- local d
- for i=1,#altuni do
- local a = altuni[i]
- local u = a.unicode
- if u ~= unicode then
- local v = a.variant
- if v then
- -- tricky: no addition to d? needs checking but in practice such dups are either very simple
-                                    -- shapes or e.g. cjk with not that many features
- local vv = variants[v]
- if vv then
- vv[u] = unicode
- else -- xits-math has some:
- vv = { [u] = unicode }
- variants[v] = vv
- end
- -- elseif d then
- -- d[#d+1] = u
- -- else
- -- d = { u }
- end
- end
- end
- -- if d then
- -- duplicates[unicode] = d -- is this needed ?
- -- end
- end
- else
- report_otf("potential problem: glyph %U is used but empty",index)
- end
- end
- else
- report_otf("potential problem: no glyphs found")
- end
-
- end
-
- if notdeffound == -1 then
- report_otf("warning: no .notdef found in %a",filename)
- elseif notdeffound ~= 0 then
- report_otf("warning: .notdef found at position %a in %a",notdeffound,filename)
- end
- metadata.notdef = notdeffound
-
- resources.private = private
-
-end
-
--- the next one is still messy but will get better when we have
--- flattened map/enc tables in the font loader
-
--- the next one is not using a valid base for unicode privates
---
--- PsuedoEncodeUnencoded(EncMap *map,struct ttfinfo *info)
-
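--- A small sketch of the data involved, with made-up values: the raw map goes
--- both ways, and an extra unicode that points at an already known glyph (and
--- has no glyph of its own) is recorded as a copy and later as a duplicate.
---
---   mapdata.map     = { [0x0041] = 36, [0x0391] = 36 } -- unicode -> index
---   mapdata.backmap = { [36] = 0x0041 }                -- index   -> unicode
---   -- here 0x0391 resolves (via indices[36]) to 0x0041, so it ends up in
---   -- descriptions[0x0041].copies and eventually in resources.duplicates
-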
-local function enhance_check_encoding(data,filename,raw)
- local descriptions = data.descriptions
- local resources = data.resources
- local properties = data.properties
- local unicodes = resources.unicodes -- name to unicode
- local indices = resources.indices -- index to unicodes
- local duplicates = resources.duplicates
-
- -- begin of messy (not needed when cidmap)
-
- local mapdata = raw.map or { }
- local unicodetoindex = mapdata and mapdata.map or { }
- local indextounicode = mapdata and mapdata.backmap or { }
- -- local encname = lower(data.enc_name or raw.enc_name or mapdata.enc_name or "")
- local encname = lower(data.enc_name or mapdata.enc_name or "")
- local criterium = 0xFFFF -- for instance cambria has a lot of mess up there
- local privateoffset = constructors.privateoffset
-
- -- end of messy
-
- if find(encname,"unicode") then -- unicodebmp, unicodefull, ...
- if trace_loading then
- report_otf("checking embedded unicode map %a",encname)
- end
- local reported = { }
- -- we loop over the original unicode->index mapping but we
- -- need to keep in mind that that one can have weird entries
- -- so we need some extra checking
- for maybeunicode, index in next, unicodetoindex do
- if descriptions[maybeunicode] then
- -- we ignore invalid unicodes (unicode = -1) (ff can map wrong to non private)
- else
- local unicode = indices[index]
- if not unicode then
- -- weird (cjk or so?)
- elseif maybeunicode == unicode then
- -- no need to add
- elseif unicode > privateoffset then
- -- we have a non-unicode
- else
- local d = descriptions[unicode]
- if d then
- local c = d.copies
- if c then
- c[maybeunicode] = true
- else
- d.copies = { [maybeunicode] = true }
- end
- elseif index and not reported[index] then
- report_otf("missing index %i",index)
- reported[index] = true
- end
- end
- end
- end
- for unicode, data in next, descriptions do
- local d = data.copies
- if d then
- duplicates[unicode] = sortedkeys(d)
- data.copies = nil
- end
- end
- elseif properties.cidinfo then
- report_otf("warning: no unicode map, used cidmap %a",properties.cidinfo.usedname)
- else
- report_otf("warning: non unicode map %a, only using glyph unicode data",encname or "whatever")
- end
-
- if mapdata then
- mapdata.map = { } -- clear some memory (virtual and created each time anyway)
- mapdata.backmap = { } -- clear some memory (virtual and created each time anyway)
- end
-end
-
--- for the moment we assume that a font with lookups will not use
--- altuni so we stick to kerns only .. alternatively we can always
--- do an indirect lookup uni_to_uni . but then we need that in
--- all lookups
-
-local function enhance_add_duplicates(data,filename,raw)
- local descriptions = data.descriptions
- local resources = data.resources
- local properties = data.properties
- local unicodes = resources.unicodes -- name to unicode
- local indices = resources.indices -- index to unicodes
- local duplicates = resources.duplicates
- -- for unicode, d in next, duplicates do
- for unicode, d in table.sortedhash(duplicates) do -- nicer for log
- local nofduplicates = #d
- if nofduplicates > 4 then
- if trace_loading then
- report_otf("ignoring excessive duplicates of %U (n=%s)",unicode,nofduplicates)
- end
- else
- -- local validduplicates = { }
- for i=1,nofduplicates do
- local u = d[i]
- if not descriptions[u] then
- local description = descriptions[unicode]
- local n = 0
- for _, description in next, descriptions do
- local kerns = description.kerns
- if kerns then
- for _, k in next, kerns do
- local ku = k[unicode]
- if ku then
- k[u] = ku
- n = n + 1
- end
- end
- end
- -- todo: lookups etc
- end
- if u > 0 then -- and
- local duplicate = copy(description) -- else packing problem
- duplicate.comment = formatters["copy of %U"](unicode)
- descriptions[u] = duplicate
- -- validduplicates[#validduplicates+1] = u
- if trace_loading then
- report_otf("duplicating %06U to %06U with index %H (%s kerns)",unicode,u,description.index,n)
- end
- end
- end
- end
- -- duplicates[unicode] = #validduplicates > 0 and validduplicates or nil
- end
- end
-end
-
--- class : nil base mark ligature component (maybe we don't need it in description)
--- boundingbox: split into ht/dp takes more memory (larger tables and less sharing)
-
-local function enhance_analyze_glyphs(data,filename,raw) -- maybe integrate this in the previous
- local descriptions = data.descriptions
- local resources = data.resources
- local metadata = data.metadata
- local properties = data.properties
- local hasitalics = false
- local widths = { }
- local marks = { } -- always present (saves checking)
- for unicode, description in next, descriptions do
- local glyph = description.glyph
- local italic = glyph.italic_correction -- only in a math font (we also have vert/horiz)
- if not italic then
- -- skip
- elseif italic == 0 then
- -- skip
- else
- description.italic = italic
- hasitalics = true
- end
- local width = glyph.width
- widths[width] = (widths[width] or 0) + 1
- local class = glyph.class
- if class then
- if class == "mark" then
- marks[unicode] = true
- end
- description.class = class
- end
- end
- -- flag italic
- properties.hasitalics = hasitalics
- -- flag marks
- resources.marks = marks
- -- share most common width for cjk fonts
- local wd, most = 0, 1
- for k,v in next, widths do
- if v > most then
- wd, most = k, v
- end
- end
- if most > 1000 then -- maybe 500
- if trace_loading then
- report_otf("most common width: %s (%s times), sharing (cjk font)",wd,most)
- end
- for unicode, description in next, descriptions do
- if description.width == wd then
- -- description.width = nil
- else
- description.width = description.glyph.width
- end
- end
- resources.defaultwidth = wd
- else
- for unicode, description in next, descriptions do
- description.width = description.glyph.width
- end
- end
-end
-
-local function enhance_reorganize_mark_classes(data,filename,raw)
- local mark_classes = raw.mark_classes
- if mark_classes then
- local resources = data.resources
- local unicodes = resources.unicodes
- local markclasses = { }
- resources.markclasses = markclasses -- reversed
- for name, class in next, mark_classes do
- local t = { }
- for s in gmatch(class,"[^ ]+") do
- t[unicodes[s]] = true
- end
- markclasses[name] = t
- end
- end
-end
-
-local function enhance_reorganize_features(data,filename,raw) -- combine with other
- local features = { }
- data.resources.features = features
- for k=1,#otf.glists do
- local what = otf.glists[k]
- local dw = raw[what]
- if dw then
- local f = { }
- features[what] = f
- for i=1,#dw do
-                local d = dw[i]
- local dfeatures = d.features
- if dfeatures then
- for i=1,#dfeatures do
- local df = dfeatures[i]
- local tag = strip(lower(df.tag))
- local ft = f[tag]
- if not ft then
- ft = { }
- f[tag] = ft
- end
- local dscripts = df.scripts
- for i=1,#dscripts do
- local d = dscripts[i]
- local languages = d.langs
- local script = strip(lower(d.script))
- local fts = ft[script] if not fts then fts = {} ft[script] = fts end
- for i=1,#languages do
- fts[strip(lower(languages[i]))] = true
- end
- end
- end
- end
- end
- end
- end
-end
-
-local function enhance_reorganize_anchor_classes(data,filename,raw)
- local resources = data.resources
- local anchor_to_lookup = { }
- local lookup_to_anchor = { }
- resources.anchor_to_lookup = anchor_to_lookup
- resources.lookup_to_anchor = lookup_to_anchor
- local classes = raw.anchor_classes -- anchor classes not in final table
- if classes then
- for c=1,#classes do
- local class = classes[c]
- local anchor = class.name
- local lookups = class.lookup
- if type(lookups) ~= "table" then
- lookups = { lookups }
- end
- local a = anchor_to_lookup[anchor]
- if not a then
- a = { }
- anchor_to_lookup[anchor] = a
- end
- for l=1,#lookups do
- local lookup = lookups[l]
- local l = lookup_to_anchor[lookup]
- if l then
- l[anchor] = true
- else
- l = { [anchor] = true }
- lookup_to_anchor[lookup] = l
- end
- a[lookup] = true
- end
- end
- end
-end
-
--- local function checklookups(data,missing,nofmissing)
--- local resources = data.resources
--- local unicodes = resources.unicodes
--- local lookuptypes = resources.lookuptypes
--- if not unicodes or not lookuptypes then
--- return
--- elseif nofmissing <= 0 then
--- return
--- end
--- local descriptions = data.descriptions
--- local private = fonts.constructors and fonts.constructors.privateoffset or 0xF0000 -- 0x10FFFF
--- --
--- local ns, nl = 0, 0
---
--- local guess = { }
--- -- helper
--- local function check(gname,code,unicode)
--- local description = descriptions[code]
--- -- no need to add a self reference
--- local variant = description.name
--- if variant == gname then
--- return
--- end
--- -- the variant already has a unicode (normally that results in a default tounicode to self)
--- local unic = unicodes[variant]
--- if unic == -1 or unic >= private or (unic >= 0xE000 and unic <= 0xF8FF) or unic == 0xFFFE or unic == 0xFFFF then
--- -- no default mapping and therefore maybe no tounicode yet
--- else
--- return
--- end
--- -- the variant already has a tounicode
--- if descriptions[code].unicode then
--- return
--- end
--- -- add to the list
--- local g = guess[variant]
--- -- local r = overloads[unicode]
--- -- if r then
--- -- unicode = r.unicode
--- -- end
--- if g then
--- g[gname] = unicode
--- else
--- guess[variant] = { [gname] = unicode }
--- end
--- end
--- --
--- for unicode, description in next, descriptions do
--- local slookups = description.slookups
--- if slookups then
--- local gname = description.name
--- for tag, data in next, slookups do
--- local lookuptype = lookuptypes[tag]
--- if lookuptype == "alternate" then
--- for i=1,#data do
--- check(gname,data[i],unicode)
--- end
--- elseif lookuptype == "substitution" then
--- check(gname,data,unicode)
--- end
--- end
--- end
--- local mlookups = description.mlookups
--- if mlookups then
--- local gname = description.name
--- for tag, list in next, mlookups do
--- local lookuptype = lookuptypes[tag]
--- if lookuptype == "alternate" then
--- for i=1,#list do
--- local data = list[i]
--- for i=1,#data do
--- check(gname,data[i],unicode)
--- end
--- end
--- elseif lookuptype == "substitution" then
--- for i=1,#list do
--- check(gname,list[i],unicode)
--- end
--- end
--- end
--- end
--- end
--- -- resolve references
--- local done = true
--- while done do
--- done = false
--- for k, v in next, guess do
--- if type(v) ~= "number" then
--- for kk, vv in next, v do
--- if vv == -1 or vv >= private or (vv >= 0xE000 and vv <= 0xF8FF) or vv == 0xFFFE or vv == 0xFFFF then
--- local uu = guess[kk]
--- if type(uu) == "number" then
--- guess[k] = uu
--- done = true
--- end
--- else
--- guess[k] = vv
--- done = true
--- end
--- end
--- end
--- end
--- end
--- -- wrap up
--- local orphans = 0
--- local guessed = 0
--- for k, v in next, guess do
--- if type(v) == "number" then
--- descriptions[unicodes[k]].unicode = descriptions[v].unicode or v -- can also be a table
--- guessed = guessed + 1
--- else
--- local t = nil
--- local l = lower(k)
--- local u = unicodes[l]
--- if not u then
--- orphans = orphans + 1
--- elseif u == -1 or u >= private or (u >= 0xE000 and u <= 0xF8FF) or u == 0xFFFE or u == 0xFFFF then
--- local unicode = descriptions[u].unicode
--- if unicode then
--- descriptions[unicodes[k]].unicode = unicode
--- guessed = guessed + 1
--- else
--- orphans = orphans + 1
--- end
--- else
--- orphans = orphans + 1
--- end
--- end
--- end
--- if trace_loading and orphans > 0 or guessed > 0 then
--- report_otf("%s glyphs with no related unicode, %s guessed, %s orphans",guessed+orphans,guessed,orphans)
--- end
--- end
-
-local function enhance_prepare_tounicode(data,filename,raw)
- fonts.mappings.addtounicode(data,filename)
-end
-
-local g_directions = {
- gsub_contextchain = 1,
- gpos_contextchain = 1,
- -- gsub_context = 1,
- -- gpos_context = 1,
- gsub_reversecontextchain = -1,
- gpos_reversecontextchain = -1,
-}
--- The following is no longer needed as AAT is ignored per end October 2013.
---
--- -- Research by Khaled Hosny has demonstrated that the font loader merges
--- -- regular and AAT features and that these can interfere (especially because
--- -- we dropped checking for valid features elsewhere. So, we just check for
--- -- the special flag and drop the feature if such a tag is found.
---
--- local function supported(features)
--- for i=1,#features do
--- if features[i].ismac then
--- return false
--- end
--- end
--- return true
--- end
-
-local function enhance_reorganize_subtables(data,filename,raw)
- local resources = data.resources
- local sequences = { }
- local lookups = { }
- local chainedfeatures = { }
- resources.sequences = sequences
- resources.lookups = lookups -- we also have lookups in data itself
- for k=1,#otf.glists do
- local what = otf.glists[k]
- local dw = raw[what]
- if dw then
- for k=1,#dw do
- local gk = dw[k]
- local features = gk.features
- -- if not features or supported(features) then -- not always features !
- local typ = gk.type
- local chain = g_directions[typ] or 0
- local subtables = gk.subtables
- if subtables then
- local t = { }
- for s=1,#subtables do
- t[s] = subtables[s].name
- end
- subtables = t
- end
- local flags, markclass = gk.flags, nil
- if flags then
- local t = { -- forcing false packs nicer
- (flags.ignorecombiningmarks and "mark") or false,
- (flags.ignoreligatures and "ligature") or false,
- (flags.ignorebaseglyphs and "base") or false,
- flags.r2l or false,
- }
- markclass = flags.mark_class
- if markclass then
- markclass = resources.markclasses[markclass]
- end
- flags = t
- end
- --
- local name = gk.name
- --
- if not name then
- -- in fact an error
- report_otf("skipping weird lookup number %s",k)
- elseif features then
- -- scripts, tag, ismac
- local f = { }
- local o = { }
- for i=1,#features do
- local df = features[i]
- local tag = strip(lower(df.tag))
- local ft = f[tag]
- if not ft then
- ft = { }
- f[tag] = ft
- o[#o+1] = tag
- end
- local dscripts = df.scripts
- for i=1,#dscripts do
- local d = dscripts[i]
- local languages = d.langs
- local script = strip(lower(d.script))
- local fts = ft[script] if not fts then fts = {} ft[script] = fts end
- for i=1,#languages do
- fts[strip(lower(languages[i]))] = true
- end
- end
- end
- sequences[#sequences+1] = {
- type = typ,
- chain = chain,
- flags = flags,
- name = name,
- subtables = subtables,
- markclass = markclass,
- features = f,
- order = o,
- }
- else
- lookups[name] = {
- type = typ,
- chain = chain,
- flags = flags,
- subtables = subtables,
- markclass = markclass,
- }
- end
- -- end
- end
- end
- end
-end
-
-local function enhance_prepare_lookups(data,filename,raw)
- local lookups = raw.lookups
- if lookups then
- data.lookups = lookups
- end
-end
-
--- The reverse handler does a bit of redundant splitting but it's seldom
--- seen so we don't bother too much. We could store the replacement
--- in the current list (value instead of true) but it makes other code
--- uglier. Maybe some day.
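---
--- To make the helpers below easier to follow, a sketch with made-up values
--- (assuming the font maps the names a, b and c to 0x61, 0x62 and 0x63): a
--- cover is a space separated string of glyph names that gets split into a
--- list of unicodes, and hashing then turns each slot into a set.
---
---   local uncovered = t_uncover(splitter,cache,{ "a b", "c" }) -- { { 0x61, 0x62 }, { 0x63 } }
---   t_hashed(uncovered,cache) -- { { [0x61]=true, [0x62]=true }, { [0x63]=true } }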
-
-local function t_uncover(splitter,cache,covers)
- local result = { }
- for n=1,#covers do
- local cover = covers[n]
- local uncovered = cache[cover]
- if not uncovered then
- uncovered = lpegmatch(splitter,cover)
- cache[cover] = uncovered
- end
- result[n] = uncovered
- end
- return result
-end
-
-local function s_uncover(splitter,cache,cover)
- if cover == "" then
- return nil
- else
- local uncovered = cache[cover]
- if not uncovered then
- uncovered = lpegmatch(splitter,cover)
- -- for i=1,#uncovered do
- -- uncovered[i] = { [uncovered[i]] = true }
- -- end
- cache[cover] = uncovered
- end
- return { uncovered }
- end
-end
-
-local function t_hashed(t,cache)
- if t then
- local ht = { }
- for i=1,#t do
- local ti = t[i]
- local tih = cache[ti]
- if not tih then
- local tn = #ti
- if tn == 1 then
- tih = { [ti[1]] = true }
- else
- tih = { }
- for i=1,tn do
- tih[ti[i]] = true
- end
- end
- cache[ti] = tih
- end
- ht[i] = tih
- end
- return ht
- else
- return nil
- end
-end
-
--- local s_hashed = t_hashed
-
-local function s_hashed(t,cache)
- if t then
- local tf = t[1]
- local nf = #tf
- if nf == 1 then
- return { [tf[1]] = true }
- else
- local ht = { }
- for i=1,nf do
- ht[i] = { [tf[i]] = true }
- end
- return ht
- end
- else
- return nil
- end
-end
-
-local function r_uncover(splitter,cache,cover,replacements)
- if cover == "" then
- return nil
- else
- -- we always have current as { } even in the case of one
- local uncovered = cover[1]
- local replaced = cache[replacements]
- if not replaced then
- replaced = lpegmatch(splitter,replacements)
- cache[replacements] = replaced
- end
- local nu, nr = #uncovered, #replaced
- local r = { }
- if nu == nr then
- for i=1,nu do
- r[uncovered[i]] = replaced[i]
- end
- end
- return r
- end
-end
-
-local function enhance_reorganize_lookups(data,filename,raw) -- we could check for "" and n == 0
- -- we prefer the before lookups in a normal order
- if data.lookups then
- local helpers = data.helpers
- local duplicates = data.resources.duplicates
- local splitter = helpers.tounicodetable
- local t_u_cache = { }
- local s_u_cache = t_u_cache -- string keys
- local t_h_cache = { }
- local s_h_cache = t_h_cache -- table keys (so we could use one cache)
- local r_u_cache = { } -- maybe shared
- helpers.matchcache = t_h_cache -- so that we can add duplicates
- --
- for _, lookup in next, data.lookups do
- local rules = lookup.rules
- if rules then
- local format = lookup.format
- if format == "class" then
- local before_class = lookup.before_class
- if before_class then
- before_class = t_uncover(splitter,t_u_cache,reversed(before_class))
- end
- local current_class = lookup.current_class
- if current_class then
- current_class = t_uncover(splitter,t_u_cache,current_class)
- end
- local after_class = lookup.after_class
- if after_class then
- after_class = t_uncover(splitter,t_u_cache,after_class)
- end
- for i=1,#rules do
- local rule = rules[i]
- local class = rule.class
- local before = class.before
- if before then
- for i=1,#before do
- before[i] = before_class[before[i]] or { }
- end
- rule.before = t_hashed(before,t_h_cache)
- end
- local current = class.current
- local lookups = rule.lookups
- if current then
- for i=1,#current do
- current[i] = current_class[current[i]] or { }
- -- let's not be sparse
- if lookups and not lookups[i] then
- lookups[i] = "" -- (was: false) e.g. we can have two lookups and one replacement
- end
- -- end of fix
- end
- rule.current = t_hashed(current,t_h_cache)
- end
- local after = class.after
- if after then
- for i=1,#after do
- after[i] = after_class[after[i]] or { }
- end
- rule.after = t_hashed(after,t_h_cache)
- end
- rule.class = nil
- end
- lookup.before_class = nil
- lookup.current_class = nil
- lookup.after_class = nil
- lookup.format = "coverage"
- elseif format == "coverage" then
- for i=1,#rules do
- local rule = rules[i]
- local coverage = rule.coverage
- if coverage then
- local before = coverage.before
- if before then
- before = t_uncover(splitter,t_u_cache,reversed(before))
- rule.before = t_hashed(before,t_h_cache)
- end
- local current = coverage.current
- if current then
- current = t_uncover(splitter,t_u_cache,current)
- -- let's not be sparse
- local lookups = rule.lookups
- if lookups then
- for i=1,#current do
- if not lookups[i] then
- lookups[i] = "" -- fix sparse array
- end
- end
- end
- --
- rule.current = t_hashed(current,t_h_cache)
- end
- local after = coverage.after
- if after then
- after = t_uncover(splitter,t_u_cache,after)
- rule.after = t_hashed(after,t_h_cache)
- end
- rule.coverage = nil
- end
- end
- elseif format == "reversecoverage" then -- special case, single substitution only
- for i=1,#rules do
- local rule = rules[i]
- local reversecoverage = rule.reversecoverage
- if reversecoverage then
- local before = reversecoverage.before
- if before then
- before = t_uncover(splitter,t_u_cache,reversed(before))
- rule.before = t_hashed(before,t_h_cache)
- end
- local current = reversecoverage.current
- if current then
- current = t_uncover(splitter,t_u_cache,current)
- rule.current = t_hashed(current,t_h_cache)
- end
- local after = reversecoverage.after
- if after then
- after = t_uncover(splitter,t_u_cache,after)
- rule.after = t_hashed(after,t_h_cache)
- end
- local replacements = reversecoverage.replacements
- if replacements then
- rule.replacements = r_uncover(splitter,r_u_cache,current,replacements)
- end
- rule.reversecoverage = nil
- end
- end
- elseif format == "glyphs" then
-                    -- I could store these more efficiently (right now we use nested tables for before,
-                    -- after and current) but this feature happens so seldom that I don't bother
-                    -- about it right now.
- for i=1,#rules do
- local rule = rules[i]
- local glyphs = rule.glyphs
- if glyphs then
- local fore = glyphs.fore
- if fore and fore ~= "" then
- fore = s_uncover(splitter,s_u_cache,fore)
- rule.after = s_hashed(fore,s_h_cache)
- end
- local back = glyphs.back
- if back then
- back = s_uncover(splitter,s_u_cache,back)
- rule.before = s_hashed(back,s_h_cache)
- end
- local names = glyphs.names
- if names then
- names = s_uncover(splitter,s_u_cache,names)
- rule.current = s_hashed(names,s_h_cache)
- end
- rule.glyphs = nil
- local lookups = rule.lookups
- if lookups then
- for i=1,#names do
- if not lookups[i] then
- lookups[i] = "" -- fix sparse array
- end
- end
- end
- end
- end
- end
- end
- end
- end
-end
-
-local function enhance_expand_lookups(data,filename,raw) -- we could check for "" and n == 0
- if data.lookups then
- local cache = data.helpers.matchcache
- if cache then
- local duplicates = data.resources.duplicates
- for key, hash in next, cache do
- local done = nil
- for key in next, hash do
- local unicode = duplicates[key]
- if not unicode then
- -- no duplicate
- elseif type(unicode) == "table" then
- -- multiple duplicates
- for i=1,#unicode do
- local u = unicode[i]
- if hash[u] then
- -- already in set
- elseif done then
- done[u] = key
- else
- done = { [u] = key }
- end
- end
- else
- -- one duplicate
- if hash[unicode] then
- -- already in set
- elseif done then
- done[unicode] = key
- else
- done = { [unicode] = key }
- end
- end
- end
- if done then
- for u in next, done do
- hash[u] = true
- end
- end
- end
- end
- end
-end
-
-local function check_variants(unicode,the_variants,splitter,unicodes)
- local variants = the_variants.variants
- if variants then -- use splitter
- local glyphs = lpegmatch(splitter,variants)
- local done = { [unicode] = true }
- local n = 0
- for i=1,#glyphs do
- local g = glyphs[i]
- if done[g] then
- if i > 1 then
- report_otf("skipping cyclic reference %U in math variant %U",g,unicode)
- end
- else
- if n == 0 then
- n = 1
- variants = { g }
- else
- n = n + 1
- variants[n] = g
- end
- done[g] = true
- end
- end
- if n == 0 then
- variants = nil
- end
- end
- local parts = the_variants.parts
- if parts then
- local p = #parts
- if p > 0 then
- for i=1,p do
- local pi = parts[i]
- pi.glyph = unicodes[pi.component] or 0
- pi.component = nil
- end
- else
- parts = nil
- end
- end
- local italic = the_variants.italic
- if italic and italic == 0 then
- italic = nil
- end
- return variants, parts, italic
-end
-
-local function enhance_analyze_math(data,filename,raw)
- if raw.math then
- data.metadata.math = raw.math
- local unicodes = data.resources.unicodes
- local splitter = data.helpers.tounicodetable
- for unicode, description in next, data.descriptions do
- local glyph = description.glyph
- local mathkerns = glyph.mathkern -- singular
- local hvariants = glyph.horiz_variants
- local vvariants = glyph.vert_variants
- local accent = glyph.top_accent
- local italic = glyph.italic_correction
- if mathkerns or hvariants or vvariants or accent or italic then
- local math = { }
- if accent then
- math.accent = accent
- end
- if mathkerns then
- local topright = mathkerns.top_right
- local topleft = mathkerns.top_left
- local bottomright = mathkerns.bottom_right
- local bottomleft = mathkerns.bottom_left
- math.kerns = {
- topright = topright and next(topright) and topright or nil,
- topleft = topleft and next(topleft) and topleft or nil,
- bottomright = bottomright and next(bottomright) and bottomright or nil,
- bottomleft = bottomleft and next(bottomleft) and bottomleft or nil,
- }
- end
- if hvariants then
- math.hvariants, math.hparts, math.hitalic = check_variants(unicode,hvariants,splitter,unicodes)
- end
- if vvariants then
- math.vvariants, math.vparts, math.vitalic = check_variants(unicode,vvariants,splitter,unicodes)
- end
- if italic and italic ~= 0 then
- math.italic = italic
- end
- description.math = math
- end
- end
- end
-end
-
-local function enhance_reorganize_glyph_kerns(data,filename,raw)
- local descriptions = data.descriptions
- local resources = data.resources
- local unicodes = resources.unicodes
- for unicode, description in next, descriptions do
- local kerns = description.glyph.kerns
- if kerns then
- local newkerns = { }
- for k, kern in next, kerns do
- local name = kern.char
- local offset = kern.off
- local lookup = kern.lookup
- if name and offset and lookup then
- local unicode = unicodes[name]
- if unicode then
- if type(lookup) == "table" then
- for l=1,#lookup do
- local lookup = lookup[l]
- local lookupkerns = newkerns[lookup]
- if lookupkerns then
- lookupkerns[unicode] = offset
- else
- newkerns[lookup] = { [unicode] = offset }
- end
- end
- else
- local lookupkerns = newkerns[lookup]
- if lookupkerns then
- lookupkerns[unicode] = offset
- else
- newkerns[lookup] = { [unicode] = offset }
- end
- end
- elseif trace_loading then
- report_otf("problems with unicode %a of kern %a of glyph %U",name,k,unicode)
- end
- end
- end
- description.kerns = newkerns
- end
- end
-end
-
-local function enhance_merge_kern_classes(data,filename,raw)
- local gposlist = raw.gpos
- if gposlist then
- local descriptions = data.descriptions
- local resources = data.resources
- local unicodes = resources.unicodes
- local splitter = data.helpers.tounicodetable
- local ignored = 0
- local blocked = 0
- for gp=1,#gposlist do
- local gpos = gposlist[gp]
- local subtables = gpos.subtables
- if subtables then
- local first_done = { } -- could become an option so that we can deal with buggy fonts that don't get fixed
- local split = { } -- saves time .. although probably not that much any more in the fixed luatex kernclass table
- for s=1,#subtables do
- local subtable = subtables[s]
- local kernclass = subtable.kernclass -- name is inconsistent with anchor_classes
- local lookup = subtable.lookup or subtable.name
- if kernclass then -- the next one is quite slow
-                        -- as far as I can see the kernclass is a table with one entry and offsets
-                        -- have no [1] so we could remove one level (kernclass) and start offsets
-                        -- at 1 but we're too far down the road now to fix that
- if #kernclass > 0 then
- kernclass = kernclass[1]
- lookup = type(kernclass.lookup) == "string" and kernclass.lookup or lookup
- report_otf("fixing kernclass table of lookup %a",lookup)
- end
- local firsts = kernclass.firsts
- local seconds = kernclass.seconds
- local offsets = kernclass.offsets
- -- if offsets[1] == nil then
- -- offsets[1] = "" -- defaults ?
- -- end
- for n, s in next, firsts do
- split[s] = split[s] or lpegmatch(splitter,s)
- end
- local maxseconds = 0
- for n, s in next, seconds do
- if n > maxseconds then
- maxseconds = n
- end
- split[s] = split[s] or lpegmatch(splitter,s)
- end
- for fk=1,#firsts do -- maxfirsts ?
- local fv = firsts[fk]
- local splt = split[fv]
- if splt then
- local extrakerns = { }
- local baseoffset = (fk-1) * maxseconds
- -- for sk, sv in next, seconds do
- for sk=2,maxseconds do
- local sv = seconds[sk]
- if sv then
- local splt = split[sv]
- if splt then -- redundant test
- local offset = offsets[baseoffset + sk]
- if offset then
- for i=1,#splt do
- extrakerns[splt[i]] = offset
- end
- end
- end
- end
- end
- for i=1,#splt do
- local first_unicode = splt[i]
- if first_done[first_unicode] then
- report_otf("lookup %a: ignoring further kerns of %C",lookup,first_unicode)
- blocked = blocked + 1
- else
- first_done[first_unicode] = true
- local description = descriptions[first_unicode]
- if description then
- local kerns = description.kerns
- if not kerns then
- kerns = { } -- unicode indexed !
- description.kerns = kerns
- end
- local lookupkerns = kerns[lookup]
- if not lookupkerns then
- lookupkerns = { }
- kerns[lookup] = lookupkerns
- end
- if overloadkerns then
- for second_unicode, kern in next, extrakerns do
- lookupkerns[second_unicode] = kern
- end
- else
- for second_unicode, kern in next, extrakerns do
- local k = lookupkerns[second_unicode]
- if not k then
- lookupkerns[second_unicode] = kern
- elseif k ~= kern then
- if trace_loading then
- report_otf("lookup %a: ignoring overload of kern between %C and %C, rejecting %a, keeping %a",lookup,first_unicode,second_unicode,k,kern)
- end
- ignored = ignored + 1
- end
- end
- end
- elseif trace_loading then
- report_otf("no glyph data for %U", first_unicode)
- end
- end
- end
- end
- end
- subtable.kernclass = { }
- end
- end
- end
- end
- if ignored > 0 then
- report_otf("%s kern overloads ignored",ignored)
- end
- if blocked > 0 then
- report_otf("%s successive kerns blocked",blocked)
- end
- end
-end
-
-local function enhance_check_glyphs(data,filename,raw)
- for unicode, description in next, data.descriptions do
- description.glyph = nil
- end
-end
-
--- future versions will remove _
-
-local valid = (R("\x00\x7E") - S("(){}[]<>%/ \n\r\f\v"))^0 * P(-1)
-
-local function valid_ps_name(str)
- return str and str ~= "" and #str < 64 and lpegmatch(valid,str) and true or false
-end
-
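--- A few illustrative cases for the check above, with made-up names: a usable
--- postscript name is plain ascii, shorter than 64 bytes and free of the
--- listed delimiters.
---
---   valid_ps_name("LMRoman10-Regular") -- true
---   valid_ps_name("Bad Name (Pro)")    -- false, space and parentheses
---   valid_ps_name("")                  -- false, empty
-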
-local function enhance_check_metadata(data,filename,raw)
- local metadata = data.metadata
- for _, k in next, mainfields do
- if valid_fields[k] then
- local v = raw[k]
- if not metadata[k] then
- metadata[k] = v
- end
- end
- end
- -- metadata.pfminfo = raw.pfminfo -- not already done?
- local ttftables = metadata.ttf_tables
- if ttftables then
- for i=1,#ttftables do
- ttftables[i].data = "deleted"
- end
- end
- --
- local state = metadata.validation_state
- local names = raw.names
- --
- if state and table.contains(state,"bad_ps_fontname") then
- -- the ff library does a bit too much (and wrong) checking ... so we need to catch this
- -- at least for now
- local function valid(what)
- if names then
- for i=1,#names do
- local list = names[i]
- local names = list.names
- if names then
- local name = names[what]
- if name and valid_ps_name(name) then
- return name
- end
- end
- end
- end
- end
- local function check(what)
- local oldname = metadata[what]
- if valid_ps_name(oldname) then
- report_otf("ignoring warning %a because %s %a is proper ASCII","bad_ps_fontname",what,oldname)
- else
- local newname = valid(what)
- if not newname then
- newname = formatters["bad-%s-%s"](what,file.nameonly(filename))
- end
- local warning = formatters["overloading %s from invalid ASCII name %a to %a"](what,oldname,newname)
- data.warnings[#data.warnings+1] = warning
- report_otf(warning)
- metadata[what] = newname
- end
- end
- check("fontname")
- check("fullname")
- end
- --
- if names then
- local psname = metadata.psname
- if not psname or psname == "" then
- for i=1,#names do
- local name = names[i]
- -- Currently we use the same restricted search as in the new context (specific) font loader
-                -- but we might add more lang checks (it worked ok in the new loader so now we're in sync).
- -- This check here is also because there are (esp) cjk fonts out there with psnames different
- -- from fontnames (gives a bad lookup in backend).
- if lower(name.lang) == "english (us)" then
- local specification = name.names
- if specification then
- local postscriptname = specification.postscriptname
- if postscriptname then
- psname = postscriptname
- end
- end
- end
- break
- end
- end
- if psname ~= metadata.fontname then
- report_otf("fontname %a, fullname %a, psname %a",metadata.fontname,metadata.fullname,psname)
- end
- metadata.psname = psname
- end
- --
- if state and table.contains(state,"bad_cmap_table") then
- report_otf("fontfile %a has bad cmap tables",filename)
- end
-end
-
-local function enhance_cleanup_tables(data,filename,raw)
- local duplicates = data.resources.duplicates
- if duplicates then
- for k, v in next, duplicates do
- if #v == 1 then
- duplicates[k] = v[1]
- end
- end
- end
- data.resources.indices = nil -- not needed
- data.resources.unicodes = nil -- delayed
- data.helpers = nil -- tricky as we have no unicodes any more
-end
-
--- kern: ttf has a table with kerns
---
--- Weird, as maxfirst and maxseconds can have holes, first seems to be indexed, but
--- seconds can start at 2 .. this needs to be fixed as getn as well as # are sort of
--- unpredictable; alternatively we could force a [1] if not set (maybe I will do that
--- anyway).
-
--- we can share { } as it is never set
-
--- ligatures have an extra specification.char entry that we don't use
-
--- mlookups only with pairs and ligatures
-
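--- A rough sketch of what the next enhancer does, with made-up lookup tags,
--- glyph names and unicodes: the verbose per glyph specifications are
--- flattened into compact slookups (single) and mlookups (multiple) entries.
---
---   -- raw   : glyph.lookups["ls_1_s"] = { { type = "substitution", specification = { variant = "a.alt" } } }
---   -- result: description.slookups["ls_1_s"] = unicodes["a.alt"]
---   -- raw   : glyph.lookups["lg_2_s"] = { { type = "ligature", specification = { components = "f i" } } }
---   -- result: description.slookups["lg_2_s"] = { unicodes["f"], unicodes["i"] }
-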
-local function enhance_reorganize_glyph_lookups(data,filename,raw)
- local resources = data.resources
- local unicodes = resources.unicodes
- local descriptions = data.descriptions
- local splitter = data.helpers.tounicodelist
-
- local lookuptypes = resources.lookuptypes
-
- for unicode, description in next, descriptions do
- local lookups = description.glyph.lookups
- if lookups then
- for tag, lookuplist in next, lookups do
- for l=1,#lookuplist do
- local lookup = lookuplist[l]
- local specification = lookup.specification
- local lookuptype = lookup.type
- local lt = lookuptypes[tag]
- if not lt then
- lookuptypes[tag] = lookuptype
- elseif lt ~= lookuptype then
- report_otf("conflicting lookuptypes, %a points to %a and %a",tag,lt,lookuptype)
- end
- if lookuptype == "ligature" then
- lookuplist[l] = { lpegmatch(splitter,specification.components) }
- elseif lookuptype == "alternate" then
- lookuplist[l] = { lpegmatch(splitter,specification.components) }
- elseif lookuptype == "substitution" then
- lookuplist[l] = unicodes[specification.variant]
- elseif lookuptype == "multiple" then
- lookuplist[l] = { lpegmatch(splitter,specification.components) }
- elseif lookuptype == "position" then
- lookuplist[l] = {
- specification.x or 0,
- specification.y or 0,
- specification.h or 0,
- specification.v or 0
- }
- elseif lookuptype == "pair" then
- local one = specification.offsets[1]
- local two = specification.offsets[2]
- local paired = unicodes[specification.paired]
- if one then
- if two then
- lookuplist[l] = { paired, { one.x or 0, one.y or 0, one.h or 0, one.v or 0 }, { two.x or 0, two.y or 0, two.h or 0, two.v or 0 } }
- else
- lookuplist[l] = { paired, { one.x or 0, one.y or 0, one.h or 0, one.v or 0 } }
- end
- else
- if two then
- lookuplist[l] = { paired, { }, { two.x or 0, two.y or 0, two.h or 0, two.v or 0} } -- maybe nil instead of { }
- else
- lookuplist[l] = { paired }
- end
- end
- end
- end
- end
- local slookups, mlookups
- for tag, lookuplist in next, lookups do
- if #lookuplist == 1 then
- if slookups then
- slookups[tag] = lookuplist[1]
- else
- slookups = { [tag] = lookuplist[1] }
- end
- else
- if mlookups then
- mlookups[tag] = lookuplist
- else
- mlookups = { [tag] = lookuplist }
- end
- end
- end
- if slookups then
- description.slookups = slookups
- end
- if mlookups then
- description.mlookups = mlookups
- end
- -- description.lookups = nil
- end
- end
-end
-
-local zero = { 0, 0 }
-
-local function enhance_reorganize_glyph_anchors(data,filename,raw)
- local descriptions = data.descriptions
- for unicode, description in next, descriptions do
- local anchors = description.glyph.anchors
- if anchors then
- for class, data in next, anchors do
- if class == "baselig" then
- for tag, specification in next, data do
- -- for i=1,#specification do
- -- local si = specification[i]
- -- specification[i] = { si.x or 0, si.y or 0 }
- -- end
- -- can be sparse so we need to fill the holes
- local n = 0
- for k, v in next, specification do
- if k > n then
- n = k
- end
- local x, y = v.x, v.y
- if x or y then
- specification[k] = { x or 0, y or 0 }
- else
- specification[k] = zero
- end
- end
- local t = { }
- for i=1,n do
- t[i] = specification[i] or zero
- end
- data[tag] = t -- so # is okay (nicer for packer)
- end
- else
- for tag, specification in next, data do
- local x, y = specification.x, specification.y
- if x or y then
- data[tag] = { x or 0, y or 0 }
- else
- data[tag] = zero
- end
- end
- end
- end
- description.anchors = anchors
- end
- end
-end
-
-local bogusname = (P("uni") + P("u")) * R("AF","09")^4
- + (P("index") + P("glyph") + S("Ii") * P("dentity") * P(".")^0) * R("09")^1
-local uselessname = (1-bogusname)^0 * bogusname
-
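--- Some illustrative matches for the patterns above, with made-up names: a
--- name that only encodes an index or a unicode carries no extra information
--- and can safely be dropped.
---
---   lpegmatch(uselessname,"uni0041")  -- matches, so the name gets purged
---   lpegmatch(uselessname,"glyph123") -- matches
---   lpegmatch(uselessname,"a.sc")     -- fails, so the name is kept
-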
-local function enhance_purge_names(data,filename,raw) -- not used yet
- if purge_names then
- local n = 0
- for u, d in next, data.descriptions do
- if lpegmatch(uselessname,d.name) then
- n = n + 1
- d.name = nil
- end
- -- d.comment = nil
- end
- if n > 0 then
- report_otf("%s bogus names removed",n)
- end
- end
-end
-
-local function enhance_compact_lookups(data,filename,raw)
- if not compact_lookups then
- report_otf("not compacting")
- return
- end
- -- create keyhash
- local last = 0
- local tags = table.setmetatableindex({ },
- function(t,k)
- last = last + 1
- t[k] = last
- return last
- end
- )
- --
- local descriptions = data.descriptions
- local resources = data.resources
- --
- for u, d in next, descriptions do
- --
- -- -- we can also compact anchors and cursives (basechar basemark baselig mark)
- --
- local slookups = d.slookups
- if type(slookups) == "table" then
- local s = { }
- for k, v in next, slookups do
- s[tags[k]] = v
- end
- d.slookups = s
- end
- --
- local mlookups = d.mlookups
- if type(mlookups) == "table" then
- local m = { }
- for k, v in next, mlookups do
- m[tags[k]] = v
- end
- d.mlookups = m
- end
- --
- local kerns = d.kerns
- if type(kerns) == "table" then
- local t = { }
- for k, v in next, kerns do
- t[tags[k]] = v
- end
- d.kerns = t
- end
- end
- --
- local lookups = data.lookups
- if lookups then
- local l = { }
- for k, v in next, lookups do
- local rules = v.rules
- if rules then
- for i=1,#rules do
- local l = rules[i].lookups
- if type(l) == "table" then
- for i=1,#l do
- l[i] = tags[l[i]]
- end
- end
- end
- end
- l[tags[k]] = v
- end
- data.lookups = l
- end
- --
- local lookups = resources.lookups
- if lookups then
- local l = { }
- for k, v in next, lookups do
- local s = v.subtables
- if type(s) == "table" then
- for i=1,#s do
- s[i] = tags[s[i]]
- end
- end
- l[tags[k]] = v
- end
- resources.lookups = l
- end
- --
- local sequences = resources.sequences
- if sequences then
- for i=1,#sequences do
- local s = sequences[i]
- local n = s.name
- if n then
- s.name = tags[n]
- end
- local t = s.subtables
- if type(t) == "table" then
- for i=1,#t do
- t[i] = tags[t[i]]
- end
- end
- end
- end
- --
- local lookuptypes = resources.lookuptypes
- if lookuptypes then
- local l = { }
- for k, v in next, lookuptypes do
- l[tags[k]] = v
- end
- resources.lookuptypes = l
- end
- --
- local anchor_to_lookup = resources.anchor_to_lookup
- if anchor_to_lookup then
- for anchor, lookups in next, anchor_to_lookup do
- local l = { }
- for lookup, value in next, lookups do
- l[tags[lookup]] = value
- end
- anchor_to_lookup[anchor] = l
- end
- end
- --
- local lookup_to_anchor = resources.lookup_to_anchor
- if lookup_to_anchor then
- local l = { }
- for lookup, value in next, lookup_to_anchor do
- l[tags[lookup]] = value
- end
- resources.lookup_to_anchor = l
- end
- --
- tags = table.swapped(tags)
- --
- report_otf("%s lookup tags compacted",#tags)
- --
- resources.lookuptags = tags
-end
-
--- modes: node, base, none
-
-function otf.setfeatures(tfmdata,features)
- local okay = constructors.initializefeatures("otf",tfmdata,features,trace_features,report_otf)
- if okay then
- return constructors.collectprocessors("otf",tfmdata,features,trace_features,report_otf)
- else
- return { } -- will become false
- end
-end
-
--- the first version made a top/mid/not extensible table, now we just
--- pass on the variants data and deal with it in the tfm scaler (there
--- is no longer an extensible table anyway)
---
--- we cannot share descriptions as virtual fonts might extend them (ok,
- -- we could use a cache with a hash)
---
- -- we already assign an empty table to characters as we can add for
--- instance protruding info and loop over characters; one is not supposed
--- to change descriptions and if one does so one should make a copy!
-
-local function copytotfm(data,cache_id)
- if data then
- local metadata = data.metadata
- local warnings = data.warnings
- local resources = data.resources
- local properties = derivetable(data.properties)
- local descriptions = derivetable(data.descriptions)
- local goodies = derivetable(data.goodies)
- local characters = { }
- local parameters = { }
- local mathparameters = { }
- --
- local pfminfo = metadata.pfminfo or { }
- local resources = data.resources
- local unicodes = resources.unicodes
- -- local mode = data.mode or "base"
- local spaceunits = 500
- local spacer = "space"
- local designsize = metadata.designsize or metadata.design_size or 100
- local minsize = metadata.minsize or metadata.design_range_bottom or designsize
- local maxsize = metadata.maxsize or metadata.design_range_top or designsize
- local mathspecs = metadata.math
- --
- if designsize == 0 then
- designsize = 100
- minsize = 100
- maxsize = 100
- end
- if mathspecs then
- for name, value in next, mathspecs do
- mathparameters[name] = value
- end
- end
- for unicode, _ in next, data.descriptions do -- use parent table
- characters[unicode] = { }
- end
- if mathspecs then
- -- we could move this to the scaler but not that much is saved
- -- and this is cleaner
- for unicode, character in next, characters do
- local d = descriptions[unicode]
- local m = d.math
- if m then
- -- watch out: luatex uses horiz_variants for the parts
- --
- local italic = m.italic
- local vitalic = m.vitalic
- --
- local variants = m.hvariants
- local parts = m.hparts
- -- local done = { [unicode] = true }
- if variants then
- local c = character
- for i=1,#variants do
- local un = variants[i]
- -- if done[un] then
- -- -- report_otf("skipping cyclic reference %U in math variant %U",un,unicode)
- -- else
- c.next = un
- c = characters[un]
- -- done[un] = true
- -- end
- end -- c is now last in chain
- c.horiz_variants = parts
- elseif parts then
- character.horiz_variants = parts
- italic = m.hitalic
- end
- --
- local variants = m.vvariants
- local parts = m.vparts
- -- local done = { [unicode] = true }
- if variants then
- local c = character
- for i=1,#variants do
- local un = variants[i]
- -- if done[un] then
- -- -- report_otf("skipping cyclic reference %U in math variant %U",un,unicode)
- -- else
- c.next = un
- c = characters[un]
- -- done[un] = true
- -- end
- end -- c is now last in chain
- c.vert_variants = parts
- elseif parts then
- character.vert_variants = parts
- end
- --
- if italic and italic ~= 0 then
- character.italic = italic -- overload
- end
- if vitalic and vitalic ~= 0 then
- character.vert_italic = vitalic
- end
- --
- local accent = m.accent
- if accent then
- character.accent = accent
- end
- --
- local kerns = m.kerns
- if kerns then
- character.mathkerns = kerns
- end
- end
- end
- end
- -- end math
- -- we need a runtime lookup because of running from cdrom or zip, brrr (shouldn't we use the basename then?)
- local filename = constructors.checkedfilename(resources)
- local fontname = metadata.fontname
- local fullname = metadata.fullname or fontname
- local psname = metadata.psname or fontname or fullname
- local units = metadata.units or metadata.units_per_em or 1000
- --
- if units == 0 then -- catch bugs in fonts
- units = 1000 -- maybe 2000 when ttf
- metadata.units = 1000
- report_otf("changing %a units to %a",0,units)
- end
- --
- local monospaced = metadata.monospaced or metadata.isfixedpitch or (pfminfo.panose and pfminfo.panose.proportion == "Monospaced")
- local charwidth = pfminfo.avgwidth -- or unset
- local charxheight = pfminfo.os2_xheight and pfminfo.os2_xheight > 0 and pfminfo.os2_xheight
--- charwidth = charwidth * units/1000
--- charxheight = charxheight * units/1000
- local italicangle = metadata.italicangle
- properties.monospaced = monospaced
- parameters.italicangle = italicangle
- parameters.charwidth = charwidth
- parameters.charxheight = charxheight
- --
- local space = 0x0020
- local emdash = 0x2014
- if monospaced then
- if descriptions[space] then
- spaceunits, spacer = descriptions[space].width, "space"
- end
- if not spaceunits and descriptions[emdash] then
- spaceunits, spacer = descriptions[emdash].width, "emdash"
- end
- if not spaceunits and charwidth then
- spaceunits, spacer = charwidth, "charwidth"
- end
- else
- if descriptions[space] then
- spaceunits, spacer = descriptions[space].width, "space"
- end
- if not spaceunits and descriptions[emdash] then
- spaceunits, spacer = descriptions[emdash].width/2, "emdash/2"
- end
- if not spaceunits and charwidth then
- spaceunits, spacer = charwidth, "charwidth"
- end
- end
- spaceunits = tonumber(spaceunits) or units/2
- --
- parameters.slant = 0
- parameters.space = spaceunits -- 3.333 (cmr10)
- parameters.space_stretch = units/2 -- 500 -- 1.666 (cmr10)
- parameters.space_shrink = 1*units/3 -- 333 -- 1.111 (cmr10)
- parameters.x_height = 2*units/5 -- 400
- parameters.quad = units -- 1000
- if spaceunits < 2*units/5 then
- -- todo: warning
- end
- if italicangle and italicangle ~= 0 then
- parameters.italicangle = italicangle
- parameters.italicfactor = math.cos(math.rad(90+italicangle))
- parameters.slant = - math.tan(italicangle*math.pi/180)
- end
- if monospaced then
- parameters.space_stretch = 0
- parameters.space_shrink = 0
- elseif syncspace then --
- parameters.space_stretch = spaceunits/2
- parameters.space_shrink = spaceunits/3
- end
- parameters.extra_space = parameters.space_shrink -- 1.111 (cmr10)
- if charxheight then
- parameters.x_height = charxheight
- else
- local x = 0x0078
- if x then
- local x = descriptions[x]
- if x then
- parameters.x_height = x.height
- end
- end
- end
- --
- parameters.designsize = (designsize/10)*65536
- parameters.minsize = (minsize /10)*65536
- parameters.maxsize = (maxsize /10)*65536
- parameters.ascender = abs(metadata.ascender or metadata.ascent or 0)
- parameters.descender = abs(metadata.descender or metadata.descent or 0)
- parameters.units = units
- --
- properties.space = spacer
- properties.encodingbytes = 2
- properties.format = data.format or otf_format(filename) or formats.otf
- properties.noglyphnames = true
- properties.filename = filename
- properties.fontname = fontname
- properties.fullname = fullname
- properties.psname = psname
- properties.name = filename or fullname
- --
- -- properties.name = specification.name
- -- properties.sub = specification.sub
- --
- if warnings and #warnings > 0 then
- report_otf("warnings for font: %s",filename)
- report_otf()
- for i=1,#warnings do
- report_otf(" %s",warnings[i])
- end
- report_otf()
- end
- return {
- characters = characters,
- descriptions = descriptions,
- parameters = parameters,
- mathparameters = mathparameters,
- resources = resources,
- properties = properties,
- goodies = goodies,
- warnings = warnings,
- }
- end
-end
-
-local function otftotfm(specification)
- local cache_id = specification.hash
- local tfmdata = containers.read(constructors.cache,cache_id)
- if not tfmdata then
- local name = specification.name
- local sub = specification.sub
- local filename = specification.filename
- -- local format = specification.format
- local features = specification.features.normal
- local rawdata = otf.load(filename,sub,features and features.featurefile)
- if rawdata and next(rawdata) then
- local descriptions = rawdata.descriptions
- local duplicates = rawdata.resources.duplicates
- if duplicates then
- local nofduplicates, nofduplicated = 0, 0
- for parent, list in next, duplicates do
- if type(list) == "table" then
- local n = #list
- for i=1,n do
- local unicode = list[i]
- if not descriptions[unicode] then
- descriptions[unicode] = descriptions[parent] -- or copy
- nofduplicated = nofduplicated + 1
- end
- end
- nofduplicates = nofduplicates + n
- else
- if not descriptions[list] then
- descriptions[list] = descriptions[parent] -- or copy
- nofduplicated = nofduplicated + 1
- end
- nofduplicates = nofduplicates + 1
- end
- end
- if trace_otf and nofduplicated ~= nofduplicates then
- report_otf("%i extra duplicates copied out of %i",nofduplicated,nofduplicates)
- end
- end
- rawdata.lookuphash = { }
- tfmdata = copytotfm(rawdata,cache_id)
- if tfmdata and next(tfmdata) then
- -- at this moment no characters are assigned yet, only empty slots
- local features = constructors.checkedfeatures("otf",features)
- local shared = tfmdata.shared
- if not shared then
- shared = { }
- tfmdata.shared = shared
- end
- shared.rawdata = rawdata
- -- shared.features = features -- default
- shared.dynamics = { }
- -- shared.processes = { }
- tfmdata.changed = { }
- shared.features = features
- shared.processes = otf.setfeatures(tfmdata,features)
- end
- end
- containers.write(constructors.cache,cache_id,tfmdata)
- end
- return tfmdata
-end
-
-local function read_from_otf(specification)
- local tfmdata = otftotfm(specification)
- if tfmdata then
- -- this late ? .. needs checking
- tfmdata.properties.name = specification.name
- tfmdata.properties.sub = specification.sub
- --
- tfmdata = constructors.scale(tfmdata,specification)
- local allfeatures = tfmdata.shared.features or specification.features.normal
- constructors.applymanipulators("otf",tfmdata,allfeatures,trace_features,report_otf)
- constructors.setname(tfmdata,specification) -- only otf?
- fonts.loggers.register(tfmdata,file.suffix(specification.filename),specification)
- end
- return tfmdata
-end
-
-local function checkmathsize(tfmdata,mathsize)
- local mathdata = tfmdata.shared.rawdata.metadata.math
- local mathsize = tonumber(mathsize)
- if mathdata then -- we cannot use mathparameters as luatex will complain
- local parameters = tfmdata.parameters
- parameters.scriptpercentage = mathdata.ScriptPercentScaleDown
- parameters.scriptscriptpercentage = mathdata.ScriptScriptPercentScaleDown
- parameters.mathsize = mathsize
- end
-end
-
-registerotffeature {
- name = "mathsize",
- description = "apply mathsize specified in the font",
- initializers = {
- base = checkmathsize,
- node = checkmathsize,
- }
-}
-
--- helpers
-
-function otf.collectlookups(rawdata,kind,script,language)
- local sequences = rawdata.resources.sequences
- if sequences then
- local featuremap, featurelist = { }, { }
- for s=1,#sequences do
- local sequence = sequences[s]
- local features = sequence.features
- features = features and features[kind]
- features = features and (features[script] or features[default] or features[wildcard])
- features = features and (features[language] or features[default] or features[wildcard])
- if features then
- local subtables = sequence.subtables
- if subtables then
- for s=1,#subtables do
- local ss = subtables[s]
- if not featuremap[s] then
- featuremap[ss] = true
- featurelist[#featurelist+1] = ss
- end
- end
- end
- end
- end
- if #featurelist > 0 then
- return featuremap, featurelist
- end
- end
- return nil, nil
-end
-
--- readers (a bit messy, this forced so I might redo that bit: foo.ttf FOO.ttf foo.TTF FOO.TTF)
-
-local function check_otf(forced,specification,suffix)
- local name = specification.name
- if forced then
- name = specification.forcedname -- messy
- end
- local fullname = findbinfile(name,suffix) or ""
- if fullname == "" then
- fullname = fonts.names.getfilename(name,suffix) or ""
- end
- if fullname ~= "" and not fonts.names.ignoredfile(fullname) then
- specification.filename = fullname
- return read_from_otf(specification)
- end
-end
-
-local function opentypereader(specification,suffix)
- local forced = specification.forced or ""
- if formats[forced] then
- return check_otf(true,specification,forced)
- else
- return check_otf(false,specification,suffix)
- end
-end
-
-readers.opentype = opentypereader -- kind of useless and obsolete
-
-function readers.otf (specification) return opentypereader(specification,"otf") end
-function readers.ttf (specification) return opentypereader(specification,"ttf") end
-function readers.ttc (specification) return opentypereader(specification,"ttf") end
-function readers.dfont(specification) return opentypereader(specification,"ttf") end
-
--- this will be overloaded
-
-function otf.scriptandlanguage(tfmdata,attr)
- local properties = tfmdata.properties
- return properties.script or "dflt", properties.language or "dflt"
-end
-
--- a little bit of abstraction
-
-local function justset(coverage,unicode,replacement)
- coverage[unicode] = replacement
-end
-
-otf.coverup = {
- stepkey = "subtables",
- actions = {
- substitution = justset,
- alternate = justset,
- multiple = justset,
- ligature = justset,
- kern = justset,
- pair = justset,
- chainsubstitution = justset,
- chainposition = justset,
- },
- register = function(coverage,lookuptype,format,feature,n,descriptions,resources)
- local name = formatters["ctx_%s_%s_%s"](feature,lookuptype,n) -- we can have a mix of types
- if lookuptype == "kern" then
- resources.lookuptypes[name] = "position"
- else
- resources.lookuptypes[name] = lookuptype
- end
- for u, c in next, coverage do
- local description = descriptions[u]
- local slookups = description.slookups
- if slookups then
- slookups[name] = c
- else
- description.slookups = { [name] = c }
- end
- end
- return name
- end
-}
-
--- moved from font-oth.lua
-
-local function getgsub(tfmdata,k,kind)
- local description = tfmdata.descriptions[k]
- if description then
- local slookups = description.slookups -- we assume only slookups (we can always extend)
- if slookups then
- local shared = tfmdata.shared
- local rawdata = shared and shared.rawdata
- if rawdata then
- local lookuptypes = rawdata.resources.lookuptypes
- if lookuptypes then
- local properties = tfmdata.properties
- -- we could cache these
- local validlookups, lookuplist = otf.collectlookups(rawdata,kind,properties.script,properties.language)
- if validlookups then
- for l=1,#lookuplist do
- local lookup = lookuplist[l]
- local found = slookups[lookup]
- if found then
- return found, lookuptypes[lookup]
- end
- end
- end
- end
- end
- end
- end
-end
-
-otf.getgsub = getgsub -- returns value, gsub_kind
-
-function otf.getsubstitution(tfmdata,k,kind,value)
- local found, kind = getgsub(tfmdata,k,kind)
- if not found then
- --
- elseif kind == "substitution" then
- return found
- elseif kind == "alternate" then
- local choice = tonumber(value) or 1 -- no random here (yet)
- return found[choice] or found[1] or k
- end
- return k
-end
-
-otf.getalternate = otf.getsubstitution
-
-function otf.getmultiple(tfmdata,k,kind)
- local found, kind = getgsub(tfmdata,k,kind)
- if found and kind == "multiple" then
- return found
- end
- return { k }
-end
-
-function otf.getkern(tfmdata,left,right,kind)
- local kerns = getgsub(tfmdata,left,kind or "kern",true) -- for now we use getsub
- if kerns then
- local found = kerns[right]
- local kind = type(found)
- if kind == "table" then
- found = found[1][3] -- can be more clever
- elseif kind ~= "number" then
- found = false
- end
- if found then
- return found * tfmdata.parameters.factor
- end
- end
- return 0
-end
-
-
-registerotfenhancer("prepare tables", enhance_prepare_tables)
-
-registerotfenhancer("prepare glyphs", enhance_prepare_glyphs)
-registerotfenhancer("prepare lookups", enhance_prepare_lookups)
-
-registerotfenhancer("analyze glyphs", enhance_analyze_glyphs)
-registerotfenhancer("analyze math", enhance_analyze_math)
-
-registerotfenhancer("reorganize lookups", enhance_reorganize_lookups)
-registerotfenhancer("reorganize mark classes", enhance_reorganize_mark_classes)
-registerotfenhancer("reorganize anchor classes", enhance_reorganize_anchor_classes)
-
-registerotfenhancer("reorganize glyph kerns", enhance_reorganize_glyph_kerns)
-registerotfenhancer("reorganize glyph lookups", enhance_reorganize_glyph_lookups)
-registerotfenhancer("reorganize glyph anchors", enhance_reorganize_glyph_anchors)
-
-registerotfenhancer("merge kern classes", enhance_merge_kern_classes)
-
-registerotfenhancer("reorganize features", enhance_reorganize_features)
-registerotfenhancer("reorganize subtables", enhance_reorganize_subtables)
-
-registerotfenhancer("check glyphs", enhance_check_glyphs)
-registerotfenhancer("check metadata", enhance_check_metadata)
-
-registerotfenhancer("prepare tounicode", enhance_prepare_tounicode)
-
-registerotfenhancer("check encoding", enhance_check_encoding)
-registerotfenhancer("add duplicates", enhance_add_duplicates)
-
-registerotfenhancer("expand lookups", enhance_expand_lookups)
-
-registerotfenhancer("check extra features", function() end) --placeholder, will be overloaded
-
-registerotfenhancer("cleanup tables", enhance_cleanup_tables)
-
-registerotfenhancer("compact lookups", enhance_compact_lookups)
-registerotfenhancer("purge names", enhance_purge_names)
diff --git a/tex/context/base/mkiv/font-otj.lua b/tex/context/base/mkiv/font-otj.lua
index 2c79500e7..1f9fd1ac1 100644
--- a/tex/context/base/mkiv/font-otj.lua
+++ b/tex/context/base/mkiv/font-otj.lua
@@ -7,7 +7,7 @@ if not modules then modules = { } end modules ['font-otj'] = {
}
-- This property based variant is not faster but looks nicer than the attribute one. We
--- need to use rawget (which is apbout 4 times slower than a direct access but we cannot
+-- need to use rawget (which is about 4 times slower than a direct access but we cannot
 -- get/set that one for our purpose!) This version does a bit more with discretionaries
-- (and Kai has tested it with his collection of weird fonts.)
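A minimal sketch of the rawget point made in the comment above, assuming a properties table whose __index metamethod would otherwise kick in on a plain lookup (the fallback shown here is made up):

    local properties = setmetatable({ }, {
        __index = function(t,k) return { injections = false } end, -- hypothetical fallback
    })

    local n = "some node"
    print(rawget(properties,n))     -- nil: nothing stored explicitly for n
    print(properties[n].injections) -- false: the metamethod answered instead

So rawget is the cheap way to ask "was anything registered for this node" without triggering the metatable.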
@@ -21,7 +21,13 @@ if not modules then modules = { } end modules ['font-otj'] = {
-- The use_advance code was just a test and is meant for testing and manuals. There is no
-- performance (or whatever) gain and using kerns is somewhat cleaner (at least for now).
--- Maybe: subtype fontkern when pure kerns.
+-- An alternative is to have a list per base of all marks and then do a run over the node
+-- list that resolves the accumulated l/r/x/y and then do an inject pass.
+
+-- if needed we can flag a kern node as immutable
+
+-- The thing with these positioning options is that it is not clear what Uniscribe does with
+-- the r2l flag and we keep oscillating between experiments.
if not nodes.properties then return end
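The per-base alternative mentioned in the comment block above could look roughly like this; a sketch only, with made-up helper names, not what this patch implements:

    local marksperbase = { } -- base node -> list of pending marks

    local function registermark(base,mark,dx,dy)
        local list = marksperbase[base]
        if not list then
            list = { }
            marksperbase[base] = list
        end
        list[#list+1] = { mark, dx, dy }
    end

    local function injectmarks(setoffsets) -- the later resolving/injecting pass
        for base, list in next, marksperbase do
            for i=1,#list do
                local m = list[i]
                setoffsets(m[1],m[2],m[3]) -- accumulated x/y relative to the base
            end
            marksperbase[base] = nil
        end
    end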
@@ -43,6 +49,7 @@ local attributes, nodes, node = attributes, nodes, node
fonts = fonts
local hashes = fonts.hashes
local fontdata = hashes.identifiers
+local fontmarks = hashes.marks
----- parameters = fonts.hashes.parameters -- not in generic
----- resources = fonts.hashes.resources -- not in generic
@@ -62,12 +69,9 @@ local glue_code = nodecodes.glue
local nuts = nodes.nuts
local nodepool = nuts.pool
-local newkern = nodepool.kern
-
local tonode = nuts.tonode
local tonut = nuts.tonut
-local getfield = nuts.getfield
local setfield = nuts.setfield
local getnext = nuts.getnext
local getprev = nuts.getprev
@@ -93,65 +97,70 @@ local insert_node_after = nuts.insert_after
local properties = nodes.properties.data
-function injections.installnewkern(nk)
- newkern = nk or newkern
-end
+local fontkern = nuts.pool and nuts.pool.fontkern -- context
+local italickern = nuts.pool and nuts.pool.italickern -- context
+
+local useitalickerns = false
+
+directives.register("fonts.injections.useitalics", function(v)
+ if v then
+ report_injections("using italics for space kerns (tracing only)")
+ end
+ useitalickerns = v
+end)
+
+do if not fontkern then -- generic
+
+ local thekern = nuts.new("kern",0) -- fontkern
+ local setkern = nuts.setkern
+ local copy_node = nuts.copy_node
-local nofregisteredkerns = 0
-local nofregisteredpairs = 0
-local nofregisteredmarks = 0
-local nofregisteredcursives = 0
-local keepregisteredcounts = false
+ fontkern = function(k)
+ local n = copy_node(thekern)
+ setkern(n,k)
+ return n
+ end
+
+ local thekern = nuts.new("kern",3) -- italiccorrection
+ local setkern = nuts.setkern
+ local copy_node = nuts.copy_node
+
+ italickern = function(k)
+ local n = copy_node(thekern)
+ setkern(n,k)
+ return n
+ end
+
+end end
+
+function injections.installnewkern() end -- obsolete
+
+local nofregisteredkerns = 0
+local nofregisteredpositions = 0
+local nofregisteredmarks = 0
+local nofregisteredcursives = 0
+local keepregisteredcounts = false
function injections.keepcounts()
keepregisteredcounts = true
end
function injections.resetcounts()
- nofregisteredkerns = 0
- nofregisteredpairs = 0
- nofregisteredmarks = 0
- nofregisteredcursives = 0
- keepregisteredcounts = false
+ nofregisteredkerns = 0
+ nofregisteredpositions = 0
+ nofregisteredmarks = 0
+ nofregisteredcursives = 0
+ keepregisteredcounts = false
end
-- We need to make sure that a possible metatable will not kick in unexpectedly.
--- function injections.reset(n)
--- local p = rawget(properties,n)
--- if p and rawget(p,"injections") then
--- p.injections = nil
--- end
--- end
-
--- function injections.copy(target,source)
--- local sp = rawget(properties,source)
--- if sp then
--- local tp = rawget(properties,target)
--- local si = rawget(sp,"injections")
--- if si then
--- si = fastcopy(si)
--- if tp then
--- tp.injections = si
--- else
--- properties[target] = {
--- injections = si,
--- }
--- end
--- else
--- if tp then
--- tp.injections = nil
--- end
--- end
--- end
--- end
-
function injections.reset(n)
local p = rawget(properties,n)
if p then
- p.injections = false -- { }
+ p.injections = false -- { } -- nil should work too as we use rawget
else
- properties[n] = false -- { injections = { } }
+ properties[n] = false -- { injections = { } } -- nil should work too as we use rawget
end
end
@@ -159,7 +168,6 @@ function injections.copy(target,source)
local sp = rawget(properties,source)
if sp then
local tp = rawget(properties,target)
- -- local si = rawget(sp,"injections")
local si = sp.injections
if si then
si = fastcopy(si)
@@ -185,10 +193,9 @@ function injections.copy(target,source)
end
end
-function injections.setligaindex(n,index)
+function injections.setligaindex(n,index) -- todo: don't set when 0
local p = rawget(properties,n)
if p then
- -- local i = rawget(p,"injections")
local i = p.injections
if i then
i.ligaindex = index
@@ -209,7 +216,6 @@ end
function injections.getligaindex(n,default)
local p = rawget(properties,n)
if p then
- -- local i = rawget(p,"injections")
local i = p.injections
if i then
return i.ligaindex or default
@@ -218,7 +224,15 @@ function injections.getligaindex(n,default)
return default
end
-function injections.setcursive(start,nxt,factor,rlmode,exit,entry,tfmstart,tfmnext) -- hm: nuts or nodes
+function injections.setcursive(start,nxt,factor,rlmode,exit,entry,tfmstart,tfmnext,r2lflag)
+
+ -- The standard says something about the r2lflag related to the first in a series:
+ --
+ -- When this bit is set, the last glyph in a given sequence to which the cursive
+ -- attachment lookup is applied, will be positioned on the baseline.
+ --
+ -- But it looks like we don't need to consider it.
+
local dx = factor*(exit[1]-entry[1])
local dy = -factor*(exit[2]-entry[2])
local ws = tfmstart.width
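A quick numeric illustration of the dx/dy computation at the start of setcursive (made-up anchor values, factor 1):

    local factor = 1
    local exit   = { 500, 20 } -- exit anchor on the first glyph
    local entry  = {   0, 80 } -- entry anchor on the next glyph
    local dx     =  factor*(exit[1]-entry[1]) -- 500: horizontal shift so the anchors meet
    local dy     = -factor*(exit[2]-entry[2]) --  60: vertical offset so the anchors line up
    print(dx,dy)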
@@ -236,7 +250,6 @@ function injections.setcursive(start,nxt,factor,rlmode,exit,entry,tfmstart,tfmne
--
local p = rawget(properties,start)
if p then
- -- local i = rawget(p,"injections")
local i = p.injections
if i then
i.cursiveanchor = true
@@ -254,7 +267,6 @@ function injections.setcursive(start,nxt,factor,rlmode,exit,entry,tfmstart,tfmne
end
local p = rawget(properties,nxt)
if p then
- -- local i = rawget(p,"injections")
local i = p.injections
if i then
i.cursivex = dx
@@ -276,7 +288,9 @@ function injections.setcursive(start,nxt,factor,rlmode,exit,entry,tfmstart,tfmne
return dx, dy, nofregisteredcursives
end
-function injections.setpair(current,factor,rlmode,r2lflag,spec,injection) -- r2lflag & tfmchr not used
+-- kind: 0=single 1=first of pair, 2=second of pair
+
+function injections.setposition(kind,current,factor,rlmode,spec,injection)
local x = factor*spec[1]
local y = factor*spec[2]
local w = factor*spec[3]
@@ -286,7 +300,7 @@ function injections.setpair(current,factor,rlmode,r2lflag,spec,injection) -- r2l
local leftkern = x -- both kerns are set in a pair kern compared
local rightkern = w - x -- to normal kerns where we set only leftkern
if leftkern ~= 0 or rightkern ~= 0 or yoffset ~= 0 then
- nofregisteredpairs = nofregisteredpairs + 1
+ nofregisteredpositions = nofregisteredpositions + 1
if rlmode and rlmode < 0 then
leftkern, rightkern = rightkern, leftkern
end
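A condensed sketch of how a positioning record maps onto the two kerns set above (helper name made up, factor assumed to include the scale already):

    local function splitkerns(factor,spec) -- spec = { x, y, w, h }
        local x = factor*spec[1]
        local w = factor*spec[3]
        return x, w - x -- leftkern, rightkern
    end

    print(splitkerns(1,{ 50, 0, 120, 0 })) -- 50   70

The x part ends up in front of the glyph and the remaining advance correction w - x after it; in r2l mode the two are swapped as shown above.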
@@ -295,8 +309,7 @@ function injections.setpair(current,factor,rlmode,r2lflag,spec,injection) -- r2l
end
local p = rawget(properties,current)
if p then
- -- local i = p[injection]
- local i = rawget(p,injection)
+ local i = p[injection]
if i then
if leftkern ~= 0 then
i.leftkern = (i.leftkern or 0) + leftkern
@@ -333,15 +346,15 @@ function injections.setpair(current,factor,rlmode,r2lflag,spec,injection) -- r2l
},
}
end
- return x, y, w, h, nofregisteredpairs
+ return x, y, w, h, nofregisteredpositions
end
end
return x, y, w, h -- no bound
end
--- This needs checking for rl < 0 but it is unlikely that a r2l script uses kernclasses between
--- glyphs so we're probably safe (KE has a problematic font where marks interfere with rl < 0 in
--- the previous case)
+-- The next one is used for simple kerns coming from a truetype kern table. The r2l
+-- variant needs checking but it is unlikely that an r2l script uses this
+-- feature.
function injections.setkern(current,factor,rlmode,x,injection)
local dx = factor * x
@@ -352,8 +365,7 @@ function injections.setkern(current,factor,rlmode,x,injection)
injection = "injections"
end
if p then
- -- local i = rawget(p,injection)
- local i = rawget(p,injection)
+ local i = p[injection]
if i then
i.leftkern = dx + (i.leftkern or 0)
else
@@ -374,8 +386,62 @@ function injections.setkern(current,factor,rlmode,x,injection)
end
end
-function injections.setmark(start,base,factor,rlmode,ba,ma,tfmbase,mkmk,checkmark) -- ba=baseanchor, ma=markanchor
+-- This one is an optimization of pairs where we have only a "w" entry. This one is
+-- potentially different from the previous one wrt r2l. It needs checking. The
+-- optimization relates to smaller tma files.
+function injections.setmove(current,factor,rlmode,x,injection)
+ local dx = factor * x
+ if dx ~= 0 then
+ nofregisteredkerns = nofregisteredkerns + 1
+ local p = rawget(properties,current)
+ if not injection then
+ injection = "injections"
+ end
+ if rlmode and rlmode < 0 then
+ -- we need to swap with a single so we also need to do it here
+ -- as move is just a simple single
+ if p then
+ local i = p[injection]
+ if i then
+ i.rightkern = dx + (i.rightkern or 0)
+ else
+ p[injection] = {
+ rightkern = dx,
+ }
+ end
+ else
+ properties[current] = {
+ [injection] = {
+ rightkern = dx,
+ },
+ }
+ end
+ else
+ if p then
+ local i = p[injection]
+ if i then
+ i.leftkern = dx + (i.leftkern or 0)
+ else
+ p[injection] = {
+ leftkern = dx,
+ }
+ end
+ else
+ properties[current] = {
+ [injection] = {
+ leftkern = dx,
+ },
+ }
+ end
+ end
+ return dx, nofregisteredkerns
+ else
+ return 0, 0
+ end
+end
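The direction handling in setmove boils down to choosing which side of the glyph receives the displacement; a condensed sketch (helper name made up):

    local function movesides(dx,rlmode)
        if rlmode and rlmode < 0 then
            return nil, dx -- r2l: the visual "before" side is the right one
        else
            return dx, nil -- l2r: a plain left kern
        end
    end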
+
+function injections.setmark(start,base,factor,rlmode,ba,ma,tfmbase,mkmk,checkmark) -- ba=baseanchor, ma=markanchor
local dx, dy = factor*(ba[1]-ma[1]), factor*(ba[2]-ma[2])
nofregisteredmarks = nofregisteredmarks + 1
if rlmode >= 0 then
@@ -384,15 +450,20 @@ function injections.setmark(start,base,factor,rlmode,ba,ma,tfmbase,mkmk,checkmar
local p = rawget(properties,start)
-- hm, dejavu serif does a sloppy mark2mark before mark2base
if p then
- -- local i = rawget(p,"injections")
local i = p.injections
if i then
if i.markmark then
-- out of order mkmk: yes or no or option
else
- i.markx = dx
- i.marky = dy
- i.markdir = rlmode or 0
+ if dx ~= 0 then
+ i.markx = dx
+ end
+ if dy ~= 0 then
+ i.marky = dy
+ end
+ if rlmode then
+ i.markdir = rlmode
+ end
i.markbase = nofregisteredmarks
i.markbasenode = base
i.markmark = mkmk
@@ -438,7 +509,7 @@ local function show(n,what,nested,symbol)
if n then
local p = rawget(properties,n)
if p then
- local i = rawget(p,what)
+ local i = p[what]
if i then
local leftkern = i.leftkern or 0
local rightkern = i.rightkern or 0
@@ -480,7 +551,7 @@ end
local function showsub(n,what,where)
report_injections("begin subrun: %s",where)
- for n in traverse_id(glyph_code,n) do
+ for n in traverse_char(n) do
showchar(n,where)
show(n,what,where," ")
end
@@ -488,8 +559,9 @@ local function showsub(n,what,where)
end
local function trace(head,where)
- report_injections("begin run %s: %s kerns, %s pairs, %s marks and %s cursives registered",
- where or "",nofregisteredkerns,nofregisteredpairs,nofregisteredmarks,nofregisteredcursives)
+ report_injections()
+ report_injections("begin run %s: %s kerns, %s positions, %s marks and %s cursives registered",
+ where or "",nofregisteredkerns,nofregisteredpositions,nofregisteredmarks,nofregisteredcursives)
local n = head
while n do
local id = getid(n)
@@ -537,6 +609,7 @@ local function show_result(head)
end
current = getnext(current)
end
+ report_injections()
end
-- G +D-pre G
@@ -556,7 +629,7 @@ local function inject_kerns_only(head,where)
local prev = nil
local next = nil
local prevdisc = nil
- local prevglyph = nil
+ -- local prevglyph = nil
local pre = nil -- saves a lookup
local post = nil -- saves a lookup
local replace = nil -- saves a lookup
@@ -569,46 +642,42 @@ local function inject_kerns_only(head,where)
if char then
local p = rawget(properties,current)
if p then
- -- local i = rawget(p,"injections")
local i = p.injections
if i then
-- left|glyph|right
local leftkern = i.leftkern
if leftkern and leftkern ~= 0 then
- head = insert_node_before(head,current,newkern(leftkern))
+ head = insert_node_before(head,current,fontkern(leftkern))
end
end
if prevdisc then
local done = false
if post then
- -- local i = rawget(p,"postinjections")
local i = p.postinjections
if i then
local leftkern = i.leftkern
if leftkern and leftkern ~= 0 then
- setlink(posttail,newkern(leftkern))
+ setlink(posttail,fontkern(leftkern))
done = true
end
end
end
if replace then
- -- local i = rawget(p,"replaceinjections")
local i = p.replaceinjections
if i then
local leftkern = i.leftkern
if leftkern and leftkern ~= 0 then
- setlink(replacetail,newkern(leftkern))
+ setlink(replacetail,fontkern(leftkern))
done = true
end
end
else
- -- local i = rawget(p,"emptyinjections")
local i = p.emptyinjections
if i then
-- glyph|disc|glyph (special case)
local leftkern = i.leftkern
if leftkern and leftkern ~= 0 then
- setfield(prev,"replace",newkern(leftkern)) -- maybe also leftkern
+ setfield(prev,"replace",fontkern(leftkern)) -- maybe also leftkern
end
end
end
@@ -618,11 +687,11 @@ local function inject_kerns_only(head,where)
end
end
prevdisc = nil
- prevglyph = current
+ -- prevglyph = current
elseif char == false then
-- other font
prevdisc = nil
- prevglyph = current
+ -- prevglyph = current
elseif id == disc_code then
pre, post, replace, pretail, posttail, replacetail = getdisc(current,true)
local done = false
@@ -631,12 +700,11 @@ local function inject_kerns_only(head,where)
for n in traverse_char(pre) do
local p = rawget(properties,n)
if p then
- -- local i = rawget(p,"injections") or rawget(p,"preinjections")
local i = p.injections or p.preinjections
if i then
local leftkern = i.leftkern
if leftkern and leftkern ~= 0 then
- pre = insert_node_before(pre,n,newkern(leftkern))
+ pre = insert_node_before(pre,n,fontkern(leftkern))
done = true
end
end
@@ -648,12 +716,11 @@ local function inject_kerns_only(head,where)
for n in traverse_char(post) do
local p = rawget(properties,n)
if p then
- -- local i = rawget(p,"injections") or rawget(p,"postinjections")
local i = p.injections or p.postinjections
if i then
local leftkern = i.leftkern
if leftkern and leftkern ~= 0 then
- post = insert_node_before(post,n,newkern(leftkern))
+ post = insert_node_before(post,n,fontkern(leftkern))
done = true
end
end
@@ -665,12 +732,11 @@ local function inject_kerns_only(head,where)
for n in traverse_char(replace) do
local p = rawget(properties,n)
if p then
- -- local i = rawget(p,"injections") or rawget(p,"replaceinjections")
local i = p.injections or p.replaceinjections
if i then
local leftkern = i.leftkern
if leftkern and leftkern ~= 0 then
- replace = insert_node_before(replace,n,newkern(leftkern))
+ replace = insert_node_before(replace,n,fontkern(leftkern))
done = true
end
end
@@ -680,10 +746,10 @@ local function inject_kerns_only(head,where)
if done then
setdisc(current,pre,post,replace)
end
- prevglyph = nil
+ -- prevglyph = nil
prevdisc = current
else
- prevglyph = nil
+ -- prevglyph = nil
prevdisc = nil
end
prev = current
@@ -695,13 +761,16 @@ local function inject_kerns_only(head,where)
else
nofregisteredkerns = 0
end
+ if trace_injections then
+ show_result(head)
+ end
return tonode(head), true
end
-local function inject_pairs_only(head,where)
+local function inject_positions_only(head,where)
head = tonut(head)
if trace_injections then
- trace(head,"pairs")
+ trace(head,"positions")
end
local current = head
local prev = nil
@@ -720,7 +789,6 @@ local function inject_pairs_only(head,where)
if char then
local p = rawget(properties,current)
if p then
- -- local i = rawget(p,"injections")
local i = p.injections
if i then
-- left|glyph|right
@@ -728,27 +796,30 @@ local function inject_pairs_only(head,where)
if yoffset and yoffset ~= 0 then
setoffsets(current,false,yoffset)
end
- local leftkern = i.leftkern
+ local leftkern = i.leftkern
+ local rightkern = i.rightkern
if leftkern and leftkern ~= 0 then
- head = insert_node_before(head,current,newkern(leftkern))
+ if rightkern and leftkern == -rightkern then
+ setoffsets(current,leftkern,false)
+ rightkern = 0
+ else
+ head = insert_node_before(head,current,fontkern(leftkern))
+ end
end
- local rightkern = i.rightkern
if rightkern and rightkern ~= 0 then
- insert_node_after(head,current,newkern(rightkern))
+ insert_node_after(head,current,fontkern(rightkern))
end
else
- -- local i = rawget(p,"emptyinjections")
local i = p.emptyinjections
if i then
-- glyph|disc|glyph (special case)
--- is this okay?
local rightkern = i.rightkern
if rightkern and rightkern ~= 0 then
if next and getid(next) == disc_code then
if replace then
-- error, we expect an empty one
else
- setfield(next,"replace",newkern(rightkern)) -- maybe also leftkern
+ setfield(next,"replace",fontkern(rightkern)) -- maybe also leftkern
end
end
end
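The leftkern == -rightkern shortcut introduced in this hunk trades two kern nodes for a single glyph offset when the pair only displaces the glyph without changing its advance; roughly:

    -- a pure displacement (x, -x) leaves the advance width alone, so setting
    -- an x offset on the glyph is equivalent to (and cheaper than) inserting
    -- a fontkern before and after it
    local function displaceonly(leftkern,rightkern)
        return leftkern ~= 0 and rightkern == -leftkern
    end

    print(displaceonly(40,-40)) -- true
    print(displaceonly(40,-30)) -- false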
@@ -757,23 +828,21 @@ local function inject_pairs_only(head,where)
if prevdisc then
local done = false
if post then
- -- local i = rawget(p,"postinjections")
local i = p.postinjections
if i then
local leftkern = i.leftkern
if leftkern and leftkern ~= 0 then
- setlink(posttail,newkern(leftkern))
+ setlink(posttail,fontkern(leftkern))
done = true
end
end
end
if replace then
- -- local i = rawget(p,"replaceinjections")
local i = p.replaceinjections
if i then
local leftkern = i.leftkern
if leftkern and leftkern ~= 0 then
- setlink(replacetail,newkern(leftkern))
+ setlink(replacetail,fontkern(leftkern))
done = true
end
end
@@ -783,7 +852,7 @@ local function inject_pairs_only(head,where)
-- new .. okay?
local leftkern = i.leftkern
if leftkern and leftkern ~= 0 then
- setfield(prev,"replace",newkern(leftkern)) -- maybe also leftkern
+ setfield(prev,"replace",fontkern(leftkern)) -- maybe also leftkern
end
end
end
@@ -805,7 +874,6 @@ local function inject_pairs_only(head,where)
for n in traverse_char(pre) do
local p = rawget(properties,n)
if p then
- -- local i = rawget(p,"injections") or rawget(p,"preinjections")
local i = p.injections or p.preinjections
if i then
local yoffset = i.yoffset
@@ -814,12 +882,12 @@ local function inject_pairs_only(head,where)
end
local leftkern = i.leftkern
if leftkern and leftkern ~= 0 then
- pre = insert_node_before(pre,n,newkern(leftkern))
+ pre = insert_node_before(pre,n,fontkern(leftkern))
done = true
end
local rightkern = i.rightkern
if rightkern and rightkern ~= 0 then
- insert_node_after(pre,n,newkern(rightkern))
+ insert_node_after(pre,n,fontkern(rightkern))
done = true
end
end
@@ -831,7 +899,6 @@ local function inject_pairs_only(head,where)
for n in traverse_char(post) do
local p = rawget(properties,n)
if p then
- -- local i = rawget(p,"injections") or rawget(p,"postinjections")
local i = p.injections or p.postinjections
if i then
local yoffset = i.yoffset
@@ -840,12 +907,12 @@ local function inject_pairs_only(head,where)
end
local leftkern = i.leftkern
if leftkern and leftkern ~= 0 then
- post = insert_node_before(post,n,newkern(leftkern))
+ post = insert_node_before(post,n,fontkern(leftkern))
done = true
end
local rightkern = i.rightkern
if rightkern and rightkern ~= 0 then
- insert_node_after(post,n,newkern(rightkern))
+ insert_node_after(post,n,fontkern(rightkern))
done = true
end
end
@@ -857,7 +924,6 @@ local function inject_pairs_only(head,where)
for n in traverse_char(replace) do
local p = rawget(properties,n)
if p then
- -- local i = rawget(p,"injections") or rawget(p,"replaceinjections")
local i = p.injections or p.replaceinjections
if i then
local yoffset = i.yoffset
@@ -866,12 +932,12 @@ local function inject_pairs_only(head,where)
end
local leftkern = i.leftkern
if leftkern and leftkern ~= 0 then
- replace = insert_node_before(replace,n,newkern(leftkern))
+ replace = insert_node_before(replace,n,fontkern(leftkern))
done = true
end
local rightkern = i.rightkern
if rightkern and rightkern ~= 0 then
- insert_node_after(replace,n,newkern(rightkern))
+ insert_node_after(replace,n,fontkern(rightkern))
done = true
end
end
@@ -882,13 +948,12 @@ local function inject_pairs_only(head,where)
if pre then
local p = rawget(properties,prevglyph)
if p then
- -- local i = rawget(p,"preinjections")
local i = p.preinjections
if i then
-- glyph|pre glyphs
local rightkern = i.rightkern
if rightkern and rightkern ~= 0 then
- pre = insert_node_before(pre,pre,newkern(rightkern))
+ pre = insert_node_before(pre,pre,fontkern(rightkern))
done = true
end
end
@@ -897,13 +962,12 @@ local function inject_pairs_only(head,where)
if replace then
local p = rawget(properties,prevglyph)
if p then
- -- local i = rawget(p,"replaceinjections")
local i = p.replaceinjections
if i then
-- glyph|replace glyphs
local rightkern = i.rightkern
if rightkern and rightkern ~= 0 then
- replace = insert_node_before(replace,replace,newkern(rightkern))
+ replace = insert_node_before(replace,replace,fontkern(rightkern))
done = true
end
end
@@ -926,7 +990,10 @@ local function inject_pairs_only(head,where)
if keepregisteredcounts then
keepregisteredcounts = false
else
- nofregisteredkerns = 0
+ nofregisteredpositions = 0
+ end
+ if trace_injections then
+ show_result(head)
end
return tonode(head), true
end
@@ -934,9 +1001,7 @@ end
local function showoffset(n,flag)
local x, y = getoffsets(n)
if x ~= 0 or y ~= 0 then
- setcolor(n,flag and "darkred" or "darkgreen")
- else
- resetcolor(n)
+ setcolor(n,"darkgray")
end
end
@@ -950,8 +1015,6 @@ local function inject_everything(head,where)
--
local current = head
local last = nil
- local font = font
- local markdata = nil
local prev = nil
local next = nil
local prevdisc = nil
@@ -979,46 +1042,46 @@ local function inject_everything(head,where)
local rightkern = nil
local pp = rawget(properties,p)
if pp then
- -- pp = rawget(pp,"injections")
pp = pp.injections
if pp then
rightkern = pp.rightkern
end
end
+ local markdir = pn.markdir
if rightkern then -- x and w ~= 0
- if pn.markdir < 0 then
+ ox = px - (pn.markx or 0) - rightkern
+ if markdir and markdir < 0 then
-- kern(w-x) glyph(p) kern(x) mark(n)
- ox = px - pn.markx - rightkern
- -- report_injections("r2l case 1: %p",ox)
+ if not pn.markmark then
+ ox = ox + (pn.leftkern or 0)
+ end
else
-- kern(x) glyph(p) kern(w-x) mark(n)
- -- ox = px - getwidth(p) + pn.markx - pp.leftkern
--
-- According to Kai we don't need to handle leftkern here but I'm
-- pretty sure I've run into a case where it was needed so maybe
-- some day we need something more clever here.
--
+ -- maybe we need to apply both then
+ --
if false then
- -- a mark with kerning
+ -- a mark with kerning (maybe husayni needs it)
local leftkern = pp.leftkern
if leftkern then
- ox = px - pn.markx - leftkern
- else
- ox = px - pn.markx
+ ox = ox - leftkern
end
- else
- ox = px - pn.markx - rightkern -- seguiemj needs the rightkern
end
end
else
- -- if pn.markdir < 0 then
- -- ox = px - pn.markx
- -- -- report_injections("r2l case 3: %p",ox)
- -- else
- -- -- ox = px - getwidth(p) + pn.markx
- ox = px - pn.markx
- -- report_injections("l2r case 3: %p",ox)
- -- end
+ ox = px - (pn.markx or 0)
+ if markdir and markdir < 0 then
+ if not pn.markmark then
+ local leftkern = pn.leftkern
+ if leftkern then
+ ox = ox + leftkern -- husayni needs it
+ end
+ end
+ end
if pn.checkmark then
local wn = getwidth(n) -- in arial marks have widths
if wn and wn ~= 0 then
@@ -1027,34 +1090,99 @@ local function inject_everything(head,where)
report_injections("correcting non zero width mark %C",getchar(n))
end
-- -- bad: we should center
+ --
-- pn.leftkern = -wn
-- pn.rightkern = -wn
- -- -- we're too late anyway as kerns are already injected so
- -- -- we do it the ugly way (no checking if the previous is
- -- -- already a kern) .. maybe we should fix the font instead
- -- hm, no head ?
- insert_node_before(n,n,newkern(-wn))
- insert_node_after(n,n,newkern(-wn))
+ --
+ -- -- we're too late anyway as kerns are already injected so we do it the
+ -- -- ugly way (no checking if the previous is already a kern) .. maybe we
+ -- -- should fix the font instead
+ --
+ -- todo: head and check for prev / next kern
+ --
+ insert_node_before(n,n,fontkern(-wn))
+ insert_node_after(n,n,fontkern(-wn))
end
end
end
- local oy = ny + py + pn.marky
+ local oy = ny + py + (pn.marky or 0)
+ if not pn.markmark then
+ local yoffset = pn.yoffset
+ if yoffset then
+ oy = oy + yoffset -- husayni needs it
+ end
+ end
setoffsets(n,ox,oy)
if trace_marks then
showoffset(n,true)
end
end
- -- todo: marks in disc
+ -- begin of temp fix --
+ local base = nil -- bah, some arabic fonts have no mark anchoring
+ -- end of temp fix --
while current do
local next = getnext(current)
local char, id = ischar(current)
if char then
local p = rawget(properties,current)
+ -- begin of temp fix --
+ if hascursives then
+ if not p then
+ local m = fontmarks[getfont(current)]
+ if m and m[char] then
+ if base then
+ p = { injections = { markbasenode = base } }
+ nofmarks = nofmarks + 1
+ marks[nofmarks] = current
+ properties[current] = p
+ hasmarks = true
+ end
+ else
+ base = current
+ end
+ end
+ end
+ -- end of temp fix
if p then
- -- local i = rawget(p,"injections")
local i = p.injections
+ -- begin of temp fix --
+ if hascursives then
+ if not i then
+ local m = fontmarks[getfont(current)]
+ if m and m[char] then
+ if base then
+ i = { markbasenode = base }
+ nofmarks = nofmarks + 1
+ marks[nofmarks] = current
+ p.injections = i
+ hasmarks = true
+ end
+ else
+ base = current
+ end
+ end
+ end
+ -- end of temp fix --
if i then
local pm = i.markbasenode
+ -- begin of temp fix --
+ if hascursives then
+ if not pm then
+ local m = fontmarks[getfont(current)]
+ if m and m[char] then
+ if base then
+ pm = base
+ i.markbasenode = pm
+ hasmarks = true
+ end
+ else
+ base = current
+ end
+ else
+ base = current
+ end
+ end
+ -- end of temp fix --
if pm then
nofmarks = nofmarks + 1
marks[nofmarks] = current
@@ -1115,28 +1243,31 @@ local function inject_everything(head,where)
end
end
-- left|glyph|right
- local leftkern = i.leftkern
+ local leftkern = i.leftkern
+ local rightkern = i.rightkern
if leftkern and leftkern ~= 0 then
- head = insert_node_before(head,current,newkern(leftkern))
+ if rightkern and leftkern == -rightkern then
+ setoffsets(current,leftkern,false)
+ rightkern = 0
+ else
+ head = insert_node_before(head,current,fontkern(leftkern))
+ end
end
- local rightkern = i.rightkern
if rightkern and rightkern ~= 0 then
- insert_node_after(head,current,newkern(rightkern))
+ insert_node_after(head,current,fontkern(rightkern))
end
end
else
- -- local i = rawget(p,"emptyinjections")
local i = p.emptyinjections
if i then
-- glyph|disc|glyph (special case)
- -- okay?
local rightkern = i.rightkern
if rightkern and rightkern ~= 0 then
if next and getid(next) == disc_code then
if replace then
-- error, we expect an empty one
else
- setfield(next,"replace",newkern(rightkern)) -- maybe also leftkern
+ setfield(next,"replace",fontkern(rightkern)) -- maybe also leftkern
end
end
end
@@ -1146,33 +1277,30 @@ local function inject_everything(head,where)
if p then
local done = false
if post then
- -- local i = rawget(p,"postinjections")
local i = p.postinjections
if i then
local leftkern = i.leftkern
if leftkern and leftkern ~= 0 then
- setlink(posttail,newkern(leftkern))
+ setlink(posttail,fontkern(leftkern))
done = true
end
end
end
if replace then
- -- local i = rawget(p,"replaceinjections")
local i = p.replaceinjections
if i then
local leftkern = i.leftkern
if leftkern and leftkern ~= 0 then
- setlink(replacetail,newkern(leftkern))
+ setlink(replacetail,fontkern(leftkern))
done = true
end
end
else
- -- local i = rawget(p,"emptyinjections")
local i = p.emptyinjections
if i then
local leftkern = i.leftkern
if leftkern and leftkern ~= 0 then
- setfield(prev,"replace",newkern(leftkern)) -- maybe also leftkern
+ setfield(prev,"replace",fontkern(leftkern)) -- maybe also leftkern
end
end
end
@@ -1208,7 +1336,6 @@ local function inject_everything(head,where)
for n in traverse_char(pre) do
local p = rawget(properties,n)
if p then
- -- local i = rawget(p,"injections") or rawget(p,"preinjections")
local i = p.injections or p.preinjections
if i then
local yoffset = i.yoffset
@@ -1217,12 +1344,12 @@ local function inject_everything(head,where)
end
local leftkern = i.leftkern
if leftkern and leftkern ~= 0 then
- pre = insert_node_before(pre,n,newkern(leftkern))
+ pre = insert_node_before(pre,n,fontkern(leftkern))
done = true
end
local rightkern = i.rightkern
if rightkern and rightkern ~= 0 then
- insert_node_after(pre,n,newkern(rightkern))
+ insert_node_after(pre,n,fontkern(rightkern))
done = true
end
if hasmarks then
@@ -1240,7 +1367,6 @@ local function inject_everything(head,where)
for n in traverse_char(post) do
local p = rawget(properties,n)
if p then
- -- local i = rawget(p,"injections") or rawget(p,"postinjections")
local i = p.injections or p.postinjections
if i then
local yoffset = i.yoffset
@@ -1249,12 +1375,12 @@ local function inject_everything(head,where)
end
local leftkern = i.leftkern
if leftkern and leftkern ~= 0 then
- post = insert_node_before(post,n,newkern(leftkern))
+ post = insert_node_before(post,n,fontkern(leftkern))
done = true
end
local rightkern = i.rightkern
if rightkern and rightkern ~= 0 then
- insert_node_after(post,n,newkern(rightkern))
+ insert_node_after(post,n,fontkern(rightkern))
done = true
end
if hasmarks then
@@ -1272,7 +1398,6 @@ local function inject_everything(head,where)
for n in traverse_char(replace) do
local p = rawget(properties,n)
if p then
- -- local i = rawget(p,"injections") or rawget(p,"replaceinjections")
local i = p.injections or p.replaceinjections
if i then
local yoffset = i.yoffset
@@ -1281,12 +1406,12 @@ local function inject_everything(head,where)
end
local leftkern = i.leftkern
if leftkern and leftkern ~= 0 then
- replace = insert_node_before(replace,n,newkern(leftkern))
+ replace = insert_node_before(replace,n,fontkern(leftkern))
done = true
end
local rightkern = i.rightkern
if rightkern and rightkern ~= 0 then
- insert_node_after(replace,n,newkern(rightkern))
+ insert_node_after(replace,n,fontkern(rightkern))
done = true
end
if hasmarks then
@@ -1303,13 +1428,12 @@ local function inject_everything(head,where)
if pre then
local p = rawget(properties,prevglyph)
if p then
- -- local i = rawget(p,"preinjections")
local i = p.preinjections
if i then
-- glyph|pre glyphs
local rightkern = i.rightkern
if rightkern and rightkern ~= 0 then
- pre = insert_node_before(pre,pre,newkern(rightkern))
+ pre = insert_node_before(pre,pre,fontkern(rightkern))
done = true
end
end
@@ -1318,13 +1442,12 @@ local function inject_everything(head,where)
if replace then
local p = rawget(properties,prevglyph)
if p then
- -- local i = rawget(p,"replaceinjections")
local i = p.replaceinjections
if i then
-- glyph|replace glyphs
local rightkern = i.rightkern
if rightkern and rightkern ~= 0 then
- replace = insert_node_before(replace,replace,newkern(rightkern))
+ replace = insert_node_before(replace,replace,fontkern(rightkern))
done = true
end
end
@@ -1339,6 +1462,7 @@ local function inject_everything(head,where)
else
prevglyph = nil
prevdisc = nil
+base = nil
end
prev = current
current = next
@@ -1360,7 +1484,6 @@ local function inject_everything(head,where)
for i=1,nofmarks do
local m = marks[i]
local p = rawget(properties,m)
- -- local i = rawget(p,"injections")
local i = p.injections
local b = i.markbasenode
processmark(b,m,i)
@@ -1372,10 +1495,13 @@ local function inject_everything(head,where)
if keepregisteredcounts then
keepregisteredcounts = false
else
- nofregisteredkerns = 0
- nofregisteredpairs = 0
- nofregisteredmarks = 0
- nofregisteredcursives = 0
+ nofregisteredkerns = 0
+ nofregisteredpositions = 0
+ nofregisteredmarks = 0
+ nofregisteredcursives = 0
+ end
+ if trace_injections then
+ show_result(head)
end
return tonode(head), true
end
@@ -1468,7 +1594,6 @@ local function injectspaces(head)
if not triggers then
return head, false
end
-
local lastfont = nil
local spacekerns = nil
local leftkerns = nil
@@ -1477,6 +1602,7 @@ local function injectspaces(head)
local threshold = 0
local leftkern = false
local rightkern = false
+ local nuthead = tonut(head)
local function updatefont(font,trig)
leftkerns = trig.left
@@ -1486,7 +1612,7 @@ local function injectspaces(head)
factor = getthreshold(font)
end
- for n in traverse_id(glue_code,tonut(head)) do
+ for n in traverse_id(glue_code,nuthead) do
local prev, next = getspaceboth(n)
local prevchar = prev and ischar(prev)
local nextchar = next and ischar(next)
@@ -1518,29 +1644,59 @@ local function injectspaces(head)
local old = getwidth(n)
if old > threshold then
if rightkern then
- local new = old + (leftkern + rightkern) * factor
- if trace_spaces then
- report_spaces("%C [%p -> %p] %C",prevchar,old,new,nextchar)
+ if useitalickerns then
+ local lnew = leftkern * factor
+ local rnew = rightkern * factor
+ if trace_spaces then
+ report_spaces("%C [%p + %p + %p] %C",prevchar,lnew,old,rnew,nextchar)
+ end
+ local h = insert_node_before(nuthead,n,italickern(lnew))
+ if h == nuthead then
+ head = tonode(h)
+ nuthead = h
+ end
+ insert_node_after(nuthead,n,italickern(rnew))
+ else
+ local new = old + (leftkern + rightkern) * factor
+ if trace_spaces then
+ report_spaces("%C [%p -> %p] %C",prevchar,old,new,nextchar)
+ end
+ setwidth(n,new)
end
- setwidth(n,new)
- leftkern = false
+ rightkern = false
else
- local new = old + leftkern * factor
- if trace_spaces then
- report_spaces("%C [%p -> %p]",prevchar,old,new)
+ if useitalickerns then
+ local new = leftkern * factor
+ if trace_spaces then
+ report_spaces("%C [%p + %p]",prevchar,old,new)
+ end
+ insert_node_after(nuthead,n,italickern(new)) -- tricky with traverse but ok
+ else
+ local new = old + leftkern * factor
+ if trace_spaces then
+ report_spaces("%C [%p -> %p]",prevchar,old,new)
+ end
+ setwidth(n,new)
end
- setwidth(n,new)
end
end
leftkern = false
elseif rightkern then
local old = getwidth(n)
if old > threshold then
- local new = old + rightkern * factor
- if trace_spaces then
- report_spaces("[%p -> %p] %C",nextchar,old,new)
+ if useitalickerns then
+ local new = rightkern * factor
+ if trace_spaces then
+ report_spaces("%C [%p + %p]",nextchar,old,new)
+ end
+ insert_node_after(nuthead,n,italickern(new))
+ else
+ local new = old + rightkern * factor
+ if trace_spaces then
+ report_spaces("[%p -> %p] %C",nextchar,old,new)
+ end
+ setwidth(n,new)
end
- setwidth(n,new)
end
rightkern = false
end
@@ -1562,11 +1718,11 @@ function injections.handler(head,where)
report_injections("injection variant %a","everything")
end
return inject_everything(head,where)
- elseif nofregisteredpairs > 0 then
+ elseif nofregisteredpositions > 0 then
if trace_injections then
- report_injections("injection variant %a","pairs")
+ report_injections("injection variant %a","positions")
end
- return inject_pairs_only(head,where)
+ return inject_positions_only(head,where)
elseif nofregisteredkerns > 0 then
if trace_injections then
report_injections("injection variant %a","kerns")
diff --git a/tex/context/base/mkiv/font-otl.lua b/tex/context/base/mkiv/font-otl.lua
index 9400096a0..a71e3ad98 100644
--- a/tex/context/base/mkiv/font-otl.lua
+++ b/tex/context/base/mkiv/font-otl.lua
@@ -52,7 +52,7 @@ local report_otf = logs.reporter("fonts","otf loading")
local fonts = fonts
local otf = fonts.handlers.otf
-otf.version = 3.029 -- beware: also sync font-mis.lua and in mtx-fonts
+otf.version = 3.103 -- beware: also sync font-mis.lua and in mtx-fonts
otf.cache = containers.define("fonts", "otl", otf.version, true)
otf.svgcache = containers.define("fonts", "svg", otf.version, true)
otf.sbixcache = containers.define("fonts", "sbix", otf.version, true)
@@ -79,6 +79,8 @@ local cleanup = 0 -- mk: 0=885M 1=765M 2=735M (regular run 730M)
local syncspace = true
local forcenotdef = false
+local privateoffset = fonts.constructors and fonts.constructors.privateoffset or 0xF0000 -- 0x10FFFF
+
local applyruntimefixes = fonts.treatments and fonts.treatments.applyfixes
local wildcard = "*"
@@ -99,6 +101,33 @@ registerdirective("fonts.otf.loader.forcenotdef", function(v) forcenotdef =
registerotfenhancer("check extra features", function() end) -- placeholder
+-- Kai has memory problems on osx so here is an experiment (I only tested on windows as
+-- my test mac is old and gets no updates and is therefore rather useless):
+
+local checkmemory = utilities.lua and utilities.lua.checkmemory
+local threshold = 100 -- MB
+local tracememory = false
+
+registertracker("fonts.otf.loader.memory",function(v) tracememory = v end)
+
+if not checkmemory then -- we need a generic plug (this code might move):
+
+ local collectgarbage = collectgarbage
+
+ checkmemory = function(previous,threshold) -- threshold in MB
+ local current = collectgarbage("count")
+ if previous then
+ local checked = (threshold or 64)*1024
+ if current - previous > checked then
+ collectgarbage("collect")
+ current = collectgarbage("count")
+ end
+ end
+ return current
+ end
+
+end
+
function otf.load(filename,sub,instance)
local base = file.basename(file.removesuffix(filename))
local name = file.removesuffix(base) -- already no suffix
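A small usage sketch of the checkmemory plug defined above, assuming the generic fallback (the threshold is in MB while collectgarbage("count") reports KB, hence the *1024 in the fallback):

    local used = checkmemory()   -- remember the current usage
    -- ... some memory hungry table building ...
    used = checkmemory(used,100) -- collect, and rebase, once more than ~100 MB was added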
@@ -130,9 +159,13 @@ function otf.load(filename,sub,instance)
data = otfreaders.loadfont(filename,sub or 1,instance) -- we can pass the number instead (if it comes from a name search)
if data then
-- todo: make this a plugin
+ local used = checkmemory()
local resources = data.resources
local svgshapes = resources.svgshapes
local sbixshapes = resources.sbixshapes
+ if cleanup == 0 then
+ checkmemory(used,threshold,tracememory)
+ end
if svgshapes then
resources.svgshapes = nil
if otf.svgenabled then
@@ -147,6 +180,11 @@ function otf.load(filename,sub,instance)
timestamp = timestamp,
}
end
+ if cleanup > 1 then
+ collectgarbage("collect")
+ else
+ checkmemory(used,threshold,tracememory)
+ end
end
if sbixshapes then
resources.sbixshapes = nil
@@ -162,18 +200,31 @@ function otf.load(filename,sub,instance)
timestamp = timestamp,
}
end
+ if cleanup > 1 then
+ collectgarbage("collect")
+ else
+ checkmemory(used,threshold,tracememory)
+ end
end
--
otfreaders.compact(data)
+ if cleanup == 0 then
+ checkmemory(used,threshold,tracememory)
+ end
otfreaders.rehash(data,"unicodes")
otfreaders.addunicodetable(data)
otfreaders.extend(data)
+ if cleanup == 0 then
+ checkmemory(used,threshold,tracememory)
+ end
otfreaders.pack(data)
report_otf("loading done")
report_otf("saving %a in cache",filename)
data = containers.write(otf.cache, hash, data)
if cleanup > 1 then
collectgarbage("collect")
+ else
+ checkmemory(used,threshold,tracememory)
end
stoptiming(otfreaders)
if elapsedtime then
@@ -181,10 +232,14 @@ function otf.load(filename,sub,instance)
end
if cleanup > 3 then
collectgarbage("collect")
+ else
+ checkmemory(used,threshold,tracememory)
end
data = containers.read(otf.cache,hash) -- this frees the old table and load the sparse one
if cleanup > 2 then
collectgarbage("collect")
+ else
+ checkmemory(used,threshold,tracememory)
end
else
data = nil
@@ -200,12 +255,12 @@ function otf.load(filename,sub,instance)
otfreaders.expand(data) -- inline tables
otfreaders.addunicodetable(data) -- only when not done yet
--
- otfenhancers.apply(data,filename,data)
+ otfenhancers.apply(data,filename,data) -- in context one can also use treatments
--
-- constructors.addcoreunicodes(data.resources.unicodes) -- still needed ?
--
if applyruntimefixes then
- applyruntimefixes(filename,data)
+ applyruntimefixes(filename,data) -- e.g. see treatments.lfg
end
--
data.metadata.math = data.resources.mathconstants
@@ -285,7 +340,7 @@ local function copytotfm(data,cache_id)
end
if mathspecs then
for unicode, character in next, characters do
- local d = descriptions[unicode]
+ local d = descriptions[unicode] -- we could use parent table here
local m = d.math
if m then
-- watch out: luatex uses horiz_variants for the parts
@@ -447,6 +502,8 @@ local function copytotfm(data,cache_id)
--
-- properties.name = specification.name
-- properties.sub = specification.sub
+ --
+ properties.private = properties.private or data.private or privateoffset
--
return {
characters = characters,
@@ -558,7 +615,7 @@ local function checkmathsize(tfmdata,mathsize)
local parameters = tfmdata.parameters
parameters.scriptpercentage = mathdata.ScriptPercentScaleDown
parameters.scriptscriptpercentage = mathdata.ScriptScriptPercentScaleDown
- parameters.mathsize = mathsize
+ parameters.mathsize = mathsize -- only when a number !
end
end
@@ -768,6 +825,7 @@ otf.coverup = {
multiple = justset,
kern = justset,
pair = justset,
+ single = justset,
ligature = function(coverage,unicode,ligature)
local first = ligature[1]
local tree = coverage[first]
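
For reference, the coverage passed to these justset/ligature builders is a plain nested table; a small hypothetical sketch of that shape (the character codes and the addligature helper are made up):

    local coverage = { }

    local function addligature(coverage,unicode,ligature)
        local tree = coverage
        for i=1,#ligature do
            local char   = ligature[i]
            local branch = tree[char]
            if not branch then
                branch = { }
                tree[char] = branch
            end
            tree = branch
        end
        tree.ligature = unicode
    end

    addligature(coverage,0xFB00,{ 0x66, 0x66 })        -- f + f     -> ff
    addligature(coverage,0xFB03,{ 0x66, 0x66, 0x69 })  -- f + f + i -> ffi

    print(coverage[0x66][0x66].ligature)        -- 64256 (ff)
    print(coverage[0x66][0x66][0x69].ligature)  -- 64259 (ffi)
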
diff --git a/tex/context/base/mkiv/font-otn.lua b/tex/context/base/mkiv/font-otn.lua
deleted file mode 100644
index ace7bf12b..000000000
--- a/tex/context/base/mkiv/font-otn.lua
+++ /dev/null
@@ -1,3929 +0,0 @@
-if not modules then modules = { } end modules ['font-otn'] = {
- version = 1.001,
- comment = "companion to font-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files",
-}
-
--- this is a context version which can contain experimental code, but when we
--- have serious patches we also need to change the other two font-otn files
-
--- at some point i might decide to convert the whole list into a table and then
--- run over that instead (but it has some drawbacks as we also need to deal with
--- attributes and such so we need to keep a lot of track - which is why i rejected
--- that method - although it has become a bit easier in the meantime so it might
--- become an alternative (by that time i probably have gone completely lua) .. the
--- usual chicken-egg issues ... maybe mkix as it's no real tex any more then
-
--- preprocessors = { "nodes" }
-
--- anchor class : mark, mkmk, curs, mklg (todo)
--- anchor type : mark, basechar, baselig, basemark, centry, cexit, max (todo)
-
--- this is still somewhat preliminary and it will get better in due time;
--- much functionality could only be implemented thanks to the husayni font
--- of Idris Samawi Hamid to whom we dedicate this module.
-
--- in retrospect it always looks easy but believe it or not, it took a lot
--- of work to get proper open type support done: buggy fonts, fuzzy specs,
--- special made testfonts, many skype sessions between taco, idris and me,
--- torture tests etc etc ... unfortunately the code does not show how much
--- time it took ...
-
--- todo:
---
--- extension infrastructure (for usage out of context)
--- sorting features according to vendors/renderers
--- alternative loop quitters
--- check cursive and r2l
--- find out where ignore-mark-classes went
--- default features (per language, script)
--- handle positions (we need example fonts)
--- handle gpos_single (we might want an extra width field in glyph nodes because adding kerns might interfere)
--- mark (to mark) code is still not what it should be (too messy but we need some more extreme husayni tests)
--- remove some optimizations (when I have a faster machine)
---
--- beware:
---
--- we do some disc juggling where we need to keep in mind that the
--- pre, post and replace fields can have prev pointers to a nesting
--- node ... i wonder if that is still needed
---
--- not possible:
---
--- \discretionary {alpha-} {betagammadelta}
--- {\discretionary {alphabeta-} {gammadelta}
--- {\discretionary {alphabetagamma-} {delta}
--- {alphabetagammadelta}}}
-
---[[ldx--
-This module is a bit more split up than I'd like but since we also want to test
-with plain TeX it has to be so. This module is part of ConTeXt and discussion
-about improvements and functionality mostly happens on the ConTeXt mailing list.
-
-The specification of OpenType is kind of vague. Apart from the lack of a proper
-free specification there's also the problem that Microsoft and Adobe
-may have their own interpretation of how and in what order to apply features.
-In general the Microsoft website has more detailed specifications and is a
-better reference. There is also some information in the FontForge help files.
-
-Because so much is possible, fonts might contain bugs and/or be made to
-work with certain renderers. These may evolve over time, which may have the side
-effect that suddenly fonts behave differently.
-
-After a lot of experiments (mostly by Taco, me and Idris) we're now at yet another
-implementation. Of course all errors are mine and of course the code can be
-improved. There are quite some optimizations going on here and processing speed
-is currently acceptable. Not all functions are implemented yet, often because I
-lack the fonts for testing. Many scripts are not yet supported either, but I will
-look into them as soon as users ask for it.
-
-The specification leaves room for interpretation. In case of doubt the Microsoft
-implementation is the reference as it is the most complete one. As they deal with
-lots of scripts and fonts, Kai and Ivo did a lot of testing of the generic code and
-their suggestions help improve the code. I'm aware that not all border cases can be
-taken care of, unless we accept excessive runtime, and even then the interference
-with other mechanisms (like hyphenation) is not trivial.
-
-Glyphs are indexed not by unicode but in their own way. This is because there is no
-relationship with unicode at all, apart from the fact that a font might cover certain
-ranges of characters. One character can have multiple shapes. However, at the
-TeX end we use unicode, so all extra glyphs are mapped into a private
-space. This is needed because we need to access them and TeX has to include
-them in the output eventually.
-
-The raw table as it comes from FontForge gets reorganized to fit our needs.
-In ConTeXt that table is packed (similar tables are shared) and cached on disk
-so that successive runs can use the optimized table (after loading the table is
-unpacked). The flattening code used later is a prelude to an even more compact table
-format (and as such it keeps evolving).
-
-This module is sparsely documented because it is a moving target. The table format
-of the reader changes and we experiment a lot with different methods for supporting
-features.
-
-As with the rest of the font code, we may decide to store more information in
-this table.
-
-Incrementing the version number will force a re-cache. We jump the number by one
-when there's a fix in the FontForge library or the Lua code that
-results in different tables.
---ldx]]--
-
--- action handler chainproc
---
--- gsub_single ok ok
--- gsub_multiple ok ok
--- gsub_alternate ok ok
--- gsub_ligature ok ok
--- gsub_context ok --
--- gsub_contextchain ok --
--- gsub_reversecontextchain ok --
--- chainsub -- ok
--- reversesub -- ok
--- gpos_mark2base ok ok
--- gpos_mark2ligature ok ok
--- gpos_mark2mark ok ok
--- gpos_cursive ok untested
--- gpos_single ok ok
--- gpos_pair ok ok
--- gpos_context ok --
--- gpos_contextchain ok --
---
--- todo: contextpos
---
--- actions:
---
--- handler : actions triggered by lookup
--- chainproc : actions triggered by contextual lookup
--- chainmore : multiple substitutions triggered by contextual lookup (e.g. fij -> f + ij)
---
--- remark: the 'not implemented yet' variants will be done when we have fonts that use them
-
--- We used to have independent hashes for lookups but as the tags are unique
--- we now use only one hash. If needed we can have multiple again but in that
--- case I will probably prefix (i.e. rename) the lookups in the cached font file.
-
--- Todo: make plugin feature that operates on char/glyphnode arrays
-
-local type, next, tonumber = type, next, tonumber
-local random = math.random
-local formatters = string.formatters
-
-local logs, trackers, nodes, attributes = logs, trackers, nodes, attributes
-
-local registertracker = trackers.register
-local registerdirective = directives.register
-
-local fonts = fonts
-local otf = fonts.handlers.otf
-
-local trace_lookups = false registertracker("otf.lookups", function(v) trace_lookups = v end)
-local trace_singles = false registertracker("otf.singles", function(v) trace_singles = v end)
-local trace_multiples = false registertracker("otf.multiples", function(v) trace_multiples = v end)
-local trace_alternatives = false registertracker("otf.alternatives", function(v) trace_alternatives = v end)
-local trace_ligatures = false registertracker("otf.ligatures", function(v) trace_ligatures = v end)
-local trace_contexts = false registertracker("otf.contexts", function(v) trace_contexts = v end)
-local trace_marks = false registertracker("otf.marks", function(v) trace_marks = v end)
-local trace_kerns = false registertracker("otf.kerns", function(v) trace_kerns = v end)
-local trace_cursive = false registertracker("otf.cursive", function(v) trace_cursive = v end)
-local trace_preparing = false registertracker("otf.preparing", function(v) trace_preparing = v end)
-local trace_bugs = false registertracker("otf.bugs", function(v) trace_bugs = v end)
-local trace_details = false registertracker("otf.details", function(v) trace_details = v end)
-local trace_applied = false registertracker("otf.applied", function(v) trace_applied = v end)
-local trace_steps = false registertracker("otf.steps", function(v) trace_steps = v end)
-local trace_skips = false registertracker("otf.skips", function(v) trace_skips = v end)
-local trace_directions = false registertracker("otf.directions", function(v) trace_directions = v end)
-
-local trace_kernruns = false registertracker("otf.kernruns", function(v) trace_kernruns = v end)
-local trace_discruns = false registertracker("otf.discruns", function(v) trace_discruns = v end)
-local trace_compruns = false registertracker("otf.compruns", function(v) trace_compruns = v end)
-
-local quit_on_no_replacement = true -- maybe per font
-local zwnjruns = true
-
-registerdirective("otf.zwnjruns", function(v) zwnjruns = v end)
-registerdirective("otf.chain.quitonnoreplacement",function(value) quit_on_no_replacement = value end)
-
-local report_direct = logs.reporter("fonts","otf direct")
-local report_subchain = logs.reporter("fonts","otf subchain")
-local report_chain = logs.reporter("fonts","otf chain")
-local report_process = logs.reporter("fonts","otf process")
-local report_prepare = logs.reporter("fonts","otf prepare")
-local report_run = logs.reporter("fonts","otf run")
-
-registertracker("otf.verbose_chain", function(v) otf.setcontextchain(v and "verbose") end)
-registertracker("otf.normal_chain", function(v) otf.setcontextchain(v and "normal") end)
-
-registertracker("otf.replacements", "otf.singles,otf.multiples,otf.alternatives,otf.ligatures")
-registertracker("otf.positions","otf.marks,otf.kerns,otf.cursive")
-registertracker("otf.actions","otf.replacements,otf.positions")
-registertracker("otf.injections","nodes.injections")
-
-registertracker("*otf.sample","otf.steps,otf.actions,otf.analyzing")
-
-local nuts = nodes.nuts
-local tonode = nuts.tonode
-local tonut = nuts.tonut
-
-local getfield = nuts.getfield
-local setfield = nuts.setfield
-local getnext = nuts.getnext
-local setnext = nuts.setnext
-local getprev = nuts.getprev
-local setprev = nuts.setprev
-local getid = nuts.getid
-local getattr = nuts.getattr
-local setattr = nuts.setattr
-local getprop = nuts.getprop
-local setprop = nuts.setprop
-local getfont = nuts.getfont
-local getsubtype = nuts.getsubtype
-local setsubtype = nuts.setsubtype
-local getchar = nuts.getchar
-local setchar = nuts.setchar
-
-local insert_node_after = nuts.insert_after
-local copy_node = nuts.copy
-local copy_node_list = nuts.copy_list
-local find_node_tail = nuts.tail
-local flush_node_list = nuts.flush_list
-local flush_node = nuts.flush_node
-local end_of_math = nuts.end_of_math
-local traverse_nodes = nuts.traverse
-local traverse_id = nuts.traverse_id
-
-local setmetatableindex = table.setmetatableindex
-
-local zwnj = 0x200C
-local zwj = 0x200D
-local wildcard = "*"
-local default = "dflt"
-
-local nodecodes = nodes.nodecodes
-local glyphcodes = nodes.glyphcodes
-local disccodes = nodes.disccodes
-
-local glyph_code = nodecodes.glyph
-local disc_code = nodecodes.disc
-local math_code = nodecodes.math
-local dir_code = nodecodes.dir
-local localpar_code = nodecodes.localpar
-
-local discretionary_code = disccodes.discretionary
-local ligature_code = glyphcodes.ligature
-
-local privateattribute = attributes.private
-
--- Something is messed up: we have two mark / ligature indices, one at the injection
--- end and one here ... this is based on KE's patches but there is something fishy
--- there as I'm pretty sure that for husayni we need some connection (as it's much
--- more complex than an average font) but I need proper examples of all cases, not
--- of only some.
-
-local a_state = privateattribute('state')
-local a_cursbase = privateattribute('cursbase') -- to be checked, probably can go
-
-local injections = nodes.injections
-local setmark = injections.setmark
-local setcursive = injections.setcursive
-local setkern = injections.setkern
-local setpair = injections.setpair
-local resetinjection = injections.reset
-local copyinjection = injections.copy
-local setligaindex = injections.setligaindex
-local getligaindex = injections.getligaindex
-
-local cursonce = true
-
-local fonthashes = fonts.hashes
-local fontdata = fonthashes.identifiers
-
-local otffeatures = fonts.constructors.features.otf
-local registerotffeature = otffeatures.register
-
-local onetimemessage = fonts.loggers.onetimemessage or function() end
-
-local getrandom = utilities and utilities.randomizer and utilities.randomizer.get
-
-otf.defaultnodealternate = "none" -- first last
-
--- we share some vars here, after all, we have no nested lookups and less code
-
-local tfmdata = false
-local characters = false
-local descriptions = false
-local resources = false
-local marks = false
-local currentfont = false
-local lookuptable = false
-local anchorlookups = false
-local lookuptypes = false
-local lookuptags = false
-local handlers = { }
-local rlmode = 0
-local featurevalue = false
-
-local sweephead = { }
-local sweepnode = nil
-local sweepprev = nil
-local sweepnext = nil
-
-local notmatchpre = { }
-local notmatchpost = { }
-local notmatchreplace = { }
-
--- we use this for special testing and documentation
-
-local checkstep = (nodes and nodes.tracers and nodes.tracers.steppers.check) or function() end
-local registerstep = (nodes and nodes.tracers and nodes.tracers.steppers.register) or function() end
-local registermessage = (nodes and nodes.tracers and nodes.tracers.steppers.message) or function() end
-
-local function logprocess(...)
- if trace_steps then
- registermessage(...)
- end
- report_direct(...)
-end
-
-local function logwarning(...)
- report_direct(...)
-end
-
-local f_unicode = formatters["%U"]
-local f_uniname = formatters["%U (%s)"]
-local f_unilist = formatters["% t (% t)"]
-
-local function gref(n) -- currently the same as in font-otb
- if type(n) == "number" then
- local description = descriptions[n]
- local name = description and description.name
- if name then
- return f_uniname(n,name)
- else
- return f_unicode(n)
- end
- elseif n then
- local num, nam = { }, { }
- for i=1,#n do
- local ni = n[i]
- if tonumber(ni) then -- later we will start at 2
- local di = descriptions[ni]
- num[i] = f_unicode(ni)
- nam[i] = di and di.name or "-"
- end
- end
- return f_unilist(num,nam)
- else
- return ""
- end
-end
-
-local function cref(kind,chainname,chainlookupname,lookupname,index) -- not in the mood to alias f_
- if index then
- return formatters["feature %a, chain %a, sub %a, lookup %a, index %a"](kind,chainname,chainlookupname,lookuptags[lookupname],index)
- elseif lookupname then
- return formatters["feature %a, chain %a, sub %a, lookup %a"](kind,chainname,chainlookupname,lookuptags[lookupname])
- elseif chainlookupname then
- return formatters["feature %a, chain %a, sub %a"](kind,lookuptags[chainname],lookuptags[chainlookupname])
- elseif chainname then
- return formatters["feature %a, chain %a"](kind,lookuptags[chainname])
- else
- return formatters["feature %a"](kind)
- end
-end
-
-local function pref(kind,lookupname)
- return formatters["feature %a, lookup %a"](kind,lookuptags[lookupname])
-end
-
--- We can assume that languages that use marks are not hyphenated. We can also assume
--- that at most one discretionary is present.
-
--- We do need components in funny kerning mode but maybe I can better reconstruct then
--- as we do have the font components info available; removing components makes the
--- previous code much simpler. Also, later on copying and freeing becomes easier.
--- However, for arabic we need to keep them around for the sake of mark placement
--- and indices.
-
-local function copy_glyph(g) -- next and prev are untouched !
- local components = getfield(g,"components")
- if components then
- setfield(g,"components",nil)
- local n = copy_node(g)
- copyinjection(n,g) -- we need to preserve the lig indices
- setfield(g,"components",components)
- return n
- else
- local n = copy_node(g)
- copyinjection(n,g) -- we need to preserve the lig indices
- return n
- end
-end
-
-local function flattendisk(head,disc)
- local replace = getfield(disc,"replace")
- setfield(disc,"replace",nil)
- flush_node(disc)
- if head == disc then
- local next = getnext(disc)
- if replace then
- if next then
- local tail = find_node_tail(replace)
- setnext(tail,next)
- setprev(next,tail)
- end
- return replace, replace
- elseif next then
- return next, next
- else
- return -- maybe warning
- end
- else
- local next = getnext(disc)
- local prev = getprev(disc)
- if replace then
- local tail = find_node_tail(replace)
- if next then
- setnext(tail,next)
- setprev(next,tail)
- end
- setnext(prev,replace)
- setprev(replace,prev)
- return head, replace
- else
- if next then
- setprev(next,prev)
- end
- setnext(prev,next)
- return head, next
- end
- end
-end
-
-local function appenddisc(disc,list)
- local post = getfield(disc,"post")
- local replace = getfield(disc,"replace")
- local phead = list
- local rhead = copy_node_list(list)
- local ptail = find_node_tail(post)
- local rtail = find_node_tail(replace)
- if post then
- setnext(ptail,phead)
- setprev(phead,ptail)
- else
- setfield(disc,"post",phead)
- end
- if replace then
- setnext(rtail,rhead)
- setprev(rhead,rtail)
- else
- setfield(disc,"replace",rhead)
- end
-end
-
--- start is a mark and we need to keep that one
-
-local function markstoligature(kind,lookupname,head,start,stop,char)
- if start == stop and getchar(start) == char then
- return head, start
- else
- local prev = getprev(start)
- local next = getnext(stop)
- setprev(start,nil)
- setnext(stop,nil)
- local base = copy_glyph(start)
- if head == start then
- head = base
- end
- resetinjection(base)
- setchar(base,char)
- setsubtype(base,ligature_code)
- setfield(base,"components",start)
- if prev then
- setnext(prev,base)
- end
- if next then
- setprev(next,base)
- end
- setnext(base,next)
- setprev(base,prev)
- return head, base
- end
-end
-
--- The next code is somewhat complicated by the fact that some fonts can have ligatures made
--- from ligatures that themselves have marks. This was identified by Kai in for instance
--- arabtype: KAF LAM SHADDA ALEF FATHA (0x0643 0x0644 0x0651 0x0627 0x064E). This becomes
--- KAF LAM-ALEF with a SHADDA on the first and a FATHA on the second component. In the next
--- iteration this becomes a KAF-LAM-ALEF with a SHADDA on the second and a FATHA on the
--- third component.
-
-local function getcomponentindex(start) -- we could store this offset in the glyph (nofcomponents)
- if getid(start) ~= glyph_code then -- and then get rid of all components
- return 0
- elseif getsubtype(start) == ligature_code then
- local i = 0
- local components = getfield(start,"components")
- while components do
- i = i + getcomponentindex(components)
- components = getnext(components)
- end
- return i
- elseif not marks[getchar(start)] then
- return 1
- else
- return 0
- end
-end
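
Outside LuaTeX the counting rule above can be modelled with plain tables instead of nodes; a rough sketch, with a made-up glyph record shape and mark set:

    local marks = { [0x064E] = true }            -- pretend FATHA is the only mark

    local function componentindex(glyph)         -- glyph = { char = ..., components = { ... } }
        if glyph.components then                 -- a ligature: count its parts recursively
            local n = 0
            for i=1,#glyph.components do
                n = n + componentindex(glyph.components[i])
            end
            return n
        elseif marks[glyph.char] then            -- marks do not count
            return 0
        else                                     -- a base glyph counts as one
            return 1
        end
    end

    local lam_alef = { char = 0xFEFB, components = { { char = 0x0644 }, { char = 0x0627 } } }
    print(componentindex(lam_alef))              -- 2
    print(componentindex({ char = 0x064E }))     -- 0
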
-
-local a_noligature = attributes.private("noligature")
-
-local function toligature(kind,lookupname,head,start,stop,char,markflag,discfound) -- brr head
- if getattr(start,a_noligature) == 1 then
- -- so we can do: e\noligature{ff}e e\noligature{f}fie (we only look at the first)
- return head, start
- end
- if start == stop and getchar(start) == char then
- resetinjection(start)
- setchar(start,char)
- return head, start
- end
- -- needs testing (side effects):
- local components = getfield(start,"components")
- if components then
- -- we get a double free .. needs checking
- -- flush_node_list(components)
- end
- --
- local prev = getprev(start)
- local next = getnext(stop)
- local comp = start
- setprev(start,nil)
- setnext(stop,nil)
- local base = copy_glyph(start)
- if start == head then
- head = base
- end
- resetinjection(base)
- setchar(base,char)
- setsubtype(base,ligature_code)
- setfield(base,"components",comp) -- start can have components ... do we need to flush?
- if prev then
- setnext(prev,base)
- end
- if next then
- setprev(next,base)
- end
- setprev(base,prev)
- setnext(base,next)
- if not discfound then
- local deletemarks = markflag ~= "mark"
- local components = start
- local baseindex = 0
- local componentindex = 0
- local head = base
- local current = base
- -- first we loop over the glyphs in start .. stop
- while start do
- local char = getchar(start)
- if not marks[char] then
- baseindex = baseindex + componentindex
- componentindex = getcomponentindex(start)
- elseif not deletemarks then -- quite fishy
- setligaindex(start,baseindex + getligaindex(start,componentindex))
- if trace_marks then
- logwarning("%s: keep mark %s, gets index %s",pref(kind,lookupname),gref(char),getligaindex(start))
- end
- local n = copy_node(start)
- copyinjection(n,start)
- head, current = insert_node_after(head,current,n) -- unlikely that mark has components
- elseif trace_marks then
- logwarning("%s: delete mark %s",pref(kind,lookupname),gref(char))
- end
- start = getnext(start)
- end
- -- we can have one accent as part of a lookup and another following
- -- local start = components -- was wrong (component scanning was introduced when more complex ligs in devanagari was added)
- local start = getnext(current)
- while start and getid(start) == glyph_code do
- local char = getchar(start)
- if marks[char] then
- setligaindex(start,baseindex + getligaindex(start,componentindex))
- if trace_marks then
- logwarning("%s: set mark %s, gets index %s",pref(kind,lookupname),gref(char),getligaindex(start))
- end
- else
- break
- end
- start = getnext(start)
- end
- else
- -- discfound ... forget about marks .. probably no scripts that hyphenate and have marks
- local discprev = getprev(discfound)
- local discnext = getnext(discfound)
- if discprev and discnext then
- -- we assume normalization in context, and don't care about generic ... especially
- -- \- can give problems as there we can have a negative char but that won't match
- -- anyway
- local pre = getfield(discfound,"pre")
- local post = getfield(discfound,"post")
- local replace = getfield(discfound,"replace")
- if not replace then -- todo: signal simple hyphen
- local prev = getprev(base)
- local copied = copy_node_list(comp)
- setprev(discnext,nil) -- also blocks funny assignments
- setnext(discprev,nil) -- also blocks funny assignments
- if pre then
- setnext(discprev,pre)
- setprev(pre,discprev)
- end
- pre = comp
- if post then
- local tail = find_node_tail(post)
- setnext(tail,discnext)
- setprev(discnext,tail)
- setprev(post,nil)
- else
- post = discnext
- end
- setnext(prev,discfound)
- setprev(discfound,prev)
- setnext(discfound,next)
- setprev(next,discfound)
- setnext(base,nil)
- setprev(base,nil)
- setfield(base,"components",copied)
- setfield(discfound,"pre",pre)
- setfield(discfound,"post",post)
- setfield(discfound,"replace",base)
- setsubtype(discfound,discretionary_code)
- base = prev -- restart
- end
- end
- end
- return head, base
-end
-
-local function multiple_glyphs(head,start,multiple,ignoremarks)
- local nofmultiples = #multiple
- if nofmultiples > 0 then
- resetinjection(start)
- setchar(start,multiple[1])
- if nofmultiples > 1 then
- local sn = getnext(start)
- for k=2,nofmultiples do -- todo: use insert_node
--- untested:
---
--- while ignoremarks and marks[getchar(sn)] then
--- local sn = getnext(sn)
--- end
- local n = copy_node(start) -- ignore components
- resetinjection(n)
- setchar(n,multiple[k])
- setprev(n,start)
- setnext(n,sn)
- if sn then
- setprev(sn,n)
- end
- setnext(start,n)
- start = n
- end
- end
- return head, start, true
- else
- if trace_multiples then
- logprocess("no multiple for %s",gref(getchar(start)))
- end
- return head, start, false
- end
-end
-
-local function get_alternative_glyph(start,alternatives,value,trace_alternatives)
- local n = #alternatives
- if value == "random" then
- local r = getrandom and getrandom("glyph",1,n) or random(1,n)
- return alternatives[r], trace_alternatives and formatters["value %a, taking %a"](value,r)
- elseif value == "first" then
- return alternatives[1], trace_alternatives and formatters["value %a, taking %a"](value,1)
- elseif value == "last" then
- return alternatives[n], trace_alternatives and formatters["value %a, taking %a"](value,n)
- else
- value = tonumber(value)
- if type(value) ~= "number" then
- return alternatives[1], trace_alternatives and formatters["invalid value %s, taking %a"](value,1)
- elseif value > n then
- local defaultalt = otf.defaultnodealternate
- if defaultalt == "first" then
- return alternatives[1], trace_alternatives and formatters["invalid value %s, taking %a"](value,1)
- elseif defaultalt == "last" then
- return alternatives[n], trace_alternatives and formatters["invalid value %s, taking %a"](value,n)
- else
- return false, trace_alternatives and formatters["invalid value %a, %s"](value,"out of range")
- end
- elseif value == 0 then
- return getchar(start), trace_alternatives and formatters["invalid value %a, %s"](value,"no change")
- elseif value < 1 then
- return alternatives[1], trace_alternatives and formatters["invalid value %a, taking %a"](value,1)
- else
- return alternatives[value], trace_alternatives and formatters["value %a, taking %a"](value,value)
- end
- end
-end
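
Stripped of the node access and tracing, the selection policy above is small; a condensed sketch in plain Lua (pickalternative and the default argument are hypothetical names, and the value == 0 case is folded into the "less than one" branch):

    local function pickalternative(alternatives,value,default)
        local n = #alternatives
        if value == "random" then
            return alternatives[math.random(1,n)]
        elseif value == "first" then
            return alternatives[1]
        elseif value == "last" then
            return alternatives[n]
        end
        value = tonumber(value)
        if not value or value < 1 then
            return alternatives[1]            -- invalid request: take the first
        elseif value > n then
            if default == "first" then
                return alternatives[1]        -- out of range: clamp according to the default
            elseif default == "last" then
                return alternatives[n]
            else
                return false                  -- out of range and no default set
            end
        else
            return alternatives[value]
        end
    end

    print(pickalternative({ 0xE001, 0xE002, 0xE003 },2))          -- 57346
    print(pickalternative({ 0xE001, 0xE002, 0xE003 },9,"last"))   -- 57347
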
-
--- handlers
-
-function handlers.gsub_single(head,start,kind,lookupname,replacement)
- if trace_singles then
- logprocess("%s: replacing %s by single %s",pref(kind,lookupname),gref(getchar(start)),gref(replacement))
- end
- resetinjection(start)
- setchar(start,replacement)
- return head, start, true
-end
-
-function handlers.gsub_alternate(head,start,kind,lookupname,alternative,sequence)
- local value = featurevalue == true and tfmdata.shared.features[kind] or featurevalue
- local choice, comment = get_alternative_glyph(start,alternative,value,trace_alternatives)
- if choice then
- if trace_alternatives then
- logprocess("%s: replacing %s by alternative %a to %s, %s",pref(kind,lookupname),gref(getchar(start)),choice,gref(choice),comment)
- end
- resetinjection(start)
- setchar(start,choice)
- else
- if trace_alternatives then
- logwarning("%s: no variant %a for %s, %s",pref(kind,lookupname),value,gref(getchar(start)),comment)
- end
- end
- return head, start, true
-end
-
-function handlers.gsub_multiple(head,start,kind,lookupname,multiple,sequence)
- if trace_multiples then
- logprocess("%s: replacing %s by multiple %s",pref(kind,lookupname),gref(getchar(start)),gref(multiple))
- end
- return multiple_glyphs(head,start,multiple,sequence.flags[1])
-end
-
-function handlers.gsub_ligature(head,start,kind,lookupname,ligature,sequence)
- local s, stop = getnext(start), nil
- local startchar = getchar(start)
- if marks[startchar] then
- while s do
- local id = getid(s)
- if id == glyph_code and getfont(s) == currentfont and getsubtype(s)<256 then
- local lg = ligature[getchar(s)]
- if lg then
- stop = s
- ligature = lg
- s = getnext(s)
- else
- break
- end
- else
- break
- end
- end
- if stop then
- local lig = ligature.ligature
- if lig then
- if trace_ligatures then
- local stopchar = getchar(stop)
- head, start = markstoligature(kind,lookupname,head,start,stop,lig)
- logprocess("%s: replacing %s upto %s by ligature %s case 1",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(getchar(start)))
- else
- head, start = markstoligature(kind,lookupname,head,start,stop,lig)
- end
- return head, start, true, false
- else
- -- ok, goto next lookup
- end
- end
- else
- local skipmark = sequence.flags[1]
- local discfound = false
- local lastdisc = nil
- while s do
- local id = getid(s)
- if id == glyph_code and getsubtype(s)<256 then -- not needed
- if getfont(s) == currentfont then -- also not needed only when mark
- local char = getchar(s)
- if skipmark and marks[char] then
- s = getnext(s)
- else -- ligature is a tree
- local lg = ligature[char] -- can there be multiple in a row? maybe in a bad font
- if lg then
- if not discfound and lastdisc then
- discfound = lastdisc
- lastdisc = nil
- end
- stop = s -- needed for fake so outside then
- ligature = lg
- s = getnext(s)
- else
- break
- end
- end
- else
- break
- end
- elseif id == disc_code then
- lastdisc = s
- s = getnext(s)
- else
- break
- end
- end
- local lig = ligature.ligature -- can't we get rid of this .ligature?
- if lig then
- if stop then
- if trace_ligatures then
- local stopchar = getchar(stop)
- head, start = toligature(kind,lookupname,head,start,stop,lig,skipmark,discfound)
- logprocess("%s: replacing %s upto %s by ligature %s case 2",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(getchar(start)))
- else
- head, start = toligature(kind,lookupname,head,start,stop,lig,skipmark,discfound)
- end
- else
- -- weird but happens (in some arabic font)
- resetinjection(start)
- setchar(start,lig)
- if trace_ligatures then
- logprocess("%s: replacing %s by (no real) ligature %s case 3",pref(kind,lookupname),gref(startchar),gref(lig))
- end
- end
- return head, start, true, discfound
- else
- -- weird but happens, pseudo ligatures ... just the components
- end
- end
- return head, start, false, discfound
-end
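
The ligature lookup itself is just a walk over a character tree; a standalone sketch of that walk on an array of character codes (the tree content is made up, and disc/mark handling is left out):

    local ligatures = {
        [0x66] = {                                    -- f ...
            [0x66] = { ligature = 0xFB00,             -- f f   -> ff
                       [0x69] = { ligature = 0xFB03 } -- f f i -> ffi
                     },
            [0x69] = { ligature = 0xFB01 },           -- f i   -> fi
        },
    }

    local function findligature(chars,start)
        local tree = ligatures[chars[start]]
        if not tree then
            return
        end
        local found, stop = nil, nil
        local i = start + 1
        while chars[i] do
            local branch = tree[chars[i]]
            if not branch then
                break
            end
            tree = branch
            if tree.ligature then
                found, stop = tree.ligature, i        -- remember the longest match so far
            end
            i = i + 1
        end
        return found, stop
    end

    print(findligature({ 0x66, 0x66, 0x69, 0x78 },1)) -- 64259  3   (f f i x -> ffi + x)
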
-
-function handlers.gpos_single(head,start,kind,lookupname,kerns,sequence,lookuphash,i,injection)
- local startchar = getchar(start)
- local dx, dy, w, h = setpair(start,tfmdata.parameters.factor,rlmode,sequence.flags[4],kerns,injection) -- ,characters[startchar])
- if trace_kerns then
- logprocess("%s: shifting single %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),dx,dy,w,h)
- end
- return head, start, false
-end
-
-function handlers.gpos_pair(head,start,kind,lookupname,kerns,sequence,lookuphash,i,injection)
- -- todo: kerns in disc nodes: pre, post, replace -> loop over disc too
- -- todo: kerns in components of ligatures
- local snext = getnext(start)
- if not snext then
- return head, start, false
- else
- local prev = start
- local done = false
- local factor = tfmdata.parameters.factor
- local lookuptype = lookuptypes[lookupname]
- while snext and getid(snext) == glyph_code and getfont(snext) == currentfont and getsubtype(snext)<256 do
- local nextchar = getchar(snext)
- local krn = kerns[nextchar]
- if not krn and marks[nextchar] then
- prev = snext
- snext = getnext(snext)
- else
- if not krn then
- -- skip
- elseif type(krn) == "table" then
- if lookuptype == "pair" then -- probably not needed
- local a, b = krn[2], krn[3]
- if a and #a > 0 then
- local x, y, w, h = setpair(start,factor,rlmode,sequence.flags[4],a,injection) -- characters[startchar])
- if trace_kerns then
- local startchar = getchar(start)
- logprocess("%s: shifting first of pair %s and %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h)
- end
- end
- if b and #b > 0 then
- local x, y, w, h = setpair(snext,factor,rlmode,sequence.flags[4],b,injection) -- characters[nextchar])
- if trace_kerns then
- local startchar = getchar(start)
- logprocess("%s: shifting second of pair %s and %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h)
- end
- end
- else -- wrong ... position has different entries
- report_process("%s: check this out (old kern stuff)",pref(kind,lookupname))
- -- local a, b = krn[2], krn[6]
- -- if a and a ~= 0 then
- -- local k = setkern(snext,factor,rlmode,a)
- -- if trace_kerns then
- -- logprocess("%s: inserting first kern %s between %s and %s",pref(kind,lookupname),k,gref(getchar(prev)),gref(nextchar))
- -- end
- -- end
- -- if b and b ~= 0 then
- -- logwarning("%s: ignoring second kern xoff %s",pref(kind,lookupname),b*factor)
- -- end
- end
- done = true
- elseif krn ~= 0 then
- local k = setkern(snext,factor,rlmode,krn,injection)
- if trace_kerns then
- logprocess("%s: inserting kern %s between %s and %s",pref(kind,lookupname),k,gref(getchar(prev)),gref(nextchar)) -- prev?
- end
- done = true
- end
- break
- end
- end
- return head, start, done
- end
-end
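
The kerns table consumed here maps a first character to a table of second characters, each entry being either a single kern value or a pair of value records; a hypothetical sketch of reading such data (the records and the factor are invented):

    local factor = 65536/1000                     -- pretend: design units to scaled points
    local kerns  = {
        [0x41] = { [0x56] = -80 },                -- A V : a plain kern
        [0x66] = { [0x2E] = { "pair", { 0, 0, -30, 0 }, false } }, -- f . : value records
    }

    local function kernbetween(first,second)
        local krn = kerns[first] and kerns[first][second]
        if not krn then
            return 0
        elseif type(krn) == "table" then
            local a = krn[2]                      -- first value record: x, y, w, h
            return (a and a[3] or 0)*factor       -- only the advance part, for simplicity
        else
            return krn*factor
        end
    end

    print(kernbetween(0x41,0x56))                 -- -5242.88 (scaled points)
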
-
---[[ldx--
-We get hits on a mark, but we're not sure if it has to be applied, so
-we need to explicitly test for basechar, baselig and basemark entries.
---ldx]]--
-
-function handlers.gpos_mark2base(head,start,kind,lookupname,markanchors,sequence)
- local markchar = getchar(start)
- if marks[markchar] then
- local base = getprev(start) -- [glyph] [start=mark]
- if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then
- local basechar = getchar(base)
- if marks[basechar] then
- while true do
- base = getprev(base)
- if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then
- basechar = getchar(base)
- if not marks[basechar] then
- break
- end
- else
- if trace_bugs then
- logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar))
- end
- return head, start, false
- end
- end
- end
- local baseanchors = descriptions[basechar]
- if baseanchors then
- baseanchors = baseanchors.anchors
- end
- if baseanchors then
- local baseanchors = baseanchors['basechar']
- if baseanchors then
- local al = anchorlookups[lookupname]
- for anchor,ba in next, baseanchors do
- if al[anchor] then
- local ma = markanchors[anchor]
- if ma then
- local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar])
- if trace_marks then
- logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%p,%p)",
- pref(kind,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
- end
- return head, start, true
- end
- end
- end
- if trace_bugs then
- logwarning("%s, no matching anchors for mark %s and base %s",pref(kind,lookupname),gref(markchar),gref(basechar))
- end
- end
- elseif trace_bugs then
- -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(basechar))
- onetimemessage(currentfont,basechar,"no base anchors",report_fonts)
- end
- elseif trace_bugs then
- logwarning("%s: prev node is no char",pref(kind,lookupname))
- end
- elseif trace_bugs then
- logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar))
- end
- return head, start, false
-end
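
The anchor matching boils down to finding an anchor class that both the base and the mark define and taking the difference of the two positions; a simplified sketch with made-up anchor data (the real setmark also applies the scale factor and direction):

    local baseanchors = { basechar = { top = { x = 520, y = 710 } } }
    local markanchors = { top = { x = 260, y = 40 } }

    local function markoffset(baseanchors,markanchors)
        for class, ba in pairs(baseanchors.basechar) do
            local ma = markanchors[class]
            if ma then
                return ba.x - ma.x, ba.y - ma.y   -- shift that puts the mark on its anchor
            end
        end
    end

    print(markoffset(baseanchors,markanchors))    -- 260  670
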
-
-function handlers.gpos_mark2ligature(head,start,kind,lookupname,markanchors,sequence)
- -- check chainpos variant
- local markchar = getchar(start)
- if marks[markchar] then
- local base = getprev(start) -- [glyph] [optional marks] [start=mark]
- if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then
- local basechar = getchar(base)
- if marks[basechar] then
- while true do
- base = getprev(base)
- if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then
- basechar = getchar(base)
- if not marks[basechar] then
- break
- end
- else
- if trace_bugs then
- logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar))
- end
- return head, start, false
- end
- end
- end
- local index = getligaindex(start)
- local baseanchors = descriptions[basechar]
- if baseanchors then
- baseanchors = baseanchors.anchors
- if baseanchors then
- local baseanchors = baseanchors['baselig']
- if baseanchors then
- local al = anchorlookups[lookupname]
- for anchor, ba in next, baseanchors do
- if al[anchor] then
- local ma = markanchors[anchor]
- if ma then
- ba = ba[index]
- if ba then
- local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar]) -- index
- if trace_marks then
- logprocess("%s, anchor %s, index %s, bound %s: anchoring mark %s to baselig %s at index %s => (%p,%p)",
- pref(kind,lookupname),anchor,index,bound,gref(markchar),gref(basechar),index,dx,dy)
- end
- return head, start, true
- else
- if trace_bugs then
- logwarning("%s: no matching anchors for mark %s and baselig %s with index %a",pref(kind,lookupname),gref(markchar),gref(basechar),index)
- end
- end
- end
- end
- end
- if trace_bugs then
- logwarning("%s: no matching anchors for mark %s and baselig %s",pref(kind,lookupname),gref(markchar),gref(basechar))
- end
- end
- end
- elseif trace_bugs then
- -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(basechar))
- onetimemessage(currentfont,basechar,"no base anchors",report_fonts)
- end
- elseif trace_bugs then
- logwarning("%s: prev node is no char",pref(kind,lookupname))
- end
- elseif trace_bugs then
- logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar))
- end
- return head, start, false
-end
-
-function handlers.gpos_mark2mark(head,start,kind,lookupname,markanchors,sequence)
- local markchar = getchar(start)
- if marks[markchar] then
- local base = getprev(start) -- [glyph] [basemark] [start=mark]
- local slc = getligaindex(start)
- if slc then -- a rather messy loop ... needs checking with husayni
- while base do
- local blc = getligaindex(base)
- if blc and blc ~= slc then
- base = getprev(base)
- else
- break
- end
- end
- end
- if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then -- subtype test can go
- local basechar = getchar(base)
- local baseanchors = descriptions[basechar]
- if baseanchors then
- baseanchors = baseanchors.anchors
- if baseanchors then
- baseanchors = baseanchors['basemark']
- if baseanchors then
- local al = anchorlookups[lookupname]
- for anchor,ba in next, baseanchors do
- if al[anchor] then
- local ma = markanchors[anchor]
- if ma then
- local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar],true)
- if trace_marks then
- logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%p,%p)",
- pref(kind,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
- end
- return head, start, true
- end
- end
- end
- if trace_bugs then
- logwarning("%s: no matching anchors for mark %s and basemark %s",pref(kind,lookupname),gref(markchar),gref(basechar))
- end
- end
- end
- elseif trace_bugs then
- -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(basechar))
- onetimemessage(currentfont,basechar,"no base anchors",report_fonts)
- end
- elseif trace_bugs then
- logwarning("%s: prev node is no mark",pref(kind,lookupname))
- end
- elseif trace_bugs then
- logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar))
- end
- return head, start, false
-end
-
-function handlers.gpos_cursive(head,start,kind,lookupname,exitanchors,sequence) -- to be checked
- local alreadydone = cursonce and getprop(start,a_cursbase)
- if not alreadydone then
- local done = false
- local startchar = getchar(start)
- if marks[startchar] then
- if trace_cursive then
- logprocess("%s: ignoring cursive for mark %s",pref(kind,lookupname),gref(startchar))
- end
- else
- local nxt = getnext(start)
- while not done and nxt and getid(nxt) == glyph_code and getfont(nxt) == currentfont and getsubtype(nxt)<256 do
- local nextchar = getchar(nxt)
- if marks[nextchar] then
- -- should not happen (maybe warning)
- nxt = getnext(nxt)
- else
- local entryanchors = descriptions[nextchar]
- if entryanchors then
- entryanchors = entryanchors.anchors
- if entryanchors then
- entryanchors = entryanchors['centry']
- if entryanchors then
- local al = anchorlookups[lookupname]
- for anchor, entry in next, entryanchors do
- if al[anchor] then
- local exit = exitanchors[anchor]
- if exit then
- local dx, dy, bound = setcursive(start,nxt,tfmdata.parameters.factor,rlmode,exit,entry,characters[startchar],characters[nextchar])
- if trace_cursive then
- logprocess("%s: moving %s to %s cursive (%p,%p) using anchor %s and bound %s in rlmode %s",pref(kind,lookupname),gref(startchar),gref(nextchar),dx,dy,anchor,bound,rlmode)
- end
- done = true
- break
- end
- end
- end
- end
- end
- elseif trace_bugs then
- -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(startchar))
- onetimemessage(currentfont,startchar,"no entry anchors",report_fonts)
- end
- break
- end
- end
- end
- return head, start, done
- else
- if trace_cursive and trace_details then
- logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(getchar(start)),alreadydone)
- end
- return head, start, false
- end
-end
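
Cursive attachment chains glyphs by aligning the entry anchor of each glyph with the exit anchor of its predecessor, accumulating the vertical shift along the run; a much simplified sketch with invented anchor values (the real code also handles direction and the scale factor):

    local glyphs = {
        { name = "beh.init",  exit  = { x = 0,   y = 120 } },
        { name = "seen.medi", entry = { x = 310, y = 40 }, exit = { x = 0, y = 200 } },
        { name = "noon.fina", entry = { x = 280, y = 90 } },
    }

    local rise = 0
    for i=1,#glyphs-1 do
        local exit  = glyphs[i].exit
        local entry = glyphs[i+1].entry
        if exit and entry then
            rise = rise + (exit.y - entry.y)      -- accumulated vertical offset
            glyphs[i+1].yoffset = rise
        end
    end

    print(glyphs[2].yoffset, glyphs[3].yoffset)   -- 80  190
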
-
---[[ldx--
-I will implement multiple chain replacements once I run into a font that uses
-it. It's not that complex to handle.
---ldx]]--
-
-local chainprocs = { }
-
-local function logprocess(...)
- if trace_steps then
- registermessage(...)
- end
- report_subchain(...)
-end
-
-local logwarning = report_subchain
-
-local function logprocess(...)
- if trace_steps then
- registermessage(...)
- end
- report_chain(...)
-end
-
-local logwarning = report_chain
-
--- We could share functions but that would lead to extra function calls with many
--- arguments, redundant tests and confusing messages.
-
-function chainprocs.chainsub(head,start,stop,kind,chainname,currentcontext,lookuphash,lookuplist,chainlookupname)
- logwarning("%s: a direct call to chainsub cannot happen",cref(kind,chainname,chainlookupname))
- return head, start, false
-end
-
--- The reversesub is a special case, which is why we need to store the replacements
--- in a bit weird way. There is no lookup and the replacement comes from the lookup
--- itself. It is meant mostly for dealing with Urdu.
-
-function chainprocs.reversesub(head,start,stop,kind,chainname,currentcontext,lookuphash,replacements)
- local char = getchar(start)
- local replacement = replacements[char]
- if replacement then
- if trace_singles then
- logprocess("%s: single reverse replacement of %s by %s",cref(kind,chainname),gref(char),gref(replacement))
- end
- resetinjection(start)
- setchar(start,replacement)
- return head, start, true
- else
- return head, start, false
- end
-end
-
---[[ldx--
-This chain stuff is somewhat tricky since we can have a sequence of actions to be
-applied: single, alternate, multiple or ligature where ligature can be an invalid
-one in the sense that it will replace multiple by one but not necessarily one that
-looks like the combination (i.e. it is the counterpart of multiple then). For
-example, the following is valid:
-
-xxxabcdexxx [single a->A][multiple b->BCD][ligature cde->E] xxxABCDExxx
---ldx]]--
-
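
The example above can be played through on a plain array of strings; a toy sketch (apply and the action tables are invented, and real processing of course works on node lists, not strings):

    local function apply(chars,i,actions)
        for _, action in ipairs(actions) do
            local kind, spec = action[1], action[2]
            if kind == "single" then
                chars[i] = spec[chars[i]] or chars[i]
                i = i + 1
            elseif kind == "multiple" then
                local m = spec[chars[i]]
                if m then
                    table.remove(chars,i)
                    for j=#m,1,-1 do table.insert(chars,i,m[j]) end
                    i = i + #m
                else
                    i = i + 1
                end
            elseif kind == "ligature" then
                for from, to in pairs(spec) do
                    local len = #from
                    if i+len-1 <= #chars and table.concat(chars,"",i,i+len-1) == from then
                        for _=1,len do table.remove(chars,i) end
                        table.insert(chars,i,to)
                        i = i + 1
                        break
                    end
                end
            end
        end
        return chars
    end

    local chars = { "x","x","x","a","b","c","d","e","x","x","x" }
    apply(chars,4,{
        { "single",   { a = "A" } },
        { "multiple", { b = { "B","C","D" } } },
        { "ligature", { cde = "E" } },
    })
    print(table.concat(chars))                    -- xxxABCDExxx
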
---[[ldx--
-Here we replace start by a single variant.
---ldx]]--
-
-function chainprocs.gsub_single(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex)
- -- todo: marks ?
- local current = start
- local subtables = currentlookup.subtables
- if #subtables > 1 then
- logwarning("todo: check if we need to loop over the replacements: % t",subtables)
- end
- while current do
- if getid(current) == glyph_code then
- local currentchar = getchar(current)
- local lookupname = subtables[1] -- only 1
- local replacement = lookuphash[lookupname]
- if not replacement then
- if trace_bugs then
- logwarning("%s: no single hits",cref(kind,chainname,chainlookupname,lookupname,chainindex))
- end
- else
- replacement = replacement[currentchar]
- if not replacement or replacement == "" then
- if trace_bugs then
- logwarning("%s: no single for %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(currentchar))
- end
- else
- if trace_singles then
- logprocess("%s: replacing single %s by %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(currentchar),gref(replacement))
- end
- resetinjection(current)
- setchar(current,replacement)
- end
- end
- return head, start, true
- elseif current == stop then
- break
- else
- current = getnext(current)
- end
- end
- return head, start, false
-end
-
---[[ldx--
-Here we replace start by a sequence of new glyphs.
---ldx]]--
-
-function chainprocs.gsub_multiple(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
- -- local head, n = delete_till_stop(head,start,stop)
- local startchar = getchar(start)
- local subtables = currentlookup.subtables
- local lookupname = subtables[1]
- local replacements = lookuphash[lookupname]
- if not replacements then
- if trace_bugs then
- logwarning("%s: no multiple hits",cref(kind,chainname,chainlookupname,lookupname))
- end
- else
- replacements = replacements[startchar]
- if not replacements or replacements == "" then
- if trace_bugs then
- logwarning("%s: no multiple for %s",cref(kind,chainname,chainlookupname,lookupname),gref(startchar))
- end
- else
- if trace_multiples then
- logprocess("%s: replacing %s by multiple characters %s",cref(kind,chainname,chainlookupname,lookupname),gref(startchar),gref(replacements))
- end
- return multiple_glyphs(head,start,replacements,currentlookup.flags[1])
- end
- end
- return head, start, false
-end
-
---[[ldx--
-Here we replace start by a new glyph. First we delete the rest of the match.
---ldx]]--
-
--- char_1 mark_1 -> char_x mark_1 (ignore marks)
--- char_1 mark_1 -> char_x
-
--- to be checked: do we always have just one glyph?
--- we can also have alternates for marks
--- marks come last anyway
--- are there cases where we need to delete the mark
-
-function chainprocs.gsub_alternate(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
- local current = start
- local subtables = currentlookup.subtables
- local value = featurevalue == true and tfmdata.shared.features[kind] or featurevalue
- while current do
- if getid(current) == glyph_code then -- is this check needed?
- local currentchar = getchar(current)
- local lookupname = subtables[1]
- local alternatives = lookuphash[lookupname]
- if not alternatives then
- if trace_bugs then
- logwarning("%s: no alternative hit",cref(kind,chainname,chainlookupname,lookupname))
- end
- else
- alternatives = alternatives[currentchar]
- if alternatives then
- local choice, comment = get_alternative_glyph(current,alternatives,value,trace_alternatives)
- if choice then
- if trace_alternatives then
- logprocess("%s: replacing %s by alternative %a to %s, %s",cref(kind,chainname,chainlookupname,lookupname),gref(currentchar),choice,gref(choice),comment)
- end
- resetinjection(start)
- setchar(start,choice)
- else
- if trace_alternatives then
- logwarning("%s: no variant %a for %s, %s",cref(kind,chainname,chainlookupname,lookupname),value,gref(currentchar),comment)
- end
- end
- elseif trace_bugs then
- logwarning("%s: no alternative for %s, %s",cref(kind,chainname,chainlookupname,lookupname),gref(currentchar),comment)
- end
- end
- return head, start, true
- elseif current == stop then
- break
- else
- current = getnext(current)
- end
- end
- return head, start, false
-end
-
---[[ldx--
-When we replace ligatures we use a helper that handles the marks. I might change
-this function (move code inline and handle the marks by a separate function). We
-assume rather stupid ligatures (no complex disc nodes).
---ldx]]--
-
-function chainprocs.gsub_ligature(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex)
- local startchar = getchar(start)
- local subtables = currentlookup.subtables
- local lookupname = subtables[1]
- local ligatures = lookuphash[lookupname]
- if not ligatures then
- if trace_bugs then
- logwarning("%s: no ligature hits",cref(kind,chainname,chainlookupname,lookupname,chainindex))
- end
- else
- ligatures = ligatures[startchar]
- if not ligatures then
- if trace_bugs then
- logwarning("%s: no ligatures starting with %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar))
- end
- else
- local s = getnext(start)
- local discfound = false
- local last = stop
- local nofreplacements = 1
- local skipmark = currentlookup.flags[1]
- while s do
- local id = getid(s)
- if id == disc_code then
- if not discfound then
- discfound = s
- end
- if s == stop then
- break -- okay? or before the disc
- else
- s = getnext(s)
- end
- else
- local schar = getchar(s)
- if skipmark and marks[schar] then -- marks
- s = getnext(s)
- else
- local lg = ligatures[schar]
- if lg then
- ligatures, last, nofreplacements = lg, s, nofreplacements + 1
- if s == stop then
- break
- else
- s = getnext(s)
- end
- else
- break
- end
- end
- end
- end
- local l2 = ligatures.ligature
- if l2 then
- if chainindex then
- stop = last
- end
- if trace_ligatures then
- if start == stop then
- logprocess("%s: replacing character %s by ligature %s case 3",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(l2))
- else
- logprocess("%s: replacing character %s upto %s by ligature %s case 4",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(getchar(stop)),gref(l2))
- end
- end
- head, start = toligature(kind,lookupname,head,start,stop,l2,currentlookup.flags[1],discfound)
- return head, start, true, nofreplacements, discfound
- elseif trace_bugs then
- if start == stop then
- logwarning("%s: replacing character %s by ligature fails",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar))
- else
- logwarning("%s: replacing character %s upto %s by ligature fails",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(getchar(stop)))
- end
- end
- end
- end
- return head, start, false, 0, false
-end
-
-function chainprocs.gpos_single(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex,sequence)
- -- untested .. needs checking for the new model
- local startchar = getchar(start)
- local subtables = currentlookup.subtables
- local lookupname = subtables[1]
- local kerns = lookuphash[lookupname]
- if kerns then
- kerns = kerns[startchar] -- needed ?
- if kerns then
- local dx, dy, w, h = setpair(start,tfmdata.parameters.factor,rlmode,sequence.flags[4],kerns) -- ,characters[startchar])
- if trace_kerns then
- logprocess("%s: shifting single %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),dx,dy,w,h)
- end
- end
- end
- return head, start, false
-end
-
-function chainprocs.gpos_pair(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex,sequence)
- local snext = getnext(start)
- if snext then
- local startchar = getchar(start)
- local subtables = currentlookup.subtables
- local lookupname = subtables[1]
- local kerns = lookuphash[lookupname]
- if kerns then
- kerns = kerns[startchar]
- if kerns then
- local lookuptype = lookuptypes[lookupname]
- local prev, done = start, false
- local factor = tfmdata.parameters.factor
- while snext and getid(snext) == glyph_code and getfont(snext) == currentfont and getsubtype(snext)<256 do
- local nextchar = getchar(snext)
- local krn = kerns[nextchar]
- if not krn and marks[nextchar] then
- prev = snext
- snext = getnext(snext)
- else
- if not krn then
- -- skip
- elseif type(krn) == "table" then
- if lookuptype == "pair" then
- local a, b = krn[2], krn[3]
- if a and #a > 0 then
- local startchar = getchar(start)
- local x, y, w, h = setpair(start,factor,rlmode,sequence.flags[4],a) -- ,characters[startchar])
- if trace_kerns then
- logprocess("%s: shifting first of pair %s and %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h)
- end
- end
- if b and #b > 0 then
- local startchar = getchar(start)
- local x, y, w, h = setpair(snext,factor,rlmode,sequence.flags[4],b) -- ,characters[nextchar])
- if trace_kerns then
- logprocess("%s: shifting second of pair %s and %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h)
- end
- end
- else
- report_process("%s: check this out (old kern stuff)",cref(kind,chainname,chainlookupname))
- -- local a, b = krn[2], krn[6]
- -- if a and a ~= 0 then
- -- local k = setkern(snext,factor,rlmode,a)
- -- if trace_kerns then
- -- logprocess("%s: inserting first kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(getchar(prev)),gref(nextchar))
- -- end
- -- end
- -- if b and b ~= 0 then
- -- logwarning("%s: ignoring second kern xoff %s",cref(kind,chainname,chainlookupname),b*factor)
- -- end
- end
- done = true
- elseif krn ~= 0 then
- local k = setkern(snext,factor,rlmode,krn)
- if trace_kerns then
- logprocess("%s: inserting kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(getchar(prev)),gref(nextchar))
- end
- done = true
- end
- break
- end
- end
- return head, start, done
- end
- end
- end
- return head, start, false
-end
-
-function chainprocs.gpos_mark2base(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
- local markchar = getchar(start)
- if marks[markchar] then
- local subtables = currentlookup.subtables
- local lookupname = subtables[1]
- local markanchors = lookuphash[lookupname]
- if markanchors then
- markanchors = markanchors[markchar]
- end
- if markanchors then
- local base = getprev(start) -- [glyph] [start=mark]
- if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then
- local basechar = getchar(base)
- if marks[basechar] then
- while true do
- base = getprev(base)
- if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then
- basechar = getchar(base)
- if not marks[basechar] then
- break
- end
- else
- if trace_bugs then
- logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar))
- end
- return head, start, false
- end
- end
- end
- local baseanchors = descriptions[basechar].anchors
- if baseanchors then
- local baseanchors = baseanchors['basechar']
- if baseanchors then
- local al = anchorlookups[lookupname]
- for anchor,ba in next, baseanchors do
- if al[anchor] then
- local ma = markanchors[anchor]
- if ma then
- local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar])
- if trace_marks then
- logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%p,%p)",
- cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
- end
- return head, start, true
- end
- end
- end
- if trace_bugs then
- logwarning("%s, no matching anchors for mark %s and base %s",cref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar))
- end
- end
- end
- elseif trace_bugs then
- logwarning("%s: prev node is no char",cref(kind,chainname,chainlookupname,lookupname))
- end
- elseif trace_bugs then
- logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar))
- end
- elseif trace_bugs then
- logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar))
- end
- return head, start, false
-end
-
-function chainprocs.gpos_mark2ligature(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
- local markchar = getchar(start)
- if marks[markchar] then
- local subtables = currentlookup.subtables
- local lookupname = subtables[1]
- local markanchors = lookuphash[lookupname]
- if markanchors then
- markanchors = markanchors[markchar]
- end
- if markanchors then
- local base = getprev(start) -- [glyph] [optional marks] [start=mark]
- if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then
- local basechar = getchar(base)
- if marks[basechar] then
- while true do
- base = getprev(base)
- if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then
- basechar = getchar(base)
- if not marks[basechar] then
- break
- end
- else
- if trace_bugs then
- logwarning("%s: no base for mark %s",cref(kind,chainname,chainlookupname,lookupname),markchar)
- end
- return head, start, false
- end
- end
- end
- -- todo: like marks a ligatures hash
- local index = getligaindex(start)
- local baseanchors = descriptions[basechar].anchors
- if baseanchors then
- local baseanchors = baseanchors['baselig']
- if baseanchors then
- local al = anchorlookups[lookupname]
- for anchor,ba in next, baseanchors do
- if al[anchor] then
- local ma = markanchors[anchor]
- if ma then
- ba = ba[index]
- if ba then
- local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar])
- if trace_marks then
- logprocess("%s, anchor %s, bound %s: anchoring mark %s to baselig %s at index %s => (%p,%p)",
- cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),index,dx,dy)
- end
- return head, start, true
- end
- end
- end
- end
- if trace_bugs then
- logwarning("%s: no matching anchors for mark %s and baselig %s",cref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar))
- end
- end
- end
- elseif trace_bugs then
- logwarning("feature %s, lookup %s: prev node is no char",kind,lookupname)
- end
- elseif trace_bugs then
- logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar))
- end
- elseif trace_bugs then
- logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar))
- end
- return head, start, false
-end
-
-function chainprocs.gpos_mark2mark(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
- local markchar = getchar(start)
- if marks[markchar] then
- -- local markanchors = descriptions[markchar].anchors markanchors = markanchors and markanchors.mark
- local subtables = currentlookup.subtables
- local lookupname = subtables[1]
- local markanchors = lookuphash[lookupname]
- if markanchors then
- markanchors = markanchors[markchar]
- end
- if markanchors then
- local base = getprev(start) -- [glyph] [basemark] [start=mark]
- local slc = getligaindex(start)
- if slc then -- a rather messy loop ... needs checking with husayni
- while base do
- local blc = getligaindex(base)
- if blc and blc ~= slc then
- base = getprev(base)
- else
- break
- end
- end
- end
- if base and getid(base) == glyph_code and getfont(base) == currentfont and getsubtype(base)<256 then -- subtype test can go
- local basechar = getchar(base)
- local baseanchors = descriptions[basechar].anchors
- if baseanchors then
- baseanchors = baseanchors['basemark']
- if baseanchors then
- local al = anchorlookups[lookupname]
- for anchor,ba in next, baseanchors do
- if al[anchor] then
- local ma = markanchors[anchor]
- if ma then
- local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma,characters[basechar],true)
- if trace_marks then
- logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%p,%p)",
- cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
- end
- return head, start, true
- end
- end
- end
- if trace_bugs then
- logwarning("%s: no matching anchors for mark %s and basemark %s",gref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar))
- end
- end
- end
- elseif trace_bugs then
- logwarning("%s: prev node is no mark",cref(kind,chainname,chainlookupname,lookupname))
- end
- elseif trace_bugs then
- logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar))
- end
- elseif trace_bugs then
- logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar))
- end
- return head, start, false
-end
-
-function chainprocs.gpos_cursive(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
- local alreadydone = cursonce and getprop(start,a_cursbase)
- if not alreadydone then
- local startchar = getchar(start)
- local subtables = currentlookup.subtables
- local lookupname = subtables[1]
- local exitanchors = lookuphash[lookupname]
- if exitanchors then
- exitanchors = exitanchors[startchar]
- end
- if exitanchors then
- local done = false
- if marks[startchar] then
- if trace_cursive then
- logprocess("%s: ignoring cursive for mark %s",pref(kind,lookupname),gref(startchar))
- end
- else
- local nxt = getnext(start)
- while not done and nxt and getid(nxt) == glyph_code and getfont(nxt) == currentfont and getsubtype(nxt)<256 do
- local nextchar = getchar(nxt)
- if marks[nextchar] then
- -- should not happen (maybe warning)
- nxt = getnext(nxt)
- else
- local entryanchors = descriptions[nextchar]
- if entryanchors then
- entryanchors = entryanchors.anchors
- if entryanchors then
- entryanchors = entryanchors['centry']
- if entryanchors then
- local al = anchorlookups[lookupname]
- for anchor, entry in next, entryanchors do
- if al[anchor] then
- local exit = exitanchors[anchor]
- if exit then
- local dx, dy, bound = setcursive(start,nxt,tfmdata.parameters.factor,rlmode,exit,entry,characters[startchar],characters[nextchar])
- if trace_cursive then
- logprocess("%s: moving %s to %s cursive (%p,%p) using anchor %s and bound %s in rlmode %s",pref(kind,lookupname),gref(startchar),gref(nextchar),dx,dy,anchor,bound,rlmode)
- end
- done = true
- break
- end
- end
- end
- end
- end
- elseif trace_bugs then
- -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(startchar))
- onetimemessage(currentfont,startchar,"no entry anchors",report_fonts)
- end
- break
- end
- end
- end
- return head, start, done
- else
- if trace_cursive and trace_details then
- logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(getchar(start)),alreadydone)
- end
- return head, start, false
- end
- end
- return head, start, false
-end
-
--- what pointer to return, spec says stop
--- to be discussed ... is bidi changer a space?
--- elseif char == zwnj and sequence[n][32] then -- brrr
-
--- somehow l or f is global
--- we don't need to pass the currentcontext, saves a bit
--- make a slow variant that can be activated but with more tracing
-
-local function show_skip(kind,chainname,char,ck,class)
- if ck[9] then
- logwarning("%s: skipping char %s, class %a, rule %a, lookuptype %a, %a => %a",cref(kind,chainname),gref(char),class,ck[1],ck[2],ck[9],ck[10])
- else
- logwarning("%s: skipping char %s, class %a, rule %a, lookuptype %a",cref(kind,chainname),gref(char),class,ck[1],ck[2])
- end
-end
-
--- A previous version had disc collapsing code in the (single sub) handler plus some
--- checking in the main loop, but that left the pre/post sequences undone. The best
--- solution is to add some checking there and backtrack when a replace/post matches
--- but it takes a bit of work to figure out an efficient way (this is what the sweep*
--- names refer to). I might look into that variant one day again as it can replace
--- some other code too. In that approach we can have a special version for gsub and gpos
--- which gains some speed. This method does the test and passes info to the handlers
--- (sweepnode, sweepmode, sweepprev, sweepnext, etc). Here collapsing is handled in the
--- main loop which also makes code elsewhere simpler (i.e. no need for the other special
--- runners and disc code in ligature building). I also experimented with pushing preceding
--- glyph sequences in the replace/pre fields beforehand, which saves checking afterwards
--- but costs duplicate glyphs (memory) and adds too much runtime overhead.
---
--- In the meantime Kai had moved the code from the single chain into a more general handler
--- and this one (renamed to chaindisk) is used now. I optimized the code a bit and brought
--- it in sync with the other code. Hopefully I didn't introduce errors. Note: this somewhat
--- complex approach is meant for fonts that implement (for instance) ligatures by character
--- replacement, which to some extent is not that suitable for hyphenation. I also use some
--- helpers. This method passes some states but reparses the list. There is room for a bit of
--- speed up but that will be done in the context version. (In fact a partial rewrite of all
--- code can bring some more efficiency.)
---
--- I didn't test it with extremes but successive disc nodes still can give issues but in
--- order to handle that we need more complex code which also slows down even more. The main
--- loop variant could deal with that: test, collapse, backtrack.
-
-local function chaindisk(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,chainindex,sequence,chainproc)
-
- if not start then
- return head, start, false
- end
-
- local startishead = start == head
- local seq = ck[3]
- local f = ck[4]
- local l = ck[5]
- local s = #seq
- local done = false
- local sweepnode = sweepnode
- local sweeptype = sweeptype
- local sweepoverflow = false
- local checkdisc = getprev(head) -- hm bad name head
- local keepdisc = not sweepnode
- local lookaheaddisc = nil
- local backtrackdisc = nil
- local current = start
- local last = start
- local prev = getprev(start)
-
- -- fishy: so we can overflow and then go on in the sweep?
-
- local i = f
- while i <= l do
- local id = getid(current)
- if id == glyph_code then
- i = i + 1
- last = current
- current = getnext(current)
- elseif id == disc_code then
- if keepdisc then
- keepdisc = false
- if notmatchpre[current] ~= notmatchreplace[current] then
- lookaheaddisc = current
- end
- local replace = getfield(current,"replace")
- while replace and i <= l do
- if getid(replace) == glyph_code then
- i = i + 1
- end
- replace = getnext(replace)
- end
- last = current
- current = getnext(current)
- else
- head, current = flattendisk(head,current)
- end
- else
- last = current
- current = getnext(current)
- end
- if current then
- -- go on
- elseif sweepoverflow then
- -- we are already following up on sweepnode
- break
- elseif sweeptype == "post" or sweeptype == "replace" then
- current = getnext(sweepnode)
- if current then
- sweeptype = nil
- sweepoverflow = true
- else
- break
- end
- else
- break -- added
- end
- end
-
- if sweepoverflow then
- local prev = current and getprev(current)
- if not current or prev ~= sweepnode then
- local head = getnext(sweepnode)
- local tail = nil
- if prev then
- tail = prev
- setprev(current,sweepnode)
- else
- tail = find_node_tail(head)
- end
- setnext(sweepnode,current)
- setprev(head,nil)
- setnext(tail,nil)
- appenddisc(sweepnode,head)
- end
- end
-
- if l < s then
- local i = l
- local t = sweeptype == "post" or sweeptype == "replace"
- while current and i < s do
- local id = getid(current)
- if id == glyph_code then
- i = i + 1
- current = getnext(current)
- elseif id == disc_code then
- if keepdisc then
- keepdisc = false
- if notmatchpre[current] ~= notmatchreplace[current] then
- lookaheaddisc = current
- end
- local replace = getfield(current,"replace")
- while replace and i < s do
- if getid(replace) == glyph_code then
- i = i + 1
- end
- replace = getnext(replace)
- end
- current = getnext(current)
- elseif notmatchpre[current] ~= notmatchreplace[current] then
- head, current = flattendisk(head,current)
- else
- current = getnext(current) -- HH
- end
- else
- current = getnext(current)
- end
- if not current and t then
- current = getnext(sweepnode)
- if current then
- sweeptype = nil
- end
- end
- end
- end
-
- if f > 1 then
- local current = prev
- local i = f
- local t = sweeptype == "pre" or sweeptype == "replace"
- if not current and t and current == checkdisc then
- current = getprev(sweepnode)
- end
- while current and i > 1 do -- missing getprev added / moved outside
- local id = getid(current)
- if id == glyph_code then
- i = i - 1
- elseif id == disc_code then
- if keepdisc then
- keepdisc = false
- if notmatchpost[current] ~= notmatchreplace[current] then
- backtrackdisc = current
- end
- local replace = getfield(current,"replace")
- while replace and i > 1 do
- if getid(replace) == glyph_code then
- i = i - 1
- end
- replace = getnext(replace)
- end
- elseif notmatchpost[current] ~= notmatchreplace[current] then
- head, current = flattendisk(head,current)
- end
- end
- current = getprev(current)
- if t and current == checkdisc then
- current = getprev(sweepnode)
- end
- end
- end
-
- local ok = false
- if lookaheaddisc then
-
- local cf = start
- local cl = getprev(lookaheaddisc)
- local cprev = getprev(start)
- local insertedmarks = 0
-
- while cprev and getid(cf) == glyph_code and getfont(cf) == currentfont and getsubtype(cf) < 256 and marks[getchar(cf)] do
- insertedmarks = insertedmarks + 1
- cf = cprev
- startishead = cf == head
- cprev = getprev(cprev)
- end
-
- setprev(lookaheaddisc,cprev)
- if cprev then
- setnext(cprev,lookaheaddisc)
- end
- setprev(cf,nil)
- setnext(cl,nil)
- if startishead then
- head = lookaheaddisc
- end
-
- local replace = getfield(lookaheaddisc,"replace")
- local pre = getfield(lookaheaddisc,"pre")
- local new = copy_node_list(cf)
- local cnew = new
- for i=1,insertedmarks do
- cnew = getnext(cnew)
- end
- local clast = cnew
- for i=f,l do
- clast = getnext(clast)
- end
- if not notmatchpre[lookaheaddisc] then
- cf, start, ok = chainproc(cf,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,nil,sequence)
- end
- if not notmatchreplace[lookaheaddisc] then
- new, cnew, ok = chainproc(new,cnew,clast,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,nil,sequence)
- end
- if pre then
- setnext(cl,pre)
- setprev(pre,cl)
- end
- if replace then
- local tail = find_node_tail(new)
- setnext(tail,replace)
- setprev(replace,tail)
- end
- setfield(lookaheaddisc,"pre",cf) -- also updates tail
- setfield(lookaheaddisc,"replace",new) -- also updates tail
-
- start = getprev(lookaheaddisc)
- sweephead[cf] = getnext(clast)
- sweephead[new] = getnext(last)
-
- elseif backtrackdisc then
-
- local cf = getnext(backtrackdisc)
- local cl = start
- local cnext = getnext(start)
- local insertedmarks = 0
-
- while cnext and getid(cnext) == glyph_code and getfont(cnext) == currentfont and getsubtype(cnext) < 256 and marks[getchar(cnext)] do
- insertedmarks = insertedmarks + 1
- cl = cnext
- cnext = getnext(cnext)
- end
- if cnext then
- setprev(cnext,backtrackdisc)
- end
- setnext(backtrackdisc,cnext)
- setprev(cf,nil)
- setnext(cl,nil)
- local replace = getfield(backtrackdisc,"replace")
- local post = getfield(backtrackdisc,"post")
- local new = copy_node_list(cf)
- local cnew = find_node_tail(new)
- for i=1,insertedmarks do
- cnew = getprev(cnew)
- end
- local clast = cnew
- for i=f,l do
- clast = getnext(clast)
- end
- if not notmatchpost[backtrackdisc] then
- cf, start, ok = chainproc(cf,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,nil,sequence)
- end
- if not notmatchreplace[backtrackdisc] then
- new, cnew, ok = chainproc(new,cnew,clast,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,nil,sequence)
- end
- if post then
- local tail = find_node_tail(post)
- setnext(tail,cf)
- setprev(cf,tail)
- else
- post = cf
- end
- if replace then
- local tail = find_node_tail(replace)
- setnext(tail,new)
- setprev(new,tail)
- else
- replace = new
- end
- setfield(backtrackdisc,"post",post) -- also updates tail
- setfield(backtrackdisc,"replace",replace) -- also updates tail
- start = getprev(backtrackdisc)
- sweephead[post] = getnext(clast)
- sweephead[replace] = getnext(last)
-
- else
-
- head, start, ok = chainproc(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,nil,sequence)
-
- end
-
- return head, start, ok
-end
-
-local function normal_handle_contextchain(head,start,kind,chainname,contexts,sequence,lookuphash)
- local sweepnode = sweepnode
- local sweeptype = sweeptype
- local diskseen = false
- local checkdisc = getprev(head)
- local flags = sequence.flags
- local done = false
- local skipmark = flags[1]
- local skipligature = flags[2]
- local skipbase = flags[3]
- local markclass = sequence.markclass
- local skipped = false
-
- for k=1,#contexts do -- i've only seen ccmp having > 1 (e.g. dejavu)
- local match = true
- local current = start
- local last = start
- local ck = contexts[k]
- local seq = ck[3]
- local s = #seq
- -- f..l = mid string
- if s == 1 then
- -- never happens
- match = getid(current) == glyph_code and getfont(current) == currentfont and getsubtype(current)<256 and seq[1][getchar(current)]
- else
- -- maybe we need a better space check (maybe check for glue or category or combination)
- -- we cannot optimize for n=2 because there can be disc nodes
- local f = ck[4]
- local l = ck[5]
- -- current match
- if f == 1 and f == l then -- current only
- -- already a hit
- -- match = true
- else -- before/current/after | before/current | current/after
- -- no need to test first hit (to be optimized)
- if f == l then -- new, else last out of sync (f is > 1)
- -- match = true
- else
- local discfound = nil
- local n = f + 1
- last = getnext(last)
- while n <= l do
- if not last and (sweeptype == "post" or sweeptype == "replace") then
- last = getnext(sweepnode)
- sweeptype = nil
- end
- if last then
- local id = getid(last)
- if id == glyph_code then
- if getfont(last) == currentfont and getsubtype(last)<256 then
- local char = getchar(last)
- local ccd = descriptions[char]
- if ccd then
- local class = ccd.class or "base"
- if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then
- skipped = true
- if trace_skips then
- show_skip(kind,chainname,char,ck,class)
- end
- last = getnext(last)
- elseif seq[n][char] then
- if n < l then
- last = getnext(last)
- end
- n = n + 1
- else
- if discfound then
- notmatchreplace[discfound] = true
- match = not notmatchpre[discfound]
- else
- match = false
- end
- break
- end
- else
- if discfound then
- notmatchreplace[discfound] = true
- match = not notmatchpre[discfound]
- else
- match = false
- end
- break
- end
- else
- if discfound then
- notmatchreplace[discfound] = true
- match = not notmatchpre[discfound]
- else
- match = false
- end
- break
- end
- elseif id == disc_code then
- diskseen = true
- discfound = last
- notmatchpre[last] = nil
- notmatchpost[last] = true
- notmatchreplace[last] = nil
- local pre = getfield(last,"pre")
- local replace = getfield(last,"replace")
- if pre then
- local n = n
- while pre do
- if seq[n][getchar(pre)] then
- n = n + 1
- pre = getnext(pre)
- if n > l then
- break
- end
- else
- notmatchpre[last] = true
- break
- end
- end
- if n <= l then
- notmatchpre[last] = true
- end
- else
- notmatchpre[last] = true
- end
- if replace then
- -- so far we never entered this branch
- while replace do
- if seq[n][getchar(replace)] then
- n = n + 1
- replace = getnext(replace)
- if n > l then
- break
- end
- else
- notmatchreplace[last] = true
- match = not notmatchpre[last]
- break
- end
- end
- match = not notmatchpre[last]
- end
- last = getnext(last)
- else
- match = false
- break
- end
- else
- match = false
- break
- end
- end
- end
- end
- -- before
- if match and f > 1 then
- local prev = getprev(start)
- if prev then
- if prev == checkdisc and (sweeptype == "pre" or sweeptype == "replace") then
- prev = getprev(sweepnode)
- -- sweeptype = nil
- end
- if prev then
- local discfound = nil
- local n = f - 1
- while n >= 1 do
- if prev then
- local id = getid(prev)
- if id == glyph_code then
- if getfont(prev) == currentfont and getsubtype(prev)<256 then -- normal char
- local char = getchar(prev)
- local ccd = descriptions[char]
- if ccd then
- local class = ccd.class
- if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then
- skipped = true
- if trace_skips then
- show_skip(kind,chainname,char,ck,class)
- end
- elseif seq[n][char] then
- n = n -1
- else
- if discfound then
- notmatchreplace[discfound] = true
- match = not notmatchpost[discfound]
- else
- match = false
- end
- break
- end
- else
- if discfound then
- notmatchreplace[discfound] = true
- match = not notmatchpost[discfound]
- else
- match = false
- end
- break
- end
- else
- if discfound then
- notmatchreplace[discfound] = true
- match = not notmatchpost[discfound]
- else
- match = false
- end
- break
- end
- elseif id == disc_code then
- -- the special case: f i where i becomes dotless i ..
- diskseen = true
- discfound = prev
- notmatchpre[prev] = true
- notmatchpost[prev] = nil
- notmatchreplace[prev] = nil
- local pre = getfield(prev,"pre")
- local post = getfield(prev,"post")
- local replace = getfield(prev,"replace")
- if pre ~= start and post ~= start and replace ~= start then
- if post then
- local n = n
- local posttail = find_node_tail(post)
- while posttail do
- if seq[n][getchar(posttail)] then
- n = n - 1
- if posttail == post then
- break
- else
- posttail = getprev(posttail)
- if n < 1 then
- break
- end
- end
- else
- notmatchpost[prev] = true
- break
- end
- end
- if n >= 1 then
- notmatchpost[prev] = true
- end
- else
- notmatchpost[prev] = true
- end
- if replace then
- -- we seldom enter this branch (e.g. on brill efficient)
- local replacetail = find_node_tail(replace)
- while replacetail do
- if seq[n][getchar(replacetail)] then
- n = n - 1
- if replacetail == replace then
- break
- else
- replacetail = getprev(replacetail)
- if n < 1 then
- break
- end
- end
- else
- notmatchreplace[prev] = true
- match = not notmatchpost[prev]
- break
- end
- end
- if not match then
- break
- end
- else
- -- skip 'm
- end
- else
- -- skip 'm
- end
- elseif seq[n][32] then
- n = n -1
- else
- match = false
- break
- end
- prev = getprev(prev)
- elseif seq[n][32] then -- somewhat special, as zapfino can have many preceding spaces
- n = n - 1
- else
- match = false
- break
- end
- end
- else
- match = false
- end
- else
- match = false
- end
- end
- -- after
- if match and s > l then
- local current = last and getnext(last)
- if not current then
- if sweeptype == "post" or sweeptype == "replace" then
- current = getnext(sweepnode)
- -- sweeptype = nil
- end
- end
- if current then
- local discfound = nil
- -- removed optimization for s-l == 1, we have to deal with marks anyway
- local n = l + 1
- while n <= s do
- if current then
- local id = getid(current)
- if id == glyph_code then
- if getfont(current) == currentfont and getsubtype(current)<256 then -- normal char
- local char = getchar(current)
- local ccd = descriptions[char]
- if ccd then
- local class = ccd.class
- if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then
- skipped = true
- if trace_skips then
- show_skip(kind,chainname,char,ck,class)
- end
- elseif seq[n][char] then
- n = n + 1
- else
- if discfound then
- notmatchreplace[discfound] = true
- match = not notmatchpre[discfound]
- else
- match = false
- end
- break
- end
- else
- if discfound then
- notmatchreplace[discfound] = true
- match = not notmatchpre[discfound]
- else
- match = false
- end
- break
- end
- else
- if discfound then
- notmatchreplace[discfound] = true
- match = not notmatchpre[discfound]
- else
- match = false
- end
- break
- end
- elseif id == disc_code then
- diskseen = true
- discfound = current
- notmatchpre[current] = nil
- notmatchpost[current] = true
- notmatchreplace[current] = nil
- local pre = getfield(current,"pre")
- local replace = getfield(current,"replace")
- if pre then
- local n = n
- while pre do
- if seq[n][getchar(pre)] then
- n = n + 1
- pre = getnext(pre)
- if n > s then
- break
- end
- else
- notmatchpre[current] = true
- break
- end
- end
- if n <= s then
- notmatchpre[current] = true
- end
- else
- notmatchpre[current] = true
- end
- if replace then
- -- so far we never entered this branch
- while replace do
- if seq[n][getchar(replace)] then
- n = n + 1
- replace = getnext(replace)
- if n > s then
- break
- end
- else
- notmatchreplace[current] = true
- match = not notmatchpre[current]
- break
- end
- end
- if not match then
- break
- end
- else
- -- skip 'm
- end
- elseif seq[n][32] then -- brrr
- n = n + 1
- else
- match = false
- break
- end
- current = getnext(current)
- elseif seq[n][32] then
- n = n + 1
- else
- match = false
- break
- end
- end
- else
- match = false
- end
- end
- end
- if match then
- -- can lookups be of a different type ?
- local diskchain = diskseen or sweepnode
- if trace_contexts then
- local rule, lookuptype, f, l = ck[1], ck[2], ck[4], ck[5]
- local char = getchar(start)
- if ck[9] then
- logwarning("%s: rule %s matches at char %s for (%s,%s,%s) chars, lookuptype %a, %a => %a",
- cref(kind,chainname),rule,gref(char),f-1,l-f+1,s-l,lookuptype,ck[9],ck[10])
- else
- logwarning("%s: rule %s matches at char %s for (%s,%s,%s) chars, lookuptype %a",
- cref(kind,chainname),rule,gref(char),f-1,l-f+1,s-l,lookuptype)
- end
- end
- local chainlookups = ck[6]
- if chainlookups then
- local nofchainlookups = #chainlookups
- -- we can speed this up if needed
- if nofchainlookups == 1 then
- local chainlookupname = chainlookups[1]
- local chainlookup = lookuptable[chainlookupname]
- if chainlookup then
- local chainproc = chainprocs[chainlookup.type]
- if chainproc then
- local ok
- if diskchain then
- head, start, ok = chaindisk(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,nil,sequence,chainproc)
- else
- head, start, ok = chainproc(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,nil,sequence)
- end
- if ok then
- done = true
- end
- else
- logprocess("%s: %s is not yet supported",cref(kind,chainname,chainlookupname),chainlookup.type)
- end
- else -- shouldn't happen
- logprocess("%s is not yet supported",cref(kind,chainname,chainlookupname))
- end
- else
- local i = 1
- while start and true do
- if skipped then
- while true do -- todo: use properties
- local char = getchar(start)
- local ccd = descriptions[char]
- if ccd then
- local class = ccd.class or "base"
- if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then
- start = getnext(start)
- else
- break
- end
- else
- break
- end
- end
- end
- -- see the remark in the MS OpenType spec under LookupType 5: Contextual Substitution Subtable
- local chainlookupname = chainlookups[i]
- local chainlookup = lookuptable[chainlookupname]
- if not chainlookup then
- -- we just advance
- i = i + 1
- else
- local chainproc = chainprocs[chainlookup.type]
- if not chainproc then
- -- actually an error
- logprocess("%s: %s is not yet supported",cref(kind,chainname,chainlookupname),chainlookup.type)
- i = i + 1
- else
- local ok, n
- if diskchain then
- head, start, ok = chaindisk(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,nil,sequence,chainproc)
- else
- head, start, ok, n = chainproc(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,i,sequence)
- end
- -- messy since last can be changed !
- if ok then
- done = true
- if n and n > 1 then
- -- we have a ligature (cf. the spec we advance one position) but we really need to test it
- -- as there are fonts out there that are fuzzy and have too many lookups:
- --
- -- U+1105 U+119E U+1105 U+119E : sourcehansansklight: script=hang ccmp=yes
- --
- if i + n > nofchainlookups then
- -- if trace_contexts then
- -- logprocess("%s: quitting lookups",cref(kind,chainname))
- -- end
- break
- else
- -- we need to carry on
- end
- end
- end
- i = i + 1
- end
- end
- if i > nofchainlookups or not start then
- break
- elseif start then
- start = getnext(start)
- end
- end
- end
- else
- local replacements = ck[7]
- if replacements then
- head, start, done = chainprocs.reversesub(head,start,last,kind,chainname,ck,lookuphash,replacements) -- sequence
- else
- done = quit_on_no_replacement -- can be meant to be skipped / quite inconsistent in fonts
- if trace_contexts then
- logprocess("%s: skipping match",cref(kind,chainname))
- end
- end
- end
- if done then
- break -- out of contexts (new, needs checking)
- end
- end
- end
- if diskseen then -- maybe move up so that we can turn checking on/off
- notmatchpre = { }
- notmatchpost = { }
- notmatchreplace = { }
- end
- return head, start, done
-end
-
--- Because we want to keep this elsewhere (and because speed is less of an issue) we
--- pass the font id so that the verbose variant can access the relevant helper tables.
-
-local verbose_handle_contextchain = function(font,...)
- logwarning("no verbose handler installed, reverting to 'normal'")
- otf.setcontextchain()
- return normal_handle_contextchain(...)
-end
-
-otf.chainhandlers = {
- normal = normal_handle_contextchain,
- verbose = verbose_handle_contextchain,
-}
-
-local handle_contextchain = nil
-
--- normal_handle_contextchain(head,start,kind,chainname,contexts,sequence,lookuphash)
-
-local function chained_contextchain(head,start,stop,...)
- local steps = currentlookup.steps
- local nofsteps = currentlookup.nofsteps
- if nofsteps > 1 then
- reportmoresteps(dataset,sequence)
- end
- return handle_contextchain(head,start,...)
-end
-
-function otf.setcontextchain(method)
- if not method or method == "normal" or not otf.chainhandlers[method] then
- if handle_contextchain then -- no need for a message while making the format
- logwarning("installing normal contextchain handler")
- end
- handle_contextchain = normal_handle_contextchain
- else
- logwarning("installing contextchain handler %a",method)
- local handler = otf.chainhandlers[method]
- handle_contextchain = function(...)
- return handler(currentfont,...) -- hm, get rid of ...
- end
- end
-
- handlers.gsub_context = handle_contextchain
- handlers.gsub_contextchain = handle_contextchain
- handlers.gsub_reversecontextchain = handle_contextchain
- handlers.gpos_contextchain = handle_contextchain
- handlers.gpos_context = handle_contextchain
-
- handlers.contextchain = handle_contextchain
-
-end
-
-chainprocs.gsub_context = chained_contextchain
-chainprocs.gsub_contextchain = chained_contextchain
-chainprocs.gsub_reversecontextchain = chained_contextchain
-chainprocs.gpos_contextchain = chained_contextchain
-chainprocs.gpos_context = chained_contextchain
-
-otf.setcontextchain()
-
-local missing = { } -- we only report once
-
-local function logprocess(...)
- if trace_steps then
- registermessage(...)
- end
- report_process(...)
-end
-
-local logwarning = report_process
-
-local function report_missing_cache(typ,lookup)
- local f = missing[currentfont] if not f then f = { } missing[currentfont] = f end
- local t = f[typ] if not t then t = { } f[typ] = t end
- if not t[lookup] then
- t[lookup] = true
- logwarning("missing cache for lookup %a, type %a, font %a, name %a",lookup,typ,currentfont,tfmdata.properties.fullname)
- end
-end
-
-local resolved = { } -- we only resolve a font,script,language pair once
-
--- todo: pass all these 'locals' in a table
-
-local lookuphashes = { }
-
-setmetatableindex(lookuphashes, function(t,font)
- local lookuphash = fontdata[font].resources.lookuphash
- if not lookuphash or not next(lookuphash) then
- lookuphash = false
- end
- t[font] = lookuphash
- return lookuphash
-end)
-
--- fonts.hashes.lookups = lookuphashes
-
-local autofeatures = fonts.analyzers.features
-local featuretypes = otf.tables.featuretypes
-local defaultscript = otf.features.checkeddefaultscript
-local defaultlanguage = otf.features.checkeddefaultlanguage
-
-local function initialize(sequence,script,language,enabled,autoscript,autolanguage)
- local features = sequence.features
- if features then
- local order = sequence.order
- if order then
- local featuretype = featuretypes[sequence.type or "unknown"]
- for i=1,#order do
- local kind = order[i]
- local valid = enabled[kind]
- if valid then
- local scripts = features[kind]
- local languages = scripts and (
- scripts[script] or
- scripts[wildcard] or
- (autoscript and defaultscript(featuretype,autoscript,scripts))
- )
- local enabled = languages and (
- languages[language] or
- languages[wildcard] or
- (autolanguage and defaultlanguage(featuretype,autolanguage,languages))
- )
- if enabled then
- return { valid, autofeatures[kind] or false, sequence, kind }
- end
- end
- end
- else
- -- can't happen
- end
- end
- return false
-end
-
-function otf.dataset(tfmdata,font) -- generic variant, overloaded in context
- local shared = tfmdata.shared
- local properties = tfmdata.properties
- local language = properties.language or "dflt"
- local script = properties.script or "dflt"
- local enabled = shared.features
- local autoscript = enabled and enabled.autoscript
- local autolanguage = enabled and enabled.autolanguage
- local res = resolved[font]
- if not res then
- res = { }
- resolved[font] = res
- end
- local rs = res[script]
- if not rs then
- rs = { }
- res[script] = rs
- end
- local rl = rs[language]
- if not rl then
- rl = {
- -- indexed but we can also add specific data by key
- }
- rs[language] = rl
- local sequences = tfmdata.resources.sequences
- if sequences then
- for s=1,#sequences do
- local v = enabled and initialize(sequences[s],script,language,enabled,autoscript,autolanguage)
- if v then
- rl[#rl+1] = v
- end
- end
- end
- end
- return rl
-end
-
--- assumptions:
---
--- * languages that use complex disc nodes
-
-local function kernrun(disc,run)
- --
- -- we catch
- --
- if trace_kernruns then
- report_run("kern") -- will be more detailed
- end
- --
- local prev = getprev(disc) -- todo, keep these in the main loop
- local next = getnext(disc) -- todo, keep these in the main loop
- --
- local pre = getfield(disc,"pre")
- local post = getfield(disc,"post")
- local replace = getfield(disc,"replace")
- --
- local prevmarks = prev
- --
- -- can be optional, because why on earth do we get a disc after a mark (okay, maybe when a ccmp
- -- has happened but then it should be in the disc so basically this test indicates an error)
- --
- while prevmarks and getid(prevmarks) == glyph_code and marks[getchar(prevmarks)] and getfont(prevmarks) == currentfont and getsubtype(prevmarks) < 256 do
- prevmarks = getprev(prevmarks)
- end
- --
- if prev and (pre or replace) and not (getid(prev) == glyph_code and getfont(prev) == currentfont and getsubtype(prev)<256) then
- prev = false
- end
- if next and (post or replace) and not (getid(next) == glyph_code and getfont(next) == currentfont and getsubtype(next)<256) then
- next = false
- end
- --
- if not pre then
- -- go on
- elseif prev then
- local nest = getprev(pre)
- setprev(pre,prev)
- setnext(prev,pre)
- run(prevmarks,"preinjections")
- setprev(pre,nest)
- setnext(prev,disc)
- else
- run(pre,"preinjections")
- end
- --
- if not post then
- -- go on
- elseif next then
- local tail = find_node_tail(post)
- setnext(tail,next)
- setprev(next,tail)
- run(post,"postinjections",next)
- setnext(tail,nil)
- setprev(next,disc)
- else
- run(post,"postinjections")
- end
- --
- if not replace and prev and next then
- -- this should be already done by discfound
- setnext(prev,next)
- setprev(next,prev)
- run(prevmarks,"injections",next)
- setnext(prev,disc)
- setprev(next,disc)
- elseif prev and next then
- local tail = find_node_tail(replace)
- local nest = getprev(replace)
- setprev(replace,prev)
- setnext(prev,replace)
- setnext(tail,next)
- setprev(next,tail)
- run(prevmarks,"replaceinjections",next)
- setprev(replace,nest)
- setnext(prev,disc)
- setnext(tail,nil)
- setprev(next,disc)
- elseif prev then
- local nest = getprev(replace)
- setprev(replace,prev)
- setnext(prev,replace)
- run(prevmarks,"replaceinjections")
- setprev(replace,nest)
- setnext(prev,disc)
- elseif next then
- local tail = find_node_tail(replace)
- setnext(tail,next)
- setprev(next,tail)
- run(replace,"replaceinjections",next)
- setnext(tail,nil)
- setprev(next,disc)
- else
- run(replace,"replaceinjections")
- end
-end
-
--- the if new test might be dangerous as luatex will check / set some tail stuff
--- in a temp node
-
-local function comprun(disc,run)
- if trace_compruns then
- report_run("comp: %s",languages.serializediscretionary(disc))
- end
- --
- local pre = getfield(disc,"pre")
- if pre then
- sweepnode = disc
- sweeptype = "pre" -- in alternative code preinjections is used (also used then for proeprties, saves a variable)
- local new, done = run(pre)
- if done then
- setfield(disc,"pre",new)
- end
- end
- --
- local post = getfield(disc,"post")
- if post then
- sweepnode = disc
- sweeptype = "post"
- local new, done = run(post)
- if done then
- setfield(disc,"post",new)
- end
- end
- --
- local replace = getfield(disc,"replace")
- if replace then
- sweepnode = disc
- sweeptype = "replace"
- local new, done = run(replace)
- if done then
- setfield(disc,"replace",new)
- end
- end
- sweepnode = nil
- sweeptype = nil
-end
-
-local function testrun(disc,trun,crun) -- use helper
- local next = getnext(disc)
- if next then
- local replace = getfield(disc,"replace")
- if replace then
- local prev = getprev(disc)
- if prev then
- -- only look ahead
- local tail = find_node_tail(replace)
- -- local nest = getprev(replace)
- setnext(tail,next)
- setprev(next,tail)
- if trun(replace,next) then
- setfield(disc,"replace",nil) -- beware, side effects of nest so first
- setnext(prev,replace)
- setprev(replace,prev)
- setprev(next,tail)
- setnext(tail,next)
- setprev(disc,nil)
- setnext(disc,nil)
- flush_node_list(disc)
- return replace -- restart
- else
- setnext(tail,nil)
- setprev(next,disc)
- end
- else
- -- weird case
- end
- else
- -- no need
- end
- else
- -- weird case
- end
- comprun(disc,crun)
- return next
-end
-
-local function discrun(disc,drun,krun)
- local next = getnext(disc)
- local prev = getprev(disc)
- if trace_discruns then
- report_run("disc") -- will be more detailed
- end
- if next and prev then
- setnext(prev,next)
- -- setprev(next,prev)
- drun(prev)
- setnext(prev,disc)
- -- setprev(next,disc)
- end
- --
- local pre = getfield(disc,"pre")
- if not pre then
- -- go on
- elseif prev then
- local nest = getprev(pre)
- setprev(pre,prev)
- setnext(prev,pre)
- krun(prev,"preinjections")
- setprev(pre,nest)
- setnext(prev,disc)
- else
- krun(pre,"preinjections")
- end
- return next
-end
-
--- todo: maybe run lr and rl stretches
-
-local function featuresprocessor(head,font,attr)
-
- local lookuphash = lookuphashes[font] -- we can also check sequences here
-
- if not lookuphash then
- return head, false
- end
-
- head = tonut(head)
-
- if trace_steps then
- checkstep(head)
- end
-
- tfmdata = fontdata[font]
- descriptions = tfmdata.descriptions
- characters = tfmdata.characters
- resources = tfmdata.resources
-
- marks = resources.marks
- anchorlookups = resources.lookup_to_anchor
- lookuptable = resources.lookups
- lookuptypes = resources.lookuptypes
- lookuptags = resources.lookuptags
-
- currentfont = font
- rlmode = 0
- sweephead = { }
-
- local sequences = resources.sequences
- local done = false
- local datasets = otf.dataset(tfmdata,font,attr)
-
- local dirstack = { } -- could move outside function
-
- -- We could work on sub start-stop ranges instead but I wonder if there is that
- -- much speed gain (experiments showed that it did not make much sense) and we need
- -- to keep track of directions anyway. Also at some point I want to play with
- -- font interactions and then we do need the full sweeps.
-
- -- Keeping track of the headnode is needed for devanagari (I generalized it a bit
- -- so that multiple cases are also covered.)
-
- -- We don't go to the next node when a disc node is created, so that we can then treat
- -- its pre, post and replace fields. It's a bit of a hack but works out ok for most cases.
-
- -- there can be fewer subtype and attr checks in the comprun etc helpers
-
- for s=1,#datasets do
- local dataset = datasets[s]
- featurevalue = dataset[1] -- todo: pass to function instead of using a global
- local attribute = dataset[2]
- local sequence = dataset[3] -- sequences[s] -- also dataset[5]
- local kind = dataset[4]
- ----- chain = dataset[5] -- sequence.chain or 0
- local rlparmode = 0
- local topstack = 0
- local success = false
- local typ = sequence.type
- local gpossing = typ == "gpos_single" or typ == "gpos_pair" -- maybe all of them
- local subtables = sequence.subtables
- local handler = handlers[typ]
- if typ == "gsub_reversecontextchain" then -- chain < 0
- -- this is a limited case, no special treatments like 'init' etc
- -- we need to get rid of this slide! probably no longer needed in latest luatex
- local start = find_node_tail(head) -- slow (we can store tail because there's always a skip at the end): todo
- while start do
- local id = getid(start)
- if id == glyph_code then
- if getfont(start) == font and getsubtype(start) < 256 then
- local a = getattr(start,0)
- if a then
- a = a == attr
- else
- a = true
- end
- if a then
- local char = getchar(start)
- for i=1,#subtables do
- local lookupname = subtables[i]
- local lookupcache = lookuphash[lookupname]
- if lookupcache then
- local lookupmatch = lookupcache[char]
- if lookupmatch then
- -- todo: disc?
- head, start, success = handler(head,start,kind,lookupname,lookupmatch,sequence,lookuphash,i)
- if success then
- break
- end
- end
- else
- report_missing_cache(typ,lookupname)
- end
- end
- if start then start = getprev(start) end
- else
- start = getprev(start)
- end
- else
- start = getprev(start)
- end
- else
- start = getprev(start)
- end
- end
- else
- local ns = #subtables
- local start = head -- local ?
- rlmode = 0 -- to be checked ?
- if ns == 1 then -- happens often
- local lookupname = subtables[1]
- local lookupcache = lookuphash[lookupname]
- if not lookupcache then -- also check for empty cache
- report_missing_cache(typ,lookupname)
- else
-
- local function c_run(head) -- no need to check for 256 and attr probably also the same
- local done = false
- local start = sweephead[head]
- if start then
- sweephead[head] = nil
- else
- start = head
- end
- while start do
- local id = getid(start)
- if id ~= glyph_code then
- -- very unlikely
- start = getnext(start)
- elseif getfont(start) == font and getsubtype(start) < 256 then
- local a = getattr(start,0)
- if a then
- a = (a == attr) and (not attribute or getprop(start,a_state) == attribute)
- else
- a = not attribute or getprop(start,a_state) == attribute
- end
- if a then
- local lookupmatch = lookupcache[getchar(start)]
- if lookupmatch then
- -- sequence can go (can be dropped)
- local ok
- head, start, ok = handler(head,start,kind,lookupname,lookupmatch,sequence,lookuphash,1)
- if ok then
- done = true
- end
- end
- if start then start = getnext(start) end
- else
- start = getnext(start)
- end
- else
- return head, false
- end
- end
- if done then
- success = true -- needed in this subrun?
- end
- return head, done
- end
-
- local function t_run(start,stop)
- while start ~= stop do
- local id = getid(start)
- if id == glyph_code and getfont(start) == font and getsubtype(start) < 256 then
- local a = getattr(start,0)
- if a then
- a = (a == attr) and (not attribute or getprop(start,a_state) == attribute)
- else
- a = not attribute or getprop(start,a_state) == attribute
- end
- if a then
- local lookupmatch = lookupcache[getchar(start)]
- if lookupmatch then -- hm, hyphens can match (tlig) so we need to really check
- -- if we need more than ligatures we can outline the code and use functions
- local s = getnext(start)
- local l = nil
- while s do
- local lg = lookupmatch[getchar(s)]
- if lg then
- l = lg
- s = getnext(s)
- else
- break
- end
- end
- if l and l.ligature then
- return true
- end
- end
- end
- start = getnext(start)
- else
- break
- end
- end
- end
-
- local function d_run(prev) -- we can assume that prev and next are glyphs
- local a = getattr(prev,0)
- if a then
- a = (a == attr) and (not attribute or getprop(prev,a_state) == attribute)
- else
- a = not attribute or getprop(prev,a_state) == attribute
- end
- if a then
- local lookupmatch = lookupcache[getchar(prev)]
- if lookupmatch then
- -- sequence can go (can be dropped)
- local h, d, ok = handler(head,prev,kind,lookupname,lookupmatch,sequence,lookuphash,1)
- if ok then
- done = true
- success = true
- end
- end
- end
- end
-
- local function k_run(sub,injection,last)
- local a = getattr(sub,0)
- if a then
- a = (a == attr) and (not attribute or getprop(sub,a_state) == attribute)
- else
- a = not attribute or getprop(sub,a_state) == attribute
- end
- if a then
- -- sequence can go (can be dropped)
- for n in traverse_nodes(sub) do -- only gpos
- if n == last then
- break
- end
- local id = getid(n)
- if id == glyph_code then
- local lookupmatch = lookupcache[getchar(n)]
- if lookupmatch then
- local h, d, ok = handler(sub,n,kind,lookupname,lookupmatch,sequence,lookuphash,1,injection)
- if ok then
- done = true
- success = true
- end
- end
- else
- -- message
- end
- end
- end
- end
-
- while start do
- local id = getid(start)
- if id == glyph_code then
- if getfont(start) == font and getsubtype(start) < 256 then -- why a 256 test ...
- local a = getattr(start,0)
- if a then
- a = (a == attr) and (not attribute or getprop(start,a_state) == attribute)
- else
- a = not attribute or getprop(start,a_state) == attribute
- end
- if a then
- local char = getchar(start)
- local lookupmatch = lookupcache[char]
- if lookupmatch then
- -- sequence can go (can be dropped)
- local ok
- head, start, ok = handler(head,start,kind,lookupname,lookupmatch,sequence,lookuphash,1)
- if ok then
- success = true
- elseif gpossing and zwnjruns and char == zwnj then
- discrun(start,d_run)
- end
- elseif gpossing and zwnjruns and char == zwnj then
- discrun(start,d_run)
- end
- if start then start = getnext(start) end
- else
- start = getnext(start)
- end
- else
- start = getnext(start)
- end
- elseif id == disc_code then
- if gpossing then
- kernrun(start,k_run)
- start = getnext(start)
- elseif typ == "gsub_ligature" then
- start = testrun(start,t_run,c_run)
- else
- comprun(start,c_run)
- start = getnext(start)
- end
- elseif id == math_code then
- start = getnext(end_of_math(start))
- elseif id == dir_code then
- local dir = getfield(start,"dir")
- if dir == "+TLT" then
- topstack = topstack + 1
- dirstack[topstack] = dir
- rlmode = 1
- elseif dir == "+TRT" then
- topstack = topstack + 1
- dirstack[topstack] = dir
- rlmode = -1
- elseif dir == "-TLT" or dir == "-TRT" then
- topstack = topstack - 1
- rlmode = dirstack[topstack] == "+TRT" and -1 or 1
- else
- rlmode = rlparmode
- end
- if trace_directions then
- report_process("directions after txtdir %a: parmode %a, txtmode %a, # stack %a, new dir %a",dir,rlparmode,rlmode,topstack,newdir)
- end
- start = getnext(start)
- elseif id == localpar_code then
- local dir = getfield(start,"dir")
- if dir == "TRT" then
- rlparmode = -1
- elseif dir == "TLT" then
- rlparmode = 1
- else
- rlparmode = 0
- end
- -- one might wonder if the par dir should be looked at, so we might as well drop the next line
- rlmode = rlparmode
- if trace_directions then
- report_process("directions after pardir %a: parmode %a, txtmode %a",dir,rlparmode,rlmode)
- end
- start = getnext(start)
- else
- start = getnext(start)
- end
- end
- end
-
- else
-
- local function c_run(head)
- local done = false
- local start = sweephead[head]
- if start then
- sweephead[head] = nil
- else
- start = head
- end
- while start do
- local id = getid(start)
- if id ~= glyph_code then
- -- very unlikely
- start = getnext(start)
- elseif getfont(start) == font and getsubtype(start) < 256 then
- local a = getattr(start,0)
- if a then
- a = (a == attr) and (not attribute or getprop(start,a_state) == attribute)
- else
- a = not attribute or getprop(start,a_state) == attribute
- end
- if a then
- local char = getchar(start)
- for i=1,ns do
- local lookupname = subtables[i]
- local lookupcache = lookuphash[lookupname]
- if lookupcache then
- local lookupmatch = lookupcache[char]
- if lookupmatch then
- -- we could move all code inline but that makes things even more unreadable
- local ok
- head, start, ok = handler(head,start,kind,lookupname,lookupmatch,sequence,lookuphash,i)
- if ok then
- done = true
- break
- elseif not start then
- -- don't ask why ... shouldn't happen
- break
- end
- end
- else
- report_missing_cache(typ,lookupname)
- end
- end
- if start then start = getnext(start) end
- else
- start = getnext(start)
- end
- else
- return head, false
- end
- end
- if done then
- success = true
- end
- return head, done
- end
-
- local function d_run(prev)
- local a = getattr(prev,0)
- if a then
- a = (a == attr) and (not attribute or getprop(prev,a_state) == attribute)
- else
- a = not attribute or getprop(prev,a_state) == attribute
- end
- if a then
- -- brr prev can be disc
- local char = getchar(prev)
- for i=1,ns do
- local lookupname = subtables[i]
- local lookupcache = lookuphash[lookupname]
- if lookupcache then
- local lookupmatch = lookupcache[char]
- if lookupmatch then
- -- we could move all code inline but that makes things even more unreadable
- local h, d, ok = handler(head,prev,kind,lookupname,lookupmatch,sequence,lookuphash,i)
- if ok then
- done = true
- break
- end
- end
- else
- report_missing_cache(typ,lookupname)
- end
- end
- end
- end
-
- local function k_run(sub,injection,last)
- local a = getattr(sub,0)
- if a then
- a = (a == attr) and (not attribute or getprop(sub,a_state) == attribute)
- else
- a = not attribute or getprop(sub,a_state) == attribute
- end
- if a then
- for n in traverse_nodes(sub) do -- only gpos
- if n == last then
- break
- end
- local id = getid(n)
- if id == glyph_code then
- local char = getchar(n)
- for i=1,ns do
- local lookupname = subtables[i]
- local lookupcache = lookuphash[lookupname]
- if lookupcache then
- local lookupmatch = lookupcache[char]
- if lookupmatch then
- local h, d, ok = handler(head,n,kind,lookupname,lookupmatch,sequence,lookuphash,i,injection)
- if ok then
- done = true
- break
- end
- end
- else
- report_missing_cache(typ,lookupname)
- end
- end
- else
- -- message
- end
- end
- end
- end
-
- local function t_run(start,stop)
- while start ~= stop do
- local id = getid(start)
- if id == glyph_code and getfont(start) == font and getsubtype(start) < 256 then
- local a = getattr(start,0)
- if a then
- a = (a == attr) and (not attribute or getprop(start,a_state) == attribute)
- else
- a = not attribute or getprop(start,a_state) == attribute
- end
- if a then
- local char = getchar(start)
- for i=1,ns do
- local lookupname = subtables[i]
- local lookupcache = lookuphash[lookupname]
- if lookupcache then
- local lookupmatch = lookupcache[char]
- if lookupmatch then
- -- if we need more than ligatures we can outline the code and use functions
- local s = getnext(start)
- local l = nil
- while s do
- local lg = lookupmatch[getchar(s)]
- if lg then
- l = lg
- s = getnext(s)
- else
- break
- end
- end
- if l and l.ligature then
- return true
- end
- end
- else
- report_missing_cache(typ,lookupname)
- end
- end
- end
- start = getnext(start)
- else
- break
- end
- end
- end
-
- while start do
- local id = getid(start)
- if id == glyph_code then
- if getfont(start) == font and getsubtype(start) < 256 then
- local a = getattr(start,0)
- if a then
- a = (a == attr) and (not attribute or getprop(start,a_state) == attribute)
- else
- a = not attribute or getprop(start,a_state) == attribute
- end
- if a then
- for i=1,ns do
- local lookupname = subtables[i]
- local lookupcache = lookuphash[lookupname]
- if lookupcache then
- local char = getchar(start)
- local lookupmatch = lookupcache[char]
- if lookupmatch then
- -- we could move all code inline but that makes things even more unreadable
- local ok
- head, start, ok = handler(head,start,kind,lookupname,lookupmatch,sequence,lookuphash,i)
- if ok then
- success = true
- break
- elseif not start then
- -- don't ask why ... shouldn't happen
- break
- elseif gpossing and zwnjruns and char == zwnj then
- discrun(start,d_run)
- end
- elseif gpossing and zwnjruns and char == zwnj then
- discrun(start,d_run)
- end
- else
- report_missing_cache(typ,lookupname)
- end
- end
- if start then start = getnext(start) end
- else
- start = getnext(start)
- end
- else
- start = getnext(start)
- end
- elseif id == disc_code then
- if gpossing then
- kernrun(start,k_run)
- start = getnext(start)
- elseif typ == "gsub_ligature" then
- start = testrun(start,t_run,c_run)
- else
- comprun(start,c_run)
- start = getnext(start)
- end
- elseif id == math_code then
- start = getnext(end_of_math(start))
- elseif id == dir_code then
- local dir = getfield(start,"dir")
- if dir == "+TLT" then
- topstack = topstack + 1
- dirstack[topstack] = dir
- rlmode = 1
- elseif dir == "+TRT" then
- topstack = topstack + 1
- dirstack[topstack] = dir
- rlmode = -1
- elseif dir == "-TLT" or dir == "-TRT" then
- topstack = topstack - 1
- rlmode = dirstack[topstack] == "+TRT" and -1 or 1
- else
- rlmode = rlparmode
- end
- if trace_directions then
- report_process("directions after txtdir %a: parmode %a, txtmode %a, # stack %a, new dir %a",dir,rlparmode,rlmode,topstack,newdir)
- end
- start = getnext(start)
- elseif id == localpar_code then
- local dir = getfield(start,"dir")
- if dir == "TRT" then
- rlparmode = -1
- elseif dir == "TLT" then
- rlparmode = 1
- else
- rlparmode = 0
- end
- rlmode = rlparmode
- if trace_directions then
- report_process("directions after pardir %a: parmode %a, txtmode %a",dir,rlparmode,rlmode)
- end
- start = getnext(start)
- else
- start = getnext(start)
- end
- end
- end
- end
- if success then
- done = true
- end
- if trace_steps then -- ?
- registerstep(head)
- end
-
- end
-
- head = tonode(head)
-
- return head, done
-end
-
--- this might move to the loader
-
-local function generic(lookupdata,lookupname,unicode,lookuphash)
- local target = lookuphash[lookupname]
- if target then
- target[unicode] = lookupdata
- else
- lookuphash[lookupname] = { [unicode] = lookupdata }
- end
-end
-
-local function ligature(lookupdata,lookupname,unicode,lookuphash)
- local target = lookuphash[lookupname]
- if not target then
- target = { }
- lookuphash[lookupname] = target
- end
- for i=1,#lookupdata do
- local li = lookupdata[i]
- local tu = target[li]
- if not tu then
- tu = { }
- target[li] = tu
- end
- target = tu
- end
- target.ligature = unicode
-end
-
--- this is not ok .. it doesn't work for the old loader with new loader extension
--- specs
-
-local function pair(lookupdata,lookupname,unicode,lookuphash)
- local target = lookuphash[lookupname]
- if not target then
- target = { }
- lookuphash[lookupname] = target
- end
- local others = target[unicode]
- local paired = lookupdata[1]
- if not paired then
- -- something is wrong
- elseif others then
- others[paired] = lookupdata
- else
- others = { [paired] = lookupdata }
- target[unicode] = others
- end
-end
-
-local action = {
- substitution = generic,
- multiple = generic,
- alternate = generic,
- position = generic,
- ligature = ligature,
- pair = pair,
- kern = pair,
-}
-
-local function prepare_lookups(tfmdata)
-
- local rawdata = tfmdata.shared.rawdata
- local resources = rawdata.resources
- local lookuphash = resources.lookuphash
- local anchor_to_lookup = resources.anchor_to_lookup
- local lookup_to_anchor = resources.lookup_to_anchor
- local lookuptypes = resources.lookuptypes
- local characters = tfmdata.characters
- local descriptions = tfmdata.descriptions
- local duplicates = resources.duplicates
-
- -- we cannot free the entries in the descriptions as sometimes we access
- -- them directly (for instance anchors) ... selectively freeing doesn't save
- -- much memory as it's only a reference to a table and the slot in the
- -- description hash is not freed anyway
-
- -- we can delay this using metatables so that we don't make the hashes for
- -- features we don't use but then we need to loop over the characters
- -- many times so we gain nothing
-
- for unicode, character in next, characters do -- we cannot loop over descriptions !
-
- local description = descriptions[unicode]
-
- if description then
-
- local lookups = description.slookups
- if lookups then
- for lookupname, lookupdata in next, lookups do
- action[lookuptypes[lookupname]](lookupdata,lookupname,unicode,lookuphash,duplicates)
- end
- end
-
- local lookups = description.mlookups
- if lookups then
- for lookupname, lookuplist in next, lookups do
- local lookuptype = lookuptypes[lookupname]
- for l=1,#lookuplist do
- local lookupdata = lookuplist[l]
- action[lookuptype](lookupdata,lookupname,unicode,lookuphash,duplicates)
- end
- end
- end
-
- local list = description.kerns
- if list then
- for lookup, krn in next, list do -- ref to glyph, saves lookup
- local target = lookuphash[lookup]
- if target then
- target[unicode] = krn
- else
- lookuphash[lookup] = { [unicode] = krn }
- end
- end
- end
-
- local list = description.anchors
- if list then
- for typ, anchors in next, list do -- types
- if typ == "mark" or typ == "cexit" then -- or entry?
- for name, anchor in next, anchors do
- local lookups = anchor_to_lookup[name]
- if lookups then
- for lookup in next, lookups do
- local target = lookuphash[lookup]
- if target then
- target[unicode] = anchors
- else
- lookuphash[lookup] = { [unicode] = anchors }
- end
- end
- end
- end
- end
- end
- end
-
- end
-
- end
-
-end
-
--- so far
-
-local function split(replacement,original)
- local result = { }
- for i=1,#replacement do
- result[original[i]] = replacement[i]
- end
- return result
-end
-
-local valid = { -- does contextpos work?
- coverage = { chainsub = true, chainpos = true, contextsub = true, contextpos = true },
- reversecoverage = { reversesub = true },
- glyphs = { chainsub = true, chainpos = true, contextsub = true, contextpos = true },
-}
-
-local function prepare_contextchains(tfmdata)
- local rawdata = tfmdata.shared.rawdata
- local resources = rawdata.resources
- local lookuphash = resources.lookuphash
- local lookuptags = resources.lookuptags
- local lookups = rawdata.lookups
- if lookups then
- for lookupname, lookupdata in next, rawdata.lookups do
- local lookuptype = lookupdata.type
- if lookuptype then
- local rules = lookupdata.rules
- if rules then
- local format = lookupdata.format
- local validformat = valid[format]
- if not validformat then
- report_prepare("unsupported format %a",format)
- elseif not validformat[lookuptype] then
- -- todo: dejavu-serif has one (but i need to see what use it has)
- report_prepare("unsupported format %a, lookuptype %a, lookupname %a",format,lookuptype,lookuptags[lookupname])
- else
- local contexts = lookuphash[lookupname]
- if not contexts then
- contexts = { }
- lookuphash[lookupname] = contexts
- end
- local t, nt = { }, 0
- for nofrules=1,#rules do
- local rule = rules[nofrules]
- local current = rule.current
- local before = rule.before
- local after = rule.after
- local replacements = rule.replacements
- local sequence = { }
- local nofsequences = 0
- -- Eventually we can store start, stop and sequence in the cached file
- -- but then less sharing takes place so best not do that without a lot
- -- of profiling so let's forget about it.
- if before then
- for n=1,#before do
- nofsequences = nofsequences + 1
- sequence[nofsequences] = before[n]
- end
- end
- local start = nofsequences + 1
- for n=1,#current do
- nofsequences = nofsequences + 1
- sequence[nofsequences] = current[n]
- end
- local stop = nofsequences
- if after then
- for n=1,#after do
- nofsequences = nofsequences + 1
- sequence[nofsequences] = after[n]
- end
- end
- if sequence[1] then
- -- Replacements only happen with reverse lookups as they are single only. We
- -- could pack them into current (replacement value instead of true) and then
- -- use sequence[start] instead but it's somewhat ugly.
- nt = nt + 1
- t[nt] = { nofrules, lookuptype, sequence, start, stop, rule.lookups, replacements }
- for unic in next, sequence[start] do
- local cu = contexts[unic]
- if not cu then
- contexts[unic] = t
- end
- end
- end
- end
- end
- else
- -- no rules
- end
- else
- report_prepare("missing lookuptype for lookupname %a",lookuptags[lookupname])
- end
- end
- end
-end
-
--- we can consider lookuphash == false (initialized but empty) vs lookuphash == table
-
-local function featuresinitializer(tfmdata,value)
- if true then -- value then
- -- beware we need to use the topmost properties table
- local rawdata = tfmdata.shared.rawdata
- local properties = rawdata.properties
- if not properties.initialized then
- local starttime = trace_preparing and os.clock()
- local resources = rawdata.resources
- resources.lookuphash = resources.lookuphash or { }
- prepare_contextchains(tfmdata)
- prepare_lookups(tfmdata)
- properties.initialized = true
- if trace_preparing then
- report_prepare("preparation time is %0.3f seconds for %a",os.clock()-starttime,tfmdata.properties.fullname)
- end
- end
- end
-end
-
-registerotffeature {
- name = "features",
- description = "features",
- default = true,
- initializers = {
- position = 1,
- node = featuresinitializer,
- },
- processors = {
- node = featuresprocessor,
- }
-}
-
--- This can be used for extra handlers, but should be used with care!
-
-otf.handlers = handlers
diff --git a/tex/context/base/mkiv/font-oto.lua b/tex/context/base/mkiv/font-oto.lua
index 13568799b..4b986bd3b 100644
--- a/tex/context/base/mkiv/font-oto.lua
+++ b/tex/context/base/mkiv/font-oto.lua
@@ -6,10 +6,6 @@ if not modules then modules = { } end modules ['font-oto'] = { -- original tex
license = "see context related readme files"
}
--- This is a version of font-otb adapted to the new fontloader code. We used to have two
--- base initialization methods but now we have only one. This means that instead of the
--- old default (independent) we now use the one more similar to node node (shared).
-
local concat, unpack = table.concat, table.unpack
local insert, remove = table.insert, table.remove
local format, gmatch, gsub, find, match, lower, strip = string.format, string.gmatch, string.gsub, string.find, string.match, string.lower, string.strip
@@ -34,6 +30,8 @@ local registerotffeature = otffeatures.register
otf.defaultbasealternate = "none" -- first last
+local getprivate = fonts.constructors.getprivate
+
local wildcard = "*"
local default = "dflt"
@@ -71,21 +69,32 @@ local function cref(feature,sequence)
return formatters["feature %a, type %a, chain lookup %a"](feature,sequence.type,sequence.name)
end
-
-local function report_alternate(feature,sequence,descriptions,unicode,replacement,value,comment)
- report_prepare("%s: base alternate %s => %s (%S => %S)",
- cref(feature,sequence),
- gref(descriptions,unicode),
- replacement and gref(descriptions,replacement),
- value,
- comment)
+local function report_substitution(feature,sequence,descriptions,unicode,substitution)
+ if unicode == substitution then
+ report_prepare("%s: base substitution %s maps onto itself",
+ cref(feature,sequence),
+ gref(descriptions,unicode))
+ else
+ report_prepare("%s: base substitution %s => %S",
+ cref(feature,sequence),
+ gref(descriptions,unicode),
+ gref(descriptions,substitution))
+ end
end
-local function report_substitution(feature,sequence,descriptions,unicode,substitution)
- report_prepare("%s: base substitution %s => %S",
- cref(feature,sequence),
- gref(descriptions,unicode),
- gref(descriptions,substitution))
+local function report_alternate(feature,sequence,descriptions,unicode,replacement,value,comment)
+ if unicode == replacement then
+ report_prepare("%s: base alternate %s maps onto itself",
+ cref(feature,sequence),
+ gref(descriptions,unicode))
+ else
+ report_prepare("%s: base alternate %s => %s (%S => %S)",
+ cref(feature,sequence),
+ gref(descriptions,unicode),
+ replacement and gref(descriptions,replacement),
+ value,
+ comment)
+ end
end
local function report_ligature(feature,sequence,descriptions,unicode,ligature)
@@ -158,13 +167,11 @@ end
-- messy if we need to take that into account.
local function makefake(tfmdata,name,present)
- local resources = tfmdata.resources
- local private = resources.private
+ local private = getprivate(tfmdata)
local character = { intermediate = true, ligatures = { } }
resources.unicodes[name] = private
tfmdata.characters[private] = character
tfmdata.descriptions[private] = { name = name }
- resources.private = private + 1
present[name] = private
return character
end
@@ -225,6 +232,9 @@ local function preparesubstitutions(tfmdata,feature,value,validlookups,lookuplis
local trace_alternatives = trace_baseinit and trace_alternatives
local trace_ligatures = trace_baseinit and trace_ligatures
+ -- A chain of changes is handled in font-con which is cleaner because
+ -- we can have shared changes and such.
+
if not changed then
changed = { }
tfmdata.changed = changed
@@ -237,39 +247,44 @@ local function preparesubstitutions(tfmdata,feature,value,validlookups,lookuplis
if kind == "gsub_single" then
for i=1,#steps do
for unicode, data in next, steps[i].coverage do
- -- if not changed[unicode] then -- fails for multiple subs in some math fonts
- if trace_singles then
- report_substitution(feature,sequence,descriptions,unicode,data)
- end
+ if unicode ~= data then
changed[unicode] = data
- -- end
+ end
+ if trace_singles then
+ report_substitution(feature,sequence,descriptions,unicode,data)
+ end
end
end
elseif kind == "gsub_alternate" then
for i=1,#steps do
for unicode, data in next, steps[i].coverage do
- if not changed[unicode] then
- local replacement = data[alternate]
- if replacement then
+ local replacement = data[alternate]
+ if replacement then
+ if unicode ~= replacement then
changed[unicode] = replacement
- if trace_alternatives then
- report_alternate(feature,sequence,descriptions,unicode,replacement,value,"normal")
- end
- elseif defaultalt == "first" then
- replacement = data[1]
+ end
+ if trace_alternatives then
+ report_alternate(feature,sequence,descriptions,unicode,replacement,value,"normal")
+ end
+ elseif defaultalt == "first" then
+ replacement = data[1]
+ if unicode ~= replacement then
+ changed[unicode] = replacement
+ end
+ if trace_alternatives then
+ report_alternate(feature,sequence,descriptions,unicode,replacement,value,defaultalt)
+ end
+ elseif defaultalt == "last" then
+ replacement = data[#data]
+ if unicode ~= replacement then
changed[unicode] = replacement
- if trace_alternatives then
- report_alternate(feature,sequence,descriptions,unicode,replacement,value,defaultalt)
- end
- elseif defaultalt == "last" then
- replacement = data[#data]
- if trace_alternatives then
- report_alternate(feature,sequence,descriptions,unicode,replacement,value,defaultalt)
- end
- else
- if trace_alternatives then
- report_alternate(feature,sequence,descriptions,unicode,replacement,value,"unknown")
- end
+ end
+ if trace_alternatives then
+ report_alternate(feature,sequence,descriptions,unicode,replacement,value,defaultalt)
+ end
+ else
+ if trace_alternatives then
+ report_alternate(feature,sequence,descriptions,unicode,replacement,value,"unknown")
end
end
end
@@ -289,7 +304,6 @@ local function preparesubstitutions(tfmdata,feature,value,validlookups,lookuplis
local nofligatures = #ligatures
if nofligatures > 0 then
-
local characters = tfmdata.characters
local present = { }
local done = trace_baseinit and trace_ligatures and { }
@@ -324,8 +338,9 @@ local function preparepositionings(tfmdata,feature,value,validlookups,lookuplist
local format = sequence.format
if kind == "gpos_pair" then
for i=1,#steps do
- local step = steps[i]
- if step.format == "kern" then
+ local step = steps[i]
+ local format = step.format
+ if format == "kern" or format == "move" then
for unicode, data in next, steps[i].coverage do
local character = characters[unicode]
local kerns = character.kerns
@@ -353,9 +368,13 @@ local function preparepositionings(tfmdata,feature,value,validlookups,lookuplist
local character = characters[unicode]
local kerns = character.kerns
for otherunicode, kern in next, data do
- if not kern[2] and not (kerns and kerns[otherunicode]) then
+ -- kern[2] is true (all zero) or a table
+ local other = kern[2]
+ if other == true or (not other and not (kerns and kerns[otherunicode])) then
local kern = kern[1]
- if kern[1] ~= 0 or kern[2] ~= 0 or kern[4] ~= 0 then
+ if kern == true then
+ -- all zero
+ elseif kern[1] ~= 0 or kern[2] ~= 0 or kern[4] ~= 0 then
-- a complex pair not suitable for basemode
else
kern = kern[3]
@@ -385,6 +404,53 @@ local function initializehashes(tfmdata)
-- already done
end
+local function checkmathreplacements(tfmdata,fullname,fixitalics)
+ if tfmdata.mathparameters then
+ local characters = tfmdata.characters
+ local changed = tfmdata.changed
+ if next(changed) then
+ if trace_preparing or trace_baseinit then
+ report_prepare("checking math replacements for %a",fullname)
+ end
+ for unicode, replacement in next, changed do
+ local u = characters[unicode]
+ local r = characters[replacement]
+ local n = u.next
+ local v = u.vert_variants
+ local h = u.horiz_variants
+ if fixitalics then
+ -- quite some warnings on stix ...
+ local ui = u.italic
+ if ui and not r.italic then
+ if trace_preparing then
+ report_prepare("using %i units of italic correction from %C for %U",ui,unicode,replacement)
+ end
+ r.italic = ui -- print(ui,ri)
+ end
+ end
+ if n and not r.next then
+ if trace_preparing then
+ report_prepare("forcing %s for %C substituted by %U","incremental step",unicode,replacement)
+ end
+ r.next = n
+ end
+ if v and not r.vert_variants then
+ if trace_preparing then
+ report_prepare("forcing %s for %C substituted by %U","vertical variants",unicode,replacement)
+ end
+ r.vert_variants = v
+ end
+ if h and not r.horiz_variants then
+ if trace_preparing then
+ report_prepare("forcing %s for %C substituted by %U","horizontal variants",unicode,replacement)
+ end
+ r.horiz_variants = h
+ end
+ end
+ end
+ end
+end
+
local function featuresinitializer(tfmdata,value)
if true then -- value then
local starttime = trace_preparing and os.clock()
@@ -401,6 +467,8 @@ local function featuresinitializer(tfmdata,value)
local rawfeatures = rawresources and rawresources.features
local basesubstitutions = rawfeatures and rawfeatures.gsub
local basepositionings = rawfeatures and rawfeatures.gpos
+ local substitutionsdone = false
+ local positioningsdone = false
--
if basesubstitutions or basepositionings then
local sequences = tfmdata.resources.sequences
@@ -423,12 +491,14 @@ local function featuresinitializer(tfmdata,value)
end
preparesubstitutions(tfmdata,feature,value,validlookups,lookuplist)
registerbasefeature(feature,value)
+ substitutionsdone = true
elseif basepositionings and basepositionings[feature] then
if trace_preparing then
report_prepare("filtering base %a feature %a for %a with value %a","pos",feature,fullname,value)
end
preparepositionings(tfmdata,feature,value,validlookups,lookuplist)
registerbasefeature(feature,value)
+ positioningsdone = true
end
end
end
@@ -437,6 +507,10 @@ local function featuresinitializer(tfmdata,value)
end
end
--
+ if substitutionsdone then
+ checkmathreplacements(tfmdata,fullname,features.fixitalics)
+ end
+ --
registerbasehash(tfmdata)
end
if trace_preparing then
diff --git a/tex/context/base/mkiv/font-otp.lua b/tex/context/base/mkiv/font-otp.lua
deleted file mode 100644
index c52e574b9..000000000
--- a/tex/context/base/mkiv/font-otp.lua
+++ /dev/null
@@ -1,894 +0,0 @@
-if not modules then modules = { } end modules ['font-otp'] = {
- version = 1.001,
- comment = "companion to font-otf.lua (packing)",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- todo: pack math (but not that much to share)
---
--- pitfall 5.2: hashed tables can suddenly become indexed with nil slots
---
--- unless we sort all hashes we can get a different pack order (no big deal but size can differ)
-
-local next, type, tostring = next, type, tostring
-local sort, concat = table.sort, table.concat
-
-local trace_packing = false trackers.register("otf.packing", function(v) trace_packing = v end)
-local trace_loading = false trackers.register("otf.loading", function(v) trace_loading = v end)
-
-local report_otf = logs.reporter("fonts","otf loading")
-
--- also used in other scripts so we need to check some tables:
-
-fonts = fonts or { }
-
-local handlers = fonts.handlers or { }
-fonts.handlers = handlers
-
-local otf = handlers.otf or { }
-handlers.otf = otf
-
-local glists = otf.glists or { "gsub", "gpos" }
-otf.glists = glists
-
-local criterium = 1
-local threshold = 0
-
-local function tabstr_normal(t)
- local s = { }
- local n = 0
- for k, v in next, t do
- n = n + 1
- if type(v) == "table" then
- s[n] = k .. ">" .. tabstr_normal(v)
- elseif v == true then
- s[n] = k .. "+" -- "=true"
- elseif v then
- s[n] = k .. "=" .. v
- else
- s[n] = k .. "-" -- "=false"
- end
- end
- if n == 0 then
- return ""
- elseif n == 1 then
- return s[1]
- else
- sort(s) -- costly but needed (occasional wrong hit otherwise)
- return concat(s,",")
- end
-end
-
-local function tabstr_flat(t)
- local s = { }
- local n = 0
- for k, v in next, t do
- n = n + 1
- s[n] = k .. "=" .. v
- end
- if n == 0 then
- return ""
- elseif n == 1 then
- return s[1]
- else
- sort(s) -- costly but needed (occasional wrong hit otherwise)
- return concat(s,",")
- end
-end
-
-local function tabstr_mixed(t) -- indexed
- local s = { }
- local n = #t
- if n == 0 then
- return ""
- elseif n == 1 then
- local k = t[1]
- if k == true then
- return "++" -- we need to distinguish from "true"
- elseif k == false then
- return "--" -- we need to distinguish from "false"
- else
- return tostring(k) -- number or string
- end
- else
- for i=1,n do
- local k = t[i]
- if k == true then
- s[i] = "++" -- we need to distinguish from "true"
- elseif k == false then
- s[i] = "--" -- we need to distinguish from "false"
- else
- s[i] = k -- number or string
- end
- end
- return concat(s,",")
- end
-end
-
-local function tabstr_boolean(t)
- local s = { }
- local n = 0
- for k, v in next, t do
- n = n + 1
- if v then
- s[n] = k .. "+"
- else
- s[n] = k .. "-"
- end
- end
- if n == 0 then
- return ""
- elseif n == 1 then
- return s[1]
- else
- sort(s) -- costly but needed (occasional wrong hit otherwise)
- return concat(s,",")
- end
-end
-
--- tabstr_boolean_x = tabstr_boolean
-
--- tabstr_boolean = function(t)
--- local a = tabstr_normal(t)
--- local b = tabstr_boolean_x(t)
--- print(a)
--- print(b)
--- return b
--- end
-
--- beware: we cannot unpack and repack the same table because then sharing
--- interferes (we could catch this if needed) .. so for now: save, reload
--- and repack in such cases (never needed anyway) .. a tricky aspect is that
--- we then need to sort more thanks to random hashing
-
-function otf.packdata(data)
-
- if data then
- -- stripdata(data)
- local h, t, c = { }, { }, { }
- local hh, tt, cc = { }, { }, { }
- local nt, ntt = 0, 0
- local function pack_normal(v)
- local tag = tabstr_normal(v)
- local ht = h[tag]
- if ht then
- c[ht] = c[ht] + 1
- return ht
- else
- nt = nt + 1
- t[nt] = v
- h[tag] = nt
- c[nt] = 1
- return nt
- end
- end
- local function pack_flat(v)
- local tag = tabstr_flat(v)
- local ht = h[tag]
- if ht then
- c[ht] = c[ht] + 1
- return ht
- else
- nt = nt + 1
- t[nt] = v
- h[tag] = nt
- c[nt] = 1
- return nt
- end
- end
- local function pack_boolean(v)
- local tag = tabstr_boolean(v)
- local ht = h[tag]
- if ht then
- c[ht] = c[ht] + 1
- return ht
- else
- nt = nt + 1
- t[nt] = v
- h[tag] = nt
- c[nt] = 1
- return nt
- end
- end
- local function pack_indexed(v)
- local tag = concat(v," ")
- local ht = h[tag]
- if ht then
- c[ht] = c[ht] + 1
- return ht
- else
- nt = nt + 1
- t[nt] = v
- h[tag] = nt
- c[nt] = 1
- return nt
- end
- end
- local function pack_mixed(v)
- local tag = tabstr_mixed(v)
- local ht = h[tag]
- if ht then
- c[ht] = c[ht] + 1
- return ht
- else
- nt = nt + 1
- t[nt] = v
- h[tag] = nt
- c[nt] = 1
- return nt
- end
- end
- local function pack_final(v)
- -- v == number
- if c[v] <= criterium then
- return t[v]
- else
- -- compact hash
- local hv = hh[v]
- if hv then
- return hv
- else
- ntt = ntt + 1
- tt[ntt] = t[v]
- hh[v] = ntt
- cc[ntt] = c[v]
- return ntt
- end
- end
- end
- local function success(stage,pass)
- if nt == 0 then
- if trace_loading or trace_packing then
- report_otf("pack quality: nothing to pack")
- end
- return false
- elseif nt >= threshold then
- local one, two, rest = 0, 0, 0
- if pass == 1 then
- for k,v in next, c do
- if v == 1 then
- one = one + 1
- elseif v == 2 then
- two = two + 1
- else
- rest = rest + 1
- end
- end
- else
- for k,v in next, cc do
- if v > 20 then
- rest = rest + 1
- elseif v > 10 then
- two = two + 1
- else
- one = one + 1
- end
- end
- data.tables = tt
- end
- if trace_loading or trace_packing then
- report_otf("pack quality: stage %s, pass %s, %s packed, 1-10:%s, 11-20:%s, rest:%s (criterium: %s)", stage, pass, one+two+rest, one, two, rest, criterium)
- end
- return true
- else
- if trace_loading or trace_packing then
- report_otf("pack quality: stage %s, pass %s, %s packed, aborting pack (threshold: %s)", stage, pass, nt, threshold)
- end
- return false
- end
- end
- local function packers(pass)
- if pass == 1 then
- return pack_normal, pack_indexed, pack_flat, pack_boolean, pack_mixed
- else
- return pack_final, pack_final, pack_final, pack_final, pack_final
- end
- end
- local resources = data.resources
- local lookuptypes = resources.lookuptypes
- for pass=1,2 do
- if trace_packing then
- report_otf("start packing: stage 1, pass %s",pass)
- end
- local pack_normal, pack_indexed, pack_flat, pack_boolean, pack_mixed = packers(pass)
- for unicode, description in next, data.descriptions do
- local boundingbox = description.boundingbox
- if boundingbox then
- description.boundingbox = pack_indexed(boundingbox)
- end
- local slookups = description.slookups
- if slookups then
- for tag, slookup in next, slookups do
- local what = lookuptypes[tag]
- if what == "pair" then
- local t = slookup[2] if t then slookup[2] = pack_indexed(t) end
- local t = slookup[3] if t then slookup[3] = pack_indexed(t) end
- elseif what ~= "substitution" then
- slookups[tag] = pack_indexed(slookup) -- true is new
- end
- end
- end
- local mlookups = description.mlookups
- if mlookups then
- for tag, mlookup in next, mlookups do
- local what = lookuptypes[tag]
- if what == "pair" then
- for i=1,#mlookup do
- local lookup = mlookup[i]
- local t = lookup[2] if t then lookup[2] = pack_indexed(t) end
- local t = lookup[3] if t then lookup[3] = pack_indexed(t) end
- end
- elseif what ~= "substitution" then
- for i=1,#mlookup do
- mlookup[i] = pack_indexed(mlookup[i]) -- true is new
- end
- end
- end
- end
- local kerns = description.kerns
- if kerns then
- for tag, kern in next, kerns do
- kerns[tag] = pack_flat(kern)
- end
- end
- local math = description.math
- if math then
- local kerns = math.kerns
- if kerns then
- for tag, kern in next, kerns do
- kerns[tag] = pack_normal(kern)
- end
- end
- end
- local anchors = description.anchors
- if anchors then
- for what, anchor in next, anchors do
- if what == "baselig" then
- for _, a in next, anchor do
- for k=1,#a do
- a[k] = pack_indexed(a[k])
- end
- end
- else
- for k, v in next, anchor do
- anchor[k] = pack_indexed(v)
- end
- end
- end
- end
- local altuni = description.altuni
- if altuni then
- for i=1,#altuni do
- altuni[i] = pack_flat(altuni[i])
- end
- end
- end
- local lookups = data.lookups
- if lookups then
- for _, lookup in next, lookups do
- local rules = lookup.rules
- if rules then
- for i=1,#rules do
- local rule = rules[i]
- local r = rule.before if r then for i=1,#r do r[i] = pack_boolean(r[i]) end end
- local r = rule.after if r then for i=1,#r do r[i] = pack_boolean(r[i]) end end
- local r = rule.current if r then for i=1,#r do r[i] = pack_boolean(r[i]) end end
- local r = rule.replacements if r then rule.replacements = pack_flat (r) end -- can have holes
- local r = rule.lookups if r then rule.lookups = pack_indexed(r) end -- can have ""
- -- local r = rule.lookups if r then rule.lookups = pack_flat(r) end -- can have holes (already taken care of some cases)
- end
- end
- end
- end
- local anchor_to_lookup = resources.anchor_to_lookup
- if anchor_to_lookup then
- for anchor, lookup in next, anchor_to_lookup do
- anchor_to_lookup[anchor] = pack_normal(lookup)
- end
- end
- local lookup_to_anchor = resources.lookup_to_anchor
- if lookup_to_anchor then
- for lookup, anchor in next, lookup_to_anchor do
- lookup_to_anchor[lookup] = pack_normal(anchor)
- end
- end
- local sequences = resources.sequences
- if sequences then
- for feature, sequence in next, sequences do
- local flags = sequence.flags
- if flags then
- sequence.flags = pack_normal(flags)
- end
- local subtables = sequence.subtables
- if subtables then
- sequence.subtables = pack_normal(subtables)
- end
- local features = sequence.features
- if features then
- for script, feature in next, features do
- features[script] = pack_normal(feature)
- end
- end
- local order = sequence.order
- if order then
- sequence.order = pack_indexed(order)
- end
- local markclass = sequence.markclass
- if markclass then
- sequence.markclass = pack_boolean(markclass)
- end
- end
- end
- local lookups = resources.lookups
- if lookups then
- for name, lookup in next, lookups do
- local flags = lookup.flags
- if flags then
- lookup.flags = pack_normal(flags)
- end
- local subtables = lookup.subtables
- if subtables then
- lookup.subtables = pack_normal(subtables)
- end
- end
- end
- local features = resources.features
- if features then
- for _, what in next, glists do
- local list = features[what]
- if list then
- for feature, spec in next, list do
- list[feature] = pack_normal(spec)
- end
- end
- end
- end
- if not success(1,pass) then
- return
- end
- end
- if nt > 0 then
- for pass=1,2 do
- if trace_packing then
- report_otf("start packing: stage 2, pass %s",pass)
- end
- local pack_normal, pack_indexed, pack_flat, pack_boolean, pack_mixed = packers(pass)
- for unicode, description in next, data.descriptions do
- local kerns = description.kerns
- if kerns then
- description.kerns = pack_normal(kerns)
- end
- local math = description.math
- if math then
- local kerns = math.kerns
- if kerns then
- math.kerns = pack_normal(kerns)
- end
- end
- local anchors = description.anchors
- if anchors then
- description.anchors = pack_normal(anchors)
- end
- local mlookups = description.mlookups
- if mlookups then
- for tag, mlookup in next, mlookups do
- mlookups[tag] = pack_normal(mlookup)
- end
- end
- local altuni = description.altuni
- if altuni then
- description.altuni = pack_normal(altuni)
- end
- end
- local lookups = data.lookups
- if lookups then
- for _, lookup in next, lookups do
- local rules = lookup.rules
- if rules then
- for i=1,#rules do -- was next loop
- local rule = rules[i]
- local r = rule.before if r then rule.before = pack_normal(r) end
- local r = rule.after if r then rule.after = pack_normal(r) end
- local r = rule.current if r then rule.current = pack_normal(r) end
- end
- end
- end
- end
- local sequences = resources.sequences
- if sequences then
- for feature, sequence in next, sequences do
- sequence.features = pack_normal(sequence.features)
- end
- end
- if not success(2,pass) then
- -- return
- end
- end
-
- for pass=1,2 do
- local pack_normal, pack_indexed, pack_flat, pack_boolean, pack_mixed = packers(pass)
- for unicode, description in next, data.descriptions do
- local slookups = description.slookups
- if slookups then
- description.slookups = pack_normal(slookups)
- end
- local mlookups = description.mlookups
- if mlookups then
- description.mlookups = pack_normal(mlookups)
- end
- end
- end
-
- end
- end
-end
-
-local unpacked_mt = {
- __index =
- function(t,k)
- t[k] = false
- return k -- next time true
- end
-}
-
-function otf.unpackdata(data)
-
- if data then
- local tables = data.tables
- if tables then
- local resources = data.resources
- local lookuptypes = resources.lookuptypes
- local unpacked = { }
- setmetatable(unpacked,unpacked_mt)
- for unicode, description in next, data.descriptions do
- local tv = tables[description.boundingbox]
- if tv then
- description.boundingbox = tv
- end
- local slookups = description.slookups
- if slookups then
- local tv = tables[slookups]
- if tv then
- description.slookups = tv
- slookups = unpacked[tv]
- end
- if slookups then
- for tag, lookup in next, slookups do
- local what = lookuptypes[tag]
- if what == "pair" then
- local tv = tables[lookup[2]]
- if tv then
- lookup[2] = tv
- end
- local tv = tables[lookup[3]]
- if tv then
- lookup[3] = tv
- end
- elseif what ~= "substitution" then
- local tv = tables[lookup]
- if tv then
- slookups[tag] = tv
- end
- end
- end
- end
- end
- local mlookups = description.mlookups
- if mlookups then
- local tv = tables[mlookups]
- if tv then
- description.mlookups = tv
- mlookups = unpacked[tv]
- end
- if mlookups then
- for tag, list in next, mlookups do
- local tv = tables[list]
- if tv then
- mlookups[tag] = tv
- list = unpacked[tv]
- end
- if list then
- local what = lookuptypes[tag]
- if what == "pair" then
- for i=1,#list do
- local lookup = list[i]
- local tv = tables[lookup[2]]
- if tv then
- lookup[2] = tv
- end
- local tv = tables[lookup[3]]
- if tv then
- lookup[3] = tv
- end
- end
- elseif what ~= "substitution" then
- for i=1,#list do
- local tv = tables[list[i]]
- if tv then
- list[i] = tv
- end
- end
- end
- end
- end
- end
- end
- local kerns = description.kerns
- if kerns then
- local tm = tables[kerns]
- if tm then
- description.kerns = tm
- kerns = unpacked[tm]
- end
- if kerns then
- for k, kern in next, kerns do
- local tv = tables[kern]
- if tv then
- kerns[k] = tv
- end
- end
- end
- end
- local math = description.math
- if math then
- local kerns = math.kerns
- if kerns then
- local tm = tables[kerns]
- if tm then
- math.kerns = tm
- kerns = unpacked[tm]
- end
- if kerns then
- for k, kern in next, kerns do
- local tv = tables[kern]
- if tv then
- kerns[k] = tv
- end
- end
- end
- end
- end
- local anchors = description.anchors
- if anchors then
- local ta = tables[anchors]
- if ta then
- description.anchors = ta
- anchors = unpacked[ta]
- end
- if anchors then
- for tag, anchor in next, anchors do
- if tag == "baselig" then
- for _, list in next, anchor do
- for i=1,#list do
- local tv = tables[list[i]]
- if tv then
- list[i] = tv
- end
- end
- end
- else
- for a, data in next, anchor do
- local tv = tables[data]
- if tv then
- anchor[a] = tv
- end
- end
- end
- end
- end
- end
- local altuni = description.altuni
- if altuni then
- local altuni = tables[altuni]
- if altuni then
- description.altuni = altuni
- for i=1,#altuni do
- local tv = tables[altuni[i]]
- if tv then
- altuni[i] = tv
- end
- end
- end
- end
- end
- local lookups = data.lookups
- if lookups then
- for _, lookup in next, lookups do
- local rules = lookup.rules
- if rules then
- for i=1,#rules do -- was next loop
- local rule = rules[i]
- local before = rule.before
- if before then
- local tv = tables[before]
- if tv then
- rule.before = tv
- before = unpacked[tv]
- end
- if before then
- for i=1,#before do
- local tv = tables[before[i]]
- if tv then
- before[i] = tv
- end
- end
- end
- end
- local after = rule.after
- if after then
- local tv = tables[after]
- if tv then
- rule.after = tv
- after = unpacked[tv]
- end
- if after then
- for i=1,#after do
- local tv = tables[after[i]]
- if tv then
- after[i] = tv
- end
- end
- end
- end
- local current = rule.current
- if current then
- local tv = tables[current]
- if tv then
- rule.current = tv
- current = unpacked[tv]
- end
- if current then
- for i=1,#current do
- local tv = tables[current[i]]
- if tv then
- current[i] = tv
- end
- end
- end
- end
- local replacements = rule.replacements
- if replacements then
- local tv = tables[replacements]
- if tv then
- rule.replacements = tv
- end
- end
- -- local fore = rule.fore
- -- if fore then
- -- local tv = tables[fore]
- -- if tv then
- -- rule.fore = tv
- -- end
- -- end
- -- local back = rule.back
- -- if back then
- -- local tv = tables[back]
- -- if tv then
- -- rule.back = tv
- -- end
- -- end
- -- local names = rule.names
- -- if names then
- -- local tv = tables[names]
- -- if tv then
- -- rule.names = tv
- -- end
- -- end
- --
- local lookups = rule.lookups
- if lookups then
- local tv = tables[lookups]
- if tv then
- rule.lookups = tv
- end
- end
- end
- end
- end
- end
- local anchor_to_lookup = resources.anchor_to_lookup
- if anchor_to_lookup then
- for anchor, lookup in next, anchor_to_lookup do
- local tv = tables[lookup]
- if tv then
- anchor_to_lookup[anchor] = tv
- end
- end
- end
- local lookup_to_anchor = resources.lookup_to_anchor
- if lookup_to_anchor then
- for lookup, anchor in next, lookup_to_anchor do
- local tv = tables[anchor]
- if tv then
- lookup_to_anchor[lookup] = tv
- end
- end
- end
- local ls = resources.sequences
- if ls then
- for _, feature in next, ls do
- local flags = feature.flags
- if flags then
- local tv = tables[flags]
- if tv then
- feature.flags = tv
- end
- end
- local subtables = feature.subtables
- if subtables then
- local tv = tables[subtables]
- if tv then
- feature.subtables = tv
- end
- end
- local features = feature.features
- if features then
- local tv = tables[features]
- if tv then
- feature.features = tv
- features = unpacked[tv]
- end
- if features then
- for script, data in next, features do
- local tv = tables[data]
- if tv then
- features[script] = tv
- end
- end
- end
- end
- local order = feature.order
- if order then
- local tv = tables[order]
- if tv then
- feature.order = tv
- end
- end
- local markclass = feature.markclass
- if markclass then
- local tv = tables[markclass]
- if tv then
- feature.markclass = tv
- end
- end
- end
- end
- local lookups = resources.lookups
- if lookups then
- for _, lookup in next, lookups do
- local flags = lookup.flags
- if flags then
- local tv = tables[flags]
- if tv then
- lookup.flags = tv
- end
- end
- local subtables = lookup.subtables
- if subtables then
- local tv = tables[subtables]
- if tv then
- lookup.subtables = tv
- end
- end
- end
- end
- local features = resources.features
- if features then
- for _, what in next, glists do
- local feature = features[what]
- if feature then
- for tag, spec in next, feature do
- local tv = tables[spec]
- if tv then
- feature[tag] = tv
- end
- end
- end
- end
- end
- data.tables = nil
- end
- end
-end
diff --git a/tex/context/base/mkiv/font-otr.lua b/tex/context/base/mkiv/font-otr.lua
index 4f93c5579..5bac75052 100644
--- a/tex/context/base/mkiv/font-otr.lua
+++ b/tex/context/base/mkiv/font-otr.lua
@@ -65,11 +65,12 @@ if not modules then modules = { } end modules ['font-otr'] = {
-- require("char-ini")
-- end
-local next, type = next, type
+local next, type, tonumber = next, type, tonumber
local byte, lower, char, gsub = string.byte, string.lower, string.char, string.gsub
local floor, round = math.floor, math.round
local P, R, S, C, Cs, Cc, Ct, Carg, Cmt = lpeg.P, lpeg.R, lpeg.S, lpeg.C, lpeg.Cs, lpeg.Cc, lpeg.Ct, lpeg.Carg, lpeg.Cmt
local lpegmatch = lpeg.match
+local rshift = bit32.rshift
local setmetatableindex = table.setmetatableindex
local formatters = string.formatters
@@ -660,12 +661,10 @@ local widths = {
setmetatableindex(weights, function(t,k)
local r = floor((k + 50) / 100) * 100
local v = (r > 900 and "black") or rawget(t,r) or "normal"
--- print("weight:",k,r,v)
return v
end)
setmetatableindex(widths,function(t,k)
--- print("width:",k)
return "normal"
end)
@@ -1346,7 +1345,7 @@ formatreaders[4] = function(f,fontdata,offset)
offsets[i] = readushort(f)
end
-- format length language nofsegments searchrange entryselector rangeshift 4-tables
- local size = (length - 2 * 2 - 5 * 2 - 4 * nofsegments * 2) / 2
+ local size = (length - 2 * 2 - 5 * 2 - 4 * 2 * nofsegments) / 2
for i=1,size-1 do
indices[i] = readushort(f)
end
@@ -1514,7 +1513,7 @@ end
formatreaders[13] = function(f,fontdata,offset)
--
- -- this fector is only used for simple fallback fonts
+ -- this vector is only used for simple fallback fonts
--
setposition(f,offset+2+2+4+4) -- skip format reserved length language
local mapping = fontdata.mapping
@@ -1811,7 +1810,7 @@ function readers.kern(f,fontdata,specification)
local length = readushort(f)
local coverage = readushort(f)
-- bit 8-15 of coverage: format 0 or 2
- local format = bit32.rshift(coverage,8) -- is this ok?
+ local format = rshift(coverage,8) -- is this ok?
if format == 0 then
local nofpairs = readushort(f)
local searchrange = readushort(f)
@@ -2130,9 +2129,9 @@ local function readdata(f,offset,specification)
if factors then
specification.factors = factors
fontdata.factors = factors
- report("factors: % t",factors)
- else
- report("bad factors")
+ -- report("factors: % t",factors)
+ -- else
+ -- report("bad factors")
end
else
-- report("unknown instance")
diff --git a/tex/context/base/mkiv/font-ots.lua b/tex/context/base/mkiv/font-ots.lua
index 04ecaa923..3e338f614 100644
--- a/tex/context/base/mkiv/font-ots.lua
+++ b/tex/context/base/mkiv/font-ots.lua
@@ -108,45 +108,59 @@ mechanisms. Both put some constraints on the code here.
-- Remark: We can provide a fast loop when there are no disc nodes (tests show a 1%
-- gain). Smaller functions might perform better cache-wise. But ... memory becomes
-- faster anyway, so ...
+--
+-- Remark: Some optimizations made sense for 5.2 but seem less important for 5.3;
+-- they served their purpose anyway.
+--
+-- Todo: just (0=l2r and 1=r2l) or maybe (r2l = true)
local type, next, tonumber = type, next, tonumber
local random = math.random
local formatters = string.formatters
local insert = table.insert
-local registertracker = trackers.register
-
-local logs = logs
-local trackers = trackers
-local nodes = nodes
-local attributes = attributes
-local fonts = fonts
-
-local otf = fonts.handlers.otf
-local tracers = nodes.tracers
-
-local trace_singles = false registertracker("otf.singles", function(v) trace_singles = v end)
-local trace_multiples = false registertracker("otf.multiples", function(v) trace_multiples = v end)
-local trace_alternatives = false registertracker("otf.alternatives", function(v) trace_alternatives = v end)
-local trace_ligatures = false registertracker("otf.ligatures", function(v) trace_ligatures = v end)
-local trace_contexts = false registertracker("otf.contexts", function(v) trace_contexts = v end)
-local trace_marks = false registertracker("otf.marks", function(v) trace_marks = v end)
-local trace_kerns = false registertracker("otf.kerns", function(v) trace_kerns = v end)
-local trace_cursive = false registertracker("otf.cursive", function(v) trace_cursive = v end)
-local trace_preparing = false registertracker("otf.preparing", function(v) trace_preparing = v end)
-local trace_bugs = false registertracker("otf.bugs", function(v) trace_bugs = v end)
-local trace_details = false registertracker("otf.details", function(v) trace_details = v end)
-local trace_steps = false registertracker("otf.steps", function(v) trace_steps = v end)
-local trace_skips = false registertracker("otf.skips", function(v) trace_skips = v end)
-local trace_directions = false registertracker("otf.directions", function(v) trace_directions = v end)
-local trace_plugins = false registertracker("otf.plugins", function(v) trace_plugins = v end)
-
-local trace_kernruns = false registertracker("otf.kernruns", function(v) trace_kernruns = v end)
-local trace_discruns = false registertracker("otf.discruns", function(v) trace_discruns = v end)
-local trace_compruns = false registertracker("otf.compruns", function(v) trace_compruns = v end)
-local trace_testruns = false registertracker("otf.testruns", function(v) trace_testruns = v end)
-
-local optimizekerns = true
+local registertracker = trackers.register
+
+local logs = logs
+local trackers = trackers
+local nodes = nodes
+local attributes = attributes
+local fonts = fonts
+
+local otf = fonts.handlers.otf
+local tracers = nodes.tracers
+
+local trace_singles = false registertracker("otf.singles", function(v) trace_singles = v end)
+local trace_multiples = false registertracker("otf.multiples", function(v) trace_multiples = v end)
+local trace_alternatives = false registertracker("otf.alternatives", function(v) trace_alternatives = v end)
+local trace_ligatures = false registertracker("otf.ligatures", function(v) trace_ligatures = v end)
+local trace_contexts = false registertracker("otf.contexts", function(v) trace_contexts = v end)
+local trace_marks = false registertracker("otf.marks", function(v) trace_marks = v end)
+local trace_kerns = false registertracker("otf.kerns", function(v) trace_kerns = v end)
+local trace_cursive = false registertracker("otf.cursive", function(v) trace_cursive = v end)
+local trace_preparing = false registertracker("otf.preparing", function(v) trace_preparing = v end)
+local trace_bugs = false registertracker("otf.bugs", function(v) trace_bugs = v end)
+local trace_details = false registertracker("otf.details", function(v) trace_details = v end)
+local trace_steps = false registertracker("otf.steps", function(v) trace_steps = v end)
+local trace_skips = false registertracker("otf.skips", function(v) trace_skips = v end)
+local trace_plugins = false registertracker("otf.plugins", function(v) trace_plugins = v end)
+local trace_chains = false registertracker("otf.chains", function(v) trace_chains = v end)
+
+local trace_kernruns = false registertracker("otf.kernruns", function(v) trace_kernruns = v end)
+----- trace_discruns = false registertracker("otf.discruns", function(v) trace_discruns = v end)
+local trace_compruns = false registertracker("otf.compruns", function(v) trace_compruns = v end)
+local trace_testruns = false registertracker("otf.testruns", function(v) trace_testruns = v end)
+
+local forcediscretionaries = false
+local forcepairadvance = false -- for testing
+
+directives.register("otf.forcediscretionaries",function(v)
+ forcediscretionaries = v
+end)
+
+directives.register("otf.forcepairadvance",function(v)
+ forcepairadvance = v
+end)
local report_direct = logs.reporter("fonts","otf direct")
local report_subchain = logs.reporter("fonts","otf subchain")
@@ -155,18 +169,17 @@ local report_process = logs.reporter("fonts","otf process")
local report_warning = logs.reporter("fonts","otf warning")
local report_run = logs.reporter("fonts","otf run")
-registertracker("otf.replacements", "otf.singles,otf.multiples,otf.alternatives,otf.ligatures")
-registertracker("otf.positions","otf.marks,otf.kerns,otf.cursive")
-registertracker("otf.actions","otf.replacements,otf.positions")
-registertracker("otf.injections","nodes.injections")
-registertracker("otf.sample","otf.steps,otf.actions,otf.analyzing")
+registertracker("otf.substitutions", "otf.singles","otf.multiples","otf.alternatives","otf.ligatures")
+registertracker("otf.positions", "otf.marks","otf.kerns","otf.cursive")
+registertracker("otf.actions", "otf.substitutions","otf.positions")
+registertracker("otf.sample", "otf.steps","otf.substitutions","otf.positions","otf.analyzing")
+registertracker("otf.sample.silent", "otf.steps=silent","otf.substitutions","otf.positions","otf.analyzing")
local nuts = nodes.nuts
local tonode = nuts.tonode
local tonut = nuts.tonut
local getfield = nuts.getfield
-local setfield = nuts.setfield
local getnext = nuts.getnext
local setnext = nuts.setnext
local getprev = nuts.getprev
@@ -192,22 +205,25 @@ local getdir = nuts.getdir
local getwidth = nuts.getwidth
local ischar = nuts.is_char
+local usesfont = nuts.uses_font
local insert_node_after = nuts.insert_after
local copy_node = nuts.copy
local copy_node_list = nuts.copy_list
+local remove_node = nuts.remove
local find_node_tail = nuts.tail
local flush_node_list = nuts.flush_list
local flush_node = nuts.flush_node
local end_of_math = nuts.end_of_math
local traverse_nodes = nuts.traverse
-local traverse_id = nuts.traverse_id
+----- traverse_id = nuts.traverse_id
local set_components = nuts.set_components
local take_components = nuts.take_components
local count_components = nuts.count_components
local copy_no_components = nuts.copy_no_components
local copy_only_glyphs = nuts.copy_only_glyphs
+local setmetatable = setmetatable
local setmetatableindex = table.setmetatableindex
----- zwnj = 0x200C
@@ -234,7 +250,8 @@ local injections = nodes.injections
local setmark = injections.setmark
local setcursive = injections.setcursive
local setkern = injections.setkern
-local setpair = injections.setpair
+local setmove = injections.setmove
+local setposition = injections.setposition
local resetinjection = injections.reset
local copyinjection = injections.copy
local setligaindex = injections.setligaindex
@@ -265,14 +282,15 @@ local factor = 0
local threshold = 0
local checkmarks = false
+local discs = false
+local spaces = false
+
local sweepnode = nil
-local sweepprev = nil
-local sweepnext = nil
-local sweephead = { }
+local sweephead = { } -- we don't nil entries but false them (no collection and such)
-local notmatchpre = { }
-local notmatchpost = { }
-local notmatchreplace = { }
+local notmatchpre = { } -- to be checked: can we use false instead of nil / what if a == b tests
+local notmatchpost = { } -- to be checked: can we use false instead of nil / what if a == b tests
+local notmatchreplace = { } -- to be checked: can we use false instead of nil / what if a == b tests
local handlers = { }
@@ -283,16 +301,19 @@ local checkstep = (tracers and tracers.steppers.check) or function()
local registerstep = (tracers and tracers.steppers.register) or function() end
local registermessage = (tracers and tracers.steppers.message) or function() end
-local function checkdisccontent(d)
- local pre, post, replace = getdisc(d)
- if pre then for n in traverse_id(glue_code,pre) do print("pre",nodes.idstostring(pre)) break end end
- if post then for n in traverse_id(glue_code,post) do print("pos",nodes.idstostring(post)) break end end
- if replace then for n in traverse_id(glue_code,replace) do print("rep",nodes.idstostring(replace)) break end end
-end
+-- local function checkdisccontent(d)
+-- local pre, post, replace = getdisc(d)
+-- if pre then for n in traverse_id(glue_code,pre) do report("pre: %s",nodes.idstostring(pre)) break end end
+-- if post then for n in traverse_id(glue_code,post) do report("pos: %s",nodes.idstostring(post)) break end end
+-- if replace then for n in traverse_id(glue_code,replace) do report("rep: %s",nodes.idstostring(replace)) break end end
+-- end
local function logprocess(...)
if trace_steps then
registermessage(...)
+ if trace_steps == "silent" then
+ return
+ end
end
report_direct(...)
end
@@ -301,56 +322,67 @@ local function logwarning(...)
report_direct(...)
end
-local f_unicode = formatters["%U"]
-local f_uniname = formatters["%U (%s)"]
-local f_unilist = formatters["% t (% t)"]
+local gref do
-local function gref(n) -- currently the same as in font-otb
- if type(n) == "number" then
- local description = descriptions[n]
- local name = description and description.name
- if name then
- return f_uniname(n,name)
- else
- return f_unicode(n)
- end
- elseif n then
- local num, nam = { }, { }
- for i=1,#n do
- local ni = n[i]
- if tonumber(ni) then -- later we will start at 2
- local di = descriptions[ni]
- num[i] = f_unicode(ni)
- nam[i] = di and di.name or "-"
+ local f_unicode = formatters["U+%X"] -- was ["%U"]
+ local f_uniname = formatters["U+%X (%s)"] -- was ["%U (%s)"]
+ local f_unilist = formatters["% t"]
+
+ gref = function(n) -- currently the same as in font-otb
+ if type(n) == "number" then
+ local description = descriptions[n]
+ local name = description and description.name
+ if name then
+ return f_uniname(n,name)
+ else
+ return f_unicode(n)
+ end
+ elseif n then
+ local t = { }
+ for i=1,#n do
+ local ni = n[i]
+ if tonumber(ni) then -- later we will start at 2
+ local di = descriptions[ni]
+ local nn = di and di.name
+ if nn then
+ t[#t+1] = f_uniname(ni,nn)
+ else
+ t[#t+1] = f_unicode(ni)
+ end
+ end
end
+ return f_unilist(t)
+ else
+ return ""
end
- return f_unilist(num,nam)
- else
- return ""
end
+
end
local function cref(dataset,sequence,index)
if not dataset then
return "no valid dataset"
- elseif index then
- return formatters["feature %a, type %a, chain lookup %a, index %a"](dataset[4],sequence.type,sequence.name,index)
+ end
+ local merged = sequence.merged and "merged " or ""
+ if index then
+ return formatters["feature %a, type %a, %schain lookup %a, index %a"](
+ dataset[4],sequence.type,merged,sequence.name,index)
else
- return formatters["feature %a, type %a, chain lookup %a"](dataset[4],sequence.type,sequence.name)
+ return formatters["feature %a, type %a, %schain lookup %a"](
+ dataset[4],sequence.type,merged,sequence.name)
end
end
local function pref(dataset,sequence)
- return formatters["feature %a, type %a, lookup %a"](dataset[4],sequence.type,sequence.name)
+ return formatters["feature %a, type %a, %slookup %a"](
+ dataset[4],sequence.type,sequence.merged and "merged " or "",sequence.name)
end
local function mref(rlmode)
- if not rlmode or rlmode == 0 then
- return "---"
- elseif rlmode == -1 or rlmode == "+TRT" then
- return "r2l"
- else
+ if not rlmode or rlmode >= 0 then
return "l2r"
+ else
+ return "r2l"
end
end
@@ -391,7 +423,7 @@ local function flattendisk(head,disc)
elseif next then
return next, next
else
- return -- maybe warning
+ -- return -- maybe warning
end
else
if replace then
@@ -469,124 +501,15 @@ local function markstoligature(head,start,stop,char)
end
end
--- local function toligature(head,start,stop,char,dataset,sequence,markflag,discfound) -- brr head
--- if getattr(start,a_noligature) == 1 then
--- -- so we can do: e\noligature{ff}e e\noligature{f}fie (we only look at the first)
--- return head, start
--- end
--- if start == stop and getchar(start) == char then
--- resetinjection(start)
--- setchar(start,char)
--- return head, start
--- end
--- local prev = getprev(start)
--- local next = getnext(stop)
--- local comp = start
--- setprev(start)
--- setnext(stop)
--- local base = copy_no_components(start,copyinjection)
--- if start == head then
--- head = base
--- end
--- resetinjection(base)
--- setchar(base,char)
--- setsubtype(base,ligature_code)
--- set_components(base,comp)
--- setlink(prev,base,next)
--- if not discfound then
--- local deletemarks = markflag ~= "mark"
--- local components = start
--- local baseindex = 0
--- local componentindex = 0
--- local head = base
--- local current = base
--- -- first we loop over the glyphs in start .. stop
--- while start do
--- local char = getchar(start)
--- if not marks[char] then
--- baseindex = baseindex + componentindex
--- componentindex = getcomponentindex(start,marks)
--- elseif not deletemarks then -- quite fishy
--- setligaindex(start,baseindex + getligaindex(start,componentindex))
--- if trace_marks then
--- logwarning("%s: keep mark %s, gets index %s",pref(dataset,sequence),gref(char),getligaindex(start))
--- end
--- local n = copy_node(start)
--- copyinjection(n,start)
--- head, current = insert_node_after(head,current,n) -- unlikely that mark has components
--- elseif trace_marks then
--- logwarning("%s: delete mark %s",pref(dataset,sequence),gref(char))
--- end
--- start = getnext(start)
--- end
--- -- we can have one accent as part of a lookup and another following
--- -- local start = components -- was wrong (component scanning was introduced when more complex ligs in devanagari was added)
--- local start = getnext(current)
--- while start do
--- local char = ischar(start)
--- if char then
--- if marks[char] then
--- setligaindex(start,baseindex + getligaindex(start,componentindex))
--- if trace_marks then
--- logwarning("%s: set mark %s, gets index %s",pref(dataset,sequence),gref(char),getligaindex(start))
--- end
--- start = getnext(start)
--- else
--- break
--- end
--- else
--- break
--- end
--- end
--- else
--- -- discfound ... forget about marks .. probably no scripts that hyphenate and have marks
--- local discprev, discnext = getboth(discfound)
--- if discprev and discnext then
--- -- we assume normalization in context, and don't care about generic ... especially
--- -- \- can give problems as there we can have a negative char but that won't match
--- -- anyway
--- local pre, post, replace, pretail, posttail, replacetail = getdisc(discfound,true)
--- if not replace then -- todo: signal simple hyphen
--- local prev = getprev(base)
--- local current = comp
--- local previous = nil
--- local copied = nil
--- while current do
--- if getid(current) == glyph_code then
--- local n = copy_node(current)
--- if copied then
--- setlink(previous,n)
--- else
--- copied = n
--- end
--- previous = n
--- end
--- current = getnext(current)
--- end
--- setprev(discnext) -- also blocks funny assignments
--- setnext(discprev) -- also blocks funny assignments
--- if pre then
--- setlink(discprev,pre)
--- end
--- pre = comp
--- if post then
--- setlink(posttail,discnext)
--- setprev(post)
--- else
--- post = discnext
--- end
--- setlink(prev,discfound,next)
--- setboth(base)
--- set_components(base,copied)
--- setdisc(discfound,pre,post,base) -- was discretionary_code
--- base = prev -- restart
--- end
--- end
--- end
--- return head, base
--- end
+-- Remark for Kai: (some arabic fonts do mark + mark = other mark and such)
+--
+-- The hasmarks is needed for ligatures of marks that are part of a ligature in
+-- which case we assume that we can delete the marks anyway (we can always become
+-- more clever if needed) .. in fact the whole logic here should be redone. We're
+-- in the not discfound branch then. We now have skiphash too so we can be more
+-- selective if needed (todo).
-local function toligature(head,start,stop,char,dataset,sequence,markflag,discfound) -- brr head
+local function toligature(head,start,stop,char,dataset,sequence,skiphash,discfound,hasmarks) -- brr head
if getattr(start,a_noligature) == 1 then
-- so we can do: e\noligature{ff}e e\noligature{f}fie (we only look at the first)
return head, start
@@ -611,25 +534,28 @@ local function toligature(head,start,stop,char,dataset,sequence,markflag,discfou
set_components(base,comp)
setlink(prev,base,next)
if not discfound then
- local deletemarks = markflag ~= "mark"
+ local deletemarks = not skiphash or hasmarks
local components = start
local baseindex = 0
local componentindex = 0
local head = base
local current = base
- -- first we loop over the glyphs in start .. stop
+ -- first we loop over the glyphs in start ... stop
while start do
local char = getchar(start)
if not marks[char] then
baseindex = baseindex + componentindex
componentindex = count_components(start,marks)
- elseif not deletemarks then -- quite fishy
+ -- we can be more clever here: "not deletemarks or (skiphash and not skiphash[char])"
+ -- and such:
+ elseif not deletemarks then
+ -- we can get a loop when the font expects otherwise (i.e. unexpected deletemarks)
setligaindex(start,baseindex + getligaindex(start,componentindex))
if trace_marks then
logwarning("%s: keep mark %s, gets index %s",pref(dataset,sequence),gref(char),getligaindex(start))
end
local n = copy_node(start)
- copyinjection(n,start)
+ copyinjection(n,start) -- is this ok ? we position later anyway
head, current = insert_node_after(head,current,n) -- unlikely that mark has components
elseif trace_marks then
logwarning("%s: delete mark %s",pref(dataset,sequence),gref(char))
@@ -641,6 +567,7 @@ local function toligature(head,start,stop,char,dataset,sequence,markflag,discfou
while start do
local char = ischar(start)
if char then
+ -- also something skiphash here?
if marks[char] then
setligaindex(start,baseindex + getligaindex(start,componentindex))
if trace_marks then
@@ -684,7 +611,11 @@ local function toligature(head,start,stop,char,dataset,sequence,markflag,discfou
-- here components have a pointer so we can't free it!
set_components(base,copied)
replace = base
- setdisc(discfound,pre,post,replace) -- was discretionary_code
+ if forcediscretionaries then
+ setdisc(discfound,pre,post,replace,discretionary_code)
+ else
+ setdisc(discfound,pre,post,replace)
+ end
base = prev
end
end
@@ -692,7 +623,7 @@ local function toligature(head,start,stop,char,dataset,sequence,markflag,discfou
return head, base
end
-local function multiple_glyphs(head,start,multiple,ignoremarks,what)
+local function multiple_glyphs(head,start,multiple,skiphash,what) -- what to do with skiphash matches here
local nofmultiples = #multiple
if nofmultiples > 0 then
resetinjection(start)
@@ -804,21 +735,25 @@ function handlers.gsub_alternate(head,start,dataset,sequence,alternative)
return head, start, true
end
-function handlers.gsub_multiple(head,start,dataset,sequence,multiple)
+function handlers.gsub_multiple(head,start,dataset,sequence,multiple,rlmode,skiphash)
if trace_multiples then
logprocess("%s: replacing %s by multiple %s",pref(dataset,sequence),gref(getchar(start)),gref(multiple))
end
- return multiple_glyphs(head,start,multiple,sequence.flags[1],dataset[1])
+ return multiple_glyphs(head,start,multiple,skiphash,dataset[1])
end
-function handlers.gsub_ligature(head,start,dataset,sequence,ligature)
+-- Don't we deal with disc otherwise now? I need to check if the next one can be
+-- simplified. Anyway, it can be way messier: marks that get removed as well as
+-- marks that are kept.
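
For orientation: the ligature argument passed to this handler is a tree keyed by follow-up characters, and (assuming the terminal entry sits in a ligature field, as the rest of the handler expects) a made-up tree for f / ff / ffi would look like this:

-- tree as received for a start character "f" (0x66); ligature[char] descends
-- one level per matched glyph, a .ligature entry completes a match
local ligature = {
    [0x66] = {                 -- a second "f"
        ligature = 0xFB00,     -- "ff"
        [0x69] = {             -- then an "i"
            ligature = 0xFB03, -- "ffi"
        },
    },
}
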
+
+function handlers.gsub_ligature(head,start,dataset,sequence,ligature,rlmode,skiphash)
local current = getnext(start)
if not current then
return head, start, false, nil
end
local stop = nil
local startchar = getchar(start)
- if marks[startchar] then
+ if skiphash and skiphash[startchar] then
while current do
local char = ischar(current,currentfont)
if char then
@@ -849,15 +784,16 @@ function handlers.gsub_ligature(head,start,dataset,sequence,ligature)
-- ok, goto next lookup
end
end
- else
- local skipmark = sequence.flags[1]
+ else -- is the check for disc still valid here ? and why only replace then
local discfound = false
local lastdisc = nil
+ local hasmarks = marks[startchar]
while current do
local char, id = ischar(current,currentfont)
if char then
- if skipmark and marks[char] then
+ if skiphash and skiphash[char] then
current = getnext(current)
+ -- if stop then stop = current end -- ?
else -- ligature is a tree
local lg = ligature[char] -- can there be multiple in a row? maybe in a bad font
if lg then
@@ -865,6 +801,9 @@ function handlers.gsub_ligature(head,start,dataset,sequence,ligature)
discfound = lastdisc
lastdisc = nil
end
+ if marks[char] then
+ hasmarks = true
+ end
stop = current -- needed for fake so outside then
ligature = lg
current = getnext(current)
@@ -876,8 +815,20 @@ function handlers.gsub_ligature(head,start,dataset,sequence,ligature)
-- kind of weird
break
elseif id == disc_code then
- -- tricky .. we also need to do pre here
- local replace = getfield(current,"replace")
+ --
+ -- Kai: see chainprocs, we probably could do the same here or was there a reason
+ -- why we kept the replace check here.
+ --
+ -- if not discfound then
+ -- discfound = current
+ -- end
+ -- if current == stop then
+ -- break -- okay? or before the disc
+ -- else
+ -- current = getnext(current)
+ -- end
+ --
+ local replace = getfield(current,"replace") -- hm: pre and post
if replace then
-- of{f-}{}{f}e o{f-}{}{f}fe o{-}{}{ff}e (oe and ff ligature)
-- we can end up here when we have a start run .. testruns start at a disc but
@@ -887,6 +838,9 @@ function handlers.gsub_ligature(head,start,dataset,sequence,ligature)
if char then
local lg = ligature[char] -- can there be multiple in a row? maybe in a bad font
if lg then
+ if marks[char] then
+ hasmarks = true -- very unlikely
+ end
ligature = lg
replace = getnext(replace)
else
@@ -909,10 +863,10 @@ function handlers.gsub_ligature(head,start,dataset,sequence,ligature)
if stop then
if trace_ligatures then
local stopchar = getchar(stop)
- head, start = toligature(head,start,stop,lig,dataset,sequence,skipmark,discfound)
+ head, start = toligature(head,start,stop,lig,dataset,sequence,skiphash,discfound,hasmarks)
logprocess("%s: replacing %s upto %s by ligature %s case 2",pref(dataset,sequence),gref(startchar),gref(stopchar),gref(lig))
else
- head, start = toligature(head,start,stop,lig,dataset,sequence,skipmark,discfound)
+ head, start = toligature(head,start,stop,lig,dataset,sequence,skiphash,discfound,hasmarks)
end
else
-- weird but happens (in some arabic font)
@@ -930,24 +884,24 @@ function handlers.gsub_ligature(head,start,dataset,sequence,ligature)
return head, start, false, discfound
end
-function handlers.gpos_single(head,start,dataset,sequence,kerns,rlmode,step,i,injection)
+function handlers.gpos_single(head,start,dataset,sequence,kerns,rlmode,skiphash,step,injection)
local startchar = getchar(start)
- if step.format == "pair" then
- local dx, dy, w, h = setpair(start,factor,rlmode,sequence.flags[4],kerns,injection)
+ local format = step.format
+ if format == "single" or type(kerns) == "table" then -- the table check can go
+ local dx, dy, w, h = setposition(0,start,factor,rlmode,kerns,injection)
if trace_kerns then
- logprocess("%s: shifting single %s by (%p,%p) and correction (%p,%p)",pref(dataset,sequence),gref(startchar),dx,dy,w,h)
+ logprocess("%s: shifting single %s by %s xy (%p,%p) and wh (%p,%p)",pref(dataset,sequence),gref(startchar),format,dx,dy,w,h)
end
else
- -- needs checking .. maybe no kerns format for single
- local k = setkern(start,factor,rlmode,kerns,injection)
+ local k = (format == "move" and setmove or setkern)(start,factor,rlmode,kerns,injection)
if trace_kerns then
- logprocess("%s: shifting single %s by %p",pref(dataset,sequence),gref(startchar),k)
+ logprocess("%s: shifting single %s by %s %p",pref(dataset,sequence),gref(startchar),format,k)
end
end
- return head, start, false
+ return head, start, true
end
-function handlers.gpos_pair(head,start,dataset,sequence,kerns,rlmode,step,i,injection)
+function handlers.gpos_pair(head,start,dataset,sequence,kerns,rlmode,skiphash,step,injection)
local snext = getnext(start)
if not snext then
return head, start, false
@@ -956,48 +910,49 @@ function handlers.gpos_pair(head,start,dataset,sequence,kerns,rlmode,step,i,inje
while snext do
local nextchar = ischar(snext,currentfont)
if nextchar then
- local krn = kerns[nextchar]
- if not krn and marks[nextchar] then
- prev = snext
+ if skiphash and skiphash[nextchar] then -- includes marks too when flag
+ prev = snext
snext = getnext(snext)
- elseif not krn then
- break
- elseif step.format == "pair" then
- local a, b = krn[1], krn[2]
- if optimizekerns then
- -- this permits a mixed table, but we could also decide to optimize this
- -- in the loader and use format 'kern'
- if not b and a[1] == 0 and a[2] == 0 and a[4] == 0 then
- local k = setkern(snext,factor,rlmode,a[3],injection)
+ else
+ local krn = kerns[nextchar]
+ if not krn then
+ break
+ end
+ local format = step.format
+ if format == "pair" then
+ local a, b = krn[1], krn[2]
+ if a == true then
+ -- zero
+ elseif a then -- #a > 0
+ local x, y, w, h = setposition(1,start,factor,rlmode,a,injection)
if trace_kerns then
- logprocess("%s: shifting single %s by %p",pref(dataset,sequence),gref(nextchar),k)
+ local startchar = getchar(start)
+ logprocess("%s: shifting first of pair %s and %s by xy (%p,%p) and wh (%p,%p) as %s",pref(dataset,sequence),gref(startchar),gref(nextchar),x,y,w,h,injection or "injections")
end
- return head, start, true
end
- end
- if a and #a > 0 then
- local x, y, w, h = setpair(start,factor,rlmode,sequence.flags[4],a,injection)
- if trace_kerns then
- local startchar = getchar(start)
- logprocess("%s: shifting first of pair %s and %s by (%p,%p) and correction (%p,%p) as %s",pref(dataset,sequence),gref(startchar),gref(nextchar),x,y,w,h,injection or "injections")
+ if b == true then
+ -- zero
+ start = snext -- cf spec
+ elseif b then -- #b > 0
+ local x, y, w, h = setposition(2,snext,factor,rlmode,b,injection)
+ if trace_kerns then
+ local startchar = getchar(snext)
+ logprocess("%s: shifting second of pair %s and %s by xy (%p,%p) and wh (%p,%p) as %s",pref(dataset,sequence),gref(startchar),gref(nextchar),x,y,w,h,injection or "injections")
+ end
+ start = snext -- cf spec
+ elseif forcepairadvance then
+ start = snext -- for testing, not cf spec
end
- end
- if b and #b > 0 then
- local x, y, w, h = setpair(snext,factor,rlmode,sequence.flags[4],b,injection)
+ return head, start, true
+ elseif krn ~= 0 then
+ local k = (format == "move" and setmove or setkern)(snext,factor,rlmode,krn,injection)
if trace_kerns then
- local startchar = getchar(snext)
- logprocess("%s: shifting second of pair %s and %s by (%p,%p) and correction (%p,%p) as %s",pref(dataset,sequence),gref(startchar),gref(nextchar),x,y,w,h,injection or "injections")
+ logprocess("%s: inserting %s %p between %s and %s as %s",pref(dataset,sequence),format,k,gref(getchar(prev)),gref(nextchar),injection or "injections")
end
+ return head, start, true
+ else -- can't happen
+ break
end
- return head, start, true
- elseif krn ~= 0 then
- local k = setkern(snext,factor,rlmode,krn,injection)
- if trace_kerns then
- logprocess("%s: inserting kern %p between %s and %s as %s",pref(dataset,sequence),k,gref(getchar(prev)),gref(nextchar),injection or "injections")
- end
- return head, start, true
- else -- can't happen
- break
end
else
break
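
A compact picture of the kerns data these branches consume (inferred from the checks above only; the exact layout of the value arrays is the loader's business):

-- hypothetical entries: a "pair" step versus a "kern"/"move" step
local kerns = {
    [0x41] = -80,                        -- kern or move: just a number
    [0x56] = { { 0, 0, -60, 0 }, true }, -- pair: only the first glyph is adjusted
    [0x57] = { true, { 0, 0, 40, 0 } },  -- pair: only the second glyph is adjusted
}
-- true means "nothing to adjust on this side", an array is handed to setposition
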
@@ -1012,7 +967,7 @@ end
we need to explicitly test for basechar, baselig and basemark entries.
--ldx]]--
-function handlers.gpos_mark2base(head,start,dataset,sequence,markanchors,rlmode)
+function handlers.gpos_mark2base(head,start,dataset,sequence,markanchors,rlmode,skiphash)
local markchar = getchar(start)
if marks[markchar] then
local base = getprev(start) -- [glyph] [start=mark]
@@ -1047,10 +1002,13 @@ function handlers.gpos_mark2base(head,start,dataset,sequence,markanchors,rlmode)
local ma = markanchors[2]
local dx, dy, bound = setmark(start,base,factor,rlmode,ba,ma,characters[basechar],false,checkmarks)
if trace_marks then
- logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%p,%p)",
- pref(dataset,sequence),anchor,bound,gref(markchar),gref(basechar),dx,dy)
+ logprocess("%s, bound %s, anchoring mark %s to basechar %s => (%p,%p)",
+ pref(dataset,sequence),bound,gref(markchar),gref(basechar),dx,dy)
end
return head, start, true
+ elseif trace_bugs then
+ -- onetimemessage(currentfont,basechar,"no base anchors",report_fonts)
+ logwarning("%s: mark %s is not anchored to %s",pref(dataset,sequence),gref(markchar),gref(basechar))
end
elseif trace_bugs then
logwarning("%s: nothing preceding, case %i",pref(dataset,sequence),1)
@@ -1064,7 +1022,7 @@ function handlers.gpos_mark2base(head,start,dataset,sequence,markanchors,rlmode)
return head, start, false
end
-function handlers.gpos_mark2ligature(head,start,dataset,sequence,markanchors,rlmode)
+function handlers.gpos_mark2ligature(head,start,dataset,sequence,markanchors,rlmode,skiphash)
local markchar = getchar(start)
if marks[markchar] then
local base = getprev(start) -- [glyph] [optional marks] [start=mark]
@@ -1103,8 +1061,8 @@ function handlers.gpos_mark2ligature(head,start,dataset,sequence,markanchors,rlm
if ba then
local dx, dy, bound = setmark(start,base,factor,rlmode,ba,ma,characters[basechar],false,checkmarks)
if trace_marks then
- logprocess("%s, anchor %s, index %s, bound %s: anchoring mark %s to baselig %s at index %s => (%p,%p)",
- pref(dataset,sequence),anchor,index,bound,gref(markchar),gref(basechar),index,dx,dy)
+ logprocess("%s, index %s, bound %s, anchoring mark %s to baselig %s at index %s => (%p,%p)",
+ pref(dataset,sequence),index,bound,gref(markchar),gref(basechar),index,dx,dy)
end
return head, start, true
else
@@ -1129,7 +1087,7 @@ function handlers.gpos_mark2ligature(head,start,dataset,sequence,markanchors,rlm
return head, start, false
end
-function handlers.gpos_mark2mark(head,start,dataset,sequence,markanchors,rlmode)
+function handlers.gpos_mark2mark(head,start,dataset,sequence,markanchors,rlmode,skiphash)
local markchar = getchar(start)
if marks[markchar] then
local base = getprev(start) -- [glyph] [basemark] [start=mark]
@@ -1152,8 +1110,8 @@ function handlers.gpos_mark2mark(head,start,dataset,sequence,markanchors,rlmode)
local ma = markanchors[2]
local dx, dy, bound = setmark(start,base,factor,rlmode,ba,ma,characters[basechar],true,checkmarks)
if trace_marks then
- logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%p,%p)",
- pref(dataset,sequence),anchor,bound,gref(markchar),gref(basechar),dx,dy)
+ logprocess("%s, bound %s, anchoring mark %s to basemark %s => (%p,%p)",
+ pref(dataset,sequence),bound,gref(markchar),gref(basechar),dx,dy)
end
return head, start, true
end
@@ -1165,7 +1123,7 @@ function handlers.gpos_mark2mark(head,start,dataset,sequence,markanchors,rlmode)
return head, start, false
end
-function handlers.gpos_cursive(head,start,dataset,sequence,exitanchors,rlmode,step,i) -- to be checked
+function handlers.gpos_cursive(head,start,dataset,sequence,exitanchors,rlmode,skiphash,step) -- to be checked
local startchar = getchar(start)
if marks[startchar] then
if trace_cursive then
@@ -1177,8 +1135,7 @@ function handlers.gpos_cursive(head,start,dataset,sequence,exitanchors,rlmode,st
local nextchar = ischar(nxt,currentfont)
if not nextchar then
break
- elseif marks[nextchar] then
- -- should not happen (maybe warning)
+ elseif marks[nextchar] then -- always sequence.flags[1]
nxt = getnext(nxt)
else
local exit = exitanchors[3]
@@ -1187,9 +1144,10 @@ function handlers.gpos_cursive(head,start,dataset,sequence,exitanchors,rlmode,st
if entry then
entry = entry[2]
if entry then
- local dx, dy, bound = setcursive(start,nxt,factor,rlmode,exit,entry,characters[startchar],characters[nextchar])
+ local r2lflag = sequence.flags[4] -- mentioned in the standard
+ local dx, dy, bound = setcursive(start,nxt,factor,rlmode,exit,entry,characters[startchar],characters[nextchar],r2lflag)
if trace_cursive then
- logprocess("%s: moving %s to %s cursive (%p,%p) using anchor %s and bound %s in %s mode",pref(dataset,sequence),gref(startchar),gref(nextchar),dx,dy,anchor,bound,mref(rlmode))
+ logprocess("%s: moving %s to %s cursive (%p,%p) using bound %s in %s mode",pref(dataset,sequence),gref(startchar),gref(nextchar),dx,dy,bound,mref(rlmode))
end
return head, start, true
end
@@ -1212,6 +1170,9 @@ local chainprocs = { }
local function logprocess(...)
if trace_steps then
registermessage(...)
+ if trace_steps == "silent" then
+ return
+ end
end
report_subchain(...)
end
@@ -1221,6 +1182,9 @@ local logwarning = report_subchain
local function logprocess(...)
if trace_steps then
registermessage(...)
+ if trace_steps == "silent" then
+ return
+ end
end
report_chain(...)
end
@@ -1234,7 +1198,7 @@ local logwarning = report_chain
-- in a bit weird way. There is no lookup and the replacement comes from the lookup
-- itself. It is meant mostly for dealing with Urdu.
-local function reversesub(head,start,stop,dataset,sequence,replacements,rlmode)
+local function reversesub(head,start,stop,dataset,sequence,replacements,rlmode,skiphash)
local char = getchar(start)
local replacement = replacements[char]
if replacement then
@@ -1288,17 +1252,31 @@ end
-- logwarning("%s: bad step, no proper return values",cref(dataset,sequence))
-- end
-function chainprocs.gsub_single(head,start,stop,dataset,sequence,currentlookup,chainindex)
+local function getmapping(dataset,sequence,currentlookup)
local steps = currentlookup.steps
local nofsteps = currentlookup.nofsteps
- if nofsteps > 1 then
- reportmoresteps(dataset,sequence)
- end
if nofsteps == 0 then
reportzerosteps(dataset,sequence)
+ currentlookup.mapping = false
+ return false
else
- local current = start
+ if nofsteps > 1 then
+ reportmoresteps(dataset,sequence)
+ end
local mapping = steps[1].coverage
+ currentlookup.mapping = mapping
+ currentlookup.format = steps[1].format
+ return mapping
+ end
+end
+
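
The point of caching on currentlookup is the three-way state: nil means not resolved yet, false means a lookup without usable steps (so it gets reported once and is skipped cheaply afterwards), and anything else is the coverage of the first step. Testing against nil rather than "not mapping" is what prevents a zero-step lookup from being resolved and reported over and over. All chainprocs below therefore open with the same two lines:

-- the shared pattern, shown in isolation (taken from the functions below)
local mapping = currentlookup.mapping
if mapping == nil then
    mapping = getmapping(dataset,sequence,currentlookup) -- resolves and caches
end
if mapping then
    -- the handler acts on mapping[getchar(start)] here
end
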
+function chainprocs.gsub_single(head,start,stop,dataset,sequence,currentlookup,rlmode,skiphash,chainindex)
+ local mapping = currentlookup.mapping
+ if mapping == nil then
+ mapping = getmapping(dataset,sequence,currentlookup)
+ end
+ if mapping then
+ local current = start
while current do
local currentchar = ischar(current)
if currentchar then
@@ -1328,35 +1306,6 @@ function chainprocs.gsub_single(head,start,stop,dataset,sequence,currentlookup,c
return head, start, false
end
---[[ldx--
-Here we replace start by a sequence of new glyphs.
---ldx]]--
-
-function chainprocs.gsub_multiple(head,start,stop,dataset,sequence,currentlookup)
- local steps = currentlookup.steps
- local nofsteps = currentlookup.nofsteps
- if nofsteps > 1 then
- reportmoresteps(dataset,sequence)
- end
- if nofsteps == 0 then
- reportzerosteps(dataset,sequence)
- else
- local startchar = getchar(start)
- local replacement = steps[1].coverage[startchar]
- if not replacement or replacement == "" then
- if trace_bugs then
- logwarning("%s: no multiple for %s",cref(dataset,sequence),gref(startchar))
- end
- else
- if trace_multiples then
- logprocess("%s: replacing %s by multiple characters %s",cref(dataset,sequence),gref(startchar),gref(replacement))
- end
- return multiple_glyphs(head,start,replacement,sequence.flags[1],dataset[1])
- end
- end
- return head, start, false
-end
-
--[[ldx--
Here we replace start by new glyph. First we delete the rest of the match.
--ldx]]--
@@ -1369,20 +1318,16 @@ end
-- marks come last anyway
-- are there cases where we need to delete the mark
-function chainprocs.gsub_alternate(head,start,stop,dataset,sequence,currentlookup)
- local steps = currentlookup.steps
- local nofsteps = currentlookup.nofsteps
- if nofsteps > 1 then
- reportmoresteps(dataset,sequence)
+function chainprocs.gsub_alternate(head,start,stop,dataset,sequence,currentlookup,rlmode,skiphash,chainindex)
+ local mapping = currentlookup.mapping
+ if mapping == nil then
+ mapping = getmapping(dataset,sequence,currentlookup)
end
- if nofsteps == 0 then
- reportzerosteps(dataset,sequence)
- else
+ if mapping then
local kind = dataset[4]
local what = dataset[1]
local value = what == true and tfmdata.shared.features[kind] or what -- todo: optimize in ctx
local current = start
- local mapping = steps[1].coverage
while current do
local currentchar = ischar(current)
if currentchar then
@@ -1391,13 +1336,13 @@ function chainprocs.gsub_alternate(head,start,stop,dataset,sequence,currentlooku
local choice, comment = get_alternative_glyph(current,alternatives,value)
if choice then
if trace_alternatives then
- logprocess("%s: replacing %s by alternative %a to %s, %s",cref(dataset,sequence),gref(char),choice,gref(choice),comment)
+ logprocess("%s: replacing %s by alternative %a to %s, %s",cref(dataset,sequence),gref(currentchar),choice,gref(choice),comment)
end
resetinjection(start)
setchar(start,choice)
else
if trace_alternatives then
- logwarning("%s: no variant %a for %s, %s",cref(dataset,sequence),value,gref(char),comment)
+ logwarning("%s: no variant %a for %s, %s",cref(dataset,sequence),value,gref(currentchar),comment)
end
end
end
@@ -1415,33 +1360,58 @@ function chainprocs.gsub_alternate(head,start,stop,dataset,sequence,currentlooku
return head, start, false
end
+--[[ldx--
+Here we replace start by a sequence of new glyphs.
+--ldx]]--
+
+function chainprocs.gsub_multiple(head,start,stop,dataset,sequence,currentlookup,rlmode,skiphash,chainindex)
+ local mapping = currentlookup.mapping
+ if mapping == nil then
+ mapping = getmapping(dataset,sequence,currentlookup)
+ end
+ if mapping then
+ local startchar = getchar(start)
+ local replacement = mapping[startchar]
+ if not replacement or replacement == "" then
+ if trace_bugs then
+ logwarning("%s: no multiple for %s",cref(dataset,sequence),gref(startchar))
+ end
+ else
+ if trace_multiples then
+ logprocess("%s: replacing %s by multiple characters %s",cref(dataset,sequence),gref(startchar),gref(replacement))
+ end
+ return multiple_glyphs(head,start,replacement,skiphash,dataset[1])
+ end
+ end
+ return head, start, false
+end
+
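
The replacement consumed here (and in handlers.gsub_multiple earlier) is simply an array of glyph codes; a made-up coverage entry could be:

-- hypothetical one-to-many substitution entry
local mapping = {
    [0x1E9E] = { 0x0053, 0x0053 }, -- e.g. capital sharp s decomposed into S S
}
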
--[[ldx--
When we replace ligatures we use a helper that handles the marks. I might change
this function (move code inline and handle the marks by a separate function). We
assume rather stupid ligatures (no complex disc nodes).
--ldx]]--
-function chainprocs.gsub_ligature(head,start,stop,dataset,sequence,currentlookup,chainindex)
- local steps = currentlookup.steps
- local nofsteps = currentlookup.nofsteps
- if nofsteps > 1 then
- reportmoresteps(dataset,sequence)
+-- compare to handlers.gsub_ligature which is more complex ... why
+
+function chainprocs.gsub_ligature(head,start,stop,dataset,sequence,currentlookup,rlmode,skiphash,chainindex)
+ local mapping = currentlookup.mapping
+ if mapping == nil then
+ mapping = getmapping(dataset,sequence,currentlookup)
end
- if nofsteps == 0 then
- reportzerosteps(dataset,sequence)
- else
+ if mapping then
local startchar = getchar(start)
- local ligatures = steps[1].coverage[startchar]
+ local ligatures = mapping[startchar]
if not ligatures then
if trace_bugs then
logwarning("%s: no ligatures starting with %s",cref(dataset,sequence,chainindex),gref(startchar))
end
else
+ local hasmarks = marks[startchar]
local current = getnext(start)
local discfound = false
local last = stop
local nofreplacements = 1
- local skipmark = currentlookup.flags[1] -- sequence.flags?
while current do
-- todo: ischar ... can there really be disc nodes here?
local id = getid(current)
@@ -1456,7 +1426,7 @@ function chainprocs.gsub_ligature(head,start,stop,dataset,sequence,currentlookup
end
else
local schar = getchar(current)
- if skipmark and marks[schar] then -- marks
+ if skiphash and skiphash[schar] then -- marks
-- if current == stop then -- maybe add this
-- break
-- else
@@ -1468,6 +1438,9 @@ function chainprocs.gsub_ligature(head,start,stop,dataset,sequence,currentlookup
ligatures = lg
last = current
nofreplacements = nofreplacements + 1
+ if marks[char] then
+ hasmarks = true
+ end
if current == stop then
break
else
@@ -1491,7 +1464,7 @@ function chainprocs.gsub_ligature(head,start,stop,dataset,sequence,currentlookup
logprocess("%s: replacing character %s upto %s by ligature %s case 4",cref(dataset,sequence,chainindex),gref(startchar),gref(getchar(stop)),gref(ligature))
end
end
- head, start = toligature(head,start,stop,ligature,dataset,sequence,skipmark,discfound)
+ head, start = toligature(head,start,stop,ligature,dataset,sequence,skiphash,discfound,hasmarks)
return head, start, true, nofreplacements, discfound
elseif trace_bugs then
if start == stop then
@@ -1505,49 +1478,43 @@ function chainprocs.gsub_ligature(head,start,stop,dataset,sequence,currentlookup
return head, start, false, 0, false
end
-function chainprocs.gpos_single(head,start,stop,dataset,sequence,currentlookup,rlmode,chainindex)
- local steps = currentlookup.steps
- local nofsteps = currentlookup.nofsteps
- if nofsteps > 1 then
- reportmoresteps(dataset,sequence)
+function chainprocs.gpos_single(head,start,stop,dataset,sequence,currentlookup,rlmode,skiphash,chainindex)
+ local mapping = currentlookup.mapping
+ if mapping == nil then
+ mapping = getmapping(dataset,sequence,currentlookup)
end
- if nofsteps == 0 then
- reportzerosteps(dataset,sequence)
- else
+ if mapping then
local startchar = getchar(start)
- local step = steps[1]
- local kerns = step.coverage[startchar]
- if not kerns then
- -- skip
- elseif step.format == "pair" then
- local dx, dy, w, h = setpair(start,factor,rlmode,sequence.flags[4],kerns) -- currentlookup.flags ?
- if trace_kerns then
- logprocess("%s: shifting single %s by (%p,%p) and correction (%p,%p)",cref(dataset,sequence),gref(startchar),dx,dy,w,h)
- end
- else -- needs checking .. maybe no kerns format for single
- local k = setkern(start,factor,rlmode,kerns,injection)
- if trace_kerns then
- logprocess("%s: shifting single %s by %p",cref(dataset,sequence),gref(startchar),k)
+ local kerns = mapping[startchar]
+ if kerns then
+ local format = currentlookup.format
+ if format == "single" then
+ local dx, dy, w, h = setposition(0,start,factor,rlmode,kerns) -- currentlookup.flags ?
+ if trace_kerns then
+ logprocess("%s: shifting single %s by %s (%p,%p) and correction (%p,%p)",cref(dataset,sequence),gref(startchar),format,dx,dy,w,h)
+ end
+ else -- needs checking .. maybe no kerns format for single
+ local k = (format == "move" and setmove or setkern)(start,factor,rlmode,kerns,injection)
+ if trace_kerns then
+ logprocess("%s: shifting single %s by %s %p",cref(dataset,sequence),gref(startchar),format,k)
+ end
end
+ return head, start, true
end
end
return head, start, false
end
-function chainprocs.gpos_pair(head,start,stop,dataset,sequence,currentlookup,rlmode,chainindex) -- todo: injections ?
- local steps = currentlookup.steps
- local nofsteps = currentlookup.nofsteps
- if nofsteps > 1 then
- reportmoresteps(dataset,sequence)
+function chainprocs.gpos_pair(head,start,stop,dataset,sequence,currentlookup,rlmode,skiphash,chainindex) -- todo: injections ?
+ local mapping = currentlookup.mapping
+ if mapping == nil then
+ mapping = getmapping(dataset,sequence,currentlookup)
end
- if nofsteps == 0 then
- reportzerosteps(dataset,sequence)
- else
+ if mapping then
local snext = getnext(start)
if snext then
local startchar = getchar(start)
- local step = steps[1]
- local kerns = step.coverage[startchar] -- always 1 step
+ local kerns = mapping[startchar] -- always 1 step
if kerns then
local prev = start
while snext do
@@ -1555,48 +1522,49 @@ function chainprocs.gpos_pair(head,start,stop,dataset,sequence,currentlookup,rlm
if not nextchar then
break
end
- local krn = kerns[nextchar]
- if not krn and marks[nextchar] then
- prev = snext
+ if skiphash and skiphash[nextchar] then
+ prev = snext
snext = getnext(snext)
- elseif not krn then
- break
- elseif step.format == "pair" then
- local a, b = krn[1], krn[2]
- if optimizekerns then
- -- this permits a mixed table, but we could also decide to optimize this
- -- in the loader and use format 'kern'
- if not b and a[1] == 0 and a[2] == 0 and a[4] == 0 then
- local k = setkern(snext,factor,rlmode,a[3],"injections")
+ else
+ local krn = kerns[nextchar]
+ if not krn then
+ break
+ end
+ local format = currentlookup.format
+ if format == "pair" then
+ local a, b = krn[1], krn[2]
+ if a == true then
+ -- zero
+ elseif a then
+ local x, y, w, h = setposition(1,start,factor,rlmode,a,"injections") -- currentlookups flags?
if trace_kerns then
- logprocess("%s: shifting single %s by %p",cref(dataset,sequence),gref(startchar),k)
+ local startchar = getchar(start)
+ logprocess("%s: shifting first of pair %s and %s by (%p,%p) and correction (%p,%p)",cref(dataset,sequence),gref(startchar),gref(nextchar),x,y,w,h)
end
- return head, start, true
end
- end
- if a and #a > 0 then
- local startchar = getchar(start)
- local x, y, w, h = setpair(start,factor,rlmode,sequence.flags[4],a,"injections") -- currentlookups flags?
- if trace_kerns then
- logprocess("%s: shifting first of pair %s and %s by (%p,%p) and correction (%p,%p)",cref(dataset,sequence),gref(startchar),gref(nextchar),x,y,w,h)
+ if b == true then
+ -- zero
+ start = snext -- cf spec
+ elseif b then -- #b > 0
+ local x, y, w, h = setposition(2,snext,factor,rlmode,b,"injections")
+ if trace_kerns then
+ local startchar = getchar(start)
+ logprocess("%s: shifting second of pair %s and %s by (%p,%p) and correction (%p,%p)",cref(dataset,sequence),gref(startchar),gref(nextchar),x,y,w,h)
+ end
+ start = snext -- cf spec
+ elseif forcepairadvance then
+ start = snext -- for testing, not cf spec
end
- end
- if b and #b > 0 then
- local startchar = getchar(start)
- local x, y, w, h = setpair(snext,factor,rlmode,sequence.flags[4],b,"injections")
+ return head, start, true
+ elseif krn ~= 0 then
+ local k = (format == "move" and setmove or setkern)(snext,factor,rlmode,krn)
if trace_kerns then
- logprocess("%s: shifting second of pair %s and %s by (%p,%p) and correction (%p,%p)",cref(dataset,sequence),gref(startchar),gref(nextchar),x,y,w,h)
+ logprocess("%s: inserting %s %p between %s and %s",cref(dataset,sequence),format,k,gref(getchar(prev)),gref(nextchar))
end
+ return head, start, true
+ else
+ break
end
- return head, start, true
- elseif krn ~= 0 then
- local k = setkern(snext,factor,rlmode,krn)
- if trace_kerns then
- logprocess("%s: inserting kern %s between %s and %s",cref(dataset,sequence),k,gref(getchar(prev)),gref(nextchar))
- end
- return head, start, true
- else
- break
end
end
end
@@ -1605,18 +1573,15 @@ function chainprocs.gpos_pair(head,start,stop,dataset,sequence,currentlookup,rlm
return head, start, false
end
-function chainprocs.gpos_mark2base(head,start,stop,dataset,sequence,currentlookup,rlmode)
- local steps = currentlookup.steps
- local nofsteps = currentlookup.nofsteps
- if nofsteps > 1 then
- reportmoresteps(dataset,sequence)
+function chainprocs.gpos_mark2base(head,start,stop,dataset,sequence,currentlookup,rlmode,skiphash,chainindex)
+ local mapping = currentlookup.mapping
+ if mapping == nil then
+ mapping = getmapping(dataset,sequence,currentlookup)
end
- if nofsteps == 0 then
- reportzerosteps(dataset,sequence)
- else
+ if mapping then
local markchar = getchar(start)
if marks[markchar] then
- local markanchors = steps[1].coverage[markchar] -- always 1 step
+ local markanchors = mapping[markchar] -- always 1 step
if markanchors then
local base = getprev(start) -- [glyph] [start=mark]
if base then
@@ -1651,8 +1616,8 @@ function chainprocs.gpos_mark2base(head,start,stop,dataset,sequence,currentlooku
if ma then
local dx, dy, bound = setmark(start,base,factor,rlmode,ba,ma,characters[basechar],false,checkmarks)
if trace_marks then
- logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%p,%p)",
- cref(dataset,sequence),anchor,bound,gref(markchar),gref(basechar),dx,dy)
+ logprocess("%s, bound %s, anchoring mark %s to basechar %s => (%p,%p)",
+ cref(dataset,sequence),bound,gref(markchar),gref(basechar),dx,dy)
end
return head, start, true
end
@@ -1673,18 +1638,15 @@ function chainprocs.gpos_mark2base(head,start,stop,dataset,sequence,currentlooku
return head, start, false
end
-function chainprocs.gpos_mark2ligature(head,start,stop,dataset,sequence,currentlookup,rlmode)
- local steps = currentlookup.steps
- local nofsteps = currentlookup.nofsteps
- if nofsteps > 1 then
- reportmoresteps(dataset,sequence)
+function chainprocs.gpos_mark2ligature(head,start,stop,dataset,sequence,currentlookup,rlmode,skiphash,chainindex)
+ local mapping = currentlookup.mapping
+ if mapping == nil then
+ mapping = getmapping(dataset,sequence,currentlookup)
end
- if nofsteps == 0 then
- reportzerosteps(dataset,sequence)
- else
+ if mapping then
local markchar = getchar(start)
if marks[markchar] then
- local markanchors = steps[1].coverage[markchar] -- always 1 step
+ local markanchors = mapping[markchar] -- always 1 step
if markanchors then
local base = getprev(start) -- [glyph] [optional marks] [start=mark]
if base then
@@ -1722,8 +1684,8 @@ function chainprocs.gpos_mark2ligature(head,start,stop,dataset,sequence,currentl
if ba then
local dx, dy, bound = setmark(start,base,factor,rlmode,ba,ma,characters[basechar],false,checkmarks)
if trace_marks then
- logprocess("%s, anchor %s, bound %s: anchoring mark %s to baselig %s at index %s => (%p,%p)",
- cref(dataset,sequence),anchor,a or bound,gref(markchar),gref(basechar),index,dx,dy)
+ logprocess("%s, bound %s, anchoring mark %s to baselig %s at index %s => (%p,%p)",
+ cref(dataset,sequence),a or bound,gref(markchar),gref(basechar),index,dx,dy)
end
return head, start, true
end
@@ -1745,18 +1707,15 @@ function chainprocs.gpos_mark2ligature(head,start,stop,dataset,sequence,currentl
return head, start, false
end
-function chainprocs.gpos_mark2mark(head,start,stop,dataset,sequence,currentlookup,rlmode)
- local steps = currentlookup.steps
- local nofsteps = currentlookup.nofsteps
- if nofsteps > 1 then
- reportmoresteps(dataset,sequence)
+function chainprocs.gpos_mark2mark(head,start,stop,dataset,sequence,currentlookup,rlmode,skiphash,chainindex)
+ local mapping = currentlookup.mapping
+ if mapping == nil then
+ mapping = getmapping(dataset,sequence,currentlookup)
end
- if nofsteps == 0 then
- reportzerosteps(dataset,sequence)
- else
+ if mapping then
local markchar = getchar(start)
if marks[markchar] then
- local markanchors = steps[1].coverage[markchar] -- always 1 step
+ local markanchors = mapping[markchar] -- always 1 step
if markanchors then
local base = getprev(start) -- [glyph] [basemark] [start=mark]
local slc = getligaindex(start)
@@ -1779,8 +1738,8 @@ function chainprocs.gpos_mark2mark(head,start,stop,dataset,sequence,currentlooku
if ma then
local dx, dy, bound = setmark(start,base,factor,rlmode,ba,ma,characters[basechar],true,checkmarks)
if trace_marks then
- logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%p,%p)",
- cref(dataset,sequence),anchor,bound,gref(markchar),gref(basechar),dx,dy)
+ logprocess("%s, bound %s, anchoring mark %s to basemark %s => (%p,%p)",
+ cref(dataset,sequence),bound,gref(markchar),gref(basechar),dx,dy)
end
return head, start, true
end
@@ -1801,17 +1760,14 @@ function chainprocs.gpos_mark2mark(head,start,stop,dataset,sequence,currentlooku
return head, start, false
end
-function chainprocs.gpos_cursive(head,start,stop,dataset,sequence,currentlookup,rlmode)
- local steps = currentlookup.steps
- local nofsteps = currentlookup.nofsteps
- if nofsteps > 1 then
- reportmoresteps(dataset,sequence)
+function chainprocs.gpos_cursive(head,start,stop,dataset,sequence,currentlookup,rlmode,skiphash,chainindex)
+ local mapping = currentlookup.mapping
+ if mapping == nil then
+ mapping = getmapping(dataset,sequence,currentlookup)
end
- if nofsteps == 0 then
- reportzerosteps(dataset,sequence)
- else
+ if mapping then
local startchar = getchar(start)
- local exitanchors = steps[1].coverage[startchar] -- always 1 step
+ local exitanchors = mapping[startchar] -- always 1 step
if exitanchors then
if marks[startchar] then
if trace_cursive then
@@ -1833,9 +1789,10 @@ function chainprocs.gpos_cursive(head,start,stop,dataset,sequence,currentlookup,
if entry then
entry = entry[2]
if entry then
- local dx, dy, bound = setcursive(start,nxt,factor,rlmode,exit,entry,characters[startchar],characters[nextchar])
+ local r2lflag = sequence.flags[4] -- mentioned in the standard
+ local dx, dy, bound = setcursive(start,nxt,factor,rlmode,exit,entry,characters[startchar],characters[nextchar],r2lflag)
if trace_cursive then
- logprocess("%s: moving %s to %s cursive (%p,%p) using anchor %s and bound %s in %s mode",pref(dataset,sequence),gref(startchar),gref(nextchar),dx,dy,anchor,bound,mref(rlmode))
+ logprocess("%s: moving %s to %s cursive (%p,%p) using bound %s in %s mode",pref(dataset,sequence),gref(startchar),gref(nextchar),dx,dy,bound,mref(rlmode))
end
return head, start, true
end
@@ -1865,36 +1822,50 @@ end
-- A previous version had disc collapsing code in the (single sub) handler plus some
-- checking in the main loop, but that left the pre/post sequences undone. The best
-- solution is to add some checking there and backtrack when a replace/post matches
--- but it takes a bit of work to figure out an efficient way (this is what the sweep*
--- names refer to). I might look into that variant one day again as it can replace
--- some other code too. In that approach we can have a special version for gub and pos
--- which gains some speed. This method does the test and passes info to the handlers
--- (sweepnode, sweepmode, sweepprev, sweepnext, etc). Here collapsing is handled in the
--- main loop which also makes code elsewhere simpler (i.e. no need for the other special
--- runners and disc code in ligature building). I also experimented with pushing preceding
--- glyphs sequences in the replace/pre fields beforehand which saves checking afterwards
--- but at the cost of duplicate glyphs (memory) but it's too much overhead (runtime).
+-- but it takes a bit of work to figure out an efficient way (this is what the
+-- sweep* names refer to). I might look into that variant one day again as it can
+-- replace some other code too. In that approach we can have a special version for
+-- gub and pos which gains some speed. This method does the test and passes info to
+-- the handlers. Here collapsing is handled in the main loop which also makes code
+-- elsewhere simpler (i.e. no need for the other special runners and disc code in
+-- ligature building). I also experimented with pushing preceding glyphs sequences
+-- in the replace/pre fields beforehand which saves checking afterwards but at the
+-- cost of duplicate glyphs (memory) but it's too much overhead (runtime).
--
--- In the meantime Kai had moved the code from the single chain into a more general handler
--- and this one (renamed to chaindisk) is used now. I optimized the code a bit and brought
--- it in sycn with the other code. Hopefully I didn't introduce errors. Note: this somewhat
--- complex approach is meant for fonts that implement (for instance) ligatures by character
--- replacement which to some extend is not that suitable for hyphenation. I also use some
--- helpers. This method passes some states but reparses the list. There is room for a bit of
--- speed up but that will be done in the context version. (In fact a partial rewrite of all
--- code can bring some more efficientry.)
+-- In the meantime Kai had moved the code from the single chain into a more general
+-- handler and this one (renamed to chaindisk) is used now. I optimized the code a
+-- bit and brought it in sync with the other code. Hopefully I didn't introduce
+-- errors. Note: this somewhat complex approach is meant for fonts that implement
+-- (for instance) ligatures by character replacement which to some extent is not
+-- that suitable for hyphenation. I also use some helpers. This method passes some
+-- states but reparses the list. There is room for a bit of speed up but that will
+-- be done in the context version. (In fact a partial rewrite of all code can bring
+-- some more efficiency.)
--
--- I didn't test it with extremes but successive disc nodes still can give issues but in
--- order to handle that we need more complex code which also slows down even more. The main
--- loop variant could deal with that: test, collapse, backtrack.
+-- I didn't test it with extremes but successive disc nodes still can give issues
+-- but in order to handle that we need more complex code which also slows down even
+-- more. The main loop variant could deal with that: test, collapse, backtrack.
+
+local userkern = nuts.pool and nuts.pool.newkern -- context
+
+do if not userkern then -- generic
+
+ local thekern = nuts.new("kern",1) -- userkern
+ local setkern = nuts.setkern -- not injections.setkern
-local new_kern = nuts.pool.kern
+ userkern = function(k)
+ local n = copy_node(thekern)
+ setkern(n,k)
+ return n
+ end
+
+end end
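
The fallback above clones a template kern node of subtype 1 (a user kern) and sets its width on demand; in ConTeXt the pooled constructor is taken instead. Usage amounts to:

local k = userkern(65536) -- a user kern of one point (65536 scaled points)
-- checked() below links such a kern in wherever it replaces a glue node
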
local function checked(head)
local current = head
while current do
if getid(current) == glue_code then
- local kern = new_kern(getwidth(current))
+ local kern = userkern(getwidth(current))
if head == current then
local next = getnext(current)
if next then
@@ -1925,23 +1896,23 @@ end
local noflags = { false, false, false, false }
-local function chainrun(head,start,last,dataset,sequence,rlmode,ck,skipped)
+local function chainrun(head,start,last,dataset,sequence,rlmode,skiphash,ck)
local size = ck[5] - ck[4] + 1
- local flags = sequence.flags or noflags
- local done = false
- local skipmark = flags[1]
local chainlookups = ck[6]
+ local done = false
-- current match
if chainlookups then
- local nofchainlookups = #chainlookups
-- Lookups can be like { 1, false, 3 } or { false, 2 } or basically anything and
-- #lookups can be less than #current
+
if size == 1 then
+
-- if nofchainlookups > size then
-- -- bad rules
-- end
+
local chainlookup = chainlookups[1]
for j=1,#chainlookup do
local chainstep = chainlookup[j]
@@ -1949,7 +1920,7 @@ local function chainrun(head,start,last,dataset,sequence,rlmode,ck,skipped)
local chainproc = chainprocs[chainkind]
if chainproc then
local ok
- head, start, ok = chainproc(head,start,last,dataset,sequence,chainstep,rlmode,1)
+ head, start, ok = chainproc(head,start,last,dataset,sequence,chainstep,rlmode,skiphash)
if ok then
done = true
end
@@ -1957,7 +1928,9 @@ local function chainrun(head,start,last,dataset,sequence,rlmode,ck,skipped)
logprocess("%s: %s is not yet supported (1)",cref(dataset,sequence),chainkind)
end
end
+
else
+
-- See LookupType 5: Contextual Substitution Subtable. Now it becomes messy. The
-- easiest case is where #current maps on #lookups i.e. one-to-one. But what if
-- we have a ligature. Cf the spec we then need to advance one character but we
@@ -1968,14 +1941,19 @@ local function chainrun(head,start,last,dataset,sequence,rlmode,ck,skipped)
--
-- Even worse are these family emoji shapes as they can have multiple lookups
-- per slot (probably only for gpos).
+
+ -- It's very unlikely that we will have skip classes here but still ... we seldom
+ -- enter this branch anyway.
+
local i = 1
+ local laststart = start
+ local nofchainlookups = #chainlookups -- useful?
while start do
- if skipped then
+ if skiphash then -- hm, so we know we skip some
while start do
- local char = getchar(start)
- local class = classes[char]
- if class then
- if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then
+ local char = ischar(start,currentfont)
+ if char then
+ if skiphash and skiphash[char] then
start = getnext(start)
else
break
@@ -1993,12 +1971,13 @@ local function chainrun(head,start,last,dataset,sequence,rlmode,ck,skipped)
local chainproc = chainprocs[chainkind]
if chainproc then
local ok, n
- head, start, ok, n = chainproc(head,start,last,dataset,sequence,chainstep,rlmode,i)
+ head, start, ok, n = chainproc(head,start,last,dataset,sequence,chainstep,rlmode,skiphash,i)
-- messy since last can be changed !
if ok then
done = true
if n and n > 1 and i + n > nofchainlookups then
-- this is a safeguard, we just ignore the rest of the lookups
+ i = size -- prevents an advance
break
end
end
@@ -2012,15 +1991,20 @@ local function chainrun(head,start,last,dataset,sequence,rlmode,ck,skipped)
if i > size or not start then
break
elseif start then
+ laststart = start
start = getnext(start)
end
end
+ if not start then
+ start = laststart
+ end
+
end
else
-- todo: needs checking for holes in the replacements
local replacements = ck[7]
if replacements then
- head, start, done = reversesub(head,start,last,dataset,sequence,replacements,rlmode)
+ head, start, done = reversesub(head,start,last,dataset,sequence,replacements,rlmode,skiphash)
else
done = true
if trace_contexts then
@@ -2031,7 +2015,7 @@ local function chainrun(head,start,last,dataset,sequence,rlmode,ck,skipped)
return head, start, done
end
-local function chaindisk(head,start,dataset,sequence,rlmode,ck,skipped)
+local function chaindisk(head,start,dataset,sequence,rlmode,skiphash,ck)
if not start then
return head, start, false
@@ -2098,7 +2082,7 @@ local function chaindisk(head,start,dataset,sequence,rlmode,ck,skipped)
if current then
-- go on
elseif sweepoverflow then
- -- we already are folling up on sweepnode
+ -- we already are following up on sweepnode
break
elseif sweeptype == "post" or sweeptype == "replace" then
current = getnext(sweepnode)
@@ -2213,7 +2197,6 @@ local function chaindisk(head,start,dataset,sequence,rlmode,ck,skipped)
end
end
end
-
local done = false
if lookaheaddisc then
@@ -2241,7 +2224,7 @@ local function chaindisk(head,start,dataset,sequence,rlmode,ck,skipped)
head = lookaheaddisc
end
local pre, post, replace = getdisc(lookaheaddisc)
- local new = copy_node_list(cf)
+ local new = copy_node_list(cf) -- br, how often does that happen
local cnew = new
if pre then
setlink(find_node_tail(cf),pre)
@@ -2261,14 +2244,14 @@ local function chaindisk(head,start,dataset,sequence,rlmode,ck,skipped)
end
if not notmatchpre[lookaheaddisc] then
local ok = false
- cf, start, ok = chainrun(cf,start,cl,dataset,sequence,rlmode,ck,skipped)
+ cf, start, ok = chainrun(cf,start,cl,dataset,sequence,rlmode,skiphash,ck)
if ok then
done = true
end
end
if not notmatchreplace[lookaheaddisc] then
local ok = false
- new, cnew, ok = chainrun(new,cnew,clast,dataset,sequence,rlmode,ck,skipped)
+ new, cnew, ok = chainrun(new,cnew,clast,dataset,sequence,rlmode,skiphash,ck)
if ok then
done = true
end
@@ -2279,8 +2262,8 @@ local function chaindisk(head,start,dataset,sequence,rlmode,ck,skipped)
setdisc(lookaheaddisc,cf,post,new)
end
start = getprev(lookaheaddisc)
- sweephead[cf] = getnext(clast)
- sweephead[new] = getnext(cl)
+ sweephead[cf] = getnext(clast) or false
+ sweephead[new] = getnext(cl) or false
elseif backtrackdisc then
@@ -2299,10 +2282,7 @@ local function chaindisk(head,start,dataset,sequence,rlmode,ck,skipped)
break
end
end
- if cnext then
- setprev(cnext,backtrackdisc)
- end
- setnext(backtrackdisc,cnext)
+ setlink(backtrackdisc,cnext)
setprev(cf)
setnext(cl)
local pre, post, replace, pretail, posttail, replacetail = getdisc(backtrackdisc,true)
@@ -2317,14 +2297,14 @@ local function chaindisk(head,start,dataset,sequence,rlmode,ck,skipped)
end
if not notmatchpost[backtrackdisc] then
local ok = false
- cf, start, ok = chainrun(cf,start,last,dataset,sequence,rlmode,ck,skipped)
+ cf, start, ok = chainrun(cf,start,last,dataset,sequence,rlmode,skiphash,ck)
if ok then
done = true
end
end
if not notmatchreplace[backtrackdisc] then
local ok = false
- new, cnew, ok = chainrun(new,cnew,clast,dataset,sequence,rlmode,ck,skipped)
+ new, cnew, ok = chainrun(new,cnew,clast,dataset,sequence,rlmode,skiphash,ck)
if ok then
done = true
end
@@ -2345,13 +2325,13 @@ local function chaindisk(head,start,dataset,sequence,rlmode,ck,skipped)
setdisc(backtrackdisc,pre,post,replace)
end
start = getprev(backtrackdisc)
- sweephead[post] = getnext(clast)
- sweephead[replace] = getnext(last)
+ sweephead[post] = getnext(clast) or false
+ sweephead[replace] = getnext(last) or false
else
local ok = false
- head, start, ok = chainrun(head,start,last,dataset,sequence,rlmode,ck,skipped)
+ head, start, ok = chainrun(head,start,last,dataset,sequence,rlmode,skiphash,ck)
if ok then
done = true
end
@@ -2361,463 +2341,464 @@ local function chaindisk(head,start,dataset,sequence,rlmode,ck,skipped)
return head, start, done
end
-local function chaintrac(head,start,dataset,sequence,rlmode,ck,skipped)
+local function chaintrac(head,start,dataset,sequence,rlmode,skiphash,ck,match,discseen,sweepnode)
local rule = ck[1]
local lookuptype = ck[8] or ck[2]
local nofseq = #ck[3]
local first = ck[4]
local last = ck[5]
local char = getchar(start)
- logwarning("%s: rule %s matches at char %s for (%s,%s,%s) chars, lookuptype %a",
- cref(dataset,sequence),rule,gref(char),first-1,last-first+1,nofseq-last,lookuptype)
+ logwarning("%s: rule %s %s at char %s for (%s,%s,%s) chars, lookuptype %a, %sdisc seen, %ssweeping",
+ cref(dataset,sequence),rule,match and "matches" or "nomatch",
+ gref(char),first-1,last-first+1,nofseq-last,lookuptype,
+ discseen and "" or "no ", sweepnode and "" or "not ")
end
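
For reference while reading the tracer above and the matcher below, these are the fields of a context record ck as this code consumes them (summarised from this file, not from the loader):

-- ck[1] : rule number, only used in tracing
-- ck[2] : lookuptype, possibly refined by ck[8]
-- ck[3] : the sequence, a list of coverage sets, one per slot (ck[3].n slots)
-- ck[4] : index of the first slot of the current (input) part
-- ck[5] : index of the last slot of the current (input) part
-- ck[6] : the chainlookups applied when the rule matches
-- ck[7] : replacements, only used for reverse chaining substitutions
-- ck[8] : an optional, more specific lookuptype
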
-local function handle_contextchain(head,start,dataset,sequence,contexts,rlmode)
+-- The next one is quite optimized but still somewhat slow. Fonts like ebgaramond
+-- are real torture tests because they have many steps with one context each (having
+-- multiple contexts makes more sense), also because we (can) reduce them. Instead of
+-- a match boolean variable and checks on it, I decided to use a goto with labels.
+-- This is one of the cases where it makes the code more readable and we might even
+-- gain a bit of performance.
+
+-- When we have fewer replacements (lookups) than current matches we can push too
+-- much into the previous disc .. so be it (with only f done).
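
The goto mentioned above boils down to the following loop shape; a self-contained toy (countmatches and its fields are made up) that replaces a match boolean with a jump to a label at the end of the loop body:

local function countmatches(list)
    local n = 0
    for k=1,#list do
        local v = list[k]
        if not v.first  then goto next end -- instead of: match = false ... break
        if not v.second then goto next end
        n = n + 1
        ::next::
    end
    return n
end

print(countmatches { { first = true, second = true }, { first = true } }) -- 1
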
+
+local function handle_contextchain(head,start,dataset,sequence,contexts,rlmode,skiphash)
+ -- optimizing for rlmode gains nothing
local sweepnode = sweepnode
local sweeptype = sweeptype
+ local postreplace
+ local prereplace
+ local checkdisc
+ local discseen -- = false
+ if sweeptype then
+ if sweeptype == "replace" then
+ postreplace = true
+ prereplace = true
+ else
+ postreplace = sweeptype == "post"
+ prereplace = sweeptype == "pre"
+ end
+ checkdisc = getprev(head)
+ end
local currentfont = currentfont
- local diskseen = false
- local checkdisc = sweeptype and getprev(head)
- local flags = sequence.flags or noflags
- local done = false
- local skipmark = flags[1]
- local skipligature = flags[2]
- local skipbase = flags[3]
- local markclass = sequence.markclass
- local skipped = false
+
+ local skipped -- = false
+
local startprev,
startnext = getboth(start)
- for k=1,#contexts do -- i've only seen ccmp having > 1 (e.g. dejavu)
- local match = true
+ local done -- = false
+
+ -- we can have multiple hits and as we scan (currently) all we need to check
+ -- if we have a match ... contextchains have no real coverage table (with
+ -- unique entries)
+
+ -- fonts can have many steps (each doing one check) or many contexts
+
+ -- todo: make a per-char cache so that we have small contexts (when we have one context
+ -- n == 1 and otherwise it can be more, so we can even distinguish n == 1 or more)
+
+ local nofcontexts = contexts.n -- #contexts
+
+ local startchar = nofcontext == 1 or ischar(start,currentfont) -- only needed in a chain
+
+ for k=1,nofcontexts do -- does this disc mess work well with n > 1
+
+ local ck = contexts[k]
+ local seq = ck[3]
+ local f = ck[4] -- first current
+ if not startchar or not seq[f][startchar] then
+ -- report("no hit in %a at %i of %i contexts",sequence.type,k,nofcontexts)
+ goto next
+ end
+ local s = seq.n -- or #seq
+ local l = ck[5] -- last current
local current = start
local last = start
- local ck = contexts[k]
- local seq = ck[3]
- local s = #seq
- local size = 1
- -- f..l = mid string
- if s == 1 then
- -- this seldom happens as it makes no sense (bril, ebgaramond, husayni, minion)
- local char = ischar(current,currentfont)
- if char then
- if not seq[1][char] then
- match = false
+
+ -- current match
+
+ if l > f then
+ -- before/current/after | before/current | current/after
+ local discfound -- = nil
+ local n = f + 1
+ last = startnext -- the second in current (first already matched)
+ while n <= l do
+ if postreplace and not last then
+ last = getnext(sweepnode)
+ sweeptype = nil
end
- end
- else
- -- maybe we need a better space check (maybe check for glue or category or combination)
- -- we cannot optimize for n=2 because there can be disc nodes
- local f = ck[4]
- local l = ck[5]
- -- current match
- size = l - f + 1
- if size > 1 then
- -- before/current/after | before/current | current/after
- local discfound -- = nil
- local n = f + 1
- -- last = getnext(last) -- the second in current (first already matched)
- last = startnext -- the second in current (first already matched)
- while n <= l do
- if not last and (sweeptype == "post" or sweeptype == "replace") then
- last = getnext(sweepnode)
- sweeptype = nil
- end
- if last then
- local char, id = ischar(last,currentfont)
- if char then
- local class = classes[char]
- if class then
- if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then
- skipped = true
- if trace_skips then
- show_skip(dataset,sequence,char,ck,class)
- end
- last = getnext(last)
- elseif seq[n][char] then
- if n < l then
- last = getnext(last)
- end
- n = n + 1
- else
- if discfound then
- notmatchreplace[discfound] = true
- if notmatchpre[discfound] then
- match = false
- end
- else
- match = false
- end
- break
- end
+ if last then
+ local char, id = ischar(last,currentfont)
+ if char then
+ if skiphash and skiphash[char] then
+ skipped = true
+ if trace_skips then
+ show_skip(dataset,sequence,char,ck,classes[char])
+ end
+ last = getnext(last)
+ elseif seq[n][char] then
+ if n < l then
+ last = getnext(last)
+ end
+ n = n + 1
+ elseif discfound then
+ notmatchreplace[discfound] = true
+ if notmatchpre[discfound] then
+ goto next
else
- if discfound then
- notmatchreplace[discfound] = true
- if notmatchpre[discfound] then
- match = false
- end
- else
- match = false
- end
break
end
- elseif char == false then
- if discfound then
- notmatchreplace[discfound] = true
- if notmatchpre[discfound] then
- match = false
- end
+ else
+ goto next
+ end
+ elseif char == false then
+ if discfound then
+ notmatchreplace[discfound] = true
+ if notmatchpre[discfound] then
+ goto next
else
- match = false
+ break
end
- break
- elseif id == disc_code then
- diskseen = true
- discfound = last
- notmatchpre[last] = nil
- notmatchpost[last] = true
- notmatchreplace[last] = nil
- local pre, post, replace = getdisc(last)
- if pre then
- local n = n
- while pre do
- if seq[n][getchar(pre)] then
- n = n + 1
- pre = getnext(pre)
- if n > l then
- break
- end
- else
- notmatchpre[last] = true
+ else
+ goto next
+ end
+ elseif id == disc_code then
+ -- elseif id == disc_code and (not discs or discs[last]) then
+ discseen = true
+ discfound = last
+ notmatchpre[last] = nil
+ notmatchpost[last] = true
+ notmatchreplace[last] = nil
+ local pre, post, replace = getdisc(last)
+ if pre then
+ local n = n
+ while pre do
+ if seq[n][getchar(pre)] then
+ n = n + 1
+ if n > l then
break
end
- end
- if n <= l then
+ pre = getnext(pre)
+ else
notmatchpre[last] = true
+ break
end
- else
- notmatchpre[last] = true
end
- if replace then
- -- so far we never entered this branch
- while replace do
- if seq[n][getchar(replace)] then
- n = n + 1
- replace = getnext(replace)
- if n > l then
- break
- end
+ -- commented, for Kai to check
+ -- if n <= l then
+ -- notmatchpre[last] = true
+ -- end
+ else
+ notmatchpre[last] = true
+ end
+ if replace then
+ -- so far we never entered this branch
+ while replace do
+ if seq[n][getchar(replace)] then
+ n = n + 1
+ if n > l then
+ break
+ end
+ replace = getnext(replace)
+ else
+ notmatchreplace[last] = true
+ if notmatchpre[last] then
+ goto next
else
- notmatchreplace[last] = true
- if notmatchpre[last] then
- match = false
- end
break
end
end
- -- why here again
- if notmatchpre[last] then
- match = false
- end
end
- -- maybe only if match
- last = getnext(last)
- else
- match = false
- break
+ -- why here again
+ if notmatchpre[last] then
+ goto next
+ end
end
+ -- maybe only if match
+ last = getnext(last)
else
- match = false
- break
+ goto next
end
+ else
+ goto next
end
end
- -- before
- if match and f > 1 then
- -- local prev = getprev(start)
- -- if prev then
- if startprev then
- local prev = startprev
- if prev == checkdisc and (sweeptype == "pre" or sweeptype == "replace") then
- prev = getprev(sweepnode)
- -- sweeptype = nil
- end
- if prev then
- local discfound -- = nil
- local n = f - 1
- while n >= 1 do
- if prev then
- local char, id = ischar(prev,currentfont)
- if char then
- local class = classes[char]
- if class then
- if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then
- skipped = true
- if trace_skips then
- show_skip(dataset,sequence,char,ck,class)
- end
- prev = getprev(prev)
- elseif seq[n][char] then
- if n > 1 then
- prev = getprev(prev)
- end
- n = n - 1
- else
- if discfound then
- notmatchreplace[discfound] = true
- if notmatchpost[discfound] then
- match = false
- end
- else
- match = false
- end
- break
- end
+ end
+
+ -- before
+
+ if f > 1 then
+ if startprev then
+ local prev = startprev
+ if prereplace and prev == checkdisc then
+ prev = getprev(sweepnode)
+ end
+ if prev then
+ local discfound -- = nil
+ local n = f - 1
+ while n >= 1 do
+ if prev then
+ local char, id = ischar(prev,currentfont)
+ if char then
+ if skiphash and skiphash[char] then
+ skipped = true
+ if trace_skips then
+ show_skip(dataset,sequence,char,ck,classes[char])
+ end
+ prev = getprev(prev)
+ elseif seq[n][char] then
+ if n > 1 then
+ prev = getprev(prev)
+ end
+ n = n - 1
+ elseif discfound then
+ notmatchreplace[discfound] = true
+ if notmatchpost[discfound] then
+ goto next
else
- if discfound then
- notmatchreplace[discfound] = true
- if notmatchpost[discfound] then
- match = false
- end
- else
- match = false
- end
break
end
- elseif char == false then
- if discfound then
- notmatchreplace[discfound] = true
- if notmatchpost[discfound] then
- match = false
- end
- else
- match = false
+ else
+ goto next
+ end
+ elseif char == false then
+ if discfound then
+ notmatchreplace[discfound] = true
+ if notmatchpost[discfound] then
+ goto next
end
- break
- elseif id == disc_code then
- -- the special case: f i where i becomes dottless i ..
- diskseen = true
- discfound = prev
- notmatchpre[prev] = true
- notmatchpost[prev] = nil
- notmatchreplace[prev] = nil
- local pre, post, replace, pretail, posttail, replacetail = getdisc(prev,true)
- if pre ~= start and post ~= start and replace ~= start then
- if post then
- local n = n
- while posttail do
- if seq[n][getchar(posttail)] then
- n = n - 1
- if posttail == post then
- break
- else
- posttail = getprev(posttail)
- if n < 1 then
- break
- end
- end
- else
- notmatchpost[prev] = true
+ else
+ goto next
+ end
+ break
+ elseif id == disc_code then
+ -- elseif id == disc_code and (not discs or discs[prev]) then
+ -- the special case: f i where i becomes dotless i ..
+ discseen = true
+ discfound = prev
+ notmatchpre[prev] = true
+ notmatchpost[prev] = nil
+ notmatchreplace[prev] = nil
+ local pre, post, replace, pretail, posttail, replacetail = getdisc(prev,true)
+ -- weird test: needs checking
+ if pre ~= start and post ~= start and replace ~= start then
+ if post then
+ local n = n
+ while posttail do
+ if seq[n][getchar(posttail)] then
+ n = n - 1
+ if posttail == post or n < 1 then
break
+ else
+ posttail = getprev(posttail)
end
- end
- if n >= 1 then
+ else
notmatchpost[prev] = true
+ break
end
- else
+ end
+ if n >= 1 then
notmatchpost[prev] = true
end
- if replace then
- -- we seldom enter this branch (e.g. on brill efficient)
- while replacetail do
- if seq[n][getchar(replacetail)] then
- n = n - 1
- if replacetail == replace then
- break
- else
- replacetail = getprev(replacetail)
- if n < 1 then
- break
- end
- end
+ else
+ notmatchpost[prev] = true
+ end
+ if replace then
+ -- we seldom enter this branch (e.g. on brill efficient)
+ while replacetail do
+ if seq[n][getchar(replacetail)] then
+ n = n - 1
+ if replacetail == replace or n < 1 then
+ break
+ else
+ replacetail = getprev(replacetail)
+ end
+ else
+ notmatchreplace[prev] = true
+ if notmatchpost[prev] then
+ goto next
else
- notmatchreplace[prev] = true
- if notmatchpost[prev] then
- match = false
- end
break
end
end
- if not match then
- break
- end
end
+ else
+ notmatchreplace[prev] = true -- new, for Kai to check
end
- -- maybe only if match
- prev = getprev(prev)
- elseif id == glue_code and seq[n][32] and isspace(prev,threshold,id) then
+ end
+ prev = getprev(prev)
+ -- elseif id == glue_code and seq[n][32] and isspace(prev,threshold,id) then
+ -- elseif seq[n][32] and spaces[prev] then
+ -- n = n - 1
+ -- prev = getprev(prev)
+ elseif id == glue_code then
+ local sn = seq[n]
+ if (sn[32] and spaces[prev]) or sn[0xFFFC] then
n = n - 1
prev = getprev(prev)
else
- match = false
- break
+ goto next
end
+ elseif seq[n][0xFFFC] then
+ n = n - 1
+ prev = getprev(prev)
else
- match = false
- break
+ goto next
end
+ else
+ goto next
end
- else
- match = false
end
else
- match = false
+ goto next
end
+ else
+ goto next
end
- -- after
- if match and s > l then
- local current = last and getnext(last)
- if not current and (sweeptype == "post" or sweeptype == "replace") then
- current = getnext(sweepnode)
- -- sweeptype = nil
- end
- if current then
- local discfound -- = nil
- -- removed optimization for s-l == 1, we have to deal with marks anyway
- local n = l + 1
- while n <= s do
- if current then
- local char, id = ischar(current,currentfont)
- if char then
- local class = classes[char]
- if class then
- if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then
- skipped = true
- if trace_skips then
- show_skip(dataset,sequence,char,ck,class)
- end
- current = getnext(current) -- was absent
- elseif seq[n][char] then
- if n < s then -- new test
- current = getnext(current) -- was absent
- end
- n = n + 1
- else
- if discfound then
- notmatchreplace[discfound] = true
- if notmatchpre[discfound] then
- match = false
- end
- else
- match = false
- end
- break
- end
+ end
+
+ -- after
+
+ if s > l then
+ local current = last and getnext(last)
+ if not current and postreplace then
+ current = getnext(sweepnode)
+ end
+ if current then
+ local discfound -- = nil
+ local n = l + 1
+ while n <= s do
+ if current then
+ local char, id = ischar(current,currentfont)
+ if char then
+ if skiphash and skiphash[char] then
+ skipped = true
+ if trace_skips then
+ show_skip(dataset,sequence,char,ck,classes[char])
+ end
+ current = getnext(current) -- was absent
+ elseif seq[n][char] then
+ if n < s then -- new test
+ current = getnext(current) -- was absent
+ end
+ n = n + 1
+ elseif discfound then
+ notmatchreplace[discfound] = true
+ if notmatchpre[discfound] then
+ goto next
else
- if discfound then
- notmatchreplace[discfound] = true
- if notmatchpre[discfound] then
- match = false
- end
- else
- match = false
- end
break
end
- elseif char == false then
- if discfound then
- notmatchreplace[discfound] = true
- if notmatchpre[discfound] then
- match = false
- end
+ else
+ goto next
+ end
+ elseif char == false then
+ if discfound then
+ notmatchreplace[discfound] = true
+ if notmatchpre[discfound] then
+ goto next
else
- match = false
+ break
end
- break
- elseif id == disc_code then
- diskseen = true
- discfound = current
- notmatchpre[current] = nil
- notmatchpost[current] = true
- notmatchreplace[current] = nil
- local pre, post, replace = getdisc(current)
- if pre then
- local n = n
- while pre do
- if seq[n][getchar(pre)] then
- n = n + 1
- pre = getnext(pre)
- if n > s then
- break
- end
- else
- notmatchpre[current] = true
+ else
+ goto next
+ end
+ elseif id == disc_code then
+ -- elseif id == disc_code and (not discs or discs[current]) then
+ discseen = true
+ discfound = current
+ notmatchpre[current] = nil
+ notmatchpost[current] = true
+ notmatchreplace[current] = nil
+ local pre, post, replace = getdisc(current)
+ if pre then
+ local n = n
+ while pre do
+ if seq[n][getchar(pre)] then
+ n = n + 1
+ if n > s then
break
+ else
+ pre = getnext(pre)
end
- end
- if n <= s then
+ else
notmatchpre[current] = true
+ break
end
- else
+ end
+ if n <= s then
notmatchpre[current] = true
end
- if replace then
- -- so far we never entered this branch
- while replace do
- if seq[n][getchar(replace)] then
- n = n + 1
+ else
+ notmatchpre[current] = true
+ end
+ if replace then
+ -- so far we never entered this branch
+ while replace do
+ if seq[n][getchar(replace)] then
+ n = n + 1
+ if n > s then
+ break
+ else
replace = getnext(replace)
- if n > s then
- break
- end
+ end
+ else
+ notmatchreplace[current] = true
+ if notmatchpre[current] then
+ goto next
else
- notmatchreplace[current] = true
- -- different than others, needs checking if "not" is okay
- if not notmatchpre[current] then
- match = false
- end
break
end
end
- if not match then
- break
- end
- else
- -- skip 'm
end
- -- maybe only if match
- current = getnext(current)
- elseif id == glue_code and seq[n][32] and isspace(current,threshold,id) then
+ else
+ notmatchreplace[current] = true -- new, for Kai to check
+ end
+ current = getnext(current)
+ elseif id == glue_code then
+ local sn = seq[n]
+ if (sn[32] and spaces[current]) or sn[0xFFFC] then
n = n + 1
current = getnext(current)
else
- match = false
- break
+ goto next
end
+ elseif seq[n][0xFFFC] then
+ n = n + 1
+ current = getnext(current)
else
- match = false
- break
+ goto next
end
+ else
+ goto next
end
- else
- match = false
end
- end
- end
- if match then
- if trace_contexts then
- chaintrac(head,start,dataset,sequence,rlmode,ck,skipped)
- end
- if diskseen or sweepnode then
- head, start, done = chaindisk(head,start,dataset,sequence,rlmode,ck,skipped)
else
- head, start, done = chainrun(head,start,last,dataset,sequence,rlmode,ck,skipped)
- end
- if done then
- break -- out of contexts (new, needs checking)
+ goto next
end
end
+
+ if trace_contexts then
+ chaintrac(head,start,dataset,sequence,rlmode,skipped and skiphash,ck,true,discseen,sweepnode)
+ end
+ if discseen or sweepnode then
+ head, start, done = chaindisk(head,start,dataset,sequence,rlmode,skipped and skiphash,ck)
+ else
+ head, start, done = chainrun(head,start,last,dataset,sequence,rlmode,skipped and skiphash,ck)
+ end
+ if done then
+ break
+ -- else
+ -- next context
+ end
+ ::next::
+ -- if trace_chains then
+ -- chaintrac(head,start,dataset,sequence,rlmode,skipped and skiphash,ck,false,discseen,sweepnode)
+ -- end
end
- if diskseen then
+ if discseen then
notmatchpre = { }
notmatchpost = { }
notmatchreplace = { }
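Editorial note: the hunk above replaces the old "match = false ... break" bookkeeping with a jump to the ::next:: label that closes each context iteration. A minimal standalone sketch of that continue-style pattern (plain Lua 5.2+/LuaJIT goto; names invented for illustration, not part of the patch):

    local contexts = { "a", "b", "c" }
    for i=1,#contexts do
        local ck = contexts[i]
        if ck == "b" then
            -- this context cannot match: skip the rest of the body and fall
            -- through to the next iteration, as the rewritten matcher now does
            goto next
        end
        print("matched",ck) -- work that only runs for contexts that match
        ::next::
    end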
@@ -2833,13 +2814,19 @@ handlers.gpos_context = handle_contextchain
-- this needs testing
-local function chained_contextchain(head,start,stop,dataset,sequence,currentlookup,rlmode)
+local function chained_contextchain(head,start,stop,dataset,sequence,currentlookup,rlmode,skiphash)
local steps = currentlookup.steps
local nofsteps = currentlookup.nofsteps
if nofsteps > 1 then
reportmoresteps(dataset,sequence)
end
- return handle_contextchain(head,start,dataset,sequence,currentlookup,rlmode)
+ -- probably wrong
+ local l = steps[1].coverage[getchar(start)]
+ if l then
+ return handle_contextchain(head,start,dataset,sequence,l,rlmode,skiphash)
+ else
+ return head, start, false
+ end
end
chainprocs.gsub_context = chained_contextchain
@@ -2848,16 +2835,18 @@ chainprocs.gsub_reversecontextchain = chained_contextchain
chainprocs.gpos_contextchain = chained_contextchain
chainprocs.gpos_context = chained_contextchain
+------------------------------
+
-- experiment (needs no handler in font-otc so not now):
--
-- function otf.registerchainproc(name,f)
-- -- chainprocs[name] = f
--- chainprocs[name] = function(head,start,stop,dataset,sequence,currentlookup,rlmode)
+-- chainprocs[name] = function(head,start,stop,dataset,sequence,currentlookup,rlmode,skiphash)
-- local done = currentlookup.nofsteps > 0
-- if not done then
-- reportzerosteps(dataset,sequence)
-- else
--- head, start, done = f(head,start,stop,dataset,sequence,currentlookup,rlmode)
+-- head, start, done = f(head,start,stop,dataset,sequence,currentlookup,rlmode,skiphash)
-- if not head or not start then
-- reportbadsteps(dataset,sequence)
-- end
@@ -2866,28 +2855,20 @@ chainprocs.gpos_context = chained_contextchain
-- end
-- end
-local missing = setmetatableindex("table")
+local missing = setmetatableindex("table")
+local logwarning = report_process
+local resolved = { } -- we only resolve a font,script,language pair once
local function logprocess(...)
if trace_steps then
registermessage(...)
+ if trace_steps == "silent" then
+ return
+ end
end
report_process(...)
end
-local logwarning = report_process
-
-local function report_missing_coverage(dataset,sequence)
- local t = missing[currentfont]
- if not t[sequence] then
- t[sequence] = true
- logwarning("missing coverage for feature %a, lookup %a, type %a, font %a, name %a",
- dataset[4],sequence.name,sequence.type,currentfont,tfmdata.properties.fullname)
- end
-end
-
-local resolved = { } -- we only resolve a font,script,language pair once
-
-- todo: pass all these 'locals' in a table
local sequencelists = setmetatableindex(function(t,font)
@@ -3038,14 +3019,12 @@ local function kernrun(disc,k_run,font,attr,...)
done = true
end
if prev then
- local nest = getprev(pre)
setlink(prev,pre)
if k_run(prevmarks,"preinjections",pre,font,attr,...) then -- or prev?
done = true
end
- setprev(pre,nest)
--- setprev(pre)
- setnext(prev,disc)
+ setprev(pre)
+ setlink(prev,disc)
end
end
--
@@ -3059,7 +3038,7 @@ local function kernrun(disc,k_run,font,attr,...)
done = true
end
setnext(posttail)
- setprev(next,disc)
+ setlink(disc,next)
end
end
--
@@ -3068,14 +3047,12 @@ local function kernrun(disc,k_run,font,attr,...)
done = true
end
if prev then
- local nest = getprev(replace)
setlink(prev,replace)
if k_run(prevmarks,"replaceinjections",replace,font,attr,...) then -- getnext(replace))
done = true
end
- setprev(replace,nest)
- -- setprev(replace)
- setnext(prev,disc)
+ setprev(replace)
+ setlink(prev,disc)
end
if next then
setlink(replacetail,next)
@@ -3083,7 +3060,7 @@ local function kernrun(disc,k_run,font,attr,...)
done = true
end
setnext(replacetail)
- setprev(next,disc)
+ setlink(disc,next)
end
elseif prev and next then
setlink(prev,next)
@@ -3092,6 +3069,9 @@ local function kernrun(disc,k_run,font,attr,...)
end
setlink(prev,disc,next)
end
+ if done and trace_testruns then
+ report_disc("done",disc)
+ end
return nextstart, done
end
@@ -3108,7 +3088,7 @@ local function comprun(disc,c_run,...) -- vararg faster than the whole list
--
if pre then
sweepnode = disc
- sweeptype = "pre" -- in alternative code preinjections is uc_c_sed (also used then for properties, saves a variable)
+ sweeptype = "pre" -- in alternative code preinjections is used (also used then for properties, saves a variable)
local new, done = c_run(pre,...)
if done then
pre = new
@@ -3139,88 +3119,17 @@ local function comprun(disc,c_run,...) -- vararg faster than the whole list
sweepnode = nil
sweeptype = nil
if renewed then
+ if trace_testruns then
+ report_disc("done",disc)
+ end
setdisc(disc,pre,post,replace)
end
--
return getnext(disc), renewed
end
--- local function testrun(disc,t_run,c_run,...)
--- if trace_testruns then
--- report_disc("test",disc)
--- end
--- local prev, next = getboth(disc)
--- if not next then
--- -- weird discretionary
--- return
--- end
--- local pre, post, replace, pretail, posttail, replacetail = getdisc(disc,true)
--- local done = false
--- if replace and prev then
--- -- this is a bit strange as we only do replace here and not post
--- -- anyway, we only look ahead ... the idea is that we discard a
--- -- disc when there is a ligature crossing the replace boundary
--- setlink(replacetail,next)
--- local ok, overflow = t_run(replace,next,...)
--- if ok and overflow then
--- -- so, we can have crossed the boundary
--- setfield(disc,"replace")
--- setlink(prev,replace)
--- -- setlink(replacetail,next)
--- setboth(disc)
--- flush_node_list(disc)
--- return replace, true -- restart .. tricky !
--- else
--- -- we stay inside the disc
--- setnext(replacetail)
--- setprev(next,disc)
--- end
--- -- pre, post, replace, pretail, posttail, replacetail = getdisc(disc,true)
--- end
--- --
--- -- like comprun
--- --
--- local renewed = false
--- --
--- if pre then
--- sweepnode = disc
--- sweeptype = "pre"
--- local new, ok = c_run(pre,...)
--- if ok then
--- pre = new
--- renewed = true
--- end
--- end
--- --
--- if post then
--- sweepnode = disc
--- sweeptype = "post"
--- local new, ok = c_run(post,...)
--- if ok then
--- post = new
--- renewed = true
--- end
--- end
--- --
--- if replace then
--- sweepnode = disc
--- sweeptype = "replace"
--- local new, ok = c_run(replace,...)
--- if ok then
--- replace = new
--- renewed = true
--- end
--- end
--- --
--- sweepnode = nil
--- sweeptype = nil
--- if renewed then
--- setdisc(disc,pre,post,replace)
--- return next, true
--- else
--- return next, done
--- end
--- end
+-- if we can hyphenate in a lig then it is unlikely to be a lig, so we
+-- could have an option here to ignore the lig
local function testrun(disc,t_run,c_run,...)
if trace_testruns then
@@ -3232,7 +3141,7 @@ local function testrun(disc,t_run,c_run,...)
return
end
local pre, post, replace, pretail, posttail, replacetail = getdisc(disc,true)
- local done = false
+ local renewed = false
if (post or replace) and prev then
if post then
setlink(posttail,next)
@@ -3246,19 +3155,23 @@ local function testrun(disc,t_run,c_run,...)
end
local d_post = t_run(post,next,...)
local d_replace = t_run(replace,next,...)
- if (d_post and d_post > 0) or (d_replace and d_replace > 0) then
- local d = d_replace or d_post
- if d_post and d < d_post then
- d = d_post
- end
- local head, tail = getnext(disc), disc
+ if d_post > 0 or d_replace > 0 then
+ local d = d_replace > d_post and d_replace or d_post
+ local head = getnext(disc) -- is: next
+ local tail = head
for i=1,d do
- tail = getnext(tail)
- if getid(tail) == disc_code then
- head, tail = flattendisk(head,tail)
+ local nx = getnext(tail)
+ local id = getid(nx)
+ if id == disc_code then
+ head, tail = flattendisk(head,nx)
+ elseif id == glyph_code then
+ tail = nx
+ else
+ -- we can have overrun into a glue
+ break
end
end
- local next = getnext(tail)
+ next = getnext(tail)
setnext(tail)
setprev(head)
local new = copy_node_list(head)
@@ -3272,7 +3185,6 @@ local function testrun(disc,t_run,c_run,...)
else
replace = new
end
- setlink(disc,next)
else
-- we stay inside the disc
if posttail then
@@ -3280,20 +3192,21 @@ local function testrun(disc,t_run,c_run,...)
else
post = nil
end
- setnext(replacetail)
if replacetail then
setnext(replacetail)
else
replace = nil
end
- setprev(next,disc)
end
+ setlink(disc,next)
-- pre, post, replace, pretail, posttail, replacetail = getdisc(disc,true)
end
--
-- like comprun
--
- local renewed = false
+ if trace_testruns then
+ report_disc("more",disc)
+ end
--
if pre then
sweepnode = disc
@@ -3329,10 +3242,12 @@ local function testrun(disc,t_run,c_run,...)
sweeptype = nil
if renewed then
setdisc(disc,pre,post,replace)
- return next, true
- else
- return next, done
+ if trace_testruns then
+ report_disc("done",disc)
+ end
end
+ -- next can have changed (copied list)
+ return getnext(disc), renewed
end
-- We can make some assumptions with respect to discretionaries. First of all it is very
@@ -3360,20 +3275,20 @@ end
-- 1{2{\oldstyle\discretionary{3}{4}{5}}6}7\par
-- 1{2\discretionary{3{\oldstyle3}}{{\oldstyle4}4}{5{\oldstyle5}5}6}7\par
-
local nesting = 0
-local function c_run_single(head,font,attr,lookupcache,step,dataset,sequence,rlmode,handler)
+local function c_run_single(head,font,attr,lookupcache,step,dataset,sequence,rlmode,skiphash,handler)
local done = false
local sweep = sweephead[head]
if sweep then
start = sweep
- sweephead[head] = nil
+ -- sweephead[head] = nil
+ sweephead[head] = false
else
start = head
end
while start do
- local char = ischar(start,font)
+ local char, id = ischar(start,font)
if char then
local a -- happens often so no assignment is faster
if attr then
@@ -3383,7 +3298,7 @@ local function c_run_single(head,font,attr,lookupcache,step,dataset,sequence,rlm
local lookupmatch = lookupcache[char]
if lookupmatch then
local ok
- head, start, ok = handler(head,start,dataset,sequence,lookupmatch,rlmode,step,1)
+ head, start, ok = handler(head,start,dataset,sequence,lookupmatch,rlmode,skiphash,step)
if ok then
done = true
end
@@ -3408,49 +3323,7 @@ local function c_run_single(head,font,attr,lookupcache,step,dataset,sequence,rlm
return head, done
end
--- local function t_run_single(start,stop,font,attr,lookupcache)
--- while start ~= stop do
--- local char = ischar(start,font)
--- if char then
--- local a -- happens often so no assignment is faster
--- if attr then
--- a = getattr(start,0)
--- end
--- local startnext = getnext(start)
--- if not a or (a == attr) then
--- local lookupmatch = lookupcache[char]
--- if lookupmatch then -- hm, hyphens can match (tlig) so we need to really check
--- -- if we need more than ligatures we can outline the code and use functions
--- local s = startnext
--- local l = nil
--- local d = 0
--- while s do
--- if s == stop then
--- d = 1
--- elseif d > 0 then
--- d = d + 1
--- end
--- local lg = lookupmatch[getchar(s)]
--- if lg then
--- l = lg
--- s = getnext(s)
--- else
--- break
--- end
--- end
--- if l and l.ligature then
--- return true, d > 1
--- end
--- end
--- else
--- -- go on can be a mixed one
--- end
--- start = starttnext
--- else
--- break
--- end
--- end
--- end
+-- only replace?
local function t_run_single(start,stop,font,attr,lookupcache)
local lastd = nil
@@ -3473,6 +3346,8 @@ local function t_run_single(start,stop,font,attr,lookupcache)
s = ss
ss = nil
end
+ -- a bit weird: why multiple ... anyway we can't have a disc in a disc
+ -- how about post ... we can probably merge this into the while
while getid(s) == disc_code do
ss = getnext(s)
s = getfield(s,"replace")
@@ -3484,38 +3359,48 @@ local function t_run_single(start,stop,font,attr,lookupcache)
local l = nil
local d = 0
while s do
- local lg = lookupmatch[getchar(s)]
- if lg then
- if sstop then
- d = 1
- elseif d > 0 then
- d = d + 1
- end
- l = lg
- s = getnext(s)
- sstop = s == stop
- if not s then
- s = ss
- ss = nil
- end
- while getid(s) == disc_code do
- ss = getnext(s)
- s = getfield(s,"replace")
+ local char = ischar(s,font)
+ if char then
+ local lg = lookupmatch[char]
+ if lg then
+ if sstop then
+ d = 1
+ elseif d > 0 then
+ d = d + 1
+ end
+ l = lg
+ s = getnext(s)
+ sstop = s == stop
if not s then
s = ss
ss = nil
end
+ while getid(s) == disc_code do
+ ss = getnext(s)
+ s = getfield(s,"replace")
+ if not s then
+ s = ss
+ ss = nil
+ end
+ end
+ else
+ break
end
else
break
end
end
- if l and l.ligature then
+ if l and l.ligature then -- so we test for ligature
lastd = d
end
+-- why not: if not l then break elseif l.ligature then return d end
+ else
+-- why not: break
+ -- no match (yet)
end
else
-- go on can be a mixed one
+-- why not: break
end
if lastd then
return lastd
@@ -3525,9 +3410,10 @@ local function t_run_single(start,stop,font,attr,lookupcache)
break
end
end
+ return 0
end
-local function k_run_single(sub,injection,last,font,attr,lookupcache,step,dataset,sequence,rlmode,handler)
+local function k_run_single(sub,injection,last,font,attr,lookupcache,step,dataset,sequence,rlmode,skiphash,handler)
local a -- happens often so no assignment is faster
if attr then
a = getattr(sub,0)
@@ -3541,7 +3427,7 @@ local function k_run_single(sub,injection,last,font,attr,lookupcache,step,datase
if char then
local lookupmatch = lookupcache[char]
if lookupmatch then
- local h, d, ok = handler(sub,n,dataset,sequence,lookupmatch,rlmode,step,1,injection)
+ local h, d, ok = handler(sub,n,dataset,sequence,lookupmatch,rlmode,skiphash,step,injection)
if ok then
return true
end
@@ -3551,12 +3437,13 @@ local function k_run_single(sub,injection,last,font,attr,lookupcache,step,datase
end
end
-local function c_run_multiple(head,font,attr,steps,nofsteps,dataset,sequence,rlmode,handler)
+local function c_run_multiple(head,font,attr,steps,nofsteps,dataset,sequence,rlmode,skiphash,handler)
local done = false
local sweep = sweephead[head]
if sweep then
start = sweep
- sweephead[head] = nil
+ -- sweephead[head] = nil
+ sweephead[head] = false
else
start = head
end
@@ -3571,22 +3458,18 @@ local function c_run_multiple(head,font,attr,steps,nofsteps,dataset,sequence,rlm
for i=1,nofsteps do
local step = steps[i]
local lookupcache = step.coverage
- if lookupcache then
- local lookupmatch = lookupcache[char]
- if lookupmatch then
- -- we could move all code inline but that makes things even more unreadable
- local ok
- head, start, ok = handler(head,start,dataset,sequence,lookupmatch,rlmode,step,i)
- if ok then
- done = true
- break
- elseif not start then
- -- don't ask why ... shouldn't happen
- break
- end
+ local lookupmatch = lookupcache[char]
+ if lookupmatch then
+ -- we could move all code inline but that makes things even more unreadable
+ local ok
+ head, start, ok = handler(head,start,dataset,sequence,lookupmatch,rlmode,skiphash,step)
+ if ok then
+ done = true
+ break
+ elseif not start then
+ -- don't ask why ... shouldn't happen
+ break
end
- else
- report_missing_coverage(dataset,sequence)
end
end
if start then
@@ -3610,58 +3493,6 @@ local function c_run_multiple(head,font,attr,steps,nofsteps,dataset,sequence,rlm
return head, done
end
--- local function t_run_multiple(start,stop,font,attr,steps,nofsteps)
--- while start ~= stop do
--- local char = ischar(start,font)
--- if char then
--- local a -- happens often so no assignment is faster
--- if attr then
--- a = getattr(start,0)
--- end
--- local startnext = getnext(start)
--- if not a or (a == attr) then
--- for i=1,nofsteps do
--- local step = steps[i]
--- local lookupcache = step.coverage
--- if lookupcache then
--- local lookupmatch = lookupcache[char]
--- if lookupmatch then
--- -- if we need more than ligatures we can outline the code and use functions
--- local s = startnext
--- local l = nil
--- local d = 0
--- while s do
--- if s == stop then
--- d = 1
--- elseif d > 0 then
--- d = d + 1
--- end
--- local lg = lookupmatch[getchar(s)]
--- if lg then
--- l = lg
--- s = getnext(s)
--- else
--- break
--- end
--- end
--- if l and l.ligature then
--- return true, d > 1
--- end
--- end
--- else
--- report_missing_coverage(dataset,sequence)
--- end
--- end
--- else
--- -- go on can be a mixed one
--- end
--- start = startnext
--- else
--- break
--- end
--- end
--- end
-
local function t_run_multiple(start,stop,font,attr,steps,nofsteps)
local lastd = nil
while start ~= stop do
@@ -3676,29 +3507,30 @@ local function t_run_multiple(start,stop,font,attr,steps,nofsteps)
for i=1,nofsteps do
local step = steps[i]
local lookupcache = step.coverage
- if lookupcache then
- local lookupmatch = lookupcache[char]
- if lookupmatch then
- -- if we need more than ligatures we can outline the code and use functions
- local s = startnext
- local ss = nil
- local sstop = s == stop
+ local lookupmatch = lookupcache[char]
+ if lookupmatch then
+ -- if we need more than ligatures we can outline the code and use functions
+ local s = startnext
+ local ss = nil
+ local sstop = s == stop
+ if not s then
+ s = ss
+ ss = nil
+ end
+ while getid(s) == disc_code do
+ ss = getnext(s)
+ s = getfield(s,"replace")
if not s then
s = ss
ss = nil
end
- while getid(s) == disc_code do
- ss = getnext(s)
- s = getfield(s,"replace")
- if not s then
- s = ss
- ss = nil
- end
- end
- local l = nil
- local d = 0
- while s do
- local lg = lookupmatch[getchar(s)]
+ end
+ local l = nil
+ local d = 0
+ while s do
+ local char = ischar(s)
+ if char then
+ local lg = lookupmatch[char]
if lg then
if sstop then
d = 1
@@ -3723,13 +3555,13 @@ local function t_run_multiple(start,stop,font,attr,steps,nofsteps)
else
break
end
- end
- if l and l.ligature then
- lastd = d
+ else
+ break
end
end
- else
- report_missing_coverage(dataset,sequence)
+ if l and l.ligature then
+ lastd = d
+ end
end
end
else
@@ -3743,9 +3575,10 @@ local function t_run_multiple(start,stop,font,attr,steps,nofsteps)
break
end
end
+ return 0
end
-local function k_run_multiple(sub,injection,last,font,attr,steps,nofsteps,dataset,sequence,rlmode,handler)
+local function k_run_multiple(sub,injection,last,font,attr,steps,nofsteps,dataset,sequence,rlmode,skiphash,handler)
local a -- happens often so no assignment is faster
if attr then
a = getattr(sub,0)
@@ -3760,16 +3593,12 @@ local function k_run_multiple(sub,injection,last,font,attr,steps,nofsteps,datase
for i=1,nofsteps do
local step = steps[i]
local lookupcache = step.coverage
- if lookupcache then
- local lookupmatch = lookupcache[char]
- if lookupmatch then
- local h, d, ok = handler(head,n,dataset,sequence,lookupmatch,step,rlmode,i,injection)
- if ok then
- return true
- end
+ local lookupmatch = lookupcache[char]
+ if lookupmatch then
+ local h, d, ok = handler(sub,n,dataset,sequence,lookupmatch,rlmode,skiphash,step,injection) -- sub was head
+ if ok then
+ return true
end
- else
- report_missing_coverage(dataset,sequence)
end
end
end
@@ -3780,44 +3609,127 @@ end
-- to be checked, nowadays we probably can assume properly matched directions
-- so maybe we no longer need a stack
+-- local function txtdirstate(start,stack,top,rlparmode)
+-- local dir = getdir(start)
+-- local new = 1
+-- if dir == "+TRT" then
+-- top = top + 1
+-- stack[top] = dir
+-- new = -1
+-- elseif dir == "+TLT" then
+-- top = top + 1
+-- stack[top] = dir
+-- elseif dir == "-TRT" or dir == "-TLT" then
+-- if top == 1 then
+-- top = 0
+-- new = rlparmode
+-- else
+-- top = top - 1
+-- if stack[top] == "+TRT" then
+-- new = -1
+-- end
+-- end
+-- else
+-- new = rlparmode
+-- end
+-- return getnext(start), top, new
+-- end
+--
+-- local function pardirstate(start)
+-- local dir = getdir(start)
+-- local new = 0
+-- if dir == "TLT" then
+-- new = 1
+-- elseif dir == "TRT" then
+-- new = -1
+-- end
+-- return getnext(start), new, new
+-- end
+
local function txtdirstate(start,stack,top,rlparmode)
+ local nxt = getnext(start)
local dir = getdir(start)
- local new = 1
if dir == "+TRT" then
top = top + 1
stack[top] = dir
- new = -1
+ return nxt, top, -1
elseif dir == "+TLT" then
top = top + 1
stack[top] = dir
+ return nxt, top, 1
elseif dir == "-TRT" or dir == "-TLT" then
- top = top - 1
- if stack[top] == "+TRT" then
- new = -1
+ if top == 1 then
+ return nxt, 0, rlparmode
+ else
+ top = top - 1
+ if stack[top] == "+TRT" then
+ return nxt, top, -1
+ else
+ return nxt, top, 1
+ end
end
else
- new = rlparmode
- end
- if trace_directions then
- report_process("directions after txtdir %a: parmode %a, txtmode %a, level %a",dir,mref(rlparmode),mref(new),top)
+ return nxt, top, rlparmode
end
- return getnext(start), top, new
end
local function pardirstate(start)
+ local nxt = getnext(start)
local dir = getdir(start)
- local new = 0
if dir == "TLT" then
- new = 1
+ return nxt, 1, 1
elseif dir == "TRT" then
- new = -1
- end
- if trace_directions then
- report_process("directions after pardir %a: parmode %a",dir,mref(new))
+ return nxt, -1, -1
+ else
+ return nxt, 0, 0
end
- return getnext(start), new, new
end
+-- -- this will become:
+--
+-- local getdirection = nuts.getdirection
+--
+-- local function txtdirstate1(start,stack,top,rlparmode)
+-- local nxt = getnext(start)
+-- local dir, sub = getdirection(start)
+-- if sub then
+-- if top == 1 then
+-- return nxt, 0, rlparmode
+-- elseif dir < 2 then
+-- top = top - 1
+-- if stack[top] == 1 then
+-- return nxt, top, -1
+-- else
+-- return nxt, top, 1
+-- end
+-- else
+-- return nxt, top, rlparmode
+-- end
+-- elseif dir == 1 then
+-- top = top + 1
+-- stack[top] = 1
+-- return nxt, top, -1
+-- elseif dir == 0 then
+-- top = top + 1
+-- stack[top] = 0
+-- return nxt, top, 1
+-- else
+-- return nxt, top, rlparmode
+-- end
+-- end
+--
+-- local function pardirstate1(start)
+-- local nxt = getnext(start)
+-- local dir = getdirection(start)
+-- if dir == 0 then
+-- return nxt, 1, 1
+-- elseif dir == 1 then
+-- return nxt, -1, -1
+-- else
+-- return nxt, 0, 0
+-- end
+-- end
+
otf.helpers = otf.helpers or { }
otf.helpers.txtdirstate = txtdirstate
otf.helpers.pardirstate = pardirstate
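For orientation, here is a standalone restatement of the direction-stack bookkeeping that txtdirstate above performs, operating on plain direction strings instead of dir nodes (an editorial sketch, not part of the patch; node traversal and tracing are left out):

    local function dirstate(dir,stack,top,rlparmode)
        if dir == "+TRT" then
            top = top + 1
            stack[top] = dir
            return top, -1
        elseif dir == "+TLT" then
            top = top + 1
            stack[top] = dir
            return top, 1
        elseif dir == "-TRT" or dir == "-TLT" then
            if top == 1 then
                return 0, rlparmode
            else
                top = top - 1
                return top, stack[top] == "+TRT" and -1 or 1
            end
        else
            return top, rlparmode
        end
    end

    local stack, top, rlmode, rlparmode = { }, 0, 0, 0
    for _, d in ipairs { "+TLT", "+TRT", "-TRT", "-TLT" } do
        top, rlmode = dirstate(d,stack,top,rlparmode)
        print(d,top,rlmode) -- +TLT 1 1, +TRT 2 -1, -TRT 1 1, -TLT 0 0
    end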
@@ -3830,182 +3742,245 @@ otf.helpers.pardirstate = pardirstate
-- optimizations the principles of processing the features hasn't changed much since
-- the beginning.
-local function featuresprocessor(head,font,attr,direction)
-
- local sequences = sequencelists[font] -- temp hack
-
- if not sequencelists then
- return head, false
- end
+do
- nesting = nesting + 1
+ -- reference:
+ --
+ -- local a = attr and getattr(start,0)
+ -- if a then
+ -- a = (a == attr) and (not attribute or getprop(start,a_state) == attribute)
+ -- else
+ -- a = not attribute or getprop(start,a_state) == attribute
+ -- end
+ --
+ -- used:
+ --
+ -- local a -- happens often so no assignment is faster
+ -- if attr then
+ -- if getattr(start,0) == attr and (not attribute or getprop(start,a_state) == attribute) then
+ -- a = true
+ -- end
+ -- elseif not attribute or getprop(start,a_state) == attribute then
+ -- a = true
+ -- end
+
+ -- This is a measurable experimental speedup (only with hyphenated text and multiple
+ -- fonts per processor call), especially for fonts with lots of contextual lookups.
+
+ local fastdisc = true
+ local testdics = false
+
+ directives.register("otf.fastdisc",function(v) fastdisc = v end) -- normally enabled
+
+ -- using a merged combined hash as first test saves some 30% on ebgaramond and
+ -- about 15% on arabtype .. then moving the a test also saves a bit (even when
+ -- often a is not set at all, so that one is a bit debatable)
+
+ local otfdataset = nil -- todo: make an installer
+
+ local getfastdisc = { __index = function(t,k)
+ local v = usesfont(k,currentfont)
+ t[k] = v
+ return v
+ end }
+
+ local getfastspace = { __index = function(t,k)
+ -- we don't pass the id so that one can overload isspace
+ local v = isspace(k,threshold) or false
+ t[k] = v
+ return v
+ end }
+
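The two metatables above implement a small memo cache: the first lookup of a node (or character) computes the answer in __index and stores it, so later lookups are plain table reads. A standalone sketch of the same pattern with an invented predicate (not part of the patch):

    local function expensive(k) -- hypothetical stand-in for usesfont/isspace
        return k % 2 == 0
    end

    local cache = setmetatable({ }, { __index = function(t,k)
        local v = expensive(k) or false -- cache negative results too
        t[k] = v
        return v
    end })

    print(cache[4]) -- computed and stored
    print(cache[4]) -- now a plain (fast) table access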
+ function otf.featuresprocessor(head,font,attr,direction,n)
+
+ local sequences = sequencelists[font] -- temp hack
+
+ nesting = nesting + 1
+
+ if nesting == 1 then
+ currentfont = font
+ tfmdata = fontdata[font]
+ descriptions = tfmdata.descriptions -- only needed in gref so we could pass node there instead
+ characters = tfmdata.characters -- but this branch is not entered that often anyway
+ local resources = tfmdata.resources
+ marks = resources.marks
+ classes = resources.classes
+ threshold,
+ factor = getthreshold(font)
+ checkmarks = tfmdata.properties.checkmarks
+
+ if not otfdataset then
+ otfdataset = otf.dataset
+ end
- if nesting == 1 then
- currentfont = font
- tfmdata = fontdata[font]
- descriptions = tfmdata.descriptions -- only needed in gref so we could pass node there instead
- characters = tfmdata.characters -- but this branch is not entered that often anyway
- local resources = tfmdata.resources
- marks = resources.marks
- classes = resources.classes
- threshold,
- factor = getthreshold(font)
- checkmarks = tfmdata.properties.checkmarks
+ discs = fastdisc and n and n > 1 and setmetatable({},getfastdisc) -- maybe inline
+ spaces = setmetatable({},getfastspace)
- elseif currentfont ~= font then
+ elseif currentfont ~= font then
- report_warning("nested call with a different font, level %s, quitting",nesting)
- nesting = nesting - 1
- return head, false
+ report_warning("nested call with a different font, level %s, quitting",nesting)
+ nesting = nesting - 1
+ return head, false
- end
+ end
- -- some 10% faster when no dynamics but hardly measureable on real runs .. but: it only
- -- works when we have no other dynamics as otherwise the zero run will be applied to the
- -- whole stream for which we then need to pass another variable which we won't
+ -- some 10% faster when no dynamics but hardly measurable on real runs .. but: it only
+ -- works when we have no other dynamics as otherwise the zero run will be applied to the
+ -- whole stream for which we then need to pass another variable which we won't
- -- if attr == 0 then
- -- attr = false
- -- end
+ -- if attr == 0 then
+ -- attr = false
+ -- end
- head = tonut(head)
+ local head = tonut(head)
- if trace_steps then
- checkstep(head)
- end
-
- local initialrl = direction == "TRT" and -1 or 0
+ if trace_steps then
+ checkstep(head)
+ end
- local done = false
- local datasets = otf.dataset(tfmdata,font,attr)
- local dirstack = { } -- could move outside function but we can have local runs
- sweephead = { }
-
- -- Keeping track of the headnode is needed for devanagari. (I generalized it a bit
- -- so that multiple cases are also covered.) We could prepend a temp node.
-
- -- We don't goto the next node when a disc node is created so that we can then treat
- -- the pre, post and replace. It's a bit of a hack but works out ok for most cases.
-
- for s=1,#datasets do
- local dataset = datasets[s]
- ----- featurevalue = dataset[1] -- todo: pass to function instead of using a global
- local attribute = dataset[2]
- local sequence = dataset[3] -- sequences[s] -- also dataset[5]
- local rlparmode = initialrl
- local topstack = 0
- local typ = sequence.type
- local gpossing = typ == "gpos_single" or typ == "gpos_pair" -- store in dataset
- local handler = handlers[typ]
- local steps = sequence.steps
- local nofsteps = sequence.nofsteps
- if not steps then
- -- this permits injection, watch the different arguments
- local h, d, ok = handler(head,head,dataset,sequence,nil,nil,nil,0,font,attr)
- if ok then
- done = true
- if h then
+ local initialrl = direction == "TRT" and -1 or 0
+ -- local initialrl = (direction == 1 or direction == "TRT") and -1 or 0
+
+ local done = false
+ -- local datasets = otf.dataset(tfmdata,font,attr)
+ local datasets = otfdataset(tfmdata,font,attr)
+ local dirstack = { } -- could move outside function but we can have local runs
+ sweephead = { }
+
+ -- Keeping track of the headnode is needed for devanagari. (I generalized it a bit
+ -- so that multiple cases are also covered.) We could prepend a temp node.
+
+ -- We don't goto the next node when a disc node is created so that we can then treat
+ -- the pre, post and replace. It's a bit of a hack but works out ok for most cases.
+
+ for s=1,#datasets do
+ local dataset = datasets[s]
+ local attribute = dataset[2]
+ local sequence = dataset[3] -- sequences[s] -- also dataset[5]
+ local rlparmode = initialrl
+ local topstack = 0
+ local typ = sequence.type
+ local gpossing = typ == "gpos_single" or typ == "gpos_pair" -- store in dataset
+ local forcetestrun = typ == "gsub_ligature" -- testrun is only for ligatures
+ local handler = handlers[typ] -- store in dataset
+ local steps = sequence.steps
+ local nofsteps = sequence.nofsteps
+ local skiphash = sequence.skiphash
+
+ if not steps then
+ -- This permits injection, watch the different arguments. Watch out, the arguments passed
+ -- are not frozen as we might extend or change this. Is this used at all apart from some
+ -- experiments?
+ local h, ok = handler(head,dataset,sequence,initialrl,font,attr) -- less arguments now
+ if ok then
+ done = true
+ end
+ if h and h ~= head then
head = h
end
- end
- elseif typ == "gsub_reversecontextchain" then
- -- this is a limited case, no special treatments like 'init' etc
- local start = find_node_tail(head)
- local rlmode = 0 -- how important is this .. do we need to check for dir?
- while start do
- local char = ischar(start,font)
- if char then
- local a -- happens often so no assignment is faster
- if attr then
- a = getattr(start,0)
- end
- if not a or (a == attr) then
- for i=1,nofsteps do
- local step = steps[i]
- local lookupcache = step.coverage
- if lookupcache then
- local lookupmatch = lookupcache[char]
- if lookupmatch then
- -- todo: disc?
- local ok
- head, start, ok = handler(head,start,dataset,sequence,lookupmatch,rlmode,step,i)
- if ok then
- done = true
- break
+ elseif typ == "gsub_reversecontextchain" then
+ --
+ -- This might need a check: if we have #before or #after > 0 then we might need to reverse
+ -- the before and after lists in the loader. But first I need to see a font that uses multiple
+ -- matches.
+ --
+ local start = find_node_tail(head)
+ local rlmode = 0 -- how important is this .. do we need to check for dir?
+ local merged = steps.merged
+ while start do
+ local char = ischar(start,font)
+ if char then
+ local m = merged[char]
+ if m then
+ local a -- happens often so no assignment is faster
+ if attr then
+ a = getattr(start,0)
+ end
+ if not a or (a == attr) then
+ for i=m[1],m[2] do
+ local step = steps[i]
+ -- for i=1,#m do
+ -- local step = m[i]
+ local lookupcache = step.coverage
+ local lookupmatch = lookupcache[char]
+ if lookupmatch then
+ local ok
+ head, start, ok = handler(head,start,dataset,sequence,lookupmatch,rlmode,skiphash,step)
+ if ok then
+ done = true
+ break
+ end
end
end
+ if start then
+ start = getprev(start)
+ end
else
- report_missing_coverage(dataset,sequence)
+ start = getprev(start)
end
- end
- if start then
+ else
start = getprev(start)
end
else
start = getprev(start)
end
- else
- start = getprev(start)
end
- end
- else
- local start = head
- local rlmode = initialrl
- if nofsteps == 1 then -- happens often
- local step = steps[1]
- local lookupcache = step.coverage
- if not lookupcache then
- report_missing_coverage(dataset,sequence)
- else
+ else
+ local start = head
+ local rlmode = initialrl
+ if nofsteps == 1 then -- happens often
+ local step = steps[1]
+ local lookupcache = step.coverage
while start do
local char, id = ischar(start,font)
if char then
- -- local a = attr and getattr(start,0)
- -- if a then
- -- a = (a == attr) and (not attribute or getprop(start,a_state) == attribute)
- -- else
- -- a = not attribute or getprop(start,a_state) == attribute
- -- end
- local a -- happens often so no assignment is faster
- if attr then
- if getattr(start,0) == attr and (not attribute or getprop(start,a_state) == attribute) then
- a = true
- end
- elseif not attribute or getprop(start,a_state) == attribute then
- a = true
- end
- if a then
+ if skiphash and skiphash[char] then -- we never needed it here but let's try
+ start = getnext(start)
+ else
local lookupmatch = lookupcache[char]
if lookupmatch then
- local ok
- head, start, ok = handler(head,start,dataset,sequence,lookupmatch,rlmode,step,1)
- if ok then
- done = true
+ local a -- happens often so no assignment is faster
+ if attr then
+ if getattr(start,0) == attr and (not attribute or getprop(start,a_state) == attribute) then
+ a = true
+ end
+ elseif not attribute or getprop(start,a_state) == attribute then
+ a = true
end
+ if a then
+ local ok
+ head, start, ok = handler(head,start,dataset,sequence,lookupmatch,rlmode,skiphash,step)
+ if ok then
+ done = true
+ end
+ if start then
+ start = getnext(start)
+ end
+ else
+ start = getnext(start)
+ end
+ else
+ start = getnext(start)
end
- if start then
- start = getnext(start)
- end
- else
- start = getnext(start)
end
- elseif char == false then
- -- whatever glyph
- start = getnext(start)
- elseif id == glue_code then
- -- happens often
- start = getnext(start)
+ elseif char == false or id == glue_code then
+ -- a different font|state or glue (happens often)
+ start = getnext(start)
elseif id == disc_code then
- local ok
- if gpossing then
- start, ok = kernrun(start,k_run_single, font,attr,lookupcache,step,dataset,sequence,rlmode,handler)
- elseif typ == "gsub_ligature" then
- start, ok = testrun(start,t_run_single,c_run_single,font,attr,lookupcache,step,dataset,sequence,rlmode,handler)
+ if not discs or discs[start] == true then
+ local ok
+ if gpossing then
+ start, ok = kernrun(start,k_run_single, font,attr,lookupcache,step,dataset,sequence,rlmode,skiphash,handler)
+ elseif forcetestrun then
+ start, ok = testrun(start,t_run_single,c_run_single,font,attr,lookupcache,step,dataset,sequence,rlmode,skiphash,handler)
+ else
+ start, ok = comprun(start,c_run_single, font,attr,lookupcache,step,dataset,sequence,rlmode,skiphash,handler)
+ end
+ if ok then
+ done = true
+ end
else
- start, ok = comprun(start,c_run_single, font,attr,lookupcache,step,dataset,sequence,rlmode,handler)
- end
- if ok then
- done = true
+ start = getnext(start)
end
elseif id == math_code then
start = getnext(end_of_math(start))
@@ -4017,95 +3992,200 @@ local function featuresprocessor(head,font,attr,direction)
start = getnext(start)
end
end
- end
-
- else
- while start do
- local char, id = ischar(start,font)
- if char then
- -- local a = attr and getattr(start,0)
- -- if a then
- -- a = (a == attr) and (not attribute or getprop(start,a_state) == attribute)
- -- else
- -- a = not attribute or getprop(start,a_state) == attribute
- -- end
- local a -- happens often so no assignment is faster
- if attr then
- if getattr(start,0) == attr and (not attribute or getprop(start,a_state) == attribute) then
- a = true
- end
- elseif not attribute or getprop(start,a_state) == attribute then
- a = true
- end
- if a then
- for i=1,nofsteps do
- local step = steps[i]
- local lookupcache = step.coverage
- if lookupcache then
- local lookupmatch = lookupcache[char]
- if lookupmatch then
- -- we could move all code inline but that makes things even more unreadable
- local ok
- head, start, ok = handler(head,start,dataset,sequence,lookupmatch,rlmode,step,i)
- if ok then
- done = true
- break
- elseif not start then
- -- don't ask why ... shouldn't happen
- break
+ else
+ local merged = steps.merged
+ while start do
+ local char, id = ischar(start,font)
+ if char then
+ if skiphash and skiphash[char] then -- we never needed it here but let's try
+ start = getnext(start)
+ else
+ local m = merged[char]
+ if m then
+ local a -- happens often so no assignment is faster
+ if attr then
+ if getattr(start,0) == attr and (not attribute or getprop(start,a_state) == attribute) then
+ a = true
end
+ elseif not attribute or getprop(start,a_state) == attribute then
+ a = true
+ end
+ if a then
+ for i=m[1],m[2] do
+ local step = steps[i]
+ -- for i=1,#m do
+ -- local step = m[i]
+ local lookupcache = step.coverage
+ local lookupmatch = lookupcache[char]
+ if lookupmatch then
+ -- we could move all code inline but that makes things even more unreadable
+ local ok
+ head, start, ok = handler(head,start,dataset,sequence,lookupmatch,rlmode,skiphash,step)
+ if ok then
+ done = true
+ break
+ elseif not start then
+ -- don't ask why ... shouldn't happen
+ break
+ end
+ end
+ end
+ if start then
+ start = getnext(start)
+ end
+ else
+ start = getnext(start)
end
else
- report_missing_coverage(dataset,sequence)
+ start = getnext(start)
end
end
- if start then
+ elseif char == false or id == glue_code then
+ -- a different font|state or glue (happens often)
+ start = getnext(start)
+ elseif id == disc_code then
+ if not discs or discs[start] == true then
+ local ok
+ if gpossing then
+ start, ok = kernrun(start,k_run_multiple, font,attr,steps,nofsteps,dataset,sequence,rlmode,skiphash,handler)
+ elseif forcetestrun then
+ start, ok = testrun(start,t_run_multiple,c_run_multiple,font,attr,steps,nofsteps,dataset,sequence,rlmode,skiphash,handler)
+ else
+ start, ok = comprun(start,c_run_multiple, font,attr,steps,nofsteps,dataset,sequence,rlmode,skiphash,handler)
+ end
+ if ok then
+ done = true
+ end
+ else
start = getnext(start)
end
+ elseif id == math_code then
+ start = getnext(end_of_math(start))
+ elseif id == dir_code then
+ start, topstack, rlmode = txtdirstate(start,dirstack,topstack,rlparmode)
+ elseif id == localpar_code then
+ start, rlparmode, rlmode = pardirstate(start)
else
start = getnext(start)
end
- elseif char == false then
- -- whatever glyph
- start = getnext(start)
- elseif id == glue_code then
- -- happens often
+ end
+ end
+ end
+
+ if trace_steps then -- ?
+ registerstep(head)
+ end
+
+ end
+
+ nesting = nesting - 1
+ head = tonode(head)
+
+ return head, done
+ end
+
+ -- This is not an official helper and is used for tracing experiments. It can be changed as I like
+ -- at any moment. At some point it might be used in a module that can help font development.
+
+ function otf.datasetpositionprocessor(head,font,direction,dataset)
+
+ currentfont = font
+ tfmdata = fontdata[font]
+ descriptions = tfmdata.descriptions -- only needed in gref so we could pass node there instead
+ characters = tfmdata.characters -- but this branch is not entered that often anyway
+ local resources = tfmdata.resources
+ marks = resources.marks
+ classes = resources.classes
+ threshold,
+ factor = getthreshold(font)
+ checkmarks = tfmdata.properties.checkmarks
+
+ if type(dataset) == "number" then
+ dataset = otfdataset(tfmdata,font,0)[dataset]
+ end
+
+ local sequence = dataset[3] -- sequences[s] -- also dataset[5]
+ local typ = sequence.type
+ -- local gpossing = typ == "gpos_single" or typ == "gpos_pair" -- store in dataset
+
+ -- gpos_contextchain gpos_context
+
+ -- if not gpossing then
+ -- return head, false
+ -- end
+
+ local handler = handlers[typ] -- store in dataset
+ local steps = sequence.steps
+ local nofsteps = sequence.nofsteps
+
+ local head = tonut(head)
+ local done = false
+ local dirstack = { } -- could move outside function but we can have local runs
+ local start = head
+ local initialrl = direction == "TRT" and -1 or 0
+ -- local initialrl = (direction == 1 or direction == "TRT") and -1 or 0
+ local rlmode = initialrl
+ local rlparmode = initialrl
+ local topstack = 0
+ local merged = steps.merged
+
+ -- local matches = false
+ local position = 0
+
+ while start do
+ local char, id = ischar(start,font)
+ if char then
+ position = position + 1
+ local m = merged[char]
+ if m then
+ if skiphash and skiphash[char] then -- we never needed it here but let's try
start = getnext(start)
- elseif id == disc_code then
- local ok
- if gpossing then
- start, ok = kernrun(start,k_run_multiple, font,attr,steps,nofsteps,dataset,sequence,rlmode,handler)
- elseif typ == "gsub_ligature" then
- start, ok = testrun(start,t_run_multiple,c_run_multiple,font,attr,steps,nofsteps,dataset,sequence,rlmode,handler)
- else
- start, ok = comprun(start,c_run_multiple, font,attr,steps,nofsteps,dataset,sequence,rlmode,handler)
+ else
+ for i=m[1],m[2] do
+ local step = steps[i]
+ local lookupcache = step.coverage
+ local lookupmatch = lookupcache[char]
+ if lookupmatch then
+ local ok
+ head, start, ok = handler(head,start,dataset,sequence,lookupmatch,rlmode,skiphash,step)
+ if ok then
+ -- if matches then
+ -- matches[position] = i
+ -- else
+ -- matches = { [position] = i }
+ -- end
+ break
+ elseif not start then
+ break
+ end
+ end
end
- if ok then
- done = true
+ if start then
+ start = getnext(start)
end
- elseif id == math_code then
- start = getnext(end_of_math(start))
- elseif id == dir_code then
- start, topstack, rlmode = txtdirstate(start,dirstack,topstack,rlparmode)
- elseif id == localpar_code then
- start, rlparmode, rlmode = pardirstate(start)
- else
- start = getnext(start)
end
+ else
+ start = getnext(start)
end
+ elseif char == false or id == glue_code then
+ -- a different font|state or glue (happens often)
+ start = getnext(start)
+ elseif id == math_code then
+ start = getnext(end_of_math(start))
+ elseif id == dir_code then
+ start, topstack, rlmode = txtdirstate(start,dirstack,topstack,rlparmode)
+ elseif id == localpar_code then
+ start, rlparmode, rlmode = pardirstate(start)
+ else
+ start = getnext(start)
end
end
- if trace_steps then -- ?
- registerstep(head)
- end
-
+ return tonode(head) -- , matches
end
- nesting = nesting - 1
- head = tonode(head)
+ -- end of experiment
- return head, done
end
-- so far
@@ -4119,26 +4199,26 @@ function otf.registerplugin(name,f)
end
end
-local function plugininitializer(tfmdata,value)
+function otf.plugininitializer(tfmdata,value)
if type(value) == "string" then
tfmdata.shared.plugin = plugins[value]
end
end
-local function pluginprocessor(head,font)
+function otf.pluginprocessor(head,font,attr,direction) -- n
local s = fontdata[font].shared
local p = s and s.plugin
if p then
if trace_plugins then
report_process("applying plugin %a",p[1])
end
- return p[2](head,font)
+ return p[2](head,font,attr,direction)
else
return head, false
end
end
-local function featuresinitializer(tfmdata,value)
+function otf.featuresinitializer(tfmdata,value)
-- nothing done here any more
end
@@ -4148,67 +4228,113 @@ registerotffeature {
default = true,
initializers = {
position = 1,
- node = featuresinitializer,
- plug = plugininitializer,
+ node = otf.featuresinitializer,
+ plug = otf.plugininitializer,
},
processors = {
- node = featuresprocessor,
- plug = pluginprocessor,
+ node = otf.featuresprocessor,
+ plug = otf.pluginprocessor,
}
}
-otf.nodemodeinitializer = featuresinitializer
-otf.featuresprocessor = featuresprocessor
-
--- This can be used for extra handlers, but should be used with care!
-
-otf.handlers = handlers -- used in devanagari
+-- This can be used for extra handlers, but should be used with care! We implement one
+-- here but some more can be found in the osd (script devanagari) file. Now watch out:
+-- when a handler has steps, it is called like the other ones, but when we have no steps,
+-- we use a different call:
+--
+-- function(head,dataset,sequence,initialrl,font,attr)
+-- return head, done
+-- end
+--
+-- Also see (!!).
--- We implement one here:
+otf.handlers = handlers
local setspacekerns = nodes.injections.setspacekerns if not setspacekerns then os.exit() end
+local tag = "kern" -- maybe some day a merge
+
if fontfeatures then
- function otf.handlers.trigger_space_kerns(head,start,dataset,sequence,_,_,_,_,font,attr)
+ function handlers.trigger_space_kerns(head,dataset,sequence,initialrl,font,attr)
local features = fontfeatures[font]
- local enabled = features and features.spacekern and features.kern
+ local enabled = features and features.spacekern and features[tag]
if enabled then
setspacekerns(font,sequence)
end
- return head, start, enabled
+ return head, enabled
end
else -- generic (no hashes)
- function otf.handlers.trigger_space_kerns(head,start,dataset,sequence,_,_,_,_,font,attr)
+ function handlers.trigger_space_kerns(head,dataset,sequence,initialrl,font,attr)
local shared = fontdata[font].shared
local features = shared and shared.features
- local enabled = features and features.spacekern and features.kern
+ local enabled = features and features.spacekern and features[tag]
if enabled then
setspacekerns(font,sequence)
end
- return head, start, enabled
+ return head, enabled
end
end
+-- There are fonts out there that change the space but we don't do that kind of
+-- thing in TeX.
+
local function hasspacekerns(data)
- local sequences = data.resources.sequences
- for i=1,#sequences do
- local sequence = sequences[i]
- local steps = sequence.steps
- if steps and sequence.features.kern then
- for i=1,#steps do
- local coverage = steps[i].coverage
- if not coverage then
- -- maybe an issue
- elseif coverage[32] then
- return true
- else
- for k, v in next, coverage do
- if v[32] then
- return true
+ local resources = data.resources
+ local sequences = resources.sequences
+ local validgpos = resources.features.gpos
+ if validgpos and sequences then
+ for i=1,#sequences do
+ local sequence = sequences[i]
+ local steps = sequence.steps
+ if steps and sequence.features[tag] then
+ local kind = sequence.type
+ if kind == "gpos_pair" or kind == "gpos_single" then
+ for i=1,#steps do
+ local step = steps[i]
+ local coverage = step.coverage
+ local rules = step.rules
+ if rules then
+ -- not now: analyze (simple) rules
+ elseif not coverage then
+ -- nothing to do
+ elseif kind == "gpos_single" then
+ -- maybe a message that we ignore
+ elseif kind == "gpos_pair" then
+ local format = step.format
+ if format == "move" or format == "kern" then
+ local kerns = coverage[32]
+ if kerns then
+ return true
+ end
+ for k, v in next, coverage do
+ if v[32] then
+ return true
+ end
+ end
+ elseif format == "pair" then
+ local kerns = coverage[32]
+ if kerns then
+ for k, v in next, kerns do
+ local one = v[1]
+ if one and one ~= true then
+ return true
+ end
+ end
+ end
+ for k, v in next, coverage do
+ local kern = v[32]
+ if kern then
+ local one = kern[1]
+ if one and one ~= true then
+ return true
+ end
+ end
+ end
+ end
end
end
end
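As an illustration of what the reworked detection above looks for, here is a made-up minimal "kern"-format step that hasspacekerns would flag as involving the space glyph (unicode 32); the glyph codes and kern values are invented for the example:

    local step = {
        format   = "kern",
        coverage = {
            [32]     = { [0x0041] = -20 }, -- space kerned against a following "A"
            [0x0056] = { [32]     = -15 }, -- "V" kerned against a following space
        },
    }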
@@ -4225,112 +4351,120 @@ otf.readers.registerextender {
end
}
--- we merge the lookups but we still honor the language / script
-
local function spaceinitializer(tfmdata,value) -- attr
local resources = tfmdata.resources
local spacekerns = resources and resources.spacekerns
- local properties = tfmdata.properties
if value and spacekerns == nil then
+ local rawdata = tfmdata.shared and tfmdata.shared.rawdata
+ local properties = rawdata.properties
if properties and properties.hasspacekerns then
local sequences = resources.sequences
- local left = { }
- local right = { }
- local last = 0
- local feat = nil
- for i=1,#sequences do
- local sequence = sequences[i]
- local steps = sequence.steps
- if steps then
- local kern = sequence.features.kern
- if kern then
- if feat then
- for script, languages in next, kern do
- local f = feat[script]
- if f then
- for l in next, languages do
- f[l] = true
- end
- else
- feat[script] = languages
- end
- end
- else
- feat = kern
- end
- for i=1,#steps do
- local step = steps[i]
- local coverage = step.coverage
- local rules = step.rules
- local format = step.format
- if rules then
- -- not now: analyze (simple) rules
- elseif coverage then
- -- what to do if we have no [1] but only [2]
- local single = format == gpos_single
- local kerns = coverage[32]
- if kerns then
- for k, v in next, kerns do
- if type(v) ~= "table" then
- right[k] = v
- elseif single then
- right[k] = v[3]
- else
- local one = v[1]
- if one then
- right[k] = one[3]
+ local validgpos = resources.features.gpos
+ if validgpos and sequences then
+ local left = { }
+ local right = { }
+ local last = 0
+ local feat = nil
+ for i=1,#sequences do
+ local sequence = sequences[i]
+ local steps = sequence.steps
+ if steps then
+ -- we don't support space kerns in other features
+ local kern = sequence.features[tag]
+ if kern then
+ local kind = sequence.type
+ if kind == "gpos_pair" or kind == "gpos_single" then
+ if feat then
+ for script, languages in next, kern do
+ local f = feat[script]
+ if f then
+ for l in next, languages do
+ f[l] = true
end
+ else
+ feat[script] = languages
end
end
+ else
+ feat = kern
end
- for k, v in next, coverage do
- local kern = v[32]
- if kern then
- if type(kern) ~= "table" then
- left[k] = kern
- elseif single then
- left[k] = kern[3]
- else
- local one = kern[1]
- if one then
- left[k] = one[3]
+ for i=1,#steps do
+ local step = steps[i]
+ local coverage = step.coverage
+ local rules = step.rules
+ if rules then
+ -- not now: analyze (simple) rules
+ elseif not coverage then
+ -- nothing to do
+ elseif kind == "gpos_single" then
+ -- makes no sense in TeX
+ elseif kind == "gpos_pair" then
+ local format = step.format
+ if format == "move" or format == "kern" then
+ local kerns = coverage[32]
+ if kerns then
+ for k, v in next, kerns do
+ right[k] = v
+ end
+ end
+ for k, v in next, coverage do
+ local kern = v[32]
+ if kern then
+ left[k] = kern
+ end
+ end
+ elseif format == "pair" then
+ local kerns = coverage[32]
+ if kerns then
+ for k, v in next, kerns do
+ local one = v[1]
+ if one and one ~= true then
+ right[k] = one[3]
+ end
+ end
+ end
+ for k, v in next, coverage do
+ local kern = v[32]
+ if kern then
+ local one = kern[1]
+ if one and one ~= true then
+ left[k] = one[3]
+ end
+ end
end
end
end
end
+ last = i
end
+ else
+ -- no steps ... needed for old one ... we could use the basekerns
+ -- instead
end
- last = i
end
- else
- -- no steps ... needed for old one ... we could use the basekerns
- -- instead
end
- end
- left = next(left) and left or false
- right = next(right) and right or false
- if left or right then
- spacekerns = {
- left = left,
- right = right,
- }
- if last > 0 then
- local triggersequence = {
- features = { kern = feat or { dflt = { dflt = true, } } },
- flags = noflags,
- name = "trigger_space_kerns",
- order = { "kern" },
- type = "trigger_space_kerns",
- left = left,
- right = right,
+ left = next(left) and left or false
+ right = next(right) and right or false
+ if left or right then
+ spacekerns = {
+ left = left,
+ right = right,
}
- insert(sequences,last,triggersequence)
+ if last > 0 then
+ local triggersequence = {
+ -- no steps, see (!!)
+ features = { [tag] = feat or { dflt = { dflt = true, } } },
+ flags = noflags,
+ name = "trigger_space_kerns",
+ order = { tag },
+ type = "trigger_space_kerns",
+ left = left,
+ right = right,
+ }
+ insert(sequences,last,triggersequence)
+ end
end
- else
- spacekerns = false
end
- else
- spacekerns = false
end
resources.spacekerns = spacekerns
end
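
For reference, this is roughly the shape of the table that ends up in resources.spacekerns and how the two sides could be combined; the shaper that actually consumes it lives elsewhere, so the helper below is only an illustrative sketch with invented values (in the font's design units):

    -- left  : kerns for the glyph sitting before a space (U+0020)
    -- right : kerns for the glyph sitting after a space
    local spacekerns = {
      left  = { [0x0041] = -20 },
      right = { [0x0056] = -35 },
    }

    -- illustrative only: total extra kern around a space between two glyphs
    local function spacekernbetween(prevchar, nextchar)
      local amount = 0
      if prevchar and spacekerns.left then
        amount = amount + (spacekerns.left[prevchar] or 0)
      end
      if nextchar and spacekerns.right then
        amount = amount + (spacekerns.right[nextchar] or 0)
      end
      return amount
    end

    print(spacekernbetween(0x0041, 0x0056)) -- -55
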
diff --git a/tex/context/base/mkiv/font-ott.lua b/tex/context/base/mkiv/font-ott.lua
index cba3758dc..59d92f40d 100644
--- a/tex/context/base/mkiv/font-ott.lua
+++ b/tex/context/base/mkiv/font-ott.lua
@@ -1,6 +1,6 @@
-if not modules then modules = { } end modules ['font-ott'] = {
+if not modules then modules = { } end modules ["font-ott"] = {
version = 1.001,
- comment = "companion to font-otf.lua (tables)",
+ comment = "companion to font-ini.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
license = "see context related readme files",
@@ -26,118 +26,118 @@ local statistics = otf.statistics or { }
otf.statistics = statistics
local scripts = allocate {
- ['arab'] = 'arabic',
- ['armi'] = 'imperial aramaic',
- ['armn'] = 'armenian',
- ['avst'] = 'avestan',
- ['bali'] = 'balinese',
- ['bamu'] = 'bamum',
- ['batk'] = 'batak',
- ['beng'] = 'bengali',
- ['bng2'] = 'bengali variant 2',
- ['bopo'] = 'bopomofo',
- ['brah'] = 'brahmi',
- ['brai'] = 'braille',
- ['bugi'] = 'buginese',
- ['buhd'] = 'buhid',
- ['byzm'] = 'byzantine music',
- ['cakm'] = 'chakma',
- ['cans'] = 'canadian syllabics',
- ['cari'] = 'carian',
- ['cham'] = 'cham',
- ['cher'] = 'cherokee',
- ['copt'] = 'coptic',
- ['cprt'] = 'cypriot syllabary',
- ['cyrl'] = 'cyrillic',
- ['deva'] = 'devanagari',
- ['dev2'] = 'devanagari variant 2',
- ['dsrt'] = 'deseret',
- ['egyp'] = 'egyptian heiroglyphs',
- ['ethi'] = 'ethiopic',
- ['geor'] = 'georgian',
- ['glag'] = 'glagolitic',
- ['goth'] = 'gothic',
- ['grek'] = 'greek',
- ['gujr'] = 'gujarati',
- ['gjr2'] = 'gujarati variant 2',
- ['guru'] = 'gurmukhi',
- ['gur2'] = 'gurmukhi variant 2',
- ['hang'] = 'hangul',
- ['hani'] = 'cjk ideographic',
- ['hano'] = 'hanunoo',
- ['hebr'] = 'hebrew',
- ['ital'] = 'old italic',
- ['jamo'] = 'hangul jamo',
- ['java'] = 'javanese',
- ['kali'] = 'kayah li',
- ['kana'] = 'hiragana and katakana',
- ['khar'] = 'kharosthi',
- ['khmr'] = 'khmer',
- ['knda'] = 'kannada',
- ['knd2'] = 'kannada variant 2',
- ['kthi'] = 'kaithi',
- ['lana'] = 'tai tham',
- ['lao' ] = 'lao',
- ['latn'] = 'latin',
- ['lepc'] = 'lepcha',
- ['limb'] = 'limbu',
- ['linb'] = 'linear b',
- ['lisu'] = 'lisu',
- ['lyci'] = 'lycian',
- ['lydi'] = 'lydian',
- ['mand'] = 'mandaic and mandaean',
- ['math'] = 'mathematical alphanumeric symbols',
- ['merc'] = 'meroitic cursive',
- ['mero'] = 'meroitic hieroglyphs',
- ['mlym'] = 'malayalam',
- ['mlm2'] = 'malayalam variant 2',
- ['mong'] = 'mongolian',
- ['mtei'] = 'meitei Mayek',
- ['musc'] = 'musical symbols',
- ['mym2'] = 'myanmar variant 2',
- ['mymr'] = 'myanmar',
- ['nko' ] = "n'ko",
- ['ogam'] = 'ogham',
- ['olck'] = 'ol chiki',
- ['orkh'] = 'old turkic and orkhon runic',
- ['orya'] = 'oriya',
- ['ory2'] = 'odia variant 2',
- ['osma'] = 'osmanya',
- ['phag'] = 'phags-pa',
- ['phli'] = 'inscriptional pahlavi',
- ['phnx'] = 'phoenician',
- ['prti'] = 'inscriptional parthian',
- ['rjng'] = 'rejang',
- ['runr'] = 'runic',
- ['samr'] = 'samaritan',
- ['sarb'] = 'old south arabian',
- ['saur'] = 'saurashtra',
- ['shaw'] = 'shavian',
- ['shrd'] = 'sharada',
- ['sinh'] = 'sinhala',
- ['sora'] = 'sora sompeng',
- ['sund'] = 'sundanese',
- ['sylo'] = 'syloti nagri',
- ['syrc'] = 'syriac',
- ['tagb'] = 'tagbanwa',
- ['takr'] = 'takri',
- ['tale'] = 'tai le',
- ['talu'] = 'tai lu',
- ['taml'] = 'tamil',
- ['tavt'] = 'tai viet',
- ['telu'] = 'telugu',
- ['tel2'] = 'telugu variant 2',
- ['tfng'] = 'tifinagh',
- ['tglg'] = 'tagalog',
- ['thaa'] = 'thaana',
- ['thai'] = 'thai',
- ['tibt'] = 'tibetan',
- ['tml2'] = 'tamil variant 2',
- ['ugar'] = 'ugaritic cuneiform',
- ['vai' ] = 'vai',
- ['xpeo'] = 'old persian cuneiform',
- ['xsux'] = 'sumero-akkadian cuneiform',
- ['yi' ] = 'yi',
+ ["arab"] = "arabic",
+ ["armi"] = "imperial aramaic",
+ ["armn"] = "armenian",
+ ["avst"] = "avestan",
+ ["bali"] = "balinese",
+ ["bamu"] = "bamum",
+ ["batk"] = "batak",
+ ["beng"] = "bengali",
+ ["bng2"] = "bengali variant 2",
+ ["bopo"] = "bopomofo",
+ ["brah"] = "brahmi",
+ ["brai"] = "braille",
+ ["bugi"] = "buginese",
+ ["buhd"] = "buhid",
+ ["byzm"] = "byzantine music",
+ ["cakm"] = "chakma",
+ ["cans"] = "canadian syllabics",
+ ["cari"] = "carian",
+ ["cham"] = "cham",
+ ["cher"] = "cherokee",
+ ["copt"] = "coptic",
+ ["cprt"] = "cypriot syllabary",
+ ["cyrl"] = "cyrillic",
+ ["deva"] = "devanagari",
+ ["dev2"] = "devanagari variant 2",
+ ["dsrt"] = "deseret",
+ ["egyp"] = "egyptian hieroglyphs",
+ ["ethi"] = "ethiopic",
+ ["geor"] = "georgian",
+ ["glag"] = "glagolitic",
+ ["goth"] = "gothic",
+ ["grek"] = "greek",
+ ["gujr"] = "gujarati",
+ ["gjr2"] = "gujarati variant 2",
+ ["guru"] = "gurmukhi",
+ ["gur2"] = "gurmukhi variant 2",
+ ["hang"] = "hangul",
+ ["hani"] = "cjk ideographic",
+ ["hano"] = "hanunoo",
+ ["hebr"] = "hebrew",
+ ["ital"] = "old italic",
+ ["jamo"] = "hangul jamo",
+ ["java"] = "javanese",
+ ["kali"] = "kayah li",
+ ["kana"] = "hiragana and katakana",
+ ["khar"] = "kharosthi",
+ ["khmr"] = "khmer",
+ ["knda"] = "kannada",
+ ["knd2"] = "kannada variant 2",
+ ["kthi"] = "kaithi",
+ ["lana"] = "tai tham",
+ ["lao" ] = "lao",
+ ["latn"] = "latin",
+ ["lepc"] = "lepcha",
+ ["limb"] = "limbu",
+ ["linb"] = "linear b",
+ ["lisu"] = "lisu",
+ ["lyci"] = "lycian",
+ ["lydi"] = "lydian",
+ ["mand"] = "mandaic and mandaean",
+ ["math"] = "mathematical alphanumeric symbols",
+ ["merc"] = "meroitic cursive",
+ ["mero"] = "meroitic hieroglyphs",
+ ["mlym"] = "malayalam",
+ ["mlm2"] = "malayalam variant 2",
+ ["mong"] = "mongolian",
+ ["mtei"] = "meitei mayek",
+ ["musc"] = "musical symbols",
+ ["mym2"] = "myanmar variant 2",
+ ["mymr"] = "myanmar",
+ ["nko" ] = "n'ko",
+ ["ogam"] = "ogham",
+ ["olck"] = "ol chiki",
+ ["orkh"] = "old turkic and orkhon runic",
+ ["orya"] = "oriya",
+ ["ory2"] = "odia variant 2",
+ ["osma"] = "osmanya",
+ ["phag"] = "phags-pa",
+ ["phli"] = "inscriptional pahlavi",
+ ["phnx"] = "phoenician",
+ ["prti"] = "inscriptional parthian",
+ ["rjng"] = "rejang",
+ ["runr"] = "runic",
+ ["samr"] = "samaritan",
+ ["sarb"] = "old south arabian",
+ ["saur"] = "saurashtra",
+ ["shaw"] = "shavian",
+ ["shrd"] = "sharada",
+ ["sinh"] = "sinhala",
+ ["sora"] = "sora sompeng",
+ ["sund"] = "sundanese",
+ ["sylo"] = "syloti nagri",
+ ["syrc"] = "syriac",
+ ["tagb"] = "tagbanwa",
+ ["takr"] = "takri",
+ ["tale"] = "tai le",
+ ["talu"] = "tai lu",
+ ["taml"] = "tamil",
+ ["tavt"] = "tai viet",
+ ["telu"] = "telugu",
+ ["tel2"] = "telugu variant 2",
+ ["tfng"] = "tifinagh",
+ ["tglg"] = "tagalog",
+ ["thaa"] = "thaana",
+ ["thai"] = "thai",
+ ["tibt"] = "tibetan",
+ ["tml2"] = "tamil variant 2",
+ ["ugar"] = "ugaritic cuneiform",
+ ["vai" ] = "vai",
+ ["xpeo"] = "old persian cuneiform",
+ ["xsux"] = "sumero-akkadian cuneiform",
+ ["yi" ] = "yi",
}
local languages = allocate {
@@ -302,7 +302,7 @@ local languages = allocate {
["gez" ] = "ge'ez",
["gih" ] = "githabul",
["gil" ] = "gilyak",
- ["gil0"] = " kiribati (gilbertese)",
+ ["gil0"] = "kiribati (gilbertese)",
["gkp" ] = "kpelle (guinea)",
["glk" ] = "gilaki",
["gmz" ] = "gumuz",
@@ -726,165 +726,166 @@ local languages = allocate {
}
local features = allocate {
- ['aalt'] = 'access all alternates',
- ['abvf'] = 'above-base forms',
- ['abvm'] = 'above-base mark positioning',
- ['abvs'] = 'above-base substitutions',
- ['afrc'] = 'alternative fractions',
- ['akhn'] = 'akhands',
- ['blwf'] = 'below-base forms',
- ['blwm'] = 'below-base mark positioning',
- ['blws'] = 'below-base substitutions',
- ['c2pc'] = 'petite capitals from capitals',
- ['c2sc'] = 'small capitals from capitals',
- ['calt'] = 'contextual alternates',
- ['case'] = 'case-sensitive forms',
- ['ccmp'] = 'glyph composition/decomposition',
- ['cfar'] = 'conjunct form after ro',
- ['cjct'] = 'conjunct forms',
- ['clig'] = 'contextual ligatures',
- ['cpct'] = 'centered cjk punctuation',
- ['cpsp'] = 'capital spacing',
- ['cswh'] = 'contextual swash',
- ['curs'] = 'cursive positioning',
- ['dflt'] = 'default processing',
- ['dist'] = 'distances',
- ['dlig'] = 'discretionary ligatures',
- ['dnom'] = 'denominators',
- ['dtls'] = 'dotless forms', -- math
- ['expt'] = 'expert forms',
- ['falt'] = 'final glyph alternates',
- ['fin2'] = 'terminal forms #2',
- ['fin3'] = 'terminal forms #3',
- ['fina'] = 'terminal forms',
- ['flac'] = 'flattened accents over capitals', -- math
- ['frac'] = 'fractions',
- ['fwid'] = 'full width',
- ['half'] = 'half forms',
- ['haln'] = 'halant forms',
- ['halt'] = 'alternate half width',
- ['hist'] = 'historical forms',
- ['hkna'] = 'horizontal kana alternates',
- ['hlig'] = 'historical ligatures',
- ['hngl'] = 'hangul',
- ['hojo'] = 'hojo kanji forms',
- ['hwid'] = 'half width',
- ['init'] = 'initial forms',
- ['isol'] = 'isolated forms',
- ['ital'] = 'italics',
- ['jalt'] = 'justification alternatives',
- ['jp04'] = 'jis2004 forms',
- ['jp78'] = 'jis78 forms',
- ['jp83'] = 'jis83 forms',
- ['jp90'] = 'jis90 forms',
- ['kern'] = 'kerning',
- ['lfbd'] = 'left bounds',
- ['liga'] = 'standard ligatures',
- ['ljmo'] = 'leading jamo forms',
- ['lnum'] = 'lining figures',
- ['locl'] = 'localized forms',
- ['ltra'] = 'left-to-right alternates',
- ['ltrm'] = 'left-to-right mirrored forms',
- ['mark'] = 'mark positioning',
- ['med2'] = 'medial forms #2',
- ['medi'] = 'medial forms',
- ['mgrk'] = 'mathematical greek',
- ['mkmk'] = 'mark to mark positioning',
- ['mset'] = 'mark positioning via substitution',
- ['nalt'] = 'alternate annotation forms',
- ['nlck'] = 'nlc kanji forms',
- ['nukt'] = 'nukta forms',
- ['numr'] = 'numerators',
- ['onum'] = 'old style figures',
- ['opbd'] = 'optical bounds',
- ['ordn'] = 'ordinals',
- ['ornm'] = 'ornaments',
- ['palt'] = 'proportional alternate width',
- ['pcap'] = 'petite capitals',
- ['pkna'] = 'proportional kana',
- ['pnum'] = 'proportional figures',
- ['pref'] = 'pre-base forms',
- ['pres'] = 'pre-base substitutions',
- ['pstf'] = 'post-base forms',
- ['psts'] = 'post-base substitutions',
- ['pwid'] = 'proportional widths',
- ['qwid'] = 'quarter widths',
- ['rand'] = 'randomize',
- ['rclt'] = 'required contextual alternates',
- ['rkrf'] = 'rakar forms',
- ['rlig'] = 'required ligatures',
- ['rphf'] = 'reph form',
- ['rtbd'] = 'right bounds',
- ['rtla'] = 'right-to-left alternates',
- ['rtlm'] = 'right to left math', -- math
- ['ruby'] = 'ruby notation forms',
- ['salt'] = 'stylistic alternates',
- ['sinf'] = 'scientific inferiors',
- ['size'] = 'optical size',
- ['smcp'] = 'small capitals',
- ['smpl'] = 'simplified forms',
- -- ['ss01'] = 'stylistic set 1',
- -- ['ss02'] = 'stylistic set 2',
- -- ['ss03'] = 'stylistic set 3',
- -- ['ss04'] = 'stylistic set 4',
- -- ['ss05'] = 'stylistic set 5',
- -- ['ss06'] = 'stylistic set 6',
- -- ['ss07'] = 'stylistic set 7',
- -- ['ss08'] = 'stylistic set 8',
- -- ['ss09'] = 'stylistic set 9',
- -- ['ss10'] = 'stylistic set 10',
- -- ['ss11'] = 'stylistic set 11',
- -- ['ss12'] = 'stylistic set 12',
- -- ['ss13'] = 'stylistic set 13',
- -- ['ss14'] = 'stylistic set 14',
- -- ['ss15'] = 'stylistic set 15',
- -- ['ss16'] = 'stylistic set 16',
- -- ['ss17'] = 'stylistic set 17',
- -- ['ss18'] = 'stylistic set 18',
- -- ['ss19'] = 'stylistic set 19',
- -- ['ss20'] = 'stylistic set 20',
- ['ssty'] = 'script style', -- math
- ['stch'] = 'stretching glyph decomposition',
- ['subs'] = 'subscript',
- ['sups'] = 'superscript',
- ['swsh'] = 'swash',
- ['titl'] = 'titling',
- ['tjmo'] = 'trailing jamo forms',
- ['tnam'] = 'traditional name forms',
- ['tnum'] = 'tabular figures',
- ['trad'] = 'traditional forms',
- ['twid'] = 'third widths',
- ['unic'] = 'unicase',
- ['valt'] = 'alternate vertical metrics',
- ['vatu'] = 'vattu variants',
- ['vert'] = 'vertical writing',
- ['vhal'] = 'alternate vertical half metrics',
- ['vjmo'] = 'vowel jamo forms',
- ['vkna'] = 'vertical kana alternates',
- ['vkrn'] = 'vertical kerning',
- ['vpal'] = 'proportional alternate vertical metrics',
- ['vrt2'] = 'vertical rotation',
- ['zero'] = 'slashed zero',
+ ["aalt"] = "access all alternates",
+ ["abvf"] = "above-base forms",
+ ["abvm"] = "above-base mark positioning",
+ ["abvs"] = "above-base substitutions",
+ ["afrc"] = "alternative fractions",
+ ["akhn"] = "akhands",
+ ["blwf"] = "below-base forms",
+ ["blwm"] = "below-base mark positioning",
+ ["blws"] = "below-base substitutions",
+ ["c2pc"] = "petite capitals from capitals",
+ ["c2sc"] = "small capitals from capitals",
+ ["calt"] = "contextual alternates",
+ ["case"] = "case-sensitive forms",
+ ["ccmp"] = "glyph composition/decomposition",
+ ["cfar"] = "conjunct form after ro",
+ ["cjct"] = "conjunct forms",
+ ["clig"] = "contextual ligatures",
+ ["cpct"] = "centered cjk punctuation",
+ ["cpsp"] = "capital spacing",
+ ["cswh"] = "contextual swash",
+ ["curs"] = "cursive positioning",
+ ["dflt"] = "default processing",
+ ["dist"] = "distances",
+ ["dlig"] = "discretionary ligatures",
+ ["dnom"] = "denominators",
+ ["dtls"] = "dotless forms", -- math
+ ["expt"] = "expert forms",
+ ["falt"] = "final glyph alternates",
+ ["fin2"] = "terminal forms #2",
+ ["fin3"] = "terminal forms #3",
+ ["fina"] = "terminal forms",
+ ["flac"] = "flattened accents over capitals", -- math
+ ["frac"] = "fractions",
+ ["fwid"] = "full width",
+ ["half"] = "half forms",
+ ["haln"] = "halant forms",
+ ["halt"] = "alternate half width",
+ ["hist"] = "historical forms",
+ ["hkna"] = "horizontal kana alternates",
+ ["hlig"] = "historical ligatures",
+ ["hngl"] = "hangul",
+ ["hojo"] = "hojo kanji forms",
+ ["hwid"] = "half width",
+ ["init"] = "initial forms",
+ ["isol"] = "isolated forms",
+ ["ital"] = "italics",
+ ["jalt"] = "justification alternatives",
+ ["jp04"] = "jis2004 forms",
+ ["jp78"] = "jis78 forms",
+ ["jp83"] = "jis83 forms",
+ ["jp90"] = "jis90 forms",
+ ["kern"] = "kerning",
+ ["lfbd"] = "left bounds",
+ ["liga"] = "standard ligatures",
+ ["ljmo"] = "leading jamo forms",
+ ["lnum"] = "lining figures",
+ ["locl"] = "localized forms",
+ ["ltra"] = "left-to-right alternates",
+ ["ltrm"] = "left-to-right mirrored forms",
+ ["mark"] = "mark positioning",
+ ["med2"] = "medial forms #2",
+ ["medi"] = "medial forms",
+ ["mgrk"] = "mathematical greek",
+ ["mkmk"] = "mark to mark positioning",
+ ["mset"] = "mark positioning via substitution",
+ ["nalt"] = "alternate annotation forms",
+ ["nlck"] = "nlc kanji forms",
+ ["nukt"] = "nukta forms",
+ ["numr"] = "numerators",
+ ["onum"] = "old style figures",
+ ["opbd"] = "optical bounds",
+ ["ordn"] = "ordinals",
+ ["ornm"] = "ornaments",
+ ["palt"] = "proportional alternate width",
+ ["pcap"] = "petite capitals",
+ ["pkna"] = "proportional kana",
+ ["pnum"] = "proportional figures",
+ ["pref"] = "pre-base forms",
+ ["pres"] = "pre-base substitutions",
+ ["pstf"] = "post-base forms",
+ ["psts"] = "post-base substitutions",
+ ["pwid"] = "proportional widths",
+ ["qwid"] = "quarter widths",
+ ["rand"] = "randomize",
+ ["rclt"] = "required contextual alternates",
+ ["rkrf"] = "rakar forms",
+ ["rlig"] = "required ligatures",
+ ["rphf"] = "reph form",
+ ["rtbd"] = "right bounds",
+ ["rtla"] = "right-to-left alternates",
+ ["rtlm"] = "right to left mirrored forms",
+ ["rvrn"] = "required variation alternates",
+ ["ruby"] = "ruby notation forms",
+ ["salt"] = "stylistic alternates",
+ ["sinf"] = "scientific inferiors",
+ ["size"] = "optical size", -- now stat table
+ ["smcp"] = "small capitals",
+ ["smpl"] = "simplified forms",
+ -- ["ss01"] = "stylistic set 1",
+ -- ["ss02"] = "stylistic set 2",
+ -- ["ss03"] = "stylistic set 3",
+ -- ["ss04"] = "stylistic set 4",
+ -- ["ss05"] = "stylistic set 5",
+ -- ["ss06"] = "stylistic set 6",
+ -- ["ss07"] = "stylistic set 7",
+ -- ["ss08"] = "stylistic set 8",
+ -- ["ss09"] = "stylistic set 9",
+ -- ["ss10"] = "stylistic set 10",
+ -- ["ss11"] = "stylistic set 11",
+ -- ["ss12"] = "stylistic set 12",
+ -- ["ss13"] = "stylistic set 13",
+ -- ["ss14"] = "stylistic set 14",
+ -- ["ss15"] = "stylistic set 15",
+ -- ["ss16"] = "stylistic set 16",
+ -- ["ss17"] = "stylistic set 17",
+ -- ["ss18"] = "stylistic set 18",
+ -- ["ss19"] = "stylistic set 19",
+ -- ["ss20"] = "stylistic set 20",
+ ["ssty"] = "script style", -- math
+ ["stch"] = "stretching glyph decomposition",
+ ["subs"] = "subscript",
+ ["sups"] = "superscript",
+ ["swsh"] = "swash",
+ ["titl"] = "titling",
+ ["tjmo"] = "trailing jamo forms",
+ ["tnam"] = "traditional name forms",
+ ["tnum"] = "tabular figures",
+ ["trad"] = "traditional forms",
+ ["twid"] = "third widths",
+ ["unic"] = "unicase",
+ ["valt"] = "alternate vertical metrics",
+ ["vatu"] = "vattu variants",
+ ["vert"] = "vertical writing",
+ ["vhal"] = "alternate vertical half metrics",
+ ["vjmo"] = "vowel jamo forms",
+ ["vkna"] = "vertical kana alternates",
+ ["vkrn"] = "vertical kerning",
+ ["vpal"] = "proportional alternate vertical metrics",
+ ["vrt2"] = "vertical rotation",
+ ["zero"] = "slashed zero",
- ['trep'] = 'traditional tex replacements',
- ['tlig'] = 'traditional tex ligatures',
+ ["trep"] = "traditional tex replacements",
+ ["tlig"] = "traditional tex ligatures",
- ['ss..'] = 'stylistic set ..',
- ['cv..'] = 'character variant ..',
- ['js..'] = 'justification ..',
+ ["ss.."] = "stylistic set ..",
+ ["cv.."] = "character variant ..",
+ ["js.."] = "justification ..",
["dv.."] = "devanagari ..",
["ml.."] = "malayalam ..",
}
local baselines = allocate {
- ['hang'] = 'hanging baseline',
- ['icfb'] = 'ideographic character face bottom edge baseline',
- ['icft'] = 'ideographic character face tope edige baseline',
- ['ideo'] = 'ideographic em-box bottom edge baseline',
- ['idtp'] = 'ideographic em-box top edge baseline',
- ['math'] = 'mathematical centered baseline',
- ['romn'] = 'roman baseline'
+ ["hang"] = "hanging baseline",
+ ["icfb"] = "ideographic character face bottom edge baseline",
+ ["icft"] = "ideographic character face top edge baseline",
+ ["ideo"] = "ideographic em-box bottom edge baseline",
+ ["idtp"] = "ideographic em-box top edge baseline",
+ ["math"] = "mathematical centered baseline",
+ ["romn"] = "roman baseline"
}
tables.scripts = scripts
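
The ss.., cv.. and js.. entries are placeholders for numbered tags; the code that expands them is not part of this hunk, so the resolver below is only a sketch of the idea, working on a copy of a few entries from the table above:

    local features = {
      ["kern"] = "kerning",
      ["liga"] = "standard ligatures",
      ["ss.."] = "stylistic set ..",
      ["cv.."] = "character variant ..",
    }

    -- illustrative: map a numbered tag like "ss03" onto its ".." placeholder
    local function describe(tag)
      local direct = features[tag]
      if direct then
        return direct
      end
      local prefix, number = tag:match("^(%a%a)(%d%d)$")
      if prefix then
        local generic = features[prefix .. ".."]
        if generic then
          return (generic:gsub("%.%.", tostring(tonumber(number))))
        end
      end
      return tag -- unknown tag: pass it through
    end

    print(describe("liga")) -- standard ligatures
    print(describe("ss03")) -- stylistic set 3
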
diff --git a/tex/context/base/mkiv/font-oup.lua b/tex/context/base/mkiv/font-oup.lua
index 75ae08526..79ac76abe 100644
--- a/tex/context/base/mkiv/font-oup.lua
+++ b/tex/context/base/mkiv/font-oup.lua
@@ -15,6 +15,7 @@ local formatters = string.formatters
local sortedkeys = table.sortedkeys
local sortedhash = table.sortedhash
local tohash = table.tohash
+local setmetatableindex = table.setmetatableindex
local report = logs.reporter("otf reader")
@@ -29,8 +30,8 @@ local f_index = formatters["I%05X"]
local f_character_y = formatters["%C"]
local f_character_n = formatters["[ %C ]"]
-local check_duplicates = true -- can become an option (pseudo feature) / aways needed anyway
-local check_soft_hyphen = false -- can become an option (pseudo feature) / needed for tagging
+local check_duplicates = true -- can become an option (pseudo feature) / always needed anyway
+local check_soft_hyphen = true -- can become an option (pseudo feature) / needed for tagging
directives.register("otf.checksofthyphen",function(v)
check_soft_hyphen = v
@@ -370,6 +371,7 @@ local function copyduplicates(fontdata)
local duplicates = resources.duplicates
if check_soft_hyphen then
-- ebgaramond has a zero width empty soft hyphen
+ -- antykwa torunska lacks a soft hyphen
local ds = descriptions[0xAD]
if not ds or ds.width == 0 then
if ds then
@@ -621,7 +623,7 @@ local function checklookups(fontdata,missing,nofmissing)
end
end
if next(done) then
- report("not unicoded: % t",table.sortedkeys(done))
+ report("not unicoded: % t",sortedkeys(done))
end
end
end
@@ -632,7 +634,6 @@ local function unifymissing(fontdata)
require("font-agl")
end
local unicodes = { }
- local private = fontdata.private
local resources = fontdata.resources
resources.unicodes = unicodes
for unicode, d in next, fontdata.descriptions do
@@ -1066,13 +1067,14 @@ function readers.pack(data)
end
end
- local function pack_flat(v)
- local tag = tabstr_flat(v)
+ local function pack_normal_cc(v)
+ local tag = tabstr_normal(v)
local ht = h[tag]
if ht then
c[ht] = c[ht] + 1
return ht
else
+ v[1] = 0
nt = nt + 1
t[nt] = v
h[tag] = nt
@@ -1081,8 +1083,8 @@ function readers.pack(data)
end
end
- local function pack_boolean(v)
- local tag = tabstr_boolean(v)
+ local function pack_flat(v)
+ local tag = tabstr_flat(v)
local ht = h[tag]
if ht then
c[ht] = c[ht] + 1
@@ -1126,6 +1128,84 @@ function readers.pack(data)
end
end
+ -- saves a lot on noto sans
+
+ -- can be made more clever
+
+ local function pack_boolean(v)
+ local tag = tabstr_boolean(v)
+ local ht = h[tag]
+ if ht then
+ c[ht] = c[ht] + 1
+ return ht
+ else
+ nt = nt + 1
+ t[nt] = v
+ h[tag] = nt
+ c[nt] = 1
+ return nt
+ end
+ end
+
+ -- -- This was an experiment to see if we can bypass the luajit limits but loading is
+ -- -- still an issue due to other limits so we don't use this ... actually it can
+ -- -- prevent a luajittex crash but i don't care too much about that as we can't use
+ -- -- that engine anyway then.
+ --
+ -- local function check(t)
+ -- if type(t) == "table" then
+ -- local s = sortedkeys(t)
+ -- local n = #s
+ -- if n <= 10 then
+ -- return
+ -- end
+ -- local ranges = { }
+ -- local first, last
+ -- for i=1,#s do
+ -- local ti = s[i]
+ -- if not first then
+ -- first = ti
+ -- last = ti
+ -- elseif ti == last + 1 then
+ -- last = ti
+ -- elseif last - first < 10 then
+ -- -- we could permit a few exceptions
+ -- return
+ -- else
+ -- ranges[#ranges+1] = { first, last }
+ -- first, last = nil, nil
+ -- end
+ -- end
+ -- if #ranges > 0 then
+ -- return {
+ -- ranges = ranges
+ -- }
+ -- end
+ -- end
+ -- end
+ --
+ -- local function pack_boolean(v)
+ -- local tag
+ -- local r = check(v)
+ -- if r then
+ -- v = r
+ -- tag = tabstr_normal(v)
+ -- else
+ -- tag = tabstr_boolean(v)
+ -- end
+ -- local ht = h[tag]
+ -- if ht then
+ -- c[ht] = c[ht] + 1
+ -- return ht
+ -- else
+ -- nt = nt + 1
+ -- t[nt] = v
+ -- h[tag] = nt
+ -- c[nt] = 1
+ -- return nt
+ -- end
+ -- end
+
local function pack_final(v)
-- v == number
if c[v] <= criterium then
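
All the pack_* variants above share one mechanism: serialize a subtable to a string key and, when that key has been seen before, replace the subtable by the index of its first occurrence (pack_normal_cc additionally zeroes slot 1 so tables that only differ in their class slot can be shared). A stripped-down sketch, with a naive serializer standing in for tabstr_normal, tabstr_flat and tabstr_boolean:

    local t, h, c, nt = { }, { }, { }, 0

    local function tabstr(v) -- naive stand-in for the specialized serializers
      local s = { }
      for k, vv in pairs(v) do
        s[#s+1] = tostring(k) .. "=" .. tostring(vv)
      end
      table.sort(s)
      return table.concat(s, " ")
    end

    local function pack(v)
      local tag = tabstr(v)
      local ht  = h[tag]
      if ht then
        c[ht] = c[ht] + 1 -- shared one more time
        return ht
      else
        nt = nt + 1 ; t[nt] = v ; h[tag] = nt ; c[nt] = 1
        return nt
      end
    end

    local a = pack { 0, 0, 120, 0 }
    local b = pack { 0, 0, 120, 0 }
    print(a == b, c[a]) -- true  2 : both callers now reference t[a]
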
@@ -1145,6 +1225,25 @@ function readers.pack(data)
end
end
+ local function pack_final_cc(v)
+ -- v == number
+ if c[v] <= criterium then
+ return t[v]
+ else
+ -- compact hash
+ local hv = hh[v]
+ if hv then
+ return hv
+ else
+ ntt = ntt + 1
+ tt[ntt] = t[v]
+ hh[v] = ntt
+ cc[ntt] = c[v]
+ return ntt
+ end
+ end
+ end
+
local function success(stage,pass)
if nt == 0 then
if trace_loading or trace_packing then
@@ -1191,9 +1290,9 @@ function readers.pack(data)
local function packers(pass)
if pass == 1 then
- return pack_normal, pack_indexed, pack_flat, pack_boolean, pack_mixed
+ return pack_normal, pack_indexed, pack_flat, pack_boolean, pack_mixed, pack_normal_cc
else
- return pack_final, pack_final, pack_final, pack_final, pack_final
+ return pack_final, pack_final, pack_final, pack_final, pack_final, pack_final_cc
end
end
@@ -1211,15 +1310,13 @@ function readers.pack(data)
return
end
- --
-
for pass=1,2 do
if trace_packing then
report_otf("start packing: stage 1, pass %s",pass)
end
- local pack_normal, pack_indexed, pack_flat, pack_boolean, pack_mixed = packers(pass)
+ local pack_normal, pack_indexed, pack_flat, pack_boolean, pack_mixed, pack_normal_cc = packers(pass)
for unicode, description in next, descriptions do
local boundingbox = description.boundingbox
@@ -1259,28 +1356,30 @@ function readers.pack(data)
if kind == "gpos_pair" then
local c = step.coverage
if c then
- if step.format == "kern" then
+ if step.format == "pair" then
for g1, d1 in next, c do
- c[g1] = pack_normal(d1)
+ for g2, d2 in next, d1 do
+ local f = d2[1] if f and f ~= true then d2[1] = pack_indexed(f) end
+ local s = d2[2] if s and s ~= true then d2[2] = pack_indexed(s) end
+ end
end
else
for g1, d1 in next, c do
- for g2, d2 in next, d1 do
- local f = d2[1] if f then d2[1] = pack_indexed(f) end
- local s = d2[2] if s then d2[2] = pack_indexed(s) end
- end
+ c[g1] = pack_normal(d1)
end
end
end
elseif kind == "gpos_single" then
local c = step.coverage
if c then
- if step.format == "kern" then
- step.coverage = pack_normal(c)
- else
+ if step.format == "single" then
for g1, d1 in next, c do
- c[g1] = pack_indexed(d1)
+ if d1 and d1 ~= true then
+ c[g1] = pack_indexed(d1)
+ end
end
+ else
+ step.coverage = pack_normal(c)
end
end
elseif kind == "gpos_cursive" then
@@ -1399,7 +1498,7 @@ function readers.pack(data)
for i=1,#deltas do
local di = deltas[i]
local d = di.deltas
- local r = di.regions
+ -- local r = di.regions
for j=1,#d do
d[j] = pack_indexed(d[j])
end
@@ -1439,7 +1538,7 @@ function readers.pack(data)
report_otf("start packing: stage 2, pass %s",pass)
end
- local pack_normal, pack_indexed, pack_flat, pack_boolean, pack_mixed = packers(pass)
+ local pack_normal, pack_indexed, pack_flat, pack_boolean, pack_mixed, pack_normal_cc = packers(pass)
for unicode, description in next, descriptions do
local math = description.math
@@ -1463,9 +1562,7 @@ function readers.pack(data)
if kind == "gpos_pair" then
local c = step.coverage
if c then
- if step.format == "kern" then
- -- todo !
- else
+ if step.format == "pair" then
for g1, d1 in next, c do
for g2, d2 in next, d1 do
d1[g2] = pack_normal(d2)
@@ -1473,11 +1570,22 @@ function readers.pack(data)
end
end
end
--- elseif kind == "gpos_mark2base" or kind == "gpos_mark2mark" or kind == "gpos_mark2ligature" then
--- local c = step.baseclasses
--- for k, v in next, c do
--- c[k] = pack_normal(v)
--- end
+ -- elseif kind == "gpos_cursive" then
+ -- local c = step.coverage -- new
+ -- if c then
+ -- for g1, d1 in next, c do
+ -- c[g1] = pack_normal_cc(d1)
+ -- end
+ -- end
+ elseif kind == "gpos_mark2ligature" then
+ local c = step.baseclasses -- new
+ if c then
+ for g1, d1 in next, c do
+ for g2, d2 in next, d1 do
+ d1[g2] = pack_normal(d2)
+ end
+ end
+ end
end
local rules = step.rules
if rules then
@@ -1525,7 +1633,7 @@ function readers.pack(data)
report_otf("start packing: stage 3, pass %s",pass)
end
- local pack_normal, pack_indexed, pack_flat, pack_boolean, pack_mixed = packers(pass)
+ local pack_normal, pack_indexed, pack_flat, pack_boolean, pack_mixed, pack_normal_cc = packers(pass)
local function packthem(sequences)
for i=1,#sequences do
@@ -1539,18 +1647,23 @@ function readers.pack(data)
if kind == "gpos_pair" then
local c = step.coverage
if c then
- if step.format == "kern" then
- -- todo !
- else
+ if step.format == "pair" then
for g1, d1 in next, c do
c[g1] = pack_normal(d1)
end
end
end
+ elseif kind == "gpos_cursive" then
+ local c = step.coverage
+ if c then
+ for g1, d1 in next, c do
+ c[g1] = pack_normal_cc(d1)
+ end
+ end
end
end
end
- end
+ end
end
if sequences then
@@ -1626,6 +1739,15 @@ function readers.unpack(data)
-- end
end
+ -- local function expandranges(t,ranges)
+ -- for i=1,#ranges do
+ -- local r = ranges[i]
+ -- for k=r[1],r[2] do
+ -- t[k] = true
+ -- end
+ -- end
+ -- end
+
local function unpackthem(sequences)
for i=1,#sequences do
local sequence = sequences[i]
@@ -1635,20 +1757,26 @@ function readers.unpack(data)
local features = sequence.features
local flags = sequence.flags
local markclass = sequence.markclass
+ if features then
+ local tv = tables[features]
+ if tv then
+ sequence.features = tv
+ features = tv
+ end
+ for script, feature in next, features do
+ local tv = tables[feature]
+ if tv then
+ features[script] = tv
+ end
+ end
+ end
if steps then
for i=1,#steps do
local step = steps[i]
if kind == "gpos_pair" then
local c = step.coverage
if c then
- if step.format == "kern" then
- for g1, d1 in next, c do
- local tv = tables[d1]
- if tv then
- c[g1] = tv
- end
- end
- else
+ if step.format == "pair" then
for g1, d1 in next, c do
local tv = tables[d1]
if tv then
@@ -1665,29 +1793,41 @@ function readers.unpack(data)
local s = tables[d2[2]] if s then d2[2] = s end
end
end
+ else
+ for g1, d1 in next, c do
+ local tv = tables[d1]
+ if tv then
+ c[g1] = tv
+ end
+ end
end
end
elseif kind == "gpos_single" then
local c = step.coverage
if c then
- if step.format == "kern" then
- local tv = tables[c]
- if tv then
- step.coverage = tv
- end
- else
+ if step.format == "single" then
for g1, d1 in next, c do
local tv = tables[d1]
if tv then
c[g1] = tv
end
end
+ else
+ local tv = tables[c]
+ if tv then
+ step.coverage = tv
+ end
end
end
elseif kind == "gpos_cursive" then
local c = step.coverage
if c then
for g1, d1 in next, c do
+ local tv = tables[d1]
+ if tv then
+ d1 = tv
+ c[g1] = d1
+ end
local f = tables[d1[2]] if f then d1[2] = f end
local s = tables[d1[3]] if s then d1[3] = s end
end
@@ -1695,12 +1835,6 @@ function readers.unpack(data)
elseif kind == "gpos_mark2base" or kind == "gpos_mark2mark" then
local c = step.baseclasses
if c then
--- for k, v in next, c do
--- local tv = tables[v]
--- if tv then
--- c[k] = tv
--- end
--- end
for g1, d1 in next, c do
for g2, d2 in next, d1 do
local tv = tables[d2]
@@ -1722,14 +1856,13 @@ function readers.unpack(data)
elseif kind == "gpos_mark2ligature" then
local c = step.baseclasses
if c then
--- for k, v in next, c do
--- local tv = tables[v]
--- if tv then
--- c[k] = tv
--- end
--- end
for g1, d1 in next, c do
for g2, d2 in next, d1 do
+ local tv = tables[d2] -- new
+ if tv then
+ d2 = tv
+ d1[g2] = d2
+ end
for g3, d3 in next, d2 do
local tv = tables[d2[g3]]
if tv then
@@ -1766,6 +1899,18 @@ function readers.unpack(data)
before[i] = tv
end
end
+ -- for i=1,#before do
+ -- local bi = before[i]
+ -- local tv = tables[bi]
+ -- if tv then
+ -- bi = tv
+ -- before[i] = bi
+ -- end
+ -- local ranges = bi.ranges
+ -- if ranges then
+ -- expandranges(bi,ranges)
+ -- end
+ -- end
end
local after = rule.after
if after then
@@ -1780,6 +1925,18 @@ function readers.unpack(data)
after[i] = tv
end
end
+ -- for i=1,#after do
+ -- local ai = after[i]
+ -- local tv = tables[ai]
+ -- if tv then
+ -- ai = tv
+ -- after[i] = ai
+ -- end
+ -- local ranges = ai.ranges
+ -- if ranges then
+ -- expandranges(ai,ranges)
+ -- end
+ -- end
end
local current = rule.current
if current then
@@ -1794,6 +1951,18 @@ function readers.unpack(data)
current[i] = tv
end
end
+ -- for i=1,#current do
+ -- local ci = current[i]
+ -- local tv = tables[ci]
+ -- if tv then
+ -- ci = tv
+ -- current[i] = ci
+ -- end
+ -- local ranges = ci.ranges
+ -- if ranges then
+ -- expandranges(ci,ranges)
+ -- end
+ -- end
end
-- local lookups = rule.lookups
-- if lookups then
@@ -1813,19 +1982,6 @@ function readers.unpack(data)
end
end
end
- if features then
- local tv = tables[features]
- if tv then
- sequence.features = tv
- features = tv
- end
- for script, feature in next, features do
- local tv = tables[feature]
- if tv then
- features[script] = tv
- end
- end
- end
if order then
local tv = tables[order]
if tv then
@@ -1989,8 +2145,10 @@ local function mergesteps_1(lookup,strict)
return nofsteps - 1
end
-
-local function mergesteps_2(lookup,strict) -- pairs
+local function mergesteps_2(lookup) -- pairs
+ -- this can be tricky as we can have a match on a mark with no marks skip flag
+ -- in which case with multiple steps a hit can prevent a next step while in the
+ -- merged case we can hit differently (a messy font then anyway)
local steps = lookup.steps
local nofsteps = lookup.nofsteps
local first = steps[1]
@@ -2009,9 +2167,9 @@ local function mergesteps_2(lookup,strict) -- pairs
for k, v in next, steps[i].coverage do
local tk = target[k]
if tk then
- for k, v in next, v do
- if not tk[k] then
- tk[k] = v
+ for kk, vv in next, v do
+ if tk[kk] == nil then
+ tk[kk] = vv
end
end
else
@@ -2020,57 +2178,48 @@ local function mergesteps_2(lookup,strict) -- pairs
end
end
lookup.nofsteps = 1
- lookup.steps = { first }
+ lookup.merged = true
+ lookup.steps = { first }
return nofsteps - 1
end
+-- we could have a coverage[first][second] = { } already here (because eventually
+-- we also have something like that after loading)
local function mergesteps_3(lookup,strict) -- marks
local steps = lookup.steps
local nofsteps = lookup.nofsteps
- local first = steps[1]
report("merging %a steps of %a lookup %a",nofsteps,lookup.type,lookup.name)
- local baseclasses = { }
- local coverage = { }
- local used = { }
+ -- check first
+ local coverage = { }
for i=1,nofsteps do
- local offset = i*10
- local step = steps[i]
- for k, v in sortedhash(step.baseclasses) do
- baseclasses[offset+k] = v
- end
- for k, v in next, step.coverage do
- local tk = coverage[k]
+ for k, v in next, steps[i].coverage do
+ local tk = coverage[k] -- { class, { x, y } }
if tk then
- for k, v in next, v do
- if not tk[k] then
- tk[k] = v
- local c = offset + v[1]
- v[1] = c
- if not used[c] then
- used[c] = true
- end
- end
- end
+ report("quitting merge due to multiple checks")
+ return nofsteps
else
coverage[k] = v
- local c = offset + v[1]
- v[1] = c
- if not used[c] then
- used[c] = true
- end
end
end
end
- for k, v in next, baseclasses do
- if not used[k] then
- baseclasses[k] = nil
- report("discarding not used baseclass %i",k)
+ -- merge indeed
+ local first = steps[1]
+ local baseclasses = { } -- let's assume sparse step.baseclasses
+ for i=1,nofsteps do
+ local offset = i*10 -- we assume max 10 classes per step
+ local step = steps[i]
+ for k, v in sortedhash(step.baseclasses) do
+ baseclasses[offset+k] = v
+ end
+ for k, v in next, step.coverage do
+ v[1] = offset + v[1]
end
end
first.baseclasses = baseclasses
first.coverage = coverage
lookup.nofsteps = 1
+ lookup.merged = true
lookup.steps = { first }
return nofsteps - 1
end
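
A reduced sketch of what mergesteps_2 does with the coverages: later steps may only contribute second-glyph entries that the first step does not define yet, so lookup order is preserved (first step wins). Note the explicit == nil test from the hunk above: an entry that is literally false or true in the first step must not be overwritten either. The data here is invented:

    local steps = {
      { coverage = { A = { V = -40 } } },                            -- step 1
      { coverage = { A = { W = -30, V = -99 }, B = { V = -25 } } },  -- step 2
    }

    local target = steps[1].coverage
    for i=2,#steps do
      for first, seconds in next, steps[i].coverage do
        local tk = target[first]
        if tk then
          for second, value in next, seconds do
            if tk[second] == nil then -- first step wins
              tk[second] = value
            end
          end
        else
          target[first] = seconds
        end
      end
    end

    print(target.A.V, target.A.W, target.B.V) -- -40  -30  -25
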
@@ -2113,62 +2262,137 @@ local function mergesteps_4(lookup) -- ligatures
return nofsteps - 1
end
+-- so we assume only one cursive entry and exit and even then the first one seems
+-- to win anyway: no exit or entry quite the lookup match and then we take the
+-- next step; this means that we can as well merge them
+
+local function mergesteps_5(lookup) -- cursive
+ local steps = lookup.steps
+ local nofsteps = lookup.nofsteps
+ local first = steps[1]
+ report("merging %a steps of %a lookup %a",nofsteps,lookup.type,lookup.name)
+ local target = first.coverage
+ local hash = nil
+ for k, v in next, target do
+ hash = v[1]
+ break
+ end
+ for i=2,nofsteps do
+ for k, v in next, steps[i].coverage do
+ local tk = target[k]
+ if tk then
+ if not tk[2] then
+ tk[2] = v[2]
+ end
+ if not tk[3] then
+ tk[3] = v[3]
+ end
+ else
+ target[k] = v
+ v[1] = hash
+ end
+ end
+ end
+ lookup.nofsteps = 1
+ lookup.merged = true
+ lookup.steps = { first }
+ return nofsteps - 1
+end
+
local function checkkerns(lookup)
local steps = lookup.steps
local nofsteps = lookup.nofsteps
+ local kerned = 0
for i=1,nofsteps do
local step = steps[i]
if step.format == "pair" then
local coverage = step.coverage
local kerns = true
for g1, d1 in next, coverage do
- if d1[1] ~= 0 or d1[2] ~= 0 or d1[4] ~= 0 then
+ if d1 == true then
+ -- all zero
+ elseif not d1 then
+ -- null
+ elseif d1[1] ~= 0 or d1[2] ~= 0 or d1[4] ~= 0 then
kerns = false
break
end
end
if kerns then
report("turning pairs of step %a of %a lookup %a into kerns",i,lookup.type,lookup.name)
+ local c = { }
for g1, d1 in next, coverage do
- coverage[g1] = d1[3]
+ if d1 and d1 ~= true then
+ c[g1] = d1[3]
+ end
end
- step.format = "kern"
+ step.coverage = c
+ step.format = "move"
+ kerned = kerned + 1
end
end
end
+ return kerned
end
+-- There are several options to optimize but we have this somewhat fuzzy aspect of
+-- advancing (depending on the second of a pair) so we need to retain that information.
+--
+-- We can have:
+--
+-- true, nil|false
+--
+-- which effectively means: nothing to be done and advance to next (so not next of
+-- next) and because coverage should not overlap we can wipe these. However,
+-- checking for (true,nil) (false,nil) and omitting them doesn't gain much.
+
+-- Because we pack we cannot mix tables and numbers so we can only turn a whole set into
+-- format kern instead of pair.
+
local function checkpairs(lookup)
local steps = lookup.steps
local nofsteps = lookup.nofsteps
local kerned = 0
- for i=1,nofsteps do
- local step = steps[i]
- if step.format == "pair" then
- local coverage = step.coverage
- local kerns = true
- for g1, d1 in next, coverage do
- for g2, d2 in next, d1 do
- if d2[2] then
- kerns = false
- break
- else
- local v = d2[1]
- if v[1] ~= 0 or v[2] ~= 0 or v[4] ~= 0 then
- kerns = false
- break
- end
+
+ local function onlykerns(step)
+ local coverage = step.coverage
+ for g1, d1 in next, coverage do
+ for g2, d2 in next, d1 do
+ if d2[2] then
+ --- true or { a, b, c, d }
+ return false
+ else
+ local v = d2[1]
+ if v == true then
+ -- all zero
+ elseif v and (v[1] ~= 0 or v[2] ~= 0 or v[4] ~= 0) then
+ return false
end
end
end
- if kerns then
+ end
+ return coverage
+ end
+
+ for i=1,nofsteps do
+ local step = steps[i]
+ if step.format == "pair" then
+ local coverage = onlykerns(step)
+ if coverage then
report("turning pairs of step %a of %a lookup %a into kerns",i,lookup.type,lookup.name)
for g1, d1 in next, coverage do
+ local d = { }
for g2, d2 in next, d1 do
- d1[g2] = d2[1][3]
+ local v = d2[1]
+ if v == true then
+ -- ignore -- d1[g2] = nil
+ elseif v then
+ d[g2] = v[3] -- d1[g2] = v[3]
+ end
end
+ coverage[g1] = d
end
- step.format = "kern"
+ step.format = "move"
kerned = kerned + 1
end
end
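
checkpairs only flattens a step when every pair in it is a pure kern: no second-glyph adjustment (d2[2]) and, in the first-glyph record, only the third slot (the advance) may be nonzero. A small sketch of that test plus the rewrite into the flat "move" coverage, with made-up data laid out as { x, y, w, h } the way the checks above assume:

    local coverage = {
      A = { V = { { 0, 0, -55, 0 } } },
      T = { o = { { 0, 0, -20, 0 } } },
    }

    local function onlykerns(coverage)
      for g1, d1 in next, coverage do
        for g2, d2 in next, d1 do
          if d2[2] then
            return false -- the second glyph moves too: keep the pair format
          end
          local v = d2[1]
          if v ~= true and v and (v[1] ~= 0 or v[2] ~= 0 or v[4] ~= 0) then
            return false -- more than a plain width kern
          end
        end
      end
      return true
    end

    if onlykerns(coverage) then
      for g1, d1 in next, coverage do
        local d = { }
        for g2, d2 in next, d1 do
          local v = d2[1]
          if v and v ~= true then
            d[g2] = v[3] -- keep just the kern amount
          end
        end
        coverage[g1] = d
      end
    end

    print(coverage.A.V, coverage.T.o) -- -55  -20
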
@@ -2176,6 +2400,30 @@ local function checkpairs(lookup)
return kerned
end
+local compact_pairs = true
+local compact_singles = true
+
+local merge_pairs = true
+local merge_singles = true
+local merge_substitutions = true
+local merge_alternates = true
+local merge_multiples = true
+local merge_ligatures = true
+local merge_cursives = true
+local merge_marks = true
+
+directives.register("otf.compact.pairs", function(v) compact_pairs = v end)
+directives.register("otf.compact.singles", function(v) compact_singles = v end)
+
+directives.register("otf.merge.pairs", function(v) merge_pairs = v end)
+directives.register("otf.merge.singles", function(v) merge_singles = v end)
+directives.register("otf.merge.substitutions", function(v) merge_substitutions = v end)
+directives.register("otf.merge.alternates", function(v) merge_alternates = v end)
+directives.register("otf.merge.multiples", function(v) merge_multiples = v end)
+directives.register("otf.merge.ligatures", function(v) merge_ligatures = v end)
+directives.register("otf.merge.cursives", function(v) merge_cursives = v end)
+directives.register("otf.merge.marks", function(v) merge_marks = v end)
+
function readers.compact(data)
if not data or data.compacted then
return
@@ -2192,23 +2440,65 @@ function readers.compact(data)
for i=1,#lookups do
local lookup = lookups[i]
local nofsteps = lookup.nofsteps
+ local kind = lookup.type
allsteps = allsteps + nofsteps
if nofsteps > 1 then
- local kind = lookup.type
- if kind == "gsub_single" or kind == "gsub_alternate" or kind == "gsub_multiple" then
- merged = merged + mergesteps_1(lookup)
+ local merg = merged
+ if kind == "gsub_single" then
+ if merge_substitutions then
+ merged = merged + mergesteps_1(lookup)
+ end
+ elseif kind == "gsub_alternate" then
+ if merge_alternates then
+ merged = merged + mergesteps_1(lookup)
+ end
+ elseif kind == "gsub_multiple" then
+ if merge_multiples then
+ merged = merged + mergesteps_1(lookup)
+ end
elseif kind == "gsub_ligature" then
- merged = merged + mergesteps_4(lookup)
+ if merge_ligatures then
+ merged = merged + mergesteps_4(lookup)
+ end
elseif kind == "gpos_single" then
- merged = merged + mergesteps_1(lookup,true)
- checkkerns(lookup)
+ if merge_singles then
+ merged = merged + mergesteps_1(lookup,true)
+ end
+ if compact_singles then
+ kerned = kerned + checkkerns(lookup)
+ end
elseif kind == "gpos_pair" then
- merged = merged + mergesteps_2(lookup,true)
- kerned = kerned + checkpairs(lookup)
+ if merge_pairs then
+ merged = merged + mergesteps_2(lookup)
+ end
+ if compact_pairs then
+ kerned = kerned + checkpairs(lookup)
+ end
elseif kind == "gpos_cursive" then
- merged = merged + mergesteps_2(lookup)
+ if merge_cursives then
+ merged = merged + mergesteps_5(lookup)
+ end
elseif kind == "gpos_mark2mark" or kind == "gpos_mark2base" or kind == "gpos_mark2ligature" then
- merged = merged + mergesteps_3(lookup)
+ if merge_marks then
+ merged = merged + mergesteps_3(lookup)
+ end
+ end
+ if merg ~= merged then
+ lookup.merged = true
+ end
+ elseif nofsteps == 1 then
+ local kern = kerned
+ if kind == "gpos_single" then
+ if compact_singles then
+ kerned = kerned + checkkerns(lookup)
+ end
+ elseif kind == "gpos_pair" then
+ if compact_pairs then
+ kerned = kerned + checkpairs(lookup)
+ end
+ end
+ if kern ~= kerned then
+ -- lookup.kerned = true
end
end
end
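
The otf.compact.* and otf.merge.* directives make each optimization optional. Assuming the usual setters interface (directives.enable and directives.disable) and the runner's --directives switch, neither of which this patch changes, they can be toggled like any other directive; since readers.compact runs when the font data is prepared, a change only affects fonts loaded (or re-cached) afterwards:

    -- from Lua, for instance in a module:
    directives.disable("otf.merge.pairs")   -- keep pair steps separate
    directives.enable ("otf.compact.pairs") -- still collapse pure kern pairs

    -- presumably also from the command line:
    --   context --directives="otf.merge.pairs=no" myfile.tex
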
@@ -2226,6 +2516,79 @@ function readers.compact(data)
end
end
+local function mergesteps(t,k)
+ if k == "merged" then
+ local merged = { }
+ for i=1,#t do
+ local step = t[i]
+ local coverage = step.coverage
+ for k in next, coverage do
+ local m = merged[k]
+ if m then
+ m[2] = i
+ -- m[#m+1] = step
+ else
+ merged[k] = { i, i }
+ -- merged[k] = { step }
+ end
+ end
+ end
+ t.merged = merged
+ return merged
+ end
+end
+
+local function checkmerge(sequence)
+ local steps = sequence.steps
+ if steps then
+ setmetatableindex(steps,mergesteps)
+ end
+end
+
+local function checkflags(sequence,resources)
+ if not sequence.skiphash then
+ local flags = sequence.flags
+ if flags then
+ local skipmark = flags[1]
+ local skipligature = flags[2]
+ local skipbase = flags[3]
+ local markclass = sequence.markclass
+ local skipsome = skipmark or skipligature or skipbase or markclass or false
+ if skipsome then
+ sequence.skiphash = setmetatableindex(function(t,k)
+ local c = resources.classes[k] -- delayed table
+ local v = c == skipmark
+ or (markclass and c == "mark" and not markclass[k])
+ or c == skipligature
+ or c == skipbase
+ or false
+ t[k] = v
+ return v
+ end)
+ else
+ sequence.skiphash = false
+ end
+ else
+ sequence.skiphash = false
+ end
+ end
+end
+
+local function checksteps(sequence)
+ local steps = sequence.steps
+ if steps then
+ for i=1,#steps do
+ steps[i].index = i
+ end
+ end
+end
+
+if fonts.helpers then
+ fonts.helpers.checkmerge = checkmerge
+ fonts.helpers.checkflags = checkflags
+ fonts.helpers.checksteps = checksteps -- has to happen last
+end
+
function readers.expand(data)
if not data or data.expanded then
return
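
checkflags above builds sequence.skiphash lazily: the table only answers for glyphs that are actually looked at, and each answer is computed once from the (delayed) class table. A self-contained sketch of that memoization pattern, using a plain metatable instead of table.setmetatableindex and checking only the skip-mark case (the real code also covers ligatures, bases and a markclass):

    local classes = { [0x064B] = "mark", [0x0041] = "base" } -- invented sample

    local skipmark  = "mark" -- flags[1] in the code above
    local markclass = nil

    local skiphash = setmetatable({ }, { __index = function(t, k)
      local c = classes[k]
      local v = c == skipmark
             or (markclass and c == "mark" and not markclass[k])
             or false
      t[k] = v -- memoize: the next lookup is a plain indexed read
      return v
    end })

    print(skiphash[0x064B]) -- true  (computed)
    print(skiphash[0x064B]) -- true  (cached)
    print(skiphash[0x0041]) -- false
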
@@ -2267,6 +2630,11 @@ function readers.expand(data)
end
end
end
+
+ -- using a merged combined hash as first test saves some 30% on ebgaramond and
+ -- about 15% on arabtype .. then moving the a test also saves a bit (even when
+ -- often a is not set at all) so that one is a bit debatable
+
local function expandlookups(sequences)
if sequences then
-- we also need to do sublookups
@@ -2274,6 +2642,8 @@ function readers.expand(data)
local sequence = sequences[i]
local steps = sequence.steps
if steps then
+ local nofsteps = sequence.nofsteps
+
local kind = sequence.type
local markclass = sequence.markclass
if markclass then
@@ -2284,7 +2654,8 @@ function readers.expand(data)
sequence.markclass = markclasses[markclass]
end
end
- for i=1,sequence.nofsteps do
+
+ for i=1,nofsteps do
local step = steps[i]
local baseclasses = step.baseclasses
if baseclasses then
@@ -2300,13 +2671,14 @@ function readers.expand(data)
end
local rules = step.rules
if rules then
- local rulehash = { }
+ local rulehash = { n = 0 } -- is contexts in font-ots
local rulesize = 0
local coverage = { }
local lookuptype = sequence.type
+ local nofrules = #rules
step.coverage = coverage -- combined hits
- for nofrules=1,#rules do
- local rule = rules[nofrules]
+ for currentrule=1,nofrules do
+ local rule = rules[currentrule]
local current = rule.current
local before = rule.before
local after = rule.after
@@ -2337,7 +2709,7 @@ function readers.expand(data)
for i=1,#lookups do
local lookups = lookups[i]
if lookups then
- for k, v in next, lookups do
+ for k, v in next, lookups do -- actually this one is indexed
local lookup = sublookups[v]
if lookup then
lookups[k] = lookup
@@ -2352,9 +2724,9 @@ function readers.expand(data)
end
end
if sequence[1] then -- we merge coverage into one
- rulesize = rulesize + 1
- rulehash[rulesize] = {
- nofrules, -- 1
+ sequence.n = #sequence -- tiny speedup
+ local ruledata = {
+ currentrule, -- 1 -- original rule number, only use this for tracing!
lookuptype, -- 2
sequence, -- 3
start, -- 4
@@ -2363,20 +2735,61 @@ function readers.expand(data)
replacements, -- 7
subtype, -- 8
}
- for unic in next, sequence[start] do
- local cu = coverage[unic]
- if not cu then
- coverage[unic] = rulehash -- can now be done cleaner i think
+ --
+ -- possible optimization: per [unic] a rulehash, but beware:
+ -- contexts have unique coverage and chains can have multiple
+ -- hits (rules) per coverage entry
+ --
+ -- so: we can combine multiple steps as well as multiple rules
+ -- but that takes careful checking, in which case we can go the
+ -- step list approach and turn contexts into steps .. in fact,
+ -- if we turn multiple contexts into steps we're already ok as
+ -- steps gets a coverage hash by metatable
+ --
+ rulesize = rulesize + 1
+ rulehash[rulesize] = ruledata
+ rulehash.n = rulesize -- tiny speedup
+ --
+ if true then -- nofrules > 1
+
+ for unic in next, sequence[start] do
+ local cu = coverage[unic]
+ if cu then
+ local n = #cu+1
+ cu[n] = ruledata
+ cu.n = n
+ else
+ coverage[unic] = { ruledata, n = 1 }
+ end
end
+
+ else
+
+ for unic in next, sequence[start] do
+ local cu = coverage[unic]
+ if cu then
+ -- we can have a contextchain with many matches which we
+ -- can actually optimize
+ else
+ coverage[unic] = rulehash
+ end
+ end
+
end
end
end
end
end
+
+ checkmerge(sequence)
+ checkflags(sequence,resources)
+ checksteps(sequence)
+
end
end
end
end
+
expandlookups(sequences)
expandlookups(sublookups)
end
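
With this expansion the per-glyph coverage no longer points at one shared rulehash but at a list of candidate rules with an explicit count, so the runtime can try them in order without repeatedly taking the length. A sketch of the resulting shape (the ruledata entries are truncated here; the real ones carry the eight slots listed above):

    local ruledata_1 = { 1, "gsub_contextchain" } -- { rulenumber, lookuptype, ... }
    local ruledata_2 = { 2, "gsub_contextchain" }

    local coverage = { }

    local function register(unic, ruledata)
      local cu = coverage[unic]
      if cu then
        local n = cu.n + 1
        cu[n] = ruledata
        cu.n  = n
      else
        coverage[unic] = { ruledata, n = 1 }
      end
    end

    register(0x0066, ruledata_1) -- both rules can start at "f"
    register(0x0066, ruledata_2)

    print(coverage[0x0066].n)     -- 2
    print(coverage[0x0066][2][1]) -- 2 : original rule number, kept for tracing
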
diff --git a/tex/context/base/mkiv/font-pre.mkiv b/tex/context/base/mkiv/font-pre.mkiv
index 9336fa352..d09d871f0 100644
--- a/tex/context/base/mkiv/font-pre.mkiv
+++ b/tex/context/base/mkiv/font-pre.mkiv
@@ -19,7 +19,33 @@
% beware, base mode + dynamics can give weird effects
-% rlig ccmp
+% frac : with numr dnom
+%
+% vkrn valt vert vrt2 vpal : when vertical
+%
+% rtlm rtla : in r2l runs
+% ltrm ltra : in l2r runs
+%
+% rvrn : variable fonts
+%
+% rtbd lfbd : opbd
+%
+% rkrf rphf vatu vjmo tjmo rclt psts pstf ljmo haln
+% pres pref nukt
+% abvs abvm blwm blws cjct blwf akhn (indic)
+% half
+% abvf cfar (khmer)
+%
+% ccmp locl calt clig liga rlig
+%
+% mkmk mark kern (palt pwid) curs (by choice but some fonts need it)
+%
+% init medi isol fina (unicode)
+% fin3 fin2 med2 : syriac
+%
+% cpsp : percentage spacing (todo)
+%
+% dtls flac : math
\definefontfeature
[always]
@@ -27,7 +53,10 @@
script=auto, % on speed; 'base' just doesn't play well with dynamics; some day we can even
autoscript=position,
autolanguage=position,
+% ccmp=yes,
kern=yes, % consider skipping the base passes when no base mode is used
+% palt=yes,
+% pwid=yes,
mark=yes,
mkmk=yes,
curs=yes]
@@ -36,6 +65,11 @@
[default]
[always]
[liga=yes,
+% ccmp=yes, % maybe too
+% locl=yes, % maybe too
+% calt=yes, % maybe too
+% clig=yes, % maybe too
+% rlig=yes, % maybe too
tlig=yes,
trep=yes] % texligatures=yes,texquotes=yes
@@ -101,6 +135,11 @@
[always]
[compose=yes,
liga=yes,
+% ccmp=yes,
+% locl=yes,
+% calt=yes,
+% clig=yes,
+% rlig=yes,
tlig=yes,
trep=yes]
@@ -114,6 +153,7 @@
[mode=node,analyze=yes,language=dflt,ccmp=yes,
autoscript=position,autolanguage=position,
init=yes,medi=yes,fina=yes,isol=yes,
+ % fin2=yes,fin3=yes,med2=yes,
mark=yes,mkmk=yes,kern=yes,curs=yes,
liga=yes,dlig=yes,rlig=yes,clig=yes,calt=yes]
@@ -122,6 +162,7 @@
[mode=node,analyze=yes,language=dflt,ccmp=yes,
autoscript=position,autolanguage=position,
init=yes,medi=yes,fina=yes,isol=yes,
+ % fin2=yes,fin3=yes,med2=yes,
mark=yes,mkmk=yes,kern=yes,curs=yes,
rlig=yes,calt=yes]
@@ -266,7 +307,8 @@
\fi
\ifdefined\mathitalicsmode
- \mathitalicsmode\plusone % experiment
+ \mathitalicsmode\plusone % simple noads become zero
+ % \mathitalicsmode\plustwo % idem but inner is kept (for testing)
\fi
% \adaptfontfeature[*math*][mathnolimitsmode=1000] % only subscript
@@ -348,17 +390,6 @@
%D We define some colors that are used in tracing (for instance \OPENTYPE\
%D features). We cannot yet inherit because no colors are predefined.
-\definecolor[trace:0][s=.4]
-\definecolor[trace:1][r=.6]
-\definecolor[trace:2][g=.6]
-\definecolor[trace:3][b=.6]
-\definecolor[trace:4][r=.6,g=.6]
-\definecolor[trace:5][r=.6,b=.6]
-\definecolor[trace:6][g=.6,b=.6]
-\definecolor[trace:7][r=.8,g=.4]
-\definecolor[trace:8][r=.8,b=.4]
-\definecolor[trace:9][g=.4,b=.8]
-
\definecolor[font:init][r=.75]
\definecolor[font:medi][g=.75]
\definecolor[font:fina][b=.75]
@@ -366,26 +397,6 @@
\definecolor[font:mark][r=.75,b=.75] % [m=.75]
\definecolor[font:rest][b=.75,g=.75] % [c=.75]
-\definecolor[trace:w][s=1]
-\definecolor[trace:r][r=.75,t=.5,a=1]
-\definecolor[trace:g][g=.75,t=.5,a=1]
-\definecolor[trace:b][b=.75,t=.5,a=1]
-\definecolor[trace:c][c=.75,t=.5,a=1]
-\definecolor[trace:m][m=.75,t=.5,a=1]
-\definecolor[trace:y][y=.75,t=.5,a=1]
-\definecolor[trace:s][s=.75,t=.5,a=1]
-\definecolor[trace:o][r=1,g=.6,b=.1,t=.5,a=1]
-
-\definecolor[trace:dw][s=1]
-\definecolor[trace:dr][r=.75,t=.75,a=1]
-\definecolor[trace:dg][g=.75,t=.75,a=1]
-\definecolor[trace:db][b=.75,t=.75,a=1]
-\definecolor[trace:dc][c=.75,t=.75,a=1]
-\definecolor[trace:dm][m=.75,t=.75,a=1]
-\definecolor[trace:dy][y=.75,t=.75,a=1]
-\definecolor[trace:ds][s=.75,t=.75,a=1]
-\definecolor[trace:do][r=1,g=.6,b=.1,t=.75,a=1]
-
\definecolor[font:0] [s=1]
\definecolor[font:1] [r=.75]
\definecolor[font:2] [g=.75]
@@ -607,7 +618,7 @@
\definealternativestyle [\v!bigger] [\setbigbodyfont \tf] []
\definealternativestyle [\v!smaller] [\setsmallbodyfont\tf] []
-\definealternativestyle [\v!sans,\v!sansserif] [\ss] []
+\definealternativestyle [\v!sans,\v!sansserif] [\ss]
\definealternativestyle [\v!roman,\v!serif,\v!regular] [\rm]
\definealternativestyle [\v!handwritten] [\hw]
\definealternativestyle [\v!calligraphic] [\cg]
@@ -619,6 +630,12 @@
\definealternativestyle [\v!mononormal] [\tt\tf] []
\definealternativestyle [\v!monobold] [\tt\bf] []
+\definealternativestyle [typeface] [\typeface] [] % no translation here (quite basic)
+\definealternativestyle [boldface] [\boldface] []
+\definealternativestyle [slantedface] [\slantedface] []
+\definealternativestyle [italicface] [\italicface] []
+\definealternativestyle [swapface] [\swapface] []
+
% For Alan:
\definealternativestyle
@@ -689,6 +706,7 @@
\definefontfeature[f:oldstyle] [onum=yes]
\definefontfeature[f:tabular] [tnum=yes]
\definefontfeature[f:superiors][sups=yes]
+\definefontfeature[f:inferiors][subs=yes]
\definefontfeature[f:fractions][frac=yes]
\definefontfeature[f:kern] [kern=yes]
\definefontfeature[f:kerns] [kern=yes]
@@ -732,11 +750,15 @@
%D don't want huge switches to the main bodyfont and style, so
%D here too we use a direct method.
-\let\infofont \relax % satisfy dep checker
-\let\infofontbold\relax % satisfy dep checker
+\let\infofont \relax
+\let\infofontbold \relax
+\let\smallinfofont \relax
+\let\smallinfofontbold\relax
-\definefont[infofont] [file:dejavusansmono at 6pt] % todo \the\everybodyfont
-\definefont[infofontbold][file:dejavusansmono-bold at 6pt] % todo \the\everybodyfont
+\definefont[infofont] [file:dejavusansmono*none at 6pt]
+\definefont[infofontbold] [file:dejavusansmono-bold*none at 6pt]
+\definefont[smallinfofont] [file:dejavusansmono*none at 3pt]
+\definefont[smallinfofontbold][file:dejavusansmono-bold*none at 3pt]
%D Optimization (later we overload in math). Also needed in order to get \type {\ss}
%D properly defined.
diff --git a/tex/context/base/mkiv/font-run.mkiv b/tex/context/base/mkiv/font-run.mkiv
index ebb3a576c..610f2e62c 100644
--- a/tex/context/base/mkiv/font-run.mkiv
+++ b/tex/context/base/mkiv/font-run.mkiv
@@ -202,11 +202,13 @@
\scratchcounterthree\numexpr\charplane*256+\scratchcounter\relax
\iffontchar\font\scratchcounterthree
\setbox\scratchbox\ruledhpack{\char\scratchcounterthree}%
+ \bgroup
\tf
\startoverlay
{\wrapbox\scratchbox}
{\textbox\scratchnum}
\stopoverlay
+ \egroup
\else
\copy\scratchboxthree
\fi}}}%
diff --git a/tex/context/base/mkiv/font-sel.lua b/tex/context/base/mkiv/font-sel.lua
index b4dd9a555..0cf51cad3 100644
--- a/tex/context/base/mkiv/font-sel.lua
+++ b/tex/context/base/mkiv/font-sel.lua
@@ -6,6 +6,8 @@ if not modules then modules = { } end modules ['font-sel'] = {
license = "GNU General Public License"
}
+local next, type = next, type
+
local context = context
local cleanname = fonts.names.cleanname
local gsub, splitup, find, lower = string.gsub, string.splitup, string.find, string.lower
diff --git a/tex/context/base/mkiv/font-sel.mkvi b/tex/context/base/mkiv/font-sel.mkvi
index a78742928..87617d2d1 100644
--- a/tex/context/base/mkiv/font-sel.mkvi
+++ b/tex/context/base/mkiv/font-sel.mkvi
@@ -10,7 +10,7 @@
\writestatus{loading}{ConTeXt User Module / Selectfont}
-\registerctxluafile{font-sel}{1.001}
+\registerctxluafile{font-sel}{}
\unprotect
diff --git a/tex/context/base/mkiv/font-shp.lua b/tex/context/base/mkiv/font-shp.lua
index 6e21848a4..75a12ac82 100644
--- a/tex/context/base/mkiv/font-shp.lua
+++ b/tex/context/base/mkiv/font-shp.lua
@@ -6,7 +6,7 @@ if not modules then modules = { } end modules ['font-shp'] = {
license = "see context related readme files"
}
-local tonumber = tonumber
+local tonumber, next = tonumber, next
local concat = table.concat
local formatters = string.formatters
@@ -362,7 +362,7 @@ local function addvariableshapes(tfmdata,key,value)
-- we need inline in order to support color
local bt, et = getactualtext(char.tounicode or char.unicode or unicode)
char.commands = {
- { "special", "pdf:" .. segmentstopdf(segments,factor,bt,et) }
+ { "pdf", "origin", segmentstopdf(segments,factor,bt,et) }
}
end
end
diff --git a/tex/context/base/mkiv/font-sol.lua b/tex/context/base/mkiv/font-sol.lua
index 82fc3dc40..8967d88e6 100644
--- a/tex/context/base/mkiv/font-sol.lua
+++ b/tex/context/base/mkiv/font-sol.lua
@@ -67,6 +67,7 @@ local getsubtype = nuts.getsubtype
local getlist = nuts.getlist
local getdir = nuts.getdir
local getwidth = nuts.getwidth
+local getboxglue = nuts.getboxglue
local setattr = nuts.setattr
local setlink = nuts.setlink
@@ -336,7 +337,6 @@ local splitter_one = usernodeids["splitters.one"]
local splitter_two = usernodeids["splitters.two"]
local a_word = attributes.private('word')
-local a_fontkern = attributes.private('fontkern')
local encapsulate = false
@@ -395,7 +395,7 @@ function splitters.split(head)
if m > max_more then max_more = m end
start, stop, done = nil, nil, true
end
- while current do -- also nextid
+ while current do -- also ischar
local next = getnext(current)
local id = getid(current)
if id == glyph_code then
@@ -500,7 +500,7 @@ local function collect_words(list) -- can be made faster for attributes
report_splitters("skipped: %C",current.char)
end
end
- elseif id == kern_code and (getsubtype(current) == fontkern_code or getattr(current,a_fontkern)) then
+ elseif id == kern_code and getsubtype(current) == fontkern_code then
if first then
last = current
else
@@ -725,9 +725,7 @@ variants[v_random] = function(words,list,best,width,badness,line,set,listdir)
end
local function show_quality(current,what,line)
- local set = getfield(current,"glue_set")
- local sign = getfield(current,"glue_sign")
- local order = getfield(current,"glue_order")
+ local set, order, sign = getboxglue(current)
local amount = set * ((sign == 2 and -1) or 1)
report_optimizers("line %a, category %a, amount %a, set %a, sign %a, how %a, order %a",line,what,amount,set,sign,how,order)
end
diff --git a/tex/context/base/mkiv/font-sol.mkvi b/tex/context/base/mkiv/font-sol.mkvi
index d065b78ea..c908001ff 100644
--- a/tex/context/base/mkiv/font-sol.mkvi
+++ b/tex/context/base/mkiv/font-sol.mkvi
@@ -72,7 +72,7 @@
%D \disabletrackers[parbuilders.solutions.splitters.colors]
%D \stoptyping
-\registerctxluafile{font-sol}{1.001}
+\registerctxluafile{font-sol}{}
\unprotect
diff --git a/tex/context/base/mkiv/font-sty.mkvi b/tex/context/base/mkiv/font-sty.mkvi
index cf49cd5eb..2d00c5ec8 100644
--- a/tex/context/base/mkiv/font-sty.mkvi
+++ b/tex/context/base/mkiv/font-sty.mkvi
@@ -18,34 +18,33 @@
%D \macros
%D {definealternativestyle}
%D
-%D In the main modules we are going to implement lots of
-%D parameterized commands and one of these parameters will
-%D concern the font to use. To suit consistent use of fonts we
-%D here implement a mechanism for defining the keywords that
-%D present a particular style or alternative.
+%D In the main modules we are going to implement lots of parameterized commands and
+%D one of these parameters will concern the font to use. To suit consistent use of
+%D fonts we here implement a mechanism for defining the keywords that present a
+%D particular style or alternative.
%D
%D \starttyping
%D \definealternativestyle [keywords] [\style] [\nostyle]
%D \stoptyping
%D
-%D The first command is used in the normal textflow, while the
-%D second command takes care of headings and alike. Consider
-%D the next two definitions:
+%D The first command is used in the normal textflow, while the second command takes
+%D care of headings and alike. Consider the next two definitions:
%D
%D \starttyping
%D \definealternativestyle [bold] [\bf] []
%D \definealternativestyle [cap] [\cap] [\cap]
%D \stoptyping
%D
-%D A change \type{\bf} in a heading which is to be set in
-%D \type{\tfd} does not look that well, so therefore we leave
-%D the second argument of \type{\definealternativestyle} empty.
-%D When we capatalize characters using the pseudo small cap
-%D command \type{\cap}, we want this to take effect in both
-%D text and headings, which is accomplished by assigning both
-%D arguments.
+%D A change \type {\bf} in a heading which is to be set in \type {\tfd} does not look
+%D that well, so therefore we leave the second argument of \type
+%D {\definealternativestyle} empty. When we capitalize characters using the pseudo
+%D small cap command \type {\cap}, we want this to take effect in both text and
+%D headings, which is accomplished by assigning both arguments.
-\installcorenamespace{alternativestyles}
+\installcorenamespace{alternativestyles} % settings
+\installcorenamespace{alternativestyle} % instances
+
+\installsetuponlycommandhandler \??alternativestyles {alternativestyles}
\setnewconstant \c_font_current_alternative_style_index \plusone
@@ -57,13 +56,21 @@
\let\definestyle\definealternativestyle % later redefined
+\newconstant\c_fonts_basics_alternative_style_method
+
\def\font_basics_define_alternative_style_indeed#variantone#varianttwo#command%
- {\ifcsname#command\endcsname
+ {\setvalue{\??alternativestyle#command}{\font_helpers_apply_alternative_style{#variantone}{#varianttwo}}%
+ \ifcsname#command\endcsname
% no redefinition
+ \else\ifnum\c_fonts_basics_alternative_style_method=\plusone
+ \ifthirdargument
+ \setuevalue{#command}{\groupedcommand{\expandafter\noexpand\begincsname\??alternativestyle#command\endcsname}{}}%
+ \else
+ \setuvalue{#command}{\groupedcommand{#variantone}{}}%
+ \fi
\else
\setuvalue{#command}{\groupedcommand{#variantone}{}}%
- \fi
- \setvalue{\??alternativestyles#command}{\font_helpers_apply_alternative_style{#variantone}{#varianttwo}}}%
+ \fi\fi}
\def\font_helpers_apply_alternative_style
{\ifcase\c_font_current_alternative_style_index
@@ -77,18 +84,24 @@
\fi}
\def\applyalternativestyle#name% public
- {\begincsname\??alternativestyles#name\endcsname}
+ {\begincsname\??alternativestyle#name\endcsname}
+
+\appendtoks
+ \doifelse{\alternativestylesparameter\c!method}\v!auto
+ {\c_fonts_basics_alternative_style_method\plusone}%
+ {\c_fonts_basics_alternative_style_method\zerocount}%
+\to \everysetupalternativestyles
-%D Maybe too geneneric, but probably ok is the following. (Maybe one
-%D day we will use a dedicated grouped command for styles.)
+%D Maybe too generic, but probably ok is the following. (Maybe one day we will use a
+%D dedicated grouped command for styles.)
% \appendtoks
% \let\groupedcommand\thirdofthreearguments
% \to \everysimplifycommands
-%D This command also defines the keyword as command. This means
-%D that the example definition of \type{bold} we gave before,
-%D results in a command \type{\bold} which can be used as:
+%D This command also defines the keyword as command. This means that the example
+%D definition of \type {bold} we gave before, results in a command \type {\bold}
+%D which can be used as:
%D
%D \startbuffer
%D He's a \bold{bold} man with a {\bold head}.
@@ -102,39 +115,33 @@
%D \definealternativestyle[bold][\bf][]\getbuffer
%D \stopexample
%D
-%D Such definitions are of course unwanted for \type{\cap}
-%D because this would result in an endless recursive call.
-%D Therefore we check on the existance of both the command and
-%D the substitution. The latter is needed because for instance
-%D \type{\type} is an entirely diferent command. That command
-%D handles verbatim, while the style command would just switch
-%D to teletype font. This is just an example of a tricky
-%D naming coincidence.
-
+%D Such definitions are of course unwanted for \type {\cap} because this would
+%D result in an endless recursive call. Therefore we check on the existence of both
+%D the command and the substitution. The latter is needed because for instance \type
+%D {\type} is an entirely different command. That command handles verbatim, while the
+%D style command would just switch to teletype font. This is just an example of a
+%D tricky naming coincidence.
+%D
%D \macros
%D {doconvertfont,noconvertfont,
%D dontconvertfont,redoconvertfont}
%D
-%D After having defined such keywords, we can call for them by
-%D using
+%D After having defined such keywords, we can call for them by using
%D
%D \starttyping
%D \doconvertfont{keyword}{text}
%D \stoptyping
%D
-%D We deliberately pass an argument. This enables us to
-%D assign converters that handle one argument, like
-%D \type{\cap}.
+%D We deliberately pass an argument. This enables us to assign converters that
+%D handle one argument, like \type {\cap}.
%D
-%D By default the first specification is used to set the style,
-%D exept when we say \type{\dontconvertfont}, after which the
-%D second specification is used. We can also directly call for
-%D \type{\noconvertfont}. In nested calls, we can restore the
-%D conversion by saying \type{\redoconvertfont}.
-
-%D These commands are not grouped! Grouping is most probably
-%D done by the calling macro's and would lead to unnecessary
-%D overhead.
+%D By default the first specification is used to set the style, except when we say
+%D \type {\dontconvertfont}, after which the second specification is used. We can
+%D also directly call for \type {\noconvertfont}. In nested calls, we can restore
+%D the conversion by saying \type {\redoconvertfont}.
+%D
+%D These commands are not grouped! Grouping is most probably done by the calling
+%D macros and would lead to unnecessary overhead.
\let\m_current_convert_font \empty
\let\m_current_convert_font_dt\empty
@@ -149,7 +156,7 @@
\def\font_helpers_do_convert_font
{\edef\m_current_convert_font_dt{\detokenize\expandafter{\m_current_convert_font}}%
- \ifcsname\??alternativestyles\m_current_convert_font_dt\endcsname
+ \ifcsname\??alternativestyle\m_current_convert_font_dt\endcsname
\expandafter\lastnamedcs
\else\ifcsname\m_current_convert_font_dt\endcsname
\doubleexpandafter\lastnamedcs
@@ -157,8 +164,8 @@
\doubleexpandafter\m_current_convert_font
\fi\fi}
-%D Low level switches (downward compatible, but we keep them as one can use
-%D them in styles):
+%D Low level switches (downward compatible, but we keep them as one can use them in
+%D styles):
%D
%D \starttyping
%D \usemodule[abr-02]
@@ -188,7 +195,7 @@
\unexpanded\def\dousecurrentstyleparameter % empty check outside here
{\edef\detokenizedstyleparameter{\detokenize\expandafter{\currentstyleparameter}}%
\settrue\fontattributeisset % reset is done elsewhere
- \ifcsname\??alternativestyles\detokenizedstyleparameter\endcsname
+ \ifcsname\??alternativestyle\detokenizedstyleparameter\endcsname
\lastnamedcs
\else\ifcsname\detokenizedstyleparameter\endcsname
\lastnamedcs
@@ -392,8 +399,8 @@
\unexpanded\edef\vsone#character{#character\normalUchar"FE00 } % used
\unexpanded\edef\vstwo#character{#character\normalUchar"FE01 } % not used but handy for testing
-%D For historic reasons we keep the following around but they are no longer
-%D that relevant for \MKIV.
+%D For historic reasons we keep the following around but they are no longer that
+%D relevant for \MKIV.
\unexpanded\def\doattributes#1#2#3#4%
{\begingroup % geen \bgroup, anders in mathmode lege \hbox
diff --git a/tex/context/base/mkiv/font-syn.lua b/tex/context/base/mkiv/font-syn.lua
index c4dcf0bcd..52f425db3 100644
--- a/tex/context/base/mkiv/font-syn.lua
+++ b/tex/context/base/mkiv/font-syn.lua
@@ -14,9 +14,11 @@ if not modules then modules = { } end modules ['font-syn'] = {
-- old ff loader: 140 sec
-- new lua loader: 5 sec
+-- maybe find(...,strictname,1,true)
+
local next, tonumber, type, tostring = next, tonumber, type, tostring
local sub, gsub, match, find, lower, upper = string.sub, string.gsub, string.match, string.find, string.lower, string.upper
-local concat, sort, fastcopy = table.concat, table.sort, table.fastcopy
+local concat, sort, fastcopy, tohash = table.concat, table.sort, table.fastcopy, table.tohash
local serialize, sortedhash = table.serialize, table.sortedhash
local lpegmatch = lpeg.match
local unpack = unpack or table.unpack
@@ -35,6 +37,7 @@ local splitname = file.splitname
local basename = file.basename
local nameonly = file.nameonly
local pathpart = file.pathpart
+local suffixonly = file.suffix
local filejoin = file.join
local is_qualified_path = file.is_qualified_path
local exists = io.exists
@@ -393,11 +396,11 @@ filters.ttc = filters.otf
-- local hash = { }
-- local okay = false
-- for line in f:lines() do -- slow but only a few lines at the beginning
--- if find(line,"dict begin") then
+-- if find(line,"dict begin",1,true) then
-- okay = true
-- elseif not okay then
-- -- go on
--- elseif find(line,"currentdict end") then
+-- elseif find(line,"currentdict end",1,true) then
-- break
-- else
-- local key, value = lpegmatch(p_entry,line)
@@ -423,8 +426,10 @@ filters.list = {
-- to be considered: loop over paths per list entry (so first all otf ttf etc)
-names.fontconfigfile = "fonts.conf" -- a bit weird format, bonus feature
-names.osfontdirvariable = "OSFONTDIR" -- the official way, in minimals etc
+names.fontconfigfile = "fonts.conf" -- a bit weird format, bonus feature
+names.osfontdirvariable = "OSFONTDIR" -- the official way, in minimals etc
+names.extrafontsvariable = "EXTRAFONTS" -- the official way, in minimals etc
+names.runtimefontsvariable = "RUNTIMEFONTS" -- the official way, in minimals etc
filters.paths = { }
filters.names = { }
@@ -436,7 +441,7 @@ function names.getpaths(trace)
local v = cleanpath(t[i])
v = gsub(v,"/+$","") -- not needed any more
local key = lower(v)
- report_names("%a specifies path %a",where,v)
+ report_names("variable %a specifies path %a",where,v)
if not hash[key] then
r = r + 1
result[r] = v
@@ -448,6 +453,10 @@ function names.getpaths(trace)
if path ~= "" then
collect(resolvers.expandedpathlist(path),path)
end
+ local path = names.extrafontsvariable or ""
+ if path ~= "" then
+ collect(resolvers.expandedpathlist(path),path)
+ end
if xml then
local confname = resolvers.expansion("FONTCONFIG_FILE") or ""
if confname == "" then
@@ -539,23 +548,6 @@ names.cleanfilename = cleanfilename
-- return result
-- end
-local function walk_tree(pathlist,suffix,identify)
- if pathlist then
- for i=1,#pathlist do
- local path = pathlist[i]
- path = cleanpath(path .. "/")
- path = gsub(path,"/+","/")
- local pattern = path .. "**." .. suffix -- ** forces recurse
- report_names("globbing path %a",pattern)
- local t = dir.glob(pattern)
- sort(t,sorter)
- for j=1,#t do
- local completename = t[j]
- identify(completename,basename(completename),suffix,completename)
- end
- end
- end
-end
local function check_name(data,result,filename,modification,suffix,subfont)
-- shortcuts
@@ -1002,9 +994,11 @@ local function unpackreferences()
end
local function analyzefiles(olddata)
+
if not trace_warnings then
report_names("warnings are disabled (tracker 'fonts.warnings')")
end
+
local data = names.data
local done = { }
local totalnofread = 0
@@ -1020,6 +1014,26 @@ local function analyzefiles(olddata)
local oldspecifications = olddata and olddata.specifications or { }
local oldrejected = olddata and olddata.rejected or { }
local treatmentdata = treatments.data or { } -- when used outside context
+ ----- walked = setmetatableindex("number")
+
+ local function walk_tree(pathlist,suffix,identify)
+ if pathlist then
+ for i=1,#pathlist do
+ local path = pathlist[i]
+ path = cleanpath(path .. "/")
+ path = gsub(path,"/+","/")
+ local pattern = path .. "**." .. suffix -- ** forces recurse
+ report_names("globbing path %a",pattern)
+ local t = dir.glob(pattern)
+ sort(t,sorter)
+ for j=1,#t do
+ local completename = t[j]
+ identify(completename,basename(completename),suffix,completename)
+ end
+ -- walked[path] = walked[path] + #t
+ end
+ end
+ end
local function identify(completename,name,suffix,storedname)
local pathpart, basepart = splitbase(completename)
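The relocated walk_tree helper relies on dir.glob's recursive pattern; a minimal sketch, not part of the patch, using a hypothetical directory:

-- the "**" makes dir.glob descend into subdirectories, so one call collects
-- every file with the given suffix below the path
local list = dir.glob("/usr/local/share/fonts/**.otf")
for i=1,#list do
    print(list[i])
end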
@@ -1123,6 +1137,7 @@ local function analyzefiles(olddata)
end
logs.flush() -- a bit overkill for each font, maybe not needed here
end
+
local function traverse(what, method)
local list = filters.list
for n=1,#list do
@@ -1141,7 +1156,9 @@ local function analyzefiles(olddata)
end
logs.flush()
end
+
-- problem .. this will not take care of duplicates
+
local function withtree(suffix)
resolvers.dowithfilesintree(".*%." .. suffix .. "$", function(method,root,path,name)
if method == "file" or method == "tree" then
@@ -1158,16 +1175,20 @@ local function analyzefiles(olddata)
report_names("%s entries found, %s %s files checked, %s okay",total,checked,suffix,done)
end)
end
+
local function withlsr(suffix) -- all trees
-- we do this only for a stupid names run, not used for context itself,
-- using the vars is too clumsy so we just stick to a full scan instead
local pathlist = resolvers.splitpath(resolvers.showpath("ls-R") or "")
walk_tree(pathlist,suffix,identify)
end
+
local function withsystem(suffix) -- OSFONTDIR cum suis
walk_tree(names.getpaths(trace),suffix,identify)
end
+
traverse("tree",withtree) -- TEXTREE only
+
if not usesystemfonts then
report_names("ignoring system fonts")
elseif texconfig.kpse_init then
@@ -1175,9 +1196,15 @@ local function analyzefiles(olddata)
else
traverse("system", withsystem)
end
+
data.statistics.readfiles = totalnofread
data.statistics.skippedfiles = totalnofskipped
data.statistics.duplicatefiles = totalnofduplicates
+
+ -- for k, v in sortedhash(walked) do
+ -- report_names("%s : %i",k,v)
+ -- end
+
end
local function addfilenames()
@@ -1492,10 +1519,54 @@ end
-- end
-- end
+local runtimefiles = { }
+local runtimedone = false
+
+local function addruntimepath(path)
+ names.load()
+ local paths = type(path) == "table" and path or { path }
+ local suffixes = tohash(filters.list)
+ for i=1,#paths do
+ local path = resolveprefix(paths[i])
+ if path ~= "" then
+ local list = dir.glob(path.."/*")
+ for i=1,#list do
+ local fullname = list[i]
+ local suffix = lower(suffixonly(fullname))
+ if suffixes[suffix] then
+ local c = cleanfilename(fullname)
+ runtimefiles[c] = fullname
+ if trace_names then
+ report_names("adding runtime filename %a for %a",c,fullname)
+ end
+ end
+ end
+ end
+ end
+end
+
+local function addruntimefiles(variable)
+ local paths = variable and resolvers.expandedpathlistfromvariable(variable)
+ if paths and #paths > 0 then
+ addruntimepath(paths)
+ end
+end
+
+names.addruntimepath = addruntimepath
+names.addruntimefiles = addruntimefiles
+
function names.getfilename(askedname,suffix) -- last resort, strip funny chars
+ if not runtimedone then
+ addruntimefiles(names.runtimefontsvariable)
+ runtimedone = true
+ end
+ local cleanname = cleanfilename(askedname,suffix)
+ local found = runtimefiles[cleanname]
+ if found then
+ return found
+ end
names.load()
local files = names.data.files
- local cleanname = cleanfilename(askedname,suffix)
local found = files and files[cleanname] or ""
if found == "" and is_reloaded() then
files = names.data.files
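The runtime file mechanism added above can also be fed directly from Lua; a minimal sketch, not part of the patch, assuming the public fonts.names namespace and a hypothetical directory name:

-- register loose font files so that getfilename consults them before
-- falling back to the font database
fonts.names.addruntimepath("./extra-fonts")   -- hypothetical path
fonts.names.addruntimefiles("RUNTIMEFONTS")   -- or expand a path variable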
diff --git a/tex/context/base/mkiv/font-tfm.lua b/tex/context/base/mkiv/font-tfm.lua
index 6584190ce..0059e6296 100644
--- a/tex/context/base/mkiv/font-tfm.lua
+++ b/tex/context/base/mkiv/font-tfm.lua
@@ -244,6 +244,8 @@ local function read_from_tfm(specification)
--
constructors.enhanceparameters(parameters) -- official copies for us
--
+ properties.private = properties.private or tfmdata.private or privateoffset
+ --
if newtfmdata then
--
-- We do nothing as we assume flat tfm files. It would become real messy
@@ -436,7 +438,7 @@ do
local originals = tfmdata.characters
local indices = { }
local parentfont = { "font", 1 }
- local private = fonts.constructors.privateoffset
+ local private = tfmdata.private or fonts.constructors.privateoffset
local reported = encdone[tfmfile][encfile]
-- create characters table
@@ -514,6 +516,7 @@ do
tfmdata.tounicode = 1
tfmdata.embedding = "subset"
tfmdata.usedbitmap = bitmap and virtualid
+ tfmdata.private = private
return tfmdata
end
@@ -548,7 +551,9 @@ end
local flushstreamobject = lpdf and lpdf.flushstreamobject
local setfontattributes = pdf.setfontattributes
- if not flushstreamobject then
+ if flushstreamobject then
+ -- we're in context
+ else
flushstreamobject = function(data)
return pdf.obj {
immediate = true,
diff --git a/tex/context/base/mkiv/font-tra.mkiv b/tex/context/base/mkiv/font-tra.mkiv
index 38b172ba6..c51ba78fc 100644
--- a/tex/context/base/mkiv/font-tra.mkiv
+++ b/tex/context/base/mkiv/font-tra.mkiv
@@ -32,7 +32,7 @@
%D The implementation is rather straightforward in using
%D \type{\halign}.
-\fetchruntimecommand \showbodyfont {\f!fontprefix\s!run}
+\fetchruntimecommand \showbodyfont \f!font_run
%D \macros
%D {showfontstrip, testminimalbaseline, showminimalbaseline}
@@ -43,9 +43,9 @@
%D
%D \showfontstrip \blank \showminimalbaseline
-\fetchruntimecommand \showfontstrip {\f!fontprefix\s!run}
-\fetchruntimecommand \testminimalbaseline {\f!fontprefix\s!run}
-\fetchruntimecommand \showminimalbaseline {\f!fontprefix\s!run}
+\fetchruntimecommand \showfontstrip \f!font_run
+\fetchruntimecommand \testminimalbaseline \f!font_run
+\fetchruntimecommand \showminimalbaseline \f!font_run
%D \macros
%D {showkerning}
@@ -54,7 +54,7 @@
%D
%D \showkerning{Can you guess what kerning is?}
-\fetchruntimecommand \showkerning {\f!fontprefix\s!run}
+\fetchruntimecommand \showkerning \f!font_run
%D \macros
%D {showbodyfontenvironment,showfont,showfontstyle,showligatures}
@@ -67,7 +67,7 @@
%D
%D \showsetup{showbodyfontenvironment}
-\fetchruntimecommand \showbodyfontenvironment {\f!fontprefix\s!run}
+\fetchruntimecommand \showbodyfontenvironment \f!font_run
%D
%D The following command generates a fontmap:
@@ -79,12 +79,12 @@
%D \typebuffer
%D \getbuffer
-\fetchruntimecommand \showfont {\f!fontprefix\s!run}
-\fetchruntimecommand \showfontstyle {\f!fontprefix\s!run}
-\fetchruntimecommand \showligature {\f!fontprefix\s!run}
-\fetchruntimecommand \showligatures {\f!fontprefix\s!run}
-\fetchruntimecommand \showcharratio {\f!fontprefix\s!run}
-\fetchruntimecommand \showfontparameters {\f!fontprefix\s!run}
+\fetchruntimecommand \showfont \f!font_run
+\fetchruntimecommand \showfontstyle \f!font_run
+\fetchruntimecommand \showligature \f!font_run
+\fetchruntimecommand \showligatures \f!font_run
+\fetchruntimecommand \showcharratio \f!font_run
+\fetchruntimecommand \showfontparameters \f!font_run
\unexpanded\def\showchardata #1{\ctxcommand{showchardata("#1")}}
\unexpanded\def\showfontdata {\ctxcommand{showfontparameters()}}
@@ -124,7 +124,18 @@
\unexpanded\def\otfstepcharcommand#1#2#3% font char class
{\otfstepspace
- \doif{#3}{mark}{\underbar}{U+\hexnumber{#2}}:\ruledhbox{\ctxlua{nodes.tracers.fontchar(#1,#2)}}%
+ \doif{#3}{mark}{\underbar}{U+\hexnumber{#2}}:%
+ \setbox\scratchbox\hbox{\ctxlua{nodes.tracers.fontchar(#1,#2)}}%
+ \ifdim\wd\scratchbox=\zeropoint
+ \scratchwidth.125\onepoint
+ \scratchdistance\dimexpr(\emwidth/2-\scratchwidth)\relax
+ \kern\scratchdistance
+ \ruledhbox to \scratchwidth{\hss\box\scratchbox\hss}%
+ \kern-\scratchwidth
+ \hskip\scratchdistance
+ \else
+ \ruledhbox{\box\scratchbox}%
+ \fi
\otfstepspace}
\unexpanded\def\otfstepfontcommand#1#2#3% id font size
@@ -187,6 +198,41 @@
% \blank}%
% \endgroup}
+\newconstant\showotfstepsmode \showotfstepsmode\plusfour
+
+\unexpanded\def\showotfsteps_n
+ {\blank
+ \begingroup
+ \advance\leftskip6\emwidth
+ \showotfstepmessages\recurselevel
+ \par
+ \endgroup
+ \blank
+ \dontleavehmode
+ \hbox to \hsize \bgroup
+ \hbox to 6\emwidth \bgroup
+ \bf
+ \ifnum\recurselevel=\scratchcounter result\else step \recurselevel\fi
+ \hss
+ \egroup
+ \vtop \bgroup
+ \hsize\dimexpr\hsize-6\emwidth\relax
+ \resetallattributes
+ \lefttoright
+ \dontleavehmode
+ \ifnum\recurselevel=\scratchcounter
+ \ruledhbox{\box\otfcompositionbox}%
+ \else
+ \ruledhbox{\showotfstepglyphs\recurselevel}%
+ \fi
+ \quad
+ \showotfstepchars\recurselevel
+ \hfill
+ \par
+ \egroup
+ \egroup
+ \blank}
+
\unexpanded\def\showotfsteps
{\begingroup
\veryraggedright
@@ -215,41 +261,28 @@
\blank
\scratchcounter\otfnoffeaturesteps\relax
\dorecurse\scratchcounter
- {\blank
- \begingroup
- \advance\leftskip6\emwidth
- \showotfstepmessages\recurselevel
- \par
- \endgroup
- \blank
- \dontleavehmode
- \hbox to \hsize \bgroup
- \hbox to 6\emwidth \bgroup
- \bf
- \ifnum\recurselevel=\scratchcounter result\else step \recurselevel\fi
- \hss
- \egroup
- \vtop \bgroup
- \hsize\dimexpr\hsize-6\emwidth\relax
- \resetallattributes
- \pardir TLT\textdir TLT\relax
- \dontleavehmode
- \ifnum\recurselevel=\scratchcounter
- \ruledhbox{\box\otfcompositionbox}%
- \else
- \ruledhbox{\showotfstepglyphs\recurselevel}%
- \fi
- \quad
- \showotfstepchars\recurselevel
- \hfill
- \par
- \egroup
- \egroup
- \blank}%
+ {\ifcase\showotfstepsmode
+ \or % 1 = only first
+ \ifnum\recurselevel=\plusone
+ \showotfsteps_n
+ \fi
+ \or % 2 = only last
+ \ifnum\recurselevel=\scratchcounter
+ \showotfsteps_n
+ \fi
+ \or % 3 = first and last
+ \ifnum\recurselevel=\plusone
+ \showotfsteps_n
+ \else\ifnum\recurselevel=\scratchcounter
+ \showotfsteps_n
+ \fi\fi
+ \else % everything
+ \showotfsteps_n
+ \fi}%
\endgroup}
\unexpanded\def\startotfsample
- {\enabletrackers[*otf.sample]% beware, kind of global
+ {\enabletrackers[otf.sample.silent]% beware, kind of global
\startotfcollecting
\begingroup
\veryraggedright
@@ -259,7 +292,7 @@
\unexpanded\def\stopotfsample
{\endgroup
\stopotfcollecting
- \disabletrackers[*otf.sample]% beware, kind of global: otf.sample
+ \disabletrackers[otf.sample]% beware, kind of global: otf.sample
\showotfsteps
\resetotfcollecting}
@@ -275,6 +308,9 @@
\letvalue{\??otfcompositiondir +1}\lefttoright
\letvalue{\??otfcompositiondir 1}\lefttoright
+\unexpanded\def\setotfcompositiondirection#1%
+ {\begincsname\??otfcompositiondir#1\endcsname}
+
\unexpanded\def\showotfcomposition#1#2#3% {font*features at size}, rl=-1, text
{\begingroup
\forgetparindent
@@ -283,10 +319,45 @@
\setupalign[\v!verytolerant,\v!flushleft]%
\startotfsample
\nohyphens
- \global\setbox\otfcompositionbox\hbox{\definedfont[#1]\relax\getvalue{\??otfcompositiondir#2}\relax#3}%
+ \global\setbox\otfcompositionbox\hbox{\definedfont[#1]\relax\setotfcompositiondirection{#2}\relax#3}%
\stopotfsample
\endgroup}
+%D \startbuffer
+%D \startotfcompositionlist{Serif*default @ 11pt}{l2r}%
+%D \showotfcompositionsample{effe}
+%D \stopotfcompositionlist
+%D \stopbuffer
+%D
+%D \typebuffer \getbuffer
+
+\unexpanded\def\showotfcompositionlist#1#2#3%
+ {\begingroup
+ \definedfont[#1]%
+ \setbox\scratchbox\hbox\bgroup
+ \setotfcompositiondirection{#2}%
+ #3%
+ \egroup
+ \strut
+ \def|##1|{\kern\onepoint\string|\kern\onepoint##1\kern\onepoint\string|\kern\onepoint}%
+ \cldcontext{nodes.listtoutf(tex.box[\number\scratchbox].list,"{\\kern\\onepoint}",true)}%
+ \endgroup}
+
+\unexpanded\def\startotfcompositionlist#1#2#3\stopotfcompositionlist
+ {\begingroup
+ \unexpanded\def\showotfcompositionsample##1%
+ {\NC\type{##1}%
+ \NC\showotfcompositionlist{Mono}{#2}{##1}%
+ \NC\showotfcompositionlist{#1}{#2}{##1}%
+ \NC\definedfont[#1]##1%
+ \NC\NR}%
+ \starttabulate[|||||]%
+ #3%
+ \stoptabulate
+ \endgroup}
+
+\let\stopotfcompositionlist\relax
+
% new
\unexpanded\def\savefont[#1]% not yet in i-*.xml
diff --git a/tex/context/base/mkiv/font-ttf.lua b/tex/context/base/mkiv/font-ttf.lua
index 339764d4a..df08787f9 100644
--- a/tex/context/base/mkiv/font-ttf.lua
+++ b/tex/context/base/mkiv/font-ttf.lua
@@ -33,7 +33,7 @@ if not modules then modules = { } end modules ['font-ttf'] = {
-- delta = (1-factor)*left + factor * right
local next, type, unpack = next, type, unpack
-local bittest, band, rshift = bit32.btest, bit32.band, bit32.rshift
+local band, rshift = bit32.band, bit32.rshift
local sqrt, round = math.sqrt, math.round
local char = string.char
local concat = table.concat
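The rest of this file replaces bittest calls by explicit masking; a sketch, not part of the patch, of the equivalence being relied on:

local band = bit32.band
local flag = 0x09
-- bit32.btest(flag,0x08) is true exactly when band(flag,0x08) ~= 0, so the
-- repeat-flag test in readglyph can be written without the extra call:
if band(flag,0x08) ~= 0 then
    -- a repeat count byte follows in the glyf stream
end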
@@ -698,7 +698,7 @@ local function readglyph(f,nofcontours) -- read deltas here, saves space
while i <= nofpoints do
local flag = readbyte(f)
flags[i] = flag
- if bittest(flag,0x08) then
+ if band(flag,0x08) ~= 0 then
for j=1,readbyte(f) do
i = i + 1
flags[i] = flag
@@ -711,8 +711,8 @@ local function readglyph(f,nofcontours) -- read deltas here, saves space
local x = 0
for i=1,nofpoints do
local flag = flags[i]
- local short = bittest(flag,0x02)
- local same = bittest(flag,0x10)
+ local short = band(flag,0x02) ~= 0
+ local same = band(flag,0x10) ~= 0
if short then
if same then
x = x + readbyte(f)
@@ -724,13 +724,13 @@ local function readglyph(f,nofcontours) -- read deltas here, saves space
else
x = x + readshort(f)
end
- points[i] = { x, 0, bittest(flag,0x01) }
+ points[i] = { x, 0, band(flag,0x01) ~= 0 }
end
local y = 0
for i=1,nofpoints do
local flag = flags[i]
- local short = bittest(flag,0x04)
- local same = bittest(flag,0x20)
+ local short = band(flag,0x04) ~= 0
+ local same = band(flag,0x20) ~= 0
if short then
if same then
y = y + readbyte(f)
@@ -759,19 +759,19 @@ local function readcomposite(f)
while true do
local flags = readushort(f)
local index = readushort(f)
- ----- f_words = bittest(flags,0x0001)
- local f_xyarg = bittest(flags,0x0002)
- ----- f_round = bittest(flags,0x0004+0x0002)
- ----- f_scale = bittest(flags,0x0008)
- ----- f_reserved = bittest(flags,0x0010)
- ----- f_more = bittest(flags,0x0020)
- ----- f_xyscale = bittest(flags,0x0040)
- ----- f_matrix = bittest(flags,0x0080)
- ----- f_instruct = bittest(flags,0x0100)
- ----- f_usemine = bittest(flags,0x0200)
- ----- f_overlap = bittest(flags,0x0400)
- local f_offset = bittest(flags,0x0800)
- ----- f_uoffset = bittest(flags,0x1000)
+ ----- f_words = band(flags,0x0001) ~= 0
+ local f_xyarg = band(flags,0x0002) ~= 0
+ ----- f_round = band(flags,0x0006) ~= 0 -- 2 + 4
+ ----- f_scale = band(flags,0x0008) ~= 0
+ ----- f_reserved = band(flags,0x0010) ~= 0
+ ----- f_more = band(flags,0x0020) ~= 0
+ ----- f_xyscale = band(flags,0x0040) ~= 0
+ ----- f_matrix = band(flags,0x0080) ~= 0
+ ----- f_instruct = band(flags,0x0100) ~= 0
+ ----- f_usemine = band(flags,0x0200) ~= 0
+ ----- f_overlap = band(flags,0x0400) ~= 0
+ local f_offset = band(flags,0x0800) ~= 0
+ ----- f_uoffset = band(flags,0x1000) ~= 0
local xscale = 1
local xrotate = 0
local yrotate = 0
@@ -781,7 +781,7 @@ local function readcomposite(f)
local base = false
local reference = false
if f_xyarg then
- if bittest(flags,0x0001) then -- f_words
+ if band(flags,0x0001) ~= 0 then -- f_words
xoffset = readshort(f)
yoffset = readshort(f)
else
@@ -789,7 +789,7 @@ local function readcomposite(f)
yoffset = readchar(f) -- signed byte, stupid name
end
else
- if bittest(flags,0x0001) then -- f_words
+ if band(flags,0x0001) ~= 0 then -- f_words
base = readshort(f)
reference = readshort(f)
else
@@ -797,21 +797,21 @@ local function readcomposite(f)
reference = readchar(f) -- signed byte, stupid name
end
end
- if bittest(flags,0x0008) then -- f_scale
+ if band(flags,0x0008) ~= 0 then -- f_scale
xscale = read2dot14(f)
yscale = xscale
if f_xyarg and f_offset then
xoffset = xoffset * xscale
yoffset = yoffset * yscale
end
- elseif bittest(flags,0x0040) then -- f_xyscale
+ elseif band(flags,0x0040) ~= 0 then -- f_xyscale
xscale = read2dot14(f)
yscale = read2dot14(f)
if f_xyarg and f_offset then
xoffset = xoffset * xscale
yoffset = yoffset * yscale
end
- elseif bittest(flags,0x0080) then -- f_matrix
+ elseif band(flags,0x0080) ~= 0 then -- f_matrix
xscale = read2dot14(f)
xrotate = read2dot14(f)
yrotate = read2dot14(f)
@@ -824,16 +824,16 @@ local function readcomposite(f)
nofcomponents = nofcomponents + 1
components[nofcomponents] = {
index = index,
- usemine = bittest(flags,0x0200), -- f_usemine
- round = bittest(flags,0x0006), -- f_round,
+ usemine = band(flags,0x0200) ~= 0, -- f_usemine
+ round = band(flags,0x0006) ~= 0, -- f_round,
base = base,
reference = reference,
matrix = { xscale, xrotate, yrotate, yscale, xoffset, yoffset },
}
- if bittest(flags,0x0100) then
+ if band(flags,0x0100) ~= 0 then
instructions = true
end
- if not bittest(flags,0x0020) then -- f_more
+ if band(flags,0x0020) == 0 then -- f_more
break
end
end
@@ -963,7 +963,7 @@ local function readpoints(f)
else
if count < 128 then
-- no second byte, use count
- elseif bittest(count,0x80) then
+ elseif band(count,0x80) ~= 0 then
count = band(count,0x7F) * 256 + readbyte(f)
else
-- bad news
@@ -973,7 +973,7 @@ local function readpoints(f)
local n = 1 -- indices
while p < count do
local control = readbyte(f)
- local runreader = bittest(control,0x80) and readushort or readbyte
+ local runreader = band(control,0x80) ~= 0 and readushort or readbyte
local runlength = band(control,0x7F)
for i=1,runlength+1 do
n = n + runreader(f)
@@ -994,12 +994,12 @@ local function readdeltas(f,nofpoints)
if not control then
break
end
- local allzero = bittest(control,0x80)
+ local allzero = band(control,0x80) ~= 0
local runlength = band(control,0x3F) + 1
if allzero then
z = z + runlength
else
- local runreader = bittest(control,0x40) and readshort or readinteger
+ local runreader = band(control,0x40) ~= 0 and readshort or readinteger
if z > 0 then
for i=1,z do
p = p + 1
@@ -1035,7 +1035,7 @@ local function readdeltas(f,nofpoints)
while nofpoints > 0 do
local control = readbyte(f)
if control then
- local allzero = bittest(control,0x80)
+ local allzero = band(control,0x80) ~= 0
local runlength = band(control,0x3F) + 1
if allzero then
for i=1,runlength do
@@ -1043,7 +1043,7 @@ local function readdeltas(f,nofpoints)
deltas[p] = 0
end
else
- local runreader = bittest(control,0x40) and readshort or readinteger
+ local runreader = band(control,0x40) ~= 0 and readshort or readinteger
for i=1,runlength do
p = p + 1
deltas[p] = runreader(f)
@@ -1088,7 +1088,7 @@ function readers.gvar(f,fontdata,specification,glyphdata,shapedata)
local dowidth = not fontdata.variabledata.hvarwidths
-- there is one more offset (so that one can calculate the size i suppose)
-- so we could test for overflows but we simply assume sane font files
- if bittest(flags,0x0001) then
+ if band(flags,0x0001) ~= 0 then
for i=1,nofglyphs+1 do
data[i] = dataoffset + readulong(f)
end
@@ -1130,7 +1130,7 @@ function readers.gvar(f,fontdata,specification,glyphdata,shapedata)
local allpoints = (shape.nofpoints or 0) -- + 1
local shared = false
local nofshared = 0
- if bittest(flags,0x8000) then -- has shared points
+ if band(flags,0x8000) ~= 0 then -- has shared points
-- go to the packed stream (get them once)
local current = getposition(f)
setposition(f,offset)
@@ -1143,9 +1143,9 @@ function readers.gvar(f,fontdata,specification,glyphdata,shapedata)
local size = readushort(f) -- check
local flags = readushort(f)
local index = band(flags,0x0FFF)
- local haspeak = bittest(flags,0x8000)
- local intermediate = bittest(flags,0x4000)
- local private = bittest(flags,0x2000)
+ local haspeak = band(flags,0x8000) ~= 0
+ local intermediate = band(flags,0x4000) ~= 0
+ local private = band(flags,0x2000) ~= 0
local peak = nil
local start = nil
local stop = nil
diff --git a/tex/context/base/mkiv/font-unk.mkiv b/tex/context/base/mkiv/font-unk.mkiv
index 988e5df3d..4b2615c20 100644
--- a/tex/context/base/mkiv/font-unk.mkiv
+++ b/tex/context/base/mkiv/font-unk.mkiv
@@ -52,17 +52,19 @@
\definefontsynonym [Handwriting] [unknown]
\definefontsynonym [Calligraphic] [unknown]
-%D This permit us to define (use) fonts that refer to the default
-%D style (so, Bold may expand to SansBold or SerifBold, depending
-%D on the default style in the typeface).
-
-\definefontsynonym[\s!Normal] [\noexpand\v_font_string_d]
-\definefontsynonym[\s!Bold] [\noexpand\v_font_string_d\noexpand\s!Bold]
-\definefontsynonym[\s!Italic] [\noexpand\v_font_string_d\noexpand\s!Italic]
-\definefontsynonym[\s!Slanted] [\noexpand\v_font_string_d\noexpand\s!Slanted]
-\definefontsynonym[\s!BoldItalic] [\noexpand\v_font_string_d\noexpand\s!BoldItalic]
-\definefontsynonym[\s!BoldSlanted][\noexpand\v_font_string_d\noexpand\s!BoldSlanted]
-\definefontsynonym[\s!Caps] [\noexpand\v_font_string_d\noexpand\s!Caps]
+%D This permits us to define (use) fonts that refer to the default style (so, Bold
+%D may expand to SansBold or SerifBold, depending on the default style in the
+%D typeface). The \LUA\ call is used to set the current alternative in a
+%D non|-|interfering way. This makes sure that the CurrentFont synonym (in font-sym)
+%D is working as expected (e.g.\ in MixedCaps). Yes, this is complicated stuff.
+
+\definefontsynonym[\s!Normal] [\noexpand\clf_tf\noexpand\v_font_string_d]
+\definefontsynonym[\s!Bold] [\noexpand\clf_bf\noexpand\v_font_string_d\noexpand\s!Bold]
+\definefontsynonym[\s!Italic] [\noexpand\clf_it\noexpand\v_font_string_d\noexpand\s!Italic]
+\definefontsynonym[\s!Slanted] [\noexpand\clf_sl\noexpand\v_font_string_d\noexpand\s!Slanted]
+\definefontsynonym[\s!BoldItalic] [\noexpand\clf_bi\noexpand\v_font_string_d\noexpand\s!BoldItalic]
+\definefontsynonym[\s!BoldSlanted][\noexpand\clf_bs\noexpand\v_font_string_d\noexpand\s!BoldSlanted]
+\definefontsynonym[\s!Caps] [\noexpand\clf_tf\noexpand\v_font_string_d\noexpand\s!Caps]
%D Also handy:
@@ -143,8 +145,7 @@
\definebodyfont [default] [cg]
[\s!tf=Calligraphy sa 1]
-%D These definitions come into action as soon as names are
-%D mapped onto real file names (or names that themselves are
-%D mapped).
+%D These definitions come into action as soon as names are mapped onto real file
+%D names (or names that themselves are mapped).
\protect \endinput
diff --git a/tex/context/base/mkiv/font-vf.lua b/tex/context/base/mkiv/font-vf.lua
deleted file mode 100644
index 401e84956..000000000
--- a/tex/context/base/mkiv/font-vf.lua
+++ /dev/null
@@ -1,206 +0,0 @@
-if not modules then modules = { } end modules ['font-vf'] = {
- version = 1.001,
- comment = "companion to font-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
---[[ldx--
-This is very experimental code! Not yet adapted to recent changes. This will change.
---ldx]]--
-
--- present in the backend but unspecified:
---
--- vf.rule vf.special vf.right vf.push vf.down vf.char vf.node vf.fontid vf.pop vf.image vf.nop
-
-local next = next
-
-local allocate = utilities.storage.allocate
-local setmetatableindex = table.setmetatableindex
-local fastcopy = table.fastcopy
-
-local fonts = fonts
-local constructors = fonts.constructors
-local vf = constructors.handlers.vf
-vf.version = 1.000 -- same as tfm
-
---[[ldx--
-We overload the reader.
---ldx]]--
-
--- general code / already frozen
---
--- function vf.find(name)
--- name = file.removesuffix(file.basename(name))
--- if constructors.resolvevirtualtoo then
--- local format = fonts.loggers.format(name)
--- if format == 'tfm' or format == 'ofm' then
--- if trace_defining then
--- report_defining("locating vf for %a",name)
--- end
--- return findbinfile(name,"ovf") or ""
--- else
--- if trace_defining then
--- report_defining("vf for %a is already taken care of",name)
--- end
--- return ""
--- end
--- else
--- if trace_defining then
--- report_defining("locating vf for %a",name)
--- end
--- return findbinfile(name,"ovf") or ""
--- end
--- end
---
--- callbacks.register('find_vf_file', vf.find, "locating virtual fonts, insofar needed") -- not that relevant any more
-
--- specific code (will move to other module)
-
-local definers = fonts.definers
-local methods = definers.methods
-
-local variants = allocate()
-local combinations = { }
-local combiner = { }
-local whatever = allocate()
-local helpers = allocate()
-local predefined = allocate {
- dummy = { "comment" },
- push = { "push" },
- pop = { "pop" },
-}
-
-methods.variants = variants -- todo .. wrong namespace
-vf.combinations = combinations
-vf.combiner = combiner
-vf.whatever = whatever
-vf.helpers = helpers
-vf.predefined = predefined
-
-setmetatableindex(whatever, function(t,k) local v = { } t[k] = v return v end)
-
-local function checkparameters(g,f)
- if f and g and not g.parameters and #g.fonts > 0 then
- local p = { }
- for k,v in next, f.parameters do
- p[k] = v
- end
- g.parameters = p
- setmetatable(p, getmetatable(f.parameters))
- end
-end
-
-function methods.install(tag, rules)
- vf.combinations[tag] = rules
- variants[tag] = function(specification)
- return vf.combine(specification,tag)
- end
-end
-
-local function combine_load(g,name)
- return constructors.readanddefine(name or g.specification.name,g.specification.size)
-end
-
-local function combine_assign(g, name, from, to, start, force)
- local f, id = combine_load(g,name)
- if f and id then
- -- optimize for whole range, then just g = f
- if not from then from, to = 0, 0xFF00 end
- if not to then to = from end
- if not start then start = from end
- local fc, gc = f.characters, g.characters
- local fd, gd = f.descriptions, g.descriptions
- local hn = #g.fonts+1
- g.fonts[hn] = { id = id } -- no need to be sparse
- for i=from,to do
- if fc[i] and (force or not gc[i]) then
- gc[i] = fastcopy(fc[i],true) -- can be optimized
- gc[i].commands = { { 'slot', hn, start } }
- gd[i] = fd[i]
- end
- start = start + 1
- end
- checkparameters(g,f)
- end
-end
-
-local function combine_process(g,list)
- if list then
- for _,v in next, list do
- (combiner.commands[v[1]] or nop)(g,v)
- end
- end
-end
-
-local function combine_names(g,name,force)
- local f, id = constructors.readanddefine(name,g.specification.size)
- if f and id then
- local fc, gc = f.characters, g.characters
- local fd, gd = f.descriptions, g.descriptions
- g.fonts[#g.fonts+1] = { id = id } -- no need to be sparse
- local hn = #g.fonts
- for k, v in next, fc do
- if force or not gc[k] then
- gc[k] = fastcopy(v,true)
- gc[k].commands = { { 'slot', hn, k } }
- gd[i] = fd[i]
- end
- end
- checkparameters(g,f)
- end
-end
-
-local combine_feature = function(g,v)
- local key, value = v[2], v[3]
- if key then
- if value == nil then
- value = true
- end
- local specification = g.specification
- if specification then
- local normalfeatures = specification.features.normal
- if normalfeatures then
- normalfeatures[key] = value -- otf?
- end
- end
- end
-end
-
---~ combiner.load = combine_load
---~ combiner.assign = combine_assign
---~ combiner.process = combine_process
---~ combiner.names = combine_names
---~ combiner.feature = combine_feature
-
-combiner.commands = allocate {
- ["initialize"] = function(g,v) combine_assign (g,g.properties.name) end,
- ["include-method"] = function(g,v) combine_process (g,combinations[v[2]]) end, -- name
- -- ["copy-parameters"] = function(g,v) combine_parameters(g,v[2]) end, -- name
- ["copy-range"] = function(g,v) combine_assign (g,v[2],v[3],v[4],v[5],true) end, -- name, from-start, from-end, to-start
- ["copy-char"] = function(g,v) combine_assign (g,v[2],v[3],v[3],v[4],true) end, -- name, from, to
- ["fallback-range"] = function(g,v) combine_assign (g,v[2],v[3],v[4],v[5],false) end, -- name, from-start, from-end, to-start
- ["fallback-char"] = function(g,v) combine_assign (g,v[2],v[3],v[3],v[4],false) end, -- name, from, to
- ["copy-names"] = function(g,v) combine_names (g,v[2],true) end,
- ["fallback-names"] = function(g,v) combine_names (g,v[2],false) end,
- ["feature"] = combine_feature,
-}
-
-function vf.combine(specification,tag)
- local g = {
- name = specification.name,
- properties = {
- virtualized = true,
- },
- fonts = {
- },
- characters = {
- },
- descriptions = {
- },
- specification = fastcopy(specification),
- }
- combine_process(g,combinations[tag])
- return g
-end
diff --git a/tex/context/base/mkiv/font-vir.lua b/tex/context/base/mkiv/font-vir.lua
new file mode 100644
index 000000000..03ad7fc85
--- /dev/null
+++ b/tex/context/base/mkiv/font-vir.lua
@@ -0,0 +1,206 @@
+if not modules then modules = { } end modules ['font-vir'] = {
+ version = 1.001,
+ comment = "companion to font-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+--[[ldx--
+This is very experimental code! Not yet adapted to recent changes. This will change.
+--ldx]]--
+
+-- present in the backend but unspecified:
+--
+-- vf.rule vf.special vf.right vf.push vf.down vf.char vf.node vf.fontid vf.pop vf.image vf.nop
+
+local next = next
+
+local allocate = utilities.storage.allocate
+local setmetatableindex = table.setmetatableindex
+local fastcopy = table.fastcopy
+
+local fonts = fonts
+local constructors = fonts.constructors
+local vf = constructors.handlers.vf
+vf.version = 1.000 -- same as tfm
+
+--[[ldx--
+We overload the reader.
+--ldx]]--
+
+-- general code / already frozen
+--
+-- function vf.find(name)
+-- name = file.removesuffix(file.basename(name))
+-- if constructors.resolvevirtualtoo then
+-- local format = fonts.loggers.format(name)
+-- if format == 'tfm' or format == 'ofm' then
+-- if trace_defining then
+-- report_defining("locating vf for %a",name)
+-- end
+-- return findbinfile(name,"ovf") or ""
+-- else
+-- if trace_defining then
+-- report_defining("vf for %a is already taken care of",name)
+-- end
+-- return ""
+-- end
+-- else
+-- if trace_defining then
+-- report_defining("locating vf for %a",name)
+-- end
+-- return findbinfile(name,"ovf") or ""
+-- end
+-- end
+--
+-- callbacks.register('find_vf_file', vf.find, "locating virtual fonts, insofar needed") -- not that relevant any more
+
+-- specific code (will move to other module)
+
+local definers = fonts.definers
+local methods = definers.methods
+
+local variants = allocate()
+local combinations = { }
+local combiner = { }
+local whatever = allocate()
+local helpers = allocate()
+local predefined = allocate {
+ dummy = { "comment" },
+ push = { "push" },
+ pop = { "pop" },
+}
+
+methods.variants = variants -- todo .. wrong namespace
+vf.combinations = combinations
+vf.combiner = combiner
+vf.whatever = whatever
+vf.helpers = helpers
+vf.predefined = predefined
+
+setmetatableindex(whatever, function(t,k) local v = { } t[k] = v return v end)
+
+local function checkparameters(g,f)
+ if f and g and not g.parameters and #g.fonts > 0 then
+ local p = { }
+ for k,v in next, f.parameters do
+ p[k] = v
+ end
+ g.parameters = p
+ setmetatable(p, getmetatable(f.parameters))
+ end
+end
+
+function methods.install(tag, rules)
+ vf.combinations[tag] = rules
+ variants[tag] = function(specification)
+ return vf.combine(specification,tag)
+ end
+end
+
+local function combine_load(g,name)
+ return constructors.readanddefine(name or g.specification.name,g.specification.size)
+end
+
+local function combine_assign(g, name, from, to, start, force)
+ local f, id = combine_load(g,name)
+ if f and id then
+ -- optimize for whole range, then just g = f
+ if not from then from, to = 0, 0xFF00 end
+ if not to then to = from end
+ if not start then start = from end
+ local fc, gc = f.characters, g.characters
+ local fd, gd = f.descriptions, g.descriptions
+ local hn = #g.fonts+1
+ g.fonts[hn] = { id = id } -- no need to be sparse
+ for i=from,to do
+ if fc[i] and (force or not gc[i]) then
+ gc[i] = fastcopy(fc[i],true) -- can be optimized
+ gc[i].commands = { { "slot", hn, start } }
+ gd[i] = fd[i]
+ end
+ start = start + 1
+ end
+ checkparameters(g,f)
+ end
+end
+
+local function combine_process(g,list)
+ if list then
+ for _,v in next, list do
+ (combiner.commands[v[1]] or nop)(g,v)
+ end
+ end
+end
+
+local function combine_names(g,name,force)
+ local f, id = constructors.readanddefine(name,g.specification.size)
+ if f and id then
+ local fc, gc = f.characters, g.characters
+ local fd, gd = f.descriptions, g.descriptions
+ g.fonts[#g.fonts+1] = { id = id } -- no need to be sparse
+ local hn = #g.fonts
+ for k, v in next, fc do
+ if force or not gc[k] then
+ gc[k] = fastcopy(v,true)
+ gc[k].commands = { { "slot", hn, k } }
+ gd[k] = fd[k]
+ end
+ end
+ checkparameters(g,f)
+ end
+end
+
+local combine_feature = function(g,v)
+ local key, value = v[2], v[3]
+ if key then
+ if value == nil then
+ value = true
+ end
+ local specification = g.specification
+ if specification then
+ local normalfeatures = specification.features.normal
+ if normalfeatures then
+ normalfeatures[key] = value -- otf?
+ end
+ end
+ end
+end
+
+--~ combiner.load = combine_load
+--~ combiner.assign = combine_assign
+--~ combiner.process = combine_process
+--~ combiner.names = combine_names
+--~ combiner.feature = combine_feature
+
+combiner.commands = allocate {
+ ["initialize"] = function(g,v) combine_assign (g,g.properties.name) end,
+ ["include-method"] = function(g,v) combine_process (g,combinations[v[2]]) end, -- name
+ -- ["copy-parameters"] = function(g,v) combine_parameters(g,v[2]) end, -- name
+ ["copy-range"] = function(g,v) combine_assign (g,v[2],v[3],v[4],v[5],true) end, -- name, from-start, from-end, to-start
+ ["copy-char"] = function(g,v) combine_assign (g,v[2],v[3],v[3],v[4],true) end, -- name, from, to
+ ["fallback-range"] = function(g,v) combine_assign (g,v[2],v[3],v[4],v[5],false) end, -- name, from-start, from-end, to-start
+ ["fallback-char"] = function(g,v) combine_assign (g,v[2],v[3],v[3],v[4],false) end, -- name, from, to
+ ["copy-names"] = function(g,v) combine_names (g,v[2],true) end,
+ ["fallback-names"] = function(g,v) combine_names (g,v[2],false) end,
+ ["feature"] = combine_feature,
+}
+
+function vf.combine(specification,tag)
+ local g = {
+ name = specification.name,
+ properties = {
+ virtualized = true,
+ },
+ fonts = {
+ },
+ characters = {
+ },
+ descriptions = {
+ },
+ specification = fastcopy(specification),
+ }
+ combine_process(g,combinations[tag])
+ return g
+end
diff --git a/tex/context/base/mkiv/good-mth.lua b/tex/context/base/mkiv/good-mth.lua
index 661189350..18a97976f 100644
--- a/tex/context/base/mkiv/good-mth.lua
+++ b/tex/context/base/mkiv/good-mth.lua
@@ -15,6 +15,7 @@ local trace_goodies = false trackers.register("fonts.goodies", function(v)
local report_goodies = logs.reporter("fonts","goodies")
local registerotffeature = fonts.handlers.otf.features.register
+
local fontgoodies = fonts.goodies or { }
local fontcharacters = fonts.hashes.characters
diff --git a/tex/context/base/mkiv/grph-con.lua b/tex/context/base/mkiv/grph-con.lua
index 49b5952df..f106e1036 100644
--- a/tex/context/base/mkiv/grph-con.lua
+++ b/tex/context/base/mkiv/grph-con.lua
@@ -8,6 +8,7 @@ if not modules then modules = { } end modules ['grph-con'] = {
local P, R, S, Cc, C, Cs, Ct, lpegmatch = lpeg.P, lpeg.R, lpeg.S, lpeg.Cc, lpeg.C, lpeg.Cs, lpeg.Ct, lpeg.match
+local tonumber = tonumber
local longtostring = string.longtostring
local formatters = string.formatters
local expandfilename = dir.expandname
diff --git a/tex/context/base/mkiv/grph-epd.lua b/tex/context/base/mkiv/grph-epd.lua
index 17f04d593..7855ce891 100644
--- a/tex/context/base/mkiv/grph-epd.lua
+++ b/tex/context/base/mkiv/grph-epd.lua
@@ -11,20 +11,39 @@ local settings_to_hash = utilities.parsers.settings_to_hash
-- todo: page, name, file, url
+-- I have some experimental code for including comments and fields but it's
+-- unfinished and not included as it was just a proof of concept to get some idea
+-- about what is needed and possible. But the placeholders are here already.
+
local codeinjections = backends.codeinjections
-function figures.mergegoodies(optionlist)
+local function mergegoodies(optionlist)
local options = settings_to_hash(optionlist)
- local all = options[variables.all] or options[variables.yes]
+ local all = options[variables.all] or options[variables.yes]
if all or options[variables.reference] then
codeinjections.mergereferences()
end
- if all or options[variables.layer] then
- codeinjections.mergeviewerlayers()
+ if all or options[variables.comment] then
+ codeinjections.mergecomments()
end
if all or options[variables.bookmark] then
codeinjections.mergebookmarks()
end
+ if all or options[variables.field] then
+ codeinjections.mergefields()
+ end
+ if all or options[variables.layer] then
+ codeinjections.mergeviewerlayers()
+ end
+ codeinjections.flushmergelayer()
+end
+
+function figures.mergegoodies(optionlist)
+ context.stepwise(function()
+ -- we use stepwise because we might need to define symbols
+ -- for stamps that have no default appearance
+ mergegoodies(optionlist)
+ end)
end
interfaces.implement {
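A sketch, not part of the patch, of driving the merger directly from Lua; the option list takes the same keywords the hash lookups above check for:

-- merge only references and viewer layers from the current figure;
-- "all" (or "yes") would enable every category
figures.mergegoodies("reference,layer")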
diff --git a/tex/context/base/mkiv/grph-epd.mkiv b/tex/context/base/mkiv/grph-epd.mkiv
index 8152772aa..45c11795b 100644
--- a/tex/context/base/mkiv/grph-epd.mkiv
+++ b/tex/context/base/mkiv/grph-epd.mkiv
@@ -15,7 +15,7 @@
\unprotect
-\registerctxluafile{grph-epd}{1.001}
+\registerctxluafile{grph-epd}{}
\def\figurereference{\clf_figurestatus{reference}{}} % might become private
@@ -43,6 +43,34 @@
\fi
\to \externalfigurepostprocessors
+\defineframed
+ [epdfstampsymbol]
+ [\c!foregroundstyle=\v!mono,
+ \c!background=\c!color,
+ \c!rulethickness=.125\exheight,
+ \c!offset=.250\exheight,
+ \c!backgroundcolor=lightgray, % can be adapted before first usage
+ \c!framecolor=darkgray, % can be adapted before first usage
+ \c!corner=\v!round]
+
+\definesymbol[Stamped] [\epdfstampsymbol{Stamped}]
+
+\definesymbol[Approved] [\epdfstampsymbol{Approved}]
+\definesymbol[Experimental] [\epdfstampsymbol{Experimental}]
+\definesymbol[NotApproved] [\epdfstampsymbol{NotApproved}]
+\definesymbol[AsIs] [\epdfstampsymbol{AsIs}]
+\definesymbol[Expired] [\epdfstampsymbol{Expired}]
+\definesymbol[NotForPublicRelease] [\epdfstampsymbol{NotForPublicRelease}]
+\definesymbol[Confidential] [\epdfstampsymbol{Confidential}]
+\definesymbol[Final] [\epdfstampsymbol{Final}]
+\definesymbol[Sold] [\epdfstampsymbol{Sold}]
+\definesymbol[Departmental] [\epdfstampsymbol{Departmental}]
+\definesymbol[ForComment] [\epdfstampsymbol{ForComment}]
+\definesymbol[TopSecret] [\epdfstampsymbol{TopSecret}]
+\definesymbol[Draft] [\epdfstampsymbol{Draft}]
+\definesymbol[ForPublicRelease] [\epdfstampsymbol{ForPublicRelease}]
+
+
\protect \endinput
% /Properties << /xxxx 22 0 R >>
diff --git a/tex/context/base/mkiv/grph-fig.mkiv b/tex/context/base/mkiv/grph-fig.mkiv
index 1fdc0caa0..3bf3248a6 100644
--- a/tex/context/base/mkiv/grph-fig.mkiv
+++ b/tex/context/base/mkiv/grph-fig.mkiv
@@ -67,7 +67,6 @@
\def\grph_buffers_run_indeed[#1][#2]%
{\xdef\lasttypesetbuffer{\clf_runbuffer{#1}{#2}}}
-
% For manuals and such:
%
% \definetypesetting [name] [options] [settings-a]
@@ -99,11 +98,13 @@
\grph_typesetting_process_indeed{}{#2}{#1}{}%
\fi\fi\fi}
+\defineexternalfigure[typesetting] % so one can set a frame and such
+
\def\grph_typesetting_process_indeed#1#2#3#4% options settings-a filename settings-b
{\begingroup
\edef\m_typesetting_name{\clf_runcontextjob{#3}{#1}}%
\ifx\m_typesetting_name\empty \else
- \expanded{\externalfigure[\m_typesetting_name]}[#2,#4]%
+ \expanded{\externalfigure[\m_typesetting_name][typesetting]}[#2,#4]%
\fi
\endgroup}
diff --git a/tex/context/base/mkiv/grph-inc.lua b/tex/context/base/mkiv/grph-inc.lua
index b5e74b4c1..47eb7bbbb 100644
--- a/tex/context/base/mkiv/grph-inc.lua
+++ b/tex/context/base/mkiv/grph-inc.lua
@@ -40,14 +40,17 @@ run TeX code from within Lua. Some more functionality will move to Lua.
-- todo: store loaded pages per pdf file someplace
+local tonumber, tostring, next = tonumber, tostring, next
local format, lower, find, match, gsub = string.format, string.lower, string.find, string.match, string.gsub
local longtostring = string.longtostring
local contains = table.contains
+local sortedhash = table.sortedhash
local concat, insert, remove = table.concat, table.insert, table.remove
local todimen = string.todimen
local collapsepath = file.collapsepath
local formatters = string.formatters
local formatcolumns = utilities.formatters.formatcolumns
+local max, odd = math.max, math.odd
local P, R, S, Cc, C, Cs, Ct, lpegmatch = lpeg.P, lpeg.R, lpeg.S, lpeg.Cc, lpeg.C, lpeg.Cs, lpeg.Ct, lpeg.match
@@ -89,14 +92,8 @@ local trace_usage = false trackers.register ("graphics.usage", func
local extra_check = false directives.register("graphics.extracheck", function(v) extra_check = v end)
local auto_transform = true directives.register("graphics.autotransform", function(v) auto_transform = v end)
-if LUATEXVERSION <= 1 then
- auto_transform = false
-end
-
+local report = logs.reporter("graphics")
local report_inclusion = logs.reporter("graphics","inclusion")
-local report_figures = logs.reporter("system","graphics")
-local report_figure = logs.reporter("used graphic")
-local report_newline = logs.newline
local f_hash_part = formatters["%s->%s->%s->%s"]
local f_hash_full = formatters["%s->%s->%s->%s->%s->%s->%s->%s"]
@@ -107,7 +104,7 @@ local v_local = variables["local"]
local v_default = variables.default
local v_auto = variables.auto
-local maxdimen = 2^30-1
+local maxdimen = 0x3FFFFFFF -- 2^30-1
local ctx_doscalefigure = context.doscalefigure
local ctx_relocateexternalfigure = context.relocateexternalfigure
@@ -116,7 +113,9 @@ local ctx_stopfoundexternalfigure = context.stopfoundexternalfigure
local ctx_dosetfigureobject = context.dosetfigureobject
local ctx_doboxfigureobject = context.doboxfigureobject
-function images.check(figure)
+-- extensions
+
+local function checkimage(figure)
if figure then
local width = figure.width
local height = figure.height
@@ -164,8 +163,6 @@ local function imagetotable(imgtable)
return result
end
-images.totable = imagetotable
-
function images.serialize(i,...)
return table.serialize(imagetotable(i),...)
end
@@ -184,7 +181,7 @@ end
local validsizes = table.tohash(images.boxes())
local validtypes = table.tohash(images.types())
-function images.checksize(size)
+local function checkimagesize(size)
if size then
size = gsub(size,"box","")
return validsizes[size] and size or "crop"
@@ -193,6 +190,17 @@ function images.checksize(size)
end
end
+local newimage = images.new
+local scanimage = images.scan
+local copyimage = images.copy
+local cloneimage = images.clone
+local imagetonode = images.node
+
+images.check = checkimage
+images.checksize = checkimagesize
+images.tonode = imagetonode
+images.totable = imagetotable
+
local indexed = { }
function images.ofindex(n)
@@ -211,6 +219,7 @@ figures.defaultwidth = 0
figures.defaultheight = 0
figures.defaultdepth = 0
figures.nofprocessed = 0
+figures.nofmissing = 0
figures.preferquality = true -- quality over location
local figures_loaded = allocate() figures.loaded = figures_loaded
@@ -305,31 +314,38 @@ function figures.badname(name)
end
end
-luatex.registerstopactions(function()
+logs.registerfinalactions(function()
+ local done = false
if trace_usage and figures.nofprocessed > 0 then
- logs.pushtarget("logfile")
- report_newline()
- report_figures("start names")
- for _, data in table.sortedhash(figures_found) do
- report_newline()
- report_figure("asked : %s",data.askedname)
+ logs.startfilelogging(report,"names")
+ for _, data in sortedhash(figures_found) do
+ if done then
+ report()
+ else
+ done = true
+ end
+ report("asked : %s",data.askedname)
if data.found then
- report_figure("format : %s",data.format)
- report_figure("found : %s",data.foundname)
- report_figure("used : %s",data.fullname)
+ report("format : %s",data.format)
+ report("found : %s",data.foundname)
+ report("used : %s",data.fullname)
if data.badname then
- report_figure("comment : %s","bad name")
+ report("comment : %s","bad name")
elseif data.comment then
- report_figure("comment : %s",data.comment)
+ report("comment : %s",data.comment)
end
else
- report_figure("comment : %s","not found")
+ report("comment : %s","not found")
end
end
- report_newline()
- report_figures("stop names")
- report_newline()
- logs.poptarget()
+ logs.stopfilelogging()
+ end
+ if figures.nofmissing > 0 and logs.loggingerrors() then
+ logs.starterrorlogging(report,"missing figures")
+ for _, data in sortedhash(figures_found) do
+ report("%w%s",6,data.askedname)
+ end
+ logs.stoperrorlogging()
end
end)
@@ -542,11 +558,13 @@ function figures.initialize(request)
-- can be determined; at some point the handlers might set them to numbers instead
local w = tonumber(request.width) or 0
local h = tonumber(request.height) or 0
+ local p = tonumber(request.page) or 0
request.width = w > 0 and w or nil
request.height = h > 0 and h or nil
--
- request.page = math.max(tonumber(request.page) or 1,1)
- request.size = images.checksize(request.size)
+ request.page = p > 0 and p or 1
+ request.keepopen = p > 0
+ request.size = checkimagesize(request.size)
request.object = request.object == v_yes
request["repeat"] = request["repeat"] == v_yes
request.preview = request.preview == v_yes
@@ -844,6 +862,9 @@ local function register(askedname,specification)
specification.arguments or ""
)
figures_found[askedhash] = specification
+ if not specification.found then
+ figures.nofmissing = figures.nofmissing + 1
+ end
return specification
end
@@ -896,7 +917,6 @@ local function locate(request) -- name, format, cache
askedname = path
end
else
- -- local fname = methodhandler('finders',pathname .. "/" .. wantedfiles[k])
local foundname = resolvers.findbinfile(askedname)
if not foundname or not lfs.isfile(foundname) then -- foundname can be dummy
if trace_figures then
@@ -1187,7 +1207,7 @@ statistics.register("used graphics",function()
local filename = file.nameonly(environment.jobname) .. "-figures-usage.lua"
if next(figures_found) then
local found = { }
- for _, data in table.sortedhash(figures_found) do
+ for _, data in sortedhash(figures_found) do
found[#found+1] = data
for k, v in next, data do
if v == false or v == "" then
@@ -1259,7 +1279,7 @@ function figures.done(data)
ds.yscale = 1
end
-- sort of redundant but can be limited
- ds.page = ds.page or du.page or dr.page
+ ds.page = ds.page or du.page or dr.page
return data
end
@@ -1331,7 +1351,7 @@ local function checktransform(figure,forced)
local orientation = (forced ~= "" and forced ~= v_auto and forced) or figure.orientation or 0
local transform = transforms["orientation-"..orientation]
figure.transform = transform
- if math.odd(transform) then
+ if odd(transform) then
return figure.height, figure.width
else
return figure.width, figure.height
@@ -1339,6 +1359,8 @@ local function checktransform(figure,forced)
end
end
+local pagecount = { }
+
function checkers.generic(data)
local dr, du, ds = data.request, data.used, data.status
local name = du.fullname or "unknown generic"
@@ -1370,16 +1392,27 @@ function checkers.generic(data)
)
local figure = figures_loaded[hash]
if figure == nil then
- figure = images.new {
+ figure = newimage {
filename = name,
page = page,
pagebox = dr.size,
+ keepopen = dr.keepopen or false,
-- visiblefilename = "", -- this prohibits the full filename ending up in the file
}
codeinjections.setfigurecolorspace(data,figure)
codeinjections.setfiguremask(data,figure)
if figure then
- local f, comment = images.check(images.scan(figure))
+ -- new, bonus check
+ if page and page > 1 then
+ local f = scanimage{ filename = name }
+ if f.page and f.pages < page then
+ report_inclusion("no page %i in %a, using page 1",page,name)
+ page = 1
+ figure.page = page
+ end
+ end
+ -- till here
+ local f, comment = checkimage(scanimage(figure))
if not f then
ds.comment = comment
ds.found = false
@@ -1441,8 +1474,8 @@ function includers.generic(data)
if figure == nil then
figure = ds.private
if figure then
- figure = images.copy(figure)
- figure = figure and images.clone(figure,data.request) or false
+ figure = copyimage(figure)
+ figure = figure and cloneimage(figure,data.request) or false
end
figures_used[hash] = figure
end
@@ -1450,13 +1483,13 @@ function includers.generic(data)
local nr = figures.boxnumber
nofimages = nofimages + 1
ds.pageindex = nofimages
- local image = images.node(figure)
+ local image = imagetonode(figure)
local pager = new_latelua(function()
pofimages[nofimages] = pofimages[nofimages] or tex.count.realpageno -- so when reused we register the first one only
end)
image.next = pager
pager.prev = image
- local box = hpack(image) -- images.node(figure) not longer valid
+ local box = hpack(image) -- imagetonode(figure) no longer valid
indexed[figure.index] = figure
box.width, box.height, box.depth = figure.width, figure.height, 0 -- new, hm, tricky, we need to do that in tex (yet)
@@ -1775,7 +1808,7 @@ end
local function bases_locate(askedlabel)
for i=1,#bases_list do
local entry = bases_list[i]
- local t = bases_find(entry[1],askedlabel)
+ local t = bases_find(entry[1],askedlabel,1,true)
if t then
return t
end
@@ -1865,11 +1898,39 @@ end
-- end,
-- }
--- local fig = figures.push { name = pdffile }
--- figures.identify()
--- figures.check()
--- local nofpages = fig.used.pages
--- figures.pop()
+-- local n = "foo.pdf"
+-- local d = figures.getinfo(n)
+-- if d then
+-- for i=1,d.used.pages do
+-- local p = figures.getinfo(n,i)
+-- if p then
+-- local u = p.used
+-- print(u.width,u.height,u.orientation)
+-- end
+-- end
+-- end
+
+function figures.getinfo(name,page)
+ if type(name) == "string" then
+ name = { name = name, page = page }
+ end
+ if name.name then
+ local data = figures.push(name)
+ figures.identify()
+ figures.check()
+ figures.pop()
+ return data
+ end
+end
+
+function figures.getpdfinfo(name,page,metadata)
+ -- not that useful but as we have it for detailed inclusion we can as
+ -- well expose it
+ if type(name) ~= "table" then
+ name = { name = name, page = page, metadata = metadata }
+ end
+ return codeinjections.getinfo(name)
+end
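+
+-- a minimal usage sketch (assuming some "foo.pdf" that can be located):
+--
+-- inspect(figures.getpdfinfo("foo.pdf",1,true))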
-- interfacing
@@ -1882,6 +1943,7 @@ implement {
{ "name" },
{ "label" },
{ "page" },
+ { "file" },
{ "size" },
{ "object" },
{ "prefix" },
@@ -1950,7 +2012,7 @@ local registered = { }
local ctx_doexternalfigurerepeat = context.doexternalfigurerepeat
-interfaces.implement {
+implement {
name = "figure_register_page",
arguments = { "string", "string", "string" },
actions = function(a,b,c)
@@ -1959,14 +2021,14 @@ interfaces.implement {
end
}
-interfaces.implement {
+implement {
name = "figure_nof_registered_pages",
actions = function()
context(#registered)
end
}
-interfaces.implement {
+implement {
name = "figure_flush_registered_pages",
arguments = "string",
actions = function(n)
diff --git a/tex/context/base/mkiv/grph-inc.mkiv b/tex/context/base/mkiv/grph-inc.mkiv
index 25058b3f7..677883fbb 100644
--- a/tex/context/base/mkiv/grph-inc.mkiv
+++ b/tex/context/base/mkiv/grph-inc.mkiv
@@ -20,12 +20,12 @@
\writestatus{loading}{ConTeXt Graphic Macros / Figure Inclusion}
-\registerctxluafile{grph-inc}{1.001}
-\registerctxluafile{grph-con}{1.001}
-\registerctxluafile{grph-fil}{1.001}
-\registerctxluafile{grph-mem}{1.001}
-\registerctxluafile{grph-u3d}{1.001} % this will change
-\registerctxluafile{grph-swf}{1.001} % this will change
+\registerctxluafile{grph-inc}{}
+\registerctxluafile{grph-con}{}
+\registerctxluafile{grph-fil}{}
+\registerctxluafile{grph-mem}{}
+\registerctxluafile{grph-u3d}{} % this will become a module
+\registerctxluafile{grph-swf}{} % this will become a module
\unprotect
@@ -56,6 +56,7 @@
\c!prefix =,
\c!cache =,
\c!page =\zerocount,
+ \c!file =,
\c!display =,
\c!mask =,
\c!preset =\v!yes,
@@ -65,8 +66,8 @@
\c!symbol =\v!no,
\c!controls =\v!no,
\c!resources =,
- \c!preview =\v!no
- \c!repeat =\v!no
+ \c!preview =\v!no,
+ \c!repeat =\v!no,
\c!foregroundcolor=,
\c!interaction =\v!none,
\c!hfactor =,
@@ -113,19 +114,19 @@
\newtoks \everyexternalfigureresets % for the moment still public
\newtoks \everyexternalfigurechecks % for the moment still public
-% \useexternalfigure[alpha][koe]
-% \useexternalfigure[beta] [koe] [breedte=1cm]
-% \useexternalfigure[gamma][koe][alpha]
-% \useexternalfigure[delta][koe][alpha][breedte=2cm]
+% \useexternalfigure[alpha][cow]
+% \useexternalfigure[beta] [cow] [width=1cm]
+% \useexternalfigure[gamma][cow][alpha]
+% \useexternalfigure[delta][cow][alpha][width=2cm]
%
-% volle breedte: \externalfigure[koe] \par
-% 3cm breed: \externalfigure[koe] [breedte=3cm] \par
-% volle breedte: \externalfigure[alpha] \par
-% 1cm breed: \externalfigure[beta] \par
-% volle breedte: \externalfigure[gamma] \par
-% 2cm breed: \externalfigure[delta] \par
-% 4cm breed: \externalfigure[beta] [breedte=4cm] \par
-% 5cm breed: \externalfigure[gamma][breedte=5cm] \par
+% full width : \externalfigure[cow] \par
+% 3cm width : \externalfigure[cow] [width=3cm] \par
+% full width : \externalfigure[alpha] \par
+% 1cm width : \externalfigure[beta] \par
+% full width : \externalfigure[gamma] \par
+% 2cm width : \externalfigure[delta] \par
+% 4cm width : \externalfigure[beta] [width=4cm] \par
+% 5cm width : \externalfigure[gamma][width=5cm] \par
%
% \defineexternalfigure[a][width=10cm]
% \defineexternalfigure[b][width=5cm]
@@ -153,14 +154,13 @@
%
% \defineexternalfigure[name][settings]
-%D Defining is persistent, i.e.\ when you redefine an instance,
-%D the already set parameters need to be set again or otherwise
-%D the old values will be used.
-
+%D Defining is persistent, i.e.\ when you redefine an instance, the already set
+%D parameters need to be set again or otherwise the old values will be used.
+%D
%D New: \type {method=auto}: strips suffix and uses \quote {order} which is handy in
-%D some of four workflows where sources are used for web and print and where
-%D the web tools need a suffix (like gif) which we don't want as we want a high
-%D quality format.
+%D some of our workflows where sources are used for web and print and where the web
+%D tools need a suffix (like gif) which we don't want as we want a high quality
+%D format.
\newconditional\c_grph_include_trace_inheritance
@@ -320,6 +320,7 @@
name {\p_grph_include_name}%
label {\ifx\p_label\empty\p_grph_include_label\else\p_label\fi}%
page {\externalfigureparameter\c!page}%
+ file {\externalfigureparameter\c!file}%
size {\externalfigureparameter\c!size}%
object {\externalfigureparameter\c!object}%
prefix {\externalfigureparameter\c!prefix}%
@@ -703,8 +704,6 @@
\letexternalfigureparameter\c!offset\v!overlay
\letexternalfigureparameter\c!width \figurewidth
\letexternalfigureparameter\c!height\figureheight
-% \letexternalfigureparameter\c!align \v!middle
-% \letexternalfigureparameter\c!autowidth\v!no
\inheritedexternalfigureframed{\box\foundexternalfigure}%
\fi
\fi\fi
@@ -857,9 +856,6 @@
% \eTABLE
% \stoptext
-\unexpanded\def\showexternalfigures
- {\writestatus\m!system{the \string\showexternalfigures\space command is not (yet) implemented in mkiv}}
-
\unexpanded\def\overlayfigure#1%
{\externalfigure[#1][\c!width=\d_overlay_width,\c!height=\d_overlay_height]}
@@ -876,18 +872,28 @@
[\v!inline]
[\c!height=\lineheight]
-\defineexternalfigure
+\definemeasure
[\v!combination]
- [\c!width=\dimexpr(%
- \textwidth-\effectiveleftskip-\effectiverightskip
+ [(\textwidth
+ -\effectiveleftskip
+ -\effectiverightskip
-\numexpr\combinationparameter\c!nx-\plusone\relax\dimexpr\combinationparameter\c!distance\relax
- )/\combinationparameter\c!nx\relax]
+ )/\combinationparameter\c!nx]
+
+\defineexternalfigure
+ [\v!combination]
+ [\c!width=\measure{\v!combination}]
% \startcombination[nx=2,ny=1]
% {\externalfigure[dummy][combination]} {}
% {\externalfigure[dummy][combination]} {}
% \stopcombination
+% \startcombination[nx=2,ny=1]
+% {\externalfigure[dummy][width=\measure{combination}]} {}
+% {\externalfigure[dummy][width=\measure{combination}]} {}
+% \stopcombination
+
% \startcombination[nx=2,ny=2]
% {\externalfigure[dummy][combination]} {}
% {\externalfigure[dummy][combination]} {}
@@ -912,9 +918,10 @@
\protect \endinput
-% Moved here because this already old code is nowhere documents (so I need to
-% check it:
+%D Moved here because this already old code is nowhere documented (so I need to
+%D check it):
%
+% \starttyping
% \starttext
%
% \startluaparameterset [u3d:myset:controls:1]
diff --git a/tex/context/base/mkiv/grph-pat.lua b/tex/context/base/mkiv/grph-pat.lua
index c5e4b9f64..89b29906d 100644
--- a/tex/context/base/mkiv/grph-pat.lua
+++ b/tex/context/base/mkiv/grph-pat.lua
@@ -37,7 +37,7 @@ interfaces.implement {
if not name or name == "" then
return
end
- nodes.handlers.finalize(box)
+ nodes.handlers.finalize(box,"object")
names[name] = lpdf.registerpattern {
number = number,
width = specification.width or box.width,
diff --git a/tex/context/base/mkiv/grph-pat.mkiv b/tex/context/base/mkiv/grph-pat.mkiv
index 0126647cc..c5f5fe7cf 100644
--- a/tex/context/base/mkiv/grph-pat.mkiv
+++ b/tex/context/base/mkiv/grph-pat.mkiv
@@ -22,7 +22,7 @@
\unprotect
-\registerctxluafile{grph-pat}{1.001}
+\registerctxluafile{grph-pat}{}
\unexpanded\def\registerpattern
{\begingroup
diff --git a/tex/context/base/mkiv/grph-raw.mkiv b/tex/context/base/mkiv/grph-raw.mkiv
index 8978ba267..4029c7cbd 100644
--- a/tex/context/base/mkiv/grph-raw.mkiv
+++ b/tex/context/base/mkiv/grph-raw.mkiv
@@ -39,7 +39,7 @@
%D draw textext("\bitmapimage[x=200,y=50]{\TestBitmap{50}{200}}") xsized 10cm ;
%D \stopMPcode
-\registerctxluafile{grph-raw}{1.001}
+\registerctxluafile{grph-raw}{}
\unprotect
diff --git a/tex/context/base/mkiv/grph-rul.lua b/tex/context/base/mkiv/grph-rul.lua
index e3d1d8963..03f678973 100644
--- a/tex/context/base/mkiv/grph-rul.lua
+++ b/tex/context/base/mkiv/grph-rul.lua
@@ -6,6 +6,8 @@ if not modules then modules = { } end modules ['grph-rul'] = {
license = "see context related readme files"
}
+local tonumber, next, type = tonumber, next, type
+
local attributes = attributes
local nodes = nodes
local context = context
@@ -127,13 +129,17 @@ end
do
- local f_rectangle = formatters["%F w %F %F %F %F re %s"]
+ -- maybe %.6F
+
+ local f_rectangle = formatters["%.6F w %.6F %.6F %.6F %.6F re %s"]
+ local f_baselined = formatters["%.6F w %.6F %.6F %.6F %.6F re s %.6F %.6F m %.6F %.6F l s"]
+ local f_dashlined = formatters["%.6F w %.6F %.6F %.6F %.6F re s [%.6F %.6F] 2 d %.6F %.6F m %.6F %.6F l s"]
local f_radtangle = formatters[ [[
- %F w %F %F m
- %F %F l %F %F %F %F y
- %F %F l %F %F %F %F y
- %F %F l %F %F %F %F y
- %F %F l %F %F %F %F y
+ %.6F w %.6F %.6F m
+ %.6F %.6F l %.6F %.6F %.6F %.6F y
+ %.6F %.6F l %.6F %.6F %.6F %.6F y
+ %.6F %.6F l %.6F %.6F %.6F %.6F y
+ %.6F %.6F l %.6F %.6F %.6F %.6F y
h %s
]] ]
@@ -160,6 +166,30 @@ do
ruleactions.draw = ruleactions.fill
ruleactions.stroke = ruleactions.fill
+ local getwhd = nodes.nuts.getwhd
+
+ ruleactions.box = function(p,h,v,i,n)
+ local w, h, d = getwhd(n)
+ local line = p.line or 65536
+ local l = line *bpfactor
+ local w = w * bpfactor
+ local h = h * bpfactor
+ local d = d * bpfactor
+ local o = l / 2
+ if (d >= 0 and h >= 0) or (d <= 0 and h <= 0) then
+ local dashed = tonumber(p.dashed)
+ if dashed and dashed > 5*line then
+ dashed = dashed * bpfactor
+ local delta = (w - 2*dashed*floor(w/(2*dashed)))/2
+ pdfprint("direct",f_dashlined(l,o,o,w-l,h+d-l,dashed,dashed,delta,d,w-delta,d))
+ else
+ pdfprint("direct",f_baselined(l,o,o,w-l,h+d-l,0,d,w,d))
+ end
+ else
+ pdfprint("direct",f_rectangle(l,o,o,w-l,h+d-l))
+ end
+ end
+
end
interfaces.implement {
diff --git a/tex/context/base/mkiv/grph-rul.mkiv b/tex/context/base/mkiv/grph-rul.mkiv
index 98aab3650..cdf39ae42 100644
--- a/tex/context/base/mkiv/grph-rul.mkiv
+++ b/tex/context/base/mkiv/grph-rul.mkiv
@@ -13,7 +13,7 @@
\writestatus{loading}{ConTeXt Graphic Macros / Rule Trickery}
-\registerctxluafile{grph-rul}{1.001}
+\registerctxluafile{grph-rul}{}
\unprotect
diff --git a/tex/context/base/mkiv/grph-swf.lua b/tex/context/base/mkiv/grph-swf.lua
index 8c28b76af..30089cdc4 100644
--- a/tex/context/base/mkiv/grph-swf.lua
+++ b/tex/context/base/mkiv/grph-swf.lua
@@ -84,6 +84,7 @@ function figures.checkers.swf(data)
controls = dr.controls,
-- label = dr.label,
resources = dr.resources,
+ arguments = dr.arguments,
}
context.stopfoundexternalfigure()
return data
diff --git a/tex/context/base/mkiv/grph-trf.mkiv b/tex/context/base/mkiv/grph-trf.mkiv
index 099efa114..6bd0e65cc 100644
--- a/tex/context/base/mkiv/grph-trf.mkiv
+++ b/tex/context/base/mkiv/grph-trf.mkiv
@@ -15,16 +15,14 @@
\unprotect
-%D We probably use too many dimens as the width calculations can
-%D go away. Some of this is an inheritance of limited backends
-%D (some supported fractions, some 1000's, some dimentions) so
-%D we calculate all of them. Nowadays scaling is always available
-%D so we could simplify the code. On the other hand, we now get
-%D some extra values for free.
+%D We probably use too many dimens as the width calculations can go away. Some of
+%D this is an inheritance of limited backends (some supported fractions, some
+%D 1000's, some dimensions) so we calculate all of them. Nowadays scaling is always
+%D available so we could simplify the code. On the other hand, we now get some extra
+%D values for free.
%D
-%D We could move the calculations to \LUA\ and clean up this
-%D lot anyway. On the other hand, there is some danger of messing
-%D up so it has a real low priority.
+%D We could move the calculations to \LUA\ and clean up this lot anyway. On the
+%D other hand, there is some danger of messing up so it has a real low priority.
% local:
@@ -40,6 +38,8 @@
\newconditional\c_grph_scale_scaling_done
\newconditional\c_grph_scale_limit_factors \settrue\c_grph_scale_limit_factors
+\newconditional\c_grph_scale_swap_factor
+
\newdimen\d_grph_scale_wd
\newdimen\d_grph_scale_ht
\newdimen\d_grph_scale_dp
@@ -75,6 +75,7 @@
\installcorenamespace{scale}
\installcorenamespace{scalegrid}
\installcorenamespace{scalenorm}
+\installcorenamespace{scalefact}
\installcommandhandler \??scale {scale} \??scale % we can have instances
@@ -99,8 +100,6 @@
\unexpanded\def\scale{\dodoubleempty\grph_scale}
-% we could have: \freezeparameter\c!scale etc (less backtracking when used multiple)
-
\def\grph_scale[#1][#2]%
{\bgroup
% this is quite common so we might make this a helper
@@ -223,42 +222,6 @@
\let\grph_scale_calculations_report\relax
-% \def\grph_scale_calculations_report
-% {\writestatus
-% {scaled}%
-% {wd:\finalscaleboxwidth,ht:\finalscaleboxheight
-% ,xscale:\finalscaleboxxscale,yscale:\finalscaleboxyscale}}
-
-% \def\grph_scale_calculations_yes
-% {\settrue\c_grph_scale_done
-% % initial values
-% \d_grph_scale_x_offset\zeropoint
-% \d_grph_scale_y_offset\zeropoint
-% \d_grph_scale_x_size \d_grph_scale_wd
-% \d_grph_scale_y_size \d_grph_scale_ht % alleen ht wordt geschaald!
-% % final values
-% \global\d_grph_scale_used_x_size \zeropoint % see note * (core-fig)
-% \global\d_grph_scale_used_y_size \zeropoint % see note * (core-fig)
-% \c_grph_scale_used_x_scale \plusone % see note * (core-fig)
-% \c_grph_scale_used_y_scale \plusone % see note * (core-fig)
-% \let\m_grph_scale_used_x_scale \!!plusone
-% \let\m_grph_scale_used_y_scale \!!plusone
-% % preparations
-% \setfalse\c_grph_scale_scaling_done
-% \grph_scale_check_parameters
-% % calculators
-% % beware, they operate in sequence, and calculate missing dimensions / messy
-% % grph_scale_by_nature % when? needed?
-% \ifconditional\c_grph_scale_scaling_done\else\grph_scale_by_factor \fi
-% \ifconditional\c_grph_scale_scaling_done\else\grph_scale_by_scale \fi
-% \ifconditional\c_grph_scale_scaling_done\else\grph_scale_by_dimension\fi
-% % used in actual scaling
-% \xdef\finalscaleboxwidth {\the\d_grph_scale_used_x_size}%
-% \xdef\finalscaleboxheight {\the\d_grph_scale_used_y_size}%
-% \xdef\finalscaleboxxscale {\luaexpr{\number\c_grph_scale_used_x_scale/1000}}%
-% \xdef\finalscaleboxyscale {\luaexpr{\number\c_grph_scale_used_y_scale/1000}}%
-% \grph_scale_calculations_report}
-
\def\grph_scale_calculations_yes
{\settrue\c_grph_scale_done
% initial values
@@ -315,7 +278,19 @@
% \placefigure{none}{\rotate[frame=on,offset=overlay]{\externalfigure[t:/sources/cow.pdf][width-6]}} \page
% \placefigure{none}{\framed[frame=on,offset=overlay]{\externalfigure[t:/sources/cow.pdf][width-7]}}
-\def\m_grph_scale_factor_set{\v!max,\v!fit,\v!broad,\v!auto} % can be an \edef
+%D The \type {min} option makes sure that the smallest available space determines
+%D the max size (so we can get a bleed on the other axis):
+%D
+%D \startlinecorrection
+%D \startcombination[nx=2,ny=2,distance=4cm]
+%D {\externalfigure[hacker][factor=max,maxwidth=6cm,maxheight=6cm]} {}
+%D {\externalfigure[mill] [factor=max,maxwidth=6cm,maxheight=6cm]} {}
+%D {\externalfigure[hacker][factor=min,maxwidth=6cm,maxheight=6cm]} {}
+%D {\externalfigure[mill] [factor=min,maxwidth=6cm,maxheight=6cm]} {}
+%D \stopcombination
+%D \stoplinecorrection
+
+\def\m_grph_scale_factor_set{\v!min,\v!max,\v!fit,\v!broad,\v!auto} % can be an \edef
\def\grph_scale_by_factor
{\doifelseinset\p_factor\m_grph_scale_factor_set
@@ -326,37 +301,9 @@
\grph_scale_by_factor_c
\grph_scale_by_factor_d}}}
-% \def\grph_scale_by_factor_a
-% {\grph_scale_apply_size
-% \ifdim\d_grph_scale_x_size>\d_grph_scale_y_size
-% \grph_scale_calculate_norm \d_grph_scale_used_x_size\p_factor\p_maxwidth\hsize\d_grph_scale_h_size
-% \grph_scale_calculate_scales\d_grph_scale_used_x_size\d_grph_scale_x_size\d_grph_scale_used_y_size\d_grph_scale_y_size
-% \else
-% \grph_scale_calculate_norm \d_grph_scale_used_y_size\p_factor\p_maxheight\d_grph_scale_outer_v_size\d_grph_scale_v_size
-% \grph_scale_calculate_scales\d_grph_scale_used_y_size\d_grph_scale_y_size\d_grph_scale_used_x_size\d_grph_scale_x_size
-% \fi
-% \grph_scale_by_factor_indeed}
-%
-% \def\grph_scale_by_factor_b
-% {\grph_scale_apply_size
-% \grph_scale_calculate_norm \d_grph_scale_used_y_size\p_hfactor\p_maxheight\d_grph_scale_outer_v_size\d_grph_scale_v_size
-% \grph_scale_calculate_scales\d_grph_scale_used_y_size\d_grph_scale_y_size\d_grph_scale_used_x_size\d_grph_scale_x_size
-% \grph_scale_by_factor_indeed}
-%
-% \def\grph_scale_by_factor_c
-% {\grph_scale_apply_size
-% \grph_scale_calculate_norm \d_grph_scale_used_x_size\p_wfactor\p_maxwidth\hsize\d_grph_scale_h_size
-% \grph_scale_calculate_scales\d_grph_scale_used_x_size\d_grph_scale_x_size\d_grph_scale_used_y_size\d_grph_scale_y_size
-% \grph_scale_by_factor_indeed}
-%
-% \def\grph_scale_by_factor_d
-% {\grph_scale_calculate_norm\d_grph_scale_used_y_size\p_factor \p_height \textheight\d_grph_scale_v_size
-% \grph_scale_calculate_norm\d_grph_scale_used_y_size\p_hfactor\p_height \textheight\d_grph_scale_v_size
-% \grph_scale_calculate_norm\d_grph_scale_used_x_size\p_wfactor\p_width \hsize \hsize}
-
\def\grph_scale_by_factor_a
{\grph_scale_apply_size
- \ifdim\d_grph_scale_x_size>\d_grph_scale_y_size
+ \ifdim\d_grph_scale_x_size >\d_grph_scale_y_size
\grph_scale_calculate_norm \d_grph_scale_used_x_size\p_factor\p_maxwidth\hsize\d_grph_scale_h_size
\grph_scale_calculate_scales\d_grph_scale_used_x_size\d_grph_scale_x_size
\d_grph_scale_used_y_size\m_grph_scale_used_x_scale\d_grph_scale_y_size
@@ -387,39 +334,19 @@
\grph_scale_calculate_norm\d_grph_scale_used_x_size\p_wfactor\p_width \hsize \hsize}
\def\grph_scale_by_factor_indeed
- {\settrue\c_grph_scale_scaling_done
+ {\grph_scale_calculate_fact\p_factor
+ \settrue\c_grph_scale_scaling_done
\ifconditional\c_grph_scale_limit_factors
- \ifdim\d_grph_scale_used_x_size>\d_grph_scale_h_size
+ \ifdim\d_grph_scale_used_x_size\ifconditional\c_grph_scale_swap_factor<\else>\fi\d_grph_scale_h_size
\global\d_grph_scale_used_y_size\zeropoint
\global\d_grph_scale_used_x_size\d_grph_scale_h_size
- \else\ifdim\d_grph_scale_used_y_size>\d_grph_scale_v_size
+ \else\ifdim\d_grph_scale_used_y_size\ifconditional\c_grph_scale_swap_factor<\else>\fi\d_grph_scale_v_size
\global\d_grph_scale_used_x_size\zeropoint
\global\d_grph_scale_used_y_size\d_grph_scale_v_size
\fi\fi
\fi
\grph_scale_by_dimension}
-% \def\grph_scale_by_scale
-% {\edef\m_grph_scale_temp{\p_scale\p_xscale\p_yscale}%
-% \ifx\m_grph_scale_temp\empty \else
-% \grph_scale_apply_scale\d_grph_scale_used_x_size\d_grph_scale_x_size\c_grph_scale_used_x_scale\p_xscale
-% \grph_scale_apply_scale\d_grph_scale_used_y_size\d_grph_scale_y_size\c_grph_scale_used_y_scale\p_yscale
-% \global\d_grph_scale_used_x_size\zeropoint
-% \global\d_grph_scale_used_y_size\zeropoint
-% \ifx\p_maxwidth\empty
-% \ifx\p_maxheight\empty
-% \else
-% \ifdim\d_grph_scale_y_size>\p_maxheight\relax
-% \global\d_grph_scale_used_y_size\p_maxheight
-% \fi
-% \fi
-% \else
-% \ifdim\d_grph_scale_x_size>\p_maxwidth\relax
-% \global\d_grph_scale_used_x_size\p_maxwidth
-% \fi
-% \fi
-% \fi}
-
\def\grph_scale_by_scale
{\edef\m_grph_scale_temp{\p_scale\p_xscale\p_yscale}%
\ifx\m_grph_scale_temp\empty \else
@@ -460,34 +387,6 @@
\fi
\fi}
-% \def\grph_scale_by_dimension_a
-% {\grph_scale_by_dimension_indeed
-% {\grph_scale_calculate_scale\d_grph_scale_used_y_size\d_grph_scale_y_size\c_grph_scale_used_y_scale
-% \grph_scale_calculate_scale\d_grph_scale_used_x_size\d_grph_scale_x_size\c_grph_scale_used_x_scale}%
-% {\grph_scale_calculate_scale\d_grph_scale_used_y_size\d_grph_scale_y_size\c_grph_scale_used_y_scale
-% \grph_scale_calculate_scale\d_grph_scale_used_x_size\d_grph_scale_x_size\c_grph_scale_used_x_scale}%
-% {\grph_scale_calculate_scale\d_grph_scale_used_y_size\d_grph_scale_y_size\c_grph_scale_used_y_scale
-% \grph_scale_calculate_scale\d_grph_scale_used_x_size\d_grph_scale_x_size\c_grph_scale_used_x_scale}}
-%
-% \def\grph_scale_by_dimension_b
-% {\grph_scale_by_dimension_indeed
-% {\grph_scale_calculate_scales\d_grph_scale_used_x_size\d_grph_scale_x_size\d_grph_scale_used_y_size\d_grph_scale_y_size}%
-% {\grph_scale_calculate_scales\d_grph_scale_used_x_size\d_grph_scale_x_size\d_grph_scale_used_y_size\d_grph_scale_y_size}%
-% {\grph_scale_calculate_scales\d_grph_scale_used_x_size\d_grph_scale_x_size\d_grph_scale_used_y_size\d_grph_scale_y_size}}
-%
-% \def\grph_scale_by_dimension_c
-% {\grph_scale_by_dimension_indeed
-% {\grph_scale_calculate_scales\d_grph_scale_used_y_size\d_grph_scale_y_size\d_grph_scale_used_x_size\d_grph_scale_x_size}%
-% {\grph_scale_calculate_scales\d_grph_scale_used_y_size\d_grph_scale_y_size\d_grph_scale_used_x_size\d_grph_scale_x_size}%
-% {\grph_scale_calculate_scales\d_grph_scale_used_y_size\d_grph_scale_y_size\d_grph_scale_used_x_size\d_grph_scale_x_size}}
-%
-% \def\grph_scale_by_dimension_d
-% {\grph_scale_by_dimension_indeed
-% {\grph_scale_apply_scale\d_grph_scale_used_x_size\d_grph_scale_x_size\c_grph_scale_used_x_scale\p_xscale
-% \grph_scale_apply_scale\d_grph_scale_used_y_size\d_grph_scale_y_size\c_grph_scale_used_y_scale\p_yscale}%
-% {\grph_scale_calculate_scales\d_grph_scale_used_x_size\d_grph_scale_x_size\d_grph_scale_used_y_size\d_grph_scale_y_size}%
-% {\grph_scale_calculate_scales\d_grph_scale_used_y_size\d_grph_scale_y_size\d_grph_scale_used_x_size\d_grph_scale_x_size}}
-
\def\grph_scale_by_dimension_a
{\grph_scale_by_dimension_indeed
{\grph_scale_calculate_scale\m_grph_scale_used_y_scale\d_grph_scale_used_y_size\d_grph_scale_y_size
@@ -544,6 +443,10 @@
\def\grph_scale_calculate_norm#1#2% todo: swap 1 and 2 and pass one less
{\csname\??scalenorm\ifcsname\??scalenorm#2\endcsname#2\else\s!unknown\fi\endcsname#1#2}
+\def\grph_scale_calculate_fact#1%
+ {\csname\??scalefact\ifcsname\??scalefact#1\endcsname#1\else\s!unknown\fi\endcsname}
+
+%setvalue{\??scalenorm\v!min }#1#2#3#4#5{\global#1#4}
\setvalue{\??scalenorm\v!max }#1#2#3#4#5{\global#1#4}
\setvalue{\??scalenorm\v!fit }#1#2#3#4#5{\global#1#5}
\setvalue{\??scalenorm\v!broad }#1#2#3#4#5{\global#1\dimexpr#5-4\externalfigureparameter\c!bodyfont\relax}
@@ -552,44 +455,30 @@
\setvalue{\??scalenorm\empty }#1#2#3#4#5{\ifx#3\empty\else\global#1#3\fi}
\setvalue{\??scalenorm\s!default}#1#2#3#4#5{\ifx#3\empty\else\global#1#3\fi}
-% \def\grph_scale_calculate_scales#1#2#3#4%
-% {\scratchdimen\dimexpr#1/\dimexpr#2/\plusthousand\relax\relax
-% \c_grph_scale_used_x_scale\scratchdimen
-% \c_grph_scale_used_y_scale\scratchdimen
-% %\writestatus{!!!!}{\the#3/\luaexpr{\number\dimexpr#1\relax/\number\dimexpr#2/1000}}%
-% #3\dimexpr\c_grph_scale_used_x_scale\dimexpr#4/\plusthousand\relax\relax}
-%
-% \def\grph_scale_calculate_scale#1#2#3%
-% {#3\dimexpr#1/\dimexpr#2/\plusthousand\relax\relax}
-%
-% \def\grph_scale_apply_scale#1#2#3#4% #4 = parameter / scale can be empty
-% {\ifcase0#4\relax
-% \ifcase0\p_scale\relax
-% #3=\plusthousand
-% \else
-% #3=\p_scale
-% \fi
+\setvalue{\??scalefact\v!min }{\global\settrue \c_grph_scale_swap_factor}
+\setvalue{\??scalefact\s!unknown}{\global\setfalse\c_grph_scale_swap_factor}
+
+% \setvalue{\??scalenorm\v!min }#1#2#3#4#5% an ugly hack
+% {\ifdim\d_grph_scale_used_x_size>\d_grph_scale_h_size
+% \d_grph_scale_used_y_size\vsize
% \else
-% #3=#4%
-% \fi
-% \relax % important ! still ?
-% \global#1\ifnum#3=\plusthousand#2\else\dimexpr#3\dimexpr#2/\plusthousand\relax\relax\fi
-% \relax}
+% \d_grph_scale_used_x_size\hsize
+% \fi}
+
+\setvalue{\??scalenorm\v!min}#1#2#3#4#5% an ugly hack
+ {\d_grph_scale_used_x_size\hsize
+ \d_grph_scale_used_y_size\vsize}
\def\grph_scale_calculate_scales#1#2%
{\edef\m_grph_scale_used_x_scale{\luaexpr{\number#1/\number#2}}%
\let\m_grph_scale_used_y_scale\m_grph_scale_used_x_scale}
-% we could inline this:
-%
-% \grph_scale_calculate_scale\m_grph_scale_used_x_scale#1#2
-% \let\m_grph_scale_used_y_scale\m_grph_scale_used_x_scale
-
\def\grph_scale_calculate_scale#1#2#3%
{\edef#1{\luaexpr{\number#2/\number#3}}}
\def\grph_scale_apply_scale#1#2% #1 = parameter / scale can be empty
- {\edef#1{\luaexpr
+ {% no overflow
+ \edef#1{\luaexpr
{\number
\ifx#2\empty
\ifx \p_scale \empty \plusthousand \else
@@ -642,31 +531,6 @@
\d_grph_scale_h_size\p_width
\fi}
-%\def\grph_scale_convert_large_scale#1#2#3#4%
-% {\scratchdimen#1\relax
-% \ifnum#3=\plusthousand
-% % == scale 1
-% \else
-% % better 1000 100 10 ranges, evt round 2sp
-% \divide\scratchdimen \plusthousand
-% \multiply\scratchdimen #3\relax
-% \fi
-% \scratchdimen-\scratchdimen % beter hier - dan in driver
-% \edef#2{\the\scratchdimen}%
-% \scratchcounter#3\relax
-% \ifnum\scratchcounter>\plustenthousand
-% \divide\scratchcounter\plusten
-% \scratchdimen\the\scratchcounter\points % \the ?
-% \else
-% \scratchdimen\the\scratchcounter\points % \the ?
-% \divide\scratchdimen\plusten
-% \fi
-% \edef#4{\withoutpt\the\scratchdimen}}
-%
-% \def\grph_scale_convert_large_scale#1#2#3#4% less overflow
-% {\edef#2{\ifnum#3=\plusthousand-\the\dimexpr#1\else\luaexpr{-\number#3*\number\dimexpr#1/1000}sp\fi}%
-% \edef#4{\luaexpr{\number#3/10}}}
-
% \startcombination
% {\externalfigure[cow.pdf] [frame=on,height=3cm,equalwidth=6cm]} {a cow}
% {\externalfigure[mill.png][frame=on,height=3cm,equalwidth=6cm]} {a mill}
@@ -722,9 +586,8 @@
%D \macros
%D {clip, setupclipping}
%D
-%D Although related to figures, clipping can be applied to
-%D arbitrary content. We can use \METAPOST\ to provide a non
-%D rectangular clipping path.
+%D Although related to figures, clipping can be applied to arbitrary content. We can
+%D use \METAPOST\ to provide a non rectangular clipping path.
%D
%D \starttyping
%D \startMPclip{fun}
@@ -739,8 +602,7 @@
%D \clip[x=2,y=1]{\externalfigure[photo]}
%D \stoptyping
%D
-%D When we want to clip to the oval we defined a few lines ago,
-%D we say:
+%D When we want to clip to the oval we defined a few lines ago, we say:
%D
%D \starttyping
%D \clip[nx=1,ny=1,x=1,y=1,mp=fun]{\externalfigure[photo]}
@@ -769,57 +631,6 @@
\grph_clip_yes_finish
\grph_clip_nop_finish}
-% \def\grph_clip_yes_finish
-% {\ifdim\@@cpwidth>\zeropoint
-% \!!dimena\@@cpwidth
-% \!!dimenc\@@cphoffset
-% \else
-% \!!dimena\wd\nextbox
-% \divide\!!dimena \@@cpnx
-% \!!dimenc\@@cpx\!!dimena
-% \advance\!!dimenc -\!!dimena
-% \!!dimena\@@cpsx\!!dimena
-% \fi
-% \relax % sure
-% \ifdim\@@cpheight>\zeropoint
-% \!!dimenb\@@cpheight
-% \!!dimend\ht\nextbox
-% \advance\!!dimend -\@@cpvoffset
-% \advance\!!dimend -\!!dimenb
-% \else
-% \!!dimenb\ht\nextbox
-% \divide\!!dimenb \@@cpny
-% \!!dimend-\@@cpy\!!dimenb
-% \advance\!!dimend -\@@cpsy\!!dimenb
-% \advance\!!dimend \!!dimenb
-% \!!dimenb\@@cpsy\!!dimenb
-% \advance\!!dimend \ht\nextbox % dimend !
-% \fi
-% \setbox\nextbox\hbox % old
-% {\advance\!!dimenc -\@@cpleftoffset % new !
-% \advance\!!dimend -\@@cpbottomoffset % new ! % - added
-% \hskip-\!!dimenc\lower\!!dimend\box\nextbox}% old
-% \wd\nextbox\zeropoint
-% \ht\nextbox\zeropoint
-% \dp\nextbox\zeropoint
-% \setbox\nextbox\hbox
-% {\advance\!!dimena \@@cpleftoffset % new !
-% \advance\!!dimena \@@cprightoffset % new !
-% \advance\!!dimenb \@@cpbottomoffset % new !
-% \advance\!!dimenb \@@cptopoffset % new !
-% \dostartclipping\@@cpmp\!!dimena\!!dimenb % old
-% \box\nextbox
-% \dostopclipping}%
-% \setbox\nextbox\hbox % new !
-% {\!!dimena-\@@cpleftoffset % new !
-% \!!dimenb \@@cpbottomoffset % new ! % - removed
-% \hskip\!!dimena\lower\!!dimenb\box\nextbox}% new !
-% \wd\nextbox\!!dimena
-% \ht\nextbox\!!dimenb
-% \dp\nextbox\zeropoint
-% \box\nextbox
-% \egroup}
-
\def\grph_clip_yes_finish
{\ifdim\clippingparameter\c!width>\zeropoint
\scratchwidth \clippingparameter\c!width
@@ -917,8 +728,7 @@
\box\nextbox
\egroup}
-%D A couple of examples, demonstrating how the depth is
-%D taken care of:
+%D A couple of examples, demonstrating how the depth is taken care of:
%D
%D \startbuffer
%D test\rotate[frame=on, rotation=0] {gans}%
@@ -929,27 +739,20 @@
%D \stopbuffer
%D
%D \typebuffer \getbuffer
-
-% When we rotate over arbitrary angles, we need to relocate the
-% resulting box because rotation brings that box onto the negative
-% axis. The calculations (mostly sin and cosine) need to be tuned for
-% the way a box is packages (i.e. the refence point). A typical example
-% of drawing, scribbling, and going back to the days of school math.
-%
-% We do a bit more calculations than needed, simply because that way
-% it's easier to debug the code.
-
-% Cleanup in progress ... todo: less boxing
+%D
+%D When we rotate over arbitrary angles, we need to relocate the resulting box
+%D because rotation brings that box onto the negative axis. The calculations (mostly
+%D sin and cosine) need to be tuned for the way a box is packaged (i.e. the reference
+%D point). A typical example of drawing, scribbling, and going back to the days of
+%D school math.
+%D
+%D We do a bit more calculations than needed, simply because that way it's easier to
+%D debug the code.
\installcorenamespace {rotate}
\installcorenamespace {rotatelocation}
\installcorenamespace {rotatepreset}
-% we can alias these to \d_layers-* to save some dimens or maybe have a generic
-% set of scratch variables
-
-% maybe just \rotation_...
-
\newdimen\d_grph_rotate_x_size
\newdimen\d_grph_rotate_y_size
\newdimen\d_grph_rotate_x_offset
diff --git a/tex/context/base/mkiv/hand-ini.mkiv b/tex/context/base/mkiv/hand-ini.mkiv
index d4bffdb14..f678a88da 100644
--- a/tex/context/base/mkiv/hand-ini.mkiv
+++ b/tex/context/base/mkiv/hand-ini.mkiv
@@ -60,6 +60,7 @@
\def\font_expansion_enable {\normaladjustspacing\plusthree}
\def\font_expansion_enable_k{\normaladjustspacing\plustwo}
+\def\font_expansion_enable_n{\normaladjustspacing\plusone}
\def\font_expansion_disable {\normaladjustspacing\zerocount}
\def\font_protruding_enable {\normalprotrudechars\plustwo }
diff --git a/tex/context/base/mkiv/java-ini.mkiv b/tex/context/base/mkiv/java-ini.mkiv
index d6fe4dd8a..25b8ba600 100644
--- a/tex/context/base/mkiv/java-ini.mkiv
+++ b/tex/context/base/mkiv/java-ini.mkiv
@@ -13,7 +13,7 @@
\writestatus{loading}{ConTeXt JavaScript Macros / Initialization}
-\registerctxluafile{java-ini}{1.001}
+\registerctxluafile{java-ini}{}
\unprotect
diff --git a/tex/context/base/mkiv/l-bit32.lua b/tex/context/base/mkiv/l-bit32.lua
new file mode 100644
index 000000000..5f35b8fee
--- /dev/null
+++ b/tex/context/base/mkiv/l-bit32.lua
@@ -0,0 +1,150 @@
+if not modules then modules = { } end modules ['l-bit32'] = {
+ version = 1.001,
+ license = "the same as regular Lua",
+ source = "bitwise.lua, v 1.24 2014/12/26 17:20:53 roberto",
+ comment = "drop-in for bit32, adapted a bit by Hans Hagen",
+
+}
+
+-- Lua 5.3 has bitwise operators built in but code meant for 5.2 can expect the
+-- bit32 library to be present. For the moment (and maybe forever) we will ship
+-- the bit32 library as part of LuaTeX but just in case it is missing, here is a
+-- drop-in. The code is an adapted version of code by Roberto. The Luajit variant
+-- is a mixture of mapping and magic.
+
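+-- the drop-in provides the usual interface, so 5.2 style code keeps working,
+-- for instance (just a sketch):
+--
+-- local mask = bit32.band(0x23,0xFF) -- 35
+-- local high = bit32.rshift(mask,4)  -- 2
+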
+if bit32 then
+
+ -- lua 5.2: we're okay
+
+elseif utf8 then
+
+ -- lua 5.3: bitwise.lua, v 1.24 2014/12/26 17:20:53 roberto
+
+ bit32 = load ( [[
+local select = select -- instead of: arg = { ... }
+
+bit32 = {
+ bnot = function (a)
+ return ~a & 0xFFFFFFFF
+ end,
+ band = function (x, y, z, ...)
+ if not z then
+ return ((x or -1) & (y or -1)) & 0xFFFFFFFF
+ else
+ local res = x & y & z
+ for i=1,select("#",...) do
+ res = res & select(i,...)
+ end
+ return res & 0xFFFFFFFF
+ end
+ end,
+ bor = function (x, y, z, ...)
+ if not z then
+ return ((x or 0) | (y or 0)) & 0xFFFFFFFF
+ else
+ local res = x | y | z
+ for i=1,select("#",...) do
+ res = res | select(i,...)
+ end
+ return res & 0xFFFFFFFF
+ end
+ end,
+ bxor = function (x, y, z, ...)
+ if not z then
+ return ((x or 0) ~ (y or 0)) & 0xFFFFFFFF
+ else
+ local res = x ~ y ~ z
+ for i=1,select("#",...) do
+ res = res ~ select(i,...)
+ end
+ return res & 0xFFFFFFFF
+ end
+ end,
+ btest = function (x, y, z, ...)
+ if not z then
+ return (((x or -1) & (y or -1)) & 0xFFFFFFFF) ~= 0
+ else
+ local res = x & y & z
+ for i=1,select("#",...) do
+ res = res & select(i,...)
+ end
+ return (res & 0xFFFFFFFF) ~= 0
+ end
+ end,
+ lshift = function (a, b)
+ return ((a & 0xFFFFFFFF) << b) & 0xFFFFFFFF
+ end,
+ rshift = function (a, b)
+ return ((a & 0xFFFFFFFF) >> b) & 0xFFFFFFFF
+ end,
+ arshift = function (a, b)
+ a = a & 0xFFFFFFFF
+ if b <= 0 or (a & 0x80000000) == 0 then
+ return (a >> b) & 0xFFFFFFFF
+ else
+ return ((a >> b) | ~(0xFFFFFFFF >> b)) & 0xFFFFFFFF
+ end
+ end,
+ lrotate = function (a ,b)
+ b = b & 31
+ a = a & 0xFFFFFFFF
+ a = (a << b) | (a >> (32 - b))
+ return a & 0xFFFFFFFF
+ end,
+ rrotate = function (a, b)
+ b = -b & 31
+ a = a & 0xFFFFFFFF
+ a = (a << b) | (a >> (32 - b))
+ return a & 0xFFFFFFFF
+ end,
+ extract = function (a, f, w)
+ return (a >> f) & ~(-1 << (w or 1))
+ end,
+ replace = function (a, v, f, w)
+ local mask = ~(-1 << (w or 1))
+ return ((a & ~(mask << f)) | ((v & mask) << f)) & 0xFFFFFFFF
+ end,
+}
+ ]] )
+
+elseif bit then
+
+ -- luajit (for now)
+
+ bit32 = load ( [[
+local band, bnot, rshift, lshift = bit.band, bit.bnot, bit.rshift, bit.lshift
+
+bit32 = {
+ arshift = bit.arshift,
+ band = band,
+ bnot = bnot,
+ bor = bit.bor,
+ bxor = bit.bxor,
+ btest = function(...)
+ return band(...) ~= 0
+ end,
+ extract = function(a,f,w)
+ return band(rshift(a,f),2^(w or 1)-1)
+ end,
+ lrotate = bit.rol,
+ lshift = lshift,
+ replace = function(a,v,f,w)
+ local mask = 2^(w or 1)-1
+ return band(a,bnot(lshift(mask,f)))+lshift(band(v,mask),f)
+ end,
+ rrotate = bit.ror,
+ rshift = rshift,
+}
+ ]] )
+
+else
+
+ -- hope for the best or fail
+
+ -- bit32 = require("bit32")
+
+ xpcall(function() local _, t = require("bit32") if t then bit32 = t end return end,function() end)
+
+end
+
+return bit32 or false
diff --git a/tex/context/base/mkiv/l-file.lua b/tex/context/base/mkiv/l-file.lua
index f2a27ad18..5fec0040f 100644
--- a/tex/context/base/mkiv/l-file.lua
+++ b/tex/context/base/mkiv/l-file.lua
@@ -96,6 +96,10 @@ function lfs.isfile(name)
return attributes(name,"mode") == "file"
end
+function lfs.isfound(name)
+ return attributes(name,"mode") == "file" and name or nil
+end
+
local colon = P(":")
local period = P(".")
local periods = P("..")
@@ -699,3 +703,25 @@ function lfs.mkdirs(path)
lfs.mkdir(full)
end
end
+
+-- here is one i ran into when messing around with xavante code (kepler project)
+-- where it's called in_base .. no gain in using lpeg here
+
+function file.withinbase(path) -- don't go beyond root
+ local l = 0
+ if not find(path,"^/") then
+ path = "/" .. path
+ end
+ for dir in gmatch(path,"/([^/]+)") do
+ if dir == ".." then
+ l = l - 1
+ elseif dir ~= "." then
+ l = l + 1
+ end
+ if l < 0 then
+ return false
+ end
+ end
+ return true
+end
+
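+-- for instance (a sketch):
+--
+-- print(file.withinbase("a/b/../c")) -- true
+-- print(file.withinbase("../oops"))  -- false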
diff --git a/tex/context/base/mkiv/l-io.lua b/tex/context/base/mkiv/l-io.lua
index 75e704a34..fa3ad3c4c 100644
--- a/tex/context/base/mkiv/l-io.lua
+++ b/tex/context/base/mkiv/l-io.lua
@@ -10,7 +10,7 @@ local io = io
local open, flush, write, read = io.open, io.flush, io.write, io.read
local byte, find, gsub, format = string.byte, string.find, string.gsub, string.format
local concat = table.concat
-local floor = math.floor
+----- floor = math.floor
local type = type
if string.find(os.getenv("PATH"),";",1,true) then
@@ -27,9 +27,9 @@ end
-- to less intermediate large allocations. This phenomena was discussed on the
-- luatex dev list.
-local large = 2^24 -- 16 MB
-local medium = large / 16 -- 1 MB
-local small = medium / 8
+local large = 0x01000000 -- 2^24 16.777.216
+local medium = 0x00100000 -- 2^20 1.048.576
+local small = 0x00020000 -- 2^17 131.072
-- local function readall(f)
-- local size = f:seek("end")
@@ -54,7 +54,7 @@ local small = medium / 8
-- end
local function readall(f)
--- return f:read("*all")
+ -- return f:read("*all")
local size = f:seek("end")
if size > 0 then
f:seek("set",0)
diff --git a/tex/context/base/mkiv/l-lpeg.lua b/tex/context/base/mkiv/l-lpeg.lua
index c34ba6ad4..a7ebd567d 100644
--- a/tex/context/base/mkiv/l-lpeg.lua
+++ b/tex/context/base/mkiv/l-lpeg.lua
@@ -6,6 +6,10 @@ if not modules then modules = { } end modules ['l-lpeg'] = {
license = "see context related readme files"
}
+-- we can get too many captures (e.g. on large xml files) which makes me wonder
+-- if P(foo)/"" can't be simplified to N(foo) i.e. some direct instruction to the
+-- lpeg virtual machine to ignore it
+
-- lpeg 12 vs lpeg 10: slower compilation, similar parsing speed (i need to check
-- if i can use new features like capture / 2 and .B (at first sight the xml
-- parser is some 5% slower)
@@ -17,7 +21,13 @@ if not modules then modules = { } end modules ['l-lpeg'] = {
-- move utf -> l-unicode
-- move string -> l-string or keep it here
-lpeg = require("lpeg")
+-- lpeg.B : backward without consumption
+-- lpeg.F = getmetatable(lpeg.P(1)).__len : forward without consumption
+
+
+lpeg = require("lpeg") -- does lpeg register itself global?
+
+local lpeg = lpeg
-- The latest lpeg doesn't have print any more, and even the new ones are not
-- available by default (only when debug mode is enabled), which is a pitty as
@@ -103,11 +113,14 @@ patterns.alwaysmatched = alwaysmatched
local sign = S('+-')
local zero = P('0')
local digit = R('09')
+local digits = digit^1
local octdigit = R("07")
+local octdigits = octdigit^1
local lowercase = R("az")
local uppercase = R("AZ")
local underscore = P("_")
local hexdigit = digit + lowercase + uppercase
+local hexdigits = hexdigit^1
local cr, lf, crlf = P("\r"), P("\n"), P("\r\n")
----- newline = crlf + S("\r\n") -- cr + lf
local newline = P("\r") * (P("\n") + P(true)) + P("\n") -- P("\r")^-1 * P("\n")^-1
@@ -240,33 +253,36 @@ patterns.doublequoted = dquote * patterns.nodquote * dquote
patterns.quoted = patterns.doublequoted + patterns.singlequoted
patterns.digit = digit
+patterns.digits = digits
patterns.octdigit = octdigit
+patterns.octdigits = octdigits
patterns.hexdigit = hexdigit
+patterns.hexdigits = hexdigits
patterns.sign = sign
-patterns.cardinal = digit^1
-patterns.integer = sign^-1 * digit^1
-patterns.unsigned = digit^0 * period * digit^1
+patterns.cardinal = digits
+patterns.integer = sign^-1 * digits
+patterns.unsigned = digit^0 * period * digits
patterns.float = sign^-1 * patterns.unsigned
-patterns.cunsigned = digit^0 * comma * digit^1
-patterns.cpunsigned = digit^0 * (period + comma) * digit^1
+patterns.cunsigned = digit^0 * comma * digits
+patterns.cpunsigned = digit^0 * (period + comma) * digits
patterns.cfloat = sign^-1 * patterns.cunsigned
patterns.cpfloat = sign^-1 * patterns.cpunsigned
patterns.number = patterns.float + patterns.integer
patterns.cnumber = patterns.cfloat + patterns.integer
patterns.cpnumber = patterns.cpfloat + patterns.integer
-patterns.oct = zero * octdigit^1
+patterns.oct = zero * octdigits
patterns.octal = patterns.oct
patterns.HEX = zero * P("X") * (digit+uppercase)^1
patterns.hex = zero * P("x") * (digit+lowercase)^1
-patterns.hexadecimal = zero * S("xX") * hexdigit^1
+patterns.hexadecimal = zero * S("xX") * hexdigits
patterns.hexafloat = sign^-1
* zero * S("xX")
- * (hexdigit^0 * period * hexdigit^1 + hexdigit^1 * period * hexdigit^0 + hexdigit^1)
- * (S("pP") * sign^-1 * hexdigit^1)^-1
+ * (hexdigit^0 * period * hexdigits + hexdigits * period * hexdigit^0 + hexdigits)
+ * (S("pP") * sign^-1 * hexdigits)^-1
patterns.decafloat = sign^-1
- * (digit^0 * period * digit^1 + digit^1 * period * digit^0 + digit^1)
- * S("eE") * sign^-1 * digit^1
+ * (digit^0 * period * digits + digits * period * digit^0 + digits)
+ * S("eE") * sign^-1 * digits
patterns.propername = (uppercase + lowercase + underscore) * (uppercase + lowercase + underscore + digit)^0 * endofstring
@@ -597,19 +613,27 @@ end
-- print(7,lpegmatch(lpeg.secondofsplit(":"),"bc"))
-- print(9,lpegmatch(lpeg.secondofsplit(":","123"),"bc"))
--- -- slower:
+-- this was slower but lpeg has been sped up in the meantime, so we no longer
+-- use this (still seems somewhat faster on long strings)
+--
+-- local nany = utf8char/""
--
-- function lpeg.counter(pattern)
--- local n, pattern = 0, (lpeg.P(pattern)/function() n = n + 1 end + lpeg.anything)^0
--- return function(str) n = 0 ; lpegmatch(pattern,str) ; return n end
+-- pattern = Cs((P(pattern)/" " + nany)^0)
+-- return function(str)
+-- return #lpegmatch(pattern,str)
+-- end
-- end
-local nany = utf8char/""
-
-function lpeg.counter(pattern)
- pattern = Cs((P(pattern)/" " + nany)^0)
- return function(str)
- return #lpegmatch(pattern,str)
+function lpeg.counter(pattern,action)
+ local n = 0
+ local pattern = (P(pattern) / function() n = n + 1 end + anything)^0
+ ----- pattern = (P(pattern) * (P(true) / function() n = n + 1 end) + anything)^0
+ ----- pattern = (P(pattern) * P(function() n = n + 1 end) + anything)^0
+ if action then
+ return function(str) n = 0 ; lpegmatch(pattern,str) ; action(n) end
+ else
+ return function(str) n = 0 ; lpegmatch(pattern,str) ; return n end
end
end
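+
+-- for example (a sketch):
+--
+-- local count = lpeg.counter("ab")
+-- print(count("ababab")) -- 3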
@@ -841,48 +865,42 @@ end
local p_false = P(false)
local p_true = P(true)
--- local function making(t)
--- local p = p_false
--- local keys = sortedkeys(t)
--- for i=1,#keys do
--- local k = keys[i]
--- if k ~= "" then
+-- local function collapse(t,x)
+-- if type(t) ~= "table" then
+-- return t, x
+-- else
+-- local n = next(t)
+-- if n == nil then
+-- return t, x
+-- elseif next(t,n) == nil then
+-- -- one entry
+-- local k = n
-- local v = t[k]
--- if v == true then
--- p = p + P(k) * p_true
--- elseif v == false then
--- -- can't happen
+-- if type(v) == "table" then
+-- return collapse(v,x..k)
-- else
--- p = p + P(k) * making(v)
+-- return v, x .. k
-- end
--- end
--- end
--- if t[""] then
--- p = p + p_true
--- end
--- return p
--- end
-
--- local function make(t)
--- local p = p_false
--- local keys = sortedkeys(t)
--- for i=1,#keys do
--- local k = keys[i]
--- if k ~= "" then
--- local v = t[k]
--- if v == true then
--- p = p + P(k) * p_true
--- elseif v == false then
--- -- can't happen
--- else
--- p = p + P(k) * making(v)
+-- else
+-- local tt = { }
+-- for k, v in next, t do
+-- local vv, kk = collapse(v,k)
+-- tt[kk] = vv
-- end
+-- return tt, x
-- end
-- end
--- return p
-- end
-local function make(t,rest)
+local lower = utf and utf.lower or string.lower
+local upper = utf and utf.upper or string.upper
+
+function lpeg.setutfcasers(l,u)
+ lower = l or lower
+ upper = u or upper
+end
+
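+-- so a caller can plug in utf aware casers, for instance (a sketch, assuming
+-- characters.lower and characters.upper are loaded):
+--
+-- lpeg.setutfcasers(characters.lower,characters.upper)
+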
+local function make1(t,rest)
local p = p_false
local keys = sortedkeys(t)
for i=1,#keys do
@@ -894,7 +912,7 @@ local function make(t,rest)
elseif v == false then
-- can't happen
else
- p = p + P(k) * make(v,v[""])
+ p = p + P(k) * make1(v,v[""])
end
end
end
@@ -904,34 +922,29 @@ local function make(t,rest)
return p
end
-local function collapse(t,x)
- if type(t) ~= "table" then
- return t, x
- else
- local n = next(t)
- if n == nil then
- return t, x
- elseif next(t,n) == nil then
- -- one entry
- local k = n
+local function make2(t,rest) -- only ascii
+ local p = p_false
+ local keys = sortedkeys(t)
+ for i=1,#keys do
+ local k = keys[i]
+ if k ~= "" then
local v = t[k]
- if type(v) == "table" then
- return collapse(v,x..k)
+ if v == true then
+ p = p + (P(lower(k))+P(upper(k))) * p_true
+ elseif v == false then
+ -- can't happen
else
- return v, x .. k
- end
- else
- local tt = { }
- for k, v in next, t do
- local vv, kk = collapse(v,k)
- tt[kk] = vv
+ p = p + (P(lower(k))+P(upper(k))) * make2(v,v[""])
end
- return tt, x
end
end
+ if rest then
+ p = p + p_true
+ end
+ return p
end
-function lpeg.utfchartabletopattern(list) -- goes to util-lpg
+function lpeg.utfchartabletopattern(list,insensitive) -- goes to util-lpg
local tree = { }
local n = #list
if n == 0 then
@@ -1004,9 +1017,9 @@ function lpeg.utfchartabletopattern(list) -- goes to util-lpg
end
end
end
--- collapse(tree,"") -- needs testing, maybe optional, slightly faster because P("x")*P("X") seems slower than P"(xX") (why)
--- inspect(tree)
- return make(tree)
+ -- collapse(tree,"") -- needs testing, maybe optional, slightly faster because P("x")*P("X") seems slower than P"(xX") (why)
+ -- inspect(tree)
+ return (insensitive and make2 or make1)(tree)
end
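+
+-- the optional second argument makes the match (ascii) case insensitive, as in
+-- this sketch:
+--
+-- local p = lpeg.utfchartabletopattern({ "foo", "bar" }, true)
+-- print(lpeg.match(lpeg.Cs((p/"*" + 1)^0),"Foo or BAR")) -- * or *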
-- local t = { "start", "stoep", "staart", "paard" }
@@ -1112,7 +1125,7 @@ end
local trailingzeros = zero^0 * -digit -- suggested by Roberto R
local case_1 = period * trailingzeros / ""
local case_2 = period * (digit - trailingzeros)^1 * (trailingzeros / "")
-local number = digit^1 * (case_1 + case_2)
+local number = digits * (case_1 + case_2)
local stripper = Cs((number + 1)^0)
lpeg.patterns.stripzeros = stripper
diff --git a/tex/context/base/mkiv/l-lua.lua b/tex/context/base/mkiv/l-lua.lua
index 88cde6d1e..426706f06 100644
--- a/tex/context/base/mkiv/l-lua.lua
+++ b/tex/context/base/mkiv/l-lua.lua
@@ -17,28 +17,37 @@ if not modules then modules = { } end modules ['l-lua'] = {
-- utf.*
-- bit32
--- compatibility hacksand helpers
+local next, type, tonumber = next, type, tonumber
-_MAJORVERSION, _MINORVERSION = string.match(_VERSION,"^[^%d]+(%d+)%.(%d+).*$")
+-- compatibility hacks and helpers
-_MAJORVERSION = tonumber(_MAJORVERSION) or 5
-_MINORVERSION = tonumber(_MINORVERSION) or 1
-_LUAVERSION = _MAJORVERSION + _MINORVERSION/10
+LUAMAJORVERSION, LUAMINORVERSION = string.match(_VERSION,"^[^%d]+(%d+)%.(%d+).*$")
-if _LUAVERSION < 5.2 and jit then
+LUAMAJORVERSION = tonumber(LUAMAJORVERSION) or 5
+LUAMINORVERSION = tonumber(LUAMINORVERSION) or 1
+LUAVERSION = LUAMAJORVERSION + LUAMINORVERSION/10
+
+if LUAVERSION < 5.2 and jit then
--
-- we want loadstring cum suis to behave like 5.2
--
- _MINORVERSION = 2
- _LUAVERSION = 5.2
+ MINORVERSION = 2
+ LUAVERSION = 5.2
end
+_LUAVERSION = LUAVERSION -- for old times sake, will go away
+
-- lpeg
if not lpeg then
lpeg = require("lpeg")
end
+-- if utf8 then
+-- utf8lua = utf8
+-- utf8 = nil
+-- end
+
-- basics:
if loadstring then
@@ -220,3 +229,24 @@ if not FFISUPPORTED then
elseif not ffi.number then
ffi.number = tonumber
end
+
+if not bit32 then -- and utf8 then
+ -- bit32 = load ( [[ -- replacement code with 5.3 syntax so that 5.2 doesn't bark on it ]] )
+ bit32 = require("l-bit32")
+end
+
+-- We need this due to a bug in luatex socket loading:
+
+local loaded = package.loaded
+
+if not loaded["socket"] then loaded["socket"] = loaded["socket.core"] end
+if not loaded["mime"] then loaded["mime"] = loaded["mime.core"] end
+
+if not socket.mime then socket.mime = package.loaded["mime"] end
+
+if not loaded["socket.mime"] then loaded["socket.mime"] = socket.mime end
+if not loaded["socket.http"] then loaded["socket.http"] = socket.http end
+if not loaded["socket.ftp"] then loaded["socket.ftp"] = socket.ftp end
+if not loaded["socket.smtp"] then loaded["socket.smtp"] = socket.smtp end
+if not loaded["socket.tp"] then loaded["socket.tp"] = socket.tp end
+if not loaded["socket.url"] then loaded["socket.url"] = socket.url end
diff --git a/tex/context/base/mkiv/l-macro-imp-optimize.lua b/tex/context/base/mkiv/l-macro-imp-optimize.lua
new file mode 100644
index 000000000..e04b37eab
--- /dev/null
+++ b/tex/context/base/mkiv/l-macro-imp-optimize.lua
@@ -0,0 +1,62 @@
+if not modules then modules = { } end modules ['l-macro-imp-optimize'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- This is for ConTeXt only and used in development. Only in rare cases will we
+-- use this to gain a bit of performance or adapt to specific versions
+-- of Lua.
+
+-- There is no real gain as we hardly use these:
+--
+-- lua.macros.resolvestring [[
+-- #define div(a,b) floor(a/b)
+-- #define mod(a,b) (a % b)
+-- #define odd(a) (a % 2 ~= 0)
+-- #define even(a) (a % 2 == 0)
+-- #define pow(x,y) (x^y)
+-- ]]
+
+if LUAVERSION >= 5.3 and lua.macros then
+
+ -- For the moment we only optimize in Lua 5.3:
+
+ lua.macros.enabled = true
+
+ -- This indirect method makes it possible to use both the functions
+ -- and the inline variant (which often looks better). Also, a mixed
+ -- 5.2 and 5.3 source is not possible because the 5.2 doesn't deal
+ -- with the newer 5.3 syntax.
+
+ -- We need to check for 64 bit usage: 0xFFFFFFFFFFFFFFFF (-1)
+
+ -- lua.macros.resolvestring [[
+ -- #define band(a,b) (a & b)
+ -- #define bnot(a) (~a & 0xFFFFFFFF)
+ -- #define bor(a,b) ((a | b) & 0xFFFFFFFF)
+ -- #define btest(a,b) ((a & b) ~= 0)
+ -- #define bxor(a,b) ((a ~ b) & 0xFFFFFFFF)
+ -- #define rshift(a,b) ((a & b) ~= 0)
+ -- #define extract(a,b,c) ((a >> b) & ~(-1 << c))
+ -- #define extract(a,b) ((a >> b) & 0x1))
+ -- #define lshift(a,b) ((a << b) & 0xFFFFFFFF)
+ -- #define rshift(a,b) ((a >> b) & 0xFFFFFFFF)
+ -- ]]
+
+ lua.macros.resolvestring [[
+ #define band(a,b) (a&b)
+ #define bnot(a) (~a&0xFFFFFFFF)
+ #define bor(a,b) ((a|b)&0xFFFFFFFF)
+ #define btest(a,b) ((a&b)~=0)
+ #define bxor(a,b) ((a~b)&0xFFFFFFFF)
+ #define rshift(a,b) ((a&b)~=0)
+ #define extract(a,b,c) ((a>>b)&~(-1<<c))
+ #define extract(a,b) ((a>>b)&0x1))
+ #define lshift(a,b) ((a<<b)&0xFFFFFFFF)
+ #define rshift(a,b) ((a>>b)&0xFFFFFFFF)
+ ]]
+
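+ -- with these in place, feeding a line like "local z = band(x,0xFF)" through
+ -- lua.macros.resolvestring gives back "local z = (x&0xFF)" (a sketch)
+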
+end
diff --git a/tex/context/base/mkiv/l-macro.lua b/tex/context/base/mkiv/l-macro.lua
new file mode 100644
index 000000000..d2b8c7ae4
--- /dev/null
+++ b/tex/context/base/mkiv/l-macro.lua
@@ -0,0 +1,225 @@
+if not modules then modules = { } end modules ['l-macros'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- This is actually rather old code that I made as a demo for Luigi but that
+-- now comes in handy when we switch to Lua 5.3. The reason for using it (in
+-- in transition) is that we cannot mix 5.3 bit operators in files that get
+-- loaded in 5.2 (parsing happens before conditional testing).
+
+local S, P, R, V, C, Cs, Cc, Ct, Carg = lpeg.S, lpeg.P, lpeg.R, lpeg.V, lpeg.C, lpeg.Cs, lpeg.Cc, lpeg.Ct, lpeg.Carg
+local lpegmatch = lpeg.match
+local concat = table.concat
+local next = next
+
+local newline = S("\n\r")^1
+local continue = P("\\") * newline
+local spaces = S(" \t") + continue
+local name = R("az","AZ","__","09")^1
+local body = ((1+continue/"")-newline)^1
+local lparent = P("(")
+local rparent = P(")")
+local noparent = 1 - (lparent + rparent)
+local nested = P { lparent * (noparent + V(1))^0 * rparent }
+local escaped = P("\\") * P(1)
+local squote = P("'")
+local dquote = P('"')
+local quoted = dquote * (escaped + (1-dquote))^0 * dquote
+ + squote * (escaped + (1-squote))^0 * squote
+
+local arguments = lparent * Ct((Cs((nested+(quoted + 1 - S("),")))^1) + S(", "))^0) * rparent
+
+local macros = lua.macros or { }
+lua.macros = macros
+
+local patterns = { }
+local definitions = { }
+local resolve
+local subparser
+
+-- todo: zero case
+
+resolve = C(C(name) * arguments^-1) / function(raw,s,a)
+ local d = definitions[s]
+ if d then
+ if a then
+ local n = #a
+ local p = patterns[s][n]
+ if p then
+ local d = d[n]
+ for i=1,n do
+ a[i] = lpegmatch(subparser,a[i]) or a[i]
+ end
+ return lpegmatch(p,d,1,a) or d
+ else
+ return raw
+ end
+ else
+ return d[0] or raw
+ end
+ elseif a then
+ for i=1,#a do
+ a[i] = lpegmatch(subparser,a[i]) or a[i]
+ end
+ return s .. "(" .. concat(a,",") .. ")"
+ else
+ return raw
+ end
+end
+
+subparser = Cs((resolve + P(1))^1)
+
+local enddefine = P("#enddefine") / ""
+
+local beginregister = (C(name) * spaces^0 * (arguments + Cc(false)) * C((1-enddefine)^1) * enddefine) / function(k,a,v)
+ local n = 0
+ if a then
+ n = #a
+ local pattern = P(false)
+ for i=1,n do
+ pattern = pattern + (P(a[i]) * Carg(1)) / function(t) return t[i] end
+ end
+ pattern = Cs((pattern + P(1))^1)
+ local p = patterns[k]
+ if not p then
+ p = { [0] = false, false, false, false, false, false, false, false, false }
+ patterns[k] = p
+ end
+ p[n] = pattern
+ end
+ local d = definitions[k]
+ if not d then
+ d = { [0] = false, false, false, false, false, false, false, false, false }
+ definitions[k] = d
+ end
+ d[n] = lpegmatch(subparser,v) or v
+ return ""
+end
+
+local register = (C(name) * spaces^0 * (arguments + Cc(false)) * spaces^0 * C(body)) / function(k,a,v)
+ local n = 0
+ if a then
+ n = #a
+ local pattern = P(false)
+ for i=1,n do
+ pattern = pattern + (P(a[i]) * Carg(1)) / function(t) return t[i] end
+ end
+ pattern = Cs((pattern + P(1))^1)
+ local p = patterns[k]
+ if not p then
+ p = { [0] = false, false, false, false, false, false, false, false, false }
+ patterns[k] = p
+ end
+ p[n] = pattern
+ end
+ local d = definitions[k]
+ if not d then
+ d = { [0] = false, false, false, false, false, false, false, false, false }
+ definitions[k] = d
+ end
+ d[n] = lpegmatch(subparser,v) or v
+ return ""
+end
+
+local unregister = (C(name) * spaces^0 * (arguments + Cc(false))) / function(k,a)
+ local n = 0
+ if a then
+ n = #a
+ local p = patterns[k]
+ if p then
+ p[n] = false
+ end
+ end
+ local d = definitions[k]
+ if d then
+ d[n] = false
+ end
+ return ""
+end
+
+local begindefine = (P("begindefine") * spaces^0 / "") * beginregister
+local define = (P("define" ) * spaces^0 / "") * register
+local undefine = (P("undefine" ) * spaces^0 / "") * unregister
+
+local parser = Cs( ( ( (P("#")/"") * (define + begindefine + undefine) * (newline^0/"") ) + resolve + P(1) )^0 )
+
+function macros.reset()
+ definitions = { }
+ patterns = { }
+end
+
+function macros.resolvestring(str)
+ return lpegmatch(parser,str) or str
+end
+
+function macros.resolving()
+ return next(patterns)
+end
+
+-- local str = [[
+-- #define check(p,q) (p ~= 0) and (p > q)
+--
+-- #define oeps a > 10
+--
+-- #define whatever oeps
+--
+-- if whatever and check(1,2) then print("!") end
+-- if whatever and check(1,3) then print("!") end
+-- if whatever and check(1,4) then print("!") end
+-- if whatever and check(1,5) then print("!") end
+-- if whatever and check(1,6) then print("!") end
+-- if whatever and check(1,7) then print("!") end
+-- ]]
+--
+-- print(macros.resolvestring(str))
+--
+-- macros.resolvestring(io.loaddata("mymacros.lua"))
+-- loadstring(macros.resolvestring(io.loaddata("mytestcode.lua")))
+
+-- local luamacros = [[
+-- #begindefine setnodecodes
+-- local nodecodes = nodes.codes
+-- local hlist_code = nodecodes.hlist
+-- local vlist_code = nodecodes.vlist
+-- local glyph_code = nodecodes.glyph
+-- #enddefine
+--
+-- #define hlist(id) id == hlist_code
+-- #define vlist(id) id == vlist_code
+-- #define glyph(id) id == glyph_code
+-- ]]
+--
+-- local luacode = [[
+-- setnodecodes
+--
+-- if hlist(id) or vlist(id) then
+-- print("we have a list")
+-- elseif glyph(id) then
+-- print("we have a glyph")
+-- else
+-- print("i'm stymied")
+-- end
+--
+-- local z = band(0x23,x)
+-- local z = btest(0x23,x)
+-- local z = rshift(0x23,x)
+-- local z = lshift(0x23,x)
+-- ]]
+--
+-- require("l-macros-test-001")
+--
+-- macros.resolvestring(luamacros)
+--
+-- local newcode = macros.resolvestring(luacode)
+--
+-- print(newcode)
+--
+-- macros.reset()
+
+-- local d = io.loaddata("t:/sources/font-otr.lua")
+-- local n = macros.resolvestring(d)
+-- io.savedata("r:/tmp/o.lua",n)
diff --git a/tex/context/base/mkiv/l-math.lua b/tex/context/base/mkiv/l-math.lua
index ec62919b4..e5668a5db 100644
--- a/tex/context/base/mkiv/l-math.lua
+++ b/tex/context/base/mkiv/l-math.lua
@@ -6,33 +6,144 @@ if not modules then modules = { } end modules ['l-math'] = {
license = "see context related readme files"
}
-local floor, sin, cos, tan = math.floor, math.sin, math.cos, math.tan
-
if not math.ceiling then
+
math.ceiling = math.ceil
+
end
if not math.round then
+
+ local floor = math.floor
+
function math.round(x) return floor(x + 0.5) end
+
end
if not math.div then
+
+ local floor = math.floor
+
function math.div(n,m) return floor(n/m) end
+
end
if not math.mod then
+
function math.mod(n,m) return n % m end
-end
-local pipi = 2*math.pi/360
+end
if not math.sind then
+
+ local sin, cos, tan = math.sin, math.cos, math.tan
+
+ local pipi = 2*math.pi/360
+
function math.sind(d) return sin(d*pipi) end
function math.cosd(d) return cos(d*pipi) end
function math.tand(d) return tan(d*pipi) end
+
end
if not math.odd then
+
function math.odd (n) return n % 2 ~= 0 end
function math.even(n) return n % 2 == 0 end
+
+end
+
+if not math.cosh then
+
+ local exp = math.exp
+
+ function math.cosh(x)
+ local xx = exp(x)
+ return (xx+1/xx)/2
+ end
+ function math.sinh(x)
+ local xx = exp(x)
+ return (xx-1/xx)/2
+ end
+ function math.tanh(x)
+ local xx = exp(x)
+ return (xx-1/xx)/(xx+1/xx)
+ end
+
+end
+
+if not math.pow then
+
+ function math.pow(x,y)
+ return x^y
+ end
+
+end
+
+if not math.atan2 then
+
+ math.atan2 = math.atan
+
+end
+
+if not math.ldexp then
+
+ function math.ldexp(x,e)
+ return x * 2.0^e
+ end
+
+end
+
+-- if not math.frexp then
+--
+-- -- not a oneliner so use a math library instead
+--
+-- function math.frexp(x,e)
+-- -- returns m and e such that x = m2e, e is an integer and the absolute
+-- -- value of m is in the range [0.5, 1) (or zero when x is zero)
+-- end
+--
+-- end
+
+if not math.log10 then
+
+ local log = math.log
+
+ function math.log10(x)
+ return log(x,10)
+ end
+
+end
+
+if not math.type then
+
+ function math.type()
+ return "float"
+ end
+
+end
+
+if not math.tointeger then
+
+ math.mininteger = -0x4FFFFFFFFFFF
+ math.maxinteger = 0x4FFFFFFFFFFF
+
+ local floor = math.floor
+
+ function math.tointeger(n)
+ local f = floor(n)
+ return f == n and f or nil
+ end
+
+end
+
+if not math.ult then
+
+ local floor = math.floor
+
+ function math.ult(m,n)
+ -- not ok but i'm not motivated to look into it now
+ return floor(m) < floor(n) -- unsigned comparison needed
+ end
+
end
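These additions make the math table look the same under 5.2 and 5.3: the first group fills old 5.2 gaps (ceiling, round, div, sind, odd), the rest papers over the 5.3 differences (cosh and friends as well as pow, atan2, ldexp and log10 were dropped there, while type, tointeger and ult are new). A small behaviour sketch, assuming this file is loaded:

print(math.pow(2,10))        -- 1024, plain x^y when the native function is gone
print(math.log10(1000))      -- 3 (within rounding), implemented as log(x,10)
print(math.cosh(0))          -- 1.0
print(math.tointeger(42.0))  -- 42
print(math.tointeger(42.5))  -- nil
print(math.type(42))         -- "integer" on 5.3, "float" from the 5.2 stub above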
diff --git a/tex/context/base/mkiv/l-number.lua b/tex/context/base/mkiv/l-number.lua
index c6f1e3359..a83e8f8f9 100644
--- a/tex/context/base/mkiv/l-number.lua
+++ b/tex/context/base/mkiv/l-number.lua
@@ -18,46 +18,50 @@ local floor = math.floor
number = number or { }
local number = number
-if bit32 then -- I wonder if this is faster
+-- begin obsolete code --
- local btest, bor = bit32.btest, bit32.bor
-
- function number.bit(p)
- return 2 ^ (p - 1) -- 1-based indexing
- end
-
- number.hasbit = btest
- number.setbit = bor
-
- function number.setbit(x,p) -- why not bor?
- return btest(x,p) and x or x + p
- end
-
- function number.clearbit(x,p)
- return btest(x,p) and x - p or x
- end
-
-else
-
- -- http://ricilake.blogspot.com/2007/10/iterating-bits-in-lua.html
-
- function number.bit(p)
- return 2 ^ (p - 1) -- 1-based indexing
- end
-
- function number.hasbit(x, p) -- typical call: if hasbit(x, bit(3)) then ...
- return x % (p + p) >= p
- end
-
- function number.setbit(x, p)
- return (x % (p + p) >= p) and x or x + p
- end
-
- function number.clearbit(x, p)
- return (x % (p + p) >= p) and x - p or x
- end
+-- if bit32 then
+--
+-- local btest, bor = bit32.btest, bit32.bor
+--
+-- function number.bit(p)
+-- return 2 ^ (p - 1) -- 1-based indexing
+-- end
+--
+-- number.hasbit = btest
+-- number.setbit = bor
+--
+-- function number.setbit(x,p) -- why not bor?
+-- return btest(x,p) and x or x + p
+-- end
+--
+-- function number.clearbit(x,p)
+-- return btest(x,p) and x - p or x
+-- end
+--
+-- else
+--
+-- -- http://ricilake.blogspot.com/2007/10/iterating-bits-in-lua.html
+--
+-- function number.bit(p)
+-- return 2 ^ (p - 1) -- 1-based indexing
+-- end
+--
+-- function number.hasbit(x, p) -- typical call: if hasbit(x, bit(3)) then ...
+-- return x % (p + p) >= p
+-- end
+--
+-- function number.setbit(x, p)
+-- return (x % (p + p) >= p) and x or x + p
+-- end
+--
+-- function number.clearbit(x, p)
+-- return (x % (p + p) >= p) and x - p or x
+-- end
+--
+-- end
-end
+-- end obsolete code --
-- print(number.tobitstring(8))
-- print(number.tobitstring(14))
@@ -152,60 +156,60 @@ function number.toevenhex(n)
end
end
--- a,b,c,d,e,f = number.toset(100101)
+-- -- a,b,c,d,e,f = number.toset(100101)
+-- --
+-- -- function number.toset(n)
+-- -- return match(tostring(n),"(.?)(.?)(.?)(.?)(.?)(.?)(.?)(.?)")
+-- -- end
+-- --
+-- -- -- the lpeg way is slower on 8 digits, but faster on 4 digits, some 7.5%
+-- -- -- on
+-- --
+-- -- for i=1,1000000 do
+-- -- local a,b,c,d,e,f,g,h = number.toset(12345678)
+-- -- local a,b,c,d = number.toset(1234)
+-- -- local a,b,c = number.toset(123)
+-- -- local a,b,c = number.toset("123")
+-- -- end
+--
+-- local one = lpeg.C(1-lpeg.S('')/tonumber)^1
--
-- function number.toset(n)
--- return match(tostring(n),"(.?)(.?)(.?)(.?)(.?)(.?)(.?)(.?)")
+-- return lpegmatch(one,tostring(n))
-- end
--
--- -- the lpeg way is slower on 8 digits, but faster on 4 digits, some 7.5%
--- -- on
+-- -- function number.bits(n,zero)
+-- -- local t, i = { }, (zero and 0) or 1
+-- -- while n > 0 do
+-- -- local m = n % 2
+-- -- if m > 0 then
+-- -- insert(t,1,i)
+-- -- end
+-- -- n = floor(n/2)
+-- -- i = i + 1
+-- -- end
+-- -- return t
+-- -- end
+-- --
+-- -- -- a bit faster
--
--- for i=1,1000000 do
--- local a,b,c,d,e,f,g,h = number.toset(12345678)
--- local a,b,c,d = number.toset(1234)
--- local a,b,c = number.toset(123)
--- local a,b,c = number.toset("123")
--- end
-
-local one = lpeg.C(1-lpeg.S('')/tonumber)^1
-
-function number.toset(n)
- return lpegmatch(one,tostring(n))
-end
-
--- function number.bits(n,zero)
--- local t, i = { }, (zero and 0) or 1
--- while n > 0 do
+-- local function bits(n,i,...)
+-- if n > 0 then
-- local m = n % 2
+-- local n = floor(n/2)
-- if m > 0 then
--- insert(t,1,i)
+-- return bits(n, i+1, i, ...)
+-- else
+-- return bits(n, i+1, ...)
-- end
--- n = floor(n/2)
--- i = i + 1
+-- else
+-- return ...
-- end
--- return t
-- end
--
--- -- a bit faster
-
-local function bits(n,i,...)
- if n > 0 then
- local m = n % 2
- local n = floor(n/2)
- if m > 0 then
- return bits(n, i+1, i, ...)
- else
- return bits(n, i+1, ...)
- end
- else
- return ...
- end
-end
-
-function number.bits(n)
- return { bits(n,1) }
-end
+-- function number.bits(n)
+-- return { bits(n,1) }
+-- end
function number.bytetodecimal(b)
local d = floor(b * 100 / 255 + 0.5)
diff --git a/tex/context/base/mkiv/l-package.lua b/tex/context/base/mkiv/l-package.lua
index 075fcde25..d43c5c5c6 100644
--- a/tex/context/base/mkiv/l-package.lua
+++ b/tex/context/base/mkiv/l-package.lua
@@ -23,6 +23,10 @@ local P, S, Cs, lpegmatch = lpeg.P, lpeg.S, lpeg.Cs, lpeg.match
local package = package
local searchers = package.searchers or package.loaders
+local insert, remove = table.insert, table.remove
+
+-------.loaders = nil -- old stuff that we don't want
+-------.seeall = nil -- old stuff that we don't want
-- dummies
@@ -191,7 +195,25 @@ local function registerpath(tag,what,target,...)
add(path)
end
end
- return paths
+end
+
+local function pushpath(tag,what,target,path)
+ local path = helpers.cleanpath(path)
+ insert(target,1,path)
+ if helpers.trace then
+ helpers.report("pushing %s path in front: %s",tag,path)
+ end
+end
+
+local function poppath(tag,what,target)
+ local path = remove(target,1)
+ if helpers.trace then
+ if path then
+ helpers.report("popping %s path from front: %s",tag,path)
+ else
+ helpers.report("no %s path to pop",tag)
+ end
+ end
end
helpers.registerpath = registerpath
@@ -199,10 +221,22 @@ helpers.registerpath = registerpath
function package.extraluapath(...)
registerpath("extra lua","lua",extraluapaths,...)
end
+function package.pushluapath(path)
+ pushpath("extra lua","lua",extraluapaths,path)
+end
+function package.popluapath()
+ poppath("extra lua","lua",extraluapaths)
+end
function package.extralibpath(...)
registerpath("extra lib","lib",extralibpaths,...)
end
+function package.pushlibpath(path)
+ pushpath("extra lib","lib",extralibpaths,path)
+end
+function package.poplibpath()
+ poppath("extra lib","lib",extralibpaths)
+end
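The push/pop pair complements registerpath: a path can temporarily be put in front of the extra lua (or lib) paths and taken off again, so one tree gets precedence during a single load. A usage sketch; the directory and module name are made up, and the ConTeXt loader is assumed to consult these extra paths, as the rest of this file suggests:

package.pushluapath("/data/myproject/lua")
local helpers = require("myproject-helpers") -- looked up via the pushed path first
package.popluapath()                         -- restore the previous search order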
-- lib loader (used elsewhere)
diff --git a/tex/context/base/mkiv/l-sandbox.lua b/tex/context/base/mkiv/l-sandbox.lua
index 7a89aa8cd..2ecec0023 100644
--- a/tex/context/base/mkiv/l-sandbox.lua
+++ b/tex/context/base/mkiv/l-sandbox.lua
@@ -17,7 +17,7 @@ local global = _G
local next = next
local unpack = unpack or table.unpack
local type = type
-local tprint = texio.write_nl or print
+local tprint = texio and texio.write_nl or print
local tostring = tostring
local format = string.format -- no formatters yet
local concat = table.concat
@@ -310,3 +310,4 @@ if supported("lfs") then
lfs.shortname = register(lfs.shortname, "lfs.shortname")
lfs.symlinkattributes = register(lfs.symlinkattributes,"lfs.symlinkattributes")
end
+
diff --git a/tex/context/base/mkiv/l-string.lua b/tex/context/base/mkiv/l-string.lua
index e0fb28445..8ae8d8d1d 100644
--- a/tex/context/base/mkiv/l-string.lua
+++ b/tex/context/base/mkiv/l-string.lua
@@ -18,7 +18,7 @@ local P, S, C, Ct, Cc, Cs = lpeg.P, lpeg.S, lpeg.C, lpeg.Ct, lpeg.Cc, lpeg.Cs
--
-- function string.split(str,pattern)
-- local t = { }
--- if #str > 0 then
+-- if str ~= "" then
-- local n = 1
-- for s in gmatch(str..pattern,"(.-)"..pattern) do
-- t[n] = s
@@ -220,11 +220,11 @@ string.unquote = string.unquoted
-- new
-if not string.bytetable then
+if not string.bytetable then -- used in font-cff.lua
local limit = 5000 -- we can go to 8000 in luajit and much higher in lua if needed
- function string.bytetable(str)
+ function string.bytetable(str) -- from a string
local n = #str
if n > limit then
local t = { byte(str,1,limit) }
diff --git a/tex/context/base/mkiv/l-table.lua b/tex/context/base/mkiv/l-table.lua
index 3c1ce6daf..9d7152544 100644
--- a/tex/context/base/mkiv/l-table.lua
+++ b/tex/context/base/mkiv/l-table.lua
@@ -6,7 +6,7 @@ if not modules then modules = { } end modules ['l-table'] = {
license = "see context related readme files"
}
-local type, next, tostring, tonumber, ipairs, select = type, next, tostring, tonumber, ipairs, select
+local type, next, tostring, tonumber, select = type, next, tostring, tonumber, select
local table, string = table, string
local concat, sort, insert, remove = table.concat, table.sort, table.insert, table.remove
local format, lower, dump = string.format, string.lower, string.dump
@@ -22,6 +22,10 @@ local floor = math.floor
local stripper = patterns.stripper
+function table.getn(t)
+ return t and #t -- for very old times sake
+end
+
function table.strip(tab)
local lst, l = { }, 0
for i=1,#tab do
@@ -460,7 +464,7 @@ function table.tohash(t,value)
local h = { }
if t then
if value == nil then value = true end
- for _, v in next, t do -- no ipairs here
+ for _, v in next, t do
h[v] = value
end
end
@@ -469,7 +473,7 @@ end
function table.fromhash(t)
local hsh, h = { }, 0
- for k, v in next, t do -- no ipairs here
+ for k, v in next, t do
if v then
h = h + 1
hsh[h] = k
@@ -1092,7 +1096,9 @@ function table.unnest(t) -- bad name
end
local function are_equal(a,b,n,m) -- indexed
- if a and b and #a == #b then
+ if a == b then
+ return true
+ elseif a and b and #a == #b then
n = n or 1
m = m or #a
for i=n,m do
@@ -1114,16 +1120,18 @@ local function are_equal(a,b,n,m) -- indexed
end
local function identical(a,b) -- assumes same structure
- for ka, va in next, a do
- local vb = b[ka]
- if va == vb then
- -- same
- elseif type(va) == "table" and type(vb) == "table" then
- if not identical(va,vb) then
+ if a ~= b then
+ for ka, va in next, a do
+ local vb = b[ka]
+ if va == vb then
+ -- same
+ elseif type(va) == "table" and type(vb) == "table" then
+ if not identical(va,vb) then
+ return false
+ end
+ else
return false
end
- else
- return false
end
end
return true
@@ -1388,3 +1396,26 @@ function table.filtered(t,pattern,sort,cmp)
return nothing
end
end
+
+-- lua 5.3:
+
+if not table.move then
+
+ function table.move(a1,f,e,t,a2)
+ if a2 and a1 ~= a2 then
+ for i=f,e do
+ a2[t] = a1[i]
+ t = t + 1
+ end
+ return a2
+ else
+ t = t + e - f
+ for i=e,f,-1 do
+ a1[t] = a1[i]
+ t = t - 1
+ end
+ return a1
+ end
+ end
+
+end
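The fallback mimics the Lua 5.3 call table.move(a1,f,e,t[,a2]): copy a1[f..e] to a2 starting at index t, defaulting to an in-place move that copies backwards so an upward shift does not overwrite its own source. A quick sketch of the intended behaviour:

local a = { 1, 2, 3, 4, 5 }
table.move(a, 1, 3, 3)          -- shift a[1..3] up to a[3..5]
-- a is now { 1, 2, 1, 2, 3 }

local src, dst = { "x", "y", "z" }, { }
table.move(src, 1, 3, 1, dst)   -- copy into another table
-- dst is now { "x", "y", "z" }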
diff --git a/tex/context/base/mkiv/l-unicode.lua b/tex/context/base/mkiv/l-unicode.lua
index b913d0cfc..b5f52d312 100644
--- a/tex/context/base/mkiv/l-unicode.lua
+++ b/tex/context/base/mkiv/l-unicode.lua
@@ -6,6 +6,9 @@ if not modules then modules = { } end modules ['l-unicode'] = {
license = "see context related readme files"
}
+-- floor(b/256) => rshift(b, 8)
+-- floor(b/1024) => rshift(b,10)
+
-- in lua 5.3:
-- utf8.char(···) : concatenated
@@ -18,6 +21,9 @@ if not modules then modules = { } end modules ['l-unicode'] = {
-- todo: utf.sub replacement (used in syst-aux)
-- we put these in the utf namespace:
+-- used : byte char gmatch len lower sub upper
+-- not used : dump find format gfind gsub match rep reverse
+
utf = utf or (unicode and unicode.utf8) or { }
utf.characters = utf.characters or string.utfcharacters
@@ -29,6 +35,9 @@ utf.values = utf.values or string.utfvalues
-- string.characterpairs
-- string.bytes
-- string.bytepairs
+-- string.utflength
+-- string.utfvalues
+-- string.utfcharacters
local type = type
local char, byte, format, sub, gmatch = string.char, string.byte, string.format, string.sub, string.gmatch
@@ -64,53 +73,103 @@ end
if not utf.char then
- local floor, char = math.floor, string.char
-
- function utf.char(n)
- if n < 0x80 then
- -- 0aaaaaaa : 0x80
- return char(n)
- elseif n < 0x800 then
- -- 110bbbaa : 0xC0 : n >> 6
- -- 10aaaaaa : 0x80 : n & 0x3F
- return char(
- 0xC0 + floor(n/0x40),
- 0x80 + (n % 0x40)
- )
- elseif n < 0x10000 then
- -- 1110bbbb : 0xE0 : n >> 12
- -- 10bbbbaa : 0x80 : (n >> 6) & 0x3F
- -- 10aaaaaa : 0x80 : n & 0x3F
- return char(
- 0xE0 + floor(n/0x1000),
- 0x80 + (floor(n/0x40) % 0x40),
- 0x80 + (n % 0x40)
- )
- elseif n < 0x200000 then
- -- 11110ccc : 0xF0 : n >> 18
- -- 10ccbbbb : 0x80 : (n >> 12) & 0x3F
- -- 10bbbbaa : 0x80 : (n >> 6) & 0x3F
- -- 10aaaaaa : 0x80 : n & 0x3F
- -- dddd : ccccc - 1
- return char(
- 0xF0 + floor(n/0x40000),
- 0x80 + (floor(n/0x1000) % 0x40),
- 0x80 + (floor(n/0x40) % 0x40),
- 0x80 + (n % 0x40)
- )
+ utf.char = string.utfcharacter or (utf8 and utf8.char)
+
+ if not utf.char then
+
+ -- no multiples
+
+ local char = string.char
+
+ if bit32 then
+
+ local rshift = bit32.rshift
+
+ function utf.char(n)
+ if n < 0x80 then
+ -- 0aaaaaaa : 0x80
+ return char(n)
+ elseif n < 0x800 then
+ -- 110bbbaa : 0xC0 : n >> 6
+ -- 10aaaaaa : 0x80 : n & 0x3F
+ return char(
+ 0xC0 + rshift(n,6),
+ 0x80 + (n % 0x40)
+ )
+ elseif n < 0x10000 then
+ -- 1110bbbb : 0xE0 : n >> 12
+ -- 10bbbbaa : 0x80 : (n >> 6) & 0x3F
+ -- 10aaaaaa : 0x80 : n & 0x3F
+ return char(
+ 0xE0 + rshift(n,12),
+ 0x80 + (rshift(n,6) % 0x40),
+ 0x80 + (n % 0x40)
+ )
+ elseif n < 0x200000 then
+ -- 11110ccc : 0xF0 : n >> 18
+ -- 10ccbbbb : 0x80 : (n >> 12) & 0x3F
+ -- 10bbbbaa : 0x80 : (n >> 6) & 0x3F
+ -- 10aaaaaa : 0x80 : n & 0x3F
+ -- dddd : ccccc - 1
+ return char(
+ 0xF0 + rshift(n,18),
+ 0x80 + (rshift(n,12) % 0x40),
+ 0x80 + (rshift(n,6) % 0x40),
+ 0x80 + (n % 0x40)
+ )
+ else
+ return ""
+ end
+ end
+
else
- return ""
+
+ local floor = math.floor
+
+ function utf.char(n)
+ if n < 0x80 then
+ return char(n)
+ elseif n < 0x800 then
+ return char(
+ 0xC0 + floor(n/0x40),
+ 0x80 + (n % 0x40)
+ )
+ elseif n < 0x10000 then
+ return char(
+ 0xE0 + floor(n/0x1000),
+ 0x80 + (floor(n/0x40) % 0x40),
+ 0x80 + (n % 0x40)
+ )
+ elseif n < 0x200000 then
+ return char(
+ 0xF0 + floor(n/0x40000),
+ 0x80 + (floor(n/0x1000) % 0x40),
+ 0x80 + (floor(n/0x40) % 0x40),
+ 0x80 + (n % 0x40)
+ )
+ else
+ return ""
+ end
+ end
+
end
+
end
end
if not utf.byte then
- local utf8byte = patterns.utf8byte
+ utf.byte = string.utfvalue or (utf8 and utf8.codepoint)
+
+ if not utf.byte then
+
+ local utf8byte = patterns.utf8byte
+
+ function utf.byte(c)
+ return lpegmatch(utf8byte,c)
+ end
- function utf.byte(c)
- return lpegmatch(utf8byte,c)
end
end
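Whichever branch wins (LuaTeX's string helpers, the native utf8 library, the bit32 variant or the plain floor variant), utf.char and utf.byte end up as a matching encoder/decoder pair for single code points. A sketch:

print(utf.char(0x41))                          -- "A", one byte
print(utf.char(0x20AC))                        -- "€", three bytes: E2 82 AC
print(#utf.char(0x20AC))                       -- 3
print(utf.byte(utf.char(0x1F600)) == 0x1F600)  -- true, a four byte sequence round trips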
@@ -171,43 +230,6 @@ local one = P(1)
local two = C(1) * C(1)
local four = C(R(utfchar(0xD8),utfchar(0xFF))) * C(1) * C(1) * C(1)
--- actually one of them is already utf ... sort of useless this one
-
--- function utf.char(n)
--- if n < 0x80 then
--- return char(n)
--- elseif n < 0x800 then
--- return char(
--- 0xC0 + floor(n/0x40),
--- 0x80 + (n % 0x40)
--- )
--- elseif n < 0x10000 then
--- return char(
--- 0xE0 + floor(n/0x1000),
--- 0x80 + (floor(n/0x40) % 0x40),
--- 0x80 + (n % 0x40)
--- )
--- elseif n < 0x40000 then
--- return char(
--- 0xF0 + floor(n/0x40000),
--- 0x80 + floor(n/0x1000),
--- 0x80 + (floor(n/0x40) % 0x40),
--- 0x80 + (n % 0x40)
--- )
--- else
--- -- return char(
--- -- 0xF1 + floor(n/0x1000000),
--- -- 0x80 + floor(n/0x40000),
--- -- 0x80 + floor(n/0x1000),
--- -- 0x80 + (floor(n/0x40) % 0x40),
--- -- 0x80 + (n % 0x40)
--- -- )
--- return "?"
--- end
--- end
---
--- merge into:
-
local pattern = P("\254\255") * Cs( (
four / function(a,b,c,d)
local ab = 0xFF * byte(a) + byte(b)
@@ -253,83 +275,89 @@ end
if not utf.len then
- -- -- alternative 1: 0.77
- --
- -- local utfcharcounter = utfbom^-1 * Cs((p_utf8char/'!')^0)
- --
- -- function utf.len(str)
- -- return #lpegmatch(utfcharcounter,str or "")
- -- end
- --
- -- -- alternative 2: 1.70
- --
- -- local n = 0
- --
- -- local utfcharcounter = utfbom^-1 * (p_utf8char/function() n = n + 1 end)^0 -- slow
- --
- -- function utf.length(str)
- -- n = 0
- -- lpegmatch(utfcharcounter,str or "")
- -- return n
- -- end
- --
- -- -- alternative 3: 0.24 (native unicode.utf8.len: 0.047)
-
- -- local n = 0
- --
- -- -- local utfcharcounter = lpeg.patterns.utfbom^-1 * P ( ( Cp() * (
- -- -- patterns.utf8one ^1 * Cc(1)
- -- -- + patterns.utf8two ^1 * Cc(2)
- -- -- + patterns.utf8three^1 * Cc(3)
- -- -- + patterns.utf8four ^1 * Cc(4) ) * Cp() / function(f,d,t) n = n + (t - f)/d end
- -- -- )^0 ) -- just as many captures as below
- --
- -- -- local utfcharcounter = lpeg.patterns.utfbom^-1 * P ( (
- -- -- (Cmt(patterns.utf8one ^1,function(_,_,s) n = n + #s return true end))
- -- -- + (Cmt(patterns.utf8two ^1,function(_,_,s) n = n + #s/2 return true end))
- -- -- + (Cmt(patterns.utf8three^1,function(_,_,s) n = n + #s/3 return true end))
- -- -- + (Cmt(patterns.utf8four ^1,function(_,_,s) n = n + #s/4 return true end))
- -- -- )^0 ) -- not interesting as it creates strings but sometimes faster
- --
- -- -- The best so far:
- --
- -- local utfcharcounter = utfbom^-1 * P ( (
- -- Cp() * (patterns.utf8one )^1 * Cp() / function(f,t) n = n + t - f end
- -- + Cp() * (patterns.utf8two )^1 * Cp() / function(f,t) n = n + (t - f)/2 end
- -- + Cp() * (patterns.utf8three)^1 * Cp() / function(f,t) n = n + (t - f)/3 end
- -- + Cp() * (patterns.utf8four )^1 * Cp() / function(f,t) n = n + (t - f)/4 end
- -- )^0 )
-
- -- function utf.len(str)
- -- n = 0
- -- lpegmatch(utfcharcounter,str or "")
- -- return n
- -- end
-
- local n, f = 0, 1
+ utf.len = string.utflength or (utf8 and utf8.len)
+
+ if not utf.len then
+
+ -- -- alternative 1: 0.77
+ --
+ -- local utfcharcounter = utfbom^-1 * Cs((p_utf8char/'!')^0)
+ --
+ -- function utf.len(str)
+ -- return #lpegmatch(utfcharcounter,str or "")
+ -- end
+ --
+ -- -- alternative 2: 1.70
+ --
+ -- local n = 0
+ --
+ -- local utfcharcounter = utfbom^-1 * (p_utf8char/function() n = n + 1 end)^0 -- slow
+ --
+ -- function utf.length(str)
+ -- n = 0
+ -- lpegmatch(utfcharcounter,str or "")
+ -- return n
+ -- end
+ --
+ -- -- alternative 3: 0.24 (native unicode.utf8.len: 0.047)
+
+ -- local n = 0
+ --
+ -- -- local utfcharcounter = lpeg.patterns.utfbom^-1 * P ( ( Cp() * (
+ -- -- patterns.utf8one ^1 * Cc(1)
+ -- -- + patterns.utf8two ^1 * Cc(2)
+ -- -- + patterns.utf8three^1 * Cc(3)
+ -- -- + patterns.utf8four ^1 * Cc(4) ) * Cp() / function(f,d,t) n = n + (t - f)/d end
+ -- -- )^0 ) -- just as many captures as below
+ --
+ -- -- local utfcharcounter = lpeg.patterns.utfbom^-1 * P ( (
+ -- -- (Cmt(patterns.utf8one ^1,function(_,_,s) n = n + #s return true end))
+ -- -- + (Cmt(patterns.utf8two ^1,function(_,_,s) n = n + #s/2 return true end))
+ -- -- + (Cmt(patterns.utf8three^1,function(_,_,s) n = n + #s/3 return true end))
+ -- -- + (Cmt(patterns.utf8four ^1,function(_,_,s) n = n + #s/4 return true end))
+ -- -- )^0 ) -- not interesting as it creates strings but sometimes faster
+ --
+ -- -- The best so far:
+ --
+ -- local utfcharcounter = utfbom^-1 * P ( (
+ -- Cp() * (patterns.utf8one )^1 * Cp() / function(f,t) n = n + t - f end
+ -- + Cp() * (patterns.utf8two )^1 * Cp() / function(f,t) n = n + (t - f)/2 end
+ -- + Cp() * (patterns.utf8three)^1 * Cp() / function(f,t) n = n + (t - f)/3 end
+ -- + Cp() * (patterns.utf8four )^1 * Cp() / function(f,t) n = n + (t - f)/4 end
+ -- )^0 )
+
+ -- function utf.len(str)
+ -- n = 0
+ -- lpegmatch(utfcharcounter,str or "")
+ -- return n
+ -- end
+
+ local n, f = 0, 1
+
+ local utfcharcounter = patterns.utfbom^-1 * Cmt (
+ Cc(1) * patterns.utf8one ^1
+ + Cc(2) * patterns.utf8two ^1
+ + Cc(3) * patterns.utf8three^1
+ + Cc(4) * patterns.utf8four ^1,
+ function(_,t,d) -- due to Cc no string captures, so faster
+ n = n + (t - f)/d
+ f = t
+ return true
+ end
+ )^0
- local utfcharcounter = patterns.utfbom^-1 * Cmt (
- Cc(1) * patterns.utf8one ^1
- + Cc(2) * patterns.utf8two ^1
- + Cc(3) * patterns.utf8three^1
- + Cc(4) * patterns.utf8four ^1,
- function(_,t,d) -- due to Cc no string captures, so faster
- n = n + (t - f)/d
- f = t
- return true
+ function utf.len(str)
+ n, f = 0, 1
+ lpegmatch(utfcharcounter,str or "")
+ return n
end
- )^0
- function utf.len(str)
- n, f = 0, 1
- lpegmatch(utfcharcounter,str or "")
- return n
- end
+ -- -- these are quite a bit slower:
- -- -- these are quite a bit slower:
+ -- utfcharcounter = utfbom^-1 * (Cmt(P(1) * R("\128\191")^0, function() n = n + 1 return true end))^0 -- 50+ times slower
+ -- utfcharcounter = utfbom^-1 * (Cmt(P(1), function() n = n + 1 return true end) * R("\128\191")^0)^0 -- 50- times slower
- -- utfcharcounter = utfbom^-1 * (Cmt(P(1) * R("\128\191")^0, function() n = n + 1 return true end))^0 -- 50+ times slower
- -- utfcharcounter = utfbom^-1 * (Cmt(P(1), function() n = n + 1 return true end) * R("\128\191")^0)^0 -- 50- times slower
+ end
end
@@ -1041,21 +1069,23 @@ end
local function little(b)
if b < 0x10000 then
- return char(b%256,b/256)
+ return char(b%256,rshift(b,8))
else
b = b - 0x10000
- local b1, b2 = b/1024 + 0xD800, b%1024 + 0xDC00
- return char(b1%256,b1/256,b2%256,b2/256)
+ local b1 = rshift(b,10) + 0xD800
+ local b2 = b%1024 + 0xDC00
+ return char(b1%256,rshift(b1,8),b2%256,rshift(b2,8))
end
end
local function big(b)
if b < 0x10000 then
- return char(b/256,b%256)
+ return char(rshift(b,8),b%256)
else
b = b - 0x10000
- local b1, b2 = b/1024 + 0xD800, b%1024 + 0xDC00
- return char(b1/256,b1%256,b2/256,b2%256)
+ local b1 = rshift(b,10) + 0xD800
+ local b2 = b%1024 + 0xDC00
+ return char(rshift(b1,8),b1%256,rshift(b2,8),b2%256)
end
end
@@ -1275,30 +1305,57 @@ end
--
-- local utf32 = table.setmetatableindex(function(t,k) local v = toutf32(k) t[k] = v return v end)
-local extract = bit32.extract
-local char = string.char
-
-function unicode.toutf32string(n)
- if n <= 0xFF then
- return
- char(n) ..
- "\000\000\000"
- elseif n <= 0xFFFF then
- return
- char(extract(n, 0,8)) ..
- char(extract(n, 8,8)) ..
- "\000\000"
- elseif n <= 0xFFFFFF then
- return
- char(extract(n, 0,8)) ..
- char(extract(n, 8,8)) ..
- char(extract(n,16,8)) ..
- "\000"
- else
- return
- char(extract(n, 0,8)) ..
- char(extract(n, 8,8)) ..
- char(extract(n,16,8)) ..
- char(extract(n,24,8))
+if bit32 then
+
+ local extract = bit32.extract
+ local char = string.char
+
+ function unicode.toutf32string(n)
+ if n <= 0xFF then
+ return
+ char(n) ..
+ "\000\000\000"
+ elseif n <= 0xFFFF then
+ return
+ char(extract(n, 0,8)) ..
+ char(extract(n, 8,8)) ..
+ "\000\000"
+ elseif n <= 0xFFFFFF then
+ return
+ char(extract(n, 0,8)) ..
+ char(extract(n, 8,8)) ..
+ char(extract(n,16,8)) ..
+ "\000"
+ else
+ return
+ char(extract(n, 0,8)) ..
+ char(extract(n, 8,8)) ..
+ char(extract(n,16,8)) ..
+ char(extract(n,24,8))
+ end
+ end
+
+end
+
+-- goodie:
+
+local len = utf.len
+local rep = string.rep
+
+function string.utfpadd(s,n,c)
+ if n and n ~= 0 then
+ local l = len(s)
+ if n > 0 then
+ local d = n - l
+ if d > 0 then
+ return rep(c or " ",d) .. s
+ end
+ else
+ local d = - n - l
+ if d > 0 then
+ return s .. rep(c or " ",d)
+ end
+ end
end
+ return s
end
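The new padder measures with utf.len, so multibyte characters count as one position; positive n pads at the left, negative n at the right, and strings that are already long enough come back untouched. A sketch, with the pad character taken from the optional third argument (a space when absent):

print(string.utfpadd("abc",  5))   -- "  abc"
print(string.utfpadd("abc", -5))   -- "abc  "
print(string.utfpadd("äöü",  5))   -- "  äöü", three characters even though six bytes
print(string.utfpadd("abcdef", 5)) -- "abcdef", nothing to pad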
diff --git a/tex/context/base/mkiv/l-url.lua b/tex/context/base/mkiv/l-url.lua
index b189ec5bb..6f834d778 100644
--- a/tex/context/base/mkiv/l-url.lua
+++ b/tex/context/base/mkiv/l-url.lua
@@ -8,9 +8,10 @@ if not modules then modules = { } end modules ['l-url'] = {
local char, format, byte = string.char, string.format, string.byte
local concat = table.concat
-local tonumber, type = tonumber, type
+local tonumber, type, next = tonumber, type, next
local P, C, R, S, Cs, Cc, Ct, Cf, Cg, V = lpeg.P, lpeg.C, lpeg.R, lpeg.S, lpeg.Cs, lpeg.Cc, lpeg.Ct, lpeg.Cf, lpeg.Cg, lpeg.V
local lpegmatch, lpegpatterns, replacer = lpeg.match, lpeg.patterns, lpeg.replacer
+local sortedhash = table.sortedhash
-- from wikipedia:
--
@@ -32,22 +33,61 @@ local lpegmatch, lpegpatterns, replacer = lpeg.match, lpeg.patterns, lpeg.replac
url = url or { }
local url = url
-local tochar = function(s) return char(tonumber(s,16)) end
+local unescapes = { }
+local escapes = { }
+
+setmetatable(unescapes, { __index = function(t,k)
+ local v = char(tonumber(k,16))
+ t[k] = v
+ return v
+end })
+
+setmetatable(escapes, { __index = function(t,k)
+ local v = format("%%%02X",byte(k))
+ t[k] = v
+ return v
+end })
+
+-- okay:
local colon = P(":")
local qmark = P("?")
local hash = P("#")
local slash = P("/")
+local atsign = P("@")
local percent = P("%")
local endofstring = P(-1)
-
local hexdigit = R("09","AF","af")
local plus = P("+")
local nothing = Cc("")
-local escapedchar = (percent * C(hexdigit * hexdigit)) / tochar
-local escaped = (plus / " ") + escapedchar -- so no loc://foo++.tex
-
-local noslash = P("/") / ""
+local okay = R("09","AZ","az") + S("-_.,:=+*~!'()@&$")
+
+local escapedchar = (percent * C(hexdigit * hexdigit)) / unescapes
+local unescapedchar = P(1) / escapes
+local escaped = (plus / " ") + escapedchar -- so no loc://foo++.tex
+local noslash = P("/") / ""
+local plustospace = P("+")/" "
+
+local decoder = Cs( (
+ plustospace
+ + escapedchar
+ + P("\r\n")/"\n"
+ + P(1)
+ )^0 )
+local encoder = Cs( (
+ R("09","AZ","az")^1
+ + S("-./_")^1
+ + P(" ")/"+"
+ + P("\n")/"\r\n"
+ + unescapedchar
+ )^0 )
+
+lpegpatterns.urldecoder = decoder
+lpegpatterns.urlencoder = encoder
+
+function url.decode (str) return str and lpegmatch(decoder, str) or str end
+function url.encode (str) return str and lpegmatch(encoder, str) or str end
+function url.unescape(str) return str and lpegmatch(unescaper,str) or str end
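The decoder and encoder follow the usual www form conventions: on decode a plus becomes a space and percent escapes are expanded, on encode only letters, digits and -./_ pass through while spaces become plusses and everything else is percent escaped. A round trip sketch:

print(url.decode("some+file%2Dname%20here"))  -- some file-name here
print(url.encode("some file-name here"))      -- some+file-name+here
print(url.decode(url.encode("a & b = c")))    -- a & b = c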
-- we assume schemes with more than 1 character (in order to avoid problems with windows disks)
-- we also assume that when we have a scheme, we also have an authority
@@ -73,17 +113,9 @@ local parser = Ct(validurl)
lpegpatterns.url = validurl
lpegpatterns.urlsplitter = parser
-local escapes = { }
-
-setmetatable(escapes, { __index = function(t,k)
- local v = format("%%%02X",byte(k))
- t[k] = v
- return v
-end })
-
-local escaper = Cs((R("09","AZ","az")^1 + P(" ")/"%%20" + S("-./_")^1 + P(1) / escapes)^0) -- space happens most
+local escaper = Cs((R("09","AZ","az")^1 + P(" ")/"%%20" + S("-./_:")^1 + P(1) / escapes)^0) -- space happens most
local unescaper = Cs((escapedchar + 1)^0)
-local getcleaner = Cs((P("+++") / "%%2B" + P("+") / "%%20" + P(1))^1)
+local getcleaner = Cs((P("+++")/"%%2B" + P("+")/"%%20" + P(1))^1)
lpegpatterns.urlunescaped = escapedchar
lpegpatterns.urlescaper = escaper
@@ -134,8 +166,8 @@ local backslashswapper = replacer("\\","/")
local equal = P("=")
local amp = P("&")
-local key = Cs(((escapedchar+1)-equal )^0)
-local value = Cs(((escapedchar+1)-amp -endofstring)^0)
+local key = Cs(((plustospace + escapedchar + 1) - equal )^0)
+local value = Cs(((plustospace + escapedchar + 1) - amp - endofstring)^0)
local splitquery = Cf ( Ct("") * P { "sequence",
sequence = V("pair") * (amp * V("pair"))^0,
@@ -144,6 +176,11 @@ local splitquery = Cf ( Ct("") * P { "sequence",
-- hasher
+local userpart = (1-atsign-colon)^1
+local serverpart = (1-colon)^1
+local splitauthority = ((Cs(userpart) * colon * Cs(userpart) + Cs(userpart) * Cc(nil)) * atsign + Cc(nil) * Cc(nil))
+ * Cs(serverpart) * (colon * (serverpart/tonumber) + Cc(nil))
+
local function hashed(str) -- not yet ok (/test?test)
if not str or str == "" then
return {
@@ -177,12 +214,21 @@ local function hashed(str) -- not yet ok (/test?test)
-- not always a filename but handy anyway
local authority = detailed[2]
local path = detailed[3]
- local filename = nil
+ local filename -- = nil
+ local username -- = nil
+ local password -- = nil
+ local host -- = nil
+ local port -- = nil
+ if authority ~= "" then
+ -- these can be invalid
+ username, password, host, port = lpegmatch(splitauthority,authority)
+ end
if authority == "" then
filename = path
elseif path == "" then
filename = ""
else
+ -- this one can be invalid
filename = authority .. "/" .. path
end
return {
@@ -195,6 +241,11 @@ local function hashed(str) -- not yet ok (/test?test)
original = str,
noscheme = false,
filename = filename,
+ --
+ host = host,
+ port = port,
+ -- username = username,
+ -- password = password,
}
end
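With the authority now split as well, a hashed url additionally carries host and port (username and password are parsed too, but those fields stay commented out above). A sketch of the resulting table, roughly:

local t = url.hashed("http://user:secret@example.org:8080/some/file.html#top")
-- t.scheme    : "http"
-- t.authority : "user:secret@example.org:8080"
-- t.host      : "example.org"
-- t.port      : 8080
-- t.fragment  : "top"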
@@ -236,24 +287,38 @@ function url.addscheme(str,scheme) -- no authority
end
function url.construct(hash) -- todo: we need to escape !
- local fullurl, f = { }, 0
- local scheme, authority, path, query, fragment = hash.scheme, hash.authority, hash.path, hash.query, hash.fragment
+ local result, r = { }, 0
+ local scheme = hash.scheme
+ local authority = hash.authority
+ local path = hash.path
+ local queries = hash.queries
+ local fragment = hash.fragment
if scheme and scheme ~= "" then
- f = f + 1 ; fullurl[f] = scheme .. "://"
+ r = r + 1 ; result[r] = lpegmatch(escaper,scheme)
+ r = r + 1 ; result[r] = "://"
end
if authority and authority ~= "" then
- f = f + 1 ; fullurl[f] = authority
+ r = r + 1 ; result[r] = lpegmatch(escaper,authority)
end
if path and path ~= "" then
- f = f + 1 ; fullurl[f] = "/" .. path
+ r = r + 1 ; result[r] = "/"
+ r = r + 1 ; result[r] = lpegmatch(escaper,path)
end
- if query and query ~= "" then
- f = f + 1 ; fullurl[f] = "?".. query
+ if queries then
+ local done = false
+ for k, v in sortedhash(queries) do
+ r = r + 1 ; result[r] = done and "&" or "?"
+ r = r + 1 ; result[r] = lpegmatch(escaper,k) -- is this escaped
+ r = r + 1 ; result[r] = "="
+ r = r + 1 ; result[r] = lpegmatch(escaper,v) -- is this escaped
+ done = true
+ end
end
if fragment and fragment ~= "" then
- f = f + 1 ; fullurl[f] = "#".. fragment
+ r = r + 1 ; result[r] = "#"
+ r = r + 1 ; result[r] = lpegmatch(escaper,fragment)
end
- return lpegmatch(escaper,concat(fullurl))
+ return concat(result)
end
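The rebuilt constructor escapes every component separately and serializes the queries table in sorted key order, so the output is stable; the colon in the authority survives because ":" was added to the escaper above. A sketch with the field names used in the function (queries replaces the old single query string):

print(url.construct {
    scheme    = "http",
    authority = "example.org:8080",
    path      = "some/file.html",
    queries   = { x = "1", lang = "en" },
    fragment  = "top",
})
-- http://example.org:8080/some/file.html?lang=en&x=1#top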
local pattern = Cs(slash^-1/"" * R("az","AZ") * ((S(":|")/":") + P(":")) * slash * P(1)^0)
diff --git a/tex/context/base/mkiv/lang-def.mkiv b/tex/context/base/mkiv/lang-def.mkiv
index 96bb88767..28245e591 100644
--- a/tex/context/base/mkiv/lang-def.mkiv
+++ b/tex/context/base/mkiv/lang-def.mkiv
@@ -381,6 +381,23 @@
% Anatolian Languages
+% Hebrew
+
+\installlanguage % same as arabic now .. someone needs to fix this
+ [\s!he]
+ [\c!spacing=\v!broad,
+ \c!leftsentence=\emdash,
+ \c!rightsentence=\emdash,
+ \c!leftsubsentence=\emdash,
+ \c!rightsubsentence=\emdash,
+ \c!leftquote=\upperleftsinglesixquote,
+ \c!rightquote=\upperrightsingleninequote,
+ \c!leftquotation=\upperleftdoublesixquote,
+ \c!rightquotation=\upperrightdoubleninequote,
+ \c!date={\v!day,\space,\v!month,{،\space},\v!year}]
+
+\installlanguage [\s!hebrew] [\s!he]
+
% Arabic Languages
\installlanguage
diff --git a/tex/context/base/mkiv/lang-dis.lua b/tex/context/base/mkiv/lang-dis.lua
index e2c0d220e..65a53a702 100644
--- a/tex/context/base/mkiv/lang-dis.lua
+++ b/tex/context/base/mkiv/lang-dis.lua
@@ -8,6 +8,7 @@ if not modules then modules = { } end modules ['lang-dis'] = {
local concat = table.concat
+local tex = tex
local nodes = nodes
local tasks = nodes.tasks
@@ -19,7 +20,6 @@ local setaction = tasks.setaction
local tonode = nuts.tonode
local tonut = nuts.tonut
-local getfield = nuts.getfield
local setfield = nuts.setfield
local getnext = nuts.getnext
local getprev = nuts.getprev
@@ -44,6 +44,8 @@ local traverse_id = nuts.traverse_id
local flush_list = nuts.flush_list
local flush_node = nuts.flush_node
+local new_disc = nuts.pool.disc
+
local nodecodes = nodes.nodecodes
local disccodes = nodes.disccodes
@@ -59,161 +61,11 @@ local a_visualize = attributes.private("visualizediscretionary")
local setattribute = tex.setattribute
local getlanguagedata = languages.getdata
+local prehyphenchar = lang.prehyphenchar
+local posthyphenchar = lang.posthyphenchar
local check_regular = true
-local expanders -- this will go away
-
--- the penalty has been determined by the mode (currently we force 1):
---
--- 0 : exhyphenpenalty
--- 1 : hyphenpenalty
--- 2 : automatichyphenpenalty
---
--- following a - : the pre and post chars are already appended and set
--- so we have pre=preex and post=postex .. however, the previous
--- hyphen is already injected ... downside: the font handler sees this
--- so this is another argument for doing a hyphenation pass in context
-
-if LUATEXVERSION < 1.005 then
-
- expanders = {
- [discretionary_code] = function(d,template)
- -- \discretionary
- return template
- end,
- [explicit_code] = function(d,template)
- -- \-
- local pre, post, replace = getdisc(d)
- local done = false
- if pre then
- local char = isglyph(pre)
- if char and char <= 0 then
- done = true
- flush_list(pre)
- pre = nil
- end
- end
- if post then
- local char = isglyph(post)
- if char and char <= 0 then
- done = true
- flush_list(post)
- post = nil
- end
- end
- if done then
- -- todo: take existing penalty
- setdisc(d,pre,post,replace,explicit_code,tex.exhyphenpenalty)
- else
- setsubtype(d,explicit_code)
- end
- return template
- end,
- [automatic_code] = function(d,template)
- local pre, post, replace = getdisc(d)
- if pre then
- -- we have a preex characters and want that one to replace the
- -- character in front which is the trigger
- if not template then
- -- can there be font kerns already?
- template = getprev(d)
- if template and getid(template) ~= glyph_code then
- template = getnext(d)
- if template and getid(template) ~= glyph_code then
- template = nil
- end
- end
- end
- if template then
- local pseudohead = getprev(template)
- if pseudohead then
- while template ~= d do
- pseudohead, template, removed = remove_node(pseudohead,template)
- -- free old replace ?
- replace = removed
- -- break ?
- end
- else
- -- can't happen
- end
- setdisc(d,pre,post,replace,automatic_code,tex.hyphenpenalty)
- else
- -- print("lone regular discretionary ignored")
- end
- else
- setdisc(d,pre,post,replace,automatic_code,tex.hyphenpenalty)
- end
- return template
- end,
- [regular_code] = function(d,template)
- if check_regular then
- -- simple
- if not template then
- -- can there be font kerns already?
- template = getprev(d)
- if template and getid(template) ~= glyph_code then
- template = getnext(d)
- if template and getid(template) ~= glyph_code then
- template = nil
- end
- end
- end
- if template then
- local language = template and getlang(template)
- local data = getlanguagedata(language)
- local prechar = data.prehyphenchar
- local postchar = data.posthyphenchar
- local pre, post, replace = getdisc(d) -- pre can be set
- local done = false
- if prechar and prechar > 0 then
- done = true
- pre = copy_node(template)
- setchar(pre,prechar)
- end
- if postchar and postchar > 0 then
- done = true
- post = copy_node(template)
- setchar(post,postchar)
- end
- if done then
- setdisc(d,pre,post,replace,regular_code,tex.hyphenpenalty)
- end
- else
- -- print("lone regular discretionary ignored")
- end
- return template
- end
- end,
- [disccodes.first] = function()
- -- forget about them
- end,
- [disccodes.second] = function()
- -- forget about them
- end,
- }
-
- function languages.expand(d,template,subtype)
- if not subtype then
- subtype = getsubtype(d)
- end
- if subtype ~= discretionary_code then
- return expanders[subtype](d,template)
- end
- end
-
-else
-
- function languages.expand()
- -- nothing to be fixed
- end
-
-end
-
-languages.expanders = expanders
-
--- -- -- -- --
-
local setlistcolor = nodes.tracers.colors.setlist
function languages.visualizediscretionaries(head)
@@ -252,55 +104,83 @@ interfaces.implement {
actions = languages.showdiscretionaries
}
-local toutf = nodes.listtoutf
+do
+
+ local toutf = nodes.listtoutf
+ local utfchar = utf.char
+ local f_disc = string.formatters["{%s}{%s}{%s}"]
+ local replace = lpeg.replacer( {
+ [utfchar(0x200C)] = "|",
+ [utfchar(0x200D)] = "|",
+ }, nil, true)
+
+ local function convert(list)
+ return list and replace(toutf(list)) or ""
+ end
+
+ function languages.serializediscretionary(d) -- will move to tracer
+ local pre, post, replace = getdisc(d)
+ return f_disc(convert(pre),convert(post),convert(replace))
+ end
-function languages.serializediscretionary(d) -- will move to tracer
- local pre, post, replace = getdisc(d)
- return string.formatters["{%s}{%s}{%s}"](
- pre and toutf(pre) or "",
- post and toutf(post) or "",
- replace and toutf(replace) or ""
- )
end
-- --
local wiped = 0
-local function wipe(head,delayed)
- local p, n = getboth(delayed)
- local _, _, h, _, _, t = getdisc(delayed,true)
- if p or n then
- if h then
- setlink(p,h)
- setlink(t,n)
- setfield(delayed,"replace")
- else
- setlink(p,n)
- end
+local flatten_discretionaries = node.flatten_discretionaries -- todo in nodes
+
+if flatten_discretionaries then
+
+ -- This is not that much faster than the lua variant simply because there is
+ -- seldom a replace list but it fits in the picture. See luatex-todo.w for the
+ -- code.
+
+ function languages.flatten(head)
+ local h, n = flatten_discretionaries(head)
+ wiped = wiped + n
+ return h, n > 0
end
- if head == delayed then
- head = h
+
+else
+
+ local function wipe(head,delayed)
+ local p, n = getboth(delayed)
+ local _, _, h, _, _, t = getdisc(delayed,true)
+ if p or n then
+ if h then
+ setlink(p,h)
+ setlink(t,n)
+ setfield(delayed,"replace")
+ else
+ setlink(p,n)
+ end
+ end
+ if head == delayed then
+ head = h
+ end
+ wiped = wiped + 1
+ flush_node(delayed)
+ return head
end
- wiped = wiped + 1
- flush_node(delayed)
- return head
-end
-function languages.flatten(head)
- local nuthead = tonut(head)
- local delayed = nil
- for d in traverse_id(disc_code,nuthead) do
+ function languages.flatten(head)
+ local nuthead = tonut(head)
+ local delayed = nil
+ for d in traverse_id(disc_code,nuthead) do
+ if delayed then
+ nuthead = wipe(nuthead,delayed)
+ end
+ delayed = d
+ end
if delayed then
- nuthead = wipe(nuthead,delayed)
+ return tonode(wipe(nuthead,delayed)), true
+ else
+ return head, false
end
- delayed = d
- end
- if delayed then
- return tonode(wipe(nuthead,delayed)), true
- else
- return head, false
end
+
end
function languages.nofflattened()
@@ -325,3 +205,28 @@ directives.register("hyphenator.flatten",function(v)
setaction("processors","nodes.handlers.flatten",v)
setaction("contributers","nodes.handlers.flattenline",v)
end)
+
+-- moved here:
+
+function languages.explicithyphen(template)
+ local pre, post
+ local disc = new_disc()
+ if template then
+ local langdata = getlanguagedata(getlang(template))
+ local instance = langdata and langdata.instance
+ if instance then
+ local prechr = prehyphenchar(instance)
+ local postchr = posthyphenchar(instance)
+ if prechr >= 0 then
+ pre = copy_node(template)
+ setchar(pre,prechr)
+ end
+ if postchr >= 0 then
+ post = copy_node(template)
+ setchar(post,postchr)
+ end
+ end
+ end
+ setdisc(disc,pre,post,nil,explicit_code,tex.exhyphenpenalty)
+ return disc
+end
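The helper builds an explicit discretionary whose pre and post characters are taken from the language of the template glyph, falling back to an empty disc when there is no template. A hedged sketch of the related lang library calls (the stock LuaTeX lang interface is assumed; the node juggling itself needs a glyph node as template and is not shown):

local l = lang.new()
lang.prehyphenchar(l, 0x2D)   -- the character kept before the break point
print(lang.prehyphenchar(l))  -- 45
-- languages.explicithyphen(template) copies the template glyph for the pre/post
-- parts, assigns these characters and returns a disc node carrying \exhyphenpenalty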
diff --git a/tex/context/base/mkiv/lang-exc.lua b/tex/context/base/mkiv/lang-exc.lua
new file mode 100644
index 000000000..bed328da8
--- /dev/null
+++ b/tex/context/base/mkiv/lang-exc.lua
@@ -0,0 +1,16 @@
+if not modules then modules = { } end modules ['lang-exc'] = {
+ version = 1.001,
+ comment = "companion to lang-ini.mkiv",
+ author = "Hans Hagen",
+ copyright = "ConTeXt Development Team",
+ license = "see context related readme files",
+ dataonly = true,
+}
+
+-- Here we add common exceptions. This file can grow. For now we keep it
+-- in the main base tree. We actually need a generic (shared) pattern or
+-- exception file I guess.
+
+return {
+ "lua-jit",
+}
diff --git a/tex/context/base/mkiv/lang-exp.lua b/tex/context/base/mkiv/lang-exp.lua
new file mode 100644
index 000000000..70fad48b0
--- /dev/null
+++ b/tex/context/base/mkiv/lang-exp.lua
@@ -0,0 +1,219 @@
+if not modules then modules = { } end modules ['lang-exp'] = {
+ version = 1.001,
+ comment = "companion to lang-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- This module contains snippets that were used before we expanded
+-- discretionaries in the engine, which makes way more sense. This
+-- code is not used any more.
+
+if true then
+ return
+end
+
+-- lang-dis.lua:
+
+local expanders -- this will go away
+
+-- the penalty has been determined by the mode (currently we force 1):
+--
+-- 0 : exhyphenpenalty
+-- 1 : hyphenpenalty
+-- 2 : automatichyphenpenalty
+--
+-- following a - : the pre and post chars are already appended and set
+-- so we have pre=preex and post=postex .. however, the previous
+-- hyphen is already injected ... downside: the font handler sees this
+-- so this is another argument for doing a hyphenation pass in context
+
+if LUATEXVERSION < 1.005 then -- not loaded any more
+
+ expanders = {
+ [discretionary_code] = function(d,template)
+ -- \discretionary
+ return template
+ end,
+ [explicit_code] = function(d,template)
+ -- \-
+ local pre, post, replace = getdisc(d)
+ local done = false
+ if pre then
+ local char = isglyph(pre)
+ if char and char <= 0 then
+ done = true
+ flush_list(pre)
+ pre = nil
+ end
+ end
+ if post then
+ local char = isglyph(post)
+ if char and char <= 0 then
+ done = true
+ flush_list(post)
+ post = nil
+ end
+ end
+ if done then
+ -- todo: take existing penalty
+ setdisc(d,pre,post,replace,explicit_code,tex.exhyphenpenalty)
+ else
+ setsubtype(d,explicit_code)
+ end
+ return template
+ end,
+ [automatic_code] = function(d,template)
+ local pre, post, replace = getdisc(d)
+ if pre then
+ -- we have a preex character and want that one to replace the
+ -- character in front which is the trigger
+ if not template then
+ -- can there be font kerns already?
+ template = getprev(d)
+ if template and getid(template) ~= glyph_code then
+ template = getnext(d)
+ if template and getid(template) ~= glyph_code then
+ template = nil
+ end
+ end
+ end
+ if template then
+ local pseudohead = getprev(template)
+ if pseudohead then
+ while template ~= d do
+ pseudohead, template, removed = remove_node(pseudohead,template)
+ -- free old replace ?
+ replace = removed
+ -- break ?
+ end
+ else
+ -- can't happen
+ end
+ setdisc(d,pre,post,replace,automatic_code,tex.hyphenpenalty)
+ else
+ -- print("lone regular discretionary ignored")
+ end
+ else
+ setdisc(d,pre,post,replace,automatic_code,tex.hyphenpenalty)
+ end
+ return template
+ end,
+ [regular_code] = function(d,template)
+ if check_regular then
+ -- simple
+ if not template then
+ -- can there be font kerns already?
+ template = getprev(d)
+ if template and getid(template) ~= glyph_code then
+ template = getnext(d)
+ if template and getid(template) ~= glyph_code then
+ template = nil
+ end
+ end
+ end
+ if template then
+ local language = template and getlang(template)
+ local data = getlanguagedata(language)
+ local prechar = data.prehyphenchar
+ local postchar = data.posthyphenchar
+ local pre, post, replace = getdisc(d) -- pre can be set
+ local done = false
+ if prechar and prechar > 0 then
+ done = true
+ pre = copy_node(template)
+ setchar(pre,prechar)
+ end
+ if postchar and postchar > 0 then
+ done = true
+ post = copy_node(template)
+ setchar(post,postchar)
+ end
+ if done then
+ setdisc(d,pre,post,replace,regular_code,tex.hyphenpenalty)
+ end
+ else
+ -- print("lone regular discretionary ignored")
+ end
+ return template
+ end
+ end,
+ [disccodes.first] = function()
+ -- forget about them
+ end,
+ [disccodes.second] = function()
+ -- forget about them
+ end,
+ }
+
+ function languages.expand(d,template,subtype)
+ if not subtype then
+ subtype = getsubtype(d)
+ end
+ if subtype ~= discretionary_code then
+ return expanders[subtype](d,template)
+ end
+ end
+
+else
+
+ function languages.expand()
+ -- nothing to be fixed
+ end
+
+end
+
+languages.expanders = expanders
+
+-- lang-hyp.lua:
+
+----- expanders = languages.expanders -- gone in 1.005
+----- expand_explicit = expanders and expanders[explicit_code]
+----- expand_automatic = expanders and expanders[automatic_code]
+
+if LUATEXVERSION < 1.005 then -- not loaded any more
+
+ expanded = function(head)
+ local done = hyphenate(head)
+ if done then
+ for d in traverse_id(disc_code,tonut(head)) do
+ local s = getsubtype(d)
+ if s ~= discretionary_code then
+ expanders[s](d,template)
+ done = true
+ end
+ end
+ end
+ return head, done
+ end
+
+end
+
+-- if id == disc_code then
+-- if expanded then
+-- -- pre 1.005
+-- local subtype = getsubtype(current)
+-- if subtype == discretionary_code then -- \discretionary
+-- size = 0
+-- elseif subtype == explicit_code then -- \- => only here
+-- -- automatic (-) : the old parser makes negative char entries
+-- size = 0
+-- expand_explicit(current)
+-- elseif subtype == automatic_code then -- - => only here
+-- -- automatic (-) : the old hyphenator turns an exhyphen into glyph+disc
+-- size = 0
+-- expand_automatic(current)
+-- else
+-- -- first : done by the hyphenator
+-- -- second : done by the hyphenator
+-- -- regular : done by the hyphenator
+-- size = 0
+-- end
+-- else
+-- size = 0
+-- end
+-- current = getnext(current)
+-- if hyphenonly then
+-- skipping = true
+-- end
diff --git a/tex/context/base/mkiv/lang-hyp.lua b/tex/context/base/mkiv/lang-hyp.lua
index b85295f19..f73a51b3a 100644
--- a/tex/context/base/mkiv/lang-hyp.lua
+++ b/tex/context/base/mkiv/lang-hyp.lua
@@ -79,6 +79,8 @@ local type, rawset, tonumber, next = type, rawset, tonumber, next
local P, R, S, Cg, Cf, Ct, Cc, C, Carg, Cs = lpeg.P, lpeg.R, lpeg.S, lpeg.Cg, lpeg.Cf, lpeg.Ct, lpeg.Cc, lpeg.C, lpeg.Carg, lpeg.Cs
local lpegmatch = lpeg.match
+local context = context
+
local concat = table.concat
local insert = table.insert
local remove = table.remove
@@ -686,10 +688,6 @@ if context then
local a_hyphenation = attributes.private("hyphenation")
- local expanders = languages.expanders -- gone in 1.005
- local expand_explicit = expanders and expanders[explicit_code]
- local expand_automatic = expanders and expanders[automatic_code]
-
local interwordpenalty = 5000
function traditional.loadpatterns(language)
@@ -1044,6 +1042,7 @@ featureset.hyphenonly = hyphenonly == v_yes
local extrachars = nil
local hyphenchars = nil
local language = nil
+ local lastfont = nil
local start = nil
local stop = nil
local word = { } -- we reuse this table
@@ -1406,6 +1405,8 @@ featureset.hyphenonly = hyphenonly == v_yes
local skipping = false
+ -- In "word word word." the sequences "word" and "." can be a different font!
+
while current and current ~= last do -- and current
local code, id = isglyph(current)
if code then
@@ -1413,7 +1414,8 @@ featureset.hyphenonly = hyphenonly == v_yes
current = getnext(current)
else
local lang = getlang(current)
- if lang ~= language then
+ local font = getfont(current)
+ if lang ~= language or font ~= lastfont then
if dictionary and size > charmin and leftmin + rightmin <= size then
-- only german has many words starting with an uppercase character
if categories[word[1]] == "lu" and getfield(start,"uchyph") < 0 then
@@ -1425,10 +1427,10 @@ featureset.hyphenonly = hyphenonly == v_yes
end
end
end
- language = lang
- if language > 0 then
+ lastfont = font
+ if language ~= lang and lang > 0 then
--
- dictionary = dictionaries[language]
+ dictionary = dictionaries[lang]
instance = dictionary.instance
characters = dictionary.characters
unicodes = dictionary.unicodes
@@ -1459,6 +1461,7 @@ featureset.hyphenonly = hyphenonly == v_yes
else
size = 0
end
+ language = lang
elseif language <= 0 then
--
elseif size > 0 then
@@ -1541,28 +1544,7 @@ featureset.hyphenonly = hyphenonly == v_yes
skipping = false
end
if id == disc_code then
- if expanded then
- -- pre 1.005
- local subtype = getsubtype(current)
- if subtype == discretionary_code then -- \discretionary
- size = 0
- elseif subtype == explicit_code then -- \- => only here
- -- automatic (-) : the old parser makes negative char entries
- size = 0
- expand_explicit(current)
- elseif subtype == automatic_code then -- - => only here
- -- automatic (-) : the old hyphenator turns an exhyphen into glyph+disc
- size = 0
- expand_automatic(current)
- else
- -- first : done by the hyphenator
- -- second : done by the hyphenator
- -- regular : done by the hyphenator
- size = 0
- end
- else
- size = 0
- end
+ size = 0
current = getnext(current)
if hyphenonly then
skipping = true
@@ -1632,7 +1614,6 @@ featureset.hyphenonly = hyphenonly == v_yes
-- local replaceaction = nodes.tasks.replaceaction -- no longer overload this way (too many local switches)
local hyphenate = lang.hyphenate
- local expanders = languages.expanders
local methods = { }
local usedmethod = false
local stack = { }
@@ -1642,29 +1623,6 @@ featureset.hyphenonly = hyphenonly == v_yes
return head, done
end
- local expanded = function (head)
- local done = hyphenate(head)
- return head, done
- end
-
- if LUATEXVERSION< 1.005 then
-
- expanded = function(head)
- local done = hyphenate(head)
- if done then
- for d in traverse_id(disc_code,tonut(head)) do
- local s = getsubtype(d)
- if s ~= discretionary_code then
- expanders[s](d,template)
- done = true
- end
- end
- end
- return head, done
- end
-
- end
-
local getcount = tex.getcount
hyphenators.methods = methods
@@ -1691,7 +1649,7 @@ featureset.hyphenonly = hyphenonly == v_yes
methods.tex = original
methods.original = original
- methods.expanded = expanded -- obsolete starting with 1.005
+ methods.expanded = original -- was expanded before 1.005
methods.traditional = languages.hyphenators.traditional.hyphenate
methods.none = false -- function(head) return head, false end
diff --git a/tex/context/base/mkiv/lang-hyp.mkiv b/tex/context/base/mkiv/lang-hyp.mkiv
index c111bc31a..feec82659 100644
--- a/tex/context/base/mkiv/lang-hyp.mkiv
+++ b/tex/context/base/mkiv/lang-hyp.mkiv
@@ -30,8 +30,8 @@
\writestatus{loading}{ConTeXt Language Macros / Initialization}
-\registerctxluafile{lang-dis}{1.001}
-\registerctxluafile{lang-hyp}{1.001}
+\registerctxluafile{lang-dis}{}
+\registerctxluafile{lang-hyp}{}
\unprotect
diff --git a/tex/context/base/mkiv/lang-ini.lua b/tex/context/base/mkiv/lang-ini.lua
index 6de951998..6d202ff5a 100644
--- a/tex/context/base/mkiv/lang-ini.lua
+++ b/tex/context/base/mkiv/lang-ini.lua
@@ -18,7 +18,7 @@ if not modules then modules = { } end modules ['lang-ini'] = {
-- todo: no foo:bar but foo(bar,...)
-local type, tonumber = type, tonumber
+local type, tonumber, next = type, tonumber, next
local utfbyte = utf.byte
local format, gsub, gmatch, find = string.format, string.gsub, string.gmatch, string.find
local concat, sortedkeys, sortedpairs, keys, insert = table.concat, table.sortedkeys, table.sortedpairs, table.keys, table.insert
@@ -270,6 +270,8 @@ local function unique(tag,requested,loaded)
end
end
+local shared = false
+
local function loaddefinitions(tag,specification)
statistics.starttiming(languages)
local data, instance = resolve(tag)
@@ -295,6 +297,19 @@ local function loaddefinitions(tag,specification)
local ok = false
local resources = data.resources or { }
data.resources = resources
+ if not shared then
+ local found = resolvers.findfile("lang-exc.lua")
+ if found then
+ shared = dofile(found)
+ if type(shared) == "table" then
+ shared = concat(shared," ")
+ else
+ shared = true
+ end
+ else
+ shared = true
+ end
+ end
for i=1,#definitions do
local definition = definitions[i]
if definition == "" then
@@ -344,13 +359,18 @@ local function loaddefinitions(tag,specification)
end
end
if #ploaded > 0 then
+ -- why not always clear
instance:clear_patterns()
instance:patterns(unique(tag,requested,ploaded))
end
if #eloaded > 0 then
+ -- why not always clear
instance:clear_hyphenation()
instance:hyphenation(concat(eloaded," "))
end
+ if type(shared) == "string" then
+ instance:hyphenation(shared)
+ end
return ok
elseif trace_patterns then
report_initialization("no definitions for language %a",tag)
diff --git a/tex/context/base/mkiv/lang-ini.mkiv b/tex/context/base/mkiv/lang-ini.mkiv
index 947422710..7c83ae38f 100644
--- a/tex/context/base/mkiv/lang-ini.mkiv
+++ b/tex/context/base/mkiv/lang-ini.mkiv
@@ -22,9 +22,9 @@
\writestatus{loading}{ConTeXt Language Macros / Initialization}
-\registerctxluafile{lang-ini}{1.001}
-\registerctxluafile{lang-def}{1.001}
-\registerctxluafile{lang-cnt}{1.001}
+\registerctxluafile{lang-ini}{}
+\registerctxluafile{lang-def}{}
+\registerctxluafile{lang-cnt}{}
\unprotect
@@ -220,8 +220,8 @@
%D named:
%D
%D \starttyping
-%D \f!languageprefix-identifier.\f!patternsextension
-%D \f!languageprefix-identifier.\f!hyhensextension
+%D lang-identifier.\f!patternsextension
+%D lang-identifier.\f!hyhensextension
%D \stoptyping
%D
%D The \type{spacing} variable specifies how the spaces after
@@ -539,6 +539,12 @@
\fi
\to \everylanguage
+% new
+
+\appendtoks
+ \usebidiparameter\languageparameter
+\to \everylanguage
+
% this will move to core-spa !
\appendtoks
@@ -559,12 +565,14 @@
% \setuplanguage[\s!default][\s!font=\v!auto]
% \setuplanguage[nl][\s!font=\v!auto]
-\ifdefined\feature \else \let\feature\gobbleoneargument \fi
-
\appendtoks
\edef\p_language_font{\languageparameter\s!font}%
- \ifx\p_language_font\v!auto
+ \ifx\p_language_font\empty\else
+ \ifx\p_language_font\v!auto
\doaddfeature\currentlanguage
+ \else
+ \doaddfeature\p_language_font
+ \fi
\fi
\to \everylanguage
diff --git a/tex/context/base/mkiv/lang-lab.mkiv b/tex/context/base/mkiv/lang-lab.mkiv
index 40845be4a..73637753d 100644
--- a/tex/context/base/mkiv/lang-lab.mkiv
+++ b/tex/context/base/mkiv/lang-lab.mkiv
@@ -13,8 +13,8 @@
\writestatus{loading}{ConTeXt Language Macros / Labels}
-\registerctxluafile{lang-lab}{1.001}
-\registerctxluafile{lang-txt}{1.001}
+\registerctxluafile{lang-lab}{}
+\registerctxluafile{lang-txt}{}
\unprotect
@@ -83,6 +83,8 @@
% hm, not interfaced
+\let\thetextprefix\empty
+
\unexpanded\def\lang_labels_define_class_indeed#1#2#3#4#5#6#7#8#9%
{\setuvalue{setup#1text}{\protecttextprefixes#2\def\currenttextprefixclass{#1}\dodoubleempty\lang_labels_text_prefix_setup}%
\setuvalue{preset#1text}{\protecttextprefixes1\def\currenttextprefixclass{#1}\dodoubleempty\lang_labels_text_prefix_setup}%
diff --git a/tex/context/base/mkiv/lang-mis.mkiv b/tex/context/base/mkiv/lang-mis.mkiv
index ada8f81ea..eb7dc7d80 100644
--- a/tex/context/base/mkiv/lang-mis.mkiv
+++ b/tex/context/base/mkiv/lang-mis.mkiv
@@ -139,16 +139,13 @@
%D \stop
%D \macros
-%D {compoundhyphen,
-%D beginofsubsentence,endofsubsentence}
+%D {compoundhyphen}
%D
%D Now let's go to the macros. First we define some variables. In the main \CONTEXT\
%D modules these can be tuned by a setup command. Watch the (maybe) better looking
%D compound hyphen.
\ifx\compoundhyphen \undefined \unexpanded\def\compoundhyphen {\hbox{-\kern-.25ex-}} \fi
-\ifx\beginofsubsentence\undefined \unexpanded\def\beginofsubsentence{\hbox{\emdash}} \fi
-\ifx\endofsubsentence \undefined \unexpanded\def\endofsubsentence {\hbox{\emdash}} \fi
%D The last two variables are needed for subsentences |<|like this one|>| which we
%D did not yet mention. We want to enable breaking but at the same time don't want
@@ -162,14 +159,17 @@
\unexpanded\def\permithyphenation{\ifhmode\prewordbreak\fi} % doesn't remove spaces
%D \macros
-%D {beginofsubsentencespacing,endofsubsentencespacing}
+%D {beginofsubsentence,endofsubsentence,
+%D beginofsubsentencespacing,endofsubsentencespacing}
%D
%D In the previous macros we provided two hooks which can be used to support nested
%D sub||sentences. In \CONTEXT\ these hooks are used to insert a small space when
%D needed.
-\ifx\beginofsubsentencespacing\undefined \let\beginofsubsentencespacing\relax \fi
-\ifx\endofsubsentencespacing \undefined \let\endofsubsentencespacing \relax \fi
+% \ifx\beginofsubsentence \undefined \unexpanded\def\beginofsubsentence{\hbox{\emdash}} \fi
+% \ifx\endofsubsentence \undefined \unexpanded\def\endofsubsentence {\hbox{\emdash}} \fi
+% \ifx\beginofsubsentencespacing\undefined \let\beginofsubsentencespacing\relax \fi
+% \ifx\endofsubsentencespacing \undefined \let\endofsubsentencespacing \relax \fi
%D The following piece of code is a torture test for compound handling. The \type
%D {\relax} before the \type {\ifmmode} is needed because of the alignment scanner
@@ -266,6 +266,9 @@
\futurelet\nextnext\next}%
\next}
+\let\discretionarytoken \relax
+\let\textmodediscretionary\relax
+
\unexpanded\def\activedododotextmodediscretionary#1#2%
{\edef\discretionarytoken{\detokenize{#2}}%
\def\textmodediscretionary{\handletextmodediscretionary{#1}}%
@@ -310,7 +313,7 @@
\prewordbreak\discretionary{\hbox{#2}}{}{\hbox{#2}}\allowbreak\postwordbreak
\fi\fi
\fi\fi
- \nextnextnext} % lookahead in commands
+ \nextnextnext}
%D \macros
%D {directdiscretionary}
@@ -409,14 +412,27 @@
{\prewordbreak\discretionary{\hbox{\normalstartimath|\normalstopimath}}{}{\hbox{\normalstartimath|\normalstopimath}}%
\allowbreak\postwordbreak} % bugged
+% \definetextmodediscretionary <
+% {\beginofsubsentence\prewordbreak\beginofsubsentencespacing}
+%
+% \definetextmodediscretionary >
+% {\endofsubsentencespacing\prewordbreak\endofsubsentence}
+%
+% \definetextmodediscretionary =
+% {\prewordbreak\midsentence\prewordbreak}
+
\definetextmodediscretionary <
- {\beginofsubsentence\prewordbreak\beginofsubsentencespacing}
+ {\beginofsubsentence\prewordbreak\beginofsubsentencespacing
+ \aftergroup\ignorespaces} % tricky, we need to go over the \nextnextnext
\definetextmodediscretionary >
- {\endofsubsentencespacing\prewordbreak\endofsubsentence}
+ {\removeunwantedspaces
+ \endofsubsentencespacing\prewordbreak\endofsubsentence}
\definetextmodediscretionary =
- {\prewordbreak\midsentence\prewordbreak} % {\prewordbreak\compoundhyphen}
+ {\removeunwantedspaces
+ \prewordbreak\midsentence\prewordbreak
+ \aftergroup\ignorespaces}
% french
diff --git a/tex/context/base/mkiv/lang-rep.lua b/tex/context/base/mkiv/lang-rep.lua
index 6fde353f7..172454fc2 100644
--- a/tex/context/base/mkiv/lang-rep.lua
+++ b/tex/context/base/mkiv/lang-rep.lua
@@ -15,7 +15,8 @@ if not modules then modules = { } end modules ['lang-rep'] = {
-- although (given experiences so far) we don't really need that. After all, each problem
-- is somewhat unique.
-local type, tonumber = type, tonumber
+local type, tonumber, next = type, tonumber, next
+local gmatch, gsub = string.gmatch, string.gsub
local utfbyte, utfsplit = utf.byte, utf.split
local P, C, U, Cc, Ct, Cs, lpegmatch = lpeg.P, lpeg.C, lpeg.patterns.utf8character, lpeg.Cc, lpeg.Ct, lpeg.Cs, lpeg.match
local find = string.find
@@ -51,8 +52,6 @@ local getid = nuts.getid
local getchar = nuts.getchar
local isglyph = nuts.isglyph
-local setfield = nuts.setfield
-local getfield = nuts.getfield
local setattr = nuts.setattr
local setlink = nuts.setlink
local setnext = nuts.setnext
@@ -78,6 +77,9 @@ local v_reset = interfaces.variables.reset
local implement = interfaces.implement
+local processors = typesetters.processors
+local splitprocessor = processors.split
+
local replacements = languages.replacements or { }
languages.replacements = replacements
@@ -103,7 +105,8 @@ lists[v_reset].attribute = unsetvalue -- so we discard 0
-- todo: glue kern attr
local function add(root,word,replacement)
- local replacement = lpegmatch(stripper,replacement) or replacement
+ local processor, replacement = splitprocessor(replacement,true) -- no check
+ replacement = lpegmatch(stripper,replacement) or replacement
local list = utfsplit(word,true)
local size = #list
for i=1,size do
@@ -112,16 +115,12 @@ local function add(root,word,replacement)
root[l] = { }
end
if i == size then
- -- local newlist = utfsplit(replacement,true)
- -- for i=1,#newlist do
- -- newlist[i] = utfbyte(newlist[i])
- -- end
local special = find(replacement,"{",1,true)
local newlist = lpegmatch(splitter,replacement)
- --
root[l].final = {
word = word,
replacement = replacement,
+ processor = processor,
oldlength = size,
newcodes = newlist,
special = special,
@@ -142,6 +141,26 @@ function replacements.add(category,word,replacement)
end
end
+-- local strip = lpeg.stripper("{}")
+
+function languages.replacements.addlist(category,list)
+ local root = lists[category].list
+ if type(list) == "string" then
+ for new in gmatch(list,"%S+") do
+ local old = gsub(new,"[{}]","")
+ -- local old = lpegmatch(strip,new)
+ add(root,old,new)
+ end
+ else
+ for i=1,#list do
+ local new = list[i]
+ local old = gsub(new,"[{}]","")
+ -- local old = lpegmatch(strip,new)
+ add(root,old,new)
+ end
+ end
+end
+
local function hit(a,head)
local tree = trees[a]
if tree then
@@ -151,7 +170,10 @@ local function hit(a,head)
local lastrun = false
local lastfinal = false
while current do
- local char = isglyph(current)
+ local char, id = isglyph(current)
+ -- if not char and id == glue_code then
+ -- char = " " -- if needed we can also deal with spaces and special nbsp and such
+ -- end
if char then
local newroot = root[char]
if not newroot then
@@ -194,20 +216,21 @@ local function tonodes(list,template)
return head
end
-
function replacements.handler(head)
head = tonut(head)
- local current = head
- local done = false
+ local current = head
+ local done = false
+ local overload = attributes.applyoverloads
while current do
if getid(current) == glyph_code then
local a = getattr(current,a_replacements)
if a then
local last, final = hit(a,current)
if last then
- local oldlength = final.oldlength
- local newcodes = final.newcodes
- local newlength = #newcodes
+ local precurrent = getprev(current) or head
+ local oldlength = final.oldlength
+ local newcodes = final.newcodes
+ local newlength = newcodes and #newcodes or 0
if trace_replacement then
report_replacement("replacing word %a by %a",final.word,final.replacement)
end
@@ -269,6 +292,9 @@ function replacements.handler(head)
i = i + 1
end
flush_list(list)
+ elseif newlength == 0 then
+ -- nothing gets replaced
+ current = getnext(last)
elseif oldlength == newlength then -- #old == #new
if final.word == final.replacement then
-- nothing to do but skip
@@ -298,10 +324,14 @@ function replacements.handler(head)
current = getnext(current)
end
end
+ if overload then
+ overload(final,getnext(precurrent),getprev(current))
+ end
done = true
end
end
end
+ -- we're one ahead now but we need to be because we handle words
current = getnext(current)
end
return tonode(head), done
@@ -338,3 +368,9 @@ implement {
actions = replacements.add,
arguments = { "string", "string", "string" }
}
+
+implement {
+ name = "addreplacementslist",
+ actions = replacements.addlist,
+ arguments = { "string", "string" }
+}
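A minimal sketch of how the new list entry point can be driven from the Lua end; the category name and the words are hypothetical, and each entry is matched as the word with its braces stripped while the braced form itself is stored as the replacement (the braces are what trigger the special code path in add):

-- hypothetical category and words
languages.replacements.addlist("sketch", "foo{b}ar qu{u}x")
-- the equivalent table form accepted by the same function
languages.replacements.addlist("sketch", { "foo{b}ar", "qu{u}x" })

At the TeX end the two-argument form of \replaceword in the lang-rep.mkiv hunk below reaches this function through \clf_addreplacementslist.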
diff --git a/tex/context/base/mkiv/lang-rep.mkiv b/tex/context/base/mkiv/lang-rep.mkiv
index 48e1fd44b..7864b029a 100644
--- a/tex/context/base/mkiv/lang-rep.mkiv
+++ b/tex/context/base/mkiv/lang-rep.mkiv
@@ -19,7 +19,7 @@
\unprotect
-\registerctxluafile{lang-rep}{1.001}
+\registerctxluafile{lang-rep}{}
\definesystemattribute[replacements][public,global]
@@ -57,12 +57,14 @@
{\attribute\replacementsattribute\attributeunsetvalue}
\unexpanded\def\replaceword
- {\dotripleargument\languages_replacements_replace}
+ {\dotripleempty\languages_replacements_replace}
\unexpanded\def\languages_replacements_replace[#1][#2][#3]%
{\ifthirdargument
\clf_addreplacements{#1}{#2}{#3}%
- \fi}
+ \else\ifsecondargument
+ \clf_addreplacementslist{#1}{#2}%
+ \fi\fi}
\appendtoks
\resetreplacements
diff --git a/tex/context/base/mkiv/lang-txt.lua b/tex/context/base/mkiv/lang-txt.lua
index b550ac2b4..b1f50bc80 100644
--- a/tex/context/base/mkiv/lang-txt.lua
+++ b/tex/context/base/mkiv/lang-txt.lua
@@ -252,7 +252,7 @@ data.labels={
["sk"]="P",
},
},
- ["acos"]={
+ ["arccos"]={
["labels"]={
["cz"]="arccos",
["en"]="arccos",
@@ -262,14 +262,14 @@ data.labels={
["sk"]="arccos",
},
},
- ["arccos"]={
+ ["arccosh"]={
["labels"]={
- ["cz"]="arccos",
- ["en"]="arccos",
- ["es"]="arc\\sixperemspace cos",
- ["hr"]="arc\\sixperemspace cos",
- ["pl"]="arc\\sixperemspace cos",
- ["sk"]="arccos",
+ ["cz"]="arccosh",
+ ["en"]="arccosh",
+ ["es"]="arc\\sixperemspace cosh",
+ ["hr"]="arc\\sixperemspace cosh",
+ ["pl"]="arc\\sixperemspace cosh",
+ ["sk"]="arccosh",
},
},
["arcctg"]={
@@ -292,6 +292,16 @@ data.labels={
["sk"]="arcsin",
},
},
+ ["arcsinh"]={
+ ["labels"]={
+ ["cz"]="arcsinh",
+ ["en"]="arcsinh",
+ ["es"]="arc\\sixperemspace senh",
+ ["hr"]="arc\\sixperemspace sinh",
+ ["pl"]="arc\\sixperemspace sinh",
+ ["sk"]="arcsinh",
+ },
+ },
["arctan"]={
["labels"]={
["cz"]="arctg",
@@ -302,13 +312,13 @@ data.labels={
["sk"]="arctg",
},
},
- ["arctg"]={
+ ["arctanh"]={
["labels"]={
- ["cz"]="arctg",
- ["en"]="arctan",
- ["es"]="arc\\sixperemspace tan",
- ["hr"]="arc\\sixperemspace tg",
- ["pl"]="arc\\sixperemspace tg",
+ ["cz"]="arctgh",
+ ["en"]="arctanh",
+ ["es"]="arc\\sixperemspace tanh",
+ ["hr"]="arc\\sixperemspace tgh",
+ ["pl"]="arc\\sixperemspace tgh",
["sk"]="arctg",
},
},
@@ -320,26 +330,6 @@ data.labels={
["sk"]="arg",
},
},
- ["asin"]={
- ["labels"]={
- ["cz"]="arcsin",
- ["en"]="arcsin",
- ["es"]="arc\\sixperemspace sen",
- ["hr"]="arc\\sixperemspace sin",
- ["pl"]="arc\\sixperemspace sin",
- ["sk"]="arcsin",
- },
- },
- ["atan"]={
- ["labels"]={
- ["cz"]="arctg",
- ["en"]="arctan",
- ["es"]="arc\\sixperemspace tan",
- ["hr"]="arc\\sixperemspace tg",
- ["pl"]="arc\\sixperemspace tg",
- ["sk"]="arctg",
- },
- },
["cos"]={
["labels"]={
["cz"]="cos",
@@ -619,16 +609,6 @@ data.labels={
["sk"]="tgh",
},
},
- ["tg"]={
- ["labels"]={
- ["cz"]="tg",
- ["en"]="tan",
- ["es"]="tan",
- ["hr"]="tg",
- ["pl"]="tg",
- ["sk"]="tg",
- },
- },
},
["texts"]={
["and"]={
@@ -3009,8 +2989,13 @@ data.labels={
local functions = data.labels.functions
-functions.asin = functions.arcsin
-functions.acos = functions.arccos
-functions.atan = functions.arctan
+functions.asin = functions.arcsin
+functions.acos = functions.arccos
+functions.atan = functions.arctan
+functions.asinh = functions.arcsinh
+functions.acosh = functions.arccosh
+functions.atanh = functions.arctanh
-table.save("e:/tmp/x.lua",data.labels)
+functions.tg = functions.tan
+functions.arctg = functions.arctan
+functions.arctgh = functions.arctanh
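The aliases appended at the bottom of the file only point at existing tables, so a lookup through an alias resolves to the very same label set; a self-contained sketch of that structure (the data table here is a stripped-down stand-in for the fragment above):

local data = { labels = { functions = {
    arcsinh = { labels = { en = "arcsinh", cz = "arcsinh" } },
    arctan  = { labels = { en = "arctan",  cz = "arctg"   } },
} } }

local functions = data.labels.functions

functions.asinh = functions.arcsinh        -- alias shares the table, no copy
functions.arctg = functions.arctan

print(functions.asinh.labels.en)           -- arcsinh
print(functions.arctg == functions.arctan) -- true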
diff --git a/tex/context/base/mkiv/lang-url.lua b/tex/context/base/mkiv/lang-url.lua
index 93c3c02a8..651c6bd52 100644
--- a/tex/context/base/mkiv/lang-url.lua
+++ b/tex/context/base/mkiv/lang-url.lua
@@ -8,6 +8,7 @@ if not modules then modules = { } end modules ['lang-url'] = {
local utfcharacters, utfvalues, utfbyte, utfchar = utf.characters, utf.values, utf.byte, utf.char
local min, max = math.min, math.max
+local concat = table.concat
local context = context
@@ -80,13 +81,67 @@ directives.register("hyphenators.urls.packslashes",function(v)
urls.packslashes = v
end)
-local ctx_a = context.a
-local ctx_b = context.b
-local ctx_d = context.d
-local ctx_c = context.c
-local ctx_l = context.l
-local ctx_C = context.C
-local ctx_L = context.L
+-- local ctx_a = context.a
+-- local ctx_b = context.b
+-- local ctx_d = context.d
+-- local ctx_c = context.c
+-- local ctx_l = context.l
+-- local ctx_C = context.C
+-- local ctx_L = context.L
+
+-- local function action(hyphenatedurl,str,left,right,disc)
+-- --
+-- left = max( left or urls.lefthyphenmin, 2)
+-- right = min(#str-(right or urls.righthyphenmin)+2,#str)
+-- disc = disc or urls.discretionary
+-- --
+-- local word = nil
+-- local prev = nil
+-- local pack = urls.packslashes
+-- local length = 0
+-- --
+-- for char in utfcharacters(str) do
+-- length = length + 1
+-- char = mapping[char] or char
+-- local b = utfbyte(char)
+-- if prev == char and prev == "/" then
+-- ctx_c(b)
+-- elseif char == disc then
+-- ctx_d()
+-- else
+-- if prev == "/" then
+-- ctx_d()
+-- end
+-- local how = characters[char]
+-- if how == v_before then
+-- word = false
+-- ctx_b(b)
+-- elseif how == v_after then
+-- word = false
+-- ctx_a(b)
+-- else
+-- local letter = is_letter[char]
+-- if length <= left or length >= right then
+-- if word and letter then
+-- ctx_L(b)
+-- else
+-- ctx_C(b)
+-- end
+-- elseif word and letter then
+-- ctx_l(b)
+-- else
+-- ctx_c(b)
+-- end
+-- word = letter
+-- end
+-- end
+-- if pack then
+-- prev = char
+-- else
+-- prev = nil
+-- end
+-- end
+-- end
local function action(hyphenatedurl,str,left,right,disc)
--
@@ -95,48 +150,51 @@ local function action(hyphenatedurl,str,left,right,disc)
disc = disc or urls.discretionary
--
local word = nil
- local prev = nil
local pack = urls.packslashes
local length = 0
- --
- for char in utfcharacters(str) do
- length = length + 1
- char = mapping[char] or char
- if prev == char and prev == "/" then
- ctx_c(utfbyte(char))
- elseif char == disc then
- ctx_d()
+ local list = utf.split(str)
+
+ for i=1,#list do
+ local what = nil
+ local dodi = false
+ local char = list[i]
+ length = length + 1
+ char = mapping[char] or char
+ if char == disc then
+ dodi = true
+ elseif pack and char == "/" and list[i+1] == "/" then
+ what = "c"
else
- if prev == "/" then
- ctx_d()
- end
local how = characters[char]
if how == v_before then
- word = false
- ctx_b(utfbyte(char))
+ what = "b"
elseif how == v_after then
word = false
- ctx_a(utfbyte(char))
+ what = "a"
else
local letter = is_letter[char]
if length <= left or length >= right then
if word and letter then
- ctx_L(utfbyte(char))
+ what = "L"
else
- ctx_C(utfbyte(char))
+ what = "C"
end
elseif word and letter then
- ctx_l(utfbyte(char))
+ what = "l"
else
- ctx_c(utfbyte(char))
+ what = "c"
end
word = letter
end
end
- if pack then
- prev = char
+ if dodi then
+ list[i] = "\\d"
+ else
+ list[i] = "\\" .. what .. "{" .. utfbyte(char) .. "}"
end
end
+ list = concat(list)
+ context(list)
end
-- urls.action = function(_,...) action(...) end -- sort of obsolete
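The rewritten action no longer issues one context call per character but first collects the per-character control sequences in a table and flushes them as a single string; a stripped-down, self-contained sketch of that idea, assuming the ConTeXt utf helpers used in the module itself and reducing the classification to a crude letter test:

-- illustration only: the real action also honours the before/after character
-- classes, lefthyphenmin/righthyphenmin, the discretionary character and
-- slash packing
local function tokenize(str)
    local list = utf.split(str)
    for i=1,#list do
        local char = list[i]
        local what = char:find("%a") and "l" or "c"   -- crude letter test (assumption)
        list[i] = "\\" .. what .. "{" .. utf.byte(char) .. "}"
    end
    return table.concat(list)
end

-- tokenize("a/b") gives \l{97}\c{47}\l{98}, flushed with one context(...) call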
diff --git a/tex/context/base/mkiv/lang-url.mkiv b/tex/context/base/mkiv/lang-url.mkiv
index 833a583f9..6afef63ce 100644
--- a/tex/context/base/mkiv/lang-url.mkiv
+++ b/tex/context/base/mkiv/lang-url.mkiv
@@ -11,7 +11,7 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-\registerctxluafile{lang-url}{1.001}
+\registerctxluafile{lang-url}{}
\unprotect
diff --git a/tex/context/base/mkiv/lang-wrd.lua b/tex/context/base/mkiv/lang-wrd.lua
index 8b6e48401..b00375cc0 100644
--- a/tex/context/base/mkiv/lang-wrd.lua
+++ b/tex/context/base/mkiv/lang-wrd.lua
@@ -6,6 +6,7 @@ if not modules then modules = { } end modules ['lang-wrd'] = {
license = "see context related readme files"
}
+local next, tonumber = next, tonumber
local lower = string.lower
local utfchar = utf.char
local concat, setmetatableindex = table.concat, table.setmetatableindex
@@ -32,17 +33,18 @@ local registered = languages.registered
local nuts = nodes.nuts
local tonut = nuts.tonut
-local getfield = nuts.getfield
+----- getfield = nuts.getfield
local getnext = nuts.getnext
local getid = nuts.getid
-local getsubtype = nuts.getsubtype
+----- getsubtype = nuts.getsubtype
local getchar = nuts.getchar
local setattr = nuts.setattr
+----- getattr = nuts.getattr
local getlang = nuts.getlang
-local isglyph = nuts.isglyph
+local ischar = nuts.ischar
local traverse_nodes = nuts.traverse
-local traverse_ids = nuts.traverse_id
+----- traverse_ids = nuts.traverse_id
local wordsdata = words.data
local chardata = characters.data
@@ -51,13 +53,14 @@ local enableaction = nodes.tasks.enableaction
local unsetvalue = attributes.unsetvalue
local nodecodes = nodes.nodecodes
-local kerncodes = nodes.kerncodes
+----- kerncodes = nodes.kerncodes
local glyph_code = nodecodes.glyph
-local disc_code = nodecodes.disc
-local kern_code = nodecodes.kern
+----- disc_code = nodecodes.disc
+----- kern_code = nodecodes.kern
+
+----- fontkern_code = kerncodes.fontkern
-local kerning_code = kerncodes.kerning
local lowerchar = characters.lower
local a_color = attributes.private('color')
@@ -160,8 +163,8 @@ local function mark_words(head,whenfound) -- can be optimized and shared
-- we haven't done the fonts yet so we have characters (otherwise
-- we'd have to use the tounicodes)
while current do
- local code, id = isglyph(current)
- if code then
+ local code, id = ischar(current) -- not isglyph because otherwise we can run into
+ if code then -- processed streams (\about[foo] does that)
local a = getlang(current)
if a then
if a ~= language then
@@ -183,21 +186,29 @@ local function mark_words(head,whenfound) -- can be optimized and shared
elseif s > 0 then
action()
end
- elseif id == disc_code then -- take the replace
- if n > 0 then
- local r = getfield(current,"replace")
- if r then
- for current in traverse_ids(glyph_code,r) do
- local code = getchar(current)
- n = n + 1
- nds[n] = current
- s = s + 1
- str[s] = utfchar(code)
- end
- end
- end
- elseif id == kern_code and getsubtype(current) == kerning_code and s > 0 then
- -- ok
+ -- elseif id == disc_code then
+ -- -- take the replace .. we kick in before we hyphenate so we're
+ -- -- not yet seeing many discs and we only handle explicit ones
+ -- -- in fact we could as well decide to ignore words with a disc
+ -- -- because we then have a compound word
+ -- if n > 0 then
+ -- local r = getfield(current,"replace")
+ -- if r then
+ -- -- also disc itself
+ -- n = n + 1
+ -- nds[n] = current
+ -- --
+ -- for current in traverse_ids(glyph_code,r) do
+ -- local code = getchar(current)
+ -- n = n + 1
+ -- nds[n] = current
+ -- s = s + 1
+ -- str[s] = utfchar(code)
+ -- end
+ -- end
+ -- end
+ -- elseif id == kern_code and getsubtype(current) == fontkern_code and s > 0 then
+ -- -- ok
elseif s > 0 then
action()
end
diff --git a/tex/context/base/mkiv/lang-wrd.mkiv b/tex/context/base/mkiv/lang-wrd.mkiv
index 8c544773b..75eec48b1 100644
--- a/tex/context/base/mkiv/lang-wrd.mkiv
+++ b/tex/context/base/mkiv/lang-wrd.mkiv
@@ -15,7 +15,7 @@
\writestatus{loading}{ConTeXt Language Macros / Checking}
-\registerctxluafile{lang-wrd}{1.001}
+\registerctxluafile{lang-wrd}{}
\unprotect
diff --git a/tex/context/base/mkiv/layo-ini.mkiv b/tex/context/base/mkiv/layo-ini.mkiv
index 9097265c9..fc0286fdd 100644
--- a/tex/context/base/mkiv/layo-ini.mkiv
+++ b/tex/context/base/mkiv/layo-ini.mkiv
@@ -26,6 +26,6 @@
\ifdefined\pagenoshift \else \newcount \pagenoshift \fi
\ifdefined\realpageno \else \newcount \realpageno \fi
-\registerctxluafile{layo-ini}{1.001}
+\registerctxluafile{layo-ini}{}
\protect \endinput
diff --git a/tex/context/base/mkiv/lpdf-ano.lua b/tex/context/base/mkiv/lpdf-ano.lua
index e89bda12b..01f015b72 100644
--- a/tex/context/base/mkiv/lpdf-ano.lua
+++ b/tex/context/base/mkiv/lpdf-ano.lua
@@ -13,7 +13,7 @@ if not modules then modules = { } end modules ['lpdf-ano'] = {
-- internal references are indicated by a number (and turned into )
-- we only flush internal destinations that are referred
-local next, tostring, tonumber, rawget = next, tostring, tonumber, rawget
+local next, tostring, tonumber, rawget, type = next, tostring, tonumber, rawget, type
local rep, format, find = string.rep, string.format, string.find
local min = math.min
local lpegmatch = lpeg.match
@@ -28,9 +28,9 @@ local trace_bookmarks = false trackers.register("references.bookmarks",
local log_destinations = false directives.register("destinations.log", function(v) log_destinations = v end)
local untex_urls = true directives.register("references.untexurls", function(v) untex_urls = v end)
-local report_reference = logs.reporter("backend","references")
-local report_destination = logs.reporter("backend","destinations")
-local report_bookmark = logs.reporter("backend","bookmarks")
+local report_references = logs.reporter("backend","references")
+local report_destinations = logs.reporter("backend","destinations")
+local report_bookmarks = logs.reporter("backend","bookmarks")
local variables = interfaces.variables
local v_auto = variables.auto
@@ -160,7 +160,7 @@ local pagedestinations = setmetatableindex(function(t,k)
end
local v = rawget(t,k)
if v then
- -- report_reference("page number expected, got %s: %a",type(k),k)
+ -- report_references("page number expected, got %s: %a",type(k),k)
return v
end
local v = k > 0 and pdfarray {
@@ -197,7 +197,7 @@ local destinations = { } -- to be used soon
local function pdfregisterdestination(name,reference)
local d = destinations[name]
if d then
- report_destination("ignoring duplicate destination %a with reference %a",name,reference)
+ report_destinations("ignoring duplicate destination %a with reference %a",name,reference)
else
destinations[name] = reference
end
@@ -207,25 +207,16 @@ lpdf.registerdestination = pdfregisterdestination
local maxslice = 32 -- could be made configurable ... 64 is also ok
-luatex.registerstopactions(function()
+logs.registerfinalactions(function()
if log_destinations and next(destinations) then
- local logsnewline = logs.newline
- local log_destinations = logs.reporter("system","references")
- local log_destination = logs.reporter("destination")
- logs.pushtarget("logfile")
- logsnewline()
- log_destinations("start used destinations")
- logsnewline()
+ local report = logs.startfilelogging("references","used destinations")
local n = 0
for destination, pagenumber in table.sortedhash(destinations) do
- log_destination("% 4i : %-5s : %s",pagenumber,usedviews[destination] or defaultview,destination)
+ report("% 4i : %-5s : %s",pagenumber,usedviews[destination] or defaultview,destination)
n = n + 1
end
- logsnewline()
- log_destinations("stop used destinations")
- logsnewline()
- logs.poptarget()
- report_destination("%s destinations saved in log file",n)
+ logs.stopfilelogging()
+ report_destinations("%s destinations saved in log file",n)
end
end)
@@ -433,7 +424,7 @@ function nodeinjections.destination(width,height,depth,names,view)
-- todo check if begin end node / was comment
view = view and mapping[view] or defaultview
if trace_destinations then
- report_destination("width %p, height %p, depth %p, names %|t, view %a",width,height,depth,names,view)
+ report_destinations("width %p, height %p, depth %p, names %|t, view %a",width,height,depth,names,view)
end
local method = references.innermethod
local noview = view == defaultview
@@ -567,13 +558,13 @@ local function pdffilelink(filename,destination,page,actions)
return false
end
filename = file.addsuffix(filename,"pdf")
- if not destination or destination == "" then
+ if (not destination or destination == "") or (references.outermethod == v_page) then
destination = pdfarray { (page or 0) - 1, pdf_fit }
end
return pdfdictionary {
S = pdf_gotor, -- can also be pdf_launch
F = filename,
- D = destination or defaultdestination, -- D is mandate
+ D = destination or defaultdestination,
NewWindow = actions.newwindow and true or nil,
}
end
@@ -674,7 +665,7 @@ end
-- local function reference(width,height,depth,prerolled) -- keep this one
-- if prerolled then
-- if trace_references then
--- report_reference("width %p, height %p, depth %p, prerolled %a",width,height,depth,prerolled)
+-- report_references("width %p, height %p, depth %p, prerolled %a",width,height,depth,prerolled)
-- end
-- return pdfannotation_node(width,height,depth,prerolled)
-- end
@@ -715,6 +706,9 @@ local function finishreference(width,height,depth,prerolled) -- %0.2f looks okay
end
local function finishannotation(width,height,depth,prerolled,r)
+ if type(prerolled) == "function" then
+ prerolled = prerolled()
+ end
local annot = f_annot(prerolled,pdfrectangle(width,height,depth))
if r then
pdfdelayedobject(annot,r)
@@ -728,7 +722,7 @@ end
function nodeinjections.reference(width,height,depth,prerolled)
if prerolled then
if trace_references then
- report_reference("link: width %p, height %p, depth %p, prerolled %a",width,height,depth,prerolled)
+ report_references("link: width %p, height %p, depth %p, prerolled %a",width,height,depth,prerolled)
end
return new_latelua(function() finishreference(width,height,depth,prerolled) end)
end
@@ -737,7 +731,8 @@ end
function nodeinjections.annotation(width,height,depth,prerolled,r)
if prerolled then
if trace_references then
- report_reference("special: width %p, height %p, depth %p, prerolled %a",width,height,depth,prerolled)
+ report_references("special: width %p, height %p, depth %p, prerolled %a",width,height,depth,
+ type(prerolled) == "string" and prerolled or "-")
end
return new_latelua(function() finishannotation(width,height,depth,prerolled,r or false) end)
end
@@ -822,7 +817,7 @@ runners["inner"] = function(var,actions)
end
runners["inner with arguments"] = function(var,actions)
- report_reference("todo: inner with arguments")
+ report_references("todo: inner with arguments")
return false
end
@@ -845,7 +840,7 @@ runners["special outer with operation"] = function(var,actions)
end
runners["special outer"] = function(var,actions)
- report_reference("todo: special outer")
+ report_references("todo: special outer")
return false
end
@@ -855,22 +850,22 @@ runners["special"] = function(var,actions)
end
runners["outer with inner with arguments"] = function(var,actions)
- report_reference("todo: outer with inner with arguments")
+ report_references("todo: outer with inner with arguments")
return false
end
runners["outer with special and operation and arguments"] = function(var,actions)
- report_reference("todo: outer with special and operation and arguments")
+ report_references("todo: outer with special and operation and arguments")
return false
end
runners["outer with special"] = function(var,actions)
- report_reference("todo: outer with special")
+ report_references("todo: outer with special")
return false
end
runners["outer with special and operation"] = function(var,actions)
- report_reference("todo: outer with special and operation")
+ report_references("todo: outer with special and operation")
return false
end
@@ -882,7 +877,7 @@ function specials.internal(var,actions) -- better resolve in strc-ref
local v = i and references.internals[i]
if not v then
-- error
- report_reference("no internal reference %a",i or "")
+ report_references("no internal reference %a",i or "")
else
flaginternals[i] = true
return pdflinkinternal(i,v.references.realpage)
@@ -1182,17 +1177,17 @@ local function build(levels,start,parent,method,nested)
if entry then
pdfflushobject(child,entry)
else
- report_bookmark("error 1")
+ report_bookmarks("error 1")
end
return i, n, first, last
else
- report_bookmark("confusing level change at level %a around %a",level,title)
+ report_bookmarks("confusing level change at level %a around %a",level,title)
startlevel = level
end
end
if level == startlevel then
if trace_bookmarks then
- report_bookmark("%3i %w%s %s",realpage,(level-1)*2,(opened and "+") or "-",title)
+ report_bookmarks("%3i %w%s %s",realpage,(level-1)*2,(opened and "+") or "-",title)
end
local prev = child
child = pdfreserveobject()
@@ -1231,7 +1226,7 @@ local function build(levels,start,parent,method,nested)
entry.Last = pdfreference(l)
end
else
- report_bookmark("error 2")
+ report_bookmarks("error 2")
end
else
-- missing intermediate level but ok
@@ -1244,7 +1239,7 @@ local function build(levels,start,parent,method,nested)
end
pdfflushobject(child,entry)
else
- report_bookmark("error 3")
+ report_bookmarks("error 3")
end
return i, n, first, last
end
diff --git a/tex/context/base/mkiv/lpdf-col.lua b/tex/context/base/mkiv/lpdf-col.lua
index b5973ba88..d1a1af97d 100644
--- a/tex/context/base/mkiv/lpdf-col.lua
+++ b/tex/context/base/mkiv/lpdf-col.lua
@@ -16,15 +16,14 @@ local formatters = string.formatters
local backends, lpdf, nodes = backends, lpdf, nodes
local allocate = utilities.storage.allocate
-local formatters = string.formatters
local nodeinjections = backends.pdf.nodeinjections
local codeinjections = backends.pdf.codeinjections
local registrations = backends.pdf.registrations
-local nodepool = nodes.pool
+local nodepool = nodes.nuts.pool
local register = nodepool.register
-local pdfliteral = nodepool.pdfliteral
+local pdfpageliteral = nodepool.pdfpageliteral
local pdfconstant = lpdf.constant
local pdfdictionary = lpdf.dictionary
@@ -42,13 +41,20 @@ local adddocumentcolorspace = lpdf.adddocumentcolorspace
local adddocumentextgstate = lpdf.adddocumentextgstate
local colors = attributes.colors
-local transparencies = attributes.transparencies
-local registertransparancy = transparencies.register
local registercolor = colors.register
local colorsvalue = colors.value
-local transparenciesvalue = transparencies.value
local forcedmodel = colors.forcedmodel
local getpagecolormodel = colors.getpagecolormodel
+local colortoattributes = colors.toattributes
+
+local transparencies = attributes.transparencies
+local registertransparancy = transparencies.register
+local transparenciesvalue = transparencies.value
+local transparencytoattribute = transparencies.toattribute
+
+local unsetvalue = attributes.unsetvalue
+
+local setmetatableindex = table.setmetatableindex
local c_transparency = pdfconstant("Transparency")
@@ -57,7 +63,7 @@ local f_rgb = formatters["%.3F %.3F %.3F rg %.3F %.3F %.3F RG"]
local f_cmyk = formatters["%.3F %.3F %.3F %.3F k %.3F %.3F %.3F %.3F K"]
local f_spot = formatters["/%s cs /%s CS %s SCN %s scn"]
local f_tr = formatters["Tr%s"]
-local f_cm = formatters["q %F %F %F %F %F %F cm"]
+local f_cm = formatters["q %.6F %.6F %.6F %.6F %.6F %.6F cm"]
local f_effect = formatters["%s Tc %s w %s Tr"]
local f_tr_gs = formatters["/Tr%s gs"]
local f_num_1 = tostring
@@ -81,7 +87,7 @@ local transparencygroups = { }
lpdf.colorspaceconstants = colorspaceconstants
lpdf.transparencygroups = transparencygroups
-table.setmetatableindex(transparencygroups, function(transparencygroups,colormodel)
+setmetatableindex(transparencygroups, function(transparencygroups,colormodel)
local cs = colorspaceconstants[colormodel]
if cs then
local d = pdfdictionary {
@@ -116,26 +122,26 @@ lpdf.registerpagefinalizer(addpagegroup,3,"pagegroup")
-- color injection
function nodeinjections.rgbcolor(r,g,b)
- return register(pdfliteral(f_rgb(r,g,b,r,g,b)))
+ return register(pdfpageliteral(f_rgb(r,g,b,r,g,b)))
end
function nodeinjections.cmykcolor(c,m,y,k)
- return register(pdfliteral(f_cmyk(c,m,y,k,c,m,y,k)))
+ return register(pdfpageliteral(f_cmyk(c,m,y,k,c,m,y,k)))
end
function nodeinjections.graycolor(s) -- caching 0/1 does not pay off
- return register(pdfliteral(f_gray(s,s)))
+ return register(pdfpageliteral(f_gray(s,s)))
end
function nodeinjections.spotcolor(n,f,d,p)
if type(p) == "string" then
p = gsub(p,","," ") -- brr misuse of spot
end
- return register(pdfliteral(f_spot(n,n,p,p)))
+ return register(pdfpageliteral(f_spot(n,n,p,p)))
end
function nodeinjections.transparency(n)
- return register(pdfliteral(f_tr_gs(n)))
+ return register(pdfpageliteral(f_tr_gs(n)))
end
-- a bit weird but let's keep it here for a while
@@ -154,7 +160,7 @@ function nodeinjections.effect(effect,stretch,rulethickness)
-- always, no zero test (removed)
rulethickness = bp * rulethickness
effect = effects[effect] or effects['normal']
- return register(pdfliteral(f_effect(stretch,rulethickness,effect))) -- watch order
+ return register(pdfpageliteral(f_effect(stretch,rulethickness,effect))) -- watch order
end
-- spot- and indexcolors
@@ -701,31 +707,113 @@ function lpdf.finishtransparencycode()
end
end
--- this will move to lpdf-spe.lua
+-- this will move to lpdf-spe.lua and then we can also add a metatable with
+-- normal context colors
+
+do
+
+ local pdfcolor = lpdf.color
+ local pdftransparency = lpdf.transparency
+
+ local f_slant = formatters["q 1 0 %.6F 1 0 0 cm"]
+
+ -- local fillcolors = {
+ -- red = { "pdf", "origin", "1 0 0 rg" },
+ -- green = { "pdf", "origin", "0 1 0 rg" },
+ -- blue = { "pdf", "origin", "0 0 1 rg" },
+ -- gray = { "pdf", "origin", ".5 g" },
+ -- black = { "pdf", "origin", "0 g" },
+ -- palered = { "pdf", "origin", "1 .75 .75 rg" },
+ -- palegreen = { "pdf", "origin", ".75 1 .75 rg" },
+ -- paleblue = { "pdf", "origin", ".75 .75 1 rg" },
+ -- palegray = { "pdf", "origin", ".75 g" },
+ -- }
+ --
+ -- local strokecolors = {
+ -- red = { "pdf", "origin", "1 0 0 RG" },
+ -- green = { "pdf", "origin", "0 1 0 RG" },
+ -- blue = { "pdf", "origin", "0 0 1 RG" },
+ -- gray = { "pdf", "origin", ".5 G" },
+ -- black = { "pdf", "origin", "0 G" },
+ -- palered = { "pdf", "origin", "1 .75 .75 RG" },
+ -- palegreen = { "pdf", "origin", ".75 1 .75 RG" },
+ -- paleblue = { "pdf", "origin", ".75 .75 1 RG" },
+ -- palegray = { "pdf", "origin", ".75 G" },
+ -- }
+ --
+ -- backends.pdf.tables.vfspecials = allocate { -- todo: distinguish between glyph and rule color
+ --
+ -- red = { "pdf", "origin", "1 0 0 rg 1 0 0 RG" },
+ -- green = { "pdf", "origin", "0 1 0 rg 0 1 0 RG" },
+ -- blue = { "pdf", "origin", "0 0 1 rg 0 0 1 RG" },
+ -- gray = { "pdf", "origin", ".75 g .75 G" },
+ -- black = { "pdf", "origin", "0 g 0 G" },
+ --
+ -- -- rulecolors = fillcolors,
+ -- -- fillcolors = fillcolors,
+ -- -- strokecolors = strokecolors,
+ --
+ -- startslant = function(a) return { "pdf", "origin", f_slant(a) } end,
+ -- stopslant = { "pdf", "origin", "Q" },
+ --
+ -- }
+
+ local slants = setmetatableindex(function(t,k)
+ local v = { "pdf", "origin", f_slant(a) }
+ t[k] = v
+ return v
+ end)
+
+ local function startslant(a)
+ return slants[a]
+ end
-local f_slant = formatters["pdf: q 1 0 %F 1 0 0 cm"]
+ local c_cache = setmetatableindex(function(t,m)
+ local v = setmetatableindex(function(t,c)
+ local p = { "pdf", "origin", "q " .. pdfcolor(m,c) }
+ t[c] = p
+ return p
+ end)
+ t[m] = v
+ return v
+ end)
-backends.pdf.tables.vfspecials = allocate { -- todo: distinguish between glyph and rule color
+ -- we inherit the outer transparency
+
+ local t_cache = setmetatableindex(function(t,transparency)
+ local p = pdftransparency(transparency)
+ local v = setmetatableindex(function(t,colormodel)
+ local v = setmetatableindex(function(t,color)
+ local v = { "pdf", "origin", "q " .. pdfcolor(colormodel,color) .. " " .. p }
+ t[color] = v
+ return v
+ end)
+ t[colormodel] = v
+ return v
+ end)
+ t[transparency] = v
+ return v
+ end)
- red = { "special", 'pdf: 1 0 0 rg 1 0 0 RG' },
- green = { "special", 'pdf: 0 1 0 rg 0 1 0 RG' },
- blue = { "special", 'pdf: 0 0 1 rg 0 0 1 RG' },
- gray = { "special", 'pdf: .75 g .75 G' },
- black = { "special", 'pdf: 0 g 0 G' },
+ local function startcolor(k)
+ local m, c = colortoattributes(k)
+ local t = transparencytoattribute(k)
+ if t then
+ return t_cache[t][m][c]
+ else
+ return c_cache[m][c]
+ end
+ end
- rulecolors = {
- red = { "special", 'pdf: 1 0 0 rg' },
- green = { "special", 'pdf: 0 1 0 rg' },
- blue = { "special", 'pdf: 0 0 1 rg' },
- gray = { "special", 'pdf: .5 g' },
- black = { "special", 'pdf: 0 g' },
- palered = { "special", 'pdf: 1 .75 .75 rg' },
- palegreen = { "special", 'pdf: .75 1 .75 rg' },
- paleblue = { "special", 'pdf: .75 .75 1 rg' },
- palegray = { "special", 'pdf: .75 g' },
- },
+ backends.pdf.tables.vfspecials = allocate { -- todo: distinguish between glyph and rule color
- startslant = function(a) return { "special", f_slant(a) } end,
- stopslant = { "special", "pdf: Q" },
+ startcolor = startcolor,
+ -- stopcolor = { "pdf", "origin", "0 g 0 G Q" },
+ stopcolor = { "pdf", "origin", "Q" },
-}
+ startslant = startslant,
+ stopslant = { "pdf", "origin", "Q" },
+
+ }
+
+end
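The c_cache and t_cache tables above rely on table.setmetatableindex to grow nested lookup tables lazily, so a literal is built once per transparency/model/color combination and then reused; the same idiom in plain Lua, with made-up payloads:

-- plain-Lua equivalent of the nested memo tables used above
local function memo(creator)
    return setmetatable({ }, {
        __index = function(t,k)
            local v = creator(k)
            t[k] = v
            return v
        end,
    })
end

-- outer key: color model, inner key: color index (hypothetical values)
local cache = memo(function(model)
    return memo(function(color)
        return { "pdf", "origin", ("q model=%s color=%s"):format(model,color) }
    end)
end)

local literal = cache[3][42]     -- built on first access
assert(cache[3][42] == literal)  -- and reused afterwards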
diff --git a/tex/context/base/mkiv/lpdf-epa.lua b/tex/context/base/mkiv/lpdf-epa.lua
index c72e2a424..89b2c6e0e 100644
--- a/tex/context/base/mkiv/lpdf-epa.lua
+++ b/tex/context/base/mkiv/lpdf-epa.lua
@@ -6,13 +6,15 @@ if not modules then modules = { } end modules ['lpdf-epa'] = {
license = "see context related readme files"
}
--- This is a rather experimental feature and the code will probably
--- change.
+-- This is a rather experimental feature and the code will probably change.
local type, tonumber = type, tonumber
local format, gsub, lower = string.format, string.gsub, string.lower
local formatters = string.formatters
local abs = math.abs
+local expandname = file.expandname
+local allocate = utilities.storage.allocate
+local isfile = lfs.isfile
----- lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
@@ -20,6 +22,8 @@ local trace_links = false trackers.register("figures.links", function(v)
local trace_outlines = false trackers.register("figures.outlines", function(v) trace_outlines = v end)
local report_link = logs.reporter("backend","link")
+local report_comment = logs.reporter("backend","comment")
+local report_field = logs.reporter("backend","field")
local report_outline = logs.reporter("backend","outline")
local epdf = epdf
@@ -39,12 +43,29 @@ local escapetex = characters.filters.utf.private.escape
local bookmarks = structures.bookmarks
-local maxdimen = 2^30-1
+local maxdimen = 0x3FFFFFFF -- 2^30-1
local layerspec = { -- predefining saves time
"epdflinks"
}
+local collected = allocate()
+local tobesaved = allocate()
+
+local jobembedded = {
+ collected = collected,
+ tobesaved = tobesaved,
+}
+
+job.embedded = jobembedded
+
+local function initializer()
+ tobesaved = jobembedded.tobesaved
+ collected = jobembedded.collected
+end
+
+job.register('job.embedded.collected',tobesaved,initializer)
+
local f_namespace = formatters["lpdf-epa-%s-"]
local function makenamespace(filename)
@@ -114,6 +135,16 @@ end
-- (see section 3.10 in the 1.7 reference) so we need to test for string as well
-- as a table. TH/20140916
+-- When embedded is set then files need to have page references which is seldom the
+-- case but you can generate them with context:
+--
+-- \setupinteraction[state=start,page={page,page}]
+--
+-- see tests/mkiv/interaction/cross[1|2|3].tex for an example
+
+local embedded = false directives.register("figures.embedded", function(v) embedded = v end)
+local reported = { }
+
local function link_file(x,y,w,h,document,annotation)
local a = annotation.A
if a then
@@ -129,9 +160,20 @@ local function link_file(x,y,w,h,document,annotation)
elseif type(destination) == "string" then
add_link(x,y,w,h,formatters["%s::%s"](filename,destination),"file (named)")
else
- destination = destination[1] -- array
- if tonumber(destination) then
- add_link(x,y,w,h,formatters["%s::page(%s)"](filename,destination),"file (page)")
+ -- hm, zero offset so maybe: destination + 1
+ destination = tonumber(destination[1]) -- array
+ if destination then
+ destination = destination + 1
+ local loaded = collected[lower(expandname(filename))]
+ if embedded and loaded then
+ add_link(x,y,w,h,makenamespace(filename) .. destination,what)
+ else
+ if loaded and not reported[filename] then
+ report_link("reference to an also loaded file %a, consider using directive: figures.embedded",filename)
+ reported[filename] = true
+ end
+ add_link(x,y,w,h,formatters["%s::page(%s)"](filename,destination),"file (page)")
+ end
else
add_link(x,y,w,h,formatters["file(%s)"](filename),"file")
end
@@ -145,77 +187,82 @@ function codeinjections.mergereferences(specification)
specification = figures and figures.current()
specification = specification and specification.status
end
- if specification then
- local fullname = specification.fullname
- local document = loadpdffile(fullname) -- costs time
- if document then
- local pagenumber = specification.page or 1
- local xscale = specification.yscale or 1
- local yscale = specification.yscale or 1
- local size = specification.size or "crop" -- todo
- local pagedata = document.pages[pagenumber]
- local annotations = pagedata and pagedata.Annots
- local namespace = makenamespace(fullname)
- local reference = namespace .. pagenumber
- if annotations and annotations.n > 0 then
- local mediabox = pagedata.MediaBox
- local llx = mediabox[1]
- local lly = mediabox[2]
- local urx = mediabox[3]
- local ury = mediabox[4]
- local width = xscale * (urx - llx) -- \\overlaywidth, \\overlayheight
- local height = yscale * (ury - lly) -- \\overlaywidth, \\overlayheight
- context.definelayer( { "epdflinks" }, { height = height.."bp" , width = width.."bp" })
- for i=1,annotations.n do
- local annotation = annotations[i]
- if annotation then
- local subtype = annotation.Subtype
- local rectangle = annotation.Rect
- local a_llx = rectangle[1]
- local a_lly = rectangle[2]
- local a_urx = rectangle[3]
- local a_ury = rectangle[4]
- local x = xscale * (a_llx - llx)
- local y = yscale * (a_lly - lly)
- local w = xscale * (a_urx - a_llx)
- local h = yscale * (a_ury - a_lly)
- if subtype == "Link" then
- local a = annotation.A
- if not a then
- report_link("missing link annotation")
- elseif w > width or h > height or w < 0 or h < 0 or abs(x) > (maxdimen/2) or abs(y) > (maxdimen/2) then
- report_link("broken link rectangle [%f %f %f %f] (max: %f)",a_llx,a_lly,a_urx,a_ury,maxdimen/2)
- else
- local linktype = a.S
- if linktype == "GoTo" then
- link_goto(x,y,w,h,document,annotation,pagedata,namespace)
- elseif linktype == "GoToR" then
- link_file(x,y,w,h,document,annotation)
- elseif linktype == "URI" then
- link_uri(x,y,w,h,document,annotation)
- elseif trace_links then
- report_link("unsupported link annotation %a",linktype)
- end
- end
+ if not specification then
+ return ""
+ end
+ local fullname = specification.fullname
+ local expanded = lower(expandname(fullname))
+ -- we could add a check for duplicate page insertion
+ tobesaved[expanded] = true
+ --- but that is messy anyway so we forget about it
+ local document = loadpdffile(fullname) -- costs time
+ if not document then
+ return ""
+ end
+ local pagenumber = specification.page or 1
+ local xscale = specification.yscale or 1
+ local yscale = specification.yscale or 1
+ local size = specification.size or "crop" -- todo
+ local pagedata = document.pages[pagenumber]
+ local annotations = pagedata and pagedata.Annots
+ local namespace = makenamespace(fullname)
+ local reference = namespace .. pagenumber
+ if annotations and annotations.n > 0 then
+ local mediabox = pagedata.MediaBox
+ local llx = mediabox[1]
+ local lly = mediabox[2]
+ local urx = mediabox[3]
+ local ury = mediabox[4]
+ local width = xscale * (urx - llx) -- \\overlaywidth, \\overlayheight
+ local height = yscale * (ury - lly) -- \\overlaywidth, \\overlayheight
+ context.definelayer( { "epdflinks" }, { height = height.."bp" , width = width.."bp" })
+ for i=1,annotations.n do
+ local annotation = annotations[i]
+ if annotation then
+ local subtype = annotation.Subtype
+ local rectangle = annotation.Rect
+ local a_llx = rectangle[1]
+ local a_lly = rectangle[2]
+ local a_urx = rectangle[3]
+ local a_ury = rectangle[4]
+ local x = xscale * (a_llx - llx)
+ local y = yscale * (a_lly - lly)
+ local w = xscale * (a_urx - a_llx)
+ local h = yscale * (a_ury - a_lly)
+ if subtype == "Link" then
+ local a = annotation.A
+ if not a then
+ report_link("missing link annotation")
+ elseif w > width or h > height or w < 0 or h < 0 or abs(x) > (maxdimen/2) or abs(y) > (maxdimen/2) then
+ report_link("broken link rectangle [%.6F %.6F %.6F %.6F] (max: %.6F)",a_llx,a_lly,a_urx,a_ury,maxdimen/2)
+ else
+ local linktype = a.S
+ if linktype == "GoTo" then
+ link_goto(x,y,w,h,document,annotation,pagedata,namespace)
+ elseif linktype == "GoToR" then
+ link_file(x,y,w,h,document,annotation)
+ elseif linktype == "URI" then
+ link_uri(x,y,w,h,document,annotation)
elseif trace_links then
- report_link("unsupported annotation %a",subtype)
+ report_link("unsupported link annotation %a",linktype)
end
- elseif trace_links then
- report_link("broken annotation, index %a",i)
end
+ elseif trace_links then
+ report_link("unsupported annotation %a",subtype)
end
- context.flushlayer { "epdflinks" }
- end
- -- moved outside previous test
- context.setgvalue("figurereference",reference) -- global
- if trace_links then
- report_link("setting figure reference to %a",reference)
+ elseif trace_links then
+ report_link("broken annotation, index %a",i)
end
- specification.reference = reference
- return namespace
end
+ context.flushlayer { "epdflinks" }
end
- return ""-- no namespace, empty, not nil
+ -- moved outside previous test
+ context.setgvalue("figurereference",reference) -- global
+ if trace_links then
+ report_link("setting figure reference to %a",reference)
+ end
+ specification.reference = reference
+ return namespace
end
function codeinjections.mergeviewerlayers(specification)
@@ -280,7 +327,7 @@ function codeinjections.getbookmarks(filename)
local document = nil
- if lfs.isfile(filename) then
+ if isfile(filename) then
document = loadpdffile(filename)
else
report_outline("unknown file %a",filename)
@@ -303,15 +350,22 @@ function codeinjections.getbookmarks(filename)
local subtype = action.S
if subtype == "GoTo" then
destination = action.D
- if type(destination) == "string" then
+ local kind = type(destination)
+ if kind == "string" then
entry.destination = destination
destination = destinations[destination]
local pagedata = destination and destination[1]
if pagedata then
entry.realpage = pagedata.number
end
- else
- -- maybe
+ elseif kind == "table" then
+ local pageref = destination.n
+ if pageref then
+ local pagedata = pages[pageref]
+ if pagedata then
+ entry.realpage = pagedata.number
+ end
+ end
end
else
-- maybe
@@ -416,3 +470,82 @@ function codeinjections.mergebookmarks(specification)
end
end
end
+
+-- placeholders:
+
+function codeinjections.mergecomments(specification)
+ report_comment("unfinished experimental code, not used yet")
+end
+
+function codeinjections.mergefields(specification)
+ report_field("unfinished experimental code, not used yet")
+end
+
+-- A bit more than a placeholder but in the same perspective as
+-- inclusion of comments and fields:
+--
+-- getinfo{ filename = "tt.pdf", metadata = true }
+-- getinfo{ filename = "tt.pdf", page = 1, metadata = "xml" }
+-- getinfo("tt.pdf")
+
+function codeinjections.getinfo(specification)
+ if type(specification) == "string" then
+ specification = { filename = specification }
+ end
+ local filename = specification.filename
+ if type(filename) == "string" and isfile(filename) then
+ local pdffile = loadpdffile(filename)
+ if pdffile then
+ local pagenumber = specification.page or 1
+ local metadata = specification.metadata
+ local catalog = pdffile.Catalog
+ local info = pdffile.Info
+ local pages = pdffile.pages
+ local nofpages = pages.n
+ if metadata then
+ local m = catalog.Metadata
+ if m then
+ m = m()
+ if metadata == "xml" then
+ metadata = xml.convert(m)
+ else
+ metadata = m
+ end
+ else
+ metadata = nil
+ end
+ else
+ metadata = nil
+ end
+ if pagenumber > nofpages then
+ pagenumber = nofpages
+ end
+ local nobox = { 0, 0, 0, 0 }
+ local crop = nobox
+ local media = nobox
+ local page = pages[pagenumber]
+ if page then
+ crop = page.CropBox or nobox
+ media = page.MediaBox or crop or nobox
+ crop.n = nil -- nicer
+ media.n = nil -- nicer
+ end
+ local bbox = crop or media or nobox
+ return {
+ filename = filename,
+ pdfversion = tonumber(catalog.Version),
+ nofpages = nofpages,
+ title = info.Title,
+ creator = info.Creator,
+ producer = info.Producer,
+ creationdate = info.CreationDate,
+ modification = info.ModDate,
+ metadata = metadata,
+ width = bbox[3] - bbox[1],
+ height = bbox[4] - bbox[2],
+ cropbox = { crop[1], crop[2], crop[3], crop[4] }, -- we need access
+ mediabox = { media[1], media[2], media[3], media[4] } , -- we need access
+ }
+ end
+ end
+end
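A small usage sketch for the new getinfo helper, using the fields it returns above; the file name is hypothetical and the call goes through the pdf backend's codeinjections table, as in this module:

-- hypothetical file; the fields are the ones assembled in getinfo above
local info = backends.pdf.codeinjections.getinfo { filename = "somefile.pdf", metadata = "xml" }
if info then
    print(info.nofpages, info.pdfversion)
    print(info.width, info.height)   -- taken from the crop box when present
    -- info.metadata is an xml tree here because metadata == "xml"
end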
diff --git a/tex/context/base/mkiv/lpdf-epd.lua b/tex/context/base/mkiv/lpdf-epd.lua
index 0f62d6189..2fcd28473 100644
--- a/tex/context/base/mkiv/lpdf-epd.lua
+++ b/tex/context/base/mkiv/lpdf-epd.lua
@@ -31,71 +31,86 @@ if not modules then modules = { } end modules ['lpdf-epd'] = {
-- a safer bet is foo("Title") which will return a decoded string (or the original if it
-- already was unicode).
-local setmetatable, rawset, rawget, type = setmetatable, rawset, rawget, type
+local setmetatable, rawset, rawget, type, next = setmetatable, rawset, rawget, type, next
local tostring, tonumber = tostring, tonumber
local lower, match, char, byte, find = string.lower, string.match, string.char, string.byte, string.find
local abs = math.abs
local concat = table.concat
local toutf, toeight, utfchar = string.toutf, utf.toeight, utf.char
+local setmetatableindex = table.setmetatableindex
local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
local P, C, S, R, Ct, Cc, V, Carg, Cs, Cf, Cg = lpeg.P, lpeg.C, lpeg.S, lpeg.R, lpeg.Ct, lpeg.Cc, lpeg.V, lpeg.Carg, lpeg.Cs, lpeg.Cf, lpeg.Cg
-local epdf = epdf
- lpdf = lpdf or { }
-local lpdf = lpdf
-local lpdf_epdf = { }
-lpdf.epdf = lpdf_epdf
+local epdf = epdf
+ lpdf = lpdf or { }
+local lpdf = lpdf
+local lpdf_epdf = { }
+lpdf.epdf = lpdf_epdf
-local pdf_open = epdf.open
+-- local getDict, getArray, getReal, getNum, getString, getBool, getName, getRef, getRefNum
+-- local getType, getTypeName
+-- local dictGetLength, dictGetVal, dictGetValNF, dictGetKey
+-- local arrayGetLength, arrayGetNF, arrayGet
+-- local streamReset, streamGetDict, streamGetChar
-local report_epdf = logs.reporter("epdf")
+-- We use as little as possible and also not an object interface. After all, we
+-- don't know how the library (and its api) evolves so we better can be prepared
+-- for wrappers.
-local getDict, getArray, getReal, getNum, getString, getBool, getName, getRef, getRefNum
-local getType, getTypeName
-local dictGetLength, dictGetVal, dictGetValNF, dictGetKey
-local arrayGetLength, arrayGetNF, arrayGet
-local streamReset, streamGetDict, streamGetChar
+local registry = debug.getregistry()
-do
- local object = epdf.Object()
- --
- getDict = object.getDict
- getArray = object.getArray
- getReal = object.getReal
- getNum = object.getNum
- getString = object.getString
- getBool = object.getBool
- getName = object.getName
- getRef = object.getRef
- getRefNum = object.getRefNum
- --
- getType = object.getType
- getTypeName = object.getTypeName
- --
- streamReset = object.streamReset
- streamGetDict = object.streamGetDict
- streamGetChar = object.streamGetChar
- --
-end
+local object = registry["epdf.Object"]
+local dictionary = registry["epdf.Dict"]
+local array = registry["epdf.Array"]
+local xref = registry["epdf.XRef"]
+local catalog = registry["epdf.Catalog"]
+local pdfdoc = registry["epdf.PDFDoc"]
-local function initialize_methods(xref)
- local dictionary = epdf.Dict(xref)
- local array = epdf.Array(xref)
- --
- dictGetLength = dictionary.getLength
- dictGetVal = dictionary.getVal
- dictGetValNF = dictionary.getValNF
- dictGetKey = dictionary.getKey
- --
- arrayGetLength = array.getLength
- arrayGetNF = array.getNF
- arrayGet = array.get
- --
- initialize_methods = function()
- -- already done
- end
-end
+local openPDF = epdf.open
+
+local getDict = object.getDict
+local getArray = object.getArray
+local getReal = object.getReal
+local getInt = object.getInt
+local getNum = object.getNum
+local getString = object.getString
+local getBool = object.getBool
+local getName = object.getName
+local getRef = object.getRef
+local getRefNum = object.getRefNum
+
+local getType = object.getType
+local getTypeName = object.getTypeName
+
+local streamReset = object.streamReset
+local streamGetDict = object.streamGetDict
+local streamGetChar = object.streamGetChar
+
+local dictGetLength = dictionary.getLength
+local dictGetVal = dictionary.getVal
+local dictGetValNF = dictionary.getValNF
+local dictGetKey = dictionary.getKey
+
+local arrayGetLength = array.getLength
+local arrayGetNF = array.getNF
+local arrayGet = array.get
+
+-- these are kind of weird as they can't be accessed by (root) object
+
+local getNumPages = catalog.getNumPages
+local getPageRef = catalog.getPageRef
+
+local getXRef = pdfdoc.getXRef
+local getRawCatalog = pdfdoc.getCatalog
+
+local fetch = xref.fetch
+local getCatalog = xref.getCatalog
+local getDocInfo = xref.getDocInfo
+
+-- we're done with library shortcuts
+
+local report_epdf = logs.reporter("epdf")
local typenames = { [0] =
"boolean",
@@ -139,15 +154,50 @@ local checked_access
local frompdfdoc = lpdf.frompdfdoc
-local function get_flagged(t,f,k)
- local fk = f[k]
- if not fk then
- return t[k]
- elseif fk == "rawtext" then
- return frompdfdoc(t[k])
- else -- no other flags yet
- return t[k]
+local get_flagged
+
+if lpdf.dictionary then
+
+ local pdfdictionary = lpdf.dictionary
+ local pdfarray = lpdf.array
+ local pdfconstant = lpdf.constant
+ local pdfstring = lpdf.string
+ local pdfunicode = lpdf.unicode
+
+ get_flagged = function(t,f,k)
+ local tk = t[k] -- triggers resolve
+ local fk = f[k]
+ if not fk then
+ return tk
+ elseif fk == "name" then
+ return pdfconstant(tk)
+ elseif fk == "array" then
+ return pdfarray(tk)
+ elseif fk == "dictionary" then
+ return pdfdictionary(tk)
+ elseif fk == "rawtext" then
+ return pdfstring(tk)
+ elseif fk == "unicode" then
+ return pdfunicode(tk)
+ else
+ return tk
+ end
end
+
+else
+
+ get_flagged = function(t,f,k)
+ local tk = t[k] -- triggers resolve
+ local fk = f[k]
+ if not fk then
+ return tk
+ elseif fk == "rawtext" then
+ return frompdfdoc(tk)
+ else
+ return tk
+ end
+ end
+
end
local function prepare(document,d,t,n,k,mt,flags)
@@ -166,7 +216,7 @@ local function prepare(document,d,t,n,k,mt,flags)
local cached = document.__cache__[objnum]
if not cached then
cached = checked_access[kind](v,document,objnum,mt)
- if c then
+ if cached then
document.__cache__[objnum] = cached
document.__xrefs__[cached] = objnum
end
@@ -207,8 +257,11 @@ local function some_dictionary(d,document)
__call = function(t,k)
return get_flagged(t,f,k)
end,
+ -- __kind = function(k)
+ -- return f[k] or type(t[k])
+ -- end,
} )
- return t
+ return t, "dictionary"
end
end
@@ -225,8 +278,11 @@ local function get_dictionary(object,document,r,mt)
__call = function(t,k)
return get_flagged(t,f,k)
end,
+ -- __kind = function(k)
+ -- return f[k] or type(t[k])
+ -- end,
} )
- return t
+ return t, "dictionary"
end
end
@@ -260,8 +316,14 @@ local function prepare(document,a,t,n,k)
fatal_error("error: invalid value at index %a in array of %a",i,document.filename)
end
end
- getmetatable(t).__index = nil
- return t[k]
+ local m = getmetatable(t)
+ if m then
+ m.__index = nil
+ m.__len = nil
+ end
+ if k then
+ return t[k]
+ end
end
local function some_array(a,document)
@@ -270,10 +332,20 @@ local function some_array(a,document)
local t = { n = n }
setmetatable(t, {
__index = function(t,k)
- return prepare(document,a,t,n,k)
- end
+ return prepare(document,a,t,n,k,_,_,f)
+ end,
+ __len = function(t)
+ prepare(document,a,t,n,_,_,f)
+ return n
+ end,
+ __call = function(t,k)
+ return get_flagged(t,f,k)
+ end,
+ -- __kind = function(k)
+ -- return f[k] or type(t[k])
+ -- end,
} )
- return t
+ return t, "array"
end
end
@@ -282,12 +354,23 @@ local function get_array(object,document)
local n = a and arrayGetLength(a) or 0
if n > 0 then
local t = { n = n }
+ local f = { }
setmetatable(t, {
__index = function(t,k)
- return prepare(document,a,t,n,k)
- end
+ return prepare(document,a,t,n,k,_,_,f)
+ end,
+ __len = function(t)
+ prepare(document,a,t,n,_,_,f)
+ return n
+ end,
+ __call = function(t,k)
+ return get_flagged(t,f,k)
+ end,
+ -- __kind = function(k)
+ -- return f[k] or type(t[k])
+ -- end,
} )
- return t
+ return t, "array"
end
end
@@ -335,7 +418,7 @@ local u_pattern = lpeg.patterns.utfbom_16_be * lpeg.patterns.utf16_to_utf8_be
----- b_pattern = lpeg.patterns.hextobytes
local function get_string(v)
- -- the toutf function only converts a utf16 string and leves the original
+ -- the toutf function only converts a utf16 string and leaves the original
-- untouched otherwise; one might want to apply lpdf.frompdfdoc to a
-- non-unicode string
local s = getString(v)
@@ -344,7 +427,7 @@ local function get_string(v)
end
local u = lpegmatch(u_pattern,s)
if u then
- return u -- , "unicode"
+ return u, "unicode"
end
-- this is too tricky and fails on e.g. reload of url www.pragma-ade.com)
-- local b = lpegmatch(b_pattern,s)
@@ -354,6 +437,10 @@ local function get_string(v)
return s, "rawtext"
end
+local function get_name(v)
+ return getName(v), "name"
+end
+
local function get_null()
return nil
end
@@ -369,17 +456,17 @@ local function invalidaccess(k,document)
end
end
-checked_access = table.setmetatableindex(function(t,k)
+checked_access = setmetatableindex(function(t,k)
return function(v,document)
invalidaccess(k,document)
end
end)
checked_access[typenumbers.boolean] = getBool
-checked_access[typenumbers.integer] = getNum
+checked_access[typenumbers.integer] = getInt
checked_access[typenumbers.real] = getReal
checked_access[typenumbers.string] = get_string -- getString
-checked_access[typenumbers.name] = getName
+checked_access[typenumbers.name] = get_name
checked_access[typenumbers.null] = get_null
checked_access[typenumbers.array] = get_array -- d,document,r
checked_access[typenumbers.dictionary] = get_dictionary -- d,document,r
@@ -468,20 +555,25 @@ local function getstructure(document)
return document.Catalog.StructTreeRoot
end
+-- This is the only messy helper. We can't access the root as any object (it seems)
+-- so we need a few low level accessors. It's anyway sort of simple enough to deal
+-- with but it won't win a beauty contest.
+
local function getpages(document,Catalog)
- local __data__ = document.__data__
- local __xrefs__ = document.__xrefs__
- local __cache__ = document.__cache__
- local __xref__ = document.__xref__
+ local __data__ = document.__data__
+ local __xrefs__ = document.__xrefs__
+ local __cache__ = document.__cache__
+ local __xref__ = document.__xref__
+ --
+ local rawcatalog = getRawCatalog(__data__)
+ local nofpages = getNumPages(rawcatalog)
--
- local catalog = __data__:getCatalog()
- local pages = { }
- local nofpages = catalog:getNumPages()
- local metatable = { __index = Catalog.Pages }
+ local pages = { }
+ local metatable = { __index = Catalog.Pages } -- somewhat empty
--
for pagenumber=1,nofpages do
- local pagereference = catalog:getPageRef(pagenumber).num
- local pageobject = __xref__:fetch(pagereference,0)
+ local pagereference = getPageRef(rawcatalog,pagenumber).num
+ local pageobject = fetch(__xref__,pagereference,0)
local pagedata = get_dictionary(pageobject,document,pagereference,metatable)
if pagedata then
-- rawset(pagedata,"number",pagenumber)
@@ -496,59 +588,53 @@ local function getpages(document,Catalog)
--
pages.n = nofpages
--
+ document.pages = pages
return pages
end
--- loader
-
-local function delayed(document,tag,f)
- local t = { }
- setmetatable(t, { __index = function(t,k)
- local result = f()
- if result then
- document[tag] = result
- return result[k]
- end
- end } )
- return t
+local function resolve(document,k)
+ local entry = nil
+ local Catalog = document.Catalog
+ local Names = Catalog.Names
+ if k == "pages" then
+ entry = getpages(document,Catalog)
+ elseif k == "destinations" then
+ entry = getnames(document,Names and Names.Dests)
+ elseif k == "javascripts" then
+ entry = getnames(document,Names and Names.JS)
+ elseif k == "widgets" then
+ entry = getnames(document,Names and Names.AcroForm)
+ elseif k == "embeddedfiles" then
+ entry = getnames(document,Names and Names.EmbeddedFiles)
+ elseif k == "layers" then
+ entry = getlayers(document)
+ elseif k == "structure" then
+ entry = getstructure(document)
+ end
+ document[k] = entry
+ return entry
end
--- local catobj = data:getXRef():fetch(data:getXRef():getRootNum(),data:getXRef():getRootGen())
--- print(catobj:getDict(),data:getXRef():getCatalog():getDict())
-
local loaded = { }
function lpdf_epdf.load(filename)
local document = loaded[filename]
if not document then
statistics.starttiming(lpdf_epdf)
- local __data__ = pdf_open(filename) -- maybe resolvers.find_file
+ local __data__ = openPDF(filename) -- maybe resolvers.find_file
if __data__ then
- local __xref__ = __data__:getXRef()
+ local __xref__ = getXRef(__data__)
document = {
filename = filename,
__cache__ = { },
__xrefs__ = { },
__fonts__ = { },
__data__ = __data__,
- __xref__ = __xref__,
+ __xref__ = __xref__
}
- --
- initialize_methods(__xref__)
- --
- local Catalog = some_dictionary(__xref__:getCatalog():getDict(),document)
- local Info = some_dictionary(__xref__:getDocInfo():getDict(),document)
- --
- document.Catalog = Catalog
- document.Info = Info
- -- a few handy helper tables
- document.pages = delayed(document,"pages", function() return getpages(document,Catalog) end)
- document.destinations = delayed(document,"destinations", function() return getnames(document,Catalog.Names and Catalog.Names.Dests) end)
- document.javascripts = delayed(document,"javascripts", function() return getnames(document,Catalog.Names and Catalog.Names.JS) end)
- document.widgets = delayed(document,"widgets", function() return getnames(document,Catalog.Names and Catalog.Names.AcroForm) end)
- document.embeddedfiles = delayed(document,"embeddedfiles",function() return getnames(document,Catalog.Names and Catalog.Names.EmbeddedFiles) end)
- document.layers = delayed(document,"layers", function() return getlayers(document) end)
- document.structure = delayed(document,"structure", function() return getstructure(document) end)
+ document.Catalog = some_dictionary(getDict(getCatalog(__xref__)),document)
+ document.Info = some_dictionary(getDict(getDocInfo(__xref__)),document)
+ setmetatableindex(document,resolve)
else
document = false
end
@@ -663,7 +749,7 @@ local function analyzefonts(document,resources) -- unfinished
fonts[id] = {
tounicode = type(tounicode) == "table" and tounicode or { }
}
- table.setmetatableindex(fonts[id],"self")
+ setmetatableindex(fonts[id],"self")
end
end
end
@@ -787,7 +873,7 @@ function lpdf_epdf.contenttotext(document,list) -- maybe signal fonts
local dy = abs(last_y - ty)
if dy > linefactor*last_f then
if last > 0 then
- if find(text[last],softhyphen) then
+ if find(text[last],softhyphen,1,true) then
-- ignore
else
last = last + 1
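
-- A minimal sketch of the resolve pattern that replaces the delayed tables: the
-- __index metamethod computes an entry on first access and caches it as a plain
-- field. Plain setmetatable and a dummy computation stand in for the epdf calls.

local function resolve(document,k)
    local entry = nil
    if k == "pages" then
        entry = { n = 0 }  -- the real code calls getpages(document,Catalog) here
    end
    document[k] = entry    -- cache, so __index fires at most once per key
    return entry
end

local document = setmetatable({ filename = "demo.pdf" }, { __index = resolve })

local pages = document.pages              -- computed and cached
print(pages == rawget(document,"pages"))  -- true, no second resolve
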
diff --git a/tex/context/base/mkiv/lpdf-fld.lua b/tex/context/base/mkiv/lpdf-fld.lua
index bbafb299b..73de5eaf6 100644
--- a/tex/context/base/mkiv/lpdf-fld.lua
+++ b/tex/context/base/mkiv/lpdf-fld.lua
@@ -6,56 +6,52 @@ if not modules then modules = { } end modules ['lpdf-fld'] = {
license = "see context related readme files"
}
--- The problem with widgets is that so far each version of acrobat
--- has some rendering problem. I tried to keep up with this but
--- it makes no sense to do so as one cannot rely on the viewer
--- not changing. Especially Btn fields are tricky as their appearences
--- need to be synchronized in the case of children but e.g. acrobat
--- 10 does not retain the state and forces a check symbol. If you
--- make a file in acrobat then it has MK entries that seem to overload
--- the already present appearance streams (they're probably only meant for
--- printing) as it looks like the viewer has some fallback on (auto
--- generated) MK behaviour built in. So ... hard to test. Unfortunately
--- not even the default appearance is generated. This will probably be
--- solved at some point.
+-- The problem with widgets is that so far each version of acrobat has some
+-- rendering problem. I tried to keep up with this but it makes no sense to do so as
+-- one cannot rely on the viewer not changing. Especially Btn fields are tricky as
+-- their appearances need to be synchronized in the case of children but e.g.
+-- acrobat 10 does not retain the state and forces a check symbol. If you make a
+-- file in acrobat then it has MK entries that seem to overload the already present
+-- appearance streams (they're probably only meant for printing) as it looks like
+-- the viewer has some fallback on (auto generated) MK behaviour built in. So ...
+-- hard to test. Unfortunately not even the default appearance is generated. This
+-- will probably be solved at some point.
--
--- Also, for some reason the viewer does not always show custom appearances
--- when fields are being rolled over or clicked upon, and circles or checks
--- pop up when you don't expect them. I fear that this kind of instability
--- eventually will kill pdf forms. After all, the manual says: "individual
--- annotation handlers may ignore this entry and provide their own appearances"
--- and one might wonder what 'individual' means here, but effectively this
--- renders the whole concept of appearances useless.
+-- Also, for some reason the viewer does not always show custom appearances when
+-- fields are being rolled over or clicked upon, and circles or checks pop up when
+-- you don't expect them. I fear that this kind of instability eventually will kill
+-- pdf forms. After all, the manual says: "individual annotation handlers may ignore
+-- this entry and provide their own appearances" and one might wonder what
+-- 'individual' means here, but effectively this renders the whole concept of
+-- appearances useless.
--
--- Okay, here is one observation. A pdf file contains objects and one might
--- consider each one to be a static entity when read in. However, acrobat
--- starts rendering and seems to manipulate (appearance streams) of objects
--- in place (this is visible when the file is saved again). And, combined
--- with some other caching and hashing, this might give side effects for
--- shared objects. So, it seems that for some cases one can best be not too
--- clever and not share but duplicate information. Of course this defeats the
--- whole purpose of these objects. Of course I can be wrong.
+-- Okay, here is one observation. A pdf file contains objects and one might consider
+-- each one to be a static entity when read in. However, acrobat starts rendering
+-- and seems to manipulate (appearance streams) of objects in place (this is visible
+-- when the file is saved again). And, combined with some other caching and hashing,
+-- this might give side effects for shared objects. So, it seems that for some cases
+-- one can best be not too clever and not share but duplicate information. Of course
+-- this defeats the whole purpose of these objects. Of course I can be wrong.
--
-- A rarther weird side effect of the viewer is that the highlighting of fields
--- obscures values, unless you uses one of the BS variants, and this makes
--- custum appearances rather useless as there is no way to control this apart
--- from changing the viewer preferences. It could of course be a bug but it would
--- be nice if the highlighting was at least transparent. I have no clue why the
--- built in shapes work ok (some xform based appearances are generated) while
--- equally valid other xforms fail. It looks like acrobat appearances come on
--- top (being refered to in the MK) while custom ones are behind the highlight
--- rectangle. One can disable the "Show border hover color for fields" option
--- in the preferences. If you load java-imp-rhh this side effect gets disabled
--- and you get what you expect (it took me a while to figure out this hack).
+-- obscures values, unless you use one of the BS variants, and this makes custom
+-- appearances rather useless as there is no way to control this apart from changing
+-- the viewer preferences. It could of course be a bug but it would be nice if the
+-- highlighting was at least transparent. I have no clue why the built in shapes
+-- work ok (some xform based appearances are generated) while equally valid other
+-- xforms fail. It looks like acrobat appearances come on top (being referred to in
+-- the MK) while custom ones are behind the highlight rectangle. One can disable the
+-- "Show border hover color for fields" option in the preferences. If you load
+-- java-imp-rhh this side effect gets disabled and you get what you expect (it took
+-- me a while to figure out this hack).
--
--- When highlighting is enabled, those default symbols flash up, so it looks
--- like we have some inteference between this setting and custom appearances.
+-- When highlighting is enabled, those default symbols flash up, so it looks like we
+-- have some interference between this setting and custom appearances.
--
--- Anyhow, the NeedAppearances is really needed in order to get a rendering
--- for printing especially when highlighting (those colorfull foregrounds) is
--- on.
+-- Anyhow, the NeedAppearances is really needed in order to get a rendering for
+-- printing, especially when highlighting (those colorful foregrounds) is on.
-local tostring, next = tostring, next
+local tostring, tonumber, next = tostring, tonumber, next
local gmatch, lower, format, formatters = string.gmatch, string.lower, string.format, string.formatters
local lpegmatch = lpeg.match
local bpfactor, todimen = number.dimenfactors.bp, string.todimen
@@ -92,6 +88,7 @@ local pdfshareobjectreference = lpdf.shareobjectreference
local pdfshareobject = lpdf.shareobject
local pdfreserveobject = lpdf.reserveobject
local pdfaction = lpdf.action
+local pdfmajorversion = lpdf.majorversion
local pdfcolor = lpdf.color
local pdfcolorvalues = lpdf.colorvalues
@@ -124,39 +121,39 @@ function codeinjections.setformsmethod(name)
end
local flag = { -- /Ff
- ReadOnly = 2^ 0, -- 1
- Required = 2^ 1, -- 2
- NoExport = 2^ 2, -- 3
- MultiLine = 2^12, -- 13
- Password = 2^13, -- 14
- NoToggleToOff = 2^14, -- 15
- Radio = 2^15, -- 16
- PushButton = 2^16, -- 17
- PopUp = 2^17, -- 18
- Edit = 2^18, -- 19
- Sort = 2^19, -- 20
- FileSelect = 2^20, -- 21
- DoNotSpellCheck = 2^22, -- 23
- DoNotScroll = 2^23, -- 24
- Comb = 2^24, -- 25
- RichText = 2^25, -- 26
- RadiosInUnison = 2^25, -- 26
- CommitOnSelChange = 2^26, -- 27
+ ReadOnly = 0x00000001, -- 2^ 0
+ Required = 0x00000002, -- 2^ 1
+ NoExport = 0x00000004, -- 2^ 2
+ MultiLine = 0x00001000, -- 2^12
+ Password = 0x00002000, -- 2^13
+ NoToggleToOff = 0x00004000, -- 2^14
+ Radio = 0x00008000, -- 2^15
+ PushButton = 0x00010000, -- 2^16
+ PopUp = 0x00020000, -- 2^17
+ Edit = 0x00040000, -- 2^18
+ Sort = 0x00080000, -- 2^19
+ FileSelect = 0x00100000, -- 2^20
+ DoNotSpellCheck = 0x00400000, -- 2^22
+ DoNotScroll = 0x00800000, -- 2^23
+ Comb = 0x01000000, -- 2^24
+ RichText = 0x02000000, -- 2^25
+ RadiosInUnison = 0x02000000, -- 2^25
+ CommitOnSelChange = 0x04000000, -- 2^26
}
local plus = { -- /F
- Invisible = 2^0, -- 1
- Hidden = 2^1, -- 2
- Printable = 2^2, -- 3
- Print = 2^2, -- 3
- NoZoom = 2^3, -- 4
- NoRotate = 2^4, -- 5
- NoView = 2^5, -- 6
- ReadOnly = 2^6, -- 7
- Locked = 2^7, -- 8
- ToggleNoView = 2^8, -- 9
- LockedContents = 2^9, -- 10,
- AutoView = 2^8, -- 6 + 9 ?
+ Invisible = 0x00000001, -- 2^0
+ Hidden = 0x00000002, -- 2^1
+ Printable = 0x00000004, -- 2^2
+ Print = 0x00000004, -- 2^2
+ NoZoom = 0x00000008, -- 2^3
+ NoRotate = 0x00000010, -- 2^4
+ NoView = 0x00000020, -- 2^5
+ ReadOnly = 0x00000040, -- 2^6
+ Locked = 0x00000080, -- 2^7
+ ToggleNoView = 0x00000100, -- 2^8
+ LockedContents = 0x00000200, -- 2^9
+ AutoView = 0x00000100, -- 2^8
}
-- todo: check what is interfaced
@@ -174,6 +171,9 @@ plus.hidden = plus.Hidden
plus.printable = plus.Printable
plus.auto = plus.AutoView
+lpdf.flags.widgets = flag
+lpdf.flags.annotations = plus
+
-- some day .. lpeg with function or table
local function fieldflag(specification) -- /Ff
@@ -360,6 +360,8 @@ local function fieldsurrounding(specification)
return tostring(stream)
end
+codeinjections.fieldsurrounding = fieldsurrounding
+
local function registerfonts()
if next(usedfonts) then
checkpdfdocencoding() -- already done
@@ -905,7 +907,7 @@ local function finishfields()
end
if #collected > 0 then
local acroform = pdfdictionary {
- NeedAppearances = true,
+ NeedAppearances = pdfmajorversion() == 1 or nil,
Fields = pdfreference(pdfflushobject(collected)),
CO = fieldsetlist(calculationset),
}
@@ -1294,78 +1296,18 @@ function methods.push(name,specification)
end
local function makeradioparent(field,specification)
--- specification = enhance(specification,"Radio,RadiosInUnison")
specification = enhance(specification,"Radio,RadiosInUnison,Print,NoToggleToOff")
--- specification = enhance(specification,"Radio,Print,NoToggleToOff")
local d = pdfdictionary {
T = field.name,
FT = pdf_btn,
--- F = fieldplus(specification),
+ -- F = fieldplus(specification),
Ff = fieldflag(specification),
--- H = pdf_n,
+ -- H = pdf_n,
V = fielddefault(field),
}
save_parent(field,specification,d,true)
end
--- local function makeradiochild(name,specification)
--- local field, parent = clones[name], nil
--- if field then
--- field = radios[field.parent]
--- parent = fields[field.parent]
--- if not parent.pobj then
--- if trace_fields then
--- report_fields("forcing parent radio %a",parent.name)
--- end
--- makeradioparent(parent,parent)
--- end
--- else
--- field = radios[name]
--- if not field then
--- report_fields("there is some problem with field %a",name)
--- return nil
--- end
--- parent = fields[field.parent]
--- if not parent.pobj then
--- if trace_fields then
--- report_fields("using parent radio %a",name)
--- end
--- makeradioparent(parent,parent)
--- end
--- end
--- if trace_fields then
--- report_fields("using child radio %a with values %a and default %a",name,field.values,field.default)
--- end
--- local fontsymbol = specification.fontsymbol
--- fontsymbol="star"
--- local d = pdfdictionary {
--- Subtype = pdf_widget,
--- Parent = pdfreference(parent.pobj),
--- F = fieldplus(specification),
--- OC = fieldlayer(specification),
--- AA = fieldactions(specification),
--- H = pdf_n,
--- }
--- if fontsymbol and fontsymbol ~= "" then
--- local appearance, default, value = fieldstates_radio(field,true,false,false,name) -- false is also ok
--- specification.fontsymbol = todingbat(fontsymbol)
--- specification.fontstyle = "symbol"
--- specification.fontalternative = "dingbats"
--- d.DA = fieldsurrounding(specification)
--- d.MK = fieldrendering(specification)
--- d.AS = pdfconstant(value) -- default -- mandate when AP but confuses viewers
--- d.AP = appearance
--- return save_kid(parent,specification,d,value)
--- -- return save_kid(parent,specification,d,name)
--- else
--- -- local appearance, default, value = fieldstates_radio(field,true) -- false is also ok
--- local appearance, default, value = fieldstates_radio(field,true,false,false,name) -- false is also ok
--- d.AS = default -- mandate when AP but confuses viewers
--- d.AP = appearance
--- return save_kid(parent,specification,d,value)
--- end
--- end
-
local function makeradiochild(name,specification)
local field, parent = clones[name], nil
if field then
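
-- The /Ff and /F values are built from the hex constants above; as long as no flag
-- is counted twice, plain addition behaves like a bitwise or. A sketch of that
-- arithmetic with a hypothetical combine helper, not the actual fieldflag code:

local flag = {
    Radio         = 0x00008000, -- 2^15
    PushButton    = 0x00010000, -- 2^16
    NoToggleToOff = 0x00004000, -- 2^14
}

local function combine(specification)
    local n = 0
    for name in string.gmatch(specification,"[^, ]+") do
        local f = flag[name]
        if f then
            n = n + f
        end
    end
    return n
end

print(combine("Radio,NoToggleToOff")) -- 49152, i.e. 0x0000C000
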
diff --git a/tex/context/base/mkiv/lpdf-fmt.lua b/tex/context/base/mkiv/lpdf-fmt.lua
index 8bbd3374f..0830d2d8d 100644
--- a/tex/context/base/mkiv/lpdf-fmt.lua
+++ b/tex/context/base/mkiv/lpdf-fmt.lua
@@ -10,6 +10,7 @@ if not modules then modules = { } end modules ['lpdf-fmt'] = {
-- context --directives="backend.format=PDF/X-1a:2001" --trackers=backend.format yourfile
+local tonumber = tonumber
local lower, gmatch, format, find = string.lower, string.gmatch, string.format, string.find
local concat, serialize, sortedhash = table.concat, table.serialize, table.sortedhash
@@ -354,7 +355,7 @@ local formats = utilities.storage.allocate {
pdf_version = 1.7,
format_name = "pdf/a-2a",
xmp_file = "lpdf-pda.xml",
- gts_flag = "GTS_PDFA2",
+ gts_flag = "GTS_PDFA1",
gray_scale = true,
cmyk_colors = true,
rgb_colors = true,
@@ -378,7 +379,7 @@ local formats = utilities.storage.allocate {
pdf_version = 1.7,
format_name = "pdf/a-3a",
xmp_file = "lpdf-pda.xml",
- gts_flag = "GTS_PDFA3",
+ gts_flag = "GTS_PDFA1",
gray_scale = true,
cmyk_colors = true,
rgb_colors = true,
@@ -732,17 +733,15 @@ function codeinjections.setformat(s)
if not level then
level = 3 -- good compromise, default anyway
end
- local pdf_version = spec.pdf_version * 10
- local inject_metadata = spec.inject_metadata
- local majorversion = math.div(pdf_version,10)
- local minorversion = math.mod(pdf_version,10)
- local objectcompression = spec.object_compression and pdf_version >= 15
- local compresslevel = level or pdf.getcompresslevel() -- keep default
- local objectcompresslevel = (objectcompression and (level or pdf.getobjcompresslevel())) or 0
- pdf.setcompresslevel (compresslevel)
- pdf.setobjcompresslevel(objectcompresslevel)
- pdf.setmajorversion (majorversion)
- pdf.setminorversion (minorversion)
+ local pdf_version = spec.pdf_version * 10
+ local inject_metadata = spec.inject_metadata
+ local majorversion = math.div(pdf_version,10)
+ local minorversion = math.mod(pdf_version,10)
+ local objectcompression = spec.object_compression and pdf_version >= 15
+ local compresslevel = level or lpdf.compresslevel() -- keep default
+ local objectcompresslevel = (objectcompression and (level or lpdf.objectcompresslevel())) or 0
+ lpdf.setcompression(compresslevel,objectcompresslevel)
+ lpdf.setversion(majorversion,minorversion)
if objectcompression then
report_backend("forcing pdf version %s.%s, compression level %s, object compression level %s",
majorversion,minorversion,compresslevel,objectcompresslevel)
@@ -753,9 +752,8 @@ function codeinjections.setformat(s)
report_backend("forcing pdf version %s.%s, compression disabled",
majorversion,minorversion)
end
- if pdf.setomitcidset then
- pdf.setomitcidset(formatspecification.include_cidsets == false and 1 or 0)
- end
+ --
+ pdf.setomitcidset(formatspecification.include_cidsets == false and 1 or 0)
--
-- context.setupcolors { -- not this way
-- cmyk = spec.cmyk_colors and variables.yes or variables.no,
@@ -805,8 +803,7 @@ function codeinjections.setformat(s)
report_backend("error, format %a is not supported",format)
end
elseif level then
- pdf.setcompresslevel(level)
- pdf.setobjcompresslevel(level)
+ lpdf.setcompression(level,level)
else
-- we ignore this as we hook it in \everysetupbackend
end
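
-- The version handling above splits spec.pdf_version into a major and a minor
-- part; math.div and math.mod are ConTeXt helpers, but floor and % give the same
-- result for these values:

local pdf_version       = 1.7 * 10                       -- spec.pdf_version = 1.7
local majorversion      = math.floor(pdf_version / 10)   -- 1
local minorversion      = math.floor(pdf_version % 10)   -- 7
local objectcompression = pdf_version >= 15               -- object streams need pdf >= 1.5
print(majorversion, minorversion, objectcompression)      -- 1   7   true
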
diff --git a/tex/context/base/mkiv/lpdf-grp.lua b/tex/context/base/mkiv/lpdf-grp.lua
index 1ebc9b23d..34925fddd 100644
--- a/tex/context/base/mkiv/lpdf-grp.lua
+++ b/tex/context/base/mkiv/lpdf-grp.lua
@@ -6,7 +6,7 @@ if not modules then modules = { } end modules ['lpdf-grp'] = {
license = "see context related readme files"
}
-local type = type
+local type, tonumber = type, tonumber
local formatters, gsub = string.formatters, string.gsub
local concat = table.concat
local round = math.round
@@ -270,7 +270,7 @@ function img.package(image) -- see lpdf-u3d **
local height = boundingbox[4]
local xform = img.scan {
attr = resources(),
- stream = formatters["%F 0 0 %F 0 0 cm /%s Do"](width,height,imagetag),
+ stream = formatters["%.6F 0 0 %.6F 0 0 cm /%s Do"](width,height,imagetag),
bbox = { 0, 0, width/basepoints, height/basepoints },
}
img.immediatewrite(xform)
@@ -280,7 +280,7 @@ end
-- experimental
local nofpatterns = 0
-local f_pattern = formatters["q /Pattern cs /%s scn 0 0 %F %F re f Q"] -- q Q is not really needed
+local f_pattern = formatters["q /Pattern cs /%s scn 0 0 %.6F %.6F re f Q"] -- q Q is not really needed
local texsavebox = tex.saveboxresource
diff --git a/tex/context/base/mkiv/lpdf-ini.lua b/tex/context/base/mkiv/lpdf-ini.lua
index 1b24269a6..f8cff2032 100644
--- a/tex/context/base/mkiv/lpdf-ini.lua
+++ b/tex/context/base/mkiv/lpdf-ini.lua
@@ -11,11 +11,12 @@ if not modules then modules = { } end modules ['lpdf-ini'] = {
local setmetatable, getmetatable, type, next, tostring, tonumber, rawset = setmetatable, getmetatable, type, next, tostring, tonumber, rawset
local char, byte, format, gsub, concat, match, sub, gmatch = string.char, string.byte, string.format, string.gsub, table.concat, string.match, string.sub, string.gmatch
local utfchar, utfbyte, utfvalues = utf.char, utf.byte, utf.values
-local sind, cosd, floor, max, min = math.sind, math.cosd, math.floor, math.max, math.min
+local sind, cosd, max, min = math.sind, math.cosd, math.max, math.min
local sort = table.sort
local lpegmatch, P, C, R, S, Cc, Cs = lpeg.match, lpeg.P, lpeg.C, lpeg.R, lpeg.S, lpeg.Cc, lpeg.Cs
local formatters = string.formatters
local isboolean = string.is_boolean
+local rshift = bit32.rshift
local report_objects = logs.reporter("backend","objects")
local report_finalizing = logs.reporter("backend","finalizing")
@@ -24,6 +25,8 @@ local report_blocked = logs.reporter("backend","blocked")
local implement = interfaces.implement
local two_strings = interfaces.strings[2]
+local context = context
+
-- In ConTeXt MkIV we use utf8 exclusively so all strings get mapped onto a hex
-- encoded utf16 string type between <>. We could probably save some bytes by using
-- strings between () but then we end up with escaped ()\ too.
@@ -66,18 +69,9 @@ local two_strings = interfaces.strings[2]
local pdf = pdf
local factor = number.dimenfactors.bp
-do
-
- local texget = tex.get
- local texset = tex.set
-
- function pdf.setmajorversion (n) texset("global","pdfmajorversion", n) end
- function pdf.getmajorversion ( ) return texget("pdfmajorversion") end
-
-end
-
local pdfsetinfo = pdf.setinfo
local pdfsetcatalog = pdf.setcatalog
+----- pdfsettrailerid = pdf.settrailerid
----- pdfsetnames = pdf.setnames
----- pdfsettrailer = pdf.settrailer
@@ -109,6 +103,8 @@ pdfdisablecommand("setpageattributes")
pdfdisablecommand("setpagesattributes")
pdfdisablecommand("registerannot")
+pdf.disablecommand = pdfdisablecommand
+
local trace_finalizers = false trackers.register("backend.finalizers", function(v) trace_finalizers = v end)
local trace_resources = false trackers.register("backend.resources", function(v) trace_resources = v end)
local trace_objects = false trackers.register("backend.objects", function(v) trace_objects = v end)
@@ -126,6 +122,65 @@ backends.pdf = pdfbackend
lpdf = lpdf or { }
local lpdf = lpdf
+lpdf.flags = lpdf.flags or { } -- will be filled later
+
+do
+
+ local setmajorversion = pdf.setmajorversion
+ local setminorversion = pdf.setminorversion
+ local getmajorversion = pdf.getmajorversion
+ local getminorversion = pdf.getminorversion
+
+ if not setmajorversion then
+
+ setmajorversion = function() end
+ getmajorversion = function() return 1 end
+
+ pdf.setmajorversion = setmajorversion
+ pdf.getmajorversion = getmajorversion
+
+ end
+
+ function lpdf.setversion(major,minor)
+ setmajorversion(major or 1)
+ setminorversion(minor or 7)
+ end
+
+ function lpdf.getversion(major,minor)
+ return getmajorversion(), getminorversion()
+ end
+
+ lpdf.majorversion = getmajorversion
+ lpdf.minorversion = getminorversion
+
+end
+
+do
+
+ local setcompresslevel = pdf.setcompresslevel
+ local setobjectcompresslevel = pdf.setobjcompresslevel
+ local getcompresslevel = pdf.getcompresslevel
+ local getobjectcompresslevel = pdf.getobjcompresslevel
+
+ local frozen = false
+
+ function lpdf.setcompression(level,objectlevel,freeze)
+ if not frozen then
+ setcompresslevel(level or 3)
+ setobjectcompresslevel(objectlevel or level or 3)
+ frozen = freeze
+ end
+ end
+
+ function lpdf.getcompression()
+ return getcompresslevel(), getobjectcompresslevel()
+ end
+
+ lpdf.compresslevel = getcompresslevel
+ lpdf.objectcompresslevel = getobjectcompresslevel
+
+end
+
local codeinjections = pdfbackend.codeinjections
local nodeinjections = pdfbackend.nodeinjections
@@ -201,8 +256,7 @@ end
-- if b < 0x10000 then
-- r[n] = format("%04x",b)
-- else
--- -- r[n] = format("%04x%04x",b/1024+0xD800,b%1024+0xDC00)
--- r[n] = format("%04x%04x",floor(b/1024),b%1024+0xDC00) --bit32.rshift(b,10)
+-- r[n] = format("%04x%04x",rshift(b,10),b%1024+0xDC00)
-- end
-- end
-- n = n + 1
@@ -216,8 +270,7 @@ local cache = table.setmetatableindex(function(t,k) -- can be made weak
if v < 0x10000 then
v = format("%04x",v)
else
- -- v = format("%04x%04x",v/1024+0xD800,v%1024+0xDC00)
- v = format("%04x%04x",floor(v/1024),v%1024+0xDC00)
+ v = format("%04x%04x",rshift(v,10),v%1024+0xDC00)
end
t[k] = v
return v
@@ -543,6 +596,8 @@ function lpdf.escaped(str)
return lpegmatch(escaped,str) or str
end
+local pdfnull, pdfboolean, pdfreference, pdfverbose
+
do
local p_null = { } setmetatable(p_null, mt_z)
@@ -618,9 +673,9 @@ function lpdf.reserveobject(name)
return r
end
+-- lpdf.reserveobject = pdfreserveobject
-- lpdf.immediateobject = pdfimmediateobject
-- lpdf.deferredobject = pdfdeferredobject
--- lpdf.object = pdfdeferredobject
-- lpdf.referenceobject = pdfreferenceobject
local pagereference = pdf.pageref -- tex.pdfpageref is obsolete
@@ -891,6 +946,15 @@ do
if not environment.initex then
trace_flush("info")
info.Type = nil
+ if lpdf.majorversion() > 1 then
+ for k, v in next, info do
+ if k == "CreationDate" or k == "ModDate" then
+ -- mandate >= 2.0
+ else
+ info[k] = nil
+ end
+ end
+ end
pdfsetinfo(info())
end
end
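
-- A small standalone version of the Info pruning above: with a major version
-- beyond 1 only the date entries survive; the rest is assumed to move to the XMP
-- metadata. Setting existing keys to nil during a next-based traversal is valid Lua.

local info         = { Title = "demo", Author = "me", CreationDate = "D:20240101", ModDate = "D:20240101" }
local majorversion = 2

if majorversion > 1 then
    for k in next, info do
        if k ~= "CreationDate" and k ~= "ModDate" then
            info[k] = nil
        end
    end
end

for k, v in next, info do
    print(k,v) -- only CreationDate and ModDate remain
end
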
@@ -1011,7 +1075,7 @@ end
function lpdf.rotationcm(a)
local s, c = sind(a), cosd(a)
- return format("%0.6F %0.6F %0.6F %0.6F 0 0 cm",c,s,-s,c)
+ return format("%.6F %.6F %.6F %.6F 0 0 cm",c,s,-s,c)
end
-- ! -> universaltime
@@ -1201,15 +1265,17 @@ do
local f_actual_text = formatters["/Span <> BDC"]
local context = context
- local pdfdirect = nodes.pool.pdfdirect
+ local pdfdirect = nodes.pool.pdfdirectliteral
-- todo: use tounicode from the font mapper
+ -- floor(unicode/1024) => rshift(unicode,10) -- better for 5.3
+
function codeinjections.unicodetoactualtext(unicode,pdfcode)
if unicode < 0x10000 then
return f_actual_text_one(unicode,pdfcode)
else
- return f_actual_text_two(unicode/1024+0xD800,unicode%1024+0xDC00,pdfcode)
+ return f_actual_text_two(rshift(unicode,10)+0xD800,unicode%1024+0xDC00,pdfcode)
end
end
@@ -1219,7 +1285,7 @@ do
elseif unicode < 0x10000 then
return f_actual_text_one_b(unicode)
else
- return f_actual_text_two_b(unicode/1024+0xD800,unicode%1024+0xDC00)
+ return f_actual_text_two_b(rshift(unicode,10)+0xD800,unicode%1024+0xDC00)
end
end
@@ -1233,7 +1299,7 @@ do
elseif unicode < 0x10000 then
return f_actual_text_one_b_not(unicode)
else
- return f_actual_text_two_b_not(unicode/1024+0xD800,unicode%1024+0xDC00)
+ return f_actual_text_two_b_not(rshift(unicode,10)+0xD800,unicode%1024+0xDC00)
end
end
@@ -1272,3 +1338,63 @@ implement { name = "lpdf_adddocumentcolorspace", arguments = two_strings, action
implement { name = "lpdf_adddocumentpattern", arguments = two_strings, actions = function(a,b) lpdf.adddocumentpattern (a,pdfverbose(b)) end }
implement { name = "lpdf_adddocumentshade", arguments = two_strings, actions = function(a,b) lpdf.adddocumentshade (a,pdfverbose(b)) end }
+-- more helpers: copy from lepd to lpdf
+
+function lpdf.copyconstant(v)
+ if v ~= nil then
+ return pdfconstant(v)
+ end
+end
+
+function lpdf.copyboolean(v)
+ if v ~= nil then
+ return pdfboolean(v)
+ end
+end
+
+function lpdf.copyunicode(v)
+ if v then
+ return pdfunicode(v)
+ end
+end
+
+function lpdf.copyarray(a)
+ if a then
+ local t = pdfarray()
+ local k = a.__kind
+ for i=1,#a do
+ t[i] = a(i)
+ end
+-- inspect(t)
+ return t
+ end
+end
+
+function lpdf.copydictionary(d)
+ if d then
+ local t = pdfdictionary()
+ for k, v in next, d do
+ t[k] = d(k)
+ end
+-- inspect(t)
+ return t
+ end
+end
+
+function lpdf.copynumber(v)
+ return v
+end
+
+function lpdf.copyinteger(v)
+ return v -- maybe checking or round ?
+end
+
+function lpdf.copyfloat(v)
+ return v
+end
+
+function lpdf.copystring(v)
+ if v then
+ return pdfstring(v)
+ end
+end
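
-- A minimal sketch of the freeze behaviour of lpdf.setcompression defined above:
-- once a caller freezes the levels, later calls are ignored. A plain table stands
-- in for the pdf.* backend calls.

local current = { level = 3, objectlevel = 3 }
local frozen  = false

local function setcompression(level,objectlevel,freeze)
    if not frozen then
        current.level       = level or 3
        current.objectlevel = objectlevel or level or 3
        frozen = freeze
    end
end

setcompression(9,9,true) -- e.g. a format setup locks the levels
setcompression(0)        -- ignored, the settings stay as they are
print(current.level, current.objectlevel) -- 9   9
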
diff --git a/tex/context/base/mkiv/lpdf-mis.lua b/tex/context/base/mkiv/lpdf-mis.lua
index dc3f8560a..07dc3dcf1 100644
--- a/tex/context/base/mkiv/lpdf-mis.lua
+++ b/tex/context/base/mkiv/lpdf-mis.lua
@@ -15,8 +15,9 @@ if not modules then modules = { } end modules ['lpdf-mis'] = {
-- referencing and references had to be tracked in multiple passes. Of
-- course there are a couple of more changes.
-local next, tostring = next, tostring
+local next, tostring, type = next, tostring, type
local format, gsub, formatters = string.format, string.gsub, string.formatters
+local flattened = table.flattened
local texset, texget = tex.set, tex.get
local backends, lpdf, nodes = backends, lpdf, nodes
@@ -25,10 +26,11 @@ local nodeinjections = backends.pdf.nodeinjections
local codeinjections = backends.pdf.codeinjections
local registrations = backends.pdf.registrations
-local copy_node = node.copy
+local nuts = nodes.nuts
+local copy_node = nuts.copy
-local nodepool = nodes.pool
-local pdfliteral = nodepool.pdfliteral
+local nodepool = nuts.pool
+local pdfpageliteral = nodepool.pdfpageliteral
local register = nodepool.register
local pdfdictionary = lpdf.dictionary
@@ -41,6 +43,7 @@ local pdfstring = lpdf.string
local pdfflushobject = lpdf.flushobject
local pdfflushstreamobject = lpdf.flushstreamobject
local pdfaction = lpdf.action
+local pdfminorversion = lpdf.minorversion
local formattedtimestamp = lpdf.pdftimestamp
local adddocumentextgstate = lpdf.adddocumentextgstate
@@ -65,11 +68,13 @@ local v_landscape = variables.landscape
local v_portrait = variables.portrait
local v_page = variables.page
local v_paper = variables.paper
+local v_attachment = variables.attachment
+local v_layer = variables.layer
-local positive = register(pdfliteral("/GSpositive gs"))
-local negative = register(pdfliteral("/GSnegative gs"))
-local overprint = register(pdfliteral("/GSoverprint gs"))
-local knockout = register(pdfliteral("/GSknockout gs"))
+local positive = register(pdfpageliteral("/GSpositive gs"))
+local negative = register(pdfpageliteral("/GSnegative gs"))
+local overprint = register(pdfpageliteral("/GSoverprint gs"))
+local knockout = register(pdfpageliteral("/GSknockout gs"))
local function initializenegative()
local a = pdfarray { 0, 1 }
@@ -261,15 +266,51 @@ lpdf.registerdocumentfinalizer(flushjavascripts,"javascripts")
-- -- --
-local pagespecs = {
- [v_none] = {
- },
+local plusspecs = {
[v_max] = {
mode = "FullScreen",
},
[v_bookmark] = {
mode = "UseOutlines",
},
+ [v_attachment] = {
+ mode = "UseAttachments",
+ },
+ [v_layer] = {
+ mode = "UseOC",
+ },
+ [v_fit] = {
+ fit = true,
+ },
+ [v_doublesided] = {
+ layout = "TwoColumnRight",
+ },
+ [v_fixed] = {
+ fixed = true,
+ },
+ [v_landscape] = {
+ duplex = "DuplexFlipShortEdge",
+ },
+ [v_portrait] = {
+ duplex = "DuplexFlipLongEdge",
+ },
+ [v_page] = {
+ duplex = "Simplex" ,
+ },
+ [v_paper] = {
+ paper = true,
+ },
+}
+
+local pagespecs = {
+ --
+ [v_max] = plusspecs[v_max],
+ [v_bookmark] = plusspecs[v_bookmark],
+ [v_attachment] = plusspecs[v_attachment],
+ [v_layer] = plusspecs[v_layer],
+ --
+ [v_none] = {
+ },
[v_fit] = {
mode = "UseNone",
fit = true,
@@ -322,38 +363,9 @@ local pagespecs = {
},
}
-local plusspecs = {
- [v_max] = {
- mode = "FullScreen",
- },
- [v_bookmark] = {
- mode = "UseOutlines",
- },
- [v_fit] = {
- fit = true,
- },
- [v_doublesided] = {
- layout = "TwoColumnRight",
- },
- [v_fixed] = {
- fixed = true,
- },
- [v_landscape] = {
- duplex = "DuplexFlipShortEdge",
- },
- [v_portrait] = {
- duplex = "DuplexFlipLongEdge",
- },
- [v_page] = {
- duplex = "Simplex" ,
- },
- [v_paper] = {
- paper = true,
- },
-}
-
local pagespec, topoffset, leftoffset, height, width, doublesided = "default", 0, 0, 0, 0, false
local cropoffset, bleedoffset, trimoffset, artoffset = 0, 0, 0, 0
+local marked = false
local copies = false
function codeinjections.setupcanvas(specification)
@@ -371,6 +383,7 @@ function codeinjections.setupcanvas(specification)
leftoffset = specification.leftoffset or 0
height = specification.height or texget("pageheight")
width = specification.width or texget("pagewidth")
+ marked = specification.print
--
copies = specification.copies
if copies and copies < 2 then
@@ -429,23 +442,36 @@ local function documentspecification()
if mode then
addtocatalog("PageMode",pdfconstant(mode))
end
- if fit or fixed or duplex or copies or paper then
+ local prints = nil
+ if marked then
+ local pages = structures.pages
+ local marked = pages.allmarked(marked)
+ local nofmarked = marked and #marked or 0
+ if nofmarked > 0 then
+ -- the spec is wrong in saying that numbering starts at 1 which of course makes
+ -- sense as most real documents start with page 0 .. sigh
+ for i=1,#marked do marked[i] = marked[i] - 1 end
+ prints = pdfarray(flattened(pages.toranges(marked)))
+ end
+ end
+ if fit or fixed or duplex or copies or paper or prints then
addtocatalog("ViewerPreferences",pdfdictionary {
FitWindow = fit and true or nil,
PrintScaling = fixed and pdfconstant("None") or nil,
Duplex = duplex and pdfconstant(duplex) or nil,
NumCopies = copies and copies or nil,
PickTrayByPDFSize = paper and true or nil,
+ PrintPageRange = prints or nil,
})
end
addtoinfo ("Trapped", pdfconstant("False")) -- '/Trapped' in /Info, 'Trapped' in XMP
- addtocatalog("Version", pdfconstant(format("1.%s",pdf.getminorversion())))
+ addtocatalog("Version", pdfconstant(format("1.%s",pdfminorversion())))
end
-- temp hack: the mediabox is not under our control and has a precision of 5 digits
local factor = number.dimenfactors.bp
-local f_value = formatters["%0.5F"]
+local f_value = formatters["%0.6F"]
local function boxvalue(n) -- we could share them
return pdfverbose(f_value(factor * n))
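
-- The PrintPageRange value above comes from marked page numbers made zero-based,
-- collapsed into ranges and flattened into one array. The real pages.toranges and
-- table.flattened helpers are assumed to behave like this small rewrite:

local marked = { 1, 2, 3, 7, 9, 10 }

for i=1,#marked do
    marked[i] = marked[i] - 1 -- the viewer counts pages from 0
end

local function toranges(t)
    local ranges = { }
    local first, last = t[1], t[1]
    for i=2,#t do
        local v = t[i]
        if v == last + 1 then
            last = v
        else
            ranges[#ranges+1] = { first, last }
            first, last = v, v
        end
    end
    ranges[#ranges+1] = { first, last }
    return ranges
end

local flat = { }
for _, r in ipairs(toranges(marked)) do
    flat[#flat+1] = r[1]
    flat[#flat+1] = r[2]
end

print(table.concat(flat,", ")) -- 0, 2, 6, 6, 8, 9
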
diff --git a/tex/context/base/mkiv/lpdf-mov.lua b/tex/context/base/mkiv/lpdf-mov.lua
index 87375e4ce..42ba6fb00 100644
--- a/tex/context/base/mkiv/lpdf-mov.lua
+++ b/tex/context/base/mkiv/lpdf-mov.lua
@@ -8,7 +8,8 @@ if not modules then modules = { } end modules ['lpdf-mov'] = {
local format = string.format
-local lpdf = lpdf
+local lpdf = lpdf
+local context = context
local nodeinjections = backends.pdf.nodeinjections
local pdfconstant = lpdf.constant
@@ -44,7 +45,7 @@ function nodeinjections.insertmovie(specification)
end
function nodeinjections.insertsound(specification)
- -- rmanaged in interaction: repeat, label, foundname
+ -- managed in interaction: repeat, label, foundname
local soundclip = interactions.soundclips.soundclip(specification.label)
if soundclip then
local controldict = pdfdictionary {
diff --git a/tex/context/base/mkiv/lpdf-nod.lua b/tex/context/base/mkiv/lpdf-nod.lua
index 985d05a82..e3c1778f2 100644
--- a/tex/context/base/mkiv/lpdf-nod.lua
+++ b/tex/context/base/mkiv/lpdf-nod.lua
@@ -24,10 +24,12 @@ local new_node = nuts.new
local nodepool = nuts.pool
local register = nodepool.register
-local pdforiginliteral = register(new_node("whatsit", whatsitcodes.pdfliteral)) setfield(pdforiginliteral,"mode",0) -- set_origin_code
-local pdfpageliteral = register(new_node("whatsit", whatsitcodes.pdfliteral)) setfield(pdfpageliteral, "mode",1) -- page_code
-local pdfdirectliteral = register(new_node("whatsit", whatsitcodes.pdfliteral)) setfield(pdfdirectliteral,"mode",2) -- direct_code
-local pdfrawliteral = register(new_node("whatsit", whatsitcodes.pdfliteral)) setfield(pdfrawliteral, "mode",3) -- raw_code
+local literalvalues = nodes.literalvalues
+
+local pdforiginliteral = register(new_node("whatsit", whatsitcodes.pdfliteral)) setfield(pdforiginliteral,"mode",literalvalues.origin)
+local pdfpageliteral = register(new_node("whatsit", whatsitcodes.pdfliteral)) setfield(pdfpageliteral, "mode",literalvalues.page)
+local pdfdirectliteral = register(new_node("whatsit", whatsitcodes.pdfliteral)) setfield(pdfdirectliteral,"mode",literalvalues.direct)
+local pdfrawliteral = register(new_node("whatsit", whatsitcodes.pdfliteral)) setfield(pdfrawliteral, "mode",literalvalues.raw)
local pdfsave = register(new_node("whatsit", whatsitcodes.pdfsave))
local pdfrestore = register(new_node("whatsit", whatsitcodes.pdfrestore))
@@ -37,23 +39,12 @@ local pdfsetmatrix = register(new_node("whatsit", whatsitcodes.pdfsetmatrix)
local variables = interfaces.variables
-local views = { -- beware, we do support the pdf keys but this is *not* official
- xyz = 0, [variables.standard] = 0,
- fit = 1, [variables.fit] = 1,
- fith = 2, [variables.width] = 2,
- fitv = 3, [variables.height] = 3,
- fitb = 4,
- fitbh = 5, [variables.minwidth] = 5,
- fitbv = 6, [variables.minheight] = 6,
- fitr = 7,
-}
-
function nodepool.pdforiginliteral(str) local t = copy_node(pdforiginliteral) setfield(t,"data",str) return t end
function nodepool.pdfpageliteral (str) local t = copy_node(pdfpageliteral ) setfield(t,"data",str) return t end
function nodepool.pdfdirectliteral(str) local t = copy_node(pdfdirectliteral) setfield(t,"data",str) return t end
function nodepool.pdfrawliteral (str) local t = copy_node(pdfrawliteral ) setfield(t,"data",str) return t end
-nodepool.pdfliteral = nodepool.pdfpageliteral
+nodepool.pdfliteral = nodepool.pdfpageliteral -- best is to use a specific one: origin | page | direct | raw
function nodepool.pdfsave()
return copy_node(pdfsave)
@@ -138,6 +129,17 @@ end
--
-- so we need to force a matrix.
+-- local views = { -- beware, we do support the pdf keys but this is *not* official
+-- xyz = 0, [variables.standard] = 0,
+-- fit = 1, [variables.fit] = 1,
+-- fith = 2, [variables.width] = 2,
+-- fitv = 3, [variables.height] = 3,
+-- fitb = 4,
+-- fitbh = 5, [variables.minwidth] = 5,
+-- fitbv = 6, [variables.minheight] = 6,
+-- fitr = 7,
+-- }
+
function nodepool.pdfdestination(w,h,d,name,view,n)
report("don't use node based destinations!")
os.exit()
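
-- The numeric whatsit modes from the removed lines map onto the new symbolic
-- names roughly like this; the actual nodes.literalvalues table is an assumption:

local literalvalues = { origin = 0, page = 1, direct = 2, raw = 3 }

local function literalmode(name)
    return literalvalues[name] or literalvalues.page -- page stays the default literal
end

print(literalmode("direct")) -- 2
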
diff --git a/tex/context/base/mkiv/lpdf-ren.lua b/tex/context/base/mkiv/lpdf-ren.lua
index 47075ee08..e9b22f382 100644
--- a/tex/context/base/mkiv/lpdf-ren.lua
+++ b/tex/context/base/mkiv/lpdf-ren.lua
@@ -9,8 +9,8 @@ if not modules then modules = { } end modules ['lpdf-ren'] = {
-- rendering
local tostring, tonumber, next = tostring, tonumber, next
-local format, rep = string.format, string.rep
local concat = table.concat
+local formatters = string.formatters
local settings_to_array = utilities.parsers.settings_to_array
local getrandom = utilities.randomizer.get
@@ -49,9 +49,12 @@ local addtocatalog = lpdf.addtocatalog
local escaped = lpdf.escaped
-local nodepool = nodes.pool
+local nuts = nodes.nuts
+local copy_node = nuts.copy
+
+local nodepool = nuts.pool
local register = nodepool.register
-local pdfliteral = nodepool.pdfliteral
+local pdfpageliteral = nodepool.pdfpageliteral
local pdf_ocg = pdfconstant("OCG")
local pdf_ocmd = pdfconstant("OCMD")
@@ -62,8 +65,6 @@ local pdf_design = pdfconstant("Design")
local pdf_toggle = pdfconstant("Toggle")
local pdf_setocgstate = pdfconstant("SetOCGState")
-local copy_node = node.copy
-
local pdf_print = {
[v_yes] = pdfdictionary { PrintState = pdf_on },
[v_no ] = pdfdictionary { PrintState = pdf_off },
@@ -238,33 +239,38 @@ function executers.togglelayer(arguments) return setlayer(pdf_toggle,arguments)
-- injection
+local f_bdc = formatters["/OC /%s BDC"]
+local s_emc = "EMC"
+
function codeinjections.startlayer(name) -- used in mp
if not name then
name = "unknown"
end
useviewerlayer(name)
- return format("/OC /%s BDC",escapednames[name])
+ return f_bdc(escapednames[name])
end
function codeinjections.stoplayer(name) -- used in mp
- return "EMC"
+ return s_emc
end
local cache = { }
+local stop = nil
function nodeinjections.startlayer(name)
local c = cache[name]
if not c then
useviewerlayer(name)
- c = register(pdfliteral(format("/OC /%s BDC",escapednames[name])))
+ c = register(pdfpageliteral(f_bdc(escapednames[name])))
cache[name] = c
end
return copy_node(c)
end
-local stop = register(pdfliteral("EMC"))
-
function nodeinjections.stoplayer()
+ if not stop then
+ stop = register(pdfpageliteral(s_emc))
+ end
return copy_node(stop)
end
@@ -280,7 +286,7 @@ function nodeinjections.startstackedlayer(s,t,first,last)
r[#r+1] = startlayer(values[t[i]])
end
r = concat(r," ")
- return pdfliteral(r)
+ return pdfpageliteral(r)
end
function nodeinjections.stopstackedlayer(s,t,first,last)
@@ -289,7 +295,7 @@ function nodeinjections.stopstackedlayer(s,t,first,last)
r[#r+1] = stoplayer()
end
r = concat(r," ")
- return pdfliteral(r)
+ return pdfpageliteral(r)
end
function nodeinjections.changestackedlayer(s,t1,first1,last1,t2,first2,last2)
@@ -301,7 +307,7 @@ function nodeinjections.changestackedlayer(s,t1,first1,last1,t2,first2,last2)
r[#r+1] = startlayer(values[t2[i]])
end
r = concat(r," ")
- return pdfliteral(r)
+ return pdfpageliteral(r)
end
-- transitions
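
-- A sketch of the per-layer caching used above: build the BDC literal for a layer
-- name once and reuse it afterwards. The real code registers pdfpageliteral nodes
-- and returns copies; plain strings are used here.

local cache = { }

local function startlayer(name)
    local c = cache[name]
    if not c then
        c = string.format("/OC /%s BDC",name)
        cache[name] = c
    end
    return c
end

print(startlayer("extras")) -- /OC /extras BDC
print(startlayer("extras")) -- same result, now served from the cache
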
diff --git a/tex/context/base/mkiv/lpdf-res.lua b/tex/context/base/mkiv/lpdf-res.lua
index ac9478488..8b00835ef 100644
--- a/tex/context/base/mkiv/lpdf-res.lua
+++ b/tex/context/base/mkiv/lpdf-res.lua
@@ -21,8 +21,9 @@ local saveboxresource = tex.saveboxresource
local useboxresource = tex.useboxresource
local getboxresource = tex.getboxresourcedimensions
-function codeinjections.registerboxresource(n)
- return saveboxresource(n,nil,lpdf.collectedresources(),true) -- direct, todo: accept functions as attr/resources
+function codeinjections.registerboxresource(n,offset)
+ local r = saveboxresource(n,nil,lpdf.collectedresources(),true,0,offset or 0) -- direct, todo: accept functions as attr/resources
+ return r
end
function codeinjections.restoreboxresource(index)
diff --git a/tex/context/base/mkiv/lpdf-swf.lua b/tex/context/base/mkiv/lpdf-swf.lua
index e40dc6378..0ac107f8b 100644
--- a/tex/context/base/mkiv/lpdf-swf.lua
+++ b/tex/context/base/mkiv/lpdf-swf.lua
@@ -10,17 +10,21 @@ if not modules then modules = { } end modules ['lpdf-swf'] = {
-- was using tex code. This is the official implementation.
local format, gsub = string.format, string.gsub
+local concat = table.concat
-local backends, lpdf = backends, lpdf
+local backends = backends
+local lpdf = lpdf
+local context = context
-local pdfconstant = lpdf.constant
-local pdfstring = lpdf.string
-local pdfdictionary = lpdf.dictionary
-local pdfarray = lpdf.array
-local pdfreference = lpdf.reference
-local pdfflushobject = lpdf.flushobject
+local pdfconstant = lpdf.constant
+local pdfstring = lpdf.string
+local pdfdictionary = lpdf.dictionary
+local pdfarray = lpdf.array
+local pdfreference = lpdf.reference
+local pdfflushobject = lpdf.flushobject
+local pdfsharedobject = lpdf.shareobjectreference
-local checkedkey = lpdf.checkedkey
+local checkedkey = lpdf.checkedkey
local codeinjections = backends.pdf.codeinjections
local nodeinjections = backends.pdf.nodeinjections
@@ -45,17 +49,18 @@ table.setmetatableindex(activations, function() return activations .click end)
table.setmetatableindex(deactivations,function() return deactivations.focus end)
local function insertswf(spec)
-
local width = spec.width
local height = spec.height
local filename = spec.foundname
local resources = spec.resources
local display = spec.display
local controls = spec.controls
+ local arguments = spec.arguments
local resources = resources and parametersets[resources]
local display = display and parametersets[display]
- local controls = controls and parametersets[controls] -- not yet used
+ local controls = controls and parametersets[controls] -- not yet used
+ local arguments = arguments and parametersets[arguments] -- not yet used
local preview = checkedkey(display,"preview","string")
local toolbar = checkedkey(display,"toolbar","boolean")
@@ -63,13 +68,16 @@ local function insertswf(spec)
local embeddedreference = codeinjections.embedfile { file = filename }
local flash = pdfdictionary {
- Subtype = pdfconstant("Flash"),
+ Subtype = pdfconstant("RichMediaConfiguration"),
Instances = pdfarray {
pdfdictionary {
- Asset = embeddedreference,
- Params = pdfdictionary {
- Binding = pdfconstant("Background") -- Foreground makes swf behave erratic
- }
+ Type = pdfconstant("RichMediaInstance"),
+ Asset = embeddedreference,
+ Subtype = pdfconstant("Flash"), -- 3D Sound Video ... somehow still Flash too
+ Params = pdfsharedobject(pdfdictionary {
+ Binding = pdfconstant("Background"), -- Foreground makes swf behave erratic
+ FlashVars = arguments and pdfstring(table.sequenced(arguments,"&")) or nil,
+ }),
},
},
}
@@ -98,7 +106,6 @@ local function insertswf(spec)
local root = file.dirname(filename)
local relativepaths = nil
local paths = nil
-
if resources then
local names = configuration.Assets.Names
local prefix = false
@@ -190,7 +197,8 @@ local function insertswf(spec)
Playcount = 1,
},
Presentation = pdfdictionary {
- PassContextClick = false,
+ -- PassContextClick = false,
+ PassContextClick = true,
Style = pdfconstant("Embedded"),
Toolbar = toolbar,
NavigationPane = false,
@@ -294,6 +302,7 @@ function backends.pdf.nodeinjections.insertswf(spec)
display = spec.display,
controls = spec.controls,
resources = spec.resources,
+ arguments = spec.arguments,
-- factor = spec.factor,
-- label = spec.label,
}
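
-- The FlashVars entry above is built with table.sequenced(arguments,"&"), a
-- ConTeXt helper assumed to join key=value pairs with the given separator; a
-- plain Lua approximation (key order not guaranteed):

local arguments = { autoplay = "true", loop = "false" }

local t = { }
for k, v in next, arguments do
    t[#t+1] = k .. "=" .. tostring(v)
end

print(table.concat(t,"&")) -- e.g. autoplay=true&loop=false
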
diff --git a/tex/context/base/mkiv/lpdf-tag.lua b/tex/context/base/mkiv/lpdf-tag.lua
index e33c8a811..f4ecfc8a6 100644
--- a/tex/context/base/mkiv/lpdf-tag.lua
+++ b/tex/context/base/mkiv/lpdf-tag.lua
@@ -55,7 +55,8 @@ local tonut = nuts.tonut
local tonode = nuts.tonode
local nodepool = nuts.pool
-local pdfliteral = nodepool.pdfliteral
+local pdfpageliteral = nodepool.pdfpageliteral
+local register = nodepool.register
local getid = nuts.getid
local getattr = nuts.getattr
@@ -63,14 +64,12 @@ local getprev = nuts.getprev
local getnext = nuts.getnext
local getlist = nuts.getlist
-local setfield = nuts.setfield
local setlink = nuts.setlink
local setlist = nuts.setlist
+local copy_node = nuts.copy
local traverse_nodes = nuts.traverse
local tosequence = nuts.tosequence
-local insert_before = nuts.insert_before
-local insert_after = nuts.insert_after
local structure_stack = { }
local structure_kids = pdfarray()
@@ -150,13 +149,15 @@ local function finishstructure()
pdfflushobject(structure_ref,structuretree)
addtocatalog("StructTreeRoot",pdfreference(structure_ref))
--
- local markinfo = pdfdictionary {
- Marked = pdfboolean(true),
- -- UserProperties = pdfboolean(true),
- -- Suspects = pdfboolean(true),
- -- AF = #embeddedfilelist > 0 and pdfreference(pdfflushobject(embeddedfilelist)) or nil,
- }
- addtocatalog("MarkInfo",pdfreference(pdfflushobject(markinfo)))
+ if lpdf.majorversion() == 1 then
+ local markinfo = pdfdictionary {
+ Marked = pdfboolean(true) or nil,
+ -- UserProperties = pdfboolean(true), -- maybe some day
+ -- Suspects = pdfboolean(true) or nil,
+ -- AF = #embeddedfilelist > 0 and pdfreference(pdfflushobject(embeddedfilelist)) or nil,
+ }
+ addtocatalog("MarkInfo",pdfreference(pdfflushobject(markinfo)))
+ end
--
for fulltag, element in sortedhash(elements) do -- sorting is easier on comparing pdf
pdfflushobject(element.knum,element.kids)
@@ -312,8 +313,14 @@ end
-- no need to adapt head, as we always operate on lists
+local EMCliteral = nil
+
function nodeinjections.addtags(head)
+ if not EMCliteral then
+ EMCliteral = register(pdfpageliteral("EMC"))
+ end
+
local last = nil
local ranges = { }
local range = nil
@@ -321,8 +328,9 @@ function nodeinjections.addtags(head)
local function collectranges(head,list)
for n in traverse_nodes(head) do
- local id = getid(n) -- 14: image, 8: literal (mp)
+ local id = getid(n)
if id == glyph_code then
+ -- maybe also disc
local at = getattr(n,a_tagged)
if not at then
range = nil
@@ -344,8 +352,7 @@ function nodeinjections.addtags(head)
end
last = nil
else
- local nl = getlist(n)
- collectranges(nl,n)
+ collectranges(getlist(n),n)
end
end
end
@@ -383,7 +390,6 @@ function nodeinjections.addtags(head)
local taglist = specification.taglist
local noftags = #taglist
local common = 0
-
if top then
for i=1,noftags >= noftop and noftop or noftags do
if top[i] == taglist[i] then
@@ -412,12 +418,12 @@ function nodeinjections.addtags(head)
prev = prv
end
end
-
if prev then
- literal = pdfliteral(makecontent(prev,id,specification))
+ literal = pdfpageliteral(makecontent(prev,id,specification))
elseif ignore then
- literal = pdfliteral(makeignore(specification))
+ literal = pdfpageliteral(makeignore(specification))
end
+
if literal then
local prev = getprev(start)
if prev then
@@ -427,14 +433,27 @@ function nodeinjections.addtags(head)
if list and getlist(list) == start then
setlist(list,literal)
end
+ local literal = copy_node(EMCliteral)
-- use insert instead:
- local literal = pdfliteral("EMC")
local next = getnext(stop)
if next then
setlink(literal,next)
end
setlink(stop,literal)
end
+
+-- if literal then
+-- if list and getlist(list) == start then
+-- setlink(literal,start)
+-- setlist(list,literal)
+-- else
+-- setlink(getprev(start),literal,start)
+-- end
+-- -- use insert instead:
+-- local literal = copy_node(EMCliteral)
+-- setlink(stop,literal,getnext(stop))
+-- end
+
top = taglist
noftop = noftags
end
@@ -558,9 +577,9 @@ end
-- end
--
-- if r > 0 then
--- local literal = pdfliteral(concat(result,"\n"))
+-- local literal = pdfpageliteral(concat(result,"\n"))
-- -- use insert instead:
--- local literal = pdfliteral(result)
+-- local literal = pdfpageliteral(result)
-- local prev = getprev(start)
-- if prev then
-- setlink(prev,literal)
@@ -582,7 +601,7 @@ end
-- for i=1,noftop do
-- result[i] = "EMC"
-- end
--- local literal = pdfliteral(concat(result,"\n"))
+-- local literal = pdfpageliteral(concat(result,"\n"))
-- -- use insert instead:
-- local next = getnext(last)
-- if next then
diff --git a/tex/context/base/mkiv/lpdf-u3d.lua b/tex/context/base/mkiv/lpdf-u3d.lua
index c9f4a0369..dfd4c1b06 100644
--- a/tex/context/base/mkiv/lpdf-u3d.lua
+++ b/tex/context/base/mkiv/lpdf-u3d.lua
@@ -461,7 +461,7 @@ local function insert3d(spec) -- width, height, factor, display, controls, label
},
ProcSet = pdfarray { pdfconstant("PDF"), pdfconstant("ImageC") },
}
- local pwd = pdfflushstreamobject(formatters["q /GS gs %F 0 0 %F 0 0 cm /IM Do Q"](factor*width,factor*height),pw)
+ local pwd = pdfflushstreamobject(formatters["q /GS gs %.6F 0 0 %.6F 0 0 cm /IM Do Q"](factor*width,factor*height),pw)
annot.AP = pdfdictionary {
N = pdfreference(pwd)
}
diff --git a/tex/context/base/mkiv/lpdf-wid.lua b/tex/context/base/mkiv/lpdf-wid.lua
index fceae49cb..03febbf01 100644
--- a/tex/context/base/mkiv/lpdf-wid.lua
+++ b/tex/context/base/mkiv/lpdf-wid.lua
@@ -6,7 +6,26 @@ if not modules then modules = { } end modules ['lpdf-wid'] = {
license = "see context related readme files"
}
-local gmatch, gsub, find, lower, format = string.gmatch, string.gsub, string.find, string.lower, string.format
+-- It's about time to give up on media in pdf and admit that pdf lost it to html.
+-- First we had movies and sound, quite easy to deal with, but obsolete now. Then we
+-- had renditions but they turned out to be unreliable from the start and look
+-- obsolete too or at least they are bound to the (obsolete) flash technology for
+-- rendering. They were already complex constructs. Now we have rich media which
+-- instead of providing a robust future proof framework for general media types
+-- again seems to depend on the viewer's built-in (yes, also kind of obsolete) flash
+-- technology, and we cannot expect this non-open technology to show up in open
+-- browsers. So, in the end we can best just use links to external resources to be
+-- future proof. Just look at the viewer preferences pane to see how fragile support
+-- is. Interestingly u3d support is kind of built in, while e.g. mp4 support relies
+-- on wrapping in swf. We used to stay ahead of the pack with support of the fancy
+-- pdf features but it backfires and is not worth the trouble. And yes, for control
+-- (even something simple like starting and stopping videos) one has to revert to JavaScript,
+-- the other fragile bit. And, now that adobe quits flash in 2020 we're without any
+-- video anyway. Also, it won't play on all platforms and devices so let's wait for
+-- html5 media in pdf then.
+
+local tonumber, next = tonumber, next
+local gmatch, gsub, find, lower = string.gmatch, string.gsub, string.find, string.lower
local stripstring = string.strip
local settings_to_array = utilities.parsers.settings_to_array
local settings_to_hash = utilities.parsers.settings_to_hash
@@ -137,8 +156,8 @@ comment_symbols.Default = Note
local function analyzesymbol(symbol,collection)
if not symbol or symbol == "" then
- return collection.Default, nil
- elseif collection[symbol] then
+ return collection and collection.Default, nil
+ elseif collection and collection[symbol] then
return collection[symbol], nil
else
local setn, setr, setd
@@ -160,6 +179,17 @@ local function analyzesymbol(symbol,collection)
end
end
+local function analyzenormalsymbol(symbol)
+ local appearance = pdfdictionary {
+ N = registeredsymbol(symbol),
+ }
+ local appearanceref = pdfshareobjectreference(appearance)
+ return appearanceref
+end
+
+codeinjections.analyzesymbol = analyzesymbol
+codeinjections.analyzenormalsymbol = analyzenormalsymbol
+
local function analyzelayer(layer)
-- todo: (specification.layer ~= "" and pdfreference(specification.layer)) or nil, -- todo: ref to layer
end
@@ -289,7 +319,8 @@ function codeinjections.embedfile(specification)
local d = pdfdictionary {
Type = pdfconstant("Filespec"),
F = pdfstring(savename),
- UF = pdfstring(savename),
+ -- UF = pdfstring(savename),
+ UF = pdfunicode(savename),
EF = pdfdictionary { F = pdfreference(f) },
Desc = title ~= "" and pdfunicode(title) or nil,
-- AFRelationship = pdfconstant("Source"), -- some day maybe, not mandate
@@ -363,7 +394,7 @@ function nodeinjections.attachfile(specification)
FS = aref,
Contents = pdfunicode(title),
Name = name,
- NM = pdfstring(format("attachment:%s",nofattachments)),
+ NM = pdfstring("attachment:"..nofattachments),
T = author ~= "" and pdfunicode(author) or nil,
Subj = subtitle ~= "" and pdfunicode(subtitle) or nil,
C = analyzecolor(specification.colorvalue,specification.colormodel),
@@ -371,9 +402,13 @@ function nodeinjections.attachfile(specification)
AP = appearance,
OC = analyzelayer(specification.layer),
}
- local width, height, depth = specification.width or 0, specification.height or 0, specification.depth
- local box = hpack_node(nodeinjections.annotation(width,height,depth,d()))
- box.width, box.height, box.depth = width, height, depth
+ local width = specification.width or 0
+ local height = specification.height or 0
+ local depth = specification.depth or 0
+ local box = hpack_node(nodeinjections.annotation(width,height,depth,d()))
+ box.width = width
+ box.height = height
+ box.depth = depth
return box
end
end
@@ -452,10 +487,12 @@ function nodeinjections.comment(specification) -- brrr: seems to be done twice
CA = analyzetransparency(specification.transparencyvalue),
OC = analyzelayer(specification.layer),
Name = name,
- NM = pdfstring(format("comment:%s",nofcomments)),
+ NM = pdfstring("comment:"..nofcomments),
AP = appearance,
}
- local width, height, depth = specification.width or 0, specification.height or 0, specification.depth
+ local width = specification.width or 0
+ local height = specification.height or 0
+ local depth = specification.depth or 0
local box
if usepopupcomments then
-- rather useless as we can hide/vide
@@ -473,7 +510,9 @@ function nodeinjections.comment(specification) -- brrr: seems to be done twice
else
box = hpack_node(nodeinjections.annotation(width,height,depth,d()))
end
- box.width, box.height, box.depth = width, height, depth -- redundant
+ box.width = width -- redundant
+ box.height = height -- redundant
+ box.depth = depth -- redundant
return box
end
diff --git a/tex/context/base/mkiv/lpdf-xmp.lua b/tex/context/base/mkiv/lpdf-xmp.lua
index 6153b198f..2eb573acb 100644
--- a/tex/context/base/mkiv/lpdf-xmp.lua
+++ b/tex/context/base/mkiv/lpdf-xmp.lua
@@ -98,6 +98,10 @@ pdf.setsuppressoptionalinfo(
local included = backends.included
+local pdfsettrailerid = pdf.settrailerid
+
+pdf.disablecommand("settrailerid")
+
function lpdf.settrailerid(v)
if v then
local b = toboolean(v) or v == ""
@@ -112,7 +116,7 @@ function lpdf.settrailerid(v)
else
report_info("using hashed trailer id %a (%a)",v,h)
end
- pdf.settrailerid(format("[<%s> <%s>]",h,h))
+ pdfsettrailerid(format("[<%s> <%s>]",h,h))
end
end
@@ -241,6 +245,8 @@ end
-- flushing
+local add_xmp_blob = true directives.register("backend.xmp",function(v) add_xmp_blob = v end)
+
local function flushxmpinfo()
commands.pushrandomseed()
commands.setrandomseed(os.time())
@@ -250,7 +256,8 @@ local function flushxmpinfo()
local documentid = "no unique document id here"
local instanceid = "no unique instance id here"
- local producer = format("LuaTeX-%i.%i.%s",math.div(version,100),math.mod(version,100),revision)
+ -- local producer = format("LuaTeX-%i.%02i.%s",math.div(version,100),math.mod(version,100),revision)
+ local producer = format("LuaTeX-%0.2f.%s",version/100,revision)
local creator = "LuaTeX + ConTeXt MkIV"
local time = lpdf.timestamp()
local fullbanner = status.banner
@@ -260,42 +267,45 @@ local function flushxmpinfo()
instanceid = "uuid:" .. os.uuid()
end
- pdfaddxmpinfo("DocumentID", documentid)
- pdfaddxmpinfo("InstanceID", instanceid)
- pdfaddxmpinfo("Producer", producer)
- pdfaddxmpinfo("CreatorTool", creator)
- pdfaddxmpinfo("CreateDate", time)
- pdfaddxmpinfo("ModifyDate", time)
- pdfaddxmpinfo("MetadataDate", time)
- pdfaddxmpinfo("PTEX.Fullbanner", fullbanner)
-
pdfaddtoinfo("Producer", producer)
pdfaddtoinfo("Creator", creator)
pdfaddtoinfo("CreationDate", time)
pdfaddtoinfo("ModDate", time)
-- pdfaddtoinfo("PTEX.Fullbanner", fullbanner) -- no checking done on existence
- local blob = xml.tostring(xml.first(xmp or valid_xmp(),"/x:xmpmeta"))
- local md = pdfdictionary {
- Subtype = pdfconstant("XML"),
- Type = pdfconstant("Metadata"),
- }
- if trace_xmp then
- report_xmp("data flushed, see log file")
- logs.pushtarget("logfile")
- report_xmp("start xmp blob")
- logs.newline()
- logs.writer(blob)
- logs.newline()
- report_xmp("stop xmp blob")
- logs.poptarget()
- end
- blob = format(xpacket,blob)
- if not verbose and pdf.getcompresslevel() > 0 then
- blob = gsub(blob,">%s+<","><")
+ if add_xmp_blob then
+
+ pdfaddxmpinfo("DocumentID", documentid)
+ pdfaddxmpinfo("InstanceID", instanceid)
+ pdfaddxmpinfo("Producer", producer)
+ pdfaddxmpinfo("CreatorTool", creator)
+ pdfaddxmpinfo("CreateDate", time)
+ pdfaddxmpinfo("ModifyDate", time)
+ pdfaddxmpinfo("MetadataDate", time)
+ pdfaddxmpinfo("PTEX.Fullbanner", fullbanner)
+
+ local blob = xml.tostring(xml.first(xmp or valid_xmp(),"/x:xmpmeta"))
+ local md = pdfdictionary {
+ Subtype = pdfconstant("XML"),
+ Type = pdfconstant("Metadata"),
+ }
+ if trace_xmp then
+ report_xmp("data flushed, see log file")
+ logs.pushtarget("logfile")
+ report_xmp("start xmp blob")
+ logs.newline()
+ logs.writer(blob)
+ logs.newline()
+ report_xmp("stop xmp blob")
+ logs.poptarget()
+ end
+ blob = format(xpacket,blob)
+ if not verbose and lpdf.compresslevel() > 0 then
+ blob = gsub(blob,">%s+<","><")
+ end
+ local r = pdfflushstreamobject(blob,md,false) -- uncompressed
+ lpdf.addtocatalog("Metadata",pdfreference(r))
end
- local r = pdfflushstreamobject(blob,md,false) -- uncompressed
- lpdf.addtocatalog("Metadata",pdfreference(r))
commands.poprandomseed() -- hack
end
diff --git a/tex/context/base/mkiv/luat-bas.mkiv b/tex/context/base/mkiv/luat-bas.mkiv
index cb00d8f55..b1af4da3e 100644
--- a/tex/context/base/mkiv/luat-bas.mkiv
+++ b/tex/context/base/mkiv/luat-bas.mkiv
@@ -13,35 +13,38 @@
\writestatus{loading}{ConTeXt Lua Macros / Basic Lua Libraries}
-\registerctxluafile{l-lua} {1.001} % before sandbox
-\registerctxluafile{l-sandbox} {1.001}
-\registerctxluafile{l-package} {1.001}
-\registerctxluafile{l-lpeg} {1.001}
-\registerctxluafile{l-function}{1.001}
-\registerctxluafile{l-string} {1.001}
-\registerctxluafile{l-table} {1.001}
-\registerctxluafile{l-boolean} {1.001}
-\registerctxluafile{l-number} {1.001}
-\registerctxluafile{l-math} {1.001}
-%registerctxluafile{l-aux} {1.001}
-\registerctxluafile{l-io} {1.001}
-\registerctxluafile{l-os} {1.001}
-\registerctxluafile{l-file} {1.001}
-\registerctxluafile{l-gzip} {1.001}
-\registerctxluafile{l-md5} {1.001}
-\registerctxluafile{l-dir} {1.001}
-\registerctxluafile{l-unicode} {1.001}
-%registerctxluafile{l-utils} {1.001}
-\registerctxluafile{l-url} {1.001}
-\registerctxluafile{l-set} {1.001}
+\registerctxluafile{l-lua} {} % before sandbox
+\registerctxluafile{l-macro} {}
+\registerctxluafile{l-sandbox} {}
+\registerctxluafile{l-package} {}
+\registerctxluafile{l-lpeg} {}
+\registerctxluafile{l-function}{}
+\registerctxluafile{l-string} {}
+\registerctxluafile{l-table} {}
+\registerctxluafile{l-boolean} {}
+\registerctxluafile{l-number} {}
+\registerctxluafile{l-math} {}
+%registerctxluafile{l-aux} {}
+\registerctxluafile{l-io} {}
+\registerctxluafile{l-os} {}
+\registerctxluafile{l-file} {}
+\registerctxluafile{l-gzip} {}
+\registerctxluafile{l-md5} {}
+\registerctxluafile{l-dir} {}
+\registerctxluafile{l-unicode} {optimize}
+%registerctxluafile{l-utils} {}
+\registerctxluafile{l-url} {}
+\registerctxluafile{l-set} {}
-% \registerctxluafile{socket.lua}{}
-% \registerctxluafile{ltn12.lua} {}
-% \registerctxluafile{mime.lua} {}
-% \registerctxluafile{http.lua} {}
-% \registerctxluafile{url.lua} {}
-% \registerctxluafile{tp.lua} {}
-% \registerctxluafile{ftp.lua} {}
-% %registerctxluafile{smtp.lua} {}
+\registerctxluafile{l-macro-imp-optimize}{}
+
+% \registerctxluafile{socket}{}
+% \registerctxluafile{ltn12} {}
+% \registerctxluafile{mime} {}
+% \registerctxluafile{http} {}
+% \registerctxluafile{url} {}
+% \registerctxluafile{tp} {}
+% \registerctxluafile{ftp} {}
+% %registerctxluafile{smtp} {}
\endinput
diff --git a/tex/context/base/mkiv/luat-cnf.lua b/tex/context/base/mkiv/luat-cnf.lua
index 4f2c6569e..b6ee15083 100644
--- a/tex/context/base/mkiv/luat-cnf.lua
+++ b/tex/context/base/mkiv/luat-cnf.lua
@@ -19,7 +19,7 @@ texconfig.shell_escape = 't'
luatex = luatex or { }
local luatex = luatex
-texconfig.error_line = 79 -- frozen
+texconfig.error_line = 79 -- frozen / large values can crash
texconfig.expand_depth = 10000
texconfig.half_error_line = 50 -- frozen
texconfig.hash_extra = 100000
@@ -67,7 +67,7 @@ function texconfig.init()
"gzip", "zip", "zlib", "lfs", "ltn12", "mime", "socket", "md5", "fio", "unicode", "utf",
},
extratex = {
- "epdf", "fontloader", "kpse", "mplib",
+ "epdf", "kpse", "mplib", -- "fontloader",
},
obsolete = {
"fontloader", -- can be filled by luat-log
@@ -119,16 +119,21 @@ function texconfig.init()
-- shortcut and helper
- local bytecode = lua.bytecode
+ local setbytecode = lua.setbytecode
+ local getbytecode = lua.getbytecode
local function init(start)
local i = start
local t = os.clock()
- while bytecode[i] do
- bytecode[i]() ;
- bytecode[i] = nil ;
- i = i + 1
- -- collectgarbage('step')
+ while true do
+ local b = getbytecode(i)
+ if b then
+ b() ;
+ setbytecode(i,nil) ;
+ i = i + 1
+ else
+ break
+ end
end
return i - start, os.clock() - t
end
diff --git a/tex/context/base/mkiv/luat-cod.lua b/tex/context/base/mkiv/luat-cod.lua
index 31860db78..91bb7c2e1 100644
--- a/tex/context/base/mkiv/luat-cod.lua
+++ b/tex/context/base/mkiv/luat-cod.lua
@@ -7,27 +7,39 @@ if not modules then modules = { } end modules ['luat-cod'] = {
}
local type, loadfile, tonumber = type, loadfile, tonumber
-local match, gsub, find, format = string.match, string.gsub, string.find, string.format
+local match, gsub, find, format, gmatch = string.match, string.gsub, string.find, string.format, string.gmatch
local texconfig, lua = texconfig, lua
-- some basic housekeeping
-texconfig.kpse_init = false
-texconfig.shell_escape = 't'
-texconfig.max_print_line = 100000
-texconfig.max_in_open = 1000
+texconfig.kpse_init = false
+texconfig.shell_escape = 't'
+
+texconfig.error_line = 79 -- frozen / large values can crash
+texconfig.expand_depth = 10000
+texconfig.half_error_line = 50 -- frozen
+texconfig.max_in_open = 1000
+texconfig.max_print_line = 100000
+texconfig.max_strings = 500000
+texconfig.nest_size = 1000
+texconfig.param_size = 25000
+texconfig.save_size = 100000
+texconfig.stack_size = 10000
-- registering bytecode chunks
-local bytecode = lua.bytecode or { }
+----- bytecode = lua.bytecode or { } -- we use functions
local bytedata = lua.bytedata or { }
local bytedone = lua.bytedone or { }
-lua.bytecode = bytecode -- built in anyway
+---.bytecode = bytecode
lua.bytedata = bytedata
lua.bytedone = bytedone
+local setbytecode = lua.setbytecode
+local getbytecode = lua.getbytecode
+
lua.firstbytecode = 501
lua.lastbytecode = lua.lastbytecode or (lua.firstbytecode - 1) -- as we load ourselves again ... maybe return earlier
@@ -35,24 +47,30 @@ function lua.registeredcodes()
return lua.lastbytecode - lua.firstbytecode + 1
end
--- no file.* functions yet
+-- no file.* and utilities.parsers.* functions yet
-function lua.registercode(filename,version)
+function lua.registercode(filename,options)
local barename = gsub(filename,"%.[%a%d]+$","")
if barename == filename then filename = filename .. ".lua" end
local basename = match(barename,"^.+[/\\](.-)$") or barename
if not bytedone[basename] then
- local code = environment.luafilechunk(filename)
+ local opts = { }
+ if type(options) == "string" and options ~= "" then
+ for s in gmatch(options,"([a-z]+)") do
+ opts[s] = true
+ end
+ end
+ local code = environment.luafilechunk(filename,false,opts.optimize)
if code then
bytedone[basename] = true
if environment.initex then
local n = lua.lastbytecode + 1
- bytedata[n] = { barename, version or "0.000" }
- bytecode[n] = code
+ bytedata[n] = { name = barename, options = opts }
+ setbytecode(n,code)
lua.lastbytecode = n
end
elseif environment.initex then
- texio.write_nl("\nerror loading file: " .. filename .. " (aborting)")
+ texio.write_nl(format("\nerror loading file: %s (aborting)",filename))
os.exit()
end
end
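-- A minimal sketch, assuming the parsing shown in registercode above, of what
-- the new options string boils down to (parseoptions is a hypothetical helper
-- name used only for illustration):

local function parseoptions(options)
    local opts = { }
    if type(options) == "string" and options ~= "" then
        for s in string.gmatch(options,"([a-z]+)") do
            opts[s] = true
        end
    end
    return opts
end

-- so \registerctxluafile{l-unicode}{optimize} yields { optimize = true } and the
-- flag travels on as environment.luafilechunk(filename,false,true).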
@@ -96,29 +114,39 @@ local targetpath = "."
-- environment.jobname = tex.jobname
-- environment.version = tostring(tex.toks.contextversiontoks)
+-- traditionally the revision has been a one-character string and only
+-- pdftex went beyond "9", but we test for it anyway
+
+if LUATEXENGINE == nil then
+ LUATEXENGINE = status.luatex_engine and string.lower(status.luatex_engine)
+ or (find(status.banner,"LuajitTeX",1,true) and "luajittex" or "luatex")
+end
+
if LUATEXVERSION == nil then
+ LUATEXVERSION = status.luatex_revision
LUATEXVERSION = status.luatex_version/100
- + tonumber(status.luatex_revision)/1000
+ -- + tonumber(LUATEXVERSION)/1000
+ + (tonumber(LUATEXVERSION) or (string.byte(LUATEXVERSION)-string.byte("a")+10))/1000
end
-if LUATEXENGINE == nil then
- LUATEXENGINE = status.luatex_engine and string.lower(status.luatex_engine)
- or (find(status.banner,"LuajitTeX") and "luajittex" or "luatex")
+if LUATEXFUNCTIONALITY == nil then
+ LUATEXFUNCTIONALITY = status.development_id or 6346
end
if JITSUPPORTED == nil then
- JITSUPPORTED = LUATEXENGINE == "luajittex" or jit
+ JITSUPPORTED = LUATEXENGINE == "luajittex" or jit
end
if INITEXMODE == nil then
- INITEXMODE = status.ini_version
+ INITEXMODE = status.ini_version
end
-environment.initex = INITEXMODE
-environment.initexmode = INITEXMODE
-environment.luatexversion = LUATEXVERSION
-environment.luatexengine = LUATEXENGINE
-environment.jitsupported = JITSUPPORTED
+environment.luatexengine = LUATEXENGINE
+environment.luatexversion = LUATEXVERSION
+environment.luatexfunctionality = LUATEXFUNCTIONALITY
+environment.jitsupported = JITSUPPORTED
+environment.initex = INITEXMODE
+environment.initexmode = INITEXMODE
if not environment.luafilechunk then
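-- A small worked example of the version computation above; the numbers are
-- illustrative, not taken from a specific binary:
--
--   luatex_version = 100, luatex_revision = "4" -> 100/100 + 4/1000 = 1.004
--   luatex_version = 110, luatex_revision = "a" -> 110/100 + (byte("a")-byte("a")+10)/1000 = 1.110
--
-- so a letter revision simply continues the numeric sequence at 10, 11, ...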
diff --git a/tex/context/base/mkiv/luat-cod.mkiv b/tex/context/base/mkiv/luat-cod.mkiv
index 9ce6161c3..823b918c3 100644
--- a/tex/context/base/mkiv/luat-cod.mkiv
+++ b/tex/context/base/mkiv/luat-cod.mkiv
@@ -67,12 +67,12 @@
%D Reporting the version of \LUA\ that we use is done as follows:
-\edef\luaversion{\ctxwrite{_VERSION}}
+\def\luaversion{\ctxwrite{LUAVERSION}}
\def\registerctxluafile#1#2{\ctxlua{lua.registercode("#1","#2")}}
\def\ctxloadluafile #1{\ctxlua{lua.registercode("#1")}}
-\registerctxluafile{luat-cod}{1.001}
+\registerctxluafile{luat-cod}{}
% \everydump\expandafter{\the\everydump\ctxlua{lua.finalize()}}
diff --git a/tex/context/base/mkiv/luat-env.lua b/tex/context/base/mkiv/luat-env.lua
index 5f2a0d281..5b46b4036 100644
--- a/tex/context/base/mkiv/luat-env.lua
+++ b/tex/context/base/mkiv/luat-env.lua
@@ -97,24 +97,16 @@ local function strippable(filename)
end
end
-function environment.luafilechunk(filename,silent) -- used for loading lua bytecode in the format
+function environment.luafilechunk(filename,silent,macros) -- used for loading lua bytecode in the format
filename = file.replacesuffix(filename, "lua")
local fullname = environment.luafile(filename)
if fullname and fullname ~= "" then
- local data = luautilities.loadedluacode(fullname,strippable,filename) -- can be overloaded
--- if trace_locating then
--- report_lua("loading file %a %s",fullname,not data and "failed" or "succeeded")
--- elseif not silent then
--- texio.write("<",data and "+ " or "- ",fullname,">")
--- end
+ local data = luautilities.loadedluacode(fullname,strippable,filename,macros)
if not silent then
report_lua("loading file %a %s",fullname,not data and "failed" or "succeeded")
end
return data
else
--- if trace_locating then
--- report_lua("unknown file %a",filename)
--- end
if not silent then
report_lua("unknown file %a",filename)
end
diff --git a/tex/context/base/mkiv/luat-fio.lua b/tex/context/base/mkiv/luat-fio.lua
index 302d17a66..806caefe6 100644
--- a/tex/context/base/mkiv/luat-fio.lua
+++ b/tex/context/base/mkiv/luat-fio.lua
@@ -15,12 +15,10 @@ texconfig.shell_escape = 't'
texconfig.max_print_line = 100000
texconfig.max_in_open = 1000
-if not resolvers.instance then
+if not resolvers.initialized() then
resolvers.reset()
- resolvers.instance.validfile = resolvers.validctxfile
-
-- we now load the file database as we might need files other than
-- tex and lua file on the given path
@@ -86,7 +84,7 @@ if not resolvers.instance then
register('find_opentype_file' , function(name) return findbinfile(name,"otf") end, true)
register('find_output_file' , function(name) return name end, true)
register('find_pk_file' , findpk, true)
- register('find_sfd_file' , function(name) return findbinfile(name,"sfd") end, true)
+ -- register('find_sfd_file' , function(name) return findbinfile(name,"sfd") end, true)
register('find_truetype_file' , function(name) return findbinfile(name,"ttf") end, true)
register('find_type1_file' , function(name) return findbinfile(name,"pfb") end, true)
register('find_vf_file' , function(name) return findbinfile(name,"vf") end, true)
@@ -100,7 +98,7 @@ if not resolvers.instance then
register('read_map_file' , function(file) return loadbinfile(file,"map") end, true)
-- output
register('read_pk_file' , function(file) return loadbinfile(file,"pk") end, true) -- 600dpi/manfnt.720pk
- register('read_sfd_file' , function(file) return loadbinfile(file,"sfd") end, true)
+ -- register('read_sfd_file' , function(file) return loadbinfile(file,"sfd") end, true)
register('read_vf_file' , function(file) return loadbinfile(file,"vf" ) end, true)
-- register('find_font_file' , function(name) return findbinfile(name,"ofm") end, true)
@@ -128,7 +126,7 @@ statistics.register("resource resolver", function()
scandata.n,
scandata.time,
scandata.shared,
- #resolvers.instance.foundintrees,
+ #resolvers.foundintrees(),
#scandata.paths > 0 and concat(scandata.paths," ") or ""
)
end)
diff --git a/tex/context/base/mkiv/luat-fmt.lua b/tex/context/base/mkiv/luat-fmt.lua
index f61c659fa..30c55eecc 100644
--- a/tex/context/base/mkiv/luat-fmt.lua
+++ b/tex/context/base/mkiv/luat-fmt.lua
@@ -39,6 +39,9 @@ local function secondaryflags()
if arguments.silent then
flags[#flags+1] = "--c:silent"
end
+ if arguments.errors then
+ flags[#flags+1] = "--c:errors"
+ end
if arguments.jit then
flags[#flags+1] = "--c:jiton"
end
@@ -82,6 +85,7 @@ local runners = {
function environment.make_format(name,arguments)
local engine = environment.ownmain or "luatex"
local silent = environment.arguments.silent
+ local errors = environment.arguments.errors
-- change to format path (early as we need expanded paths)
local olddir = dir.current()
local path = caches.getwritablepath("formats",engine) or "" -- maybe platform
diff --git a/tex/context/base/mkiv/luat-ini.lua b/tex/context/base/mkiv/luat-ini.lua
index 3ea8551c8..de7254922 100644
--- a/tex/context/base/mkiv/luat-ini.lua
+++ b/tex/context/base/mkiv/luat-ini.lua
@@ -16,10 +16,10 @@ userdata = userdata or { } -- for users (e.g. functions etc)
thirddata = thirddata or { } -- only for third party modules
moduledata = moduledata or { } -- only for development team
documentdata = documentdata or { } -- for users (e.g. raw data)
-parametersets = parametersets or { } -- experimental for team
+parametersets = parametersets or { } -- for special purposes
-table.setmetatableindex(moduledata,table.autokey)
-table.setmetatableindex(thirddata, table.autokey)
+table.setmetatableindex(moduledata,"table")
+table.setmetatableindex(thirddata, "table")
if not global then
global = _G
@@ -29,8 +29,12 @@ LUATEXVERSION = status.luatex_version/100
+ tonumber(status.luatex_revision)/1000
LUATEXENGINE = status.luatex_engine and string.lower(status.luatex_engine)
- or (string.find(status.banner,"LuajitTeX") and "luajittex" or "luatex")
+ or (string.find(status.banner,"LuajitTeX",1,true) and "luajittex" or "luatex")
JITSUPPORTED = LUATEXENGINE == "luajittex" or jit
INITEXMODE = status.ini_version
+
+function os.setlocale()
+ -- no need for a message
+end
diff --git a/tex/context/base/mkiv/luat-ini.mkiv b/tex/context/base/mkiv/luat-ini.mkiv
index e19b72d14..25ad7e139 100644
--- a/tex/context/base/mkiv/luat-ini.mkiv
+++ b/tex/context/base/mkiv/luat-ini.mkiv
@@ -98,8 +98,8 @@
{\setcatcodetable\luacatcodes
\the\everyluacode}
-\edef\luamajorversion{\ctxwrite{_MINORVERSION}}
-\edef\luaminorversion{\ctxwrite{_MAJORVERSION}}
+\edef\luamajorversion{\ctxwrite{LUAMINORVERSION}}
+\edef\luaminorversion{\ctxwrite{LUAMAJORVERSION}}
%D We need a way to pass strings safely to \LUA\ without the
%D need for tricky escaping. Compare:
diff --git a/tex/context/base/mkiv/luat-lib.mkiv b/tex/context/base/mkiv/luat-lib.mkiv
index c75b9c6b1..0df31a4e5 100644
--- a/tex/context/base/mkiv/luat-lib.mkiv
+++ b/tex/context/base/mkiv/luat-lib.mkiv
@@ -13,80 +13,80 @@
\writestatus{loading}{ConTeXt Lua Macros / Libraries}
-\registerctxluafile{util-str}{1.001}
-\registerctxluafile{util-tab}{1.001}
-\registerctxluafile{util-fil}{1.001}
-\registerctxluafile{util-sac}{1.001}
-\registerctxluafile{util-sto}{1.001} % could also be done in trac-deb.mkiv
-\registerctxluafile{util-pck}{1.001}
-\registerctxluafile{util-seq}{1.001}
-%registerctxluafile{util-mrg}{1.001} % not needed in context itself, only mtxrun
-%registerctxluafile{util-lua}{1.001} % moved
-\registerctxluafile{util-prs}{1.001}
-\registerctxluafile{util-fmt}{1.001}
-\registerctxluafile{util-dim}{1.001}
+\registerctxluafile{util-str}{}
+\registerctxluafile{util-tab}{}
+\registerctxluafile{util-fil}{optimize}
+\registerctxluafile{util-sac}{optimize}
+\registerctxluafile{util-sto}{} % could also be done in trac-deb.mkiv
+\registerctxluafile{util-pck}{}
+\registerctxluafile{util-seq}{}
+%registerctxluafile{util-mrg}{} % not needed in context itself, only mtxrun
+%registerctxluafile{util-lua}{} % moved
+\registerctxluafile{util-prs}{}
+\registerctxluafile{util-fmt}{}
+\registerctxluafile{util-dim}{}
-%registerctxluafile{trac-inf}{1.001}
-\registerctxluafile{trac-set}{1.001}
-\registerctxluafile{trac-log}{1.001}
-\registerctxluafile{trac-inf}{1.001}
-%registerctxluafile{trac-pro}{1.001}
-\registerctxluafile{util-lua}{1.001}
-\registerctxluafile{util-deb}{1.001} % could also be done in trac-deb.mkiv
+%registerctxluafile{trac-inf}{}
+\registerctxluafile{trac-set}{}
+\registerctxluafile{trac-log}{}
+\registerctxluafile{trac-inf}{}
+%registerctxluafile{trac-pro}{}
+\registerctxluafile{util-lua}{}
+\registerctxluafile{util-deb}{} % could also be done in trac-deb.mkiv
-\registerctxluafile{util-tpl}{1.001} % needs tracker
+\registerctxluafile{util-tpl}{} % needs tracker
-\registerctxluafile{util-sta}{1.001}
+\registerctxluafile{util-sta}{}
-\registerctxluafile{util-sbx}{1.001} % needs tracker and templates
+\registerctxluafile{util-sbx}{} % needs tracker and templates
-\registerctxluafile{data-ini}{1.001}
-\registerctxluafile{data-exp}{1.001}
-\registerctxluafile{data-env}{1.001}
-\registerctxluafile{data-tmp}{1.001}
-\registerctxluafile{data-met}{1.001}
-\registerctxluafile{data-res}{1.001}
-\registerctxluafile{data-inp}{1.001}
-\registerctxluafile{data-out}{1.001}
-\registerctxluafile{data-fil}{1.001}
+\registerctxluafile{data-ini}{}
+\registerctxluafile{data-exp}{}
+\registerctxluafile{data-env}{}
+\registerctxluafile{data-tmp}{}
+\registerctxluafile{data-met}{}
+\registerctxluafile{data-res}{}
+\registerctxluafile{data-inp}{}
+\registerctxluafile{data-out}{}
+\registerctxluafile{data-fil}{}
-\registerctxluafile{data-pre}{1.001}
-\registerctxluafile{data-tex}{1.001}
-\registerctxluafile{data-vir}{1.001}
-\registerctxluafile{data-bin}{1.001}
-\registerctxluafile{data-zip}{1.001}
-%registerctxluafile{data-crl}{1.001}
-\registerctxluafile{data-sch}{1.001}
-\registerctxluafile{data-tre}{1.001}
-\registerctxluafile{data-lua}{1.001}
-\registerctxluafile{data-ctx}{1.001}
-\registerctxluafile{data-con}{1.001}
-\registerctxluafile{data-use}{1.001}
-\registerctxluafile{data-aux}{1.001}
+\registerctxluafile{data-pre}{}
+\registerctxluafile{data-tex}{}
+\registerctxluafile{data-vir}{}
+\registerctxluafile{data-bin}{}
+\registerctxluafile{data-zip}{}
+%registerctxluafile{data-crl}{}
+\registerctxluafile{data-sch}{}
+\registerctxluafile{data-tre}{}
+\registerctxluafile{data-lua}{}
+\registerctxluafile{data-ctx}{}
+\registerctxluafile{data-con}{}
+\registerctxluafile{data-use}{}
+\registerctxluafile{data-aux}{}
-\registerctxluafile{util-lib}{1.001}
+\registerctxluafile{util-lib}{}
-\registerctxluafile{luat-cbk}{1.001}
-\registerctxluafile{luat-run}{1.001}
-\registerctxluafile{luat-fio}{1.001}
-\registerctxluafile{luat-cnf}{1.001}
-\registerctxluafile{luat-lua}{1.001}
-\registerctxluafile{luat-sto}{1.001}
-\registerctxluafile{luat-ini}{1.001}
-\registerctxluafile{util-env}{1.001}
-\registerctxluafile{luat-env}{1.001}
-\registerctxluafile{luat-exe}{1.001} % simplified
-\registerctxluafile{luat-iop}{1.001} % simplified
-\registerctxluafile{luat-bwc}{1.001}
-\registerctxluafile{trac-lmx}{1.001} % might become l-lmx or luat-lmx
-\registerctxluafile{luat-mac}{1.001}
-%registerctxluafile{luat-prp}{1.001} % for the moment of not much use
+\registerctxluafile{luat-cbk}{}
+\registerctxluafile{luat-run}{}
+\registerctxluafile{luat-fio}{}
+\registerctxluafile{luat-cnf}{}
+\registerctxluafile{luat-lua}{}
+\registerctxluafile{luat-sto}{}
+\registerctxluafile{luat-ini}{}
+\registerctxluafile{util-env}{}
+\registerctxluafile{luat-env}{}
+\registerctxluafile{luat-exe}{} % simplified
+\registerctxluafile{luat-iop}{} % simplified
+\registerctxluafile{luat-bwc}{}
+\registerctxluafile{trac-lmx}{} % might become l-lmx or luat-lmx
+\registerctxluafile{luat-mac}{}
+%registerctxluafile{luat-prp}{} % for the moment of not much use
-\registerctxluafile{lxml-tab}{1.001}
-\registerctxluafile{lxml-lpt}{1.001}
-\registerctxluafile{lxml-xml}{1.001}
-\registerctxluafile{lxml-aux}{1.001}
-\registerctxluafile{lxml-mis}{1.001}
+\registerctxluafile{lxml-tab}{}
+\registerctxluafile{lxml-lpt}{}
+\registerctxluafile{lxml-xml}{}
+\registerctxluafile{lxml-aux}{}
+\registerctxluafile{lxml-mis}{}
\normalprotected\def\writestatus#1#2{\ctxlua{logs.status([==[#1]==],[==[#2]==])}}
\normalprotected\def\writestring #1{\ctxlua{logs.writer([==[#1]==],"\string\n")}}
diff --git a/tex/context/base/mkiv/luat-mac.lua b/tex/context/base/mkiv/luat-mac.lua
index 3f1fe6751..44630b194 100644
--- a/tex/context/base/mkiv/luat-mac.lua
+++ b/tex/context/base/mkiv/luat-mac.lua
@@ -215,9 +215,9 @@ end
-- if filename and filesuffix(filename) == "mkvi" or lpegmatch(checker,str) == "mkvi" then
-- local oldsize = #str
-- str = lpegmatch(parser,str,1,true) or str
--- pushtarget("log")
+-- pushtarget("logfile")
-- report_macros("processed mkvi file %a, delta %s",filename,oldsize-#str)
--- poptarget("log")
+-- poptarget()
-- end
-- return str
-- end
@@ -231,9 +231,9 @@ local processors = { }
function processors.mkvi(str,filename)
local oldsize = #str
str = lpegmatch(parser,str,1,true) or str
- pushtarget("log")
+ pushtarget("logfile")
report_macros("processed mkvi file %a, delta %s",filename,oldsize-#str)
- poptarget("log")
+ poptarget()
return str
end
@@ -246,9 +246,9 @@ function processors.mkix(str,filename) -- we could intercept earlier so that cac
end
local oldsize = #str
str = convertlmxstring(str,document.variables,false) or str
- pushtarget("log")
+ pushtarget("logfile")
report_macros("processed mkix file %a, delta %s",filename,oldsize-#str)
- poptarget("log")
+ poptarget()
return str
end
@@ -262,9 +262,9 @@ function processors.mkxi(str,filename)
local oldsize = #str
str = convertlmxstring(str,document.variables,false) or str
str = lpegmatch(parser,str,1,true) or str
- pushtarget("log")
+ pushtarget("logfile")
report_macros("processed mkxi file %a, delta %s",filename,oldsize-#str)
- poptarget("log")
+ poptarget()
return str
end
@@ -283,9 +283,9 @@ function macros.processmkvi(str,filename)
if filename and filesuffix(filename) == "mkvi" or lpegmatch(checker,str) == "mkvi" then
local oldsize = #str
str = lpegmatch(parser,str,1,true) or str
- pushtarget("log")
+ pushtarget("logfile")
report_macros("processed mkvi file %a, delta %s",filename,oldsize-#str)
- poptarget("log")
+ poptarget()
end
return str
end
@@ -313,9 +313,9 @@ if resolvers.schemes then
io.savedata(cachename,str)
else
local result = lpegmatch(parser,str,1,true) or str
- pushtarget("log")
+ pushtarget("logfile")
report_macros("processed scheme %a, delta %s",filename,#str-#result)
- poptarget("log")
+ poptarget()
io.savedata(cachename,result)
end
end
diff --git a/tex/context/base/mkiv/luat-run.lua b/tex/context/base/mkiv/luat-run.lua
index 372bbcbfa..59fb0b937 100644
--- a/tex/context/base/mkiv/luat-run.lua
+++ b/tex/context/base/mkiv/luat-run.lua
@@ -6,6 +6,7 @@ if not modules then modules = { } end modules ['luat-run'] = {
license = "see context related readme files"
}
+local next = next
local format, find = string.format, string.find
local insert, remove = table.insert, table.remove
@@ -20,11 +21,13 @@ local report_lua = logs.reporter("system","lua")
local report_tex = logs.reporter("system","status")
local report_tempfiles = logs.reporter("resolvers","tempfiles")
-luatex = luatex or { }
-local luatex = luatex
+luatex = luatex or { }
+local luatex = luatex
+local synctex = luatex.synctex
-if not luatex.synctex then
- luatex.synctex = table.setmetatableindex(function() return function() end end)
+if not synctex then
+ synctex = table.setmetatableindex(function() return function() end end)
+ luatex.synctex = synctex
end
local startactions = { }
@@ -50,20 +53,33 @@ local function stop_run()
for i=1,#stopactions do
stopactions[i]()
end
+ local quit = logs.finalactions()
if trace_job_status then
statistics.show()
end
if trace_tex_status then
+ logs.newline()
for k, v in table.sortedhash(status.list()) do
report_tex("%S=%S",k,v)
end
end
+ if quit then
+ if status.setexitcode then
+ status.setexitcode(1)
+ if type(quit) == "table" then
+ logs.newline()
+ report_tex("quitting due to: %, t",quit)
+ logs.newline()
+ end
+ end
+ end
if logs.stop_run then
logs.stop_run()
end
end
local function start_shipout_page()
+ synctex.start()
logs.start_page_number()
end
@@ -72,7 +88,7 @@ local function stop_shipout_page()
for i=1,#pageactions do
pageactions[i]()
end
- luatex.synctex.flush()
+ synctex.stop()
end
local function report_output_pages()
@@ -95,24 +111,35 @@ local function pre_dump_actions()
-- statistics.savefmtstatus("\jobname","\contextversion","context.tex")
end
+local function wrapup_synctex()
+ synctex.wrapup()
+end
+
-- this can be done later
-callbacks.register('start_run', start_run, "actions performed at the beginning of a run")
-callbacks.register('stop_run', stop_run, "actions performed at the end of a run")
+callbacks.register('start_run', start_run, "actions performed at the beginning of a run")
+callbacks.register('stop_run', stop_run, "actions performed at the end of a run")
+
+---------.register('show_open', show_open, "actions performed when opening a file")
+---------.register('show_close', show_close, "actions performed when closing a file")
----------.register('show_open', show_open, "actions performed when opening a file")
----------.register('show_close', show_close, "actions performed when closing a file")
+callbacks.register('report_output_pages', report_output_pages, "actions performed when reporting pages")
+callbacks.register('report_output_log', report_output_log, "actions performed when reporting log file")
-callbacks.register('report_output_pages', report_output_pages, "actions performed when reporting pages")
-callbacks.register('report_output_log', report_output_log, "actions performed when reporting log file")
+callbacks.register('start_page_number', start_shipout_page, "actions performed at the beginning of a shipout")
+callbacks.register('stop_page_number', stop_shipout_page, "actions performed at the end of a shipout")
-callbacks.register('start_page_number', start_shipout_page, "actions performed at the beginning of a shipout")
-callbacks.register('stop_page_number', stop_shipout_page, "actions performed at the end of a shipout")
+callbacks.register('process_input_buffer', false, "actions performed when reading data")
+callbacks.register('process_output_buffer', false, "actions performed when writing data")
-callbacks.register('process_input_buffer', false, "actions performed when reading data")
-callbacks.register('process_output_buffer', false, "actions performed when writing data")
+callbacks.register("pre_dump", pre_dump_actions, "lua related finalizers called before we dump the format") -- comes after \everydump
-callbacks.register("pre_dump", pre_dump_actions, "lua related finalizers called before we dump the format") -- comes after \everydump
+if LUATEXFUNCTIONALITY and LUATEXFUNCTIONALITY > 6505 then
+ callbacks.register("finish_synctex", wrapup_synctex, "rename temporary synctex file")
+ callbacks.register('wrapup_run', false, "actions performed after closing files")
+else
+ callbacks.register("finish_synctex_callback", wrapup_synctex, "rename temporary synctex file")
+end
-- an example:
@@ -180,7 +207,7 @@ local function report_start(left,name)
-- report_load("%s > %s",types[left],name or "?")
report_load("type %a, name %a",types[left],name or "?")
end
- elseif find(name,"virtual://") then
+ elseif find(name,"virtual://",1,true) then
insert(stack,false)
else
insert(stack,name)
@@ -188,7 +215,7 @@ local function report_start(left,name)
level = level + 1
-- report_open("%i > %i > %s",level,total,name or "?")
report_open("level %i, order %i, name %a",level,total,name or "?")
- luatex.synctex.setfilename(name)
+ synctex.setfilename(name)
end
end
@@ -199,6 +226,7 @@ local function report_stop(right)
-- report_close("%i > %i > %s",level,total,name or "?")
report_close("level %i, order %i, name %a",level,total,name or "?")
level = level - 1
+ synctex.setfilename(stack[#stack] or tex.jobname)
end
end
end
diff --git a/tex/context/base/mkiv/luat-sto.lua b/tex/context/base/mkiv/luat-sto.lua
index b04d655c2..e67830b0d 100644
--- a/tex/context/base/mkiv/luat-sto.lua
+++ b/tex/context/base/mkiv/luat-sto.lua
@@ -11,8 +11,9 @@ if not modules then modules = { } end modules ['luat-sto'] = {
local type, next, setmetatable, getmetatable, collectgarbage = type, next, setmetatable, getmetatable, collectgarbage
local gmatch, format = string.gmatch, string.format
local serialize, concat, sortedhash = table.serialize, table.concat, table.sortedhash
-local bytecode = lua.bytecode
+local setbytecode = lua.setbytecode
local strippedloadstring = utilities.lua.strippedloadstring
+local loadstring = utilities.lua.loadstring
local formatters = string.formatters
local trace_storage = false
@@ -53,36 +54,6 @@ local n = 0 -- is that one used ?
if environment.initex then
- -- local function dump()
- -- local max = storage.max
- -- for i=1,#data do
- -- local d = data[i]
- -- local message, original, target = d[1], d[2] ,d[3]
- -- local c, code, name = 0, { }, nil
- -- -- we have a nice definer for this
- -- for str in gmatch(target,"([^%.]+)") do
- -- if name then
- -- name = name .. "." .. str
- -- else
- -- name = str
- -- end
- -- c = c + 1 ; code[c] = formatters["%s = %s or { }"](name,name)
- -- end
- -- max = max + 1
- -- if trace_storage then
- -- c = c + 1 ; code[c] = formatters["print('restoring %s from slot %s')"](message,max)
- -- end
- -- c = c + 1 ; code[c] = serialize(original,name)
- -- if trace_storage then
- -- report_storage('saving %a in slot %a, size %s',message,max,#code[c])
- -- end
- -- -- we don't need tracing in such tables
- -- bytecode[max] = strippedloadstring(concat(code,"\n"),storage.strip,format("slot %s (%s)",max,name))
- -- collectgarbage("step")
- -- end
- -- storage.max = max
- -- end
-
local function dump()
local max = storage.max
local strip = storage.strip
@@ -105,7 +76,15 @@ if environment.initex then
end
-- we don't need tracing in such tables
dumped = concat({ definition, comment, dumped },"\n")
- bytecode[max] = strippedloadstring(dumped,strip,formatters["slot %s (%s)"](max,name))
+ local code = nil
+ local name = formatters["slot %s (%s)"](max,name)
+ if LUAVERSION >= 5.3 and LUATEXFUNCTIONALITY >= 6454 then
+ local code = loadstring(dumped,name)
+ setbytecode(max,code,strip)
+ else
+ local code = strippedloadstring(dumped,name,strip)
+ setbytecode(max,code)
+ end
collectgarbage("step")
end
storage.max = max
@@ -131,18 +110,6 @@ function lua.collectgarbage(threshold)
end
end
--- -- we also need to count at generation time (nicer for message)
---
--- if lua.bytecode then -- from 0 upwards
--- local i, b = storage.min, lua.bytecode
--- while b[i] do
--- storage.noftables = i
--- b[i]()
--- b[i] = nil
--- i = i + 1
--- end
--- end
-
statistics.register("stored bytecode data", function()
local nofmodules = (storage.nofmodules > 0 and storage.nofmodules) or (status.luabytecodes - lua.firstbytecode - 1)
local nofdumps = (storage.noftables > 0 and storage.noftables ) or storage.max-storage.min + 1
diff --git a/tex/context/base/mkiv/luat-usr.mkiv b/tex/context/base/mkiv/luat-usr.mkiv
index 92d40010c..760de9f21 100644
--- a/tex/context/base/mkiv/luat-usr.mkiv
+++ b/tex/context/base/mkiv/luat-usr.mkiv
@@ -15,7 +15,7 @@
\unprotect
-\registerctxluafile{luat-usr}{1.001}
+\registerctxluafile{luat-usr}{}
%D A few goodies:
%D
diff --git a/tex/context/base/mkiv/lxml-aux.lua b/tex/context/base/mkiv/lxml-aux.lua
index ee0909cbf..78cf1d6bd 100644
--- a/tex/context/base/mkiv/lxml-aux.lua
+++ b/tex/context/base/mkiv/lxml-aux.lua
@@ -24,7 +24,7 @@ local type, next, setmetatable, getmetatable = type, next, setmetatable, getmeta
local insert, remove, fastcopy, concat = table.insert, table.remove, table.fastcopy, table.concat
local gmatch, gsub, format, find, strip = string.gmatch, string.gsub, string.format, string.find, string.strip
local utfbyte = utf.byte
-local lpegmatch = lpeg.match
+local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
local striplinepatterns = utilities.strings.striplinepatterns
local function report(what,pattern,c,e)
@@ -412,7 +412,9 @@ local function include(xmldata,pattern,attribute,recursive,loaddata,level)
end
local data = nil
if name and name ~= "" then
- data = loaddata(name) or ""
+ local d, n = loaddata(name)
+ data = d or ""
+ name = n or name
if trace_inclusions then
report_xml("including %s bytes from %a at level %s by pattern %a and attribute %a (%srecursing)",#data,name,level,pattern,attribute or "",recursive and "" or "not ")
end
@@ -423,6 +425,9 @@ local function include(xmldata,pattern,attribute,recursive,loaddata,level)
-- for the moment hard coded
epdt[ek.ni] = xml.escaped(data) -- d[k] = xml.escaped(data)
else
+local settings = xmldata.settings
+local savedresource = settings.currentresource
+settings.currentresource = name
local xi = xmlinheritedconvert(data,xmldata)
if not xi then
epdt[ek.ni] = "" -- xml.empty(d,k)
@@ -433,6 +438,7 @@ local function include(xmldata,pattern,attribute,recursive,loaddata,level)
local child = xml.body(xi) -- xml.assign(d,k,xi)
child.__p__ = ekrt
child.__f__ = name -- handy for tracing
+child.cf = name
epdt[ek.ni] = child
local settings = xmldata.settings
local inclusions = settings and settings.inclusions
@@ -453,6 +459,7 @@ local function include(xmldata,pattern,attribute,recursive,loaddata,level)
end
end
end
+settings.currentresource = savedresource
end
end
end
@@ -502,15 +509,13 @@ function xml.badinclusions(e,sorted)
return getinclusions("badinclusions",e,sorted)
end
-local b_collapser = lpeg.patterns.b_collapser
-local m_collapser = lpeg.patterns.m_collapser
-local e_collapser = lpeg.patterns.e_collapser
+local b_collapser = lpegpatterns.b_collapser
+local m_collapser = lpegpatterns.m_collapser
+local e_collapser = lpegpatterns.e_collapser
-local b_stripper = lpeg.patterns.b_stripper
-local m_stripper = lpeg.patterns.m_stripper
-local e_stripper = lpeg.patterns.e_stripper
-
-local lpegmatch = lpeg.match
+local b_stripper = lpegpatterns.b_stripper
+local m_stripper = lpegpatterns.m_stripper
+local e_stripper = lpegpatterns.e_stripper
local function stripelement(e,nolines,anywhere)
local edt = e.dt
@@ -1017,3 +1022,29 @@ function xml.totable(x,strip,flat)
return convert(x,strip,flat)
end
end
+
+-- namespace, name, attributes
+-- name, attributes
+-- name
+
+function xml.rename(e,namespace,name,attributes)
+ if type(e) ~= "table" or not e.tg then
+ return
+ end
+ if type(name) == "table" then
+ attributes = name
+ name = namespace
+ namespace = ""
+ elseif type(name) ~= "string" then
+ attributes = { }
+ name = namespace
+ namespace = ""
+ end
+ if type(attributes) ~= "table" then
+ attributes = { }
+ end
+ e.ns = namespace
+ e.rn = namespace
+ e.tg = name
+ e.at = attributes
+end
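-- A minimal usage sketch of the renamer above; the element e and the class
-- attribute are hypothetical:
--
--   xml.rename(e,"m","mrow",{ class = "wrapped" }) -- namespace, name, attributes
--   xml.rename(e,"mrow",{ class = "wrapped" })     -- name, attributes (namespace cleared)
--   xml.rename(e,"mrow")                           -- name only, attributes reset to { }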
diff --git a/tex/context/base/mkiv/lxml-css.lua b/tex/context/base/mkiv/lxml-css.lua
index b2198f341..a4d15ba1f 100644
--- a/tex/context/base/mkiv/lxml-css.lua
+++ b/tex/context/base/mkiv/lxml-css.lua
@@ -12,10 +12,13 @@ local topattern, is_empty = string.topattern, string.is_empty
local P, S, C, R, Cb, Cg, Carg, Ct, Cc, Cf, Cs = lpeg.P, lpeg.S, lpeg.C, lpeg.R, lpeg.Cb, lpeg.Cg, lpeg.Carg, lpeg.Ct, lpeg.Cc, lpeg.Cf, lpeg.Cs
local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
local sort = table.sort
+local setmetatableindex = table.setmetatableindex
xml.css = xml.css or { }
local css = xml.css
+local getid = lxml.getid
+
if not number.dimenfactors then
require("util-dim.lua")
end
@@ -26,6 +29,9 @@ local cmf = 1/dimenfactors.cm
local mmf = 1/dimenfactors.mm
local inf = 1/dimenfactors["in"]
+local whitespace = lpegpatterns.whitespace
+local skipspace = whitespace^0
+
local percentage, exheight, emwidth, pixels
if tex then
@@ -62,7 +68,7 @@ local validdimen = Cg(lpegpatterns.number,'a') * (
+ Cb('a') * Carg(1) / pixels
)
-local pattern = (validdimen * lpegpatterns.whitespace^0)^1
+local pattern = (validdimen * skipspace)^1
-- todo: default if ""
@@ -332,12 +338,12 @@ local function s_attribute(list,collected,c,negate,str,what,value)
end
elseif what == 2 then
-- todo: lpeg
- if find(v,value) then
+ if find(v,value) then -- value can be a pattern
ok = not negate
end
elseif what == 3 then
-- todo: lpeg
- if find(v," ") then
+ if find(v," ",1,true) then
for s in gmatch(v,"[^ ]+") do
if s == value then
ok = not negate
@@ -645,9 +651,7 @@ end
local P, R, S, C, Cs, Ct, Cc, Carg, lpegmatch = lpeg.P, lpeg.R, lpeg.S, lpeg.C, lpeg.Cs, lpeg.Ct, lpeg.Cc, lpeg.Carg, lpeg.match
-local whitespace = lpegpatterns.whitespace
local p_number = lpegpatterns.integer / tonumber
-local p_space = whitespace^0
local p_key = C((R("az","AZ","09") + S("_-"))^1)
local p_left = S("#.[],:()")
@@ -657,10 +661,10 @@ local p_value = C((1-P("]"))^0)
local p_unquoted = (P('"')/"") * C((1-P('"'))^0) * (P('"')/"")
+ (1-P("]"))^1
local p_element = Ct( (
- P(">") * p_space * Cc(s_element_a) +
- P("+") * p_space * Cc(s_element_b) +
- P("~") * p_space * Cc(s_element_c) +
- Cc(s_element_d)
+ P(">") * skipspace * Cc(s_element_a) +
+ P("+") * skipspace * Cc(s_element_b) +
+ P("~") * skipspace * Cc(s_element_c) +
+ Cc(s_element_d)
) * p_tag )
local p_attribute = P("[") * Ct(Cc(s_attribute) * p_key * (
P("=" ) * Cc(1) * Cs( p_unquoted)
@@ -670,16 +674,16 @@ local p_attribute = P("[") * Ct(Cc(s_attribute) * p_key * (
+ P("~=") * Cc(3) * Cs( p_unquoted)
)^0 * P("]"))
-local p_separator = p_space * P(",") * p_space
+local p_separator = skipspace * P(",") * skipspace
-local p_formula = p_space * P("(")
- * p_space
+local p_formula = skipspace * P("(")
+ * skipspace
* (
- p_number * p_space * (C("n") * p_space * (p_number + Cc(0)))^-1
+ p_number * skipspace * (C("n") * skipspace * (p_number + Cc(0)))^-1
+ P("even") * Cc(0) * Cc("n") * Cc(2)
+ P("odd") * Cc(-1) * Cc("n") * Cc(2)
)
- * p_space
+ * skipspace
* P(")")
local p_step = P(".") * Ct(Cc(s_attribute) * Cc("class") * Cc(3) * p_tag)
@@ -699,13 +703,13 @@ local p_step = P(".") * Ct(Cc(s_attribute) * Cc("class") * Cc(3) * p
+ P(":empty") * Ct(Cc(s_empty) )
+ P(":root") * Ct(Cc(s_root) )
-local p_not = P(":not") * Cc(true) * p_space * P("(") * p_space * p_step * p_space * P(")")
-local p_yes = Cc(false) * p_space * p_step
+local p_not = P(":not") * Cc(true) * skipspace * P("(") * skipspace * p_step * skipspace * P(")")
+local p_yes = Cc(false) * skipspace * p_step
-local p_stepper = Ct((p_space * (p_not+p_yes))^1)
-local p_steps = Ct((p_stepper * p_separator^0)^1) * p_space * (P(-1) + function() print("error") end)
+local p_stepper = Ct((skipspace * (p_not+p_yes))^1)
+local p_steps = Ct((p_stepper * p_separator^0)^1) * skipspace * (P(-1) + function() print("error") end)
-local cache = table.setmetatableindex(function(t,k)
+local cache = setmetatableindex(function(t,k)
local v = lpegmatch(p_steps,k) or false
t[k] = v
return v
@@ -877,10 +881,115 @@ xml.applyselector= selector
-- local s = [[ g:empty ]]
-- local s = [[ g:root ]]
+-- local c = css.applyselector(xml.convert(t),s) for i=1,#c do print(xml.tostring(c[i])) end
+
function css.applyselector(x,str)
-- the wrapping needs checking so this is a placeholder
return applyselector({ x },str)
end
--- local c = css.applyselector(xml.convert(t),s) for i=1,#c do print(xml.tostring(c[i])) end
+-- -- Some helpers to map e.g. style attributes:
+--
+-- -- string based (2.52):
+--
+-- local match = string.match
+-- local topattern = string.topattern
+--
+-- function css.stylevalue(root,name)
+-- local list = getid(root).at.style
+-- if list then
+-- local pattern = topattern(name) .. ":%s*([^;]+)"
+-- local value = match(list,pattern)
+-- if value then
+-- context(value)
+-- end
+-- end
+-- end
+--
+-- -- string based, cached (2.28 / 2.17 interfaced):
+--
+-- local match = string.match
+-- local topattern = string.topattern
+--
+-- local patterns = table.setmetatableindex(function(t,k)
+-- local v = topattern(k) .. ":%s*([^;]+)"
+-- t[k] = v
+-- return v
+-- end)
+--
+-- function css.stylevalue(root,name)
+-- local list = getid(root).at.style
+-- if list then
+-- local value = match(list,patterns[name])
+-- if value then
+-- context(value)
+-- end
+-- end
+-- end
+--
+-- -- lpeg based (4.26):
+--
+-- the lpeg variant also removes trailing spaces and accepts spaces before a colon
+
+local ctx_sprint = context.sprint
+local ctx_xmlvalue = context.xmlvalue
+
+local colon = P(":")
+local semicolon = P(";")
+local eos = P(-1)
+local somevalue = (1 - (skipspace * (semicolon + eos)))^1
+local someaction = skipspace * colon * skipspace * (somevalue/ctx_sprint)
+
+-- function css.stylevalue(root,name)
+-- local list = getid(root).at.style
+-- if list then
+-- lpegmatch(P(name * someaction + 1)^0,list)
+-- end
+-- end
+-- -- cache patterns (2.13):
+
+local patterns= setmetatableindex(function(t,k)
+ local v = P(k * someaction + 1)^0
+ t[k] = v
+ return v
+end)
+
+function css.stylevalue(root,name)
+ local list = getid(root).at.style -- hard coded style
+ if list then
+ lpegmatch(patterns[name],list)
+ end
+end
+
+local somevalue = (1 - whitespace - semicolon - eos)^1
+local someaction = skipspace * colon * (skipspace * Carg(1) * C(somevalue)/function(m,s)
+ ctx_xmlvalue(m,s,"") -- use one with two args
+end)^1
+
+local patterns= setmetatableindex(function(t,k)
+ local v = P(k * someaction + 1)^0
+ t[k] = v
+ return v
+end)
+
+function css.mappedstylevalue(root,map,name)
+ local list = getid(root).at.style -- hard coded style
+ if list then
+ lpegmatch(patterns[name],list,1,map)
+ end
+end
+
+-- -- faster interface (1.02):
+
+interfaces.implement {
+ name = "xmlstylevalue",
+ actions = css.stylevalue,
+ arguments = { "string", "string" },
+}
+
+interfaces.implement {
+ name = "xmlmappedstylevalue",
+ actions = css.mappedstylevalue,
+ arguments = { "string", "string", "string" },
+}
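-- A minimal sketch of what the cached pattern matches; the style string below
-- is hypothetical: for the name "color" the pattern P("color" * someaction + 1)^0
-- scans
--
--   font-size:10pt; color : red ;text-align:center
--
-- and hands only "red" to context.sprint (spaces before the colon and after the
-- value are skipped), while css.mappedstylevalue passes the found value through
-- the \xmlvalue mapping named by its map argument via context.xmlvalue.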
diff --git a/tex/context/base/mkiv/lxml-css.mkiv b/tex/context/base/mkiv/lxml-css.mkiv
index 79de85c08..04000a6ca 100644
--- a/tex/context/base/mkiv/lxml-css.mkiv
+++ b/tex/context/base/mkiv/lxml-css.mkiv
@@ -11,7 +11,9 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-\registerctxluafile{lxml-css}{1.001}
+\registerctxluafile{lxml-css}{}
+
+\unprotect
\def\ctxmodulecss#1{\ctxlua{moduledata.css.#1}}
@@ -23,7 +25,7 @@
% \else
% \edef\CellPadding{\cssgetsinglepadding{\xmlatt{#1}{cellpadding}}}
% \fi
-
+%
% \starttexdefinition cssgetsinglepadding #1
% \ctxlua {
% context((moduledata.css.padding(
@@ -36,4 +38,39 @@
% }sp
% \stoptexdefinition
-\endinput
+% \startxmlsetups html:settings
+% \xmlsetsetup{#1}{p}{html:p}
+% \stopxmlsetups
+%
+% \xmlmapvalue{ctx-before} {one} {\page BEFORE\par}
+% \xmlmapvalue{ctx-after} {two} {\par AFTER\page}
+% \xmlmapvalue{text-decoration}{underline}{U}
+% \xmlmapvalue{text-decoration}{overline} {O}
+%
+% \startxmlsetups html:p
+% \testfeatureonce{100000}{
+% \edef\foo{\xmlcssstylevalue{#1}{ctx-before}\xmlcssstylevalue{#1}{ctx-after}}
+% }
+% \page {\tttf style="\xmlatt{#1}{style}"} : \elapsedtime\ s \page
+% \xmlvalue{ctx-before}{\xmlcssstylevalue{#1}{ctx-before}}{}
+% \xmlflush{#1}
+% (\xmlcssstylevalue{#1}{text-decoration})
+% (\xmlcssmappedstylevalue{#1}{text-decoration}{text-decoration})
+% \xmlvalue{ctx-after} {\xmlcssstylevalue{#1}{ctx-after}}{}
+% \stopxmlsetups
+%
+% \startbuffer[temp]
+%
+% foo 1
+% foo 2
+%
+% \stopbuffer
+%
+% \xmlregistersetup{html:settings}
+% \xmlprocessbuffer{main}{temp}{}
+
+\let\xmlcssstylevalue \clf_xmlstylevalue
+\let\xmlcssmappedstylevalue\clf_xmlmappedstylevalue
+
+\protect \endinput
+
diff --git a/tex/context/base/mkiv/lxml-ctx.mkiv b/tex/context/base/mkiv/lxml-ctx.mkiv
index 6691e36bb..e0beb22bf 100644
--- a/tex/context/base/mkiv/lxml-ctx.mkiv
+++ b/tex/context/base/mkiv/lxml-ctx.mkiv
@@ -16,7 +16,7 @@
\writestatus{loading}{ConTeXt XML Support / Goodies}
-\registerctxluafile{lxml-ctx}{1.001}
+\registerctxluafile{lxml-ctx}{}
\unprotect
diff --git a/tex/context/base/mkiv/lxml-ent.lua b/tex/context/base/mkiv/lxml-ent.lua
index 93f48046b..df80a7985 100644
--- a/tex/context/base/mkiv/lxml-ent.lua
+++ b/tex/context/base/mkiv/lxml-ent.lua
@@ -6,7 +6,7 @@ if not modules then modules = { } end modules ['lxml-ent'] = {
license = "see context related readme files"
}
-local type, next, tonumber = type, next, tonumber
+local next = next
local byte, format = string.byte, string.format
local setmetatableindex = table.setmetatableindex
diff --git a/tex/context/base/mkiv/lxml-inf.lua b/tex/context/base/mkiv/lxml-inf.lua
index 8f1157c7d..8d99d6270 100644
--- a/tex/context/base/mkiv/lxml-inf.lua
+++ b/tex/context/base/mkiv/lxml-inf.lua
@@ -8,6 +8,7 @@ if not modules then modules = { } end modules ['lxml-inf'] = {
-- This file will be loaded runtime by x-pending.tex.
+local next, tostring, type = next, tostring, type
local concat = table.concat
local xmlwithelements = xml.withelements
diff --git a/tex/context/base/mkiv/lxml-ini.lua b/tex/context/base/mkiv/lxml-ini.lua
index 11f634739..f7f882cee 100644
--- a/tex/context/base/mkiv/lxml-ini.lua
+++ b/tex/context/base/mkiv/lxml-ini.lua
@@ -39,19 +39,15 @@ implement { name = "xmldoifelseselfempty", actions = lxml.doifelseempty, arg
--------- { name = "xmlcontent", actions = lxml.content, arguments = "string" }
--------- { name = "xmlflushstripped", actions = lxml.strip, arguments = { "string", true } }
+
implement { name = "xmlall", actions = lxml.all, arguments = { "string", "string" } }
-implement { name = "xmllastmatch", actions = lxml.lastmatch }
-implement { name = "xmlpushmatch", actions = lxml.pushmatch }
-implement { name = "xmlpopmatch", actions = lxml.popmatch }
implement { name = "xmlatt", actions = lxml.att, arguments = { "string", "string" } }
-implement { name = "xmllastatt", actions = lxml.lastatt }
implement { name = "xmlattdef", actions = lxml.att, arguments = { "string", "string", "string" } }
implement { name = "xmlattribute", actions = lxml.attribute, arguments = { "string", "string", "string" } }
implement { name = "xmlattributedef", actions = lxml.attribute, arguments = { "string", "string", "string", "string" } }
-implement { name = "xmlpath", actions = lxml.path, arguments = { "string", "'/'" } }
+implement { name = "xmlbadinclusions", actions = lxml.badinclusions, arguments = "string" }
implement { name = "xmlchainatt", actions = lxml.chainattribute, arguments = { "string", "'/'", "string" } }
implement { name = "xmlchainattdef", actions = lxml.chainattribute, arguments = { "string", "'/'", "string", "string" } }
-implement { name = "xmlrefatt", actions = lxml.refatt, arguments = { "string", "string" } }
implement { name = "xmlchecknamespace", actions = xml.checknamespace, arguments = { "lxmlid", "string", "string" } }
implement { name = "xmlcommand", actions = lxml.command, arguments = { "string", "string", "string" } }
implement { name = "xmlconcat", actions = lxml.concat, arguments = { "string", "string", "string" } } -- \detokenize{#3}
@@ -71,16 +67,21 @@ implement { name = "xmlfirst", actions = lxml.first, arg
implement { name = "xmlflush", actions = lxml.flush, arguments = "string" }
implement { name = "xmlflushcontext", actions = lxml.context, arguments = "string" }
implement { name = "xmlflushlinewise", actions = lxml.flushlinewise, arguments = "string" }
+implement { name = "xmlflushpure", actions = lxml.pure, arguments = "string" }
implement { name = "xmlflushspacewise", actions = lxml.flushspacewise, arguments = "string" }
+implement { name = "xmlflushtext", actions = lxml.text, arguments = "string" }
implement { name = "xmlfunction", actions = lxml.applyfunction, arguments = { "string", "string" } }
implement { name = "xmlinclude", actions = lxml.include, arguments = { "string", "string", "string", true } }
implement { name = "xmlincludeoptions", actions = lxml.include, arguments = { "string", "string", "string", "string" } }
implement { name = "xmlinclusion", actions = lxml.inclusion, arguments = "string" }
+implement { name = "xmlinclusionbase", actions = lxml.inclusion, arguments = { "string", false, true } }
implement { name = "xmlinclusions", actions = lxml.inclusions, arguments = "string" }
-implement { name = "xmlbadinclusions", actions = lxml.badinclusions, arguments = "string" }
implement { name = "xmlindex", actions = lxml.index, arguments = { "string", "string", "string" } } -- can be integer but now we can alias
implement { name = "xmlinlineverbatim", actions = lxml.inlineverbatim, arguments = "string" }
implement { name = "xmllast", actions = lxml.last, arguments = { "string", "string" } }
+implement { name = "xmllastatt", actions = lxml.lastatt }
+implement { name = "xmllastmatch", actions = lxml.lastmatch }
+implement { name = "xmllastpar", actions = lxml.lastpar }
implement { name = "xmlload", actions = lxml.load, arguments = { "string", "string", "string" } }
implement { name = "xmlloadbuffer", actions = lxml.loadbuffer, arguments = { "string", "string", "string" } }
implement { name = "xmlloaddata", actions = lxml.loaddata, arguments = { "string", "string", "string" } }
@@ -90,13 +91,24 @@ implement { name = "xmlmatch", actions = lxml.match, arg
implement { name = "xmlname", actions = lxml.name, arguments = "string" }
implement { name = "xmlnamespace", actions = lxml.namespace, arguments = "string" }
implement { name = "xmlnonspace", actions = lxml.nonspace, arguments = { "string", "string" } }
+implement { name = "xmlpar", actions = lxml.par, arguments = { "string", "string" } }
+implement { name = "xmlparam", actions = lxml.param, arguments = { "string", "string", "string" } }
+implement { name = "xmlpath", actions = lxml.path, arguments = { "string", "'/'" } }
+implement { name = "xmlpopmatch", actions = lxml.popmatch }
implement { name = "xmlpos", actions = lxml.pos, arguments = "string" }
+implement { name = "xmlpure", actions = lxml.pure, arguments = { "string", "string" } }
+implement { name = "xmlpushmatch", actions = lxml.pushmatch }
implement { name = "xmlraw", actions = lxml.raw, arguments = { "string", "string" } }
implement { name = "xmlrawtex", actions = lxml.rawtex, arguments = { "string", "string" } }
+implement { name = "xmlrefatt", actions = lxml.refatt, arguments = { "string", "string" } }
implement { name = "xmlregisterns", actions = xml.registerns, arguments = { "string", "string" } }
implement { name = "xmlremapname", actions = xml.remapname, arguments = { "lxmlid", "string","string","string" } }
implement { name = "xmlremapnamespace", actions = xml.renamespace, arguments = { "lxmlid", "string", "string" } }
implement { name = "xmlsave", actions = lxml.save, arguments = { "string", "string" } }
+implement { name = "xmlsetatt", actions = lxml.setatt, arguments = { "string", "string", "string" } }
+implement { name = "xmlsetattribute", actions = lxml.setattribute, arguments = { "string", "string", "string", "string" } }
+implement { name = "xmlsetpar", actions = lxml.setpar, arguments = { "string", "string", "string" } }
+implement { name = "xmlsetparam", actions = lxml.setparam, arguments = { "string", "string", "string", "string" } }
implement { name = "xmlsetsetup", actions = lxml.setsetup, arguments = { "string", "string", "string" } }
implement { name = "xmlsnippet", actions = lxml.snippet, arguments = { "string", "string" } }
implement { name = "xmlstrip", actions = lxml.strip, arguments = { "string", "string" } }
@@ -106,11 +118,9 @@ implement { name = "xmlstripped", actions = lxml.stripped, arg
implement { name = "xmlstrippednolines", actions = lxml.stripped, arguments = { "string", "string", true } }
implement { name = "xmltag", actions = lxml.tag, arguments = "string" }
implement { name = "xmltext", actions = lxml.text, arguments = { "string", "string" } }
-implement { name = "xmlpure", actions = lxml.pure, arguments = { "string", "string" } }
-implement { name = "xmlflushtext", actions = lxml.text, arguments = "string" }
-implement { name = "xmlflushpure", actions = lxml.pure, arguments = "string" }
implement { name = "xmltobuffer", actions = lxml.tobuffer, arguments = { "string", "string", "string" } }
-implement { name = "xmltobufferverbose", actions = lxml.tobuffer, arguments = { "string", "string", "string", true } }
+implement { name = "xmltobuffertextonly", actions = lxml.tobuffer, arguments = { "string", "string", "string", false } }
+implement { name = "xmltobufferverbose", actions = lxml.tobuffer, arguments = { "string", "string", "string", true, true } }
implement { name = "xmltofile", actions = lxml.tofile, arguments = { "string", "string", "string" } }
implement { name = "xmltoparameters", actions = lxml.toparameters, arguments = "string" }
implement { name = "xmlverbatim", actions = lxml.verbatim, arguments = "string" }
diff --git a/tex/context/base/mkiv/lxml-ini.mkiv b/tex/context/base/mkiv/lxml-ini.mkiv
index 6ba6bc8d4..6f64e545b 100644
--- a/tex/context/base/mkiv/lxml-ini.mkiv
+++ b/tex/context/base/mkiv/lxml-ini.mkiv
@@ -16,15 +16,15 @@
\writestatus{loading}{ConTeXt XML Support / Initialization}
-%registerctxluafile{lxml-tab}{1.001} % loader
-%registerctxluafile{lxml-lpt}{1.001} % parser
-%registerctxluafile{lxml-xml}{1.001} % xml finalizers
-%registerctxluafile{lxml-aux}{1.001} % extras using parser
-%registerctxluafile{lxml-mis}{1.001} % extras independent of parser
-\registerctxluafile{lxml-ent}{1.001} % entity hacks
-\registerctxluafile{lxml-tex}{1.001} % tex finalizers
-\registerctxluafile{lxml-dir}{1.001} % ctx hacks
-\registerctxluafile{lxml-ini}{1.001} % interface
+%registerctxluafile{lxml-tab}{} % loader
+%registerctxluafile{lxml-lpt}{} % parser
+%registerctxluafile{lxml-xml}{} % xml finalizers
+%registerctxluafile{lxml-aux}{} % extras using parser
+%registerctxluafile{lxml-mis}{} % extras independent of parser
+\registerctxluafile{lxml-ent}{} % entity hacks
+\registerctxluafile{lxml-tex}{} % tex finalizers
+\registerctxluafile{lxml-dir}{} % ctx hacks
+\registerctxluafile{lxml-ini}{} % interface
\unprotect
@@ -49,6 +49,7 @@
\let\xmlattdef \clf_xmlattdef
\let\xmlattribute \clf_xmlattribute
\let\xmlattributedef \clf_xmlattributedef
+\let\xmlbadinclusions \clf_xmlbadinclusions
\let\xmlchainatt \clf_xmlchainatt
\let\xmlchainattdef \clf_xmlchainattdef
\let\xmlchecknamespace \clf_xmlchecknamespace
@@ -75,31 +76,38 @@
\let\xmlinclude \clf_xmlinclude
\let\xmlincludeoptions \clf_xmlincludeoptions
\let\xmlinclusion \clf_xmlinclusion
+\let\xmlinclusionbase \clf_xmlinclusionbase
\let\xmlinclusions \clf_xmlinclusions
-\let\xmlbadinclusions \clf_xmlbadinclusions
\let\xmlindex \clf_xmlindex
\let\xmlinlineverbatim \clf_xmlinlineverbatim
\let\xmllast \clf_xmllast
\let\xmllastatt \clf_xmllastatt
\let\xmllastmatch \clf_xmllastmatch
-\let\xmlpushmatch \clf_xmlpushmatch
-\let\xmlpopmatch \clf_xmlpopmatch
+\let\xmllastpar \clf_xmllastpar
\let\xmlloaddirectives \clf_xmlloaddirectives
\let\xmlmain \clf_xmlmain
\let\xmlmatch \clf_xmlmatch
\let\xmlname \clf_xmlname
\let\xmlnamespace \clf_xmlnamespace
\let\xmlnonspace \clf_xmlnonspace
+\let\xmlpar \clf_xmlpar
+\let\xmlparam \clf_xmlparam
\let\xmlpath \clf_xmlpath
+\let\xmlpopmatch \clf_xmlpopmatch
\let\xmlpos \clf_xmlpos
\let\xmlposition \clf_xmlindex
\let\xmlpure \clf_xmlpure
+\let\xmlpushmatch \clf_xmlpushmatch
\let\xmlraw \clf_xmlraw
\let\xmlrefatt \clf_xmlrefatt
\let\xmlregisterns \clf_xmlregisterns % document
\let\xmlremapname \clf_xmlremapname % element
\let\xmlremapnamespace \clf_xmlremapnamespace % document
\let\xmlsave \clf_xmlsave
+\let\xmlsetatt \clf_xmlsetatt
+\let\xmlsetattribute \clf_xmlsetattribute
+\let\xmlsetpar \clf_xmlsetpar
+\let\xmlsetparam \clf_xmlsetparam
\let\xmlsetsetup \clf_xmlsetsetup
\let\xmlsnippet \clf_xmlsnippet
\let\xmlstrip \clf_xmlstrip
@@ -110,13 +118,14 @@
\let\xmltag \clf_xmltag
\let\xmltext \clf_xmltext
\let\xmltobuffer \clf_xmltobuffer % id pattern name
+\let\xmltobuffertextonly \clf_xmltobuffertextonly % id pattern name
\let\xmltobufferverbose \clf_xmltobufferverbose % id pattern name
\let\xmltofile \clf_xmltofile % id pattern filename
\let\xmltoparameters \clf_xmltoparameters
\let\xmlverbatim \clf_xmlverbatim
-\unexpanded\def\xmlinfo #1{\hbox{\ttxx[\clf_xmlname{#1}]}}
-\unexpanded\def\xmlshow #1{\startpacked\ttx\xmlverbatim{#1}\stoppacked}
+\unexpanded\def\xmlinfo#1{\hbox{\ttxx[\clf_xmlname{#1}]}}
+\unexpanded\def\xmlshow#1{\startpacked\ttx\xmlverbatim{#1}\stoppacked}
% the next one is handy for mode runs because it enforces a consistent
% #1 indexing (needed when using \xmltext{main:123}{...} like calls
@@ -129,14 +138,38 @@
% goodie:
+\def\xmltempbuffername{xml-temp}
+
+\unexpanded\def\prettyprintbuffer#1#2% only used here
+ {\ifdefined\scitebuffer
+ \scitebuffer[#2][#1]%
+ \else
+ \typebuffer[#1][\c!option=#2]%
+ \fi}
+
\unexpanded\def\xmlprettyprint#1#2%
- {\xmltobufferverbose{#1}{.}{xml-temp}%
- \ifdefined\scitebuffer
- \scitebuffer[#2][xml-temp]%
+ {\xmltobufferverbose{#1}{.}{\xmltempbuffername}%
+ \prettyprintbuffer\xmltempbuffername{#2}}
+
+\unexpanded\def\xmlprettyprinttext#1#2%
+ {\xmltobuffertextonly{#1}{.}{\xmltempbuffername}%
+ \prettyprintbuffer\xmltempbuffername{#2}}
+
+\unexpanded\def\inlineprettyprintbuffer#1#2% only used here
+ {\ifdefined\sciteinlinebuffer
+ \sciteinlinebuffer[#2][#1]%
\else
- \typebuffer[xml-temp][\c!option=#2]%
+ \typeinlinebuffer[#1][\c!option=#2]%
\fi}
+\unexpanded\def\xmlinlineprettyprint#1#2%
+ {\xmltobufferverbose{#1}{.}{\xmltempbuffername}%
+ \inlineprettyprintbuffer\xmltempbuffername{#2}}
+
+\unexpanded\def\xmlinlineprettyprinttext#1#2%
+ {\xmltobuffertextonly{#1}{.}{\xmltempbuffername}%
+ \inlineprettyprintbuffer\xmltempbuffername{#2}}
+
% kind of special:
\let\xmlstartraw\clf_xmlstartraw
diff --git a/tex/context/base/mkiv/lxml-mis.lua b/tex/context/base/mkiv/lxml-mis.lua
index 94a26b974..04ba7b35c 100644
--- a/tex/context/base/mkiv/lxml-mis.lua
+++ b/tex/context/base/mkiv/lxml-mis.lua
@@ -8,8 +8,8 @@ if not modules then modules = { } end modules ['lxml-mis'] = {
local xml, lpeg, string = xml, lpeg, string
+local type = type
local concat = table.concat
-local type, next, tonumber, tostring, setmetatable, loadstring = type, next, tonumber, tostring, setmetatable, loadstring
local format, gsub, match = string.format, string.gsub, string.match
local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
local P, S, R, C, V, Cc, Cs = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.V, lpeg.Cc, lpeg.Cs
@@ -39,7 +39,7 @@ local function xmlgsub(t,old,new) -- will be replaced
end
end
---~ xml.gsub = xmlgsub
+-- xml.gsub = xmlgsub
function xml.stripleadingspaces(dk,d,k) -- cosmetic, for manual
if d and k then
@@ -51,12 +51,12 @@ function xml.stripleadingspaces(dk,d,k) -- cosmetic, for manual
end
end
---~ xml.escapes = { ['&'] = '&amp;', ['<'] = '&lt;', ['>'] = '&gt;', ['"'] = '&quot;' }
---~ xml.unescapes = { } for k,v in next, xml.escapes do xml.unescapes[v] = k end
+-- xml.escapes = { ['&'] = '&amp;', ['<'] = '&lt;', ['>'] = '&gt;', ['"'] = '&quot;' }
+-- xml.unescapes = { } for k,v in next, xml.escapes do xml.unescapes[v] = k end
---~ function xml.escaped (str) return (gsub(str,"(.)" , xml.escapes )) end
---~ function xml.unescaped(str) return (gsub(str,"(&.-;)", xml.unescapes)) end
---~ function xml.cleansed (str) return (gsub(str,"<.->" , '' )) end -- "%b<>"
+-- function xml.escaped (str) return (gsub(str,"(.)" , xml.escapes )) end
+-- function xml.unescaped(str) return (gsub(str,"(&.-;)", xml.unescapes)) end
+-- function xml.cleansed (str) return (gsub(str,"<.->" , '' )) end -- "%b<>"
-- 100 * 2500 * "oeps< oeps> oeps&" : gsub:lpeg|lpeg|lpeg
--
@@ -66,8 +66,8 @@ end
--
-- 1559:0257:0288:0190 (last one suggested by roberto)
--- escaped = Cs((S("<&>") / xml.escapes + 1)^0)
--- escaped = Cs((S("<")/"&lt;" + S(">")/"&gt;" + S("&")/"&amp;" + 1)^0)
+----- escaped = Cs((S("<&>") / xml.escapes + 1)^0)
+----- escaped = Cs((S("<")/"&lt;" + S(">")/"&gt;" + S("&")/"&amp;" + 1)^0)
local normal = (1 - S("<&>"))^0
local special = P("<")/"&lt;" + P(">")/"&gt;" + P("&")/"&amp;"
local escaped = Cs(normal * (special * normal)^0)
diff --git a/tex/context/base/mkiv/lxml-sor.lua b/tex/context/base/mkiv/lxml-sor.lua
index aba1c3b8d..a30392b95 100644
--- a/tex/context/base/mkiv/lxml-sor.lua
+++ b/tex/context/base/mkiv/lxml-sor.lua
@@ -8,6 +8,7 @@ if not modules then modules = { } end modules ['lxml-sor'] = {
local format, concat, rep = string.format, table.concat, string.rep
local lpegmatch = lpeg.match
+local next = next
local xml = xml
local lxml = lxml
diff --git a/tex/context/base/mkiv/lxml-sor.mkiv b/tex/context/base/mkiv/lxml-sor.mkiv
index 0d8eb6ba1..298828611 100644
--- a/tex/context/base/mkiv/lxml-sor.mkiv
+++ b/tex/context/base/mkiv/lxml-sor.mkiv
@@ -15,7 +15,7 @@
\writestatus{loading}{ConTeXt XML Support / Sorting}
-\registerctxluafile{lxml-sor}{1.001}
+\registerctxluafile{lxml-sor}{}
\unprotect
diff --git a/tex/context/base/mkiv/lxml-tab.lua b/tex/context/base/mkiv/lxml-tab.lua
index 02228c7c5..8d4be58ab 100644
--- a/tex/context/base/mkiv/lxml-tab.lua
+++ b/tex/context/base/mkiv/lxml-tab.lua
@@ -160,9 +160,20 @@ local entities, parameters
local strip, utfize, resolve, cleanup, resolve_predefined, unify_predefined
local dcache, hcache, acache
local mt, dt, nt
+local currentfilename, currentline, linenumbers
+
+local grammar_parsed_text_one
+local grammar_parsed_text_two
+local grammar_unparsed_text
+
+local handle_hex_entity
+local handle_dec_entity
+local handle_any_entity_dtd
+local handle_any_entity_text
local function preparexmlstate(settings)
if settings then
+ linenumbers = settings.linenumbers
stack = { }
level = 0
top = { }
@@ -179,6 +190,8 @@ local function preparexmlstate(settings)
unify_predefined = settings.unify_predefined_entities -- &#038; -> &amp;
cleanup = settings.text_cleanup
entities = settings.entities or { }
+ currentfilename = settings.currentresource
+ currentline = 1
parameters = { }
reported_at_errors = { }
dcache = { }
@@ -193,6 +206,7 @@ local function preparexmlstate(settings)
resolve_predefined = true
end
else
+ linenumbers = false
stack = nil
level = nil
top = nil
@@ -214,6 +228,8 @@ local function preparexmlstate(settings)
dcache = nil
hcache = nil
acache = nil
+ currentfilename = nil
+ currentline = 1
end
end
@@ -258,14 +274,24 @@ local function add_empty(spacing, namespace, tag)
top = stack[level]
dt = top.dt
nt = #dt + 1
- local t = {
+ local t = linenumbers and {
ns = namespace or "",
rn = resolved,
tg = tag,
at = at,
dt = { },
ni = nt, -- set slot, needed for css filtering
- __p__ = top
+ cf = currentfilename,
+ cl = currentline,
+ __p__ = top,
+ } or {
+ ns = namespace or "",
+ rn = resolved,
+ tg = tag,
+ at = at,
+ dt = { },
+ ni = nt, -- set slot, needed for css filtering
+ __p__ = top,
}
dt[nt] = t
setmetatable(t, mt)
@@ -281,18 +307,28 @@ local function add_begin(spacing, namespace, tag)
dt[nt] = spacing
end
local resolved = namespace == "" and xmlns[#xmlns] or nsremap[namespace] or namespace
- top = {
+ dt = { }
+ top = linenumbers and {
ns = namespace or "",
rn = resolved,
tg = tag,
at = at,
- dt = { },
+ dt = dt,
ni = nil, -- preset slot, needed for css filtering
- __p__ = stack[level]
+ cf = currentfilename,
+ cl = currentline,
+ __p__ = stack[level],
+ } or {
+ ns = namespace or "",
+ rn = resolved,
+ tg = tag,
+ at = at,
+ dt = dt,
+ ni = nil, -- preset slot, needed for css filtering
+ __p__ = stack[level],
}
setmetatable(top, mt)
- dt = top.dt
- nt = #dt
+ nt = 0
level = level + 1
stack[level] = top
at = { }
@@ -372,7 +408,15 @@ local function add_special(what, spacing, text)
-- forget it
else
nt = nt + 1
- dt[nt] = {
+ dt[nt] = linenumbers and {
+ special = true,
+ ns = "",
+ tg = what,
+ ni = nil, -- preset slot
+ dt = { text },
+ cf = currentfilename,
+ cl = currentline,
+ } or {
special = true,
ns = "",
tg = what,
@@ -404,21 +448,13 @@ local function attribute_specification_error(str)
return str
end
--- these will be set later
-
-local grammar_parsed_text_one
-local grammar_parsed_text_two
-
-local handle_hex_entity
-local handle_dec_entity
-local handle_any_entity_dtd
-local handle_any_entity_text
-
--- in order to overcome lua limitations we wrap entity stuff in a
--- closure
+-- I'm sure that this lpeg can be simplified (less captures) but it evolved ...
+-- so i'm not going to change it now.
do
+ -- In order to overcome lua limitations we wrap entity stuff in a closure.
+
local badentity = "&" -- was "&error;"
xml.placeholders = {
@@ -880,7 +916,14 @@ local function handle_crap_error(chr)
return chr
end
+local function handlenewline()
+ currentline = currentline + 1
+end
+
+local spacetab = S(' \t')
local space = S(' \r\n\t')
+local newline = lpegpatterns.newline / handlenewline
+local anything = P(1)
local open = P('<')
local close = P('>')
local squote = S("'")
@@ -897,67 +940,9 @@ local name = name_yes + name_nop
local utfbom = lpegpatterns.utfbom -- no capture
local spacing = C(space^0)
------ entitycontent = (1-open-semicolon)^0
-local anyentitycontent = (1-open-semicolon-space-close-ampersand)^0
-local hexentitycontent = R("AF","af","09")^1
-local decentitycontent = R("09")^1
-local parsedentity = P("#")/"" * (
- P("x")/"" * (hexentitycontent/handle_hex_entity) +
- (decentitycontent/handle_dec_entity)
- ) + (anyentitycontent/handle_any_entity_dtd) -- can be Cc(true)
-local parsedentity_text= P("#")/"" * (
- P("x")/"" * (hexentitycontent/handle_hex_entity) +
- (decentitycontent/handle_dec_entity)
- ) + (anyentitycontent/handle_any_entity_text) -- can be Cc(false)
------ entity = ampersand/"" * parsedentity * ( (semicolon/"") + #(P(1)/handle_end_entity))
-local entity = (ampersand/"") * parsedentity * (semicolon/"")
- + ampersand * (anyentitycontent / handle_end_entity)
-local entity_text = (ampersand/"") * parsedentity_text * (semicolon/"")
- + ampersand * (anyentitycontent / handle_end_entity)
-
-local text_unparsed = C((1-open)^1)
-local text_parsed = (Cs((1-open-ampersand)^1)/add_text + Cs(entity_text)/add_text)^1
-
-local somespace = space^1
-local optionalspace = space^0
-
------ value = (squote * C((1 - squote)^0) * squote) + (dquote * C((1 - dquote)^0) * dquote) -- ampersand and < also invalid in value
-local value = (squote * Cs((entity + (1 - squote))^0) * squote) + (dquote * Cs((entity + (1 - dquote))^0) * dquote) -- ampersand and < also invalid in value
-
-local endofattributes = slash * close + close -- recovery of flacky html
-local whatever = space * name * optionalspace * equal
------ wrongvalue = C(P(1-whatever-close)^1 + P(1-close)^1) / attribute_value_error
------ wrongvalue = C(P(1-whatever-endofattributes)^1 + P(1-endofattributes)^1) / attribute_value_error
------ wrongvalue = C(P(1-space-endofattributes)^1) / attribute_value_error
-local wrongvalue = Cs(P(entity + (1-space-endofattributes))^1) / attribute_value_error
-
-local attributevalue = value + wrongvalue
-
-local attribute = (somespace * name * optionalspace * equal * optionalspace * attributevalue) / add_attribute
------ attributes = (attribute)^0
-
-local attributes = (attribute + somespace^-1 * (((1-endofattributes)^1)/attribute_specification_error))^0
-
-local parsedtext = text_parsed -- / add_text
-local unparsedtext = text_unparsed / add_text
-local balanced = P { "[" * ((1 - S"[]") + V(1))^0 * "]" } -- taken from lpeg manual, () example
-
-local emptyelement = (spacing * open * name * attributes * optionalspace * slash * close) / add_empty
-local beginelement = (spacing * open * name * attributes * optionalspace * close) / add_begin
-local endelement = (spacing * open * slash * name * optionalspace * close) / add_end
-
--- todo: combine the opens in:
-
-local begincomment = open * P("!--")
-local endcomment = P("--") * close
-local begininstruction = open * P("?")
-local endinstruction = P("?") * close
-local begincdata = open * P("![CDATA[")
-local endcdata = P("]]") * close
-
-local someinstruction = C((1 - endinstruction)^0)
-local somecomment = C((1 - endcomment )^0)
-local somecdata = C((1 - endcdata )^0)
+local space_nl = spacetab + newline
+local spacing_nl = Cs((space_nl)^0)
+local anything_nl = newline + P(1)
local function weirdentity(k,v)
if trace_entities then
@@ -984,97 +969,177 @@ local function publicentity(k,v,n)
entities[k] = v
end
--- todo: separate dtd parser
+local function install(spacenewline,spacing,anything)
-local begindoctype = open * P("!DOCTYPE")
-local enddoctype = close
-local beginset = P("[")
-local endset = P("]")
-local wrdtypename = C((1-somespace-P(";"))^1)
-local doctypename = C((1-somespace-close)^0)
-local elementdoctype = optionalspace * P(" 0 then
+ local report = logs.startfilelogging("lxml","problematic xml files")
+ for k, v in table.sortedhash(errors) do
+ report("%4i %s",v,k)
+ end
+ logs.stopfilelogging()
+ --
+ if logs.loggingerrors() then
+ logs.starterrorlogging(report,"problematic xml files")
+ for k, v in table.sortedhash(errors) do
+ report("%4i %s",v,k)
+ end
+ logs.stoperrorlogging()
+ end
+ end
+ end)
+
+end
+
function lxml.store(id,root,filename)
loaded[id] = root
xmlsetproperty(root,"name",id)
@@ -481,13 +521,19 @@ end
xml.originalload = xml.originalload or xml.load
-local noffiles, nofconverted = 0, 0
+local noffiles = 0
+local nofconverted = 0
+local linenumbers = false
+
+synctex.registerenabler (function() linenumbers = true end)
+synctex.registerdisabler(function() linenumbers = false end)
function xml.load(filename,settings)
noffiles, nofconverted = noffiles + 1, nofconverted + 1
starttiming(xml)
local ok, data = resolvers.loadbinfile(filename)
settings = settings or { }
+ settings.linenumbers = linenumbers
settings.currentresource = filename
local xmltable = xml.convert((ok and data) or "",settings)
settings.currentresource = nil
@@ -528,6 +574,7 @@ local function lxmlconvert(id,data,compress,currentresource)
resolve_entities = function(str,ent) return entityconverter(id,str,ent) end,
currentresource = tostring(currentresource or id),
preprocessor = lxml.preprocessor,
+ linenumbers = linenumbers,
}
if compress and compress == variables.yes then
settings.strip_cm_and_dt = true
@@ -545,7 +592,8 @@ function lxml.load(id,filename,compress)
noffiles, nofconverted = noffiles + 1, nofconverted + 1
starttiming(xml)
local ok, data = resolvers.loadbinfile(filename)
- local xmltable = lxmlconvert(id,(ok and data) or "",compress,format("id: %s, file: %s",id,filename))
+ -- local xmltable = lxmlconvert(id,(ok and data) or "",compress,formatters["id: %s, file: %s"](id,filename))
+ local xmltable = lxmlconvert(id,(ok and data) or "",compress,filename)
stoptiming(xml)
lxml.store(id,xmltable,filename)
return xmltable, filename
@@ -587,19 +635,19 @@ function lxml.include(id,pattern,attribute,options)
filename = resolveprefix(filename) or filename
end
-- some protection
- if options.rootpath then
- if dirname(filename) == "" and root.filename then
- local dn = dirname(root.filename)
- if dn ~= "" then
- filename = joinfile(dn,filename)
- end
+ if options.rootpath and dirname(filename) == "" and root.filename then
+ local dn = dirname(root.filename)
+ if dn ~= "" then
+ filename = joinfile(dn,filename)
end
end
if trace_loading then
report_lxml("including file %a",filename)
end
noffiles, nofconverted = noffiles + 1, nofconverted + 1
- return resolvers.loadtexfile(filename) or ""
+ return
+ resolvers.loadtexfile(filename) or "",
+ resolvers.findtexfile(filename) or ""
else
return ""
end
@@ -607,10 +655,10 @@ function lxml.include(id,pattern,attribute,options)
stoptiming(xml)
end
-function lxml.inclusion(id,default)
+function lxml.inclusion(id,default,base)
local inclusion = xmlinclusion(getid(id),default)
if inclusion then
- context(inclusion)
+ context(base and basename(inclusion) or inclusion)
end
end
@@ -694,7 +742,32 @@ local default_element_handler = xml.gethandlers("verbose").functions["@el@"]
-- return v
-- end)
+local setfilename = false
+local trace_name = false
+local report_name = logs.reporter("lxml")
+
+synctex.registerenabler (function() setfilename = synctex.setfilename end)
+synctex.registerdisabler(function() setfilename = false end)
+
+local function syncfilename(e,where)
+ local cf = e.cf
+ if cf then
+ local cl = e.cl or 1
+ if trace_name then
+ report_name("set filename, case %a, tag %a, file %a, line %a",where,e.tg,cf,cl)
+ end
+ setfilename(cf,cl);
+ end
+end
+
+trackers.register("system.synctex.xml",function(v)
+ trace_name = v
+end)
+
local function tex_element(e,handlers)
+ if setfilename then
+ syncfilename(e,"element")
+ end
local command = e.command
if command == nil then
default_element_handler(e,handlers)
@@ -895,11 +968,14 @@ function lxml.setaction(id,pattern,action)
end
end
-local function sprint(root) -- check rawroot usage
+local function sprint(root,p) -- check rawroot usage
if root then
local tr = type(root)
if tr == "string" then -- can also be result of lpath
-- rawroot = false -- ?
+ if setfilename and p then
+ syncfilename(p,"sprint s")
+ end
root = xmlunspecialized(root)
lpegmatch(xmltextcapture,root)
elseif tr == "table" then
@@ -910,12 +986,32 @@ local function sprint(root) -- check rawroot usage
root = xmldespecialized(xmltostring(root))
lpegmatch(xmltextcapture,root) -- goes to toc
else
+if setfilename and p then -- and not root.cl
+ syncfilename(p,"sprint t")
+end
xmlserialize(root,xmltexhandler)
end
end
end
end
+-- local function tprint(root) -- we can move sprint inline
+-- local tr = type(root)
+-- if tr == "table" then
+-- local n = #root
+-- if n == 0 then
+-- -- skip
+-- else
+-- for i=1,n do
+-- sprint(root[i])
+-- end
+-- end
+-- elseif tr == "string" then
+-- root = xmlunspecialized(root)
+-- lpegmatch(xmltextcapture,root)
+-- end
+-- end
+
local function tprint(root) -- we can move sprint inline
local tr = type(root)
if tr == "table" then
@@ -924,7 +1020,24 @@ local function tprint(root) -- we can move sprint inline
-- skip
else
for i=1,n do
- sprint(root[i])
+ -- sprint(root[i]) -- inlined because of filename:
+ local ri = root[i]
+ local tr = type(ri)
+ if tr == "string" then -- can also be result of lpath
+ if setfilename then
+ syncfilename(ri,"tprint")
+ end
+ root = xmlunspecialized(ri)
+ lpegmatch(xmltextcapture,ri)
+ elseif tr == "table" then
+ if forceraw then
+ rawroot = ri
+ root = xmldespecialized(xmltostring(ri))
+ lpegmatch(xmltextcapture,ri) -- goes to toc
+ else
+ xmlserialize(ri,xmltexhandler)
+ end
+ end
end
end
elseif tr == "string" then
@@ -942,6 +1055,9 @@ local function cprint(root) -- content
root = xmlunspecialized(root)
lpegmatch(xmltextcapture,root)
else
+ if setfilename then
+ syncfilename(root,"cprint")
+ end
local rootdt = root.dt
if forceraw then
rawroot = root
@@ -961,7 +1077,8 @@ xml.cprint = cprint local xmlcprint = cprint -- calls ct mathml -> will be re
-- now we can flush
function lxml.main(id)
- xmlserialize(getid(id),xmltexhandler) -- the real root (@rt@)
+ local root = getid(id)
+ xmlserialize(root,xmltexhandler) -- the real root (@rt@)
end
-- -- lines (untested)
@@ -1149,7 +1266,7 @@ function lxml.setsetup(id,pattern,setup)
report_lxml("%s lpath matches for pattern: %s","no",pattern)
end
else
- local a, b = match(setup,"^(.+:)([%*%-])$")
+ local a, b = match(setup,"^(.+:)([%*%-%+])$")
if a and b then
local collected = xmlapplylpath(getid(id),pattern)
if collected then
@@ -1327,7 +1444,7 @@ end
local function command(collected,cmd,otherwise)
local n = collected and #collected
if n and n > 0 then
- local wildcard = find(cmd,"%*")
+ local wildcard = find(cmd,"*",1,true)
for c=1,n do -- maybe optimize for n=1
local e = collected[c]
local ix = e.ix
@@ -1351,7 +1468,7 @@ end
-- local wildcards = setmetatableindex(function(t,k)
-- local v = false
--- if find(k,"%*") then
+-- if find(k,"*",1,true) then
-- v = setmetatableindex(function(t,kk)
-- local v = gsub(k,"%*",kk)
-- t[k] = v
@@ -1400,6 +1517,18 @@ local function attribute(collected,a,default)
end
end
+local function parameter(collected,p,default)
+ if collected and #collected > 0 then
+ local pa = collected[1].pa
+ local str = (pa and pa[p]) or default
+ if str and str ~= "" then
+ contextsprint(notcatcodes,str)
+ end
+ elseif default then
+ contextsprint(notcatcodes,default)
+ end
+end
+
local function chainattribute(collected,arguments,default) -- todo: optional levels
if collected and #collected > 0 then
local e = collected[1]
@@ -1556,6 +1685,8 @@ texfinalizers.reverse = reverse
texfinalizers.count = count
texfinalizers.command = command
texfinalizers.attribute = attribute
+texfinalizers.param = parameter
+texfinalizers.parameter = parameter
texfinalizers.text = text
texfinalizers.stripped = stripped
texfinalizers.lower = lower
@@ -1690,6 +1821,15 @@ function lxml.attribute(id,pattern,a,default)
end
end
+function lxml.parameter(id,pattern,p,default)
+ local collected = xmlapplylpath(getid(id),pattern)
+ if collected then
+ parameter(collected,p,default)
+ end
+end
+
+lxml.param = lxml.parameter
+
function lxml.raw(id,pattern) -- the content, untouched by commands
local collected = (pattern and xmlapplylpath(getid(id),pattern)) or getid(id)
if collected and #collected > 0 then
@@ -1794,45 +1934,6 @@ function lxml.pos(id)
contextsprint(ctxcatcodes,e and e.ni or 0)
end
--- function lxml.att(id,a,default)
--- local root = getid(id)
--- if root then
--- local at = root.at
--- local str = (at and at[a]) or default
--- if str and str ~= "" then
--- contextsprint(notcatcodes,str)
--- end
--- elseif default then
--- contextsprint(notcatcodes,default)
--- end
--- end
---
--- no need for an assignment so:
-
--- function lxml.att(id,a,default)
--- local e = getid(id)
--- if e then
--- local at = e.at
--- if at then
--- -- normally always true
--- local str = at[a]
--- if not str then
--- if default and default ~= "" then
--- contextsprint(notcatcodes,default)
--- end
--- elseif str ~= "" then
--- contextsprint(notcatcodes,str)
--- else
--- -- explicit empty is valid
--- end
--- elseif default and default ~= "" then
--- contextsprint(notcatcodes,default)
--- end
--- elseif default and default ~= "" then
--- contextsprint(notcatcodes,default)
--- end
--- end
-
do
local att
@@ -1890,6 +1991,45 @@ do
end
+do
+
+ local par
+
+ function lxml.par(id,p,default)
+ local e = getid(id)
+ if e then
+ local pa = e.pa
+ if pa then
+ -- normally always true
+ par = pa[p]
+ if not par then
+ if default and default ~= "" then
+ par = default
+ contextsprint(notcatcodes,default)
+ end
+ elseif par ~= "" then
+ contextsprint(notcatcodes,par)
+ else
+ -- explicit empty is valid
+ end
+ elseif default and default ~= "" then
+ par = default
+ contextsprint(notcatcodes,default)
+ end
+ elseif default and default ~= "" then
+ par = default
+ contextsprint(notcatcodes,default)
+ else
+ par = ""
+ end
+ end
+
+ function lxml.lastpar()
+ contextsprint(notcatcodes,par)
+ end
+
+end
+
function lxml.name(id)
local e = getid(id)
if e then
@@ -1932,7 +2072,7 @@ function lxml.flush(id)
if e then
local dt = e.dt
if dt then
- xmlsprint(dt)
+ xmlsprint(dt,e)
end
end
end
@@ -1954,7 +2094,7 @@ function lxml.snippet(id,i)
if dt then
local dti = dt[i]
if dti then
- xmlsprint(dti)
+ xmlsprint(dti,e)
end
end
end
@@ -1999,33 +2139,37 @@ end
-- testers
-local found, empty = xml.found, xml.empty
+do
-local doif, doifnot, doifelse = commands.doif, commands.doifnot, commands.doifelse
+ local found, empty = xml.found, xml.empty
-function lxml.doif (id,pattern) doif (found(getid(id),pattern)) end
-function lxml.doifnot (id,pattern) doifnot (found(getid(id),pattern)) end
-function lxml.doifelse (id,pattern) doifelse(found(getid(id),pattern)) end
-function lxml.doiftext (id,pattern) doif (not empty(getid(id),pattern)) end
-function lxml.doifnottext (id,pattern) doifnot (not empty(getid(id),pattern)) end
-function lxml.doifelsetext (id,pattern) doifelse(not empty(getid(id),pattern)) end
+ local doif, doifnot, doifelse = commands.doif, commands.doifnot, commands.doifelse
--- special case: "*" and "" -> self else lpath lookup
+ function lxml.doif (id,pattern) doif (found(getid(id),pattern)) end
+ function lxml.doifnot (id,pattern) doifnot (found(getid(id),pattern)) end
+ function lxml.doifelse (id,pattern) doifelse(found(getid(id),pattern)) end
+ function lxml.doiftext (id,pattern) doif (not empty(getid(id),pattern)) end
+ function lxml.doifnottext (id,pattern) doifnot (not empty(getid(id),pattern)) end
+ function lxml.doifelsetext (id,pattern) doifelse(not empty(getid(id),pattern)) end
-local function checkedempty(id,pattern)
- local e = getid(id)
- if not pattern or pattern == "" then
- local dt = e.dt
- local nt = #dt
- return (nt == 0) or (nt == 1 and dt[1] == "")
- else
- return empty(getid(id),pattern)
+ -- special case: "*" and "" -> self else lpath lookup
+
+ local function checkedempty(id,pattern)
+ local e = getid(id)
+ if not pattern or pattern == "" then
+ local dt = e.dt
+ local nt = #dt
+ return (nt == 0) or (nt == 1 and dt[1] == "")
+ else
+ return empty(getid(id),pattern)
+ end
end
-end
-function lxml.doifempty (id,pattern) doif (checkedempty(id,pattern)) end
-function lxml.doifnotempty (id,pattern) doifnot (checkedempty(id,pattern)) end
-function lxml.doifelseempty(id,pattern) doifelse(checkedempty(id,pattern)) end
+ function lxml.doifempty (id,pattern) doif (checkedempty(id,pattern)) end
+ function lxml.doifnotempty (id,pattern) doifnot (checkedempty(id,pattern)) end
+ function lxml.doifelseempty(id,pattern) doifelse(checkedempty(id,pattern)) end
+
+end
-- status info
@@ -2086,12 +2230,13 @@ function lxml.strip(id,pattern,nolines,anywhere)
end
function lxml.stripped(id,pattern,nolines)
- local str = xmltext(getid(id),pattern) or ""
+ local root = getid(id)
+ local str = xmltext(root,pattern) or ""
str = gsub(str,"^%s*(.-)%s*$","%1")
if nolines then
str = gsub(str,"%s+"," ")
end
- xmlsprint(str)
+ xmlsprint(str,root)
end
function lxml.delete(id,pattern)
@@ -2154,13 +2299,23 @@ end
texfinalizers.upperall = xmlfinalizers.upperall
texfinalizers.lowerall = xmlfinalizers.lowerall
-function lxml.tobuffer(id,pattern,name,unescaped)
+function lxml.tobuffer(id,pattern,name,unescaped,contentonly)
local collected = xmlapplylpath(getid(id),pattern)
if collected then
- if unescaped then
- collected = xmlcontent(collected[1]) -- expanded entities !
+ local collected = collected[1]
+ if unescaped == true then
+ -- expanded entities !
+ if contentonly then
+ collected = xmlserializetotext(collected.dt)
+ else
+ collected = xmlcontent(collected)
+ end
+ elseif unescaped == false then
+ local t = { }
+ xmlstring(collected,function(s) t[#t+1] = s end)
+ collected = concat(t)
else
- collected = tostring(collected[1])
+ collected = tostring(collected)
end
buffers.assign(name,collected)
else
@@ -2168,144 +2323,139 @@ function lxml.tobuffer(id,pattern,name,unescaped)
end
end
--- relatively new:
+-- parameters
-local permitted = nil
-local ctx_xmlinjector = context.xmlinjector
+function lxml.setatt(id,name,value)
+ local e = getid(id)
+ if e then
+ local a = e.at
+ if a then
+ a[name] = value
+ else
+ e.at = { [name] = value }
+ end
+ end
+end
-xml.pihandlers["injector"] = function(category,rest,e)
- local options = options_to_array(rest)
- local action = options[1]
- if not action then
- return
+function lxml.setpar(id,name,value)
+ local e = getid(id)
+ if e then
+ local p = e.pa
+ if p then
+ p[name] = value
+ else
+ e.pa = { [name] = value }
+ end
end
- local n = #options
- if n > 1 then
- local category = options[2]
- if category == "*" then
- ctx_xmlinjector(action)
- elseif permitted then
- if n == 2 then
- if permitted[category] then
- ctx_xmlinjector(action)
- end
- else
- for i=2,n do
- local category = options[i]
- if category == "*" or permitted[category] then
+end
+
+function lxml.setattribute(id,pattern,name,value)
+ local collected = xmlapplylpath(getid(id),pattern)
+ if collected then
+ for i=1,#collected do
+ setatt(collected[i],name,value)
+ end
+ end
+end
+
+function lxml.setparameter(id,pattern,name,value)
+ local collected = xmlapplylpath(getid(id),pattern)
+ if collected then
+ for i=1,#collected do
+ setpar(collected[i],name,value)
+ end
+ end
+end
+
+lxml.setparam = lxml.setparameter
+
+-- relatively new:
+
+do
+
+ local permitted = nil
+ local ctx_xmlinjector = context.xmlinjector
+
+ xml.pihandlers["injector"] = function(category,rest,e)
+ local options = options_to_array(rest)
+ local action = options[1]
+ if not action then
+ return
+ end
+ local n = #options
+ if n > 1 then
+ local category = options[2]
+ if category == "*" then
+ ctx_xmlinjector(action)
+ elseif permitted then
+ if n == 2 then
+ if permitted[category] then
ctx_xmlinjector(action)
- return
+ end
+ else
+ for i=2,n do
+ local category = options[i]
+ if category == "*" or permitted[category] then
+ ctx_xmlinjector(action)
+ return
+ end
end
end
end
+ else
+ ctx_xmlinjector(action)
end
- else
- ctx_xmlinjector(action)
end
-end
-local pattern = P("context-") * C((1-patterns.whitespace)^1) * C(P(1)^1)
+ local pattern = P("context-") * C((1-patterns.whitespace)^1) * C(P(1)^1)
-function lxml.applyselectors(id)
- local root = getid(id)
- local function filter(e)
- local dt = e.dt
- if not dt then
- report_lxml("error in selector, no data in %a",e.tg or "?")
- return
- end
- local ndt = #dt
- local done = false
- local i = 1
- while i <= ndt do
- local dti = dt[i]
- if type(dti) == "table" then
- if dti.tg == "@pi@" then
- local text = dti.dt[1]
- local what, rest = lpegmatch(pattern,text)
- if what == "select" then
- local categories = options_to_hash(rest)
- if categories["begin"] then
- local okay = false
- if permitted then
- for k, v in next, permitted do
- if categories[k] then
- okay = k
- break
+ function lxml.applyselectors(id)
+ local root = getid(id)
+ local function filter(e)
+ local dt = e.dt
+ if not dt then
+ report_lxml("error in selector, no data in %a",e.tg or "?")
+ return
+ end
+ local ndt = #dt
+ local done = false
+ local i = 1
+ while i <= ndt do
+ local dti = dt[i]
+ if type(dti) == "table" then
+ if dti.tg == "@pi@" then
+ local text = dti.dt[1]
+ local what, rest = lpegmatch(pattern,text)
+ if what == "select" then
+ local categories = options_to_hash(rest)
+ if categories["begin"] then
+ local okay = false
+ if permitted then
+ for k, v in next, permitted do
+ if categories[k] then
+ okay = k
+ break
+ end
end
end
- end
- if okay then
- if trace_selectors then
- report_lxml("accepting selector: %s",okay)
- end
- else
- categories.begin = false
- if trace_selectors then
- report_lxml("rejecting selector: % t",sortedkeys(categories))
- end
- end
- for j=i,ndt do
- local dtj = dt[j]
- if type(dtj) == "table" then
- local tg = dtj.tg
- if tg == "@pi@" then
- local text = dtj.dt[1]
- local what, rest = lpegmatch(pattern,text)
- if what == "select" then
- local categories = options_to_hash(rest)
- if categories["end"] then
- i = j
- break
- else
- -- error
- end
- end
- elseif not okay then
- dtj.tg = "@cm@"
+ if okay then
+ if trace_selectors then
+ report_lxml("accepting selector: %s",okay)
end
else
--- dt[j] = "" -- okay ?
- end
- end
- end
- elseif what == "include" then
- local categories = options_to_hash(rest)
- if categories["begin"] then
- local okay = false
- if permitted then
- for k, v in next, permitted do
- if categories[k] then
- okay = k
- break
+ categories.begin = false
+ if trace_selectors then
+ report_lxml("rejecting selector: % t",sortedkeys(categories))
end
end
- end
- if okay then
- if trace_selectors then
- report_lxml("accepting include: %s",okay)
- end
- else
- categories.begin = false
- if trace_selectors then
- report_lxml("rejecting include: % t",sortedkeys(categories))
- end
- end
- if okay then
for j=i,ndt do
local dtj = dt[j]
if type(dtj) == "table" then
local tg = dtj.tg
- if tg == "@cm@" then
- local content = dtj.dt[1]
- local element = root and xml.toelement(content,root)
- dt[j] = element
- element.__p__ = dt -- needs checking
- done = true
- elseif tg == "@pi@" then
+ if tg == "@pi@" then
local text = dtj.dt[1]
local what, rest = lpegmatch(pattern,text)
- if what == "include" then
+ if what == "select" then
local categories = options_to_hash(rest)
if categories["end"] then
i = j
@@ -2314,48 +2464,103 @@ function lxml.applyselectors(id)
-- error
end
end
+ elseif not okay then
+ dtj.tg = "@cm@"
+ end
+ else
+ -- dt[j] = "" -- okay ?
+ end
+ end
+ end
+ elseif what == "include" then
+ local categories = options_to_hash(rest)
+ if categories["begin"] then
+ local okay = false
+ if permitted then
+ for k, v in next, permitted do
+ if categories[k] then
+ okay = k
+ break
+ end
+ end
+ end
+ if okay then
+ if trace_selectors then
+ report_lxml("accepting include: %s",okay)
+ end
+ else
+ categories.begin = false
+ if trace_selectors then
+ report_lxml("rejecting include: % t",sortedkeys(categories))
+ end
+ end
+ if okay then
+ for j=i,ndt do
+ local dtj = dt[j]
+ if type(dtj) == "table" then
+ local tg = dtj.tg
+ if tg == "@cm@" then
+ local content = dtj.dt[1]
+ local element = root and xml.toelement(content,root)
+ dt[j] = element
+ element.__p__ = dt -- needs checking
+ done = true
+ elseif tg == "@pi@" then
+ local text = dtj.dt[1]
+ local what, rest = lpegmatch(pattern,text)
+ if what == "include" then
+ local categories = options_to_hash(rest)
+ if categories["end"] then
+ i = j
+ break
+ else
+ -- error
+ end
+ end
+ end
end
end
end
end
+ elseif dti then
+ filter(dti)
end
- elseif dti then
- filter(dti)
+ end
+ if done then
+ -- probably not needed
+ xml.reindex(dt)
end
end
- if done then
- -- probably not needed
- xml.reindex(dt)
- end
+ i = i + 1
end
- i = i + 1
end
+ xmlwithelements(root,filter)
end
- xmlwithelements(root,filter)
-end
-function xml.setinjectors(set)
- local s = settings_to_set(set)
- if permitted then
- for k, v in next, s do
- permitted[k] = true
+ function xml.setinjectors(set)
+ local s = settings_to_set(set)
+ if permitted then
+ for k, v in next, s do
+ permitted[k] = true
+ end
+ else
+ permitted = s
end
- else
- permitted = s
end
-end
-function xml.resetinjectors(set)
- if permitted and set and set ~= "" then
- local s = settings_to_set(set)
- for k, v in next, s do
- if v then
- permitted[k] = nil
+ function xml.resetinjectors(set)
+ if permitted and set and set ~= "" then
+ local s = settings_to_set(set)
+ for k, v in next, s do
+ if v then
+ permitted[k] = nil
+ end
end
+ else
+ permitted = nil
end
- else
- permitted = nil
end
+
end
implement {
@@ -2375,3 +2580,20 @@ implement {
actions = lxml.applyselectors,
arguments = "string"
}
+
+-- bonus: see x-lmx-html.mkiv
+
+function texfinalizers.xml(collected,name,setup)
+ local root = collected[1]
+ if not root then
+ return
+ end
+ if not name or name == "" then
+ report_lxml("missing name in xml finalizer")
+ return
+ end
+ xmlrename(root,name)
+ name = "lmx:" .. name
+ buffers.assign(name,strip(xmltostring(root)))
+ context.xmlprocessbuffer(name,name,setup or (name..":setup"))
+end
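
(Editorial aside, not part of the patch.) The new lxml.setatt and lxml.setpar functions above only touch an element's at and pa tables; the pattern variants setattribute/setparameter do the same for every match of an lpath expression. A minimal standalone sketch of that effect, using a hand-made element table instead of the real getid lookup:

-- sketch only: mirrors what the patched lxml.setatt / lxml.setpar do, but on a
-- plain table; the real functions resolve the element via getid(id) first
local e = { tg = "section", at = { id = "intro" }, dt = { "..." } }

local function setatt(e, name, value)
    if e.at then e.at[name] = value else e.at = { [name] = value } end
end

local function setpar(e, name, value)
    if e.pa then e.pa[name] = value else e.pa = { [name] = value } end
end

setatt(e, "status", "done")   -- attribute: ends up in e.at
setpar(e, "level",  "2")      -- parameter: ends up in the new e.pa table
print(e.at.status, e.pa.level) -- done  2
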
diff --git a/tex/context/base/mkiv/lxml-xml.lua b/tex/context/base/mkiv/lxml-xml.lua
index d0e256078..d1520800f 100644
--- a/tex/context/base/mkiv/lxml-xml.lua
+++ b/tex/context/base/mkiv/lxml-xml.lua
@@ -6,6 +6,7 @@ if not modules then modules = { } end modules ['lxml-xml'] = {
license = "see context related readme files"
}
+local tonumber, next = tonumber, next
local concat = table.concat
local find, lower, upper = string.find, string.lower, string.upper
@@ -161,6 +162,10 @@ local function xmltotext(root)
end
end
+function xml.serializetotext(root)
+ return root and xmlserialize(root,xmltexthandler) or ""
+end
+
--
local function text(collected) -- hybrid
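
(Editorial aside, not part of the patch.) The xml.serializetotext helper added here backs the "content only" branch of the reworked lxml.tobuffer earlier in this changeset; the real implementation goes through xmlserialize with xmltexthandler. A rough standalone approximation of the idea — string leaves of a dt tree are gathered, element wrappers are descended into:

-- standalone sketch, not the ConTeXt implementation; it only illustrates
-- what "serialize to text" means for an element's dt table
local function totext(dt, t)
    t = t or { }
    for i = 1, #dt do
        local d = dt[i]
        if type(d) == "table" then
            if d.dt then
                totext(d.dt, t)   -- descend into child elements
            end
        else
            t[#t+1] = d           -- keep plain text as is
        end
    end
    return table.concat(t)
end

print(totext { "foo ", { tg = "b", dt = { "bar" } }, " baz" })  -- foo bar baz
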
diff --git a/tex/context/base/mkiv/m-fonts-plugins.mkiv b/tex/context/base/mkiv/m-fonts-plugins.mkiv
index ecb311694..b74b8c3d0 100644
--- a/tex/context/base/mkiv/m-fonts-plugins.mkiv
+++ b/tex/context/base/mkiv/m-fonts-plugins.mkiv
@@ -19,8 +19,10 @@
%D below should work in the generic loader too. It's anyhow an illustration of
%D how \type {ffi} can be used in a practical application.
-\registerctxluafile{font-txt}{1.001} % generic text handler
-\registerctxluafile{font-phb}{1.001} % harfbuzz plugin
+% \enabletrackers[resolvers.ffilib]
+
+\registerctxluafile{font-txt}{} % generic text handler
+\registerctxluafile{font-phb}{} % harfbuzz plugin
\startluacode
@@ -132,6 +134,8 @@
liga=yes,
kern=yes]
+% no tlig and no analyze
+
\definefontfeature
[test-node]
[mode=node,
@@ -199,10 +203,11 @@
[arabic-uniscribe]
[mode=plug,
features=harfbuzz,
- script=arab,language=dflt,ccmp=yes,
- init=yes,medi=yes,fina=yes,isol=yes,
- liga=yes,dlig=yes,rlig=yes,clig=yes,calt=yes,
- mark=yes,mkmk=yes,kern=yes,curs=yes,
+ script=arab,language=dflt,
+% ccmp=yes,
+% init=yes,medi=yes,fina=yes,isol=yes,
+% liga=yes,dlig=yes,rlig=yes,clig=yes,calt=yes,
+% mark=yes,mkmk=yes,kern=yes,curs=yes,
shaper=uniscribe]
\starttexdefinition RunLatinTest #1#2#3#4#5
@@ -318,7 +323,7 @@
context.RunLatinTest (name, 100, "context", "base", "latin")
context.RunLatinTest (name, 100, "context", "node", "latin")
context.RunLatinTest (name, 100, "harfbuzz", "native", "latin")
- -- context.RunLatinTest (name, 100, "harfbuzz", "uniscribe", "latin")
+ context.RunLatinTest (name, 100, "harfbuzz", "uniscribe", "latin")
-- context.RunLatinTest (name, 1, "context", "text", "latin")
-- context.RunLatinTest (name, 1, "harfbuzz", "binary", "latin")
@@ -334,7 +339,8 @@
-- ARABIC
local list = {
- "arabtype"
+ "arabtype",
+ "husayni",
}
data.timings["arabic"] = {
@@ -351,7 +357,7 @@
context.RunArabicTest (name, 100, "context", "base", "arabic")
context.RunArabicTest (name, 100, "context", "node", "arabic")
context.RunArabicTest (name, 100, "harfbuzz", "native", "arabic")
- -- context.RunArabicTest (name, 100, "harfbuzz", "uniscribe", "arabic")
+ context.RunArabicTest (name, 100, "harfbuzz", "uniscribe", "arabic")
-- context.RunArabicTest (name, 1, "context", "text", "arabic")
-- context.RunArabicTest (name, 1, "harfbuzz", "binary", "arabic")
@@ -367,7 +373,8 @@
-- MIXED
local list = {
- "arabtype"
+ "arabtype",
+ "husayni"
}
data.timings["mixed"] = {
@@ -384,7 +391,7 @@
context.RunArabicTest (name, 100, "context", "base", "mixed")
context.RunArabicTest (name, 100, "context", "node", "mixed")
context.RunArabicTest (name, 100, "harfbuzz", "native", "mixed")
- -- context.RunArabicTest (name, 100, "harfbuzz", "uniscribe", "mixed")
+ context.RunArabicTest (name, 100, "harfbuzz", "uniscribe", "mixed")
-- context.RunArabicTest (name, 1, "context", "text", "mixed")
-- context.RunArabicTest (name, 1, "harfbuzz", "binary", "mixed")
diff --git a/tex/context/base/mkiv/m-oldotf.mkiv b/tex/context/base/mkiv/m-oldotf.mkiv
deleted file mode 100644
index f860df712..000000000
--- a/tex/context/base/mkiv/m-oldotf.mkiv
+++ /dev/null
@@ -1,77 +0,0 @@
-%D \module
-%D [ file=m-oldotf,
-%D version=2015.07.08,
-%D title=\CONTEXT\ Extra Modules,
-%D subtitle=Traditional OTF Loader,
-%D author=Hans Hagen,
-%D date=\currentdate,
-%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
-%C
-%C This module is part of the \CONTEXT\ macro||package and is
-%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
-%C details.
-
-\unprotect
-
-\startluacode
- local files = {
- "font-otf",
- "font-otb",
- "font-inj",
--- "font-ota",
- "font-otn",
- "font-otd",
- "font-otp",
- "font-otc",
- "font-oth",
- "font-odv",
- "font-one",
- "font-map",
- "font-fbk",
- }
- local report = logs.reporter("oldotf")
- local findfile = resolvers.findfile
- local addsuffix = file.addsuffix
- report()
- report("using traditional font loader code")
- report()
- for i=1,#files do
- local foundfile = findfile(addsuffix(files[i],"lua"))
- if foundfile and foundfile ~= "" then
- report("loading %a",foundfile)
- dofile(foundfile)
- end
- end
- report()
-
- -- needed for testing:
-
- local nuts = nodes.nuts
- local copy_node = nuts.copy
- local kern = nuts.pool.register(nuts.pool.kern())
- local setfield = nuts.setfield
-
- nuts.setattr(kern,attributes.private('fontkern'),1) -- we can have several, attributes are shared
-
- nodes.injections.installnewkern(function(k)
- local c = copy_node(kern)
- setfield(c,"kern",k)
- return c
- end)
-
- directives.register("fonts.injections.fontkern", function(v) setfield(kern,"subtype",v and 0 or 1) end)
-
- local fonts = fonts
- local handlers = fonts.handlers
- local otf = handlers.otf -- brrr
- local afm = handlers.afm -- brrr
- local getters = fonts.getters
-
- getters.kern .opentype = otf.getkern
- getters.substitution.opentype = otf.getsubstitution
- getters.alternate .opentype = otf.getalternate
- getters.multiple .opentype = otf.getmultiple
-
-\stopluacode
-
-\protect \endinput
diff --git a/tex/context/base/mkiv/math-acc.mkvi b/tex/context/base/mkiv/math-acc.mkvi
index c3f8bad40..3411edd1a 100644
--- a/tex/context/base/mkiv/math-acc.mkvi
+++ b/tex/context/base/mkiv/math-acc.mkvi
@@ -190,7 +190,7 @@
\let\normalcheck \check \unexpanded\def\dotlesscheck #1{\normalcheck {\mathdotless#1}}
\let\normalbreve \breve \unexpanded\def\dotlessbreve #1{\normalbreve {\mathdotless#1}}
\let\normaldot \dot \unexpanded\def\dotlessdot #1{\normaldot {\mathdotless#1}}
-\let\normalmathring\mathring \unexpanded\def\dotlessmathring#1{\normalmathring{\mathdotless#1}}
+\let\normalring \ring \unexpanded\def\dotlessring #1{\normalring {\mathdotless#1}}
\let\normaltilde \tilde \unexpanded\def\dotlesstilde #1{\normaltilde {\mathdotless#1}}
\let\normaldddot \dddot \unexpanded\def\dotlessdddot #1{\normaldddot {\mathdotless#1}}
@@ -205,8 +205,10 @@
\unexpanded\def\check {\math_accents_auto_dotless\normalcheck \dotlesscheck }
\unexpanded\def\breve {\math_accents_auto_dotless\normalbreve \dotlessbreve }
\unexpanded\def\dot {\math_accents_auto_dotless\normaldot \dotlessdot }
-\unexpanded\def\mathring{\math_accents_auto_dotless\normalmathring\dotlessmathring}
+\unexpanded\def\ring {\math_accents_auto_dotless\normalring \dotlessring }
\unexpanded\def\tilde {\math_accents_auto_dotless\normaltilde \dotlesstilde }
\unexpanded\def\dddot {\math_accents_auto_dotless\normaldddot \dotlessdddot }
+\let\mathring\ring % for a while
+
\protect \endinput
diff --git a/tex/context/base/mkiv/math-act.lua b/tex/context/base/mkiv/math-act.lua
index ddc7510b1..77a355b22 100644
--- a/tex/context/base/mkiv/math-act.lua
+++ b/tex/context/base/mkiv/math-act.lua
@@ -91,8 +91,6 @@ function mathematics.scaleparameters(target,original)
end
end
-sequencers.appendaction("mathparameters","system","mathematics.scaleparameters")
-
-- AccentBaseHeight vs FlattenedAccentBaseHeight
function mathematics.checkaccentbaseheight(target,original)
@@ -102,8 +100,6 @@ function mathematics.checkaccentbaseheight(target,original)
end
end
-sequencers.appendaction("mathparameters","system","mathematics.checkaccentbaseheight") -- should go in lfg instead
-
function mathematics.checkprivateparameters(target,original)
local mathparameters = target.mathparameters
if mathparameters then
@@ -131,8 +127,6 @@ function mathematics.checkprivateparameters(target,original)
end
end
-sequencers.appendaction("mathparameters","system","mathematics.checkprivateparameters")
-
function mathematics.overloadparameters(target,original)
local mathparameters = target.mathparameters
if mathparameters and next(mathparameters) then
@@ -176,8 +170,6 @@ function mathematics.overloadparameters(target,original)
end
end
-sequencers.appendaction("mathparameters","system","mathematics.overloadparameters")
-
local function applytweaks(when,target,original)
local goodies = original.goodies
if goodies then
@@ -185,9 +177,9 @@ local function applytweaks(when,target,original)
local goodie = goodies[i]
local mathematics = goodie.mathematics
local tweaks = mathematics and mathematics.tweaks
- if tweaks then
+ if type(tweaks) == "table" then
tweaks = tweaks[when]
- if tweaks then
+ if type(tweaks) == "table" then
if trace_defining then
report_math("tweaking math of %a @ %p (%s)",target.properties.fullname,target.parameters.size,when)
end
@@ -218,9 +210,16 @@ function mathematics.tweakaftercopyingfont(target,original)
end
end
+sequencers.appendaction("mathparameters","system","mathematics.scaleparameters")
+sequencers.appendaction("mathparameters","system","mathematics.checkaccentbaseheight") -- should go in lfg instead
+sequencers.appendaction("mathparameters","system","mathematics.checkprivateparameters") -- after scaling !
+sequencers.appendaction("mathparameters","system","mathematics.overloadparameters")
+
sequencers.appendaction("beforecopyingcharacters","system","mathematics.tweakbeforecopyingfont")
sequencers.appendaction("aftercopyingcharacters", "system","mathematics.tweakaftercopyingfont")
+local virtualized = mathematics.virtualized
+
function mathematics.overloaddimensions(target,original,set)
local goodies = target.goodies
if goodies then
@@ -232,15 +231,26 @@ function mathematics.overloaddimensions(target,original,set)
if trace_defining then
report_math("overloading dimensions in %a @ %p",target.properties.fullname,target.parameters.size)
end
- local characters = target.characters
- local parameters = target.parameters
- local factor = parameters.factor
- local hfactor = parameters.hfactor
- local vfactor = parameters.vfactor
- local addprivate = fonts.helpers.addprivate
+ local characters = target.characters
+ local descriptions = target.descriptions
+ local parameters = target.parameters
+ local factor = parameters.factor
+ local hfactor = parameters.hfactor
+ local vfactor = parameters.vfactor
+ local addprivate = fonts.helpers.addprivate
+ -- to be sure
+ target.type = "virtual"
+ target.properties.virtualized = true
+ --
local function overload(dimensions)
for unicode, data in next, dimensions do
local character = characters[unicode]
+ if not character then
+ local c = virtualized[unicode]
+ if c then
+ character = characters[c]
+ end
+ end
if character then
--
local width = data.width
@@ -249,26 +259,43 @@ function mathematics.overloaddimensions(target,original,set)
if trace_defining and (width or height or depth) then
report_math("overloading dimensions of %C, width %a, height %a, depth %a",unicode,width,height,depth)
end
- if width then character.width = width * hfactor end
- if height then character.height = height * vfactor end
- if depth then character.depth = depth * vfactor end
+ if width then character.width = width * hfactor end
+ if height then character.height = height * vfactor end
+ if depth then character.depth = depth * vfactor end
--
local xoffset = data.xoffset
local yoffset = data.yoffset
- if xoffset then
+ if xoffset == "llx" then
+ local d = descriptions[unicode]
+ if d then
+ xoffset = - d.boundingbox[1] * hfactor
+ character.width = character.width + xoffset
+ xoffset = { "right", xoffset }
+ end
+ elseif xoffset then
xoffset = { "right", xoffset * hfactor }
end
if yoffset then
yoffset = { "down", -yoffset * vfactor }
end
if xoffset or yoffset then
- local slot = { "slot", 1, addprivate(target,nil,fastcopy(character)) }
- if xoffset and yoffset then
- character.commands = { xoffset, yoffset, slot }
- elseif xoffset then
- character.commands = { xoffset, slot }
+ if character.commands then
+ if yoffset then
+ insert(character.commands,1,yoffset)
+ end
+ if xoffset then
+ insert(character.commands,1,xoffset)
+ end
else
- character.commands = { yoffset, slot }
+ -- local slot = { "slot", 1, addprivate(target,nil,fastcopy(character)) }
+ local slot = { "slot", 0, addprivate(target,nil,fastcopy(character)) }
+ if xoffset and yoffset then
+ character.commands = { xoffset, yoffset, slot }
+ elseif xoffset then
+ character.commands = { xoffset, slot }
+ else
+ character.commands = { yoffset, slot }
+ end
end
character.index = nil
end
@@ -502,12 +529,14 @@ local function horizontalcode(family,unicode)
end
elseif kind == e_right then
local charlist = data[3].horiz_variants
- local right = charlist[#charlist]
- roffset = abs((right["start"] or 0) - (right["end"] or 0))
+ if charlist then
+ local right = charlist[#charlist]
+ roffset = abs((right["start"] or 0) - (right["end"] or 0))
+ end
elseif kind == e_horizontal then
local charlist = data[3].horiz_variants
if charlist then
- local left = charlist[1]
+ local left = charlist[1]
local right = charlist[#charlist]
loffset = abs((left ["start"] or 0) - (left ["end"] or 0))
roffset = abs((right["start"] or 0) - (right["end"] or 0))
@@ -541,54 +570,6 @@ interfaces.implement {
-- check: when true, only set when present in font
-- force: when false, then not set when already set
-local blocks = characters.blocks -- this will move to char-ini
-
--- operators : 0x02200
--- symbolsa : 0x02701
--- symbolsb : 0x02901
--- supplemental : 0x02A00
-
--- from mathematics.gaps:
-
-blocks["lowercaseitalic"].gaps = {
- [0x1D455] = 0x0210E, -- ℎ h
-}
-
-blocks["uppercasescript"].gaps = {
- [0x1D49D] = 0x0212C, -- ℬ script B
- [0x1D4A0] = 0x02130, -- ℰ script E
- [0x1D4A1] = 0x02131, -- ℱ script F
- [0x1D4A3] = 0x0210B, -- ℋ script H
- [0x1D4A4] = 0x02110, -- ℐ script I
- [0x1D4A7] = 0x02112, -- ℒ script L
- [0x1D4A8] = 0x02133, -- ℳ script M
- [0x1D4AD] = 0x0211B, -- ℛ script R
-}
-
-blocks["lowercasescript"].gaps = {
- [0x1D4BA] = 0x0212F, -- ℯ script e
- [0x1D4BC] = 0x0210A, -- ℊ script g
- [0x1D4C4] = 0x02134, -- ℴ script o
-}
-
-blocks["uppercasefraktur"].gaps = {
- [0x1D506] = 0x0212D, -- ℭ fraktur C
- [0x1D50B] = 0x0210C, -- ℌ fraktur H
- [0x1D50C] = 0x02111, -- ℑ fraktur I
- [0x1D515] = 0x0211C, -- ℜ fraktur R
- [0x1D51D] = 0x02128, -- ℨ fraktur Z
-}
-
-blocks["uppercasedoublestruck"].gaps = {
- [0x1D53A] = 0x02102, -- ℂ bb C
- [0x1D53F] = 0x0210D, -- ℍ bb H
- [0x1D545] = 0x02115, -- ℕ bb N
- [0x1D547] = 0x02119, -- ℙ bb P
- [0x1D548] = 0x0211A, -- ℚ bb Q
- [0x1D549] = 0x0211D, -- ℝ bb R
- [0x1D551] = 0x02124, -- ℤ bb Z
-}
-
-- todo: tounicode
-- function mathematics.injectfallbacks(target,original)
@@ -758,6 +739,7 @@ function mathematics.finishfallbacks(target,specification,fallbacks)
if gaps then
for unic, unicode in next, gaps do
remap(unic,unicode,true)
+ remap(unicode,unicode,true)
end
end
end
diff --git a/tex/context/base/mkiv/math-ali.mkiv b/tex/context/base/mkiv/math-ali.mkiv
index 49a343ba0..083fb7645 100644
--- a/tex/context/base/mkiv/math-ali.mkiv
+++ b/tex/context/base/mkiv/math-ali.mkiv
@@ -15,38 +15,73 @@
\unprotect
-%D The code here has been moved from other files. Beware: the \MKII\ and
-%D \MKIV\ code is not gathered in files with the same name. In the
-%D meantime this code has been adapted to \MKIV\ but mnore is possible.
+%D The code here has been moved from other files. Beware: the \MKII\ and \MKIV\ code
+%D is not gathered in files with the same name. In the meantime this code has been
+%D adapted to \MKIV\ but more is possible. The code is somewhat complicated by the
+%D fact that alignments are tricky with respect to tagging.
+
+% export:
+%
+% alignment : ok
+% cases :
+% matrix : ok
+% substack :
%D \macros
%D {definemathalignment, setupmathalignment, startmathalignment}
%D
-%D Modules may provide additional alignment features. The following
-%D mechanisms are provided by the core.
-
-% why all these spans
-
-% n>1 ### needed, strange # interaction in recurse
+%D Modules may provide additional alignment features. The following mechanisms are
+%D provided by the core.
\newtoks\t_math_align_a
\newtoks\t_math_align_b
\newtoks\t_math_align_c
+\newskip\d_math_eqalign_distance
+
+\unexpanded\def\math_eqalign_distance
+ {\relax
+ \ifdim\d_math_eqalign_distance>\zeropoint
+ \hskip\d_math_eqalign_distance
+ \fi
+ \mathalignmentparameter\c!separator
+ \relax}
+
\def\displayopenupvalue{.25\bodyfontsize}
+% \def\math_build_eqalign
+% {\scratchtoks\emptytoks
+% \d_math_eqalign_distance\mathalignmentparameter\c!distance
+% \scratchcounterone\mathalignmentparameter\c!m
+% \scratchcountertwo\mathalignmentparameter\c!n
+% \normalexpanded{\scratchtoks{\the\scratchtoks\the\t_math_align_a}}%
+% \scratchcounter\plusone
+% \dorecurse{\numexpr\scratchcounterone*\scratchcountertwo-\plusone\relax}
+% {\ifnum\scratchcounter=\scratchcountertwo
+% \scratchcounter\plusone
+% \scratchtoks\expandafter{\the\scratchtoks\math_eqalign_distance}%
+% \else
+% \advance\scratchcounter\plusone
+% \fi
+% \normalexpanded{\scratchtoks{\the\scratchtoks\the\t_math_align_b}}}%
+% \normalexpanded{\scratchtoks{\the\scratchtoks\the\t_math_align_c}}}
+
\def\math_build_eqalign
{\scratchtoks\emptytoks
- \dorecurse{\mathalignmentparameter\c!m}\math_build_eqalign_step
- \normalexpanded{\scratchtoks{\the\scratchtoks\the\t_math_align_c}}}
-
-\unexpanded\def\math_build_eqalign_step % make sure no expansion in tracing
- {\ifnum\recurselevel>\plusone
- \scratchtoks\expandafter{\the\scratchtoks\tabskip\mathalignmentparameter\c!distance\aligntab\tabskip\zeropoint}%
- \fi
- \normalexpanded{\scratchtoks{\the\scratchtoks\the\t_math_align_a}}%
- \dorecurse{\numexpr\mathalignmentparameter\c!n-\plusone\relax}
- {\normalexpanded{\scratchtoks{\the\scratchtoks\the\t_math_align_b}}}}
+ \d_math_eqalign_distance\mathalignmentparameter\c!distance
+ \scratchcounterone\mathalignmentparameter\c!m
+ \scratchcountertwo\mathalignmentparameter\c!n
+ \etoksapp\scratchtoks{\the\t_math_align_a}%
+ \scratchcounter\plusone
+ \dorecurse{\numexpr\scratchcounterone*\scratchcountertwo-\plusone\relax}
+ {\ifnum\scratchcounter=\scratchcountertwo
+ \scratchcounter\plusone
+ \etoksapp\scratchtoks{\math_eqalign_distance}%
+ \else
+ \advance\scratchcounter\plusone
+ \fi
+ \etoksapp\scratchtoks{\the\t_math_align_b}}%
+ \etoksapp\scratchtoks{\the\t_math_align_c}}
\def\math_math_in_eqalign#1%
{\startforceddisplaymath
@@ -93,7 +128,7 @@
\aligntab
\math_first_in_eqalign
\hfil
- \math_left_of_equalign
+ \math_left_of_eqalign
\span
\math_math_in_eqalign{\alignmark\alignmark}%
\math_right_of_eqalign
@@ -101,7 +136,7 @@
\t_math_align_b
{\aligntab
\math_next_in_eqalign
- \math_left_of_equalign
+ \math_left_of_eqalign
\span
\math_math_in_eqalign{\alignmark\alignmark}%
\math_right_of_eqalign
@@ -142,7 +177,7 @@
\aligntab
\math_first_in_eqalign
\hfil
- \math_left_of_equalign
+ \math_left_of_eqalign
\span
\math_math_in_eqalign{\alignmark\alignmark}%
\math_right_of_eqalign
@@ -150,7 +185,7 @@
\t_math_align_b
{\aligntab
\math_next_in_eqalign
- \math_left_of_equalign
+ \math_left_of_eqalign
\span
\math_math_in_eqalign{\alignmark\alignmark}%
\math_right_of_eqalign
@@ -201,20 +236,31 @@
\math_halign_checked\expandafter\bgroup\the\scratchtoks\crcr#2\crcr\egroup
\math_finish_eqalign_no}
+\installcorenamespace {mathalignlocation}
+
+\setvalue{\??mathalignlocation\v!top }{\let\math_alignment_halign_method\halign\tpack}
+\setvalue{\??mathalignlocation\v!bottom}{\let\math_alignment_halign_method\halign\vpack}
+\setvalue{\??mathalignlocation\v!center}{\let\math_alignment_halign_method\halign\vcenter}
+
\def\math_both_eqalign_no_aligned#1%
- {\ifmmode
+ {\let\math_alignment_halign_method\math_halign_checked
+ \ifmmode
\the\mathdisplayaligntweaks
\global\mathnumberstatus\plusone
\ifcase\mathraggedstatus
\def\math_finish_eqalign_no{\crcr\egroup}%
- \else
- % we're in a mathbox
- \vcenter\bgroup
+ \else % we're in a mathbox
+ \ifcsname\??mathalignlocation\mathalignmentparameter\c!location\endcsname
+ \lastnamedcs % top|bottom|center as suggested by HM
+ \else
+ \vcenter
+ \fi
+ \bgroup
\def\math_finish_eqalign_no{\crcr\egroup\egroup}%
\fi
\fi
#1%
- \math_halign_checked\expandafter\bgroup\the\scratchtoks\crcr}
+ \math_alignment_halign_method\expandafter\bgroup\the\scratchtoks\crcr}
\def\math_rlap#1%
{\setbox\scratchbox\hbox{#1}%
@@ -236,68 +282,106 @@
%D Here we implement the user interface part. We start with basic math alignments:
-\newcount\c_math_eqalign_column
+\newcount \c_math_eqalign_column
+\newconditional\c_math_eqalign_first
\newtoks \everymathalignment
+\newtoks \everymathalignmentdone
-\def\math_alignment_NC_first#1\NR
- {\glet\math_alignment_NC\math_alignment_NC_rest
- \scratchtoks{\math_number_left_of_eqalign\aligntab#1\NR}% \math_number_left_of_eqalign not used yet
- \dodoubleempty\math_alignment_NC_first_indeed}
+\def\math_alignment_NN
+ {\dodirectdoubleempty\math_alignment_NN_indeed}
-\def\math_alignment_NC_first_indeed[#1][#2]%
- {\strc_formulas_place_number_nested{#1}{#2}\the\scratchtoks}
+\def\math_alignment_NN_indeed[#1][#2]%
+ {\aligntab
+ \strc_formulas_place_number_nested{#1}{#2}}
\def\math_alignment_NR
+ {\dodirectdoubleempty\math_alignment_NR_indeed}
+
+\def\math_alignment_NR_indeed[#1][#2]%
{\aligntab
\dostoptagged % finish cell
+ \strc_formulas_place_number_nested{#1}{#2}%
\math_number_right_of_eqalign
+ \global\settrue\c_math_eqalign_first
\crcr
- \dostoptagged % finish row
- \noalign{\glet\math_alignment_NC\math_alignment_NC_first}} % noalign used for change state, conditional does not work here
+ \dostoptagged} % finish row
-\def\math_alignment_NC_rest
- {\aligntab}
+\def\math_alignment_NC
+ {\relax
+ \ifconditional\c_math_eqalign_first
+ \ifx\p_math_alignment_number\v!auto
+ \strc_formulas_place_number_nested{+}{}%
+ \fi
+ \global\setfalse\c_math_eqalign_first
+ \fi
+ \math_number_left_of_eqalign
+ \aligntab}
\def\math_alignment_EQ
{\NC=}
\appendtoks
- \glet\math_alignment_NC\math_alignment_NC_first
- \unexpanded\def\NC{\math_alignment_NC}% messy, due to lookahead (we cannot use a flag)
- \let\EQ\math_alignment_EQ
- \let\NR\math_alignment_NR
+ \pushmacro\NC
+ \pushmacro\NN
+ \pushmacro\EQ
+ \pushmacro\NR
+ \let\NC\math_alignment_NC
+ \let\NN\math_alignment_NN
+ \let\EQ\math_alignment_EQ
+ \let\NR\math_alignment_NR
+ \global\settrue\c_math_eqalign_first
\to \everymathalignment
+\appendtoks
+ \popmacro\NR
+ \popmacro\EQ
+ \popmacro\NN
+ \popmacro\NC
+\to \everymathalignmentdone
+
\let\math_alignment_snap_start\relax
\let\math_alignment_snap_stop \relax
-% experimental
-
-\def\math_alignment_snap_start
- {\ifgridsnapping
- \snaptogrid[\v!both]\vbox\bgroup
- \fi}
+% % experimental:
+%
+% \def\math_alignment_snap_start
+% {\ifgridsnapping
+% \edef\p_math_alignment_grid{\mathalignmentparameter\c!grid}%
+% \ifx\p_math_alignment_grid\v!no\else
+% \snaptogrid[\p_math_alignment_grid]\vbox\bgroup
+% \fi
+% \fi}
+%
+% \def\math_alignment_snap_stop
+% {\ifgridsnapping
+% \ifx\p_math_alignment_grid\v!no\else
+% \egroup
+% \fi
+% \fi}
+%
+% % doesn't work well, so:
-\def\math_alignment_snap_stop
- {\ifgridsnapping
- \egroup
- \fi}
+\let\math_alignment_snap_start\relax
+\let\math_alignment_snap_stop \relax
% end of experimental
+\newconditional\c_math_alignment_auto_number
+
\unexpanded\def\math_alignment_start#1%
{\edef\currentmathalignment{#1}%
\dosingleempty\math_alignment_start_indeed}
\def\math_alignment_start_indeed[#1]%
{% \begingroup not permitted ($$...assignments...\halign... )
- \math_alignment_snap_start
\iffirstargument
\setupmathalignment[\currentmathalignment][#1]% bad! ungrouped
\fi
+ \math_alignment_snap_start
\the\everymathalignment
\c_math_eqalign_column\zerocount
+ \edef\p_math_alignment_number{\mathalignmentparameter\c!number}%
\processcommacommand
[\mathalignmentparameter\c!align]%
{\advance\c_math_eqalign_column\plusone\math_eqalign_set_column}% takes argument
@@ -310,6 +394,7 @@
{\math_finish_eqalign_no
\dostoptagged
\dostoptagged
+ \the\everymathalignmentdone
\math_alignment_snap_stop}
\installcorenamespace{mathalignment}
@@ -325,7 +410,8 @@
\setupmathalignment
[\c!n=2,
\c!m=1,
- \c!distance=\emwidth]
+ \c!distance=\emwidth,
+ \c!grid=\v!math]
\definemathalignment[align] % default case (this is what amstex users expect)
\definemathalignment[\v!mathalignment] % prefered case (this is cleaner, less clashing)
@@ -344,10 +430,13 @@
{\ifmmode
\let\stopalign\align_math_normal_stop % cannot be an unexpanded def ... lookahead in align
\expandafter\align_math_normal_start
+ \else\ifinformula
+ \let\stopalign\align_math_normal_stop
+ \doubleexpandafter\align_math_normal_start
\else
\let\stopalign\align_text_normal_stop
- \expandafter\align_text_normal_start
- \fi}
+ \doubleexpandafter\align_text_normal_start
+ \fi\fi}
\let\stopalign\relax
@@ -355,10 +444,13 @@
{\ifmmode
\let\stopalignment\align_math_normal_stop % cannot be an unexpanded def ... lookahead in align
\expandafter\align_math_normal_start
+ \else\ifinformula
+ \let\stopalignment\align_math_normal_stop % cannot be an unexpanded def ... lookahead in align
+ \doubleexpandafter\align_math_normal_start
\else
\let\stopalignment\align_text_normal_stop
- \expandafter\align_text_normal_start
- \fi}
+ \doubleexpandafter\align_text_normal_start
+ \fi\fi}
\let\stopalignment\relax
@@ -381,7 +473,7 @@
\dostoptagged % finish cell
\dostarttagged\t!mathtablecell\empty}
-\def\math_left_of_equalign
+\def\math_left_of_eqalign
{\ifcsname\??mathalignmentvariant\number\c_math_eqalign_column\endcsname
\ifcase\lastnamedcs \or \relax \or \hfill \or \hfill \fi
\fi}
@@ -413,9 +505,13 @@
\fi
\fi}
+% \def\math_eqalign_set_column#1% we could just add to the preamble (as with other alignments)
+% {\expandafter\let\csname\??mathalignmentvariant\number\c_math_eqalign_column\expandafter\endcsname
+% \csname\??mathalignmentvariant\ifcsname\??mathalignmentvariant#1\endcsname#1\else\v!normal\fi\endcsname}
+
\def\math_eqalign_set_column#1% we could just add to the preamble (as with other alignments)
- {\expandafter\let\csname\??mathalignmentvariant\number\c_math_eqalign_column\expandafter\endcsname
- \csname\??mathalignmentvariant\ifcsname\??mathalignmentvariant#1\endcsname#1\else\v!normal\fi\endcsname}
+ {\expandafter\chardef\csname\??mathalignmentvariant\number\c_math_eqalign_column\expandafter\expandafter\expandafter\endcsname
+ \ifcsname\??mathalignmentvariant#1\endcsname\lastnamedcs\else\zerocount\fi\relax}
\letvalue{\??mathalignmentvariant\v!normal}\zerocount
\letvalue{\??mathalignmentvariant\v!left }\plusone
@@ -704,7 +800,9 @@
%D \macros
%D {definemathmatrix, setupmathmatrix, startmathmatrix}
%D
-%D Yet another one \unknown
+%D Yet another one \unknown. This time we implement the lot a bit
+%D differently, which is a side effect of getting the tagging right. In
+%D retrospect the main alignment could be done this way but \unknown
\installcorenamespace{mathmatrix}
@@ -721,117 +819,199 @@
\setvalue {\e!stop \currentmathmatrix}{\math_matrix_stop}% no u else lookahead problem
\to \everydefinemathmatrix
-\let\math_matrix_NC\relax
+\def\math_matrix_start_table
+ {\global\c_math_eqalign_column\zerocount
+ \dostarttagged\t!math\empty
+ \dostarttagged\t!mathtable\empty}
-\unexpanded\def\math_matrix_start#1%
- {\begingroup
- \edef\currentmathmatrix{#1}%
- \dosingleempty\math_matrix_start_indeed}
+\def\math_matrix_stop_table
+ {\dostoptagged
+ \dostoptagged}
-\unexpanded\def\math_matrix_start_indeed[#1]%
- {\iffirstargument
- \setupcurrentmathmatrix[#1]%
+\def\math_matrix_start_row
+ {\noalign{\global\c_math_eqalign_column\zerocount}%
+ \dostarttagged\t!mathtablerow\empty}
+
+\def\math_matrix_stop_row
+ {\dostoptagged}
+
+\unexpanded\def\math_matrix_start_cell
+ {\dostarttagged\t!mathtablecell\empty
+ \hss
+ \math_left_of_eqalign
+ \startimath
+ \math_matrix_set_style
+ \tabskip\zeropoint
+ \everycr\emptytoks}
+
+\unexpanded\def\math_matrix_stop_cell
+ {\stopimath
+ \math_right_of_eqalign
+ \hss
+ \dostoptagged}
+
+% We could construct a preamble with alignment and such embedded but the number
+% of matrices with many rows is normally so low that it doesn't pay off at all.
+
+\unexpanded\def\math_matrix_distance
+ {\relax
+ \ifdim\d_math_eqalign_distance>\zeropoint
+ \hskip\d_math_eqalign_distance
\fi
- % \emptyhbox % noted at 25-05-2014: what was that one doing here? it messed up spacing
- \math_matrix_align_method_analyze
+ \relax}
+
+\def\math_matrix_preamble
+ {\math_matrix_strut
+ \global\advance\c_math_eqalign_column\plusone
+ \math_matrix_start_cell
+ \alignmark\alignmark
+ \math_matrix_stop_cell
+ \aligntab
+ \aligntab
+ \math_matrix_distance
+ \global\advance\c_math_eqalign_column\plusone
+ \math_matrix_start_cell
+ \alignmark\alignmark
+ \math_matrix_stop_cell}
+
+\newconditional\c_math_matrix_first
+
+\def\math_matrix_NR
+ {\aligntab\omit
+ \math_matrix_stop_row
+ \math_matrix_pickup
+ \crcr
+ \math_matrix_start_row}
+
+\def\math_matrix_NC
+ {\ifconditional\c_math_matrix_first
+ \expandafter\math_matrix_NC_yes
+ \else
+ \expandafter\math_matrix_NC_nop
+ \fi}
+
+\def\math_matrix_pickup{\global\settrue \c_math_matrix_first}
+\def\math_matrix_NC_yes{\global\setfalse\c_math_matrix_first}
+\def\math_matrix_NC_nop{\aligntab} % avoids lookahead
+
+% \def\math_matrix_stop_wrapup
+% {\crcr
+% \strut
+% \crcr
+% \noalign{\vskip-\struthtdp}}
+
+\def\math_matrix_start_processing
+ {\dontleavehmode
+ \bgroup
+ \tabskip\zeropoint
+ \math_matrix_pickup
+ \let\NR\math_matrix_NR
+ \let\NC\math_matrix_NC
+ \let\MC\math_matrix_NC
+ %
+ \let\endmath\relax
+ %
+ \setbox\nextbox\vbox\bgroup
+ \math_matrix_start_table
+ \halign \bgroup
+ % preamble
+ \span\math_matrix_preamble
+ % done
+ \crcr
+ \math_matrix_start_row}
+
+\def\math_matrix_stop_processing
+ {%\math_matrix_stop_wrapup % optional
+ \math_matrix_stop_row
+ \egroup
+ \math_matrix_stop_table
+ \egroup
\mathmatrixleft
- % new per 13-10-2014
- \edef\p_strut{\mathmatrixparameter\c!strut}%
+ \math_matrix_finish_nextbox
+ \mathmatrixright
+ \egroup}
+
+\let\math_matrix_strut \strut
+\let\math_matrix_set_style\relax
+
+\def\math_matrix_check_settings
+ {\edef\p_strut{\mathmatrixparameter\c!strut}%
\ifx\p_strut\v!no
- \let\m_matrix_strut\relax
+ \let\math_matrix_strut\relax
\else
- \let\m_matrix_strut\strut
+ \let\math_matrix_strut\strut
\ifx\p_strut\v!yes\else
\spacing\p_strut
\fi
\fi
- %
- \mathmatrixbox\bgroup
- \pushmacro\math_matrix_NC
- \let\endmath\relax
- \def\NC{\math_matrix_NC}%
- \def\MC{\math_matrix_NC\ifmmode\else\startimath\let\endmath\stopimath\fi}%
- \global\let\math_matrix_NC\math_matrix_NC_indeed
- \def\NR{\endmath\global\let\math_matrix_NC\math_matrix_NC_indeed\m_matrix_strut \crcr}%
- \normalbaselines
+ \d_math_eqalign_distance\mathmatrixparameter\c!distance\relax
+ \edef\math_matrix_set_style{\mathmatrixparameter\c!style}}
+
+\def\math_matrix_set_defaults
+ {\normalbaselines % hm, spacing ?
\mathsurround\zeropoint
- \everycr\emptytoks
- \tabskip\zeropoint
- \c_math_eqalign_column\zerocount
- \processcommacommand
- [\mathmatrixparameter\c!align]
- {\advance\c_math_eqalign_column\plusone\math_eqalign_set_column}%
- \scratchcounter\ifnum\c_math_eqalign_column>\zerocount \c_math_eqalign_column \else \plusone \fi
- \global\c_math_eqalign_column\plusone
- \math_matrix_prepare}
+ \tabskip\zeropoint}
-\def\math_matrix_stop
- {\crcr
- % \ifgridsnapping \else
- \mathstrut\crcr
- \noalign{\vskip-\baselineskip}%
- % \fi
- \egroup
- \popmacro\math_matrix_NC
- \egroup
- \mathmatrixright
- \endgroup}
+\def\math_matrix_set_columns_step
+ {\advance\c_math_eqalign_column\plusone
+ %\c_math_matrix_columns\c_math_eqalign_column
+ \math_eqalign_set_column}
-\definemathmatrix[matrix]
-\definemathmatrix[\v!mathmatrix]
+\def\math_matrix_set_columns
+ {\c_math_eqalign_column\zerocount
+ \rawprocesscommacommand[\mathmatrixparameter\c!align]\math_matrix_set_columns_step}
-\def\math_matrix_prepare
- {\t_math_align_a{\strut\math_first_in_eqalign\math_left_of_equalign\span
- \math_text_in_eqalign{\mathmatrixparameter\c!style\alignmark\alignmark}\math_right_of_eqalign}%
- \t_math_align_b{\aligntab\hskip\mathmatrixparameter\c!distance
- \math_next_in_eqalign\math_left_of_equalign\span
- \math_text_in_eqalign{\mathmatrixparameter\c!style\alignmark\alignmark}\math_right_of_eqalign}%
- \t_math_align_c{\aligntab\aligntab\hskip\mathmatrixparameter\c!distance
- \math_left_of_equalign\span
- \math_text_in_eqalign{\mathmatrixparameter\c!style\alignmark\alignmark}\math_right_of_eqalign}%
- \scratchtoks\emptytoks
- \normalexpanded{\scratchtoks{\the\scratchtoks\the\t_math_align_a}}%
- \dorecurse{\numexpr\scratchcounter-\plusone\relax}
- {\normalexpanded{\scratchtoks{\the\scratchtoks\the\t_math_align_b}}}%
- \normalexpanded{\scratchtoks{\the\scratchtoks\the\t_math_align_c}}%
- \halign \expandafter \bgroup\the\scratchtoks \crcr}
+\unexpanded\def\math_matrix_start#1%
+ {\begingroup
+ \globalpushmacro\c_math_matrix_first
+ \edef\currentmathmatrix{#1}%
+ \dosingleempty\math_matrix_start_indeed}
+
+\unexpanded\def\math_matrix_start_indeed[#1]%
+ {\iffirstargument
+ \setupcurrentmathmatrix[#1]%
+ \fi
+ \math_matrix_check_settings
+ \math_matrix_set_defaults
+ \math_matrix_set_columns
+ \math_matrix_start_processing}
+
+\def\math_matrix_stop
+ {\math_matrix_stop_processing
+ \globalpopmacro\c_math_matrix_first
+ \endgroup}
-\unexpanded\def\math_matrix_NC_indeed
- {\gdef\math_matrix_NC{\endmath\aligntab}}
+% vcenter:
+%
+% delta = (height(v) + depth(v))/2
+% axis = math_axis_size(cur_size)
+% height(v) = delta + axis
+% depth(v) = delta - axis
-\installcorenamespace{mathmatrixalignmethod}
+\installcorenamespace{mathmatrixalignlocation}
\let\mathmatrixleft \empty % experimental hook
\let\mathmatrixright\empty % experimental hook
-\def\math_matrix_process#1#2%
- {\dowithnextbox
- {\scratchdimen\dimexpr(\nextboxdp-\nextboxht)/2 \ifcase#2\or+\mathaxisheight\textfont2\fi\relax
- \ifcase#1\relax\or\lower\scratchdimen\or\or\raise\scratchdimen\fi\hbox\bgroup
- \normalstartimath
- \mathmatrixparameter\c!left
- \vcenter{\unvbox\nextbox}%
- \mathmatrixparameter\c!right
- \normalstopimath
- \egroup}%
- \vbox}
-
-\unexpanded\def\installmathmatrixalignmethod#1#2%
- {\setvalue{\??mathmatrixalignmethod#1}{#2}}
-
-\def\math_matrix_align_method_analyze
- {\csname\??mathmatrixalignmethod\ifcsname\??mathmatrixalignmethod\mathmatrixparameter\c!location\endcsname
- \mathmatrixparameter\c!location
- \else
- \v!normal
- \fi\endcsname}
+\setvalue{\??mathmatrixalignlocation\v!top }{\raise\dimexpr(\nextboxdp-\nextboxht)/2 +\mathaxisheight\textfont\zerocount\relax}
+\setvalue{\??mathmatrixalignlocation\v!high }{\raise\dimexpr(\nextboxdp-\nextboxht)/2\relax}
+\setvalue{\??mathmatrixalignlocation\v!center}{\relax}
+\setvalue{\??mathmatrixalignlocation\v!lohi} {\relax}
+\setvalue{\??mathmatrixalignlocation\v!normal}{\relax}
+\setvalue{\??mathmatrixalignlocation\v!bottom}{\lower\dimexpr(\nextboxdp-\nextboxht)/2 +\mathaxisheight\textfont\zerocount\relax}
+\setvalue{\??mathmatrixalignlocation\v!low }{\lower\dimexpr(\nextboxdp-\nextboxht)/2\relax}
+
+\def\math_matrix_finish_nextbox
+ {\begincsname\??mathmatrixalignlocation\mathmatrixparameter\c!location\endcsname\hbox\bgroup
+ \normalstartimath
+ \mathmatrixparameter\c!left
+ \vcenter{\box\nextbox}%
+ \mathmatrixparameter\c!right
+ \normalstopimath
+ \egroup}
-\installmathmatrixalignmethod\v!top {\def\mathmatrixbox{\math_matrix_process\plusthree\plusone }}
-\installmathmatrixalignmethod\v!high {\def\mathmatrixbox{\math_matrix_process\plusthree\zerocount}}
-\installmathmatrixalignmethod\v!lohi {\def\mathmatrixbox{\math_matrix_process\plustwo \zerocount}}
-\installmathmatrixalignmethod\v!low {\def\mathmatrixbox{\math_matrix_process\plusone \zerocount}}
-\installmathmatrixalignmethod\v!bottom{\def\mathmatrixbox{\math_matrix_process\plusone \plusone }}
-\installmathmatrixalignmethod\v!normal{\def\mathmatrixbox{\math_matrix_process\plustwo \zerocount}} % lohi
+\definemathmatrix[matrix]
+\definemathmatrix[\v!mathmatrix]
%D \startbuffer
%D \placeformula \startformula[-] \startmatrix
@@ -958,6 +1138,84 @@
%D
%D \typebuffer % does not run well: \getbuffer
+%D Handy for the \type {m-matrix} module:
+
+\unexpanded\def\startnamedmatrix
+ {\dodoubleempty\math_matrix_start_named}
+
+\def\math_matrix_start_named[#1][#2]%
+ {\begingroup
+ \edef\currentmathmatrix{#1}%
+ \ifsecondargument
+ \setupcurrentmathmatrix[#2]%
+ \fi
+ \math_matrix_start\currentmathmatrix}
+
+\def\stopnamedmatrix
+ {\math_matrix_stop
+ \endgroup}
+
+%D The following code is derived from Aditya's simplematrix prototype but
+%D adapted to regular mathmatrices (which saves some code so it can go into
+%D the core):
+
+\def\math_matrix_simple_row#1%
+ {\rawprocesscommalist[#1]\math_matrix_simple_col
+ \toksapp\scratchtoks{\NR}}
+
+\def\math_matrix_simple_col#1%
+ {\toksapp\scratchtoks{\NC#1}}
+
+\unexpanded\def\math_matrix_simple[#1][#2]#3%
+ {\begingroup
+ \edef\currentmathmatrix{#1}%
+ \ifsecondargument
+ \setupcurrentmathmatrix[#2]%
+ \fi
+ \scratchtoks\emptytoks
+ \processlist[];\math_matrix_simple_row[#3]%
+ \math_matrix_start\currentmathmatrix
+ \the\scratchtoks
+ \math_matrix_stop
+ \endgroup}
+
+%D We hook it into the normal mathmatrix code:
+
+\appendtoks
+ \edef\p_simplecommand{\mathmatrixparameter\c!simplecommand}%
+ \ifx\p_simplecommand\empty\else
+ \setuevalue{\p_simplecommand}{\dodoubleempty\math_matrix_simple[\currentmathmatrix]}%
+ \fi
+\to \everydefinemathmatrix
+
+%D And predefine some matrices:
+
+\definemathmatrix[matrix:parentheses][\c!left={\left(\mskip\thinmuskip},\c!right={\mskip\thinmuskip\right)},\c!align=\v!middle]
+\definemathmatrix[matrix:brackets] [\c!left={\left[\mskip\thinmuskip},\c!right={\mskip\thinmuskip\right]},\c!align=\v!middle]
+\definemathmatrix[matrix:bars] [\c!left={\left|\mskip\thinmuskip},\c!right={\mskip\thinmuskip\right|},\c!align=\v!middle]
+
+\definemathmatrix[thematrix][matrix:parentheses][\c!simplecommand=thematrix]
+
+%D \startbuffer
+%D \startformula
+%D \thematrix{1,2,3,4;5,6,7,8;9,10,11,12}
+%D \stopformula
+%D \stopbuffer
+%D
+%D \typebuffer \getbuffer
+%D
+%D \startbuffer
+%D \startformula
+%D \startthematrix
+%D \NC 1\NC 2\NC 3\NC 4\NR
+%D \NC 5\NC 6\NC 7\NC 8\NR
+%D \NC 9\NC10\NC11\NC12\NR
+%D \stopthematrix
+%D \stopformula
+%D \stopbuffer
+%D
+%D \typebuffer \getbuffer
+
%D \macros
%D {startintertext}
%D
@@ -1015,52 +1273,50 @@
%D
%D \typebuffer which gives \getbuffer
+% no tagging yet : how is it supposed to be coded?
+
\unexpanded\def\startsubstack
{\begingroup
\vcenter\bgroup
\baselineskip\mathstacktotal
\lineskip\mathstackvgap
\lineskiplimit\lineskip
- \let\stopmathmode\relax
- \def\NC{\math_matrix_NC}%
- \def\MC{\math_matrix_NC\startmathmode}%
- \global\let\math_matrix_NC\math_matrix_NC_indeed
- \def\NR
- {\stopmathmode
- \global\let\math_matrix_NC\math_matrix_NC_indeed
- \crcr}%
\mathsurround\zeropoint
\everycr\emptytoks
+ \let\NC\relax
+ \let\MC\relax
+ \let\NR\crcr
\halign\bgroup\hfil\normalstartimath\scriptstyle\alignmark\alignmark\normalstopimath\hfil\crcr}
-\def\stopsubstack
+\def\stopsubstack % todo: \unexpanded and delayed
{\crcr
\egroup
\egroup
\endgroup}
-%D \macros
-%D {bordermatrix}
-%D
-%D In \PLAIN\ \TEX\ the width of a parenthesis is stored in
-%D the \DIMENSION\ \type{\mathparentwd}. This value is derived from
-%D the width of \type{\tenrm B}, so let's take care of it now:
-
-\ifx\mathparentwd\undefined \newdimen\mathparentwd \fi
-
-\let\normalbordermatrix\bordermatrix % move that code to here instead
-
-\def\bordermatrix
- {\begingroup
- \setbox\scratchbox\hbox{\mr\char"239C}%
- \global\mathparentwd\wd\scratchbox
- \endgroup
- \normalbordermatrix}
-
-% to be tested
+% %D \macros
+% %D {bordermatrix}
+% %D
+% %D In \PLAIN\ \TEX\ the width of a parenthesis is stored in
+% %D the \DIMENSION\ \type{\mathparentwd}. This value is derived from
+% %D the width of \type{\tenrm B}, so let's take care of it now:
+%
+% \ifx\mathparentwd\undefined \newdimen\mathparentwd \fi
+%
+% \let\normalbordermatrix\bordermatrix % move that code to here instead
+%
+% \unexpanded\def\bordermatrix
+% {\begingroup
+% \setbox\scratchbox\hbox{\mr\char"239C}%
+% \global\mathparentwd\wd\scratchbox
+% \endgroup
+% \normalbordermatrix}
%
% \def\bordermatrix
-% {\begingroup\mr\global\mathparentwd\fontcharwd\font"239C\relax\endgroup
+% {\begingroup
+% \mr
+% \global\mathparentwd\fontcharwd\font"239C\relax
+% \endgroup
% \normalbordermatrix}
%D \macros{overset, underset}
@@ -1135,10 +1391,12 @@
{\setvalue{\??mathinnerstart#1}{#2}%
\setvalue{\??mathinnerstop #1}{#3}}
-\newif\iftracemath
+\installtextracker
+ {formulas.boxes}
+ {\let\math_hbox\ruledhbox}
+ {\let\math_hbox\hbox}
-\def\math_hbox
- {\iftracemath\ruledhbox\else\hbox\fi}
+\let\math_hbox\hbox
\newconstant\mathraggedstatus % normal left center right
\newconstant\mathnumberstatus % nothing normal shift_right
@@ -1170,6 +1428,8 @@
\newdimen \d_strc_math_display_width
\newbox \b_strc_math_display
\newconstant \c_strc_formulas_frame_mode
+\newdimen \d_strc_math_indent
+\newconditional\c_strc_math_indent
\let\d_strc_math_framed_width\displaywidth
@@ -1184,14 +1444,32 @@
% mode: 0=no frame | 1=number inside frame | 2=number outside frame
\def\strc_math_flush_aligned
- {\ifcase\mathraggedstatus\or\hfill\or\hfill\fi
- \box\b_strc_math_display
- \ifcase\mathraggedstatus\or\or\hfill\or\hfill\fi}
+ {\ifcase\c_strc_math_vertical
+ \ifcase\mathraggedstatus\or\hfill\or\hfill\fi
+ \box\b_strc_math_display
+ \ifcase\mathraggedstatus\or\or\hfill\or\hfill\fi
+ \else
+ \ifconditional\c_strc_math_indent
+ \ifdim\d_strc_math_indent=\zeropoint\else
+ \hangafter\plusone
+ \hangindent\d_strc_math_indent
+ \fi
+ \fi
+ \edef\p_interlinespace{\formulaparameter\c!interlinespace}%
+ \ifx\p_interlinespace\empty\else\baselineskip\p_interlinespace\fi
+ \global\d_strc_math_indent\zeropoint
+ \ifcase\mathraggedstatus\or\raggedleft\or\raggedcenter\or\raggedright\fi
+ \unhbox\b_strc_math_display
+ \fi}
\def\strc_math_flush_box_normal
- {\hbox to \displaywidth\bgroup
+ {\ifcase\c_strc_math_vertical
+ \hbox to \displaywidth\bgroup
+ \strc_math_flush_aligned
+ \egroup
+ \else
\strc_math_flush_aligned
- \egroup}
+ \fi}
\def\strc_math_flush_box_framed_common
{\setformulaframedparameter\c!align{\formulaparameter\c!align}%
@@ -1420,74 +1698,85 @@
\startforceddisplaymath}
\def\strc_math_flush_number_no
- {\ifconditional\c_strc_math_display_overflow
- \ifcase\c_strc_formulas_frame_mode
- \strc_math_flush_box_normal
- \else
- \strc_math_flush_box_framed_fit_inline
- \fi
- \else
- \ifcase\c_strc_formulas_frame_mode
- %\ifconditional\c_strc_formulas_tight
- % \strc_math_flush_box_normal
- %\else
+ {\ifcase\c_strc_math_vertical
+ \ifconditional\c_strc_math_display_overflow
+ \ifcase\c_strc_formulas_frame_mode
\strc_math_flush_box_normal
- %\fi
- \else
- \ifconditional\c_strc_formulas_tight
+ \else
\strc_math_flush_box_framed_fit_inline
+ \fi
+ \else
+ \ifcase\c_strc_formulas_frame_mode
+ %\ifconditional\c_strc_formulas_tight
+ % \strc_math_flush_box_normal
+ %\else
+ \strc_math_flush_box_normal
+ %\fi
\else
- \strc_math_flush_box_framed_display
+ \ifconditional\c_strc_formulas_tight
+ \strc_math_flush_box_framed_fit_inline
+ \else
+ \strc_math_flush_box_framed_display
+ \fi
\fi
\fi
+ \else
+ \strc_math_flush_box
\fi}
\def\strc_math_flush_number_left
- {\ifconditional\c_strc_math_display_overflow
- \ifcase\c_strc_formulas_frame_mode
- \strc_math_number_left_overflow
- \or
- \strc_math_number_left_overflow_outside
- \or
- \strc_math_number_left_overflow_inside
+ {\ifcase\c_strc_math_vertical
+ \ifconditional\c_strc_math_display_overflow
+ \ifcase\c_strc_formulas_frame_mode
+ \strc_math_number_left_overflow
+ \or
+ \strc_math_number_left_overflow_outside
+ \or
+ \strc_math_number_left_overflow_inside
+ \fi
+ \else
+ \ifcase\c_strc_formulas_frame_mode
+ \strc_math_number_left_normal
+ \or
+ \strc_math_number_left_normal_outside
+ \or
+ \strc_math_number_left_normal_inside
+ \fi
\fi
\else
- \ifcase\c_strc_formulas_frame_mode
- \strc_math_number_left_normal
- \or
- \strc_math_number_left_normal_outside
- \or
- \strc_math_number_left_normal_inside
- \fi
+ \box\b_strc_formulas_number
+ \hfill
+ \strc_math_flush_aligned
\fi}
\def\strc_math_flush_number_right
- {\ifconditional\c_strc_math_display_overflow
- \ifcase\c_strc_formulas_frame_mode
- \strc_math_number_right_overflow
- \or
- \strc_math_number_right_overflow_outside
- \or
- \strc_math_number_right_overflow_inside
+ {\ifcase\c_strc_math_vertical
+ \ifconditional\c_strc_math_display_overflow
+ \ifcase\c_strc_formulas_frame_mode
+ \strc_math_number_right_overflow
+ \or
+ \strc_math_number_right_overflow_outside
+ \or
+ \strc_math_number_right_overflow_inside
+ \fi
+ \else
+ \ifcase\c_strc_formulas_frame_mode
+ \strc_math_number_right_normal
+ \or
+ \strc_math_number_right_normal_outside
+ \or
+ \strc_math_number_right_normal_inside
+ \fi
\fi
\else
- \ifcase\c_strc_formulas_frame_mode
- \strc_math_number_right_normal
- \or
- \strc_math_number_right_normal_outside
- \or
- \strc_math_number_right_normal_inside
- \fi
+ \strc_math_flush_aligned
+ \hfill
+ \box\b_strc_formulas_number
\fi}
\unexpanded\def\strc_math_box_stop
{\stopforceddisplaymath
\egroup
- % % not needed, attribute driven
- % \ifgridsnapping
- % \snaptogrid[\v!math]\vbox
- % \fi
- % \bgroup
% check number
\d_strc_math_number_width\wd\b_strc_formulas_number
%
@@ -1514,7 +1803,13 @@
\noindent % \noindentation % not \dontleavehmode
\hskip\d_strc_formulas_display_margin_left % was kern but that doesn't indent
\strc_math_traced_state
- \hbox to \displaywidth \bgroup
+ \ifcase\c_strc_math_vertical
+ \hbox to \displaywidth \bgroup
+ \or
+ \vbox \bgroup \hsize\displaywidth
+ \or
+ \bgroup \hsize\displaywidth
+ \fi
\ifcase\mathnumberstatus
\strc_math_flush_box
\or % status 1
@@ -1540,7 +1835,11 @@
\else
\strc_math_flush_box
\fi
- % \egroup
+ \ifcase\c_strc_math_vertical
+ \or
+ \or
+ \par
+ \fi
\egroup}
\defineinnermathhandler\v!left {\strc_math_box_start\plusone }{\strc_math_box_stop}
@@ -1580,7 +1879,24 @@
\setupmathematics
[\c!textdistance=\zeropoint]
-%D For documentation, see \type {math-mkiv.tex}.
+%D This is an experiment. No fancy spacing and alignments here. If we ever
+%D go that route it might result in incompatible rendering.
+
+\unexpanded\def\startsplitformula
+ {\ifhmode
+ \par
+ \fi
+ \begingroup
+ \beforedisplayspace
+ % subset of \everydisplay:
+ \attribute \mathmodeattribute \plusone
+ \settrue \indisplaymath
+ % end of subset
+ \informulatrue}
+
+\unexpanded\def\stopsplitformula
+ {\afterdisplayspace
+ \endgroup}
\protect \endinput
diff --git a/tex/context/base/mkiv/math-arr.mkiv b/tex/context/base/mkiv/math-arr.mkiv
index 28aad8ada..a0dda66b9 100644
--- a/tex/context/base/mkiv/math-arr.mkiv
+++ b/tex/context/base/mkiv/math-arr.mkiv
@@ -99,7 +99,7 @@
\ifdim\wd4=\dimen0\else
\setbox4\hbox to \dimen0{#3\displaystyle}%
\fi
- \mathrel{\mathop{\hbox to \dimen0{\hss\copy4\hss}}\limits\normalsuperscript{\box0}\normalsubscript{\box2}}%
+ \mathrel{\mathop{\hbox to \dimen0{\hss\copy4\hss}}\limits\normalsuperscript{\box0}\normalsubscript{\box2}}% pack ?
\endgroup}
\let\math_arrows_construct_single\math_arrows_construct
@@ -279,8 +279,8 @@
\unexpanded\def\specrightarrowfill {\defaultmtharrowfill \relbar \relbar \rightarrow}
\unexpanded\def\specleftarrowfill {\defaultmtharrowfill \leftarrow \relbar \relbar}
-\unexpanded\def\rightarrowfill {\specrightarrowfill \textstyle}
-\unexpanded\def\leftarrowfill {\specleftarrowfill \textstyle}
+\unexpanded\def\rightarrowfill {\specrightarrowfill\textstyle}
+\unexpanded\def\leftarrowfill {\specleftarrowfill \textstyle}
\unexpanded\def\equalfill {\defaultmtharrowfill \Relbar \Relbar \Relbar}
\unexpanded\def\Rightarrowfill {\defaultmtharrowfill \Relbar \Relbar \Rightarrow}
diff --git a/tex/context/base/mkiv/math-def.mkiv b/tex/context/base/mkiv/math-def.mkiv
index a8af5af5c..641f7411e 100644
--- a/tex/context/base/mkiv/math-def.mkiv
+++ b/tex/context/base/mkiv/math-def.mkiv
@@ -30,6 +30,9 @@
\definemathcommand [arccos] [nolop] {\mfunctionlabeltext{arccos}}
\definemathcommand [arcsin] [nolop] {\mfunctionlabeltext{arcsin}}
\definemathcommand [arctan] [nolop] {\mfunctionlabeltext{arctan}}
+\definemathcommand [arccosh] [nolop] {\mfunctionlabeltext{arccosh}}
+\definemathcommand [arcsinh] [nolop] {\mfunctionlabeltext{arcsinh}}
+\definemathcommand [arctanh] [nolop] {\mfunctionlabeltext{arctanh}}
\definemathcommand [acos] [nolop] {\mfunctionlabeltext{acos}}
\definemathcommand [asin] [nolop] {\mfunctionlabeltext{asin}}
\definemathcommand [atan] [nolop] {\mfunctionlabeltext{atan}}
@@ -133,9 +136,9 @@
\definemathcommand [centercolon] [rel] {\mathstylevcenteredhbox\colon}
\definemathcommand [colonminus] [rel] {\centercolon\colonsep\mathrel{-}}
-\definemathcommand [minuscolon] [rel] {\mathrel{-}\colonsep\centercolon} % native char
-\definemathcommand [colonequals] [rel] {\centercolon\colonsep=} % native char
-\definemathcommand [equalscolon] [rel] {=\centercolon\colonsep} % native char
+%definemathcommand [minuscolon] [rel] {\mathrel{-}\colonsep\centercolon} % native char
+%definemathcommand [colonequals] [rel] {\centercolon\colonsep=} % native char
+%definemathcommand [equalscolon] [rel] {=\centercolon\colonsep} % native char
\definemathcommand [colonapprox] [rel] {\centercolon\colonsep\approx}
\definemathcommand [approxcolon] [rel] {\approx\centercolon\colonsep}
\definemathcommand [colonsim] [rel] {\centercolon\colonsep\sim}
diff --git a/tex/context/base/mkiv/math-del.mkiv b/tex/context/base/mkiv/math-del.mkiv
index 269b6946a..dd847d2f1 100644
--- a/tex/context/base/mkiv/math-del.mkiv
+++ b/tex/context/base/mkiv/math-del.mkiv
@@ -69,13 +69,16 @@
% A new experiment:
-\installcorenamespace {mathextensible}
+% Hm, we already have \definemathextensible so we need to preserve that one. We now
+% use \definemathdelimiter.
-\installcommandhandler \??mathextensible {mathextensible} \??mathextensible
+\installcorenamespace {mathdelimiter}
-\let\setupmathextensibles\setupmathextensible
+\installcommandhandler \??mathdelimiter {mathdelimiter} \??mathdelimiter
-\setupmathextensibles
+\let\setupmathdelimiters\setupmathdelimiter
+
+\setupmathdelimiters
[\c!symbol=0,
\c!command=\v!yes,
\c!factor=1.5,
@@ -85,50 +88,50 @@
\c!depth=\exheight]
\appendtoks
- \edef\p_command{\mathextensibleparameter\c!command}%
+ \edef\p_command{\mathdelimiterparameter\c!command}%
\ifx\p_command\v!yes
- \setuevalue\currentmathextensible{\math_fenced_extensible[\currentmathextensible]}%
+ \setuevalue\currentmathdelimiter{\math_fenced_extensible[\currentmathdelimiter]}%
\fi
-\to \everydefinemathextensible
+\to \everydefinemathdelimiter
\unexpanded\def\math_fenced_extensible
{\dodoubleempty\math_fenced_extensible_indeed}
\unexpanded\def\math_fenced_extensible_indeed[#1][#2]%
{\mathop
- {\edef\currentmathextensible{#1}%
- \edef\p_factor{\mathextensibleparameter\c!factor}%
+ {\edef\currentmathdelimiter{#1}%
+ \edef\p_factor{\mathdelimiterparameter\c!factor}%
\ifsecondargument
\doifassignmentelse{#2}
- {\setupcurrentmathextensible[#2]%
- \edef\p_factor{\mathextensibleparameter\c!factor}}%
+ {\setupcurrentmathdelimiter[#2]%
+ \edef\p_factor{\mathdelimiterparameter\c!factor}}%
{\edef\p_factor{#2}}%
\else
- \edef\p_factor{\mathextensibleparameter\c!factor}%
+ \edef\p_factor{\mathdelimiterparameter\c!factor}%
\fi
- \edef\p_exact{\mathextensibleparameter\c!exact}%
- \edef\p_axis {\mathextensibleparameter\c!axis}%
- \edef\p_leftoffset{\mathextensibleparameter\c!leftoffset}%
- \edef\p_rightoffset{\mathextensibleparameter\c!rightoffset}%
+ \edef\p_exact{\mathdelimiterparameter\c!exact}%
+ \edef\p_axis {\mathdelimiterparameter\c!axis}%
+ \edef\p_leftoffset{\mathdelimiterparameter\c!leftoffset}%
+ \edef\p_rightoffset{\mathdelimiterparameter\c!rightoffset}%
\ifx\p_leftoffset\empty\else
\mskip\p_leftoffset
\fi
\Uvextensible
\ifx\p_exact\v!yes exact \fi
\ifx\p_axis \v!yes axis \fi
- height \p_factor\dimexpr\mathextensibleparameter\c!height\relax
- depth \p_factor\dimexpr\mathextensibleparameter\c!depth \relax
- \Udelimiter\zerocount\zerocount\mathextensibleparameter\c!symbol
+ height \p_factor\dimexpr\mathdelimiterparameter\c!height\relax
+ depth \p_factor\dimexpr\mathdelimiterparameter\c!depth \relax
+ \Udelimiter\zerocount\zerocount\mathdelimiterparameter\c!symbol
\relax
\ifx\p_rightoffset\empty\else
\mskip\p_rightoffset
\fi}}
-\let\mathextensible\math_fenced_extensible
+\let\mathdelimiter\math_fenced_extensible
-\definemathextensible[integral][\c!symbol="222B]
+\definemathdelimiter[integral][\c!symbol="222B]
-% \setupmathextensible[integral][rightoffset=-3mu,exact=yes,factor=2]
+% \setupmathdelimiter[integral][rightoffset=-3mu,exact=yes,factor=2]
%
% \let\inlineint \int
% \let\displayint\integral
diff --git a/tex/context/base/mkiv/math-dim.lua b/tex/context/base/mkiv/math-dim.lua
index ba0235a5b..72b9d7e50 100644
--- a/tex/context/base/mkiv/math-dim.lua
+++ b/tex/context/base/mkiv/math-dim.lua
@@ -6,9 +6,6 @@ if not modules then modules = { } end modules ['math-dim'] = {
license = "see context related readme files"
}
--- Beware: only Taco and Ulrik really understands in depth what these dimensions
--- do so if you run into problems ask on the context list.
-
-- The radical_rule value is also used as a trigger. In luatex the accent
-- placement happens either the opentype way (using top_accent cum suis) or the
-- traditional way. In order to determine what method to use the \Umathradicalrule
@@ -18,6 +15,8 @@ if not modules then modules = { } end modules ['math-dim'] = {
-- which case the engine takes the rulethickness. In c-speak:
--
-- int compat_mode = (radical_rule(cur_style) == undefined_math_parameter) ;
+--
+-- In the meantime things have been updated and we now have two code paths.
local abs, next = math.abs, next
diff --git a/tex/context/base/mkiv/math-dir.lua b/tex/context/base/mkiv/math-dir.lua
index cba991b84..759f1e797 100644
--- a/tex/context/base/mkiv/math-dir.lua
+++ b/tex/context/base/mkiv/math-dir.lua
@@ -33,7 +33,6 @@ local getid = nuts.getid
local getlist = nuts.getlist
local getattr = nuts.getattr
-local setfield = nuts.setfield
local setchar = nuts.setchar
local setlist = nuts.setlist
diff --git a/tex/context/base/mkiv/math-ext.lua b/tex/context/base/mkiv/math-ext.lua
index b00d6cde2..a4b865713 100644
--- a/tex/context/base/mkiv/math-ext.lua
+++ b/tex/context/base/mkiv/math-ext.lua
@@ -6,6 +6,8 @@ if not modules then modules = { } end modules ['math-ext'] = {
license = "see context related readme files"
}
+local rawget = rawget
+
local trace_virtual = false trackers.register("math.virtual", function(v) trace_virtual = v end)
local basename = file.basename
@@ -18,33 +20,40 @@ local report_math = logs.reporter("mathematics")
mathematics.extras = mathematics.extras or { }
local extras = mathematics.extras
-characters.math = characters.math or { }
-local mathdata = characters.math
+local mathplus = { }
local chardata = characters.data
+local mathpairs = characters.mathpairs
+
+-- todo: store them and skip storage if already stored
+-- todo: make a char-ctx.lua (or is this already side effect of save in format)
-function extras.add(unicode,t) -- todo: if already stored ...
- local min, max = mathematics.extrabase, mathematics.privatebase - 1
- -- if mathdata[unicode] or chardata[unicode] then
- -- report_math("extra %U overloads existing character",unicode)
- -- end
+local function addextra(unicode)
+ local min = mathematics.extrabase
+ local max = mathematics.privatebase - 1
if unicode >= min and unicode <= max then
- mathdata[unicode], chardata[unicode] = t, t
+ if chardata[unicode] then
+ mathplus[unicode] = true
+ else
+ report_math("extra %U is not a registered code point",unicode)
+ end
else
report_math("extra %U should be in range %U - %U",unicode,min,max)
end
end
+extras.add = addextra
+
function extras.copy(target,original)
local characters = target.characters
local properties = target.properties
local parameters = target.parameters
- for unicode, extradesc in next, mathdata do
- -- always, because in an intermediate step we can have a non math font
- local extrachar = characters[unicode]
+ for unicode in table.sortedhash(mathplus) do
+ local extradesc = chardata[unicode]
local nextinsize = extradesc.nextinsize
if nextinsize then
- local first = 1
- local charused = unicode
+ local extrachar = characters[unicode]
+ local first = 1
+ local charused = unicode
if not extrachar then
for i=1,#nextinsize do
local slot = nextinsize[i]
@@ -104,94 +113,7 @@ end
utilities.sequencers.appendaction(mathactions,"system","mathematics.extras.copy")
--- 0xFE302 -- 0xFE320 for accents (gone with new lm/gyre)
---
--- extras.add(0xFE302, {
--- category="mn",
--- description="WIDE MATHEMATICAL HAT",
--- direction="nsm",
--- linebreak="cm",
--- mathclass="topaccent",
--- mathname="widehat",
--- mathstretch="h",
--- unicodeslot=0xFE302,
--- nextinsize={ 0x00302, 0x0005E },
--- } )
---
--- extras.add(0xFE303, {
--- category="mn",
--- cjkwd="a",
--- description="WIDE MATHEMATICAL TILDE",
--- direction="nsm",
--- linebreak="cm",
--- mathclass="topaccent",
--- mathname="widetilde",
--- mathstretch="h",
--- unicodeslot=0xFE303,
--- nextinsize={ 0x00303, 0x0007E },
--- } )
-
--- 0xFE321 -- 0xFE340 for missing characters
-
-extras.add(0xFE321, {
- category="sm",
- description="MATHEMATICAL SHORT BAR",
- -- direction="on",
- -- linebreak="nu",
- mathclass="relation",
- mathname="mapstochar",
- unicodeslot=0xFE321,
-} )
-
-extras.add(0xFE322, {
- category="sm",
- description="MATHEMATICAL LEFT HOOK",
- mathclass="relation",
- mathname="lhook",
- unicodeslot=0xFE322,
-} )
-
-extras.add(0xFE323, {
- category="sm",
- description="MATHEMATICAL RIGHT HOOK",
- mathclass="relation",
- mathname="rhook",
- unicodeslot=0xFE323,
-} )
-
-extras.add(0xFE324, {
- category="sm",
- description="MATHEMATICAL SHORT BAR MIRRORED",
--- direction="on",
--- linebreak="nu",
- mathclass="relation",
- mathname="mapsfromchar",
- unicodeslot=0xFE324,
-} )
-
---~ extras.add(0xFE304, {
---~ category="sm",
---~ description="TOP AND BOTTOM PARENTHESES",
---~ direction="on",
---~ linebreak="al",
---~ mathclass="doubleaccent",
---~ mathname="doubleparent",
---~ unicodeslot=0xFE304,
---~ accents={ 0x023DC, 0x023DD },
---~ } )
-
---~ extras.add(0xFE305, {
---~ category="sm",
---~ description="TOP AND BOTTOM BRACES",
---~ direction="on",
---~ linebreak="al",
---~ mathclass="doubleaccent",
---~ mathname="doublebrace",
---~ unicodeslot=0xFE305,
---~ accents={ 0x023DE, 0x023DF },
---~ } )
-
---~ \Umathchardef\braceld="0 "1 "FF07A
---~ \Umathchardef\bracerd="0 "1 "FF07B
---~ \Umathchardef\bracelu="0 "1 "FF07C
---~ \Umathchardef\braceru="0 "1 "FF07D
+extras.add(0xFE321)
+extras.add(0xFE322)
+extras.add(0xFE323)
+extras.add(0xFE324)
diff --git a/tex/context/base/mkiv/math-fbk.lua b/tex/context/base/mkiv/math-fbk.lua
index 564ece8d7..7aa8c437f 100644
--- a/tex/context/base/mkiv/math-fbk.lua
+++ b/tex/context/base/mkiv/math-fbk.lua
@@ -6,6 +6,8 @@ if not modules then modules = { } end modules ['math-fbk'] = {
license = "see context related readme files"
}
+local next, type = next, type
+
local trace_fallbacks = false trackers.register("math.fallbacks", function(v) trace_fallbacks = v end)
local report_fallbacks = logs.reporter("math","fallbacks")
@@ -13,6 +15,7 @@ local report_fallbacks = logs.reporter("math","fallbacks")
local formatters = string.formatters
local fastcopy = table.fastcopy
local byte = string.byte
+local sortedhash = table.sortedhash
local fallbacks = { }
mathematics.fallbacks = fallbacks
@@ -26,120 +29,114 @@ local lastmathids = fonts.hashes.lastmathids
-- that order we could use their id's .. i.e. we could always add a font
-- table with those id's .. in fact, we could also add a whole lot more
-- as it doesn't hurt
---
--- todo: use index 'true when luatex provides that feature (on the agenda)
--- to be considered:
---
--- in luatex provide reserve_id (and pass id as field of tfmdata)
--- in context define three sizes but pass them later i.e. do virtualize afterwards
+local scripscriptdelayed = { } -- 1.005 : add characters later
+local scriptdelayed = { } -- 1.005 : add characters later
function fallbacks.apply(target,original)
- local mathparameters = target.mathparameters -- why not hasmath
- if mathparameters then
- local characters = target.characters
- local parameters = target.parameters
- local mathsize = parameters.mathsize
- local size = parameters.size
- local usedfonts = target.fonts
- if not usedfonts then
- usedfonts = { }
- target.fonts = usedfonts
- end
- -- This is not okay yet ... we have no proper way to refer to 'self'
- -- otherwise I will make my own id allocator).
- local self = #usedfonts == 0 and font.nextid() or nil -- will be true
- local textid, scriptid, scriptscriptid
- local textindex, scriptindex, scriptscriptindex
- local textdata, scriptdata, scriptscriptdata
- if mathsize == 3 then
- -- scriptscriptsize
- -- textid = nil -- self
- -- scriptid = nil -- no smaller
- -- scriptscriptid = nil -- no smaller
- textid = self
- scriptid = self
- scriptscriptid = self
- elseif mathsize == 2 then
- -- scriptsize
- -- textid = nil -- self
- textid = self
- scriptid = lastmathids[3]
- scriptscriptid = lastmathids[3]
- else
- -- textsize
- -- textid = nil -- self
- textid = self
- scriptid = lastmathids[2]
- scriptscriptid = lastmathids[3]
- end
- if textid then
- textindex = #usedfonts + 1
- usedfonts[textindex] = { id = textid }
--- textdata = identifiers[textid] or target
- textdata = target
- else
- textdata = target
- end
- if scriptid then
- scriptindex = #usedfonts + 1
- usedfonts[scriptindex] = { id = scriptid }
- scriptdata = identifiers[scriptid]
- else
- scriptindex = textindex
- scriptdata = textdata
- end
- if scriptscriptid then
- scriptscriptindex = #usedfonts + 1
- usedfonts[scriptscriptindex] = { id = scriptscriptid }
- scriptscriptdata = identifiers[scriptscriptid]
- else
- scriptscriptindex = scriptindex
- scriptscriptdata = scriptdata
- end
- -- report_fallbacks("used textid: %S, used script id: %S, used scriptscript id: %S",textid,scriptid,scriptscriptid)
- local data = {
- textdata = textdata,
- scriptdata = scriptdata,
- scriptscriptdata = scriptscriptdata,
- textindex = textindex,
- scriptindex = scriptindex,
- scriptscriptindex = scriptscriptindex,
- textid = textid,
- scriptid = scriptid,
- scriptscriptid = scriptscriptid,
- characters = characters,
- unicode = k,
- target = target,
- original = original,
- size = size,
- mathsize = mathsize,
- }
- target.mathrelation = data
- -- inspect(usedfonts)
- for k, v in next, virtualcharacters do
- if not characters[k] then
- local tv = type(v)
- local cd = nil
- if tv == "table" then
- cd = v
- elseif tv == "number" then
- cd = characters[v]
- elseif tv == "function" then
- cd = v(data)
- end
- if cd then
- characters[k] = cd
- else
- -- something else
- end
- if trace_fallbacks and characters[k] then
- report_fallbacks("extending math font %a with %U",target.properties.fullname,k)
- end
+ local mathparameters = target.mathparameters
+ if not mathparameters or not next(mathparameters) then
+ return
+ end
+ -- we also have forcedsize ... at this moment we already passed through
+ -- constructors.scale so we have this set
+ local parameters = target.parameters
+ local mathsize = parameters.mathsize
+ if mathsize < 1 or mathsize > 3 then
+ return
+ end
+ local characters = target.characters
+ local size = parameters.size
+ local usedfonts = target.fonts
+ if not usedfonts then
+ usedfonts = { { id = 0 } } -- we need at least one entry (automatically done anyway)
+ target.fonts = usedfonts
+ end
+ -- not used
+ local textid, scriptid, scriptscriptid
+ local textindex, scriptindex, scriptscriptindex
+ local textdata, scriptdata, scriptscriptdata
+ if mathsize == 3 then
+ -- scriptscriptsize
+ textid = 0
+ scriptid = 0
+ scriptscriptid = 0
+ elseif mathsize == 2 then
+ -- scriptsize
+ textid = 0
+ scriptid = lastmathids[3] or 0
+ scriptscriptid = lastmathids[3] or 0
+ else
+ -- textsize
+ textid = 0
+ scriptid = lastmathids[2] or 0
+ scriptscriptid = lastmathids[3] or 0
+ end
+ if textid and textid ~= 0 then
+ textindex = #usedfonts + 1
+ textdata = target
+ usedfonts[textindex] = { id = textid }
+ else
+ textdata = target
+ end
+ if scriptid and scriptid ~= 0 then
+ scriptindex = #usedfonts + 1
+ scriptdata = identifiers[scriptid]
+ usedfonts[scriptindex] = { id = scriptid }
+ else
+ scriptindex = textindex
+ scriptdata = textdata
+ end
+ if scriptscriptid and scriptscriptid ~= 0 then
+ scriptscriptindex = #usedfonts + 1
+ scriptscriptdata = identifiers[scriptscriptid]
+ usedfonts[scriptscriptindex] = { id = scriptscriptid }
+ else
+ scriptscriptindex = scriptindex
+ scriptscriptdata = scriptdata
+ end
+ -- report_fallbacks("used textid: %S, used script id: %S, used scriptscript id: %S",textid,scriptid,scriptscriptid)
+ local data = {
+ textdata = textdata,
+ scriptdata = scriptdata,
+ scriptscriptdata = scriptscriptdata,
+ textindex = textindex,
+ scriptindex = scriptindex,
+ scriptscriptindex = scriptscriptindex,
+ textid = textid,
+ scriptid = scriptid,
+ scriptscriptid = scriptscriptid,
+ characters = characters,
+ unicode = k,
+ target = target,
+ original = original,
+ size = size,
+ mathsize = mathsize,
+ }
+ target.mathrelation = data
+ --
+ for k, v in sortedhash(virtualcharacters) do
+ if not characters[k] then
+ local tv = type(v)
+ local cd = nil
+ if tv == "table" then
+ cd = v
+ elseif tv == "number" then
+ cd = characters[v]
+ elseif tv == "function" then
+ cd = v(data) -- ,k
+ end
+ if cd then
+ characters[k] = cd
+ else
+ -- something else
+ end
+ if trace_fallbacks and characters[k] then
+ report_fallbacks("extending math font %a with %U",target.properties.fullname,k)
end
end
- data.unicode = nil
end
+ data.unicode = nil
end
utilities.sequencers.appendaction("aftercopyingcharacters","system","mathematics.fallbacks.apply")
@@ -160,7 +157,7 @@ end
local function raised(data,down)
local replacement = data.replacement
- local character = data.scriptdata.characters[replacement]
+ local character = data.scriptdata.characters[replacement]
if character then
return {
width = character.width,
@@ -169,6 +166,7 @@ local function raised(data,down)
commands = {
{ "down", down and data.size/4 or -data.size/2 }, -- maybe exheight
reference(data.scriptindex,replacement)
+ -- { "slot", data.scriptindex or 0, char } -- hm, data.mathrelation.scriptindex
}
}
end
@@ -232,35 +230,18 @@ end
local addextra = mathematics.extras.add
-addextra(0xFE350, {
- category="sm",
- description="MATHEMATICAL DOUBLE ARROW LEFT END",
- mathclass="relation",
- mathname="ctxdoublearrowfillleftend",
- unicodeslot=0xFE350,
-} )
-
-addextra(0xFE351, {
- category="sm",
- description="MATHEMATICAL DOUBLE ARROW MIDDLE PART",
- mathclass="relation",
- mathname="ctxdoublearrowfillmiddlepart",
- unicodeslot=0xFE351,
-} )
-
-addextra(0xFE352, {
- category="sm",
- description="MATHEMATICAL DOUBLE ARROW RIGHT END",
- mathclass="relation",
- mathname="ctxdoublearrowfillrightend",
- unicodeslot=0xFE352,
-} )
+addextra(0xFE350) -- MATHEMATICAL DOUBLE ARROW LEFT END
+addextra(0xFE351) -- MATHEMATICAL DOUBLE ARROW MIDDLE PART
+addextra(0xFE352) -- MATHEMATICAL DOUBLE ARROW RIGHT END
local push = { "push" }
local pop = { "pop" }
local leftarrow = { "char", 0x2190 }
local relbar = { "char", 0x2212 }
local rightarrow = { "char", 0x2192 }
+-- local leftarrow = { "slot", 0, 0x2190 }
+-- local relbar = { "slot", 0, 0x2212 }
+-- local rightarrow = { "slot", 0, 0x2192 }
virtualcharacters[0xFE350] = function(data)
-- return combined(data,0x2190,0x2212) -- leftarrow relbar
@@ -334,10 +315,10 @@ local function accent_to_extensible(target,newchr,original,oldchr,height,depth,s
local characters = target.characters
local olddata = characters[oldchr]
-- brrr ... pagella has only next
- if olddata and not olddata.commands and olddata.width > 0 then
+ if olddata and not olddata.commands then -- not: and olddata.width > 0
local addprivate = fonts.helpers.addprivate
if swap then
- swap = characters[swap]
+ swap = characters[swap]
height = swap.depth or 0
depth = 0
else
@@ -347,6 +328,7 @@ local function accent_to_extensible(target,newchr,original,oldchr,height,depth,s
local correction = swap and { "down", (olddata.height or 0) - height } or { "down", olddata.height + (offset or 0)}
local newdata = {
commands = { correction, { "slot", 1, oldchr } },
+ -- commands = { correction, { "slot", 0, oldchr } },
width = olddata.width,
height = height,
depth = depth,
@@ -359,11 +341,13 @@ local function accent_to_extensible(target,newchr,original,oldchr,height,depth,s
if oldnextdata then
local newnextdata = {
commands = { correction, { "slot", 1, nextglyph } },
+ -- commands = { correction, { "slot", 0, nextglyph } },
width = oldnextdata.width,
height = height,
depth = depth,
}
- local newnextglyph = addprivate(target,formatters["M-N-%H"](nextglyph),newnextdata)
+-- local newnextglyph = addprivate(target,formatters["M-N-%H"](nextglyph),newnextdata)
+ local newnextglyph = addprivate(target,nil,newnextdata)
newdata.next = newnextglyph
local nextnextglyph = oldnextdata.next
if nextnextglyph == nextglyph then
@@ -389,11 +373,13 @@ local function accent_to_extensible(target,newchr,original,oldchr,height,depth,s
if olddata then
local newdata = {
commands = { correction, { "slot", 1, oldglyph } },
+ -- commands = { correction, { "slot", 0, oldglyph } },
width = olddata.width,
height = height,
depth = depth,
}
- hvi.glyph = addprivate(target,formatters["M-H-%H"](oldglyph),newdata)
+-- hvi.glyph = addprivate(target,formatters["M-H-%H"](oldglyph),newdata)
+ hvi.glyph = addprivate(target,nil,newdata)
else
report_fallbacks("error in fallback: no valid horiz_variants, slot %X, index %i",oldglyph,i)
end
@@ -405,24 +391,38 @@ local function accent_to_extensible(target,newchr,original,oldchr,height,depth,s
end
end
-virtualcharacters[0x203E] = function(data) -- could be FE33E instead
+virtualcharacters[0x203E] = function(data)
local target = data.target
local height, depth = 0, 0
- local mathparameters = target.mathparameters
- if mathparameters then
- height = mathparameters.OverbarVerticalGap
- depth = mathparameters.UnderbarVerticalGap
- else
+-- local mathparameters = target.mathparameters
+-- if mathparameters then
+-- height = mathparameters.OverbarVerticalGap
+-- depth = mathparameters.UnderbarVerticalGap
+-- else
height = target.parameters.xheight/4
depth = height
- end
+-- end
return accent_to_extensible(target,0x203E,data.original,0x0305,height,depth,nil,nil,0x203E)
end
-virtualcharacters[0xFE33E] = virtualcharacters[0x203E] -- convenient
-virtualcharacters[0xFE33F] = virtualcharacters[0x203E] -- convenient
+-- virtualcharacters[0xFE33E] = virtualcharacters[0x203E] -- convenient
+-- virtualcharacters[0xFE33F] = virtualcharacters[0x203E] -- convenient
--- spacing
+virtualcharacters[0xFE33E] = function(data)
+ local target = data.target
+ local height = 0
+ local depth = target.parameters.xheight/4
+ return accent_to_extensible(target,0xFE33E,data.original,0x0305,height,depth,nil,nil,0x203E)
+end
+
+virtualcharacters[0xFE33F] = function(data)
+ local target = data.target
+ local height = target.parameters.xheight/8
+ local depth = height
+ return accent_to_extensible(target,0xFE33F,data.original,0x0305,height,depth,nil,nil,0x203E)
+end
+
+-- spacing (no need for a cache of widths)
local c_zero = byte('0')
local c_period = byte('.')
@@ -431,7 +431,7 @@ local function spacefraction(data,fraction)
local width = fraction * data.target.parameters.space
return {
width = width,
- commands = { right = width }
+ commands = { { "right", width } }
}
end
@@ -439,7 +439,7 @@ local function charfraction(data,char)
local width = data.target.characters[char].width
return {
width = width,
- commands = { right = width }
+ commands = { { "right", width } }
}
end
@@ -447,7 +447,7 @@ local function quadfraction(data,fraction)
local width = fraction * data.target.parameters.quad
return {
width = width,
- commands = { right = width }
+ commands = { { "right", width } }
}
end
@@ -480,17 +480,17 @@ local function smashed(data,unicode,swap,private)
end
end
-addextra(0xFE3DE, { description="EXTENSIBLE OF 0x03DE", unicodeslot=0xFE3DE, mathextensible = "r", mathstretch = "h", mathclass = "topaccent" } )
-addextra(0xFE3DC, { description="EXTENSIBLE OF 0x03DC", unicodeslot=0xFE3DC, mathextensible = "r", mathstretch = "h", mathclass = "topaccent" } )
-addextra(0xFE3B4, { description="EXTENSIBLE OF 0x03B4", unicodeslot=0xFE3B4, mathextensible = "r", mathstretch = "h", mathclass = "topaccent" } )
+addextra(0xFE3DE) -- EXTENSIBLE OF 0x03DE
+addextra(0xFE3DC) -- EXTENSIBLE OF 0x03DC
+addextra(0xFE3B4) -- EXTENSIBLE OF 0x03B4
virtualcharacters[0xFE3DE] = function(data) return smashed(data,0x23DE,0x23DF,0xFE3DE) end
virtualcharacters[0xFE3DC] = function(data) return smashed(data,0x23DC,0x23DD,0xFE3DC) end
virtualcharacters[0xFE3B4] = function(data) return smashed(data,0x23B4,0x23B5,0xFE3B4) end
-addextra(0xFE3DF, { description="EXTENSIBLE OF 0x03DF", unicodeslot=0xFE3DF, mathextensible = "r", mathstretch = "h", mathclass = "botaccent" } )
-addextra(0xFE3DD, { description="EXTENSIBLE OF 0x03DD", unicodeslot=0xFE3DD, mathextensible = "r", mathstretch = "h", mathclass = "botaccent" } )
-addextra(0xFE3B5, { description="EXTENSIBLE OF 0x03B5", unicodeslot=0xFE3B5, mathextensible = "r", mathstretch = "h", mathclass = "botaccent" } )
+addextra(0xFE3DF) -- EXTENSIBLE OF 0x03DF
+addextra(0xFE3DD) -- EXTENSIBLE OF 0x03DD
+addextra(0xFE3B5) -- EXTENSIBLE OF 0x03B5
virtualcharacters[0xFE3DF] = function(data) local c = data.target.characters[0x23DF] if c then c.unicode = 0x23DF return c end end
virtualcharacters[0xFE3DD] = function(data) local c = data.target.characters[0x23DD] if c then c.unicode = 0x23DD return c end end
@@ -498,8 +498,8 @@ virtualcharacters[0xFE3B5] = function(data) local c = data.target.characters[0x2
-- todo: add some more .. numbers might change
-addextra(0xFE302, { description="EXTENSIBLE OF 0x0302", unicodeslot=0xFE302, mathstretch = "h", mathclass = "topaccent" } )
-addextra(0xFE303, { description="EXTENSIBLE OF 0x0303", unicodeslot=0xFE303, mathstretch = "h", mathclass = "topaccent" } )
+addextra(0xFE302) -- EXTENSIBLE OF 0x0302
+addextra(0xFE303) -- EXTENSIBLE OF 0x0303
local function smashed(data,unicode,private)
local target = data.target
@@ -519,18 +519,17 @@ virtualcharacters[0xFE303] = function(data) return smashed(data,0x0303,0xFE303)
-- these primes in fonts are a real mess .. kind of a dead end, so don't wonder about
-- the values below
--- todo: check tounicodes
-
local function smashed(data,unicode,optional)
local oldchar = data.characters[unicode]
if oldchar then
- local xheight = data.target.parameters.xheight
- local height = 1.2 * xheight
- local shift = oldchar.height - height
+ -- local height = 1.25 * data.target.parameters.xheight
+ local height = 0.85 * data.target.mathparameters.AccentBaseHeight
+ local shift = oldchar.height - height
local newchar = {
commands = {
{ "down", shift },
- { "char", unicode },
+ { "slot", 0, unicode },
+-- { "char", unicode },
},
height = height,
width = oldchar.width,
@@ -541,38 +540,14 @@ local function smashed(data,unicode,optional)
end
end
--- -- relocate all but less flexible so not used .. instead some noad hackery plus
--- -- the above
---
--- local function smashed(data,unicode,optional)
--- local oldchar = data.characters[unicode]
--- if oldchar then
--- local xheight = data.target.parameters.xheight
--- local height = oldchar.height
--- local shift = oldchar.height < 1.5*xheight and -(1.8*xheight-height) or 0
--- local newchar = {
--- commands = {
--- { "down", shift },
--- { "char", unicode },
--- },
--- unicode = unicode,
--- height = height,
--- width = oldchar.width,
--- }
--- return newchar
--- elseif not optional then
--- report_fallbacks("missing %U prime in font %a",unicode,data.target.properties.fullname)
--- end
--- end
-
-addextra(0xFE932, { description="SMASHED PRIME 0x02032", unicodeslot=0xFE932 } )
-addextra(0xFE933, { description="SMASHED PRIME 0x02033", unicodeslot=0xFE933 } )
-addextra(0xFE934, { description="SMASHED PRIME 0x02034", unicodeslot=0xFE934 } )
-addextra(0xFE957, { description="SMASHED PRIME 0x02057", unicodeslot=0xFE957 } )
+addextra(0xFE932) -- SMASHED PRIME 0x02032
+addextra(0xFE933) -- SMASHED PRIME 0x02033
+addextra(0xFE934) -- SMASHED PRIME 0x02034
+addextra(0xFE957) -- SMASHED PRIME 0x02057
-addextra(0xFE935, { description="SMASHED BACKWARD PRIME 0x02035", unicodeslot=0xFE935 } )
-addextra(0xFE936, { description="SMASHED BACKWARD PRIME 0x02036", unicodeslot=0xFE936 } )
-addextra(0xFE937, { description="SMASHED BACKWARD PRIME 0x02037", unicodeslot=0xFE937 } )
+addextra(0xFE935) -- SMASHED BACKWARD PRIME 0x02035
+addextra(0xFE936) -- SMASHED BACKWARD PRIME 0x02036
+addextra(0xFE937) -- SMASHED BACKWARD PRIME 0x02037
virtualcharacters[0xFE932] = function(data) return smashed(data,0x02032) end
virtualcharacters[0xFE933] = function(data) return smashed(data,0x02033) end
@@ -583,9 +558,41 @@ virtualcharacters[0xFE935] = function(data) return smashed(data,0x02035,true) en
virtualcharacters[0xFE936] = function(data) return smashed(data,0x02036,true) end
virtualcharacters[0xFE937] = function(data) return smashed(data,0x02037,true) end
+local hack = nil
+
+function mathematics.getridofprime(target,original)
+-- local mathsize = specification.mathsize
+-- if mathsize == 1 or mathsize == 2 or mathsize == 3 then
+ local mathparameters = original.mathparameters
+ if mathparameters and next(mathparameters) then
+ local changed = original.changed
+ if changed then
+ hack = changed[0x02032]
+ changed[0x02032] = nil
+ changed[0x02033] = nil
+ changed[0x02034] = nil
+ changed[0x02057] = nil
+ changed[0x02035] = nil
+ changed[0x02036] = nil
+ changed[0x02037] = nil
+ end
+ end
+end
+
+function mathematics.setridofprime(target,original)
+ local mathparameters = original.mathparameters
+ if mathparameters and next(mathparameters) and original.changed then
+ target.characters[0xFE931] = target.characters[hack or 0x2032]
+ hack = nil
+ end
+end
+
+utilities.sequencers.appendaction("beforecopyingcharacters","system","mathematics.getridofprime")
+utilities.sequencers.appendaction("aftercopyingcharacters", "system","mathematics.setridofprime")
+
-- actuarian (beware: xits has an ugly one)
-addextra(0xFE940, { category = "mn", description="SMALL ANNUITY SYMBOL", unicodeslot=0xFE940, mathclass="topaccent", mathname="smallactuarial" })
+addextra(0xFE940) -- SMALL ANNUITY SYMBOL
local function actuarian(data)
local characters = data.target.characters
@@ -612,3 +619,49 @@ end
virtualcharacters[0x020E7] = actuarian -- checked
virtualcharacters[0xFE940] = actuarian -- unchecked
+
+local function equals(data,unicode,snippet,advance,n) -- mathpair needs them
+ local characters = data.target.characters
+ local parameters = data.target.parameters
+ local basechar = characters[snippet]
+ local advance = advance * parameters.quad
+ return {
+ unicode = unicode,
+ width = n*basechar.width + (n-1)*advance,
+ commands = {
+ { "char", snippet },
+ { "right", advance },
+ { "char", snippet },
+ n > 2 and { "right", advance } or nil,
+ n > 2 and { "char", snippet } or nil,
+ },
+ }
+end
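+
+-- The width covers n copies of the base glyph plus (n-1) gaps; for n > 2 a
+-- third glyph (and the kern before it) is appended, otherwise those two
+-- entries evaluate to nil and are simply left out of the command list.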
+
+virtualcharacters[0x2A75] = function(data) return equals(data,0x2A75,0x003D, 1/5,2) end -- ==
+virtualcharacters[0x2A76] = function(data) return equals(data,0x2A76,0x003D, 1/5,3) end -- ===
+virtualcharacters[0x2980] = function(data) return equals(data,0x2980,0x007C,-1/8,3) end -- |||
+
+-- addextra(0xFE941) -- EXTREMELY IDENTICAL TO
+--
+-- virtualcharacters[0xFE941] = function(data) -- this character is only needed for mathpairs
+-- local characters = data.target.characters
+-- local parameters = data.target.parameters
+-- local basechar = characters[0x003D]
+-- local width = basechar.width or 0
+-- local height = basechar.height or 0
+-- local depth = basechar.depth or 0
+-- return {
+-- unicode = 0xFE941,
+-- width = width,
+-- height = height, -- we cheat (no time now)
+-- depth = depth, -- we cheat (no time now)
+-- commands = {
+-- { "down", - height/2 }, -- sort of works
+-- { "char", 0x003D },
+-- { "right", -width },
+-- { "down", height }, -- sort of works
+-- { "char", 0x003D },
+-- },
+-- }
+-- end
diff --git a/tex/context/base/mkiv/math-fen.mkiv b/tex/context/base/mkiv/math-fen.mkiv
index 320dffeb8..a32ea410e 100644
--- a/tex/context/base/mkiv/math-fen.mkiv
+++ b/tex/context/base/mkiv/math-fen.mkiv
@@ -15,6 +15,14 @@
\unprotect
+% maybe always "method=auto" for:
+%
+% \switchtobodyfont[cambria]
+% \ruledhbox{$f(x)$}
+% \ruledhbox{\mathdelimitersmode6$f\left(x\right)$}
+% \ruledhbox{\mathdelimitersmode7$f\left(x\right)$}
+% \ruledhbox{$f\left(\frac{1}{x}\right)$}
+
% todo: mathstyle
% \definemathfence [fancybracket] [bracket] [command=yes,color=blue]
@@ -38,12 +46,14 @@
\let\setupmathfences\setupmathfence
\setupmathfences
- [\c!left=,
+ [\c!method=, % maybe always \v!auto
+ \c!left=,
\c!right=,
\c!middle=,
\c!mathstyle=,
\c!color=,
\c!command=,
+ \c!mathclass=,
\c!factor=\v!auto]
\appendtoks
@@ -61,17 +71,40 @@
{\ifx#1\empty
#2.%
\else
+ \edef\p_mathclass{\mathfenceparameter\c!mathclass}%
\edef\p_factor{\mathfenceparameter\c!factor}%
\ifx\p_factor\empty
- #2%
+ \ifx\p_mathclass\empty
+ #2%
+ \else
+ #3%
+ \s!class\p_mathclass
+ \fi
\else\ifx\p_factor\v!auto
- #2%
+ \ifx\p_mathclass\empty
+ #2%
+ \else
+ #3%
+ \s!class\p_mathclass
+ \fi
\else\ifx\p_factor\v!none
- #3\s!height\zeropoint\s!depth\zeropoint\s!axis
- #2%
+ #3%
+ \s!height\zeropoint
+ \s!depth\zeropoint
+ \ifx\p_mathclass\empty\else
+ \s!class\p_mathclass
+ \fi
+ \s!axis
+ % #2%
\else
\scratchdimen\dimexpr\p_factor\bodyfontsize/2\relax
- #3\s!height\scratchdimen\s!depth\scratchdimen\s!axis
+ #3%
+ \s!height\scratchdimen
+ \s!depth\scratchdimen
+ \ifx\p_mathclass\empty\else
+ \s!class\p_mathclass
+ \fi
+ \s!axis
\fi\fi\fi
\Udelimiter#4\fam#1\relax
\fi}
@@ -94,7 +127,7 @@
\def\math_fenced_middle
{\edef\p_middle
- {\mathfenceparameter\c!middle}%
+ {\mathfenceparameter\c!middle}%
\mskip\thinmuskip
\math_fenced_color_push
% \normalmiddle\ifx\p_middle\empty.\else\Udelimiter\plusfour\fam\p_middle\relax\fi
@@ -771,14 +804,14 @@
\unexpanded\def\stopcheckedfences
{\endgroup}
-\appendtoks
+% \appendtoks
% maybe: safeguard against overloading
%
% \let\left \math_fences_used_left
% \let\right \math_fences_used_right
% \let\middle \math_fences_used_middle
% \let\leftorright\math_fences_used_both
-\to \everymathematics
+% \to \everymathematics
\appendtoks
\ifx\currentmathfence\empty
@@ -786,6 +819,19 @@
\fi
\to \everysetupmathfence
+\newconstant\c_math_fences_delimiters_mode \c_math_fences_delimiters_mode"16 % \numexpr"02+"04+"10\relax
+
+%unexpanded\def\enableautofencemode {\mathdelimitersmode\plussix} % the shift (1) is too fragile
+\unexpanded\def\enableautofencemode {\mathdelimitersmode\c_math_fences_delimiters_mode}
+
+\unexpanded\def\disableautofencemode{\mathdelimitersmode\zerocount}
+
+\appendtoks
+ \ifx\currentmathfence\empty
+ \doifelse{\mathfenceparameter\c!method}\v!auto\enableautofencemode\disableautofencemode
+ \fi
+\to \everysetupmathfence
+
% some day default: \setupmathfences[\c!state=\v!auto]
%D The next characters were used for constructing nicer extensibles but
diff --git a/tex/context/base/mkiv/math-frc.lua b/tex/context/base/mkiv/math-frc.lua
index 639edc94b..5c4879527 100644
--- a/tex/context/base/mkiv/math-frc.lua
+++ b/tex/context/base/mkiv/math-frc.lua
@@ -6,15 +6,15 @@ if not modules then modules = { } end modules ['math-frc'] = {
license = "see context related readme files"
}
-local utfchar = utf.char
+local utfchar = utf.char
-local context = context
+local context = context
local variables = interfaces.variables
-local v_no = variables.no
-local v_yes = variables.yes
+local v_no = variables.no
+local v_yes = variables.yes
-local resolved = {
+local resolved = {
[0x007B] = "\\{",
[0x007D] = "\\}",
}
diff --git a/tex/context/base/mkiv/math-frc.mkiv b/tex/context/base/mkiv/math-frc.mkiv
index 16ea6e9e1..9a5ce62b0 100644
--- a/tex/context/base/mkiv/math-frc.mkiv
+++ b/tex/context/base/mkiv/math-frc.mkiv
@@ -26,7 +26,7 @@
%D This module is reimplemented in \MKIV\ style.
-\registerctxluafile{math-frc}{1.001}
+\registerctxluafile{math-frc}{}
%D \macros
%D {frac, xfrac, xxfrac}
@@ -195,6 +195,11 @@
{\begingroup
\edef\currentmathfraction{#1}%
%
+ \edef\p_math_fraction_fences{\mathfractionparameter\c!fences}%
+ \ifx\p_math_fraction_fences\empty \else
+ \math_fenced_fenced_start\p_math_fraction_fences
+ \fi
+ %
\d_math_fraction_margin\mathfractionparameter\c!margin
%
\edef\p_math_fractions_color{\mathfractionparameter\c!color}%
@@ -212,6 +217,12 @@
\expandafter\math_frac_colored
\fi}
+\unexpanded\def\math_frac_wrapup
+ {\ifx\p_math_fraction_fences\empty \else
+ \math_fenced_fenced_stop\p_math_fraction_fences
+ \fi
+ \endgroup}
+
\unexpanded\def\math_frac_colored#1#2%
{\savecolor
\colo_helpers_activate\p_math_fractions_color
@@ -222,11 +233,18 @@
% we use utfchar anyway so we can as well do all at the lua end
+\def\math_frac_no_delim{0x2E}
+
\def\math_frac_command
{\clf_mathfraction
{\mathfractionparameter\c!rule}%
- \mathfractionparameter\c!left\space
- \mathfractionparameter\c!right\space
+ \ifx\p_math_fraction_fences\empty
+ \mathfractionparameter\c!left \space
+ \mathfractionparameter\c!right\space
+ \else
+ \math_frac_no_delim\space
+ \math_frac_no_delim\space
+ \fi
\dimexpr\mathfractionparameter\c!rulethickness\relax
\relax}
@@ -235,6 +253,8 @@
% also makes testing easier. When left and right margins are needed we might merge the
% variants again. After all, these are not real installers.
+% the denominator is in cramped!
+
\setvalue{\??mathfractionalternative\v!inner}%
{\ifcase\d_math_fraction_margin
\expandafter\math_fraction_inner_normal
@@ -265,7 +285,8 @@
\math_frac_command
{\usemathstyleparameter\mathfractionparameter{\m_fractions_strut_bot#2}}%
}%
- }\endgroup}
+ }%
+ \math_frac_wrapup}
\def\math_fraction_outer_normal#1#2%
{\Ustack{%
@@ -275,7 +296,8 @@
\math_frac_command
{\m_fractions_strut_bot#2}%
}%
- }\endgroup}
+ }%
+ \math_frac_wrapup}
\def\math_fraction_both_normal#1#2%
{\Ustack{%
@@ -285,7 +307,8 @@
\math_frac_command
{\usemathstyleparameter\mathfractionparameter\m_fractions_strut_bot#2}%
}%
- }\endgroup}
+ }%
+ \math_frac_wrapup}
\def\math_fraction_inner_margin#1#2%
{\Ustack{%
@@ -298,7 +321,8 @@
\usemathstyleparameter\mathfractionparameter{\m_fractions_strut_bot#2}%
\kern\d_math_fraction_margin}%
}%
- }\endgroup}
+ }%
+ \math_frac_wrapup}
\def\math_fraction_outer_margin#1#2%
{\Ustack{%
@@ -312,7 +336,8 @@
\m_fractions_strut_bot#2%
\kern\d_math_fraction_margin}%
}%
- }\endgroup}
+ }%
+ \math_frac_wrapup}
\def\math_fraction_both_margin#1#2%
{\Ustack{%
@@ -326,8 +351,8 @@
\usemathstyleparameter\mathfractionparameter\m_fractions_strut_bot#2%
\kern\d_math_fraction_margin}%
}%
- }\endgroup}
-
+ }%
+ \math_frac_wrapup}
\definemathfraction[xfrac] [\c!alternative=\v!inner,\c!mathstyle=\s!script]
\definemathfraction[xxfrac][\c!alternative=\v!inner,\c!mathstyle=\s!scriptscript]
@@ -370,6 +395,47 @@
\fi
\to \everymathematics
+% threshold is new!
+
+\let\math_fraction_set_threshold_inline \relax
+\let\math_fraction_set_threshold_display\relax
+
+\appendtoks
+ \math_fraction_set_threshold_inline
+ \math_fraction_set_threshold_display
+\to \everymathematics
+
+\appendtoks
+ \ifx\currentmathfraction\empty
+ \edef\p_threshold{\mathfractionparameter\c!inlinethreshold}%
+ \ifx\p_threshold\empty
+ \let\math_fraction_set_threshold_inline\relax
+ \else\ifx\p_threshold\v!auto
+ \let\math_fraction_set_threshold_inline\relax
+ \else
+ \let\math_fraction_set_threshold_inline\math_fraction_set_theshold_inline
+ \fi\fi
+ \edef\p_threshold{\mathfractionparameter\c!displaythreshold}%
+ \ifx\p_threshold\empty
+ \let\math_fraction_set_threshold_display\relax
+ \else\ifx\p_threshold\v!auto
+ \let\math_fraction_set_threshold_display\relax
+ \else
+ \let\math_fraction_set_threshold_display\math_fraction_set_theshold_display
+ \fi\fi
+ \fi
+\to \everysetupmathfraction
+
+\def\math_fraction_set_theshold_inline
+ {\edef\p_threshold{\mathfractionparameter\c!inlinethreshold}%
+ \Umathfractiondelsize\textstyle \p_threshold\dimexpr\textface\relax
+ \Umathfractiondelsize\scriptstyle \p_threshold\dimexpr\scriptface\relax
+ \Umathfractiondelsize\scriptscriptstyle\p_threshold\dimexpr\scriptscriptface\relax}
+
+\def\math_fraction_set_theshold_display
+ {\edef\p_threshold{\mathfractionparameter\c!displaythreshold}%
+ \Umathfractiondelsize\displaystyle \p_threshold\dimexpr\textface\relax}
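+
+% In short: the inline threshold is set per style (text, script and
+% scriptscript) while the display threshold only affects display style; the
+% parameter acts as a factor on the matching body font face size.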
+
%D \macros
%D {dfrac, tfrac, frac, dbinom, tbinom, binom}
%D
@@ -451,9 +517,23 @@
% \unexpanded\def\dbinom#1#2{{\displaystyle{{#1}\normalabovewithdelims()\zeropoint{#2}}}}
% \unexpanded\def\tbinom#1#2{{\textstyle {{#1}\normalabovewithdelims()\zeropoint{#2}}}}
-\definemathfraction[binom] [\c!alternative=\v!outer,\c!rule=\v!no,\c!left=0x28,\c!right=0x29,\c!mathstyle=\s!auto]
-\definemathfraction[dbinom][\c!alternative=\v!outer,\c!rule=\v!no,\c!left=0x28,\c!right=0x29,\c!mathstyle=\s!display]
-\definemathfraction[tbinom][\c!alternative=\v!outer,\c!rule=\v!no,\c!left=0x28,\c!right=0x29,\c!mathstyle=\s!text]
+\definemathfraction
+ [binom]
+ [\c!alternative=\v!outer,
+ \c!rule=\v!no,
+ \c!left=0x28,
+ \c!right=0x29,
+ \c!mathstyle=\s!auto]
+
+\definemathfraction
+ [dbinom]
+ [binom]
+ [\c!mathstyle=\s!display]
+
+\definemathfraction
+ [tbinom]
+ [binom]
+ [\c!mathstyle=\s!text]
%D \macros
%D {cfrac}
diff --git a/tex/context/base/mkiv/math-ini.lua b/tex/context/base/mkiv/math-ini.lua
index 2cb4e2413..b79ef8c8c 100644
--- a/tex/context/base/mkiv/math-ini.lua
+++ b/tex/context/base/mkiv/math-ini.lua
@@ -15,9 +15,11 @@ if not modules then modules = { } end modules ['math-ini'] = {
-- to the fam when set ... we use other means .. ok, we could use it for spacing but
-- then we also have to set the other characters (only a subset done now)
+local next, type = next, type
local formatters, find = string.formatters, string.find
local utfchar, utfbyte, utflength = utf.char, utf.byte, utf.length
-local floor = math.floor
+----- floor = math.floor
+local sortedhash = table.sortedhash
local toboolean = toboolean
local context = context
@@ -132,11 +134,39 @@ local extensibles = allocate {
table.setmetatableindex(extensibles,function(t,k) t[k] = 0 return 0 end)
-mathematics.extensibles = extensibles
-mathematics.classes = classes
-mathematics.codes = codes
------------.accents = codes
-mathematics.families = families
+local virtualized = allocate {
+}
+
+function mathematics.virtualize(unicode,virtual)
+
+ local function virtualize(k,v)
+ local c = virtualized[k]
+ if c == v then
+ report_math("character %C is already virtualized to %C",k,v)
+ elseif c then
+ report_math("character %C is already virtualized to %C, ignoring mapping to %C",k,c,v)
+ else
+ virtualized[k] = v
+ end
+ end
+
+ if type(unicode) == "table" then
+ for k, v in next, unicode do
+ virtualize(k,v)
+ end
+ elseif type(unicode) == "number" and type(virtual) == "number" then
+ virtualize(unicode,virtual)
+ -- else
+ -- error
+ end
+end
+
+mathematics.extensibles = extensibles
+mathematics.classes = classes
+mathematics.codes = codes
+-----------.accents = codes
+mathematics.families = families
+mathematics.virtualized = virtualized
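+
+-- A hedged usage sketch (the mappings shown are made up for illustration):
+-- either a single pair or a table of pairs can be registered; conflicting
+-- registrations are reported and ignored.
+--
+-- mathematics.virtualize(0x2A75, 0xFE941)
+-- mathematics.virtualize { [0x2A75] = 0xFE941, [0x2A76] = 0xFE942 }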
-- there will be proper functions soon (and we will move this code in-line)
-- no need for " in class and family (saves space)
@@ -257,7 +287,7 @@ function mathematics.define(family)
family = family or 0
family = families[family] or family
local data = characters.data
- for unicode, character in next, data do
+ for unicode, character in sortedhash(data) do
local symbol = character.mathsymbol
local mset, dset = true, true
if symbol then
@@ -272,7 +302,8 @@ function mathematics.define(family)
end
local spec = other.mathspec
if spec then
- for i, m in next, spec do
+ for i=1,#spec do
+ local m = spec[i]
local class = m.class
if class then
class = classes[class] or class -- no real checks needed
@@ -284,14 +315,29 @@ function mathematics.define(family)
local mathclass = character.mathclass
local mathspec = character.mathspec
if mathspec then
- for i, m in next, mathspec do
+ if mathclass then
+ local name = character.mathname
+ if name then
+ report_math("fatal error, conlicting mathclass and mathspec for %C",unicode)
+ os.exit()
+ else
+ local class = classes[mathclass] or mathclass -- no real checks needed
+ if not class then
+ if trace_defining then
+ report("unknown",family,unicode)
+ end
+ else
+ if trace_defining then
+ report(class,family,unicode)
+ end
+ mset, dset = setmathcharacter(class,family,unicode,unicode,mset,dset)
+ end
+ end
+ end
+ for i=1,#mathspec do
+ local m = mathspec[i]
local name = m.name
local class = m.class
- if not class then
- class = mathclass
- elseif not mathclass then
- mathclass = class
- end
if class then
class = classes[class] or class -- no real checks needed
if name then
@@ -300,7 +346,7 @@ function mathematics.define(family)
end
setmathsymbol(name,class,family,unicode)
else
- name = class == classes.variable or class == classes.number and character.adobename
+ name = (class == classes.variable or class == classes.number) and character.adobename -- bad
if name and trace_defining then
report(class,family,unicode,name)
end
@@ -308,17 +354,22 @@ function mathematics.define(family)
mset, dset = setmathcharacter(class,family,unicode,m.unicode or unicode,mset,dset) -- see solidus
end
end
- end
- if mathclass then
+ elseif mathclass then
local name = character.mathname
local class = classes[mathclass] or mathclass -- no real checks needed
- if name == false then
+ if not class then
+ if trace_defining then
+ report("unknown",family,unicode,name)
+ end
+ elseif name == false then
if trace_defining then
report(class,family,unicode,name)
end
- mset, dset = setmathcharacter(class,family,unicode,mset,dset)
+ mset, dset = setmathcharacter(class,family,unicode,unicode,mset,dset)
else
- name = name or character.contextname
+ -- if not name then
+ -- name = character.contextname -- too dangerous, we lose textslash and a few more
+ -- end
if name then
if trace_defining then
report(class,family,unicode,name)
diff --git a/tex/context/base/mkiv/math-ini.mkiv b/tex/context/base/mkiv/math-ini.mkiv
index 8c682bdcb..17d900d74 100644
--- a/tex/context/base/mkiv/math-ini.mkiv
+++ b/tex/context/base/mkiv/math-ini.mkiv
@@ -64,18 +64,19 @@
% test [[\char948 \cldcontext{utf.char(948)}]]
% test $[[\char948 \cldcontext{utf.char(948)}]]$
-\registerctxluafile{math-ini}{1.001}
-\registerctxluafile{math-dim}{1.001}
-\registerctxluafile{math-act}{1.001}
-\registerctxluafile{math-ext}{1.001}
-\registerctxluafile{math-vfu}{1.001}
-\registerctxluafile{math-ttv}{1.001}
-\registerctxluafile{math-map}{1.001}
-\registerctxluafile{math-ren}{1.001}
-\registerctxluafile{math-noa}{1.001}
-\registerctxluafile{math-tag}{1.001}
-\registerctxluafile{math-fbk}{1.001}
-\registerctxluafile{math-dir}{1.001}
+\registerctxluafile{math-ini}{}
+\registerctxluafile{math-dim}{}
+\registerctxluafile{math-act}{}
+\registerctxluafile{math-ext}{}
+\registerctxluafile{math-vfu}{}
+\registerctxluafile{math-ttv}{}
+\registerctxluafile{math-map}{optimize}
+\registerctxluafile{math-ren}{}
+\registerctxluafile{math-noa}{optimize}
+\registerctxluafile{math-tag}{}
+\registerctxluafile{math-fbk}{}
+\registerctxluafile{math-dir}{}
+\registerctxluafile{math-spa}{}
%D A starter:
%D
@@ -121,6 +122,7 @@
\definesystemattribute[mathkernpairs] [public]
\definesystemattribute[mathbidi] [public]
\definesystemattribute[mathdomain] [public]
+\definesystemattribute[mathcollapsing] [public]
\definesystemattribute[displaymath] [public]
@@ -167,6 +169,17 @@
{\endgroup
\stopimath}
+\unexpanded\def\startpickupmath % for the moment private
+ {\ifconditional\indisplaymath
+ \startforceddisplaymath
+ \let\stoppickupmath\stopforceddisplaymath
+ \else
+ \startimath
+ \let\stoppickupmath\stopimath
+ \fi}
+
+\let\stoppickupmath\relax
+
% \unexpanded\def\rawmathcharacter#1% slow but only for tracing
% {\begingroup
% \ifmmode
@@ -288,31 +301,52 @@
% e.g.: \definemathematics[i:mp][setups=i:tight,openup=yes]
-\newmuskip\defaultthickmuskip \defaultthickmuskip 5mu plus 5mu
-\newmuskip\defaultmedmuskip \defaultmedmuskip 4mu plus 2mu minus 4mu
-\newmuskip\defaultthinmuskip \defaultthinmuskip 3mu
+\newmuskip\defaultthickmuskip \defaultthickmuskip 5mu plus 5mu
+\newmuskip\defaultmedmuskip \defaultmedmuskip 4mu plus 2mu minus 4mu
+\newmuskip\defaultthinmuskip \defaultthinmuskip 3mu
-\newmuskip\halfthickmuskip \halfthickmuskip 2.5mu plus 2.5mu
-\newmuskip\halfmedmuskip \halfmedmuskip 2.0mu plus 1.0mu minus 2.0mu
-\newmuskip\halfthinmuskip \halfthinmuskip 1.5mu
+\newmuskip\halfthickmuskip \halfthickmuskip 2.5mu plus 2.5mu
+\newmuskip\halfmedmuskip \halfmedmuskip 2.0mu plus 1.0mu minus 2.0mu
+\newmuskip\halfthinmuskip \halfthinmuskip 1.5mu
-\newcount \defaultrelpenalty \defaultrelpenalty 500
-\newcount \defaultbinoppenalty \defaultbinoppenalty 700
+\newcount \defaultrelpenalty \defaultrelpenalty 500
+\newcount \defaultbinoppenalty \defaultbinoppenalty 700
+\newcount \defaultprerelpenalty \defaultprerelpenalty -100
+\newcount \defaultprebinoppenalty \defaultprebinoppenalty -100
+
+% we need to control these otherwise:
+%
+% \prerelpenalty \defaultprerelpenalty
+% \prebinoppenalty\defaultprebinoppenalty
\startsetups math:spacing:default
- \thickmuskip \defaultthickmuskip
- \medmuskip \defaultmedmuskip
- \thinmuskip \defaultthinmuskip
- \relpenalty \defaultrelpenalty
- \binoppenalty \defaultbinoppenalty
+ \thickmuskip \defaultthickmuskip
+ \medmuskip \defaultmedmuskip
+ \thinmuskip \defaultthinmuskip
+ \relpenalty \defaultrelpenalty
+ \binoppenalty \defaultbinoppenalty
+ \prebinoppenalty\maxdimen
+ \prerelpenalty \maxdimen
+\stopsetups
+
+\startsetups math:spacing:split
+ \thickmuskip \defaultthickmuskip
+ \medmuskip \defaultmedmuskip
+ \thinmuskip \defaultthinmuskip
+ \relpenalty \defaultrelpenalty
+ \binoppenalty \defaultbinoppenalty
+ \prebinoppenalty\defaultprebinoppenalty
+ \prerelpenalty \defaultprerelpenalty
\stopsetups
\startsetups math:spacing:half
- \thickmuskip \halfthickmuskip
- \medmuskip \halfmedmuskip
- \thinmuskip \halfthinmuskip
- \relpenalty \defaultrelpenalty
- \binoppenalty \defaultbinoppenalty
+ \thickmuskip \halfthickmuskip
+ \medmuskip \halfmedmuskip
+ \thinmuskip \halfthinmuskip
+ \relpenalty \defaultrelpenalty
+ \binoppenalty \defaultbinoppenalty
+ \prebinoppenalty\maxdimen
+ \prerelpenalty \maxdimen
\stopsetups
\startsetups math:spacing:tight
@@ -325,8 +359,10 @@
\medmuskip 1\halfmedmuskip
\thinmuskip 1\halfthinmuskip
\fi
- \relpenalty \defaultrelpenalty
- \binoppenalty \maxdimen
+ \relpenalty \defaultrelpenalty
+ \binoppenalty \maxdimen
+ \prebinoppenalty\maxdimen
+ \prerelpenalty \maxdimen
\stopsetups
\startsetups math:spacing:fixed
@@ -339,8 +375,10 @@
\medmuskip 1\halfmedmuskip
\thinmuskip 1\halfthinmuskip
\fi
- \relpenalty \maxdimen
- \binoppenalty \maxdimen
+ \relpenalty \maxdimen
+ \binoppenalty \maxdimen
+ \prebinoppenalty\maxdimen
+ \prerelpenalty \maxdimen
\stopsetups
% \dorecurse{80}{test \m[i:tight]{\red \fakeformula} test }
@@ -632,7 +670,7 @@
\letcsnamecsname\csname#1\endcsname\csname\??mathcommand#1\endcsname}
\unexpanded\def\mathcommand#1%
- {\csname\??mathcommand#1\endcsname}
+ {\begincsname\??mathcommand#1\endcsname}
%D Let's define a few comands here:
@@ -757,29 +795,21 @@
\c!functionstyle=, % rm ss etc i.e. known alternatives, otherwise math
\c!functioncolor=]
-% \unexpanded\def\math_mfunction_styled
-% {\edef\m_math_text_choice_face{\textstyleface\normalmathstyle}%
-% \dowithnextbox
-% {\mathop{\box\nextbox}}%
-% \hbox\bgroup
-% \usemathematicsstyleandcolor\c!functionstyle\c!functioncolor
-% \m_math_text_choice_face
-% \let\next}
-
\unexpanded\def\math_mfunction_styled
{\begingroup
\usemathematicscolorparameter\c!functioncolor
\edef\p_functionstyle{\mathematicsparameter\c!functionstyle}%
\ifx\p_functionstyle\empty
\expandafter\math_mfunction_styled_none
- \else\ifcsname\??alternativestyles\p_functionstyle\endcsname
+ \else\ifcsname\??alternativestyle\p_functionstyle\endcsname
\doubleexpandafter\math_mfunction_styled_text
\else
\doubleexpandafter\math_mfunction_styled_math
\fi\fi}
\unexpanded\def\math_mfunction_styled_text#1%
- {\mathoptext{\csname\??alternativestyles\p_functionstyle\endcsname#1}%
+ %{\mathoptext{\csname\??alternativestyle\p_functionstyle\endcsname#1}%
+ {\expandafter\mathoptext\expandafter{\lastnamedcs#1}%
\endgroup}
\unexpanded\def\math_mfunction_styled_math#1%
@@ -920,6 +950,75 @@
\activatemathcharacter\underscoreasciicode
\activatemathcharacter\ampersandasciicode
+\appendtoks
+ \edef\p_ampersand{\mathematicsparameter\s!ampersand}%
+ \ifx\p_ampersand\v!normal
+ \let\specialmathaligntab\normalmathaligntab
+ \else
+ \let\specialmathaligntab\mathampersand
+ \fi
+\to \everysetupmathematics
+
+%D A simplified version of this code is:
+%D
+%D \starttyping
+%D \catcode"26=12
+%D
+%D \bgroup
+%D \global\mathcode"26="8000
+%D
+%D \catcode"26=4
+%D
+%D \xdef\normalmathaligntab{&}
+%D
+%D \catcode"26=13
+%D
+%D \global\everymath{\def&{\normalmathaligntab}}
+%D \egroup
+%D \stoptyping
+%D
+%D The following works okay:
+%D
+%D \starttyping
+%D A & B
+%D \stoptyping
+%D
+%D As does:
+%D
+%D \starttyping
+%D $A \Umathchar"2"0"26 B$
+%D \stoptyping
+%D
+%D But the next code:
+%D
+%D \starttyping
+%D $A \char"26 B$
+%D \stoptyping
+%D
+%D fails with: \type{Misplaced alignment tab character &} and here is the
+%D reason.
+%D
+%D When we have a letter or other category, a check happens for an active
+%D character, and when there is one it gets expanded and fed back into the
+%D scanner (sort of).
+%D
+%D A \type {\char} is also fed back as a raw character and again, when it's
+%D letter or other, goes through the same process.
+%D
+%D This means that we cannot have a definition like:
+%D
+%D \starttyping
+%D \def\AND{\char"26\relax}
+%D \stoptyping
+%D
+%D that can be used in math mode, which is why the cweb macros do:
+%D
+%D \starttyping
+%D \def\AND{\def\AND{\mathchar"2026\relax}\AND}
+%D \stoptyping
+%D
+%D Maybe we need an option to treat chars like chars.
+
% \activatemathcharacter\primeasciicode
% not used:
@@ -1037,17 +1136,33 @@
%
% \egroup
+% $\char"26$ gives an error because it expands to an active character that
+% then becomes an & which is then seen as an alignment character; anyway,
+% even when we switch like this the different meaning only happens when
+% we're in math mode, which can be delayed till we're in a cell
+
\bgroup
\catcode\underscoreasciicode\activecatcode
\catcode\circumflexasciicode\activecatcode
\catcode\ampersandasciicode \activecatcode
+ \glet\specialmathaligntab\normalmathaligntab
+
+ \unexpanded\gdef\obeymathcatcodes
+ {\let _\normalsubscript
+ \let ^\normalsuperscript
+ \def &\specialmathaligntab
+ }
+
\doglobal \appendtoks
\let _\normalsubscript
\let ^\normalsuperscript
- \let &\normalmathaligntab % use \def when it's \aligntab
- \to \everymathematics
+ \let &\specialmathaligntab
+ \to \everymathematics
+
+ % \unexpanded\gdef\normalmathampersands
+ % {\let\specialmathaligntab\mathampersand}
\egroup
@@ -1304,6 +1419,39 @@
\s!lcgreek=\v!italic,
\s!ucgreek=\v!normal] % was: none
+%D Math collapsing (ligatures)
+
+\installcorenamespace{mathcollapsing}
+
+\setnewconstant\c_math_collapsing_attribute\attributeunsetvalue
+
+\letvalue{\??mathcollapsing 1}\plusone % specials
+\letvalue{\??mathcollapsing 2}\plustwo % specials + mathlist
+\letvalue{\??mathcollapsing 3}\plusthree % mathlist + specials
+\letvalue{\??mathcollapsing\v!none }\attributeunsetvalue
+\letvalue{\??mathcollapsing\v!reset}\attributeunsetvalue
+
+\def\math_collapsing_initialize
+ {\ifnum\c_math_collapsing_attribute=\attributeunsetvalue \else
+ \clf_initializemathcollapsing % one time
+ \global\let\math_collapsing_initialize\relax
+ \fi}
+
+\appendtoks
+ \edef\p_collapsing{\mathematicsparameter\s!collapsing}%
+ \c_math_collapsing_attribute
+ \ifcsname\??mathcollapsing\p_collapsing\endcsname\lastnamedcs\else\attributeunsetvalue\fi
+ \relax
+\to \everyswitchmathematics % only in mathematics
+
+\appendtoks
+ \math_collapsing_initialize
+ \attribute\mathcollapsingattribute\c_math_collapsing_attribute
+\to \everymathematics
+
+\setupmathematics
+ [\s!collapsing=3] % mathlist wins over specials
+
%D Math italics (experiment)
%D We need keys but what names to use and because we have hardcoded solution
@@ -1326,14 +1474,6 @@
\global\let\math_italics_initialize\relax
\fi}
-% \appendtoks
-% \edef\p_italics{\mathematicsparameter\s!italics}%
-% \c_math_italics_attribute\csname\??mathitalics
-% \ifcsname\??mathitalics\p_italics\endcsname\p_italics\else\v!none\fi
-% \endcsname\relax
-% % \math_italics_initialize
-% \to \everyswitchmathematics % only in mathematics
-
\appendtoks
\edef\p_italics{\mathematicsparameter\s!italics}%
\c_math_italics_attribute
@@ -1347,8 +1487,8 @@
\attribute\mathitalicsattribute\c_math_italics_attribute
\to \everymathematics
-\setupmathematics
- [\s!italics=3] % 4 is probably better
+% \setupmathematics % done later
+% [\s!italics=3] % 4 is probably better
% looks nicer but can generate bogus csnames
%
@@ -1466,44 +1606,50 @@
%
% The next one is more efficient as it produces more flat noad lists for numbers.
-\setnewconstant\c_math_comma "002C
-\setnewconstant\c_math_period "002E
-\setnewconstant\c_math_special"8000
+\setnewconstant\c_math_comma "002C
+\setnewconstant\c_math_period "002E
+%setnewconstant\c_math_colon "003A
+\setnewconstant\c_math_semicolon"003B
+\setnewconstant\c_math_special "8000
-\def\math_set_o_period{\Umathcode\c_math_period\mathordcode \zerocount\c_math_period}
-\def\math_set_p_period{\Umathcode\c_math_period\mathpunctcode\zerocount\c_math_period}
-\def\math_set_o_comma {\Umathcode\c_math_comma \mathordcode \zerocount\c_math_comma }
-\def\math_set_p_comma {\Umathcode\c_math_comma \mathpunctcode\zerocount\c_math_comma }
+\def\math_set_o_comma {\Umathcode\c_math_comma \mathordcode \zerocount\c_math_comma}
+\def\math_set_p_comma {\Umathcode\c_math_comma \mathpunctcode\zerocount\c_math_comma}
+\def\math_set_o_period {\Umathcode\c_math_period \mathordcode \zerocount\c_math_period}
+\def\math_set_p_period {\Umathcode\c_math_period \mathpunctcode\zerocount\c_math_period}
+\def\math_set_o_semicolon{\Umathcode\c_math_semicolon\mathordcode \zerocount\c_math_semicolon}
+\def\math_set_p_semicolon{\Umathcode\c_math_semicolon\mathpunctcode\zerocount\c_math_semicolon}
\edef\math_set_o_both {\math_set_o_period\math_set_o_comma}
\edef\math_set_p_both {\math_set_p_period\math_set_p_comma}
-\unexpanded\def\math_punctuation_nop_comma {\begingroup\math_set_p_comma ,\endgroup}
-\unexpanded\def\math_punctuation_nop_period{\begingroup\math_set_o_period.\endgroup}
+\unexpanded\def\math_punctuation_nop_comma {\begingroup\math_set_p_comma ,\endgroup}
+\unexpanded\def\math_punctuation_nop_period {\begingroup\math_set_o_period .\endgroup}
+\unexpanded\def\math_punctuation_nop_semicolon{\begingroup\math_set_p_semicolon;\endgroup}
-\unexpanded\def\math_punctuation_all_comma {\futurelet\nexttoken\math_punctuation_comma_next}
-\unexpanded\def\math_punctuation_all_period{\futurelet\nexttoken\math_punctuation_period_next}
+\unexpanded\def\math_punctuation_all_comma {\futurelet\nexttoken\math_punctuation_comma_next}
+\unexpanded\def\math_punctuation_all_period {\futurelet\nexttoken\math_punctuation_period_next}
+\unexpanded\def\math_punctuation_all_semicolon{\futurelet\nexttoken\math_punctuation_semicolon_next}
- \let\math_punctuation_yes_comma \math_punctuation_all_comma
- \let\math_punctuation_yes_period\math_punctuation_nop_period
+ \let\math_punctuation_yes_comma \math_punctuation_all_comma
+ \let\math_punctuation_yes_period \math_punctuation_nop_period
+ \let\math_punctuation_yes_semicolon\math_punctuation_all_semicolon
-\def\math_punctuation_comma_next {\begingroup\Umathcode\c_math_comma \ifx\nexttoken\blankspace\mathordcode\else\mathordcode\fi\zerocount\c_math_comma ,\endgroup}
-\def\math_punctuation_period_next{\begingroup\Umathcode\c_math_period\ifx\nexttoken\blankspace\mathordcode\else\mathordcode\fi\zerocount\c_math_period.\endgroup}
-
-\setnewconstant\c_math_comma "002C
-\setnewconstant\c_math_period "002E
-\setnewconstant\c_math_special"8000
+\def\math_punctuation_comma_next {\begingroup\Umathcode\c_math_comma \ifx\nexttoken\blankspace\mathordcode\else\mathordcode\fi\zerocount\c_math_comma ,\endgroup}
+\def\math_punctuation_period_next {\begingroup\Umathcode\c_math_period \ifx\nexttoken\blankspace\mathordcode\else\mathordcode\fi\zerocount\c_math_period .\endgroup}
+\def\math_punctuation_semicolon_next{\begingroup\Umathcode\c_math_semicolon\ifx\nexttoken\blankspace\mathordcode\else\mathordcode\fi\zerocount\c_math_semicolon;\endgroup}
\installcorenamespace {mathautopunctuation}
\bgroup
- \catcode\c_math_comma \activecatcode
- \catcode\c_math_period\activecatcode
+ \catcode\c_math_comma \activecatcode
+ \catcode\c_math_period \activecatcode
+ \catcode\c_math_semicolon\activecatcode
\setgvalue{\??mathautopunctuation\v!no}%
{\let,\math_punctuation_nop_comma
- \let.\math_punctuation_nop_period}
+ \let.\math_punctuation_nop_period
+ \let;\math_punctuation_nop_semicolon}
% more efficient list:
%
@@ -1513,26 +1659,51 @@
\setgvalue{\??mathautopunctuation\v!yes}%
{\let,\math_punctuation_yes_comma
- \let.\math_punctuation_yes_period}
+ \let.\math_punctuation_yes_period
+ \let;\math_punctuation_nop_semicolon}
\setgvalue{\??mathautopunctuation\v!all}%
{\let,\math_punctuation_all_comma
- \let.\math_punctuation_all_period}
+ \let.\math_punctuation_all_period
+ \let;\math_punctuation_nop_semicolon}
+
+ \setgvalue{\??mathautopunctuation comma}%
+ {\let,\math_punctuation_yes_comma
+ \let.\math_punctuation_yes_period
+ \let;\math_punctuation_nop_semicolon}
+
+ \setgvalue{\??mathautopunctuation\v!yes\string,semicolon}%
+ {\let,\math_punctuation_yes_comma
+ \let.\math_punctuation_yes_period
+ \let;\math_punctuation_yes_semicolon}
+
+ \setgvalue{\??mathautopunctuation comma\string,semicolon}%
+ {\let,\math_punctuation_yes_comma
+ \let.\math_punctuation_yes_period
+ \let;\math_punctuation_yes_semicolon}
+
+ \setgvalue{\??mathautopunctuation\v!all\string,semicolon}%
+ {\let,\math_punctuation_all_comma
+ \let.\math_punctuation_all_period
+ \let;\math_punctuation_all_semicolon}
\egroup
% \appendtoks
-% \global\mathcode\c_math_comma \c_math_special
-% \global\mathcode\c_math_period\c_math_special
+% \global\mathcode\c_math_comma \c_math_special
+% \global\mathcode\c_math_period \c_math_special
+% \global\mathcode\c_math_semicolon\c_math_special
% \to \everyjob
% \activatemathcharacter\c_math_comma
% \activatemathcharacter\c_math_period
+% \activatemathcharacter\c_math_semicolon
\appendtoks
- \mathcode\c_math_comma \c_math_special
- \mathcode\c_math_period\c_math_special
- \csname\??mathautopunctuation\mathematicsparameter\v!autopunctuation\endcsname
+ \mathcode\c_math_comma \c_math_special
+ \mathcode\c_math_period \c_math_special
+ \mathcode\c_math_semicolon\c_math_special
+ \begincsname\??mathautopunctuation\mathematicsparameter\v!autopunctuation\endcsname
\to \everymathematics
\appendtoks
@@ -1545,7 +1716,25 @@
\def\disablemathpunctuation{\csname\??mathautopunctuation\v!yes\endcsname}
\setupmathematics
- [\v!autopunctuation=\v!no] % no | yes | all
+ [\v!autopunctuation=\v!no] % no | yes | all | comma | yes,semicolon | all,semicolon
+
+%D The consequences of setting this are as follows:
+%D
+%D \def\TestA#1#2#3%
+%D {\ifnum#1=0 \type{#2}\else\setupmathematics[autopunctuation={#2}]$#3$\fi}
+%D \def\TestB#1#2%
+%D {\NC \TestA{#1}{no} {#2}
+%D \NC \TestA{#1}{yes} {#2}
+%D \NC \TestA{#1}{yes,semicolon}{#2}
+%D \NC \TestA{#1}{all} {#2}
+%D \NC \TestA{#1}{all,semicolon}{#2}
+%D \NC \NR}
+%D \starttabulate[|c|c|c|c|c|]
+%D \TestB{0}{}
+%D \TestB{1}{(1,2)=(1, 2)}
+%D \TestB{1}{(1.2)=(1. 2)}
+%D \TestB{1}{(1;2)=(1; 2)}
+%D \stoptabulate
%D \macros
%D {mathstyle}
@@ -1610,6 +1799,19 @@
% error
\fi}
+\def\triggeredmathstyleparameter#1% to bypass the relax
+ {\ifcase\numexpr\normalmathstyle\relax
+ #1\displaystyle \or % 0
+ #1\crampeddisplaystyle \or % 1
+ #1\textstyle \or % 2
+ #1\crampedtextstyle \or % 3
+ #1\scriptstyle \or % 4
+ #1\crampedscriptstyle \or % 5
+ #1\scriptscriptstyle \or % 6
+ #1\crampedscriptscriptstyle \else
+ % error
+ \fi}
+
\def\mathstylefont#1% #1 is number (\normalmathstyle)
{\ifcase\numexpr#1\relax
\textfont \or
@@ -1621,7 +1823,20 @@
\scriptscriptfont \or
\scriptscriptfont \else
\textfont
- \fi\zerocount} % hm, can ie other value as well
+ \fi\fam} % was \zerocount
+
+\def\somemathstylefont#1% #1 is number (\normalmathstyle)
+ {\ifcase\numexpr#1\relax
+ \textfont \or
+ \textfont \or
+ \textfont \or
+ \textfont \or
+ \scriptfont \or
+ \scriptfont \or
+ \scriptscriptfont \or
+ \scriptscriptfont \else
+ \textfont
+ \fi}
\def\mathsmallstylefont#1% #1 is number (\normalmathstyle)
{\ifcase\numexpr#1\relax
@@ -1634,7 +1849,7 @@
\scriptscriptfont \or
\scriptscriptfont \else
\scriptfont
- \fi\zerocount} % hm, can ie other value as well
+ \fi\fam} % was \zerocount
\def\mathstyleface#1% #1 is number (\normalmathstyle)
{\ifcase\numexpr#1\relax
@@ -1701,6 +1916,46 @@
\unexpanded\def\showmathstyle{\verbosemathstyle\normalmathstyle}
+%D Handy too:
+
+\def\mathcharwd{\fontcharwd\mathstylefont\normalmathstyle}
+\def\mathcharht{\fontcharht\mathstylefont\normalmathstyle}
+\def\mathchardp{\fontchardp\mathstylefont\normalmathstyle}
+
+%D Some dimension fun:
+
+\def\mathexheight
+ {\fontdimen
+ \plusfive
+ \ifcase\numexpr\normalmathstyle\relax
+ \textfont \or % 0
+ \textfont \or % 1
+ \textfont \or % 2
+ \textfont \or % 3
+ \scriptfont \or % 4
+ \scriptfont \or % 5
+ \scriptscriptfont \or % 6
+ \scriptscriptfont \else
+ \textfont
+ \fi
+ \zeropoint}
+
+\def\mathemwidth
+ {\fontdimen
+ \plussix
+ \ifcase\numexpr\normalmathstyle\relax
+ \textfont \or % 0
+ \textfont \or % 1
+ \textfont \or % 2
+ \textfont \or % 3
+ \scriptfont \or % 4
+ \scriptfont \or % 5
+ \scriptscriptfont \or % 6
+ \scriptscriptfont \else
+ \textfont
+ \fi
+ \zeropoint}
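+
+% (\fontdimen 5 is the ex height and \fontdimen 6 the em/quad width of the
+% font that belongs to the current math style.)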
+
%D A plain inheritance:
\def\mathpalette#1#2%
@@ -1720,9 +1975,16 @@
% to be tested: {#1} but it could have side effects
-\unexpanded\def\mathstylehbox#1% sensitive for: a \over b => {a\over b} or \frac{a}{b}
- {\normalexpanded{\hbox\bgroup
- \startimath\triggermathstyle\normalmathstyle}\mathsurround\zeropoint#1\stopimath\egroup}
+% \unexpanded\def\mathstylehbox#1% sensitive for: a \over b => {a\over b} or \frac{a}{b}
+% {\normalexpanded{\hbox\bgroup
+% \startimath\triggermathstyle\normalmathstyle}\mathsurround\zeropoint#1\stopimath\egroup}
+
+\unexpanded\def\mathstylehbox#1#% sensitive for: a \over b => {a\over b} or \frac{a}{b}
+ {\math_style_hbox{#1}}
+
+\unexpanded\def\math_style_hbox#1#2% sensitive for: a \over b => {a\over b} or \frac{a}{b}
+ {\normalexpanded{\hbox#1\bgroup
+ \startimath\triggermathstyle\normalmathstyle}\mathsurround\zeropoint#2\stopimath\egroup}
\unexpanded\def\mathstylevbox#1%
{\normalexpanded{\vbox\bgroup
@@ -2204,6 +2466,13 @@
% \ruledhbox{$\mathtext{abc ffi}$}
% \ruledhbox{$\mathword{abc ffi}$}
+% I need to decide:
+%
+%mathscriptboxmode\zerocount % no kerning
+%mathscriptboxmode\plusone % lists
+\mathscriptboxmode\plustwo % lists and boxes
+%mathscriptboxmode\plusthree % lists and boxes with \boundary=1 (also for testing and demo)
+
\unexpanded\def\mathtext {\mathortext{\math_text_choice_font\relax}\hbox}
\unexpanded\def\mathword {\mathortext{\math_text_choice_word\relax}\hbox}
diff --git a/tex/context/base/mkiv/math-map.lua b/tex/context/base/mkiv/math-map.lua
index cf9353e95..47e5cda0b 100644
--- a/tex/context/base/mkiv/math-map.lua
+++ b/tex/context/base/mkiv/math-map.lua
@@ -33,7 +33,6 @@ if not modules then modules = { } end modules ['math-map'] = {
-- plus add them to the regular vectors below so that they honor \it etc
local type, next = type, next
-local floor, div = math.floor, math.div
local merged, sortedhash = table.merged, table.sortedhash
local extract = bit32.extract
@@ -118,6 +117,7 @@ mathematics.gaps = allocate {
[0x1D4AD] = 0x0211B, -- ℛ script R
[0x1D4BA] = 0x0212F, -- ℯ script e
[0x1D4BC] = 0x0210A, -- ℊ script g
+ -- [0x1D4C1] = 0x02113, -- exception: liter
[0x1D4C4] = 0x02134, -- ℴ script o
[0x1D506] = 0x0212D, -- ℭ fraktur C
[0x1D50B] = 0x0210C, -- ℌ fraktur H
@@ -741,17 +741,17 @@ function mathematics.remapalphabets(char,mathalphabet,mathgreek)
if not isgreek[char] then
-- nothing needed
elseif islcgreek[char] then
- local lc = extract(mathgreek,4,4)
+ local lc = extract(mathgreek,4,4) -- (mathgreek >> 4) & ~(-1 << 4)
if lc > 1 then
mathalphabet = remapgreek(mathalphabet,lc,"lowercase",char)
end
elseif isucgreek[char] then
- local uc = extract(mathgreek,0,4)
+ local uc = extract(mathgreek,0,4) -- (mathgreek >> 0) & ~(-1 << 4)
if uc > 1 then
mathalphabet = remapgreek(mathalphabet,uc,"uppercase",char)
end
elseif issygreek[char] then
- local sy = extract(mathgreek,8,4)
+ local sy = extract(mathgreek,8,4) -- (mathgreek >> 8) & ~(-1 << 4)
if sy > 1 then
mathalphabet = remapgreek(mathalphabet,sy,"symbol",char)
end
diff --git a/tex/context/base/mkiv/math-noa.lua b/tex/context/base/mkiv/math-noa.lua
index f9e8c9f70..4c1997a64 100644
--- a/tex/context/base/mkiv/math-noa.lua
+++ b/tex/context/base/mkiv/math-noa.lua
@@ -29,7 +29,7 @@ local formatters, gmatch = string.formatters, string.gmatch
local sortedhash = table.sortedhash
local insert, remove = table.insert, table.remove
local div = math.div
-local setbit, hasbit = number.setbit, number.hasbit
+local bor, band = bit32.bor, bit32.band
local fonts = fonts
local nodes = nodes
@@ -47,11 +47,14 @@ local registerdirective = directives.register
local logreporter = logs.reporter
local setmetatableindex = table.setmetatableindex
+local colortracers = nodes.tracers.colors
+
local trace_remapping = false registertracker("math.remapping", function(v) trace_remapping = v end)
local trace_processing = false registertracker("math.processing", function(v) trace_processing = v end)
local trace_analyzing = false registertracker("math.analyzing", function(v) trace_analyzing = v end)
local trace_normalizing = false registertracker("math.normalizing", function(v) trace_normalizing = v end)
local trace_collapsing = false registertracker("math.collapsing", function(v) trace_collapsing = v end)
+local trace_fixing = false registertracker("math.fixing", function(v) trace_fixing = v end)
local trace_patching = false registertracker("math.patching", function(v) trace_patching = v end)
local trace_goodies = false registertracker("math.goodies", function(v) trace_goodies = v end)
local trace_variants = false registertracker("math.variants", function(v) trace_variants = v end)
@@ -62,12 +65,13 @@ local trace_domains = false registertracker("math.domains", function
local trace_families = false registertracker("math.families", function(v) trace_families = v end)
local trace_fences = false registertracker("math.fences", function(v) trace_fences = v end)
-local check_coverage = true registerdirective("math.checkcoverage", function(v) check_coverage = v end)
+local check_coverage = true registerdirective("math.checkcoverage", function(v) check_coverage = v end)
local report_processing = logreporter("mathematics","processing")
local report_remapping = logreporter("mathematics","remapping")
local report_normalizing = logreporter("mathematics","normalizing")
local report_collapsing = logreporter("mathematics","collapsing")
+local report_fixing = logreporter("mathematics","fixing")
local report_patching = logreporter("mathematics","patching")
local report_goodies = logreporter("mathematics","goodies")
local report_variants = logreporter("mathematics","variants")
@@ -93,6 +97,7 @@ local setlist = nuts.setlist
local setnext = nuts.setnext
local setprev = nuts.setprev
local setchar = nuts.setchar
+local setfam = nuts.setfam
local setsubtype = nuts.setsubtype
local setattr = nuts.setattr
@@ -104,6 +109,7 @@ local getid = nuts.getid
local getsubtype = nuts.getsubtype
local getchar = nuts.getchar
local getfont = nuts.getfont
+local getfam = nuts.getfam
local getattr = nuts.getattr
local getlist = nuts.getlist
@@ -116,7 +122,6 @@ local setsub = nuts.setsub
local setsup = nuts.setsup
local flush_node = nuts.flush
-local new_node = nuts.new -- todo: pool: math_noad math_sub
local copy_node = nuts.copy
local slide_nodes = nuts.slide
local set_visual = nuts.setvisual
@@ -126,6 +131,10 @@ local mlist_to_hlist = nodes.mlist_to_hlist
local font_of_family = node.family_font
local new_kern = nodepool.kern
+local new_submlist = nodepool.submlist
+local new_noad = nodepool.noad
+local new_delimiter = nodepool.delimiter
+local new_fence = nodepool.fence
local fonthashes = fonts.hashes
local fontdata = fonthashes.identifiers
@@ -174,8 +183,8 @@ local math_noad = nodecodes.noad -- attr nucleus sub sup
local math_accent = nodecodes.accent -- attr nucleus sub sup accent
local math_radical = nodecodes.radical -- attr nucleus sub sup left degree
local math_fraction = nodecodes.fraction -- attr nucleus sub sup left right
-local math_box = nodecodes.subbox -- attr list
-local math_sub = nodecodes.submlist -- attr list
+local math_subbox = nodecodes.subbox -- attr list
+local math_submlist = nodecodes.submlist -- attr list
local math_char = nodecodes.mathchar -- attr fam char
local math_textchar = nodecodes.mathtextchar -- attr fam char
local math_delim = nodecodes.delim -- attr small_fam small_char large_fam large_char
@@ -219,7 +228,7 @@ local function process(start,what,n,parent)
elseif id == math_char then
local char = getchar(start)
local font = getfont(start)
- local fam = getfield(start,"fam")
+ local fam = getfam(start)
report_processing("%w%S, family %a, font %a, char %a, shape %c",n*2,nutstring(start),fam,font,char,char)
else
report_processing("%w%S",n*2,nutstring(start))
@@ -253,7 +262,7 @@ local function process(start,what,n,parent)
noad = getsub (start) if noad then process(noad,what,n,start) end -- list
elseif id == math_char or id == math_textchar or id == math_delim then
break
- elseif id == math_box or id == math_sub then
+ elseif id == math_subbox or id == math_submlist then
local noad = getlist(start) if noad then process(noad,what,n,start) end -- list (not getlist !)
elseif id == math_fraction then
local noad = getfield(start,"num") if noad then process(noad,what,n,start) end -- list
@@ -298,7 +307,7 @@ local function processnested(current,what,n)
noad = getnucleus(current) if noad then process(noad,what,n,current) end -- list
noad = getsup (current) if noad then process(noad,what,n,current) end -- list
noad = getsub (current) if noad then process(noad,what,n,current) end -- list
- elseif id == math_box or id == math_sub then
+ elseif id == math_subbox or id == math_submlist then
noad = getlist(current) if noad then process(noad,what,n,current) end -- list (not getlist !)
elseif id == math_fraction then
noad = getfield(current,"num") if noad then process(noad,what,n,current) end -- list
@@ -334,7 +343,7 @@ local function processstep(current,process,n,id)
noad = getnucleus(current) if noad then process(noad,n,current) end -- list
noad = getsup (current) if noad then process(noad,n,current) end -- list
noad = getsub (current) if noad then process(noad,n,current) end -- list
- elseif id == math_box or id == math_sub then
+ elseif id == math_subbox or id == math_submlist then
noad = getlist(current) if noad then process(noad,n,current) end -- list (not getlist !)
elseif id == math_fraction then
noad = getfield(current,"num") if noad then process(noad,n,current) end -- list
@@ -447,7 +456,7 @@ do
}
families[math_char] = function(pointer)
- if getfield(pointer,"fam") == 0 then
+ if getfam(pointer) == 0 then
local a = getattr(pointer,a_mathfamily)
if a and a > 0 then
setattr(pointer,a_mathfamily,0)
@@ -459,13 +468,13 @@ do
if trace_families then
report_families("no bold replacement for %C, family %s with remap %s becomes %s with remap %s",char,a,familymap[a],newa,familymap[newa])
end
- setfield(pointer,"fam",newa)
+ setfam(pointer,newa)
elseif not fontcharacters[font_of_family(newa)][bold] then
if trace_families then
report_families("no bold character for %C, family %s with remap %s becomes %s with remap %s",char,a,familymap[a],newa,familymap[newa])
end
if newa > 3 then
- setfield(pointer,"fam",newa-3)
+ setfam(pointer,newa-3)
end
else
setattr(pointer,a_exportstatus,char)
@@ -473,7 +482,7 @@ do
if trace_families then
report_families("replacing %C by bold %C, family %s with remap %s becomes %s with remap %s",char,bold,a,familymap[a],newa,familymap[newa])
end
- setfield(pointer,"fam",newa)
+ setfam(pointer,newa)
end
else
local char = getchar(pointer)
@@ -485,7 +494,7 @@ do
if trace_families then
report_families("family of %C becomes %s with remap %s",char,a,familymap[a])
end
- setfield(pointer,"fam",a)
+ setfam(pointer,a)
end
end
end
@@ -522,6 +531,30 @@ do
end
end
+ -- will become:
+
+ -- families[math_delim] = function(pointer)
+ -- if getfam(pointer) == 0 then
+ -- local a = getattr(pointer,a_mathfamily)
+ -- if a and a > 0 then
+ -- setattr(pointer,a_mathfamily,0)
+ -- if a > 5 then
+ -- -- no bold delimiters in unicode
+ -- a = a - 3
+ -- end
+ -- local char = getchar(pointer)
+ -- local okay = fontcharacters[font_of_family(a)][char]
+ -- if okay then
+ -- setfam(pointer,a)
+ -- elseif a > 2 then
+ -- setfam(pointer,a-3)
+ -- end
+ -- else
+ -- setfam(pointer,0)
+ -- end
+ -- end
+ -- end
+
families[math_textchar] = families[math_char]
function handlers.families(head,style,penalties)
@@ -535,20 +568,20 @@ end
do
- local a_mathalphabet = privateattribute("mathalphabet")
- local a_mathgreek = privateattribute("mathgreek")
+ local a_mathalphabet = privateattribute("mathalphabet")
+ local a_mathgreek = privateattribute("mathgreek")
+
+ local relocate = { }
- processors.relocate = { }
+ local remapalphabets = mathematics.remapalphabets
+ local fallbackstyleattr = mathematics.fallbackstyleattr
+ local setnodecolor = colortracers.set
local function report_remap(tag,id,old,new,extra)
report_remapping("remapping %s in font (%s,%s) from %C to %C%s",
tag,id,fontdata[id].properties.fontname or "",old,new,extra)
end
- local remapalphabets = mathematics.remapalphabets
- local fallbackstyleattr = mathematics.fallbackstyleattr
- local setnodecolor = nodes.tracers.colors.set
-
local function checked(pointer)
local char = getchar(pointer)
local font = getfont(pointer)
@@ -570,7 +603,7 @@ do
end
end
- processors.relocate[math_char] = function(pointer)
+ relocate[math_char] = function(pointer)
local g = getattr(pointer,a_mathgreek) or 0
local a = getattr(pointer,a_mathalphabet) or 0
local char = getchar(pointer)
@@ -637,20 +670,20 @@ do
end
end
- processors.relocate[math_textchar] = function(pointer)
+ relocate[math_textchar] = function(pointer)
if trace_analyzing then
setnodecolor(pointer,"font:init")
end
end
- processors.relocate[math_delim] = function(pointer)
+ relocate[math_delim] = function(pointer)
if trace_analyzing then
setnodecolor(pointer,"font:fina")
end
end
function handlers.relocate(head,style,penalties)
- processnoads(head,processors.relocate,"relocate")
+ processnoads(head,relocate,"relocate")
return true
end
@@ -658,32 +691,36 @@ end
-- rendering (beware, not exported)
-processors.render = { }
-
-local rendersets = mathematics.renderings.numbers or { } -- store
+do
-processors.render[math_char] = function(pointer)
- local attr = getattr(pointer,a_mathrendering)
- if attr and attr > 0 then
- local char = getchar(pointer)
- local renderset = rendersets[attr]
- if renderset then
- local newchar = renderset[char]
- if newchar then
- local font = getfont(pointer)
- local characters = fontcharacters[font]
- if characters and characters[newchar] then
- setchar(pointer,newchar)
- setattr(pointer,a_exportstatus,char)
+ local render = { }
+
+ local rendersets = mathematics.renderings.numbers or { } -- store
+
+ render[math_char] = function(pointer)
+ local attr = getattr(pointer,a_mathrendering)
+ if attr and attr > 0 then
+ local char = getchar(pointer)
+ local renderset = rendersets[attr]
+ if renderset then
+ local newchar = renderset[char]
+ if newchar then
+ local font = getfont(pointer)
+ local characters = fontcharacters[font]
+ if characters and characters[newchar] then
+ setchar(pointer,newchar)
+ setattr(pointer,a_exportstatus,char)
+ end
end
end
end
end
-end
-function handlers.render(head,style,penalties)
- processnoads(head,processors.render,"render")
- return true
+ function handlers.render(head,style,penalties)
+ processnoads(head,render,"render")
+ return true
+ end
+
end
-- some resize options (this works ok because the content is
@@ -696,347 +733,405 @@ end
-- todo: just replace the character by an ord noad
-- and remove the right delimiter as well
-local a_mathsize = privateattribute("mathsize") -- this might move into other fence code
-local resize = { }
-processors.resize = resize
+do
+
+ local a_mathsize = privateattribute("mathsize") -- this might move into other fence code
+ local resize = { }
-resize[math_fence] = function(pointer)
- local subtype = getsubtype(pointer)
- if subtype == left_fence_code or subtype == right_fence_code then
- local a = getattr(pointer,a_mathsize)
- if a and a > 0 then
- local method, size = div(a,100), a % 100
- setattr(pointer,a_mathsize,0)
- local delimiter = getfield(pointer,"delim")
- local chr = getfield(delimiter,"small_char")
- if chr > 0 then
- local fam = getfield(delimiter,"small_fam")
- local id = font_of_family(fam)
- if id > 0 then
- setfield(delimiter,"small_char",mathematics.big(fontdata[id],chr,size,method))
+ resize[math_fence] = function(pointer)
+ local subtype = getsubtype(pointer)
+ if subtype == left_fence_code or subtype == right_fence_code then
+ local a = getattr(pointer,a_mathsize)
+ if a and a > 0 then
+ local method, size = div(a,100), a % 100
+ setattr(pointer,a_mathsize,0)
+ local delimiter = getfield(pointer,"delim")
+ local chr = getfield(delimiter,"small_char")
+ if chr > 0 then
+ local fam = getfield(delimiter,"small_fam")
+ local id = font_of_family(fam)
+ if id > 0 then
+ setfield(delimiter,"small_char",mathematics.big(fontdata[id],chr,size,method))
+ end
end
end
end
end
-end
-function handlers.resize(head,style,penalties)
- processnoads(head,resize,"resize")
- return true
+ -- will become:
+
+ -- resize[math_fence] = function(pointer)
+ -- local subtype = getsubtype(pointer)
+ -- if subtype == left_fence_code or subtype == right_fence_code then
+ -- local a = getattr(pointer,a_mathsize)
+ -- if a and a > 0 then
+ -- local method, size = div(a,100), a % 100
+ -- setattr(pointer,a_mathsize,0)
+ -- local delimiter = getfield(pointer,"delim")
+ -- local chr = getchar(delimiter)
+ -- if chr > 0 then
+ -- local fam = getfam(delimiter)
+ -- local id = font_of_family(fam)
+ -- if id > 0 then
+ -- setchar(delimiter,mathematics.big(fontdata[id],chr,size,method))
+ -- end
+ -- end
+ -- end
+ -- end
+ -- end
+
+ function handlers.resize(head,style,penalties)
+ processnoads(head,resize,"resize")
+ return true
+ end
+
end
-- still not perfect:
-local a_autofence = privateattribute("mathautofence")
-local autofences = { }
-processors.autofences = autofences
-local dummyfencechar = 0x2E
-
-local function makefence(what,char)
- local d = new_node(math_delim)
- local f = new_node(math_fence)
- if char then
- local sym = getnucleus(char)
- local chr = getchar(sym)
- local fam = getfield(sym,"fam")
- if chr == dummyfencechar then
- chr = 0
+do
+
+ local a_autofence = privateattribute("mathautofence")
+ local autofences = { }
+ local dummyfencechar = 0x2E
+
+ local function makefence(what,char)
+ local d = new_delimiter()
+ local f = new_fence()
+ if char then
+ local sym = getnucleus(char)
+ local chr = getchar(sym)
+ local fam = getfam(sym)
+ if chr == dummyfencechar then
+ chr = 0
+ end
+ setfield(d,"small_char",chr)
+ setfield(d,"small_fam",fam)
+ flush_node(sym)
end
- setfield(d,"small_char",chr)
- setfield(d,"small_fam", fam)
- flush_node(sym)
- end
- setsubtype(f,what)
- setfield(f,"delim",d)
- setfield(f,"class",-1) -- tex itself does this, so not fenceclasses[what]
- return f
-end
+ setsubtype(f,what)
+ setfield(f,"delim",d)
+ setfield(f,"class",-1) -- tex itself does this, so not fenceclasses[what]
+ return f
+ end
-local function makelist(noad,f_o,o_next,c_prev,f_c,middle)
- local list = new_node(math_sub)
- setlist(list,f_o)
- setsubtype(noad,noad_inner)
- setnucleus(noad,list)
- setlink(f_o,o_next)
- setlink(c_prev,f_c)
- if middle and next(middle) then
- local prev = f_o
- local current = o_next
- while current ~= f_c do
- local m = middle[current]
- if m then
- local next = getnext(current)
- local fence = makefence(middle_fence_code,current)
- setnucleus(current)
- flush_node(current)
- middle[current] = nil
- -- replace_node
- setlink(prev,fence,next)
- prev = fence
- current = next
- else
- prev = current
- current = getnext(current)
+ -- will become
+
+ -- local function makefence(what,char)
+ -- local d = new_delimiter()
+ -- local f = new_fence()
+ -- if char then
+ -- local sym = getnucleus(char)
+ -- local chr = getchar(sym)
+ -- local fam = getfam(sym)
+ -- if chr == dummyfencechar then
+ -- chr = 0
+ -- end
+ -- setchar(d,chr)
+ -- setfam(d,fam)
+ -- flush_node(sym)
+ -- end
+ -- setsubtype(f,what)
+ -- setfield(f,"delim",d)
+ -- setfield(f,"class",-1) -- tex itself does this, so not fenceclasses[what]
+ -- return f
+ -- end
+
+ local function makelist(noad,f_o,o_next,c_prev,f_c,middle)
+ local list = new_submlist()
+ setlist(list,f_o)
+ setsubtype(noad,noad_inner)
+ setnucleus(noad,list)
+ setlink(f_o,o_next)
+ setlink(c_prev,f_c)
+ if middle and next(middle) then
+ local prev = f_o
+ local current = o_next
+ while current ~= f_c do
+ local m = middle[current]
+ if m then
+ local next = getnext(current)
+ local fence = makefence(middle_fence_code,current)
+ setnucleus(current)
+ flush_node(current)
+ middle[current] = nil
+ -- replace_node
+ setlink(prev,fence,next)
+ prev = fence
+ current = next
+ else
+ prev = current
+ current = getnext(current)
+ end
end
end
end
-end
-local function convert_both(open,close,middle)
- local o_prev, o_next = getboth(open)
- local c_prev, c_next = getboth(close)
- if o_next == close then
- return close
- else
+ local function convert_both(open,close,middle)
+ local o_prev, o_next = getboth(open)
+ local c_prev, c_next = getboth(close)
+ if o_next == close then
+ return close
+ else
+ local f_o = makefence(left_fence_code,open)
+ local f_c = makefence(right_fence_code,close)
+ makelist(open,f_o,o_next,c_prev,f_c,middle)
+ setnucleus(close)
+ flush_node(close)
+ if c_next then
+ setprev(c_next,open)
+ end
+ setnext(open,c_next)
+ return open
+ end
+ end
+
+ local function convert_open(open,last,middle)
local f_o = makefence(left_fence_code,open)
- local f_c = makefence(right_fence_code,close)
- makelist(open,f_o,o_next,c_prev,f_c,middle)
- setnucleus(close)
- flush_node(close)
- if c_next then
- setprev(c_next,open)
+ local f_c = makefence(right_fence_code)
+ local o_prev, o_next = getboth(open)
+ local l_prev, l_next = getboth(last)
+ makelist(open,f_o,o_next,last,f_c,middle)
+ if l_next then
+ setprev(l_next,open)
end
- setnext(open,c_next)
+ setnext(open,l_next)
return open
end
-end
-
-local function convert_open(open,last,middle)
- local f_o = makefence(left_fence_code,open)
- local f_c = makefence(right_fence_code)
- local o_prev, o_next = getboth(open)
- local l_prev, l_next = getboth(last)
- makelist(open,f_o,o_next,last,f_c,middle)
- if l_next then
- setprev(l_next,open)
- end
- setnext(open,l_next)
- return open
-end
-local function convert_close(close,first,middle)
- local f_o = makefence(left_fence_code)
- local f_c = makefence(right_fence_code,close)
- local c_prev = getprev(close)
- makelist(close,f_o,first,c_prev,f_c,middle)
- return close
-end
+ local function convert_close(close,first,middle)
+ local f_o = makefence(left_fence_code)
+ local f_c = makefence(right_fence_code,close)
+ local c_prev = getprev(close)
+ makelist(close,f_o,first,c_prev,f_c,middle)
+ return close
+ end
-local stacks = setmetatableindex("table")
-
-local function processfences(pointer,n,parent)
- local current = pointer
- local last = pointer
- local start = pointer
- local done = false
- local initial = pointer
- local stack = nil
- local middle = nil -- todo: use properties
- while current do
- local id = getid(current)
- if id == math_noad then
- local a = getattr(current,a_autofence)
- if a and a > 0 then
- local stack = stacks[n]
- setattr(current,a_autofence,0)
- if a == 1 or (a == 4 and (not stack or #stack == 0)) then
- if trace_fences then
- report_fences("%2i: pushing open on stack",n)
- end
- insert(stack,current)
- elseif a == 2 or a == 4 then
- local open = remove(stack)
- if open then
+ local stacks = setmetatableindex("table")
+
+ local function processfences(pointer,n,parent)
+ local current = pointer
+ local last = pointer
+ local start = pointer
+ local done = false
+ local initial = pointer
+ local stack = nil
+ local middle = nil -- todo: use properties
+ while current do
+ local id = getid(current)
+ if id == math_noad then
+ local a = getattr(current,a_autofence)
+ if a and a > 0 then
+ local stack = stacks[n]
+ setattr(current,a_autofence,0)
+ if a == 1 or (a == 4 and (not stack or #stack == 0)) then
if trace_fences then
- report_fences("%2i: handling %s, stack depth %i",n,"both",#stack+1)
+ report_fences("%2i: pushing open on stack",n)
+ end
+ insert(stack,current)
+ elseif a == 2 or a == 4 then
+ local open = remove(stack)
+ if open then
+ if trace_fences then
+ report_fences("%2i: handling %s, stack depth %i",n,"both",#stack+1)
+ end
+ current = convert_both(open,current,middle)
+ elseif current == start then
+ -- skip
+ else
+ if trace_fences then
+ report_fences("%2i: handling %s, stack depth %i",n,"close",#stack+1)
+ end
+ current = convert_close(current,initial,middle)
+ if not parent then
+ initial = current
+ end
end
- current = convert_both(open,current,middle)
- elseif current == start then
- -- skip
- else
if trace_fences then
- report_fences("%2i: handling %s, stack depth %i",n,"close",#stack+1)
+ report_fences("%2i: popping close from stack",n)
end
- current = convert_close(current,initial,middle)
- if not parent then
- initial = current
+ elseif a == 3 then
+ if trace_fences then
+ report_fences("%2i: registering middle",n)
+ end
+ if middle then
+ middle[current] = last
+ else
+ middle = { [current] = last }
end
end
- if trace_fences then
- report_fences("%2i: popping close from stack",n)
- end
- elseif a == 3 then
- if trace_fences then
- report_fences("%2i: registering middle",n)
- end
- if middle then
- middle[current] = last
- else
- middle = { [current] = last }
- end
+ done = true
+ else
+ processstep(current,processfences,n+1,id)
end
- done = true
else
- processstep(current,processfences,n+1,id)
+ -- next at current level
+ processstep(current,processfences,n,id)
end
- else
- -- next at current level
- processstep(current,processfences,n,id)
+ last = current
+ current = getnext(current)
end
- last = current
- current = getnext(current)
- end
- if done then
- local stack = stacks[n]
- local s = #stack
- if s > 0 then
- if trace_fences then
- report_fences("%2i: handling %s stack levels",n,s)
- end
- for i=1,s do
- local open = remove(stack)
+ if done then
+ local stack = stacks[n]
+ local s = #stack
+ if s > 0 then
if trace_fences then
- report_fences("%2i: handling %s, stack depth %i",n,"open",#stack)
+ report_fences("%2i: handling %s stack levels",n,s)
+ end
+ for i=1,s do
+ local open = remove(stack)
+ if trace_fences then
+ report_fences("%2i: handling %s, stack depth %i",n,"open",#stack)
+ end
+ last = convert_open(open,last,middle)
end
- last = convert_open(open,last,middle)
end
end
end
-end
--- we can have a first changed node .. an option is to have a leading dummy node in math
--- lists like the par node as it can save a lot of mess
+ -- we can have a first changed node .. an option is to have a leading dummy node in math
+ -- lists like the par node as it can save a lot of mess
-local enabled = false
+ local enabled = false
-implement {
- name = "enableautofences",
- onlyonce = true,
- actions = function()
- enableaction("math","noads.handlers.autofences")
- enabled = true
- end
-}
+ implement {
+ name = "enableautofences",
+ onlyonce = true,
+ actions = function()
+ enableaction("math","noads.handlers.autofences")
+ enabled = true
+ end
+ }
-function handlers.autofences(head,style,penalties)
- if enabled then -- tex.modes.c_math_fences_auto
- -- inspect(nodes.totree(head))
- processfences(tonut(head),1)
- -- inspect(nodes.totree(head))
+ function handlers.autofences(head,style,penalties)
+ if enabled then -- tex.modes.c_math_fences_auto
+ -- inspect(nodes.totree(head))
+ processfences(tonut(head),1)
+ -- inspect(nodes.totree(head))
+ end
end
+
end
-- normalize scripts
-local unscript = { } noads.processors.unscript = unscript
-local superscripts = characters.superscripts
-local subscripts = characters.subscripts
-local fractions = characters.fractions
-local replaced = { }
-
-local function replace(pointer,what,n,parent)
- pointer = parent -- we're following the parent list (chars trigger this)
- local next = getnext(pointer)
- local start_super, stop_super, start_sub, stop_sub
- local mode = "unset"
- while next and getid(next) == math_noad do
- local nextnucleus = getnucleus(next)
- if nextnucleus and getid(nextnucleus) == math_char and not getsub(next) and not getsup(next) then
- local char = getchar(nextnucleus)
- local s = superscripts[char]
- if s then
- if not start_super then
- start_super = next
- mode = "super"
- elseif mode == "sub" then
- break
- end
- stop_super = next
- next = getnext(next)
- setchar(nextnucleus,s)
- replaced[char] = (replaced[char] or 0) + 1
- if trace_normalizing then
- report_normalizing("superscript %C becomes %C",char,s)
- end
- else
- local s = subscripts[char]
+do
+
+ local unscript = { } noads.processors.unscript = unscript
+ local superscripts = characters.superscripts
+ local subscripts = characters.subscripts
+ local fractions = characters.fractions
+ local replaced = { }
+
+ local function replace(pointer,what,n,parent)
+ pointer = parent -- we're following the parent list (chars trigger this)
+ local next = getnext(pointer)
+ local start_super, stop_super, start_sub, stop_sub
+ local mode = "unset"
+ while next and getid(next) == math_noad do
+ local nextnucleus = getnucleus(next)
+ if nextnucleus and getid(nextnucleus) == math_char and not getsub(next) and not getsup(next) then
+ local char = getchar(nextnucleus)
+ local s = superscripts[char]
if s then
- if not start_sub then
- start_sub = next
- mode = "sub"
- elseif mode == "super" then
+ if not start_super then
+ start_super = next
+ mode = "super"
+ elseif mode == "sub" then
break
end
- stop_sub = next
+ stop_super = next
next = getnext(next)
setchar(nextnucleus,s)
replaced[char] = (replaced[char] or 0) + 1
if trace_normalizing then
- report_normalizing("subscript %C becomes %C",char,s)
+ report_normalizing("superscript %C becomes %C",char,s)
end
else
- break
+ local s = subscripts[char]
+ if s then
+ if not start_sub then
+ start_sub = next
+ mode = "sub"
+ elseif mode == "super" then
+ break
+ end
+ stop_sub = next
+ next = getnext(next)
+ setchar(nextnucleus,s)
+ replaced[char] = (replaced[char] or 0) + 1
+ if trace_normalizing then
+ report_normalizing("subscript %C becomes %C",char,s)
+ end
+ else
+ break
+ end
end
+ else
+ break
end
- else
- break
- end
- end
- if start_super then
- if start_super == stop_super then
- setsup(pointer,getnucleus(start_super))
- else
- local list = new_node(math_sub) -- todo attr
- setlist(list,start_super)
- setsup(pointer,list)
- end
- if mode == "super" then
- setnext(pointer,getnext(stop_super))
end
- setnext(stop_super)
- end
- if start_sub then
- if start_sub == stop_sub then
- setsub(pointer,getnucleus(start_sub))
- else
- local list = new_node(math_sub) -- todo attr
- setlist(list,start_sub)
- setsub(pointer,list)
+ if start_super then
+ if start_super == stop_super then
+ setsup(pointer,getnucleus(start_super))
+ else
+ local list = new_submlist() -- todo attr
+ setlist(list,start_super)
+ setsup(pointer,list)
+ end
+ if mode == "super" then
+ setnext(pointer,getnext(stop_super))
+ end
+ setnext(stop_super)
end
- if mode == "sub" then
- setnext(pointer,getnext(stop_sub))
+ if start_sub then
+ if start_sub == stop_sub then
+ setsub(pointer,getnucleus(start_sub))
+ else
+ local list = new_submlist() -- todo attr
+ setlist(list,start_sub)
+ setsub(pointer,list)
+ end
+ if mode == "sub" then
+ setnext(pointer,getnext(stop_sub))
+ end
+ setnext(stop_sub)
end
- setnext(stop_sub)
+ -- we could return stop
end
- -- we could return stop
-end
-unscript[math_char] = replace -- not noads as we need to recurse
+ unscript[math_char] = replace -- not noads as we need to recurse
+
+ function handlers.unscript(head,style,penalties)
+ processnoads(head,unscript,"unscript")
+ -- processnoads(head,checkers,"checkers")
+ return true
+ end
-function handlers.unscript(head,style,penalties)
- processnoads(head,unscript,"unscript")
--- processnoads(head,checkers,"checkers")
- return true
end
-local function collected(list)
- if list and next(list) then
- local n, t = 0, { }
- for k, v in sortedhash(list) do
- n = n + v
- t[#t+1] = formatters["%C"](k)
+do
+
+ local function collected(list)
+ if list and next(list) then
+ local n, t = 0, { }
+ for k, v in sortedhash(list) do
+ n = n + 1
+ t[n] = formatters["%C"](k)
+ end
+ return formatters["% t (n=%s)"](t,n)
end
- return formatters["% t (n=%s)"](t,n)
end
-end
-statistics.register("math script replacements", function()
- return collected(replaced)
-end)
+ statistics.register("math script replacements", function()
+ return collected(replaced)
+ end)
-statistics.register("unknown math characters", function()
- return collected(unknowns)
-end)
+ statistics.register("unknown math characters", function()
+ return collected(unknowns)
+ end)
+
+end
-- math alternates: (in xits lgf: $ABC$ $\cal ABC$ $\mathalternate{cal}\cal ABC$)
-- math alternates: (in lucidaot lgf: $ABC \mathalternate{italic} ABC$)
@@ -1050,7 +1145,7 @@ do
local last = 0
local known = setmetatableindex(function(t,k)
- local v = setbit(0,2^last)
+ local v = bor(0,2^last)
t[k] = v
last = last + 1
return v
@@ -1080,7 +1175,8 @@ do
alternates = alternates,
registered = registered,
presets = { },
-hashes = setmetatableindex("table")
+ resets = { },
+ hashes = setmetatableindex("table")
}
resources.mathalternates = mathalternates
end
@@ -1144,7 +1240,7 @@ hashes = setmetatableindex("table")
local fontdata = fonts.hashes.identifiers
local fontresources = fonts.hashes.resources
- local function getalternate(fam,tag)
+ local function getalternate(fam,tag,current)
local resources = fontresources[font_of_family(fam)]
local attribute = unsetvalue
if resources then
@@ -1152,25 +1248,37 @@ hashes = setmetatableindex("table")
if mathalternates then
local presets = mathalternates.presets
if presets then
+ local resets = mathalternates.resets
attribute = presets[tag]
if not attribute then
attribute = 0
local alternates = mathalternates.alternates
for s in gmatch(tag,"[^, ]+") do
- local a = alternates[s] -- or known[s]
- if a then
- attribute = attribute + a
+ if s == v_reset then
+ resets[tag] = true
+ current = unsetvalue
+ else
+ local a = alternates[s] -- or known[s]
+ if a then
+ attribute = bor(attribute,a)
+ end
end
end
if attribute == 0 then
attribute = unsetvalue
end
presets[tag] = attribute
+ elseif resets[tag] then
+ current = unsetvalue
end
end
end
end
- return attribute
+ if attribute > 0 and current and current > 0 then
+ return bor(current,attribute)
+ else
+ return attribute
+ end
end
local function presetalternate(fam,tag)
@@ -1185,10 +1293,7 @@ hashes = setmetatableindex("table")
local function setalternate(fam,tag)
local a = texgetattribute(a_mathalternate)
- local v = getalternate(fam,tag)
- if a and a > 0 then
- v = a + v
- end
+ local v = getalternate(fam,tag,a)
texsetattribute(a_mathalternate,v)
end
@@ -1212,7 +1317,7 @@ hashes = setmetatableindex("table")
local hashes = mathalternates.hashes
for i=1,#registered do
local r = registered[i]
- if hasbit(a,r) then
+ if band(a,r) ~= 0 then
local char = getchar(pointer)
local alt = hashes[i][char]
if alt == nil then
@@ -1259,66 +1364,59 @@ end
-- some juggling that we want to avoid but we need to do something here (in fact, we could
-- better fix the width of the character)
-local a_mathitalics = privateattribute("mathitalics")
+do
+
+ local a_mathitalics = privateattribute("mathitalics")
-local italics = { }
-local default_factor = 1/20
+ local italics = { }
+ local default_factor = 1/20
-local setcolor = nodes.tracers.colors.set
-local resetcolor = nodes.tracers.colors.reset
-local italic_kern = new_kern
-local c_positive_d = "trace:dg"
-local c_negative_d = "trace:dr"
+ local setcolor = colortracers.set
+ local resetcolor = colortracers.reset
+ local italic_kern = new_kern
-local function insert_kern(current,kern)
- local sub = new_node(math_sub) -- todo: pool
- local noad = new_node(math_noad) -- todo: pool
- setlist(sub,kern)
- setnext(kern,noad)
- setnucleus(noad,current)
- return sub
-end
+ local c_positive_d = "trace:dg"
+ local c_negative_d = "trace:dr"
-registertracker("math.italics.visualize", function(v)
- if v then
- italic_kern = function(k)
- local n = new_kern(k)
- set_visual(n,"italic")
- return n
- end
- else
- italic_kern = new_kern
+ local function insert_kern(current,kern)
+ local sub = new_submlist()
+ local noad = new_noad()
+ setlist(sub,kern)
+ setnext(kern,noad)
+ setnucleus(noad,current)
+ return sub
end
-end)
-local function getcorrection(method,font,char) -- -- or character.italic -- (this one is for tex)
+ registertracker("math.italics.visualize", function(v)
+ if v then
+ italic_kern = function(k)
+ local n = new_kern(k)
+ set_visual(n,"italic")
+ return n
+ end
+ else
+ italic_kern = new_kern
+ end
+ end)
- local visual = chardata[char].visual
+ local function getcorrection(method,font,char) -- -- or character.italic -- (this one is for tex)
- if method == 1 then
- -- check on state
- local italics = fontitalics[font]
- if italics then
- local character = fontcharacters[font][char]
- if character then
- local correction = character.italic
- if correction and correction ~= 0 then
- return correction, visual
+ local visual = chardata[char].visual
+
+ if method == 1 then
+ -- check on state
+ local italics = fontitalics[font]
+ if italics then
+ local character = fontcharacters[font][char]
+ if character then
+ local correction = character.italic
+ if correction and correction ~= 0 then
+ return correction, visual
+ end
end
end
- end
- elseif method == 2 then
- -- no check
- local character = fontcharacters[font][char]
- if character then
- local correction = character.italic
- if correction and correction ~= 0 then
- return correction, visual
- end
- end
- elseif method == 3 then
- -- check on visual
- if visual == "it" or visual == "bi" then
+ elseif method == 2 then
+ -- no check
local character = fontcharacters[font][char]
if character then
local correction = character.italic
@@ -1326,109 +1424,121 @@ local function getcorrection(method,font,char) -- -- or character.italic -- (thi
return correction, visual
end
end
- end
- elseif method == 4 then
- -- combination of 1 and 3
- local italics = fontitalics[font]
- if italics and (visual == "it" or visual == "bi") then
- local character = fontcharacters[font][char]
- if character then
- local correction = character.italic
- if correction and correction ~= 0 then
- return correction, visual
+ elseif method == 3 then
+ -- check on visual
+ if visual == "it" or visual == "bi" then
+ local character = fontcharacters[font][char]
+ if character then
+ local correction = character.italic
+ if correction and correction ~= 0 then
+ return correction, visual
+ end
+ end
+ end
+ elseif method == 4 then
+ -- combination of 1 and 3
+ local italics = fontitalics[font]
+ if italics and (visual == "it" or visual == "bi") then
+ local character = fontcharacters[font][char]
+ if character then
+ local correction = character.italic
+ if correction and correction ~= 0 then
+ return correction, visual
+ end
end
end
end
- end
-end
+ end
-italics[math_char] = function(pointer,what,n,parent)
- local method = getattr(pointer,a_mathitalics)
- if method and method > 0 and method < 100 then
- local char = getchar(pointer)
- local font = getfont(pointer)
- local correction, visual = getcorrection(method,font,char)
- if correction and correction ~= 0 then
- local next_noad = getnext(parent)
- if not next_noad then
- if n == 1 then
- -- only at the outer level .. will become an option (always,endonly,none)
- if trace_italics then
- report_italics("method %a, flagging italic correction %p between %C and end math",method,correction,char)
- end
- if correction > 0 then
- correction = correction + 100
- else
- correction = correction - 100
+ italics[math_char] = function(pointer,what,n,parent)
+ local method = getattr(pointer,a_mathitalics)
+ if method and method > 0 and method < 100 then
+ local char = getchar(pointer)
+ local font = getfont(pointer)
+ local correction, visual = getcorrection(method,font,char)
+ if correction and correction ~= 0 then
+ local next_noad = getnext(parent)
+ if not next_noad then
+ if n == 1 then
+ -- only at the outer level .. will become an option (always,endonly,none)
+ if trace_italics then
+ report_italics("method %a, flagging italic correction %p between %C and end math",method,correction,char)
+ end
+ if correction > 0 then
+ correction = correction + 100
+ else
+ correction = correction - 100
+ end
+ setattr(pointer,a_mathitalics,correction)
+ setattr(parent,a_mathitalics,correction)
+ return -- so no reset later on
end
- setattr(pointer,a_mathitalics,correction)
- setattr(parent,a_mathitalics,correction)
end
end
end
+ setattr(pointer,a_mathitalics,unsetvalue)
end
-end
-
-function handlers.italics(head,style,penalties)
- processnoads(head,italics,"italics")
- return true
-end
-local enable
+ function handlers.italics(head,style,penalties)
+ processnoads(head,italics,"italics")
+ return true
+ end
-enable = function()
- enableaction("math", "noads.handlers.italics")
- if trace_italics then
- report_italics("enabling math italics")
+ local enable = function()
+ enableaction("math", "noads.handlers.italics")
+ if trace_italics then
+ report_italics("enabling math italics")
+ end
+ -- we enable math (unless already enabled elsewhere)
+ typesetters.italics.enablemath()
+ enable = false
end
- -- we enable math (unless already enabled elsewhere)
- typesetters.italics.enablemath()
- enable = false
-end
--- best do this only on math mode (less overhead)
+ -- best do this only on math mode (less overhead)
-function mathematics.setitalics(name)
- if enable then
- enable()
+ function mathematics.setitalics(name)
+ if enable then
+ enable()
+ end
+ texsetattribute(a_mathitalics,name and name ~= v_reset and tonumber(name) or unsetvalue) -- maybe also v_none
end
- texsetattribute(a_mathitalics,name and name ~= v_reset and tonumber(name) or unsetvalue) -- maybe also v_none
-end
-function mathematics.getitalics(name)
- if enable then
- enable()
+ function mathematics.getitalics(name)
+ if enable then
+ enable()
+ end
+ context(name and name ~= v_reset and tonumber(name) or unsetvalue)
end
- context(name and name ~= v_reset and tonumber(name) or unsetvalue)
-end
-function mathematics.resetitalics()
- texsetattribute(a_mathitalics,unsetvalue)
-end
+ function mathematics.resetitalics()
+ texsetattribute(a_mathitalics,unsetvalue)
+ end
-implement {
- name = "initializemathitalics",
- actions = enable,
- onlyonce = true,
-}
+ implement {
+ name = "initializemathitalics",
+ actions = enable,
+ onlyonce = true,
+ }
-implement {
- name = "setmathitalics",
- actions = mathematics.setitalics,
- arguments = "string",
-}
+ implement {
+ name = "setmathitalics",
+ actions = mathematics.setitalics,
+ arguments = "string",
+ }
-implement {
- name = "getmathitalics",
- actions = mathematics.getitalics,
- arguments = "string",
-}
+ implement {
+ name = "getmathitalics",
+ actions = mathematics.getitalics,
+ arguments = "string",
+ }
-implement {
- name = "resetmathitalics",
- actions = mathematics.resetitalics
-}
+ implement {
+ name = "resetmathitalics",
+ actions = mathematics.resetitalics
+ }
+
+end
do
@@ -1461,12 +1571,14 @@ do
local hash = setmetatableindex(function(t,font)
local g = fontdata[font].goodies
- local m = g and g[1].mathematics
+ local m = g and g[1] and g[1].mathematics
local k = m and m.kernpairs
t[font] = k
return k
end)
+ -- no correction after prime because that moved to a superscript
+
kernpairs[math_char] = function(pointer,what,n,parent)
if getattr(pointer,a_kernpairs) == 1 then
local font = getfont(pointer)
@@ -1506,243 +1618,372 @@ end
-- primes and such
-local collapse = { } processors.collapse = collapse
+do
+
+ -- is validpair still needed?
+
+ local a_mathcollapsing = privateattribute("mathcollapsing")
+ local collapse = { }
+ local mathlists = characters.mathlists
+ local validpair = {
+ [noad_ord] = true,
+ [noad_rel] = true,
+ [noad_bin] = true, -- new
+ [noad_open] = true, -- new
+ [noad_close] = true, -- new
+ [noad_punct] = true, -- new
+ [noad_opdisplaylimits] = true,
+ [noad_oplimits] = true,
+ [noad_opnolimits] = true,
+ }
-local mathpairs = characters.mathpairs -- next will move to char-def
+ local reported = setmetatableindex("table")
+
+ collapse[math_char] = function(pointer,what,n,parent)
+
+ if parent and mathlists[getchar(pointer)] then
+ local found, last, lucleus, lsup, lsub, category
+ local tree = mathlists
+ local current = parent
+ while current and validpair[getsubtype(current)] do
+ local nucleus = getnucleus(current) -- == pointer
+ local sub = getsub(current)
+ local sup = getsup(current)
+ local char = getchar(nucleus)
+ if char then
+ local match = tree[char]
+ if match then
+ local method = getattr(current,a_mathcollapsing)
+ if method and method > 0 and method <= 3 then
+ local specials = match.specials
+ local mathlist = match.mathlist
+ local ligature
+ if method == 1 then
+ ligature = specials
+ elseif method == 2 then
+ ligature = specials or mathlist
+ else -- 3
+ ligature = mathlist or specials
+ end
+ if ligature then
+ category = mathlist and "mathlist" or "specials"
+ found = ligature
+ last = current
+ lucleus = nucleus
+ lsup = sup
+ lsub = sub
+ end
+ tree = match
+ if sub or sup then
+ break
+ else
+ current = getnext(current)
+ end
+ else
+ break
+ end
+ else
+ break
+ end
+ else
+ break
+ end
+ end
+ if found and last and lucleus then
+ local id = getfont(lucleus)
+ local characters = fontcharacters[id]
+ local replace = characters and characters[found]
+ if not replace then
+ if not reported[id][found] then
+ reported[id][found] = true
+ report_collapsing("%s ligature %C from %s","ignoring",found,category)
+ end
+ elseif trace_collapsing then
+ report_collapsing("%s ligature %C from %s","creating",found,category)
+ end
+ setchar(pointer,found)
+ local l = getnext(last)
+ local c = getnext(parent)
+ if lsub then
+ setsub(parent,lsub)
+ setsub(last)
+ end
+ if lsup then
+ setsup(parent,lsup)
+ setsup(last)
+ end
+ while c ~= l do
+ local n = getnext(c)
+ flush_node(c)
+ c = n
+ end
+ setlink(parent,l)
+ end
+ end
+ end
+
+ function noads.handlers.collapse(head,style,penalties)
+ processnoads(head,collapse,"collapse")
+ return true
+ end
-mathpairs[0x2032] = { [0x2032] = 0x2033, [0x2033] = 0x2034, [0x2034] = 0x2057 } -- (prime,prime) (prime,doubleprime) (prime,tripleprime)
-mathpairs[0x2033] = { [0x2032] = 0x2034, [0x2033] = 0x2057 } -- (doubleprime,prime) (doubleprime,doubleprime)
-mathpairs[0x2034] = { [0x2032] = 0x2057 } -- (tripleprime,prime)
+ local enable = function()
+ enableaction("math", "noads.handlers.collapse")
+ if trace_collapsing then
+ report_collapsing("enabling math collapsing")
+ end
+ enable = false
+ end
-mathpairs[0x2035] = { [0x2035] = 0x2036, [0x2036] = 0x2037 } -- (reversedprime,reversedprime) (reversedprime,doublereversedprime)
-mathpairs[0x2036] = { [0x2035] = 0x2037 } -- (doublereversedprime,reversedprime)
+ implement {
+ name = "initializemathcollapsing",
+ actions = enable,
+ onlyonce = true,
+ }
-mathpairs[0x222B] = { [0x222B] = 0x222C, [0x222C] = 0x222D }
-mathpairs[0x222C] = { [0x222B] = 0x222D }
+end
-mathpairs[0x007C] = { [0x007C] = 0x2016, [0x2016] = 0x2980 } -- bar+bar=double bar+double=triple
-mathpairs[0x2016] = { [0x007C] = 0x2980 } -- double+bar=triple
+do
+ -- inner under over vcenter
+
+ local fixscripts = { }
+ local movesub = {
+ -- primes
+ [0x2032] = 0xFE932,
+ [0x2033] = 0xFE933,
+ [0x2034] = 0xFE934,
+ [0x2057] = 0xFE957,
+ -- reverse primes
+ [0x2035] = 0xFE935,
+ [0x2036] = 0xFE936,
+ [0x2037] = 0xFE937,
+ }
-local movesub = {
- -- primes
- [0x2032] = 0xFE932,
- [0x2033] = 0xFE933,
- [0x2034] = 0xFE934,
- [0x2057] = 0xFE957,
- -- reverse primes
- [0x2035] = 0xFE935,
- [0x2036] = 0xFE936,
- [0x2037] = 0xFE937,
-}
+ mathematics.virtualize(movesub)
-local validpair = {
- [noad_rel] = true,
- [noad_ord] = true,
- [noad_bin] = true, -- new
- [noad_punct] = true, -- new
- [noad_opdisplaylimits] = true,
- [noad_oplimits] = true,
- [noad_opnolimits] = true,
-}
+ local options_supported = tokens.defined("Unosuperscript")
-local function movesubscript(parent,current_nucleus,current_char)
- local prev = getprev(parent)
- if prev and getid(prev) == math_noad then
- if not getsup(prev) and not getsub(prev) then
- -- {f} {'}_n => f_n^'
- setchar(current_nucleus,movesub[current_char or getchar(current_nucleus)])
- local nucleus = getnucleus(parent)
- local sub = getsub(parent)
- local sup = getsup(parent)
- setsup(prev,nucleus)
- setsub(prev,sub)
- local dummy = copy_node(nucleus)
- setchar(dummy,0)
- setnucleus(parent,dummy)
- setsub(parent)
- if trace_collapsing then
- report_collapsing("fixing subscript")
+ local function fixsupscript(parent,current,current_char,new_char)
+ if new_char ~= current_char and new_char ~= true then
+ setchar(current,new_char)
+ if trace_fixing then
+ report_fixing("fixing subscript, replacing superscript %U by %U",current_char,new_char)
end
- elseif not getsup(prev) then
- -- {f} {'}_n => f_n^'
- setchar(current_nucleus,movesub[current_char or getchar(current_nucleus)])
- local nucleus = getnucleus(parent)
- local sup = getsup(parent)
- setsup(prev,nucleus)
- local dummy = copy_node(nucleus)
- setchar(dummy,0)
- setnucleus(parent,dummy)
- if trace_collapsing then
- report_collapsing("fixing subscript")
+ else
+ if trace_fixing then
+ report_fixing("fixing subscript, superscript %U",current_char)
end
end
+ if options_supported then
+ setfield(parent,"options",0x08+0x22)
+ end
end
-end
-local function collapsepair(pointer,what,n,parent,nested) -- todo: switch to turn in on and off
- if parent then
- if validpair[getsubtype(parent)] then
- local current_nucleus = getnucleus(parent)
- if getid(current_nucleus) == math_char then
- local current_char = getchar(current_nucleus)
- if not getsub(parent) and not getsup(parent) then
- local mathpair = mathpairs[current_char]
- if mathpair then
- local next_noad = getnext(parent)
- if next_noad and getid(next_noad) == math_noad then
- if validpair[getsubtype(next_noad)] then
- local next_nucleus = getnucleus(next_noad)
- local next_char = getchar(next_nucleus)
- if getid(next_nucleus) == math_char then
- local newchar = mathpair[next_char]
- if newchar then
- local id = getfont(current_nucleus)
- local characters = fontcharacters[id]
- if characters and characters[newchar] then
- if trace_collapsing then
- report_collapsing("%U + %U => %U",current_char,next_char,newchar)
- end
- setchar(current_nucleus,newchar)
- local next_next_noad = getnext(next_noad)
- if next_next_noad then
- setlink(parent,next_next_noad)
- else
- setnext(parent)
- end
- setsup(parent,getsup(next_noad))
- setsub(parent,getsub(next_noad))
- setsup(next_noad)
- setsub(next_noad)
- flush_node(next_noad)
- collapsepair(pointer,what,n,parent,true)
- -- if not nested and movesub[current_char] then
- -- movesubscript(parent,current_nucleus,current_char)
- -- end
- end
- elseif not nested and movesub[current_char] then
- movesubscript(parent,current_nucleus,current_char)
- end
+ -- local function movesubscript(parent,current_nucleus,oldchar,newchar)
+ -- local prev = getprev(parent)
+ -- if prev and getid(prev) == math_noad then
+ -- local psup = getsup(prev)
+ -- local psub = getsub(prev)
+ -- if not psup and not psub then
+ -- fixsupscript(prev,current_nucleus,oldchar,newchar)
+ -- local nucleus = getnucleus(parent)
+ -- local sub = getsub(parent)
+ -- setsup(prev,nucleus)
+ -- setsub(prev,sub)
+ -- local dummy = copy_node(nucleus)
+ -- setchar(dummy,0)
+ -- setnucleus(parent,dummy)
+ -- setsub(parent)
+ -- elseif not psup then
+ -- fixsupscript(prev,current_nucleus,oldchar,newchar)
+ -- local nucleus = getnucleus(parent)
+ -- setsup(prev,nucleus)
+ -- local dummy = copy_node(nucleus)
+ -- setchar(dummy,0)
+ -- setnucleus(parent,dummy)
+ -- end
+ -- end
+ -- end
+
+ local function move_none_none(parent,prev,nuc,oldchar,newchar)
+ fixsupscript(prev,nuc,oldchar,newchar)
+ local sub = getsub(parent)
+ setsup(prev,nuc)
+ setsub(prev,sub)
+ local dummy = copy_node(nuc)
+ setchar(dummy,0)
+ setnucleus(parent,dummy)
+ setsub(parent)
+ end
+
+ local function move_none_psub(parent,prev,nuc,oldchar,newchar)
+ fixsupscript(prev,nuc,oldchar,newchar)
+ setsup(prev,nuc)
+ local dummy = copy_node(nuc)
+ setchar(dummy,0)
+ setnucleus(parent,dummy)
+ end
+
+ fixscripts[math_char] = function(pointer,what,n,parent,nested) -- todo: switch to turn it on and off
+ if parent then
+ local oldchar = getchar(pointer)
+ local newchar = movesub[oldchar]
+ if newchar then
+ local nuc = getnucleus(parent)
+ if pointer == nuc then
+ local sub = getsub(pointer)
+ local sup = getsup(pointer)
+ if sub then
+ if sup then
+ -- print("[char] sub sup")
+ else
+ -- print("[char] sub ---")
+ end
+ elseif sup then
+ -- print("[char] --- sup")
+ else
+ local prev = getprev(parent)
+ if prev and getid(prev) == math_noad then
+ local psub = getsub(prev)
+ local psup = getsup(prev)
+ if psub then
+ if psup then
+ -- print("sub sup [char] --- ---")
+ else
+ -- print("sub --- [char] --- ---")
+ move_none_psub(parent,prev,nuc,oldchar,newchar)
end
+ elseif psup then
+ -- print("--- sup [char] --- ---")
+ else
+ -- print("[char] --- ---")
+ move_none_none(parent,prev,nuc,oldchar,newchar)
end
- elseif not nested and movesub[current_char] then
- movesubscript(parent,current_nucleus,current_char)
+ else
+ -- print("no prev [char]")
end
- elseif not nested and movesub[current_char] then
- movesubscript(parent,current_nucleus,current_char)
end
- elseif not nested and movesub[current_char] then
- movesubscript(parent,current_nucleus,current_char)
+ else
+ -- print("[char]")
end
end
end
end
-end
-collapse[math_char] = collapsepair
+ function noads.handlers.fixscripts(head,style,penalties)
+ processnoads(head,fixscripts,"fixscripts")
+ return true
+ end
-function noads.handlers.collapse(head,style,penalties)
- processnoads(head,collapse,"collapse")
- return true
end
-- variants
-local variants = { }
-
-local validvariants = { -- fast check on valid
- [0x2229] = 0xFE00, [0x222A] = 0xFE00,
- [0x2268] = 0xFE00, [0x2269] = 0xFE00,
- [0x2272] = 0xFE00, [0x2273] = 0xFE00,
- [0x228A] = 0xFE00, [0x228B] = 0xFE00,
- [0x2293] = 0xFE00, [0x2294] = 0xFE00,
- [0x2295] = 0xFE00,
- [0x2297] = 0xFE00,
- [0x229C] = 0xFE00,
- [0x22DA] = 0xFE00, [0x22DB] = 0xFE00,
- [0x2A3C] = 0xFE00, [0x2A3D] = 0xFE00,
- [0x2A9D] = 0xFE00, [0x2A9E] = 0xFE00,
- [0x2AAC] = 0xFE00, [0x2AAD] = 0xFE00,
- [0x2ACB] = 0xFE00, [0x2ACC] = 0xFE00,
-}
+do
-variants[math_char] = function(pointer,what,n,parent) -- also set export value
- local char = getchar(pointer)
- local selector = validvariants[char]
- if selector then
- local next = getnext(parent)
- if next and getid(next) == math_noad then
- local nucleus = getnucleus(next)
- if nucleus and getid(nucleus) == math_char and getchar(nucleus) == selector then
- local variant
- local tfmdata = fontdata[getfont(pointer)]
- local mathvariants = tfmdata.resources.variants -- and variantdata
- if mathvariants then
- mathvariants = mathvariants[selector]
+ local variants = { }
+ local validvariants = { -- fast check on valid
+ [0x2229] = 0xFE00, [0x222A] = 0xFE00,
+ [0x2268] = 0xFE00, [0x2269] = 0xFE00,
+ [0x2272] = 0xFE00, [0x2273] = 0xFE00,
+ [0x228A] = 0xFE00, [0x228B] = 0xFE00,
+ [0x2293] = 0xFE00, [0x2294] = 0xFE00,
+ [0x2295] = 0xFE00,
+ [0x2297] = 0xFE00,
+ [0x229C] = 0xFE00,
+ [0x22DA] = 0xFE00, [0x22DB] = 0xFE00,
+ [0x2A3C] = 0xFE00, [0x2A3D] = 0xFE00,
+ [0x2A9D] = 0xFE00, [0x2A9E] = 0xFE00,
+ [0x2AAC] = 0xFE00, [0x2AAD] = 0xFE00,
+ [0x2ACB] = 0xFE00, [0x2ACC] = 0xFE00,
+ }
+
+ variants[math_char] = function(pointer,what,n,parent) -- also set export value
+ local char = getchar(pointer)
+ local selector = validvariants[char]
+ if selector then
+ local next = getnext(parent)
+ if next and getid(next) == math_noad then
+ local nucleus = getnucleus(next)
+ if nucleus and getid(nucleus) == math_char and getchar(nucleus) == selector then
+ local variant
+ local tfmdata = fontdata[getfont(pointer)]
+ local mathvariants = tfmdata.resources.variants -- and variantdata
if mathvariants then
- variant = mathvariants[char]
- end
- end
- if variant then
- setchar(pointer,variant)
- setattr(pointer,a_exportstatus,char) -- we don't export the variant as it's visual markup
- if trace_variants then
- report_variants("variant (%U,%U) replaced by %U",char,selector,variant)
+ mathvariants = mathvariants[selector]
+ if mathvariants then
+ variant = mathvariants[char]
+ end
end
- else
- if trace_variants then
- report_variants("no variant (%U,%U)",char,selector)
+ if variant then
+ setchar(pointer,variant)
+ setattr(pointer,a_exportstatus,char) -- we don't export the variant as it's visual markup
+ if trace_variants then
+ report_variants("variant (%U,%U) replaced by %U",char,selector,variant)
+ end
+ else
+ if trace_variants then
+ report_variants("no variant (%U,%U)",char,selector)
+ end
end
+ setprev(next,pointer)
+ setnext(parent,getnext(next))
+ flush_node(next)
end
- setprev(next,pointer)
- setnext(parent,getnext(next))
- flush_node(next)
end
end
end
-end
-function handlers.variants(head,style,penalties)
- processnoads(head,variants,"unicode variant")
- return true
+ function handlers.variants(head,style,penalties)
+ processnoads(head,variants,"unicode variant")
+ return true
+ end
+
end
-- for manuals
-local classes = { }
-
-local colors = {
- [noad_rel] = "trace:dr",
- [noad_ord] = "trace:db",
- [noad_bin] = "trace:dg",
- [noad_open] = "trace:dm",
- [noad_close] = "trace:dm",
- [noad_punct] = "trace:dc",
- -- [noad_opdisplaylimits] = "",
- -- [noad_oplimits] = "",
- -- [noad_opnolimits] = "",
- -- [noad_inner = "",
- -- [noad_under = "",
- -- [noad_over = "",
- -- [noad_vcenter = "",
-}
+do
-classes[math_char] = function(pointer,what,n,parent)
- local color = colors[getsubtype(parent)]
- if color then
- setcolor(pointer,color)
- else
- resetcolor(pointer)
+ local classes = { }
+ local colors = {
+ [noad_rel] = "trace:dr",
+ [noad_ord] = "trace:db",
+ [noad_bin] = "trace:dg",
+ [noad_open] = "trace:dm",
+ [noad_close] = "trace:dm",
+ [noad_punct] = "trace:dc",
+ -- [noad_opdisplaylimits] = "",
+ -- [noad_oplimits] = "",
+ -- [noad_opnolimits] = "",
+ -- [noad_inner = "",
+ -- [noad_under = "",
+ -- [noad_over = "",
+ -- [noad_vcenter = "",
+ }
+
+ local setcolor = colortracers.set
+ local resetcolor = colortracers.reset
+
+ classes[math_char] = function(pointer,what,n,parent)
+ local color = colors[getsubtype(parent)]
+ if color then
+ setcolor(pointer,color)
+ else
+ resetcolor(pointer)
+ end
end
-end
-function handlers.classes(head,style,penalties)
- processnoads(head,classes,"classes")
- return true
-end
+ function handlers.classes(head,style,penalties)
+ processnoads(head,classes,"classes")
+ return true
+ end
-registertracker("math.classes",function(v)
- setaction("math","noads.handlers.classes",v)
-end)
+ registertracker("math.classes",function(v)
+ setaction("math","noads.handlers.classes",v)
+ end)
+
+end
-- experimental
@@ -1760,9 +2001,7 @@ do
local categories = { }
local numbers = { }
local a_mathdomain = privateattribute("mathdomain")
-
mathematics.domains = categories
-
local permitted = {
ordinary = noad_ord,
binary = noad_bin,
@@ -1915,22 +2154,44 @@ end)
-- also for me
-local applyvisuals = nuts.applyvisuals
-local visual = false
+do
-function handlers.makeup(head)
- applyvisuals(tonut(head),visual)
-end
+ local applyvisuals = nuts.applyvisuals
+ local visual = false
-registertracker("math.makeup",function(v)
- visual = v
- setaction("math","noads.handlers.makeup",v)
-end)
+ function handlers.makeup(head)
+ applyvisuals(tonut(head),visual)
+ end
+
+ registertracker("math.makeup",function(v)
+ visual = v
+ setaction("math","noads.handlers.makeup",v)
+ end)
+
+end
-- the normal builder
-function builders.kernel.mlist_to_hlist(head,style,penalties)
- return mlist_to_hlist(head,style,penalties), true
+do
+
+ local force_penalties = false
+
+ -- registertracker("math.penalties",function(v)
+ -- force_penalties = v
+ -- end)
+
+ function builders.kernel.mlist_to_hlist(head,style,penalties)
+ return mlist_to_hlist(head,style,force_penalties or penalties), true
+ end
+
+ implement {
+ name = "setmathpenalties",
+ arguments = "integer",
+ actions = function(p)
+ force_penalties = p > 0
+ end,
+ }
+
end
-- function builders.kernel.mlist_to_hlist(head,style,penalties)
diff --git a/tex/context/base/mkiv/math-pln.mkiv b/tex/context/base/mkiv/math-pln.mkiv
index 754cb6141..d0e7e377d 100644
--- a/tex/context/base/mkiv/math-pln.mkiv
+++ b/tex/context/base/mkiv/math-pln.mkiv
@@ -138,4 +138,114 @@
\normalstartimath\tabskip\zeroskip\everycr\emptytoks\hfil\displaystyle\alignmark\alignmark\hfil\normalstopimath}\crcr
#1\crcr}}
+%D This comes from plain but I gave it a \CONTEXT\ touch:
+
+\def\math_border_NC_indeed
+ {\gdef\math_border_NC{\aligntab}}
+
+\let\math_border_NC\math_border_NC_indeed
+
+\def\math_border_NR
+ {\glet\math_border_NC\math_border_NC_indeed
+ \cr}
+
+\unexpanded\def\math_border_matrix#1#2#3% adapted plain
+ {\begingroup
+ %
+ \pushmacro\cr
+ %
+ \setbox\scratchbox\hbox\bgroup
+ \mr B%
+ \egroup
+ \scratchdimenone\wd\scratchbox
+ %
+ \scratchdimentwo.2\bodyfontsize
+ %
+ \mathsurround\zeropoint
+ %
+ \def\NC{\math_border_NC}%
+ \def\NR{\math_border_NR}%
+ %
+ \setbox\scratchboxone\vbox\bgroup
+ \def\cr{%
+ \crcr
+ \noalign\bgroup
+ \kern\scratchdimentwo
+ \global\let\cr\endline
+ \egroup
+ }%
+ \ialign{%
+ \startimath\alignmark\alignmark\stopimath
+ \hfil
+ \kern\dimexpr\scratchdimentwo+\scratchdimenone\relax
+ \aligntab
+ \thinspace
+ \hfil
+ \startimath\alignmark\alignmark\stopimath
+ \hfil
+ \aligntab
+ \aligntab
+ \quad
+ \hfil
+ \startimath\alignmark\alignmark\stopimath
+ \hfil
+ \crcr
+ \omit
+ \strut
+ \hfil
+ \crcr
+ \noalign{\kern-\baselineskip}%
+ #3\crcr
+ \omit
+ \strut
+ \cr
+ }%
+ \egroup
+ \setbox\scratchboxtwo\vbox\bgroup
+ \unvcopy\scratchboxone
+ \global\setbox\globalscratchbox\lastbox
+ \egroup
+ \setbox\scratchboxtwo\hbox\bgroup
+ \unhbox\globalscratchbox
+ \unskip
+ \global\setbox\globalscratchbox\lastbox
+ \egroup
+ \setbox\scratchboxtwo\hbox\bgroup
+ \startimath
+ \kern\dimexpr\wd\globalscratchbox-\scratchdimenone\relax
+ \left#1\relax
+ \kern-\wd\globalscratchbox
+ \global\setbox\globalscratchbox\vbox\bgroup
+ \box\globalscratchbox
+ \kern\scratchdimentwo
+ \egroup
+ \vcenter\bgroup
+ \kern-\ht\globalscratchbox
+ \unvbox\scratchboxone
+ \kern-\baselineskip
+ \egroup
+ \thinspace
+ \right#2\relax
+ \stopimath
+ \egroup
+ \null
+ \thickspace
+ \vbox\bgroup
+ \kern\ht\globalscratchbox
+ \box\scratchboxtwo
+ \egroup
+ %
+ \popmacro\cr
+ %
+ \endgroup}
+
+\unexpanded\def\bordermatrix {\math_border_matrix()}
+\unexpanded\def\bbordermatrix{\math_border_matrix[]}
+
+\unexpanded\def\startbordermatrix #1\stopbordermatrix {\math_border_matrix(){#1}}
+\unexpanded\def\startbbordermatrix#1\stopbbordermatrix{\math_border_matrix[]{#1}}
+
+\let\stopbordermatrix \relax
+\let\stopbbordermatrix\relax
+
\protect \endinput
diff --git a/tex/context/base/mkiv/math-rad.mkvi b/tex/context/base/mkiv/math-rad.mkvi
index 699a1a125..c0b128a61 100644
--- a/tex/context/base/mkiv/math-rad.mkvi
+++ b/tex/context/base/mkiv/math-rad.mkvi
@@ -240,13 +240,10 @@
\d_overlay_linewidth\linewidth
\edef\overlaylinecolor{\mathornamentparameter\c!color}%
\edef\p_mp{\mathornamentparameter\c!mp}%
- % thw width of the graphic determines the width of the final result
+ % the width of the graphic determines the width of the final result
\setbox\scratchbox\hpack{\uniqueMPgraphic{\p_mp}}% todo: add code key + tag
- \scratchdimen \wd\scratchbox
- % \scratchtopoffset \dimexpr\scratchoffset+\dp\nextbox\relax
- % \scratchbottomoffset\dimexpr\scratchoffset+\ht\nextbox/2\relax
- \hpack to \scratchdimen{\hss\box\nextbox\hss}%
- \hskip-\scratchdimen
+ \hpack to \wd\scratchbox{\hss\box\nextbox\hss}%
+ \hskip-\wd\scratchbox
\box\scratchbox
\endgroup}
diff --git a/tex/context/base/mkiv/math-spa.lua b/tex/context/base/mkiv/math-spa.lua
new file mode 100644
index 000000000..92ee662b9
--- /dev/null
+++ b/tex/context/base/mkiv/math-spa.lua
@@ -0,0 +1,89 @@
+if not modules then modules = { } end modules ['math-spa'] = {
+ version = 1.001,
+ comment = "companion to math-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- for the moment (when testing) we use a penalty 1
+
+local penalty_code = nodes.nodecodes.penalty
+local glue_code = nodes.nodecodes.glue
+
+local nuts = nodes.nuts
+local tonut = nodes.tonut
+local tonode = nodes.tonode
+local getid = nuts.getid
+local getnext = nuts.getnext
+local getwidth = nuts.getwidth
+local setglue = nuts.setglue
+local getpenalty = nuts.getpenalty
+local setpenalty = nuts.setpenalty
+
+local traverse_id = nuts.traverse_id
+local get_dimensions = nuts.dimensions
+
+
+local texsetdimen = tex.setdimen
+
+local v_none = interfaces.variables.none
+local v_auto = interfaces.variables.auto
+
+local method = v_none
+local distance = 0
+
+function noads.handlers.align(l)
+ if method ~= v_none then
+ local h = tonut(l)
+ if method == v_auto then
+ local s = h
+ while s do
+ local id = getid(s)
+ local n = getnext(s)
+ if id == penalty_code and getpenalty(s) == 1 then
+ setpenalty(s,0)
+ if n and getid(n) == glue_code then
+ s = n
+ n = getnext(s)
+ end
+ local w = get_dimensions(h,n) + distance
+ texsetdimen("global","d_strc_math_indent",w)
+ break
+ end
+ s = n
+ end
+ else
+ texsetdimen("global","d_strc_math_indent",distance)
+ end
+ for n in traverse_id(glue_code,h) do
+ setglue(n,getwidth(n),0,0)
+ end
+ else
+ -- texsetdimen("global","d_strc_math_indent",0)
+ end
+ return l, true
+end
+
+interfaces.implement {
+ name = "setmathhang",
+ arguments = {
+ {
+ { "method", "string" },
+ { "distance", "dimension" },
+ }
+ },
+ actions = function(t)
+ method = t.method or v_none
+ distance = t.distance or 0
+ end
+}
+
+interfaces.implement {
+ name = "resetmathhang",
+ actions = function(t)
+ method = v_none
+ distance = 0
+ end
+}
+
diff --git a/tex/context/base/mkiv/math-stc.mkvi b/tex/context/base/mkiv/math-stc.mkvi
index 208e756f6..c9d469d61 100644
--- a/tex/context/base/mkiv/math-stc.mkvi
+++ b/tex/context/base/mkiv/math-stc.mkvi
@@ -69,12 +69,20 @@
% currently no italic correction ... problem is that we don't know yet if we have an italic
% below so we need to postpone
-\def\math_stackers_fallback
- {\hbox to \scratchwidth{\csname\??mathextensiblefallbacks\ifcsname\??mathextensiblefallbacks\number\scratchunicode\endcsname\number\scratchunicode\fi\endcsname}}
- %{\csname\??mathextensiblefallbacks\ifcsname\??mathextensiblefallbacks\number\scratchunicode\endcsname\number\scratchunicode\fi\endcsname }
+% \def\math_stackers_fallback
+% {\hbox to \scratchwidth{\csname\??mathextensiblefallbacks\ifcsname\??mathextensiblefallbacks\number\scratchunicode\endcsname\number\scratchunicode\fi\endcsname}}
+% %{\csname\??mathextensiblefallbacks\ifcsname\??mathextensiblefallbacks\number\scratchunicode\endcsname\number\scratchunicode\fi\endcsname }
-% \def\math_stackers_regular
-% {\mathstylehbox{\Umathaccent\fam\zerocount\scratchunicode{\hskip\scratchwidth}}}
+\def\math_stackers_fallback
+ {\mathstylehbox to \scratchwidth{\usemathstackerscolorparameter\c!color
+ \hss
+ \hskip\mathstackersparameter\c!topoffset\relax % for manual italic correction
+ \ifcsname\??mathextensiblefallbacks\number\scratchunicode\endcsname
+ \lastnamedcs
+ \else
+ \Umathchar \fam \zerocount \scratchunicode
+ \fi
+ \hss}}
\def\math_stackers_regular
{\mathstylehbox{\usemathstackerscolorparameter\c!color
@@ -89,7 +97,8 @@
% $\Umathaccent 0 0 "2190{x}$ \par $\Umathaccent 0 0 "27F8{x}$\par
% $\Udelimiterunder 0 "2190{x}$ \par $\Udelimiterunder 0 "27F8{x}$\par
-\setvalue{\??mathextensiblefallbacks}{\hbox{\vrule\!!width\scratchwidth\!!height.1\exheight\!!depth\zeropoint}}
+\setvalue{\??mathextensiblefallbacks}%
+ {\hpack{\vrule\s!width\scratchwidth\s!height.1\mathexheight\s!depth\zeropoint}}
% \def\math_stackers_with_fallback#codepoint%
% {\begingroup
@@ -151,16 +160,13 @@
\c!right=,
\c!mathclass=\s!rel,
\c!alternative=\v!normal,
- \c!mp=math:stacker:\number\scratchunicode,
- \c!mpheight=\exheight,
- \c!mpdepth=\exheight,
- \c!mpoffset=.25\exheight,
- \c!voffset=.25\exheight,
- \c!hoffset=.5\emwidth,
+ \c!voffset=.25\mathexheight,
+ \c!hoffset=\zeropoint,
+ \c!topoffset=\zeropoint, % for manual italic correction
\c!distance=\mathstackersparameter\c!voffset, % distance between symbol and base (can be different from voffset)
- \c!minheight=\exheight,
+ \c!minheight=\mathexheight,
\c!mindepth=\zeropoint,
- \c!minwidth=.5\emwidth,
+ \c!minwidth=.5\mathemwidth,
\c!order=\v!normal,
\c!strut=,
\c!color=, % todo: when I need it
@@ -220,16 +226,46 @@
\letvalue{\??mathstackersalternative\v!normal }\math_stackers_content
\letvalue{\??mathstackersalternative\v!default}\math_stackers_content
+\setupmathstackers
+ [\c!mp=math:stacker:\number\scratchunicode,
+ \c!mpheight=\mathcharht\scratchunicode,
+ \c!mpdepth=\mathchardp\scratchunicode,
+ \c!mpoffset=.25\mathexheight]
+
+% \setvalue{\??mathstackersalternative\v!mp}%
+% {\hbox\bgroup % todo: add code key + tag
+% \d_overlay_width \scratchwidth
+% \d_overlay_height \dimexpr\mathstackersparameter\c!mpheight
+% \d_overlay_depth \dimexpr\mathstackersparameter\c!mpdepth
+% \d_overlay_offset \dimexpr\mathstackersparameter\c!mpoffset
+% \d_overlay_linewidth\linewidth
+% \edef\overlaylinecolor{\mathstackersparameter\c!color}%
+% \edef\p_mp{\mathstackersparameter\c!mp}%
+% \uniqueMPgraphic{\p_mp}%
+% \egroup}
+
\setvalue{\??mathstackersalternative\v!mp}%
- {\hbox\bgroup % todo: add code key + tag
+ {\normalexpanded{\math_stackers_mp_box
+ {\the\dimexpr\mathstackersparameter\c!mpheight}%
+ {\the\dimexpr\mathstackersparameter\c!mpdepth}%
+ {\the\dimexpr\mathstackersparameter\c!mpoffset}%
+ {\the\dimexpr\triggeredmathstyleparameter\Umathfractionrule}%
+ {\the\dimexpr\triggeredmathstyleparameter\Umathaxis}%
+ {\the\mathexheight}%
+ {\the\mathemwidth}%
+ }}
+
+\unexpanded\def\math_stackers_mp_box#1#2#3#4#5#6#7%
+ {\hpack\bgroup % todo: add code key + tag
+ % we can speed up \mathexheight expansion a bit
\d_overlay_width \scratchwidth
- \d_overlay_height \dimexpr\mathstackersparameter\c!mpheight
- \d_overlay_depth \dimexpr\mathstackersparameter\c!mpdepth
- \d_overlay_offset \dimexpr\mathstackersparameter\c!mpoffset
- \d_overlay_linewidth\linewidth
+ \d_overlay_height #1\relax
+ \d_overlay_depth #2\relax
+ \d_overlay_offset #3\relax
+ \d_overlay_linewidth#4\relax
\edef\overlaylinecolor{\mathstackersparameter\c!color}%
\edef\p_mp{\mathstackersparameter\c!mp}%
- \uniqueMPgraphic{\p_mp}%
+ \uniqueMPgraphic{\p_mp}{axis=#5,ex=#6,em=#7}%
\egroup}
\def\math_stackers_check_unicode#codepoint%
@@ -241,7 +277,7 @@
% heads/tails + hoffset
\else\ifx\p_offset\v!min
% heads/tails - hoffset
- \advance\scratchleftoffset-\scratchhoffset
+ \advance\scratchleftoffset -\scratchhoffset
\advance\scratchrightoffset-\scratchhoffset
\else % \v!normal
% hoffset
@@ -288,15 +324,27 @@
\ifmmode\math_class_by_parameter\mathstackersparameter\else\dontleavehmode\fi
{\edef\p_offset {\mathstackersparameter\c!offset}%
\edef\p_location {\mathstackersparameter\c!location}%
- \edef\p_order {\mathstackersparameter\c!order}%
\edef\p_strut {\mathstackersparameter\c!strut}%
\edef\p_alternative{\mathstackersparameter\c!alternative}%
- \ifx\p_order\v!reverse
- \edef\m_math_stackers_text_top {#bottomtext}%
- \edef\m_math_stackers_text_bottom{#toptext}%
- \else
- \edef\m_math_stackers_text_top {#toptext}%
- \edef\m_math_stackers_text_bottom{#bottomtext}%
+ % \ifx\p_order\v!reverse
+ % \ifsecondargument
+ % \edef\m_math_stackers_text_top {#bottomtext}%
+ % \edef\m_math_stackers_text_bottom{#toptext}%
+ % \else
+ % \edef\m_math_stackers_text_top {#toptext}%
+ % \let\m_math_stackers_text_bottom \empty
+ % \fi
+ % \else
+ % \edef\m_math_stackers_text_top {#toptext}%
+ % \edef\m_math_stackers_text_bottom{#bottomtext}%
+ % \fi
+ \edef\m_math_stackers_text_top {#toptext}%
+ \edef\m_math_stackers_text_bottom{#bottomtext}%
+ \ifsecondargument
+ \edef\p_order{\mathstackersparameter\c!order}%
+ \ifx\p_order\v!reverse
+ \swapmacros\m_math_stackers_text_top\m_math_stackers_text_bottom
+ \fi
\fi
\scratchleftoffset \zeropoint
\scratchrightoffset\zeropoint
@@ -317,13 +365,20 @@
\fi
%
\ifcase#method\relax
- \scratchwidth\wd
- \ifdim\wd\scratchboxone>\wd\scratchboxtwo
- \scratchboxone
- \else
- \scratchboxtwo
- \fi
- \relax
+ % e.g. extensible
+ %\scratchwidth\wd
+ % \ifdim\wd\scratchboxone>\wd\scratchboxtwo
+ % \scratchboxone
+ % \else
+ % \scratchboxtwo
+ % \fi
+ %\relax
+ \scratchwidth\mathcharwd\scratchunicode
+ \ifdim\wd\scratchboxone>\scratchwidth
+ \scratchwidth\wd\scratchboxone
+ \else\ifdim\wd\scratchboxtwo>\scratchwidth
+ \scratchwidth\wd\scratchboxtwo
+ \fi\fi
\else
\ifx\m_math_stackers_text_middle\empty
\setbox\scratchboxthree\emptyhbox
@@ -645,21 +700,50 @@
\unexpanded\def\mathunder {\begingroup\dosingleempty\math_stackers_handle_under }
\unexpanded\def\mathdouble{\begingroup\dodoubleempty\math_stackers_handle_double}
-\def\math_stackers_handle_over[#category]%
- {\math_stackers_direct_double\plusone\zerocount
- {\iffirstargument#category\else\v!top \fi}} % will be defined later on
+\unexpanded\def\definemathover {\dotripleargument \math_stackers_define_over }
+\unexpanded\def\definemathunder {\dotripleargument \math_stackers_define_under }
+\unexpanded\def\definemathdouble{\doquadrupleargument\math_stackers_define_double}
+
+\def\math_stackers_define_over[#category][#command][#topcode]%
+ {\setuvalue{#command}{\math_stackers_handle_direct\plusone\zerocount{#category}{#topcode}{0}}}
-\def\math_stackers_handle_under[#category]%
- {\math_stackers_direct_double\zerocount\plusone
- {\iffirstargument#category\else\v!bottom\fi}} % will be defined later on
+\def\math_stackers_define_under[#category][#command][#bottomcode]%
+ {\setuvalue{#command}{\math_stackers_handle_direct\zerocount\plusone{#category}{#bottomcode}{0}}}
-\def\math_stackers_handle_double[#topcategory][#bottomcategory]%
- {\math_stackers_direct_double\plusone\plusone
- {\iffirstargument #topcategory\else\v!top \fi}%
- {\ifsecondargument#bottomcategory\else\v!bottom\fi}}
+\def\math_stackers_define_double[#category][#command][#topcode][#bottomcode]%
+ {\setuvalue{#command}{\math_stackers_handle_direct\plusone\plusone{#category}{#topcode}{#bottomcode}}}
-\def\math_stackers_direct_double#top#bottom#category#codepoint#text%
- {\math_stackers_make_double#top#bottom{#category}{#codepoint}{0}{#text}%
+\unexpanded\def\mathover {\begingroup\dosingleempty\math_stackers_handle_over }
+\unexpanded\def\mathunder {\begingroup\dosingleempty\math_stackers_handle_under }
+\unexpanded\def\mathdouble{\begingroup\dosingleempty\math_stackers_handle_double}
+
+\def\math_stackers_handle_over[#category]#topcode#text%
+ {\math_stackers_make_double\plusone\zerocount
+ {\iffirstargument#category\else\v!top\fi}%
+ {#topcode}%
+ {0}%
+ {#text}%
+ \endgroup}
+
+\def\math_stackers_handle_under[#category]#bottomcode#text%
+ {\math_stackers_make_double\zerocount\plusone
+ {\iffirstargument#category\else\v!bottom\fi}%
+ {#bottomcode}%
+ {0}%
+ {#text}%
+ \endgroup}
+
+\def\math_stackers_handle_double[#category]#topcode#bottomcode#text%
+ {\math_stackers_make_double\plusone\plusone
+ {\iffirstargument#category\else\v!both\fi}%
+ {#topcode}%
+ {#bottomcode}%
+ {#text}%
+ \endgroup}
+
+\def\math_stackers_handle_direct#top#bottom#category#topcode#bottomcode#text%
+ {\begingroup
+ \math_stackers_make_double#top#bottom{#category}{#topcode}{#bottomcode}{#text}%
\endgroup}
%D A relatively new one is a combination of accents and text (as needed in mathml):
@@ -981,10 +1065,10 @@
%D Here come the new ones:
\definemathstackers [\v!none] [\v!mathematics] [\c!hoffset=\zeropoint]
-\definemathstackers [\v!normal] [\v!mathematics] [\c!hoffset=0.5\emwidth] % the default
-\definemathstackers [\v!small] [\v!mathematics] [\c!hoffset=1\emwidth]
-\definemathstackers [\v!medium] [\v!mathematics] [\c!hoffset=1.5\emwidth]
-\definemathstackers [\v!big] [\v!mathematics] [\c!hoffset=2\emwidth]
+\definemathstackers [\v!normal] [\v!mathematics] [\c!hoffset=0.5\mathemwidth] % the default
+\definemathstackers [\v!small] [\v!mathematics] [\c!hoffset=1\mathemwidth]
+\definemathstackers [\v!medium] [\v!mathematics] [\c!hoffset=1.5\mathemwidth]
+\definemathstackers [\v!big] [\v!mathematics] [\c!hoffset=2\mathemwidth]
\definemathextensible [\v!reverse] [xrel] ["002D]
\definemathextensible [\v!reverse] [xequal] ["003D]
@@ -1146,7 +1230,7 @@
[\c!offset=\v!max,
\c!left=\enspace,
\c!right=\enspace,
- \c!hoffset=.5\emwidth]
+ \c!hoffset=.5\mathemwidth]
\definemathextensible [\v!chemistry] [cleftarrow] ["2190]
\definemathextensible [\v!chemistry] [crightarrow] ["2192]
@@ -1197,14 +1281,14 @@
\unexpanded\def\defineextensiblefiller
{\dodoubleargument\math_stackers_define_filler}
-\def\math_stackers_define_filler[#1][#2]%
- {\setuevalue{#1}{\leaders\number#2\hfill}}
+% \def\math_stackers_define_filler[#1][#2]%
+% {\setuevalue{#1}{\leaders\number#2\hfill}}
-%D For the moment:
+%D For the moment (needs checking):
\def\math_stackers_define_filler[#1][#2]%
{\expandafter\let\csname\??mathextensiblefallbacks\number#2\expandafter\endcsname\csname#1\endcsname
- \expandafter\let\csname #1\expandafter\endcsname\csname#1\endcsname}
+ \expandafter\let\csname #1\expandafter\endcsname\csname#1\endcsname} % huh?
\defineextensiblefiller [barfill] ["203E]
\defineextensiblefiller [relfill] ["002D]
@@ -1283,6 +1367,18 @@
\definemathunstacked [\v!wide] [implies] ["27F9] % \mathrel{\;\Longrightarrow\;}
\definemathunstacked [\v!wide] [iff] ["27FA] % \mathrel{\;\Longleftrightarrow\;}
+% New (an example of using \mathexheight):
+
+\definemathstackers
+ [\v!symbol]
+ [\c!voffset=-.3\mathexheight,
+ \c!hoffset=\zeropoint,
+ \c!mathclass=ord,
+ \c!topoffset=.4\mathemwidth, % poor man's italic correction
+ \c!middlecommand=\mathematics]
+
+\definemathover[\v!symbol][interiorset]["2218]
+
\protect \endinput
% \mathrel{\mathop{\hbox to \dimen0{\hss\copy4\hss}}
diff --git a/tex/context/base/mkiv/math-tag.lua b/tex/context/base/mkiv/math-tag.lua
index 13c8fffc7..d1ed90d38 100644
--- a/tex/context/base/mkiv/math-tag.lua
+++ b/tex/context/base/mkiv/math-tag.lua
@@ -33,6 +33,10 @@ local setattr = nuts.setattr
local getcomponents = nuts.getcomponents
local getwidth = nuts.getwidth
+local getnucleus = nuts.getnucleus
+local getsub = nuts.getsub
+local getsup = nuts.getsup
+
local set_attributes = nuts.setattributes
local traverse_nodes = nuts.traverse
@@ -60,6 +64,7 @@ local math_fixed_both = accentcodes.fixedboth
local kerncodes = nodes.kerncodes
local fontkern_code = kerncodes.fontkern
+local italickern_code = kerncodes.italickern
local hlist_code = nodecodes.hlist
local vlist_code = nodecodes.vlist
@@ -74,7 +79,6 @@ local processnoads = noads.process
local a_tagged = attributes.private('tagged')
local a_mathcategory = attributes.private('mathcategory')
local a_mathmode = attributes.private('mathmode')
-local a_fontkern = attributes.private('fontkern')
local tags = structures.tags
@@ -85,7 +89,7 @@ local taglist = tags.taglist
local chardata = characters.data
-local getmathcode = tex.getmathcode
+local getmathcodes = tex.getmathcodes
local mathcodes = mathematics.codes
local ordinary_code = mathcodes.ordinary
local variable_code = mathcodes.variable
@@ -101,9 +105,9 @@ local function processsubsup(start)
-- At some point we might need to add an attribute signaling the
-- super- and subscripts because TeX and MathML use a different
-- order. The mrows are needed to keep mn's separated.
- local nucleus = getfield(start,"nucleus")
- local sup = getfield(start,"sup")
- local sub = getfield(start,"sub")
+ local nucleus = getnucleus(start)
+ local sup = getsup(start)
+ local sub = getsub(start)
if sub then
if sup then
setattr(start,a_tagged,start_tagged("msubsup"))
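The new getnucleus/getsub/getsup accessors feed the tag choice that follows: which MathML element is opened depends on which scripts are present. As a hedged, illustrative sketch (not the actual ConTeXt helper), the decision amounts to:

    -- illustrative only: pick the MathML tag from the scripts that are present
    local function subsup_tagname(sub,sup)
        if sub and sup then
            return "msubsup"   -- both scripts present
        elseif sub then
            return "msub"      -- subscript only
        elseif sup then
            return "msup"      -- superscript only
        else
            return nil         -- no scripts: only the nucleus is processed
        end
    end
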
@@ -190,7 +194,7 @@ process = function(start) -- we cannot use the processor as we have no finalizer
mtexttag = start_tagged("mtext")
end
setattr(start,a_tagged,mtexttag)
- elseif mtexttag and id == kern_code and (getsubtype(start) == fontkern_code or getattr(start,a_fontkern)) then
+ elseif mtexttag and id == kern_code and (getsubtype(start) == fontkern_code or getsubtype(start) == italickern_code) then -- italickern
setattr(start,a_tagged,mtexttag)
else
if mtexttag then
@@ -199,10 +203,7 @@ process = function(start) -- we cannot use the processor as we have no finalizer
end
if id == math_char_code then
local char = getchar(start)
- local code = getmathcode(char)
- if code then
- code = code[1]
- end
+ local code = getmathcodes(char)
local tag
if code == ordinary_code or code == variable_code then
local ch = chardata[char]
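The switch from getmathcode to getmathcodes avoids building a table per character: the singular accessor returns a { class, family, slot } table, the plural one returns the three values separately, and the code above only keeps the first. A small comparison, assuming LuaTeX's tex library (not part of the patch):

    -- both forms yield the same math class for a character code
    local t        = tex.getmathcode(0x61)    -- { class, family, slot }
    local oldclass = t and t[1]
    local newclass = tex.getmathcodes(0x61)   -- extra return values are discarded
    assert(oldclass == newclass)
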
@@ -247,95 +248,102 @@ process = function(start) -- we cannot use the processor as we have no finalizer
elseif id == math_box_code or id == hlist_code or id == vlist_code then
-- keep an eye on math_box_code and see what ends up in there
local attr = getattr(start,a_tagged)
+if not attr then
+ -- just skip
+else
local specification = taglist[attr]
- local tag = specification.tagname
- if tag == "formulacaption" then
- -- skip
- elseif tag == "mstacker" then
- local list = getlist(start)
- if list then
- process(list)
- end
- else
- if tag ~= "mstackertop" and tag ~= "mstackermid" and tag ~= "mstackerbot" then
- tag = "mtext"
- end
- local text = start_tagged(tag)
- setattr(start,a_tagged,text)
- local list = getlist(start)
- if not list then
- -- empty list
- elseif not attr then
- -- box comes from strange place
- set_attributes(list,a_tagged,text) -- only the first node ?
+ if specification then
+ local tag = specification.tagname
+ if tag == "formulacaption" then
+ -- skip
+ elseif tag == "mstacker" then
+ local list = getlist(start)
+ if list then
+ process(list)
+ end
else
- -- Beware, the first node in list is the actual list so we definitely
- -- need to nest. This approach is a hack, maybe I'll make a proper
- -- nesting feature to deal with this at another level. Here we just
- -- fake structure by enforcing the inner one.
- --
- -- todo: have a local list with local tags that then get appended
- --
- local tagdata = specification.taglist
- local common = #tagdata + 1
- local function runner(list,depth) -- quite inefficient
- local cache = { } -- we can have nested unboxed mess so best local to runner
- local keep = nil
- -- local keep = { } -- win case we might need to move keep outside
- for n in traverse_nodes(list) do
- local id = getid(n)
- local mth = id == math_code and getsubtype(n)
- if mth == 0 then
- -- insert(keep,text)
- keep = text
- text = start_tagged("mrow")
- common = common + 1
- end
- local aa = getattr(n,a_tagged)
- if aa then
- local ac = cache[aa]
- if not ac then
- local tagdata = taglist[aa].taglist
- local extra = #tagdata
- if common <= extra then
- for i=common,extra do
- ac = restart_tagged(tagdata[i]) -- can be made faster
- end
- for i=common,extra do
- stop_tagged() -- can be made faster
+ if tag ~= "mstackertop" and tag ~= "mstackermid" and tag ~= "mstackerbot" then
+ tag = "mtext"
+ end
+ local text = start_tagged(tag)
+ setattr(start,a_tagged,text)
+ local list = getlist(start)
+ if not list then
+ -- empty list
+ elseif not attr then
+ -- box comes from strange place
+ set_attributes(list,a_tagged,text) -- only the first node ?
+ else
+ -- Beware, the first node in list is the actual list so we definitely
+ -- need to nest. This approach is a hack, maybe I'll make a proper
+ -- nesting feature to deal with this at another level. Here we just
+ -- fake structure by enforcing the inner one.
+ --
+ -- todo: have a local list with local tags that then get appended
+ --
+ local tagdata = specification.taglist
+ local common = #tagdata + 1
+ local function runner(list,depth) -- quite inefficient
+ local cache = { } -- we can have nested unboxed mess so best local to runner
+ local keep = nil
+ -- local keep = { } -- in case we might need to move keep outside
+ for n in traverse_nodes(list) do
+ local id = getid(n)
+ local mth = id == math_code and getsubtype(n)
+ if mth == 0 then
+ -- insert(keep,text)
+ keep = text
+ text = start_tagged("mrow")
+ common = common + 1
+ end
+ local aa = getattr(n,a_tagged)
+ if aa then
+ local ac = cache[aa]
+ if not ac then
+ local tagdata = taglist[aa].taglist
+ local extra = #tagdata
+ if common <= extra then
+ for i=common,extra do
+ ac = restart_tagged(tagdata[i]) -- can be made faster
+ end
+ for i=common,extra do
+ stop_tagged() -- can be made faster
+ end
+ else
+ ac = text
end
- else
- ac = text
+ cache[aa] = ac
end
- cache[aa] = ac
+ setattr(n,a_tagged,ac)
+ else
+ setattr(n,a_tagged,text)
end
- setattr(n,a_tagged,ac)
- else
- setattr(n,a_tagged,text)
- end
- if id == hlist_code or id == vlist_code then
- runner(getlist(n),depth+1)
- elseif id == glyph_code then
- runner(getcomponents(n),depth+1) -- this should not be needed
- elseif id == disc_code then
- local pre, post, replace = getdisc(n)
- runner(pre,depth+1) -- idem
- runner(post,depth+1) -- idem
- runner(replace,depth+1) -- idem
- end
- if mth == 1 then
- stop_tagged()
- -- text = remove(keep)
- text = keep
- common = common - 1
+ if id == hlist_code or id == vlist_code then
+ runner(getlist(n),depth+1)
+ elseif id == glyph_code then
+ -- this should not be needed (todo: use tounicode info)
+ runner(getcomponents(n),depth+1)
+ elseif id == disc_code then
+ local pre, post, replace = getdisc(n)
+ runner(pre,depth+1) -- idem
+ runner(post,depth+1) -- idem
+ runner(replace,depth+1) -- idem
+ end
+ if mth == 1 then
+ stop_tagged()
+ -- text = remove(keep)
+ text = keep
+ common = common - 1
+ end
end
end
+ runner(list,0)
end
- runner(list,0)
+ stop_tagged()
end
- stop_tagged()
end
+end
elseif id == math_sub_code then -- normally a hbox
local list = getlist(start)
if list then
@@ -420,7 +428,7 @@ process = function(start) -- we cannot use the processor as we have no finalizer
-- left
local properties = { }
insert(fencesstack,properties)
- setattr(start,a_tagged,start_tagged("mfenced",{ properties = properties })) -- needs checking
+ setattr(start,a_tagged,start_tagged("mfenced",properties)) -- needs checking
if delim then
start_tagged("ignore")
local chr = getfield(delim,"small_char")
diff --git a/tex/context/base/mkiv/math-vfu.lua b/tex/context/base/mkiv/math-vfu.lua
index a8a789d28..4767ffa90 100644
--- a/tex/context/base/mkiv/math-vfu.lua
+++ b/tex/context/base/mkiv/math-vfu.lua
@@ -22,9 +22,8 @@ if not modules then modules = { } end modules ['math-vfu'] = {
-- 20D6 -> 2190
-- 20D7 -> 2192
-local type, next = type, next
+local type, next, tonumber = type, next, tonumber
local max = math.max
-local format = string.format
local fastcopy = table.copy
local fonts, nodes, mathematics = fonts, nodes, mathematics
@@ -603,22 +602,24 @@ local function copy_glyph(main,target,original,unicode,slot)
local olddata = original[unicode]
if olddata then
local newdata = {
- width = olddata.width,
- height = olddata.height,
- depth = olddata.depth,
- italic = olddata.italic,
- kerns = olddata.kerns,
- commands = { { "slot", slot, unicode } },
+ width = olddata.width,
+ height = olddata.height,
+ depth = olddata.depth,
+ italic = olddata.italic,
+ kerns = olddata.kerns,
+ tounicode = olddata.tounicode,
+ commands = { { "slot", slot, unicode } },
}
local glyphdata = newdata
local nextglyph = olddata.next
while nextglyph do
local oldnextdata = original[nextglyph]
local newnextdata = {
- commands = { { "slot", slot, nextglyph } },
- width = oldnextdata.width,
- height = oldnextdata.height,
- depth = oldnextdata.depth,
+ width = oldnextdata.width,
+ height = oldnextdata.height,
+ depth = oldnextdata.depth,
+ tounicode = olddata.tounicode,
+ commands = { { "slot", slot, nextglyph } },
}
local newnextglyph = addprivate(main,formatters["M-N-%H"](nextglyph),newnextdata)
newdata.next = newnextglyph
@@ -641,10 +642,11 @@ local function copy_glyph(main,target,original,unicode,slot)
local oldglyph = hvi.glyph
local olddata = original[oldglyph]
local newdata = {
- commands = { { "slot", slot, oldglyph } },
- width = olddata.width,
- height = olddata.height,
- depth = olddata.depth,
+ width = olddata.width,
+ height = olddata.height,
+ depth = olddata.depth,
+ tounicode = olddata.tounicode,
+ commands = { { "slot", slot, oldglyph } },
}
hvi.glyph = addprivate(main,formatters["M-H-%H"](oldglyph),newdata)
-- report_virtual("copied h variant: %X at index %i",hvi.glyph,i)
@@ -659,10 +661,11 @@ local function copy_glyph(main,target,original,unicode,slot)
local oldglyph = vvi.glyph
local olddata = original[oldglyph]
local newdata = {
- commands = { { "slot", slot, oldglyph } },
- width = olddata.width,
- height = olddata.height,
- depth = olddata.depth,
+ width = olddata.width,
+ height = olddata.height,
+ depth = olddata.depth,
+ tounicode = olddata.tounicode,
+ commands = { { "slot", slot, oldglyph } },
}
vvi.glyph = addprivate(main,formatters["M-V-%H"](oldglyph),newdata)
-- report_virtual("copied v variant: %X at index %i",vvi.glyph,i)
diff --git a/tex/context/base/mkiv/meta-fnt.lua b/tex/context/base/mkiv/meta-fnt.lua
index 95bdfa6d9..d061c926a 100644
--- a/tex/context/base/mkiv/meta-fnt.lua
+++ b/tex/context/base/mkiv/meta-fnt.lua
@@ -6,6 +6,7 @@ if not modules then modules = { } end modules ['meta-fnt'] = {
license = "see context related readme files"
}
+local next = next
local concat = table.concat
local format = string.format
local formatters = string.formatters
@@ -66,7 +67,7 @@ local flusher = {
if inline then
characters[slot] = {
commands = {
- { "special", "pdf:" .. code },
+ { "pdf", "origin", code },
}
}
else
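The inline branch above now uses the structured "pdf" packet command instead of smuggling the literal through a special. A hedged comparison of the two command lists (the literal itself is just an example):

    -- "origin" makes the literal be interpreted relative to the glyph origin
    local code     = "0 0 5 5 re f"                      -- example PDF literal
    local oldstyle = { { "special", "pdf:" .. code } }   -- old: via a \special
    local newstyle = { { "pdf", "origin", code } }       -- new: explicit packet command
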
@@ -190,7 +191,7 @@ statistics.register("metapost font generation", function()
if total > 0 then
local time = statistics.elapsedtime(flusher)
if total > 0 then
- return format("%i glyphs, %.3f seconds runtime, %i glyphs/second", total, time, total/time)
+ return format("%i glyphs, %.3f seconds runtime, %.1f glyphs/second", total, time, total/time)
else
return format("%i glyphs, %.3f seconds runtime", total, time)
end
@@ -201,7 +202,7 @@ statistics.register("metapost font loading",function()
if variants > 0 then
local time = statistics.elapsedtime(metapost.fonts)
if variants > 0 then
- return format("%.3f seconds, %i instances, %0.3f instances/second", time, variants, variants/time)
+ return format("%.3f seconds, %i instances, %.3f instances/second", time, variants, variants/time)
else
return format("%.3f seconds, %i instances", time, variants)
end
diff --git a/tex/context/base/mkiv/meta-fnt.mkiv b/tex/context/base/mkiv/meta-fnt.mkiv
index e54c0be0a..c248f752b 100644
--- a/tex/context/base/mkiv/meta-fnt.mkiv
+++ b/tex/context/base/mkiv/meta-fnt.mkiv
@@ -13,7 +13,7 @@
\writestatus{loading}{MetaPost Graphics / Fonts}
-\registerctxluafile{meta-fnt}{1.001}
+\registerctxluafile{meta-fnt}{}
\unprotect
diff --git a/tex/context/base/mkiv/meta-fun.lua b/tex/context/base/mkiv/meta-fun.lua
index 7aaaf2818..ddbbd9a52 100644
--- a/tex/context/base/mkiv/meta-fun.lua
+++ b/tex/context/base/mkiv/meta-fun.lua
@@ -10,7 +10,8 @@ if not modules then modules = { } end modules ['meta-fun'] = {
local format, load, type = string.format, load, type
-local metapost = metapost
+local context = context
+local metapost = metapost
metapost.metafun = metapost.metafun or { }
local metafun = metapost.metafun
diff --git a/tex/context/base/mkiv/meta-fun.mkiv b/tex/context/base/mkiv/meta-fun.mkiv
index c3a4ce8d4..e30660392 100644
--- a/tex/context/base/mkiv/meta-fun.mkiv
+++ b/tex/context/base/mkiv/meta-fun.mkiv
@@ -13,7 +13,7 @@
\writestatus{loading}{MetaPost Graphics / Goodies}
-\registerctxluafile{meta-fun}{1.001}
+\registerctxluafile{meta-fun}{}
\unprotect
diff --git a/tex/context/base/mkiv/meta-grd.mkiv b/tex/context/base/mkiv/meta-grd.mkiv
index da410ba68..8b1a934d6 100644
--- a/tex/context/base/mkiv/meta-grd.mkiv
+++ b/tex/context/base/mkiv/meta-grd.mkiv
@@ -84,9 +84,9 @@
\def\typo_grid_base[#1]%
{\hbox\bgroup
\getdummyparameters
- [\c!nx=10,\c!ny=10,\c!dx=.5,\c!dy=.5,\c!xstep=0,\c!ystep=0,
- \c!unit=\s!cm,\c!scale=1,\c!factor=1,
- \c!offset=.25ex,\c!xoffset=\directdummyparameter\c!offset,\c!yoffset=\directdummyparameter\c!offset,
+ [\c!nx=10,\c!ny=10,\c!dx=.5,\c!dy=.5,\c!xstep=0,\c!ystep=0,%
+ \c!unit=\s!cm,\c!scale=1,\c!factor=1,%
+ \c!offset=.25ex,\c!xoffset=\directdummyparameter\c!offset,\c!yoffset=\directdummyparameter\c!offset,%
\c!align=,
#1]%
\usedummystyleandcolor\c!style\c!color
diff --git a/tex/context/base/mkiv/meta-imp-mat.mkiv b/tex/context/base/mkiv/meta-imp-mat.mkiv
index 8913845fd..11a9ded34 100644
--- a/tex/context/base/mkiv/meta-imp-mat.mkiv
+++ b/tex/context/base/mkiv/meta-imp-mat.mkiv
@@ -18,6 +18,7 @@
% / for cambria
\startMPextensions
+
vardef math_stacker_overbracket_shape =
image (
draw
@@ -78,72 +79,107 @@
vardef math_stacker_underbar_shape =
math_stacker_overbar_shape rotated 180
enddef ;
- vardef math_stacker_arrow_shape =
+
+ % arrows
+
+ vardef math_stacker_arrow_shape(expr axis, ex, em) =
image (
+ interim ahlength := ex/2;
drawarrow
- (OverlayWidth,OverlayOffset) -- (0,OverlayOffset)
+ ((OverlayWidth,ex/2) -- (0,ex/2))
+ withcolor
+ OverlayLineColor ;
+ setbounds currentpicture to unitsquare xysized(OverlayWidth,ex) ;
+ )
+ enddef ;
+ vardef math_stacker_leftrightarrow_shape(expr axis, ex, em) =
+ image (
+ interim ahlength := ex/2;
+ drawdblarrow
+ ((OverlayWidth,ex/2) -- (0,ex/2))
withcolor
OverlayLineColor ;
- setbounds currentpicture to boundingbox currentpicture bottomenlarged (OverlayOffset/2) topenlarged (OverlayOffset/2) ;
+ setbounds currentpicture to unitsquare xysized(OverlayWidth,ex) ;
)
enddef ;
- vardef math_stacker_leftarrow_shape =
- math_stacker_arrow_shape
+ vardef math_stacker_rightoverleftarrow_shape(expr axis, ex, em) =
+ image (
+ interim ahlength := ex/2;
+ drawdoublearrows
+ ((OverlayWidth,ex/2) -- (0,ex/2))
+ withcolor
+ OverlayLineColor ;
+ setbounds currentpicture to unitsquare xysized(OverlayWidth,ex) ;
+ )
enddef ;
- vardef math_stacker_rightarrow_shape =
- math_stacker_arrow_shape rotated 180
+ vardef math_stacker_leftarrow_shape(expr axis, ex, em) =
+ math_stacker_arrow_shape(axis,ex,em)
enddef ;
- def math_stacker_draw(expr p) =
- draw
- p
- withpen
- pencircle
- xscaled (2OverlayLineWidth)
- % yscaled (3OverlayLineWidth/4)
- yscaled (3OverlayLineWidth)
- % rotated 30 ;
- rotated 45 ;
+ vardef math_stacker_rightarrow_shape(expr axis, ex, em) =
+ math_stacker_arrow_shape(axis,ex,em) rotated 180
+ enddef ;
+
+ % main handler
+
+ def math_stacker_draw_accent(expr p) =
+ draw p withpen pencircle scaled OverlayLineWidth xyscaled (2,3) rotated 45 ;
+ enddef ;
+
+ def math_stacker_draw_arrow(expr p) =
+ draw p withpen pencircle scaled OverlayLineWidth ;
enddef ;
\stopMPextensions
-\startuniqueMPgraphic{math:stacker:\number"FE3B4}
- math_stacker_draw(math_stacker_overbracket_shape) ;
+\startuniqueMPgraphic{math:stacker:\number"FE3B4}{axis,ex,em}
+ math_stacker_draw_accent(math_stacker_overbracket_shape) ;
+\stopuniqueMPgraphic
+
+\startuniqueMPgraphic{math:stacker:\number"FE3B5}{axis,ex,em}
+ math_stacker_draw_accent(math_stacker_underbracket_shape) ;
+\stopuniqueMPgraphic
+
+\startuniqueMPgraphic{math:stacker:\number"FE3DC}{axis,ex,em}
+ math_stacker_draw_accent(math_stacker_overparent_shape) ;
+\stopuniqueMPgraphic
+
+\startuniqueMPgraphic{math:stacker:\number"FE3DD}{axis,ex,em}
+ math_stacker_draw_accent(math_stacker_underparent_shape) ;
\stopuniqueMPgraphic
-\startuniqueMPgraphic{math:stacker:\number"FE3B5}
- math_stacker_draw(math_stacker_underbracket_shape) ;
+\startuniqueMPgraphic{math:stacker:\number"FE3DE}{axis,ex,em}
+ math_stacker_draw_accent(math_stacker_overbrace_shape) ;
\stopuniqueMPgraphic
-\startuniqueMPgraphic{math:stacker:\number"FE3DC}
- math_stacker_draw(math_stacker_overparent_shape) ;
+\startuniqueMPgraphic{math:stacker:\number"FE3DF}{axis,ex,em}
+ math_stacker_draw_accent(math_stacker_underbrace_shape) ;
\stopuniqueMPgraphic
-\startuniqueMPgraphic{math:stacker:\number"FE3DD}
- math_stacker_draw(math_stacker_underparent_shape) ;
+\startuniqueMPgraphic{math:stacker:\number"FE33E}{axis,ex,em}
+ math_stacker_draw_accent(math_stacker_overbar_shape) ;
\stopuniqueMPgraphic
-\startuniqueMPgraphic{math:stacker:\number"FE3DE}
- math_stacker_draw(math_stacker_overbrace_shape) ;
+\startuniqueMPgraphic{math:stacker:\number"FE33F}{axis,ex,em}
+ math_stacker_draw_accent(math_stacker_underbar_shape) ;
\stopuniqueMPgraphic
-\startuniqueMPgraphic{math:stacker:\number"FE3DF}
- math_stacker_draw(math_stacker_underbrace_shape) ;
+\startuniqueMPgraphic{math:stacker:\number"2190}{axis,ex,em}
+ math_stacker_draw_arrow(math_stacker_leftarrow_shape(\MPvar{axis},\MPvar{ex},\MPvar{em})) ;
\stopuniqueMPgraphic
-\startuniqueMPgraphic{math:stacker:\number"FE33E}
- math_stacker_draw(math_stacker_overbar_shape) ;
+\startuniqueMPgraphic{math:stacker:\number"2192}{axis,ex,em}
+ math_stacker_draw_arrow(math_stacker_rightarrow_shape(\MPvar{axis},\MPvar{ex},\MPvar{em})) ;
\stopuniqueMPgraphic
-\startuniqueMPgraphic{math:stacker:\number"FE33F}
- math_stacker_draw(math_stacker_underbar_shape) ;
+\startuniqueMPgraphic{math:stacker:\number"2194}{axis,ex,em}
+ math_stacker_draw_arrow(math_stacker_leftrightarrow_shape(\MPvar{axis},\MPvar{ex},\MPvar{em})) ;
\stopuniqueMPgraphic
-\startuniqueMPgraphic{math:stacker:\number"2190}
- math_stacker_draw(math_stacker_leftarrow_shape) ;
+\startuniqueMPgraphic{math:stacker:\number"27F7}{axis,ex,em}
+ math_stacker_draw_arrow(math_stacker_leftrightarrow_shape(\MPvar{axis},\MPvar{ex},\MPvar{em})) ;
\stopuniqueMPgraphic
-\startuniqueMPgraphic{math:stacker:\number"2192}
- math_stacker_draw(math_stacker_rightarrow_shape) ;
+\startuniqueMPgraphic{math:stacker:\number"21C4}{axis,ex,em}
+ math_stacker_draw_arrow(math_stacker_rightoverleftarrow_shape(\MPvar{axis},\MPvar{ex},\MPvar{em})) ;
\stopuniqueMPgraphic
%D Radicals:
@@ -158,7 +194,7 @@
enddef ;
\stopMPextensions
-\startuniqueMPgraphic{math:radical:default}%{...}
+\startuniqueMPgraphic{math:radical:default}{axis,ex,em}
draw
math_radical_simple(OverlayWidth,OverlayHeight,OverlayDepth,OverlayOffset)
withpen pencircle xscaled (2OverlayLineWidth) yscaled (3OverlayLineWidth/4) rotated 30
diff --git a/tex/context/base/mkiv/meta-imp-nodes.mkiv b/tex/context/base/mkiv/meta-imp-nodes.mkiv
deleted file mode 100644
index 2555fcaa2..000000000
--- a/tex/context/base/mkiv/meta-imp-nodes.mkiv
+++ /dev/null
@@ -1,34 +0,0 @@
-%D \module
-%D [ file=meta-imp-nodes,
-%D version=2016.11.23,
-%D title=\METAPOST\ Graphics,
-%D subtitle=Nodes,
-%D author=Alan Braslau and Hans Hagen,
-%D date=\currentdate,
-%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
-%C
-%C This module is part of the \CONTEXT\ macro||package and is
-%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
-%C details.
-
-%D We might add more here. Also, the node module might get preloaded in
-%D all instances.
-
-\unprotect
-
-\defineMPinstance
- [nodes]
- [\s!format=metafun,
- \s!extensions=\v!yes,
- \s!initializations=\v!yes,
- \c!method=\s!double]
-
-\defineframed
- [node]
- [\c!frame=\v!off]
-
-\startMPdefinitions{nodes}
- loadmodule "node" ;
-\stopMPdefinitions
-
-\protect
diff --git a/tex/context/base/mkiv/meta-imp-txt.mkiv b/tex/context/base/mkiv/meta-imp-txt.mkiv
index b2a6d6d1d..56da9229d 100644
--- a/tex/context/base/mkiv/meta-imp-txt.mkiv
+++ b/tex/context/base/mkiv/meta-imp-txt.mkiv
@@ -11,15 +11,12 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-%D In this library some handy text manipulations are
-%D defined. Some can and will be improved as soon as the
-%D \TEX||\METAPOST\ interface is stable. Some of the
-%D solutions may look weird, which is entirely my fault,
-%D since I implemented them in the process of getting grip
-%D on this kind of manipulations. Undoubtly better
-%D \METAPOST\ code is possible, but my way of learning
-%D this kind of trickery happens to be by \quote {trial
-%D and error} and \quote {look and feel} (as well as
+%D In this library some handy text manipulations are defined. Some can and will be
+%D improved as soon as the \TEX||\METAPOST\ interface is stable. Some of the
+%D solutions may look weird, which is entirely my fault, since I implemented them in
+%D the process of getting a grip on this kind of manipulation. Undoubtedly better
+%D \METAPOST\ code is possible, but my way of learning this kind of trickery happens
+%D to be by \quote {trial and error} and \quote {look and feel} (as well as
%D identifying tricks in Hobby's code).
% textext ipv btex ... etex
@@ -30,112 +27,102 @@
\definesystemvariable {sh} % ShapedText .. todo: commandhandler
+\unexpanded\def\setupshapetexts
+ {\dodoubleempty\getparameters[\??sh]}
+
+\setupshapetexts
+ [\c!bodyfont=]
+
\startMPextensions
loadmodule "text" ;
\stopMPextensions
-%%%%%%%
-
-% \def\newchar#1{\chardef#1=0 }
-
-\ifdefined\MPtoks \else \newtoks\MPtoks \fi
-\ifdefined\MPbox \else \newbox \MPbox \fi
-
-\ifdefined\parwidth \else \newdimen\parwidth \fi
-\ifdefined\parheight \else \newdimen\parheight \fi
-\ifdefined\parvoffset \else \newdimen\parvoffset \fi
-\ifdefined\parhoffset \else \newdimen\parhoffset \fi
-\ifdefined\parlines \else \newcount\parlines \fi
-\ifdefined\partoks \else \newtoks \partoks \fi
-\ifdefined\shapetextbox \else \newbox \shapetextbox \fi
- \newif \ifparseries
-\ifdefined\parfirst \else \chardef \parfirst=0 \fi
+\ifdefined\parwidth \else
+ \newdimen\parwidth
+ \newdimen\parheight
+ \newdimen\parvoffset
+ \newdimen\parhoffset
+ \newcount\parlines
+ \newtoks \partoks
+ \newbox \shapetextbox
+ \newcount\parfirst
+\fi
\unexpanded\def\startshapetext[#1]%
{\global\newcounter\currentshapetext
\global\setbox\shapetextbox\vbox\bgroup
- \expanded{\switchtobodyfont[\@@shbodyfont]}%
+ \switchtobodyfont[\@@shbodyfont]%
\dontcomplain
\hsize\parwidth
\setuptolerance[\v!verytolerant,\v!stretch]%
- \!!counta\zerocount
- \!!toksa\emptytoks
+ \scratchcounter\zerocount
+ \scratchtoks\emptytoks
\def\docommand##1%
{\setbox\scratchbox\hbox{\useMPgraphic{##1}}%
- \global\chardef\parfirst\zerocount
- \getMPdata % \readlocfile{\MPdatafile}{}{}%
+ \global\parfirst\zerocount
+ \getMPdata
\setshapecharacteristics
- \advance\!!counta by \parlines
- \expandafter\appendtoks\the\partoks\to\!!toksa}%
+ \advance\scratchcounter by \parlines
+ \expandafter\appendtoks\the\partoks\to\scratchtoks}%
\processcommalist[#1]\docommand
- \global\parseriestrue
- \xdef\totalparlines{\the\!!counta}%
- \global\partoks\!!toksa
- %\ifx\partoks\emptytoks\else % safeguard
- \expanded{\parshape \the\!!counta \the\!!toksa}%
- %\fi
+ \xdef\totalparlines{\the\scratchcounter}%
+ \global\partoks\scratchtoks
+ \parshape \the\scratchcounter \the\scratchtoks\relax
\setshapecharacteristics % extra dummy
- \ifparseries\def\par{\endgraf\adaptparshape}\fi
- \EveryPar{\begstrut}}
+ \def\par{\endgraf\adaptparshape}%
+ \everypar{\begstrut}}
\unexpanded\def\stopshapetext
{\endstrut
- %\removebottomthings
\egroup
\global\newcounter\currentshapetext
\getshapecharacteristics}
\unexpanded\def\adaptparshape
{\def\docommand##1%
- {\ifcase\!!counta
- \expandafter\appendtoks\space##1 \to\!!toksa
+ {\ifcase\scratchcounter
+ \expandafter\appendtoks\space##1 \to\scratchtoks
\else
- \advance\!!counta \minusone
+ \advance\scratchcounter\minusone
\fi}%
- \!!counta\prevgraf
- \doglobal\decrement(\totalparlines,\!!counta)%
- \multiply\!!counta \plustwo
- \!!toksa\emptytoks
+ \scratchcounter\prevgraf
+ \doglobal\decrement(\totalparlines,\scratchcounter)%
+ \multiply\scratchcounter\plustwo
+ \scratchtoks\emptytoks
\expanded{\processseparatedlist[\the\partoks][\space]}\docommand
- \global\partoks\!!toksa
- %\ifx\partoks\emptytoks\else % safeguard
- \expanded{\parshape\totalparlines\the\partoks}%
- }%\fi}
+ \global\partoks\scratchtoks
+ \parshape\totalparlines\the\partoks\relax}
\unexpanded\def\getshapecharacteristics
{\doglobal\increment\currentshapetext
\doifelsedefined{parlines:\currentshapetext}
- {\global\parlines \getvalue{parlines:\currentshapetext}%
- \global\chardef\parfirst \getvalue{parfirst:\currentshapetext}%
- \global\parvoffset \getvalue{parvoffset:\currentshapetext}%
- \global\parhoffset \getvalue{parhoffset:\currentshapetext}%
- \global\parwidth \getvalue{parwidth:\currentshapetext}%
- \global\parheight \getvalue{parheight:\currentshapetext}}
- {\global\parlines \plusone
- \global\chardef\parfirst \zerocount
- \global\parvoffset \zeropoint
- \global\parhoffset \zeropoint
- \global\parwidth \hsize
- \global\parheight \vsize}}
+ {\getvalue{parlines:\currentshapetext}}
+ {\global\parlines \plusone
+ \global\parfirst \zerocount
+ \global\parvoffset\zeropoint
+ \global\parhoffset\zeropoint
+ \global\parwidth \hsize
+ \global\parheight \vsize}}
\unexpanded\def\setshapecharacteristics
{\doglobal\increment\currentshapetext
- \setxvalue{parlines:\currentshapetext }{\the\parlines}%
- \setxvalue{parfirst:\currentshapetext }{\the\parfirst}%
- \setxvalue{parvoffset:\currentshapetext}{\the\parvoffset}%
- \setxvalue{parhoffset:\currentshapetext}{\the\parhoffset}%
- \setxvalue{parwidth:\currentshapetext }{\the\parwidth}%
- \setxvalue{parheight:\currentshapetext }{\the\parheight}}
+ \setxvalue{parlines:\currentshapetext}%
+ {\global\parlines \the\parlines
+ \global\parfirst \the\parfirst
+ \global\parvoffset\the\parvoffset
+ \global\parhoffset\the\parhoffset
+ \global\parwidth \the\parwidth
+ \global\parheight \the\parheight}}
\unexpanded\def\getshapetext % option: unvbox
{\vbox\bgroup
\forgetall
\dontcomplain
\setbox\scratchbox\vbox to \parheight
- {\expanded{\switchtobodyfont[\@@shbodyfont]}% evt strutheight en
- \splittopskip\strutheight % lineheight opslaan
- \vskip\parvoffset % scheelt switch en
- \ifcase\parfirst\or\vskip\lineheight\fi % is ook veiliger
+ {\switchtobodyfont[\@@shbodyfont]%
+ \splittopskip\strutheight
+ \vskip\parvoffset
+ \ifcase\parfirst\else\vskip\lineheight\fi
\hskip\parhoffset
\hbox{\vsplit\shapetextbox to \parlines\lineheight}}%
\wd\scratchbox\parwidth
@@ -145,89 +132,8 @@
\getshapecharacteristics
\egroup}
-\unexpanded\def\setupshapetexts
- {\dodoubleempty\getparameters[\??sh]}
-
-\setupshapetexts
- [\c!bodyfont=]
-
\doifundefined{RotFont}{\definefont[RotFont][RegularBold*default]}
-% \startuseMPgraphic{followtokens}
-% % we default to nothing
-% \stopuseMPgraphic
-
-% \unexpanded\def\processfollowingtoken#1% strut toegevoegd
-% {\appendtoks#1\to\MPtoks
-% \setbox\MPbox=\hbox{\RotFont\setstrut\strut\the\MPtoks}%
-% \startMPdrawing
-% n := n + 1 ; len[n] := \the\wd\MPbox ;
-% \stopMPdrawing
-% \startMPdrawing[-]
-% % pic[n] := textext{\RotFont\setstrut\strut#1} ; % btex \RotFont\setstrut\strut#1 etex ;
-% pic[n] := btex \RotFont\setstrut\strut#1 etex ;
-% pic[n] := pic[n] shifted - llcorner pic[n] ;
-% \stopMPdrawing}
-%
-% \unexpanded\def\dofollowtokens#1#2%
-% {\vbox\bgroup
-% \forgetall
-% \dontcomplain
-% \doifundefined{RotFont}{\definefont[RotFont][RegularBold*default]}%
-% \MPtoks\emptytoks
-% \resetMPdrawing
-% \startMPdrawing
-% \includeMPgraphic{followtokens} ;
-% picture pic[] ; numeric len[], n ; n := 0 ;
-% \stopMPdrawing
-% \handletokens#2\with\processfollowingtoken
-% \startMPdrawing
-% if unknown RotPath : path RotPath ; RotPath := origin ; fi ;
-% if unknown RotColor : color RotColor ; RotColor := black ; fi ;
-% if unknown TraceRot : boolean TraceRot ; TraceRot := false ; fi ;
-% if unknown ExtraRot : numeric ExtraRot ; ExtraRot := 0 ; fi ;
-% numeric al, at, pl, pc, wid, pos ; pair ap, ad ;
-% al := arclength RotPath ;
-% if al=0 :
-% al := len[n] + ExtraRot ;
-% RotPath := origin -- (al,0) ;
-% fi ;
-% if al1 : (n-1) else : 1 fi) ;
-% pc := 0 ;
-% else : % centered / MP
-% pl := 0 ;
-% pc := arclength RotPath/2 - len[n]/2 ;
-% fi ;
-% if TraceRot :
-% draw RotPath withpen pencircle scaled 1pt withcolor blue ;
-% fi ;
-% for i=1 upto n :
-% wid := abs(xpart urcorner pic[i] - xpart llcorner pic[i]) ;
-% pos := len[i]-wid/2 + (i-1)*pl + pc ;
-% at := arctime pos of RotPath ;
-% ap := point at of RotPath ;
-% ad := direction at of RotPath ;
-% draw pic[i] shifted (-wid/2,0) rotated(angle(ad)) shifted ap
-% withcolor RotColor ;
-% if TraceRot :
-% draw boundingbox
-% pic[i] shifted (-wid/2,0) rotated(angle(ad)) shifted ap
-% withpen pencircle scaled .25pt withcolor red ;
-% draw ap
-% withpen pencircle scaled .50pt withcolor green ;
-% fi ;
-% endfor ;
-% \stopMPdrawing
-% \MPdrawingdonetrue
-% \getMPdrawing
-% \resetMPdrawing
-% \egroup}
-
\unexpanded\def\getfollowtoken#1%
{\hbox\bgroup
\strut
@@ -479,8 +385,7 @@
\setlocalhsize \noindent \reuseMPgraphic{EnglishRule}
\stoplinecorrection}
-%D The following macro returns a tight bound character
-%D sequence.
+%D The following macro returns a tight bound character sequence.
%D
%D \useMPlibrary[txt]
%D
diff --git a/tex/context/base/mkiv/meta-ini.lua b/tex/context/base/mkiv/meta-ini.lua
index d3865c433..6c4768671 100644
--- a/tex/context/base/mkiv/meta-ini.lua
+++ b/tex/context/base/mkiv/meta-ini.lua
@@ -8,138 +8,144 @@ if not modules then modules = { } end modules ['meta-ini'] = {
local tonumber = tonumber
local format = string.format
+local concat = table.concat
local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
local P, Cs, R, S, C, Cc = lpeg.P, lpeg.Cs, lpeg.R, lpeg.S, lpeg.C, lpeg.Cc
-local context = context
-
-metapost = metapost or { }
-
--- experimental
+metapost = metapost or { }
+local metapost = metapost
+local context = context
local colorhash = attributes.list[attributes.private('color')]
-
local textype = tex.type
local MPcolor = context.MPcolor
--- local validdimen = lpegpatterns.validdimen * P(-1)
---
--- function commands.prepareMPvariable(v) -- slow but ok
--- if v == "" then
--- MPcolor("black")
--- else
--- local typ, var = match(v,"(.):(.*)")
--- if not typ then
--- -- parse
--- if colorhash[v] then
--- MPcolor(v)
--- elseif tonumber(v) then
--- context(v)
--- elseif lpegmatch(validdimen,v) then
--- return context("\\the\\dimexpr %s",v)
--- else
--- for s in gmatch(v,"\\([a-zA-Z]+)") do -- can have trailing space
--- local t = textype(s)
--- if t == "dimen" then
--- return context("\\the\\dimexpr %s",v)
--- elseif t == "count" then
--- return context("\\the\\numexpr %s",v)
--- end
--- end
--- context("\\number %s",v) -- 0.4 ...
--- end
--- elseif typ == "d" then -- to be documented
--- -- dimension
--- context("\\the\\dimexpr %s",var)
--- elseif typ == "n" then -- to be documented
--- -- number
--- context("\\the\\numexpr %s",var)
--- elseif typ == "s" then -- to be documented
--- -- string
--- context(var)
--- elseif typ == "c" then -- to be documented
--- -- color
--- MPcolor(var)
--- else
--- context(var)
--- end
--- end
--- end
-
--- we can actually get the dimen/count values here
-
-local dimenorname =
- lpegpatterns.validdimen / function(s)
- context("\\the\\dimexpr %s",s)
- end
- + (C(lpegpatterns.float) + Cc(1)) * lpegpatterns.space^0 * P("\\") * C(lpegpatterns.letter^1) / function(f,s)
- local t = textype(s)
- if t == "dimen" then
- context("\\the\\dimexpr %s\\%s\\relax",f,s)
- elseif t == "count" then
- context("\\the\\numexpr \\%s * %s\\relax",s,f) -- \scratchcounter is not permitted
+do
+
+ local dimenorname =
+ lpegpatterns.validdimen / function(s)
+ context("\\the\\dimexpr %s",s)
+ end
+ + (C(lpegpatterns.float) + Cc(1)) * lpegpatterns.space^0 * P("\\") * C(lpegpatterns.letter^1) / function(f,s)
+ local t = textype(s)
+ if t == "dimen" then
+ context("\\the\\dimexpr %s\\%s\\relax",f,s)
+ elseif t == "count" then
+ context("\\the\\numexpr \\%s * %s\\relax",s,f) -- \scratchcounter is not permitted
+ end
end
- end
-local splitter = lpeg.splitat("::",true)
-
-interfaces.implement {
- name = "prepareMPvariable",
- arguments = "string",
- actions = function(v)
- if v == "" then
- -- MPcolor("black")
- context("black")
- else
- local typ, var = lpegmatch(splitter,v)
- if not var then
- -- parse
- if colorhash[v] then
- -- MPcolor(v)
+ local splitter = lpeg.splitat("::",true)
+
+ interfaces.implement {
+ name = "prepareMPvariable",
+ arguments = "string",
+ actions = function(v)
+ if v == "" then
+ -- MPcolor("black")
+ context("black")
+ else
+ local typ, var = lpegmatch(splitter,v)
+ if not var then
+ -- parse
+ if colorhash[v] then
+ -- MPcolor(v)
+ context("%q",var)
+ elseif tonumber(v) then
+ context(v)
+ elseif not lpegmatch(dimenorname,v) then
+ context("\\number %s",v) -- 0.4 ...
+ end
+ elseif typ == "d" then -- to be documented
+ -- dimension
+ context("\\the\\dimexpr %s\\relax",var)
+ elseif typ == "n" then -- to be documented
+ -- number
+ context("\\the\\numexpr %s\\relax",var)
+ elseif typ == "s" then -- to be documented
+ -- string
+ -- context(var)
context("%q",var)
- elseif tonumber(v) then
- context(v)
- elseif not lpegmatch(dimenorname,v) then
- context("\\number %s",v) -- 0.4 ...
+ elseif typ == "c" then -- to be documented
+ -- color
+ -- MPcolor(var)
+ context("%q",var)
+ else
+ context(var)
end
- elseif typ == "d" then -- to be documented
- -- dimension
- context("\\the\\dimexpr %s\\relax",var)
- elseif typ == "n" then -- to be documented
- -- number
- context("\\the\\numexpr %s\\relax",var)
- elseif typ == "s" then -- to be documented
- -- string
- -- context(var)
- context("%q",var)
- elseif typ == "c" then -- to be documented
- -- color
- -- MPcolor(var)
- context("%q",var)
- else
- context(var)
end
end
+ }
+
+end
+
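The implement block above accepts both bare values and explicitly typed ones written as type::value. A hedged sketch of the classification (splitat is ConTeXt's lpeg helper; the classify function itself is illustrative):

    local splitter = lpeg.splitat("::",true) -- ConTeXt helper from l-lpeg.lua

    -- illustrative: what each prefix ends up as on the MetaPost side
    local function classify(v)
        local typ, var = lpeg.match(splitter,v)
        if not var then
            return "guessed", v      -- color name, plain number or dimension
        elseif typ == "d" then
            return "dimension", var  -- flushed as \the\dimexpr ... \relax
        elseif typ == "n" then
            return "number", var     -- flushed as \the\numexpr ... \relax
        elseif typ == "s" or typ == "c" then
            return "quoted", var     -- strings and colors are passed quoted
        else
            return "verbatim", var
        end
    end

    print(classify("d::2pt"))     -- dimension  2pt
    print(classify("s::title"))   -- quoted     title
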
+do
+
+ local ctx_mathematics = context.mathematics
+
+ -- function metapost.formatnumber(f,n) -- just lua format
+ -- f = gsub(f,"@(%d)","%%.%1")
+ -- f = gsub(f,"@","%%")
+ -- f = format(f,tonumber(n) or 0)
+ -- f = gsub(f,"e([%+%-%d]+)",function(s)
+ -- return format("\\times10^{%s}",tonumber(s) or s) -- strips leading zeros
+ -- end)
+ -- context.mathematics(f)
+ -- end
+
+ -- formatters["\\times10^{%N}"](s) -- strips leading zeros too
+
+ local one = Cs((P("@")/"%%." * (R("09")^1) + P("@")/"%%" + 1)^0)
+ local two = Cs((P("e")/"" * ((S("+-")^0 * R("09")^1) / function(s)
+ -- return format("\\times10^{%s}",tonumber(s) or s)
+ return "\\times10^{" .. (tonumber(s) or s) .."}"
+ end) + 1)^1)
+
+ -- local two = Cs((P("e")/"" * ((S("+-")^0 * R("09")^1) / formatters["\\times10^{%N}"]) + 1)^1)
+
+ function metapost.formatnumber(fmt,n) -- just lua format
+ ctx_mathematics(lpegmatch(two,format(lpegmatch(one,fmt),n)))
end
-}
--- function metapost.formatnumber(f,n) -- just lua format
--- f = gsub(f,"@(%d)","%%.%1")
--- f = gsub(f,"@","%%")
--- f = format(f,tonumber(n) or 0)
--- f = gsub(f,"e([%+%-%d]+)",function(s)
--- return format("\\times10^{%s}",tonumber(s) or s) -- strips leading zeros
--- end)
--- context.mathematics(f)
--- end
+end
+
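A worked example of the two passes may help; the intermediate strings are what Lua's %e format and the patterns above produce (this trace assumes the local one and two patterns just defined):

    local fmt = lpegmatch(one,"@3e")   -- "@<digits>" becomes "%.<digits>", here "%.3e"
    local num = format(fmt,12345.6)    -- "1.235e+04"
    local mth = lpegmatch(two,num)     -- "1.235\\times10^{4}" (sign and zeros stripped)
    -- so metapost.formatnumber("@3e",12345.6) typesets $1.235\times10^{4}$
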
+do
--- formatters["\\times10^{%N}"](s) -- strips leading zeros too
+ -- this is an old pass-data-to-tex mechanism
-local one = Cs((P("@")/"%%." * (R("09")^1) + P("@")/"%%" + 1)^0)
-local two = Cs((P("e")/"" * ((S("+-")^0 * R("09")^1) / function(s) return format("\\times10^{%s}",tonumber(s) or s) end) + 1)^1)
+ local ctx_printtable = context.printtable
--- local two = Cs((P("e")/"" * ((S("+-")^0 * R("09")^1) / formatters["\\times10^{%N}"]) + 1)^1)
+ local data = false
+
+ function mp.start_saving_data(n)
+ data = { }
+ end
+
+ function mp.stop_saving_data()
+ if data then
+ -- nothing
+ end
+ end
+
+ function mp.finish_saving_data()
+ if data then
+ -- nothing
+ end
+ end
+
+ function mp.save_data(str)
+ if data then
+ data[#data+1] = str
+ end
+ end
+
+ interfaces.implement {
+ name = "getMPdata",
+ actions = function()
+ if data then
+ ctx_printtable(data,"\r")
+ end
+ end
+ }
-function metapost.formatnumber(fmt,n) -- just lua format
- context.mathematics(lpegmatch(two,format(lpegmatch(one,fmt),n)))
end
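The functions above implement a small collect-and-flush protocol: a graphic pushes lines from its Lua escapes and TeX later reads them back with \getMPdata. A hedged sketch of the flow (the lua.mp.* calling convention on the MetaPost side is assumed here, not shown in the patch):

    -- Lua side, as driven from MetaPost (e.g. lua.mp.save_data("...")):
    mp.start_saving_data(1)
    mp.save_data("0 1 2.5")
    mp.save_data("1 2 3.75")
    mp.stop_saving_data()
    -- \getMPdata (an alias for \clf_getMPdata) now feeds both lines back into
    -- the TeX input stream, separated by \r, via context.printtable.
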
diff --git a/tex/context/base/mkiv/meta-ini.mkiv b/tex/context/base/mkiv/meta-ini.mkiv
index 1d0fa11e0..5fc89e2f6 100644
--- a/tex/context/base/mkiv/meta-ini.mkiv
+++ b/tex/context/base/mkiv/meta-ini.mkiv
@@ -58,7 +58,7 @@
\writestatus{loading}{MetaPost Graphics / Initializations}
-\registerctxluafile{meta-ini}{1.001}
+\registerctxluafile{meta-ini}{}
\unprotect
@@ -84,9 +84,9 @@
\def \t_meta_inclusions {\csname\??mpinclusions \currentMPinstance\endcsname} % token register
\def \t_meta_definitions {\csname\??mpdefinitions\currentMPinstance\endcsname} % token register
-% The next command is, of course, dedicated to Mojca, who
-% needs it for gnuplot. Anyway, the whole multiple engine
-% mechanism is to keep her gnuplot from interfering.
+%D The next command is, of course, dedicated to Mojca, who needs it for gnuplot.
+%D Anyway, the whole multiple engine mechanism is to keep her gnuplot from
+%D interfering.
\unexpanded\def\startMPdefinitions
{\dosinglegroupempty\meta_start_definitions}
@@ -155,13 +155,6 @@
\global\t_meta_inclusions\expandafter{\the\t_meta_inclusions#2}%
\let\currentMPinstance\m_meta_saved_instance}
-% \def\meta_preset_definitions
-% {\edef\overlaywidth {\overlaywidth \space}%
-% \edef\overlayheight {\overlayheight \space}%
-% \edef\overlaylinewidth{\overlaylinewidth\space}}
-
-\let\meta_preset_definitions\relax
-
\installcommandhandler \??mpinstance {MPinstance} \??mpinstance
\setupMPinstance
@@ -261,8 +254,6 @@
{\begingroup
\meta_enable_include
\the\everyMPgraphic
- \meta_preset_definitions
- %\setMPrandomseed % this has to change
\edef\p_initializations{\MPinstanceparameter\s!initializations}%
\ifx\p_initializations\v!yes
\settrue \c_meta_include_initializations
@@ -335,17 +326,6 @@
\meta_process_graphic{input "#1" ;}%
\endgroup}
-% \newif\ifsetMPrandomseed \setMPrandomseedtrue % false by default
-%
-% \let\theMPrandomseed\empty
-%
-% \def\setMPrandomseed
-% {\ifsetMPrandomseed
-% \def\theMPrandomseed{randomseed:=\mprandomnumber;}%
-% \else
-% \let\theMPrandomseed\empty
-% \fi}
-
%D Calling up previously defined graphics.
% \def\includeMPgraphic#1% gets expanded !
@@ -365,12 +345,11 @@
%
% \useMPgraphic{x}
%
-% so we cannot overload unless we let back to the original meanings
-% each graphic ... a better solution is:
+% so we cannot overload unless we let back to the original meanings each graphic
+% ... a better solution is:
\def\includeMPgraphic#1% gets expanded !
{\ifcsname\??mpgraphic#1\endcsname
- %\doubleexpandafter\fourthoffourarguments\csname\??mpgraphic#1\endcsname ; % ; is safeguard
\doubleexpandafter\fourthoffourarguments\lastnamedcs ; % ; is safeguard
\fi}
@@ -408,7 +387,6 @@
{\relax
\bgroup
\meta_enable_include
- \meta_preset_definitions % in case #2 has measures
\doifelse{#1}{-}{\convertargument#2\to\asciia}{\def\asciia{#2}}%
\xdef\MPdrawingdata{\MPdrawingdata\asciia}%
\egroup}
@@ -419,7 +397,6 @@
{\relax
\bgroup
\meta_enable_include
- \meta_preset_definitions
\xdef\MPdrawingdata{\MPdrawingdata#1}%
\egroup}
@@ -458,8 +435,8 @@
\relax}%
\meta_stop_current_graphic}
-%D Since we want lables to follow the document settings, we
-%D also set the font related variables.
+%D Since we want labels to follow the document settings, we also set the font
+%D related variables.
\unexpanded\def\MPfontsizehskip#1%
{\dontleavehmode
@@ -476,12 +453,6 @@
% defaultscale:=1;
\stopMPinitializations
-% watch out, this is a type1 font because mp can only handle 8 bit fonts
-
-% \startMPinitializations % scale is not yet ok
-% defaultfont:="rm-lmtt10";
-% \stopMPinitializations
-
%D A signal that we're in combined \CONTEXT||\METAFUN mode:
\startMPextensions
@@ -496,12 +467,11 @@
%D \macros
%D {setupMPvariables}
%D
-%D When we build collections of \METAPOST\ graphics, like
-%D background and buttons, the need for passing settings
-%D arises. By (mis|)|using the local prefix that belongs to
-%D \type {\framed}, we get a rather natural interface to
-%D backgrounds. To prevent conflicts, we will use the \type
-%D {-} in \METAPOST\ specific variables, like:
+%D When we build collections of \METAPOST\ graphics, like background and buttons,
+%D the need for passing settings arises. By (mis|)|using the local prefix that
+%D belongs to \type {\framed}, we get a rather natural interface to backgrounds. To
+%D prevent conflicts, we will use the \type {-} in \METAPOST\ specific variables,
+%D like:
%D
%D \starttyping
%D \setupMPvariables[meta:button][size=20pt]
@@ -510,9 +480,9 @@
% \lineheight 2pt 2 \scratchcounter red 0.4 .5\bodyfontsize
%
% see cont-loc for test code
-%
-% currently the inheritance of backgrounds does not work and
-% we might drop it anyway (too messy)
+
+%D Currently the inheritance of backgrounds does not work and we might drop it
+%D anyway (too messy)
\newbox\b_meta_variable_box
@@ -623,9 +593,9 @@
%D \macros
%D {startuniqueMPgraphic, uniqueMPgraphic}
%D
-%D This macros is probably of most use to myself, since I like
-%D to use graphics that adapt themselves. The next \METAPOST\
-%D kind of graphic is both unique and reused when possible.
+%D This macro is probably of most use to myself, since I like to use graphics that
+%D adapt themselves. The next \METAPOST\ kind of graphic is both unique and reused
+%D when possible.
%D
%D \starttyping
%D \defineoverlay[example][\uniqueMPgraphic{test}]
@@ -644,19 +614,17 @@
\MPcolor\overlaycolor :% % todo, expand once \m_overlaycolor
\MPcolor\overlaylinecolor} % todo, expand once \m_overlaylinecolor
-%D A better approach is to let additional variables play a role
-%D in determining the uniqueness. In the next macro, the
-%D second, optional, argument is used to guarantee the
-%D uniqueness, as well as prepare variables for passing them to
+%D A better approach is to let additional variables play a role in determining the
+%D uniqueness. In the next macro, the second, optional, argument is used to
+%D guarantee the uniqueness, as well as prepare variables for passing them to
%D \METAPOST.
%D
%D \starttyping
%D \startuniqueMPgraphic{meta:hash}{gap,angle,...}
%D \stoptyping
%D
-%D The calling macro also accepts a second argument. For
-%D convenient use in overlay definitions, we use \type {{}}
-%D instead of \type {[]}.
+%D The calling macro also accepts a second argument. For convenient use in overlay
+%D definitions, we use \type {{}} instead of \type {[]}.
%D
%D \starttyping
%D \uniqueMPgraphic{meta:hash}{gap=10pt,angle=30}
@@ -699,9 +667,6 @@
{\MPllx#2\MPlly#3\MPurx#4\MPury#5%
\hpack{\forcecolorhack\getobject{MP}{#1}}} % else no proper color intent
-% \unexpanded\def\meta_use_box
-% {\setobject{MP}}
-
\unexpanded\def\meta_use_box
{\setunreferencedobject{MP}}
@@ -723,9 +688,6 @@
\unexpanded\def\startuniqueMPgraphic
{\dodoublegroupempty\meta_start_unique_graphic}
-% \def\meta_start_unique_graphic#1#2#3\stopuniqueMPgraphic
-% {\setgvalue{\??mpgraphic#1}{\meta_handle_unique_graphic{#1}{#2}{#3}}}
-
\def\meta_start_unique_graphic#1%
{\normalexpanded{\meta_start_unique_graphic_indeed{#1}}}
@@ -888,12 +850,9 @@
%D {\edef\overlaystamp{\overlaystamp:\MPvariable{##1}}}%
%D \processcommalist[#1]\docommand}
%D \stoptyping
-
-%D Since we need to feed \METAPOST\ with expanded dimensions,
-%D we introduce a dedicated expansion engine.
-
-%D We redefine \type {\extendMPoverlaystamp} to preprocess
-%D variables using \type {\meta_prepare_variable}.
+%D
+%D Since we need to feed \METAPOST\ with expanded dimensions, we introduce a
+%D dedicated expansion engine. We redefine \type {\extendMPoverlaystamp} to
+%D preprocess variables using \type {\meta_prepare_variable}.
\unexpanded\def\extendMPoverlaystamp#1%
{\processcommalist[#1]\meta_extend_overlay_stamp}
@@ -909,40 +868,22 @@
% This will change ...
-\def\MPdataMPDfile{\jobname-mpgraph.mpd} % this one will become obsolete
-\def\MPdataMPOfile{\jobname-mpgraph.mpo}
\def\MPdataMPYfile{\jobname-mpgraph.mpy}
-% makempy.registerfile(filename)
-
-% % hm. extensions get expanded so the wrong names then end up in format
-%
-% \startMPinitializations
-% boolean collapse_data; collapse_data:=true ; % will be obsolete
-% def data_mpd_file = "\MPdataMPDfile" enddef ; % will go via lua
-% def data_mpo_file = "\MPdataMPOfile" enddef ;
-% def data_mpy_file = "\MPdataMPYfile" enddef ;
-% \stopMPinitializations
-
\startMPextensions
- boolean collapse_data; collapse_data:=true ; % will be obsolete
- def data_mpd_file = "\noexpand\MPdataMPDfile" enddef ; % will go via lua
- def data_mpo_file = "\noexpand\MPdataMPOfile" enddef ;
def data_mpy_file = "\noexpand\MPdataMPYfile" enddef ;
\stopMPextensions
-\unexpanded\def\getMPdata
- {\let\MPdata\secondoftwoarguments
- \startreadingfile
- % \startnointerference % no, else we need to do all data global
- \readlocfile\MPdataMPDfile\donothing\donothing
- % \stopnointerference
- \stopreadingfile}
+\unexpanded\def\getMPdata {\clf_getMPdata}
+ \let\rawMPdata \clf_getMPdata
-% We need this trick because we need to make sure that the tex scanner
-% sees newlines and does not quit. Also, we do need to flush the buffer
-% under a normal catcode regime in order to expand embedded tex macros.
-% As usual with buffers, \type {#1} can be a list.
+\unexpanded\def\getMPstored{\clf_getMPstored}
+ \let\rawMPstored \clf_getMPstored
+
+%D We need this trick because we need to make sure that the tex scanner sees
+%D newlines and does not quit. Also, we do need to flush the buffer under a normal
+%D catcode regime in order to expand embedded tex macros. As usual with buffers,
+%D \type {#1} can be a list.
\unexpanded\def\processMPbuffer
{\dosingleempty\meta_process_buffer}
@@ -961,22 +902,8 @@
%D \macros
%D {startMPenvironment, resetMPenvironment}
%D
-%D In order to synchronize the main \TEX\ run and the runs
-%D local to \METAPOST, environments can be passed.
-
-% \unexpanded\def\startMPenvironment % second arg gobbles spaces, so that reset gives \emptytoks
-% {\dodoubleempty\dostartMPenvironment}
-
-% \def\dostartMPenvironment[#1][#2]#3\stopMPenvironment
-% {\edef\m_meta_option{#1}
-% \ifx\m_meta_option\s!reset
-% \resetMPenvironment % reset mp toks
-% \else\ifx\m_meta_option\v!global
-% #3% % use in main doc too
-% \else\ifx\m_meta_option\!!plustoken
-% #3% % use in main doc too
-% \fi\fi\fi
-% \ctxlua{metapost.tex.set(\!!bs\detokenize{#3}\!!es)}}
+%D In order to synchronize the main \TEX\ run and the runs local to \METAPOST,
+%D environments can be passed.
\unexpanded\def\startMPenvironment
{\begingroup
@@ -1003,8 +930,7 @@
\unexpanded\def\useMPenvironmentbuffer[#1]%
{\clf_mptexsetfrombuffer{#1}}
-%D This command takes \type {[reset]} as optional
-%D argument.
+%D This command takes \type {[reset]} as optional argument.
%D
%D \starttyping
%D \startMPenvironment
@@ -1148,8 +1074,8 @@
\def\MPstring#1{"\begincsname\??mptext#1\endcsname\empty"}
\def\MPbetex #1{btex \begincsname\??mptext#1\endcsname\space etex}
-%D In order to communicate conveniently with the \TEX\
-%D engine, we introduce some typesetting variables.
+%D In order to communicate conveniently with the \TEX\ engine, we introduce some
+%D typesetting variables.
% \setupcolors[state=stop,conversion=never] % quite tricky ... type mismatch
@@ -1169,8 +1095,8 @@
% OverlayOffset:=\overlayoffset;
% \stopMPinitializations
-% A dirty trick, ** in colo-ini.lua (mpcolor). We cannot use a vardef, because
-% that fails with spot colors.
+%D A dirty trick, ** in colo-ini.lua (mpcolor). We cannot use a vardef, because
+%D that fails with spot colors.
\startMPinitializations
def OverlayLineColor=\ifx\overlaylinecolor\empty black \else\MPcolor{\overlaylinecolor} \fi enddef;
@@ -1241,21 +1167,11 @@
%D \macros
%D {PDFMPformoffset}
%D
-%D In \PDF, forms are clipped and therefore we have to take
-%D precautions to get this right. Since this is related to
-%D objects, we use the same offset as used there.
+%D In \PDF, forms are clipped and therefore we have to take precautions to get this
+%D right. Since this is related to objects, we use the same offset as used there.
\def\PDFMPformoffset{\objectoffset}
-% %D \macros
-% %D {insertMPfile}
-% %D
-% %D Bypassing the special driver and figure mechanism is not
-% %D that nice but saves upto 5\% time in embedding \METAPOST\
-% %D graphics by using the low level \PDF\ converter directly,
-% %D given of course that we use \PDFTEX. As a result we need to
-% %D fool around with the object trigger.
-
\newtoks\everyinsertMPfile
\startMPextensions
@@ -1295,19 +1211,17 @@
% if unknown context_bare : input mp-bare.mpiv ; fi ;
% \stopMPdefinitions
-%D And some more. These are not really needed since we
-%D don't use the normal figure inclusion macros any longer.
+%D And some more. These are not really needed since we don't use the normal figure
+%D inclusion macros any longer.
\appendtoks
\externalfigurepostprocessors\emptytoks % safeguard
\to \everyinsertMPfile
-%D We also take care of disabling fancy figure features, that
-%D can terribly interfere when dealing with symbols,
-%D background graphics and running (postponed) graphics.
-%D You won't believe me if I tell you what funny side effects
-%D can occur. One took me over a day to uncover when
-%D processing the screen version of the \METAFUN\ manual.
+%D We also take care of disabling fancy figure features, that can terribly interfere
+%D when dealing with symbols, background graphics and running (postponed) graphics.
+%D You won't believe me if I tell you what funny side effects can occur. One took me
+%D over a day to uncover when processing the screen version of the \METAFUN\ manual.
\def\doifelseMPgraphic#1%
{\ifcsname\??mpgraphic #1\endcsname \expandafter \firstoftwoarguments \else
@@ -1372,8 +1286,8 @@
%D \macros
%D {startstaticMPfigure,useMPstaticfigure}
%D
-%D Static figures are processed only when there has been
-%D something changed. Here is Aditya Mahajan's testcase:
+%D Static figures are processed only when there has been something changed. Here is
+%D Aditya Mahajan's testcase:
%D
%D \startbuffer
%D \startstaticMPfigure{circle}
@@ -1415,19 +1329,17 @@
\def\MPdivten[#1]{\withoutpt\the\dimexpr#1pt/10\relax}
-%D There is no way to distinguish the black color that you get when
-%D you issue a \type {draw} without color specification from a color
-%D that has an explicit black specification unless you set the
-%D variable \type {defaultcolormodel} to 1. Hoewever, in that case
-%D you cannot distinguish that draw from one with a \type
-%D {withoutcolor} specification. This means that we have to provide
-%D multiple variants of inheritance.
+%D There is no way to distinguish the black color that you get when you issue a
+%D \type {draw} without color specification from a color that has an explicit black
+%D specification unless you set the variable \type {defaultcolormodel} to 1.
+%D However, in that case you cannot distinguish that draw from one with a \type
+%D {withoutcolor} specification. This means that we have to provide multiple
+%D variants of inheritance.
%D
-%D In any case we need to tell the converter what the inherited color
-%D is to start with. Case~3 is kind of unpredictable as it closely
-%D relates to the order in which paths are flushed. If you want to
-%D inherit automatically from the surrounding, you can best stick to
-%D variant 1. Variant 0 (an isolated graphic) is the default.
+%D In any case we need to tell the converter what the inherited color is to start
+%D with. Case~3 is kind of unpredictable as it closely relates to the order in which
+%D paths are flushed. If you want to inherit automatically from the surrounding, you
+%D can best stick to variant 1. Variant 0 (an isolated graphic) is the default.
%D
%D \startbuffer
%D \startuseMPgraphic{test}
@@ -1541,10 +1453,9 @@
\installsetuponlycommandhandler \??MPgraphics {MPgraphics}
-%D Here we hook in the outer color. When \type {color} is set to \type
-%D {global} we get the outer color automatically. If you change this
-%D setting, you should do it grouped in order not to make other graphics
-%D behave in unexpected ways.
+%D Here we hook in the outer color. When \type {color} is set to \type {global} we
+%D get the outer color automatically. If you change this setting, you should do it
+%D grouped in order not to make other graphics behave in unexpected ways.
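+%D
+%D A grouped change could look like this (an editorial sketch, the graphic name is
+%D made up):
+%D
+%D \starttyping
+%D \begingroup
+%D \setupMPgraphics[color=global]
+%D \useMPgraphic{somegraphic}
+%D \endgroup
+%D \stoptyping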
\appendtoks
\doifelse{\directMPgraphicsparameter\c!color}\v!global{\MPcolormethod\plusone}{\MPcolormethod\zerocount}%
diff --git a/tex/context/base/mkiv/meta-lua.lua b/tex/context/base/mkiv/meta-lua.lua
new file mode 100644
index 000000000..42d036630
--- /dev/null
+++ b/tex/context/base/mkiv/meta-lua.lua
@@ -0,0 +1,96 @@
+if not modules then modules = { } end modules ['meta-lua'] = {
+ version = 1.001,
+ comment = "companion to meta-lua.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files",
+}
+
+-- Don't use this code yet. I use it in some experimental rendering of graphics
+-- based on output from database queries. It's not that pretty but will be
+-- considered when the (similar) lmx code is redone. Also, dropping the print
+-- variant makes it nicer. This experiment is part of playing with several template
+-- mechanisms. (Also see trac-lmx.)
+
+local P, V, Cs, lpegmatch = lpeg.P, lpeg.V, lpeg.Cs, lpeg.match
+local formatters = string.formatters
+local concat = table.concat
+local load, pcall = load, pcall
+
+local errorformatter = formatters[ [[draw textext("\tttf error in template '%s'") ;]] ]
+local concatformatter = formatters[ [[local _t = { } local _n = 0 local p = function(s) _n = _n + 1 _t[_n] = s end %s return table.concat(_t," ")]] ]
+local appendformatter = formatters[ [[_n=_n+1 _t[_n]=%q]] ]
+
+local blua = P("blua ") / " "
+local elua = P(" elua") / " "
+local bluacode = P("<?lua ") / " "
+local eluacode = P(" ?>") / " "
+
+local plua = (blua * (1 - elua)^1 * elua)
+local pluacode = (bluacode * (1 - eluacode)^1 * eluacode)
+
+-- local methods = {
+-- both = Cs { "start",
+-- start = (V("bluacode") + V("blua") + V("rest"))^0,
+-- blua = plua,
+-- bluacode = pluacode,
+-- rest = (1 - V("blua") - V("bluacode"))^1 / appendformatter,
+-- },
+-- xml = Cs { "start",
+-- start = (V("bluacode") + V("rest"))^0,
+-- bluacode = pluacode,
+-- rest = (1 - V("bluacode"))^1 / appendformatter,
+-- },
+-- xml = Cs ((pluacode + (1 - pluacode)^1 / appendformatter)^0),
+-- metapost = Cs { "start",
+-- start = (V("blua") + V("rest"))^0,
+-- blua = plua,
+-- rest = (1 - V("blua"))^1 / appendformatter,
+-- },
+-- }
+
+local methods = {
+ both = Cs ((pluacode + plua + (1 - plua - pluacode)^1 / appendformatter)^0),
+ xml = Cs ((pluacode + (1 - pluacode)^1 / appendformatter)^0),
+ metapost = Cs (( plua + (1 - plua )^1 / appendformatter)^0),
+}
+
+methods.mp = methods.metapost
+
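+-- An editorial illustration (not part of the module) of what the "metapost"
+-- method does to a template line like
+--
+--   blua for i=1,3 do elua draw unitcircle scaled (blua p(i) elua * cm) ; blua end elua
+--
+-- the embedded lua is kept as-is while the surrounding text becomes quoted
+-- appends, roughly:
+--
+--   for i=1,3 do _n=_n+1 _t[_n]="draw unitcircle scaled (" p(i) _n=_n+1 _t[_n]=" * cm) ; " end
+--
+-- concatformatter then wraps this chunk with the definitions of _t, _n and p, so
+-- loading and running it returns the generated metapost code as one string.
+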
+-- Unfortunately mp adds a suffix ... also weird is that successive loading
+-- of the same file gives issues. Maybe some weird buffering goes on (smells
+-- similar to older write / read issues).
+
+mplib.finders.mpstemplate = function(specification,name,mode,ftype)
+ local authority = specification.authority
+ local queries = specification.queries
+ local nameonly = file.nameonly(queries.name or "")
+ local method = file.nameonly(queries.method or "")
+ local pattern = methods[method] or methods.both
+ local data = nil
+ if nameonly == "" then
+ data = errorformatter("no name")
+ elseif authority == "file" then
+ local foundname = resolvers.findfile(nameonly)
+ if foundname ~= "" then
+ data = io.loaddata(foundname)
+ end
+ elseif authority == "buffer" then
+ data = buffers.getcontent(nameonly)
+ end
+ data = data and lpegmatch(pattern,data)
+ data = data and concatformatter(data)
+ data = data and load(data)
+ if data then
+ local okay
+ okay, data = pcall(data)
+ end
+ if not data or data == "" then
+ data = errorformatter(nameonly)
+ end
+ local name = luatex.registertempfile(nameonly,true)
+ local data = metapost.checktexts(data)
+ io.savedata(name,data)
+ return name
+end
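+
+-- The finder is triggered from the metapost end, for instance (see meta-lua.mkiv
+-- for the buffer based examples; the file name below is made up):
+--
+--   input "mpstemplate://buffer?name=test-a" ;
+--   input "mpstemplate://file?name=mytemplate&method=metapost" ;
+--
+-- when no method is given the "both" pattern is used.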
+
diff --git a/tex/context/base/mkiv/meta-lua.mkiv b/tex/context/base/mkiv/meta-lua.mkiv
new file mode 100644
index 000000000..e06677d5e
--- /dev/null
+++ b/tex/context/base/mkiv/meta-lua.mkiv
@@ -0,0 +1,95 @@
+%D \module
+%D [ file=meta-lua,
+%D version=2012.07.23,
+%D title=\METAPOST\ Integrated Graphics,
+%D subtitle=Templates,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+%D Don't use this code yet. I use it in some experimental rendering of graphics
+%D based on output from database queries. It's not that pretty but will be
+%D considered when the (similar) lmx code is redone. Also, dropping the print
+%D variant makes it nicer. This experiment is part of playing with several template
+%D mechanisms. (Also see trac-lmx.)
+%D
+%D Note for myself: see if the (bar)chart code used in q2p can use this kind of
+%D magic. Basically we then need a channel to data.
+
+\writestatus{loading}{MetaPost Library Graphics / Templates}
+
+\registerctxluafile{meta-lua}{}
+
+\continueifinputfile{meta-lua.mkiv}
+
+\starttext
+
+% conforming btex ... etex
+
+\startbuffer[test-a]
+ blua for i=1,100,5 do elua
+ draw fullcircle scaled (blua p(i) elua * cm) withcolor green withpen pencircle scaled 4 ;
+ blua end elua
+
+ blua for i=1,100,10 do elua
+ draw fullcircle scaled (blua p(i) elua * cm) withcolor red withpen pencircle scaled 2 ;
+ blua end elua
+\stopbuffer
+
+\startMPpage[offset=10pt]
+ input "mpstemplate://buffer?name=test-a" ;
+\stopMPpage
+
+% conforming lmx
+
+\startbuffer[test-b]
+ <?lua for i=1,100,5 do ?>
+ draw fullcircle scaled (<?lua p(i) ?> * cm) withcolor green withpen pencircle scaled 4 ;
+ <?lua end ?>
+
+ <?lua for i=1,100,10 do ?>
+ draw fullcircle scaled (<?lua p(i) ?> * cm) withcolor red withpen pencircle scaled 2 ;
+ <?lua end ?>
+\stopbuffer
+
+\startMPpage[offset=10pt]
+ input "mpstemplate://buffer?name=test-b" ;
+\stopMPpage
+
+\startMPpage[offset=10pt]
+ picture p[] ; % we can't input nested
+ input "mpstemplate://buffer?name=test-a&method=metapost" ;
+ p[1] := currentpicture ; currentpicture := nullpicture ;
+ input "mpstemplate://buffer?name=test-b&method=xml" ;
+ p[2] := currentpicture ; currentpicture := nullpicture ;
+ draw p[1] ysized 3cm ;
+ draw p[2] ysized 4cm shifted (4cm,0) ;
+\stopMPpage
+
+% a mixture (using a wrapped input)
+
+\startMPpage[offset=10pt]
+ draw image (loadfile("mpstemplate://buffer?name=test-a&method=metapost")) ysized 3cm shifted (4cm,0cm) ;
+ draw image (loadfile("mpstemplate://buffer?name=test-b&method=xml")) ysized 3cm shifted (0cm,4cm) ;
+ draw loadimage ("mpstemplate://buffer?name=test-a&method=metapost") ysized 4cm shifted (4cm,4cm) ;
+ draw loadimage ("mpstemplate://buffer?name=test-b&method=xml") ysized 4cm shifted (0cm,0cm) ;
+\stopMPpage
+
+% conforming myself
+
+\startluacode
+ context.startMPpage { offset = "10pt" }
+ for i=1,100,5 do
+ context("draw fullcircle scaled (%s * cm) withcolor green withpen pencircle scaled 4 ;",i)
+ end
+ for i=1,100,10 do
+ context("draw fullcircle scaled (%s * cm) withcolor red withpen pencircle scaled 2 ;",i)
+ end
+ context.stopMPpage()
+\stopluacode
+
+\stoptext
diff --git a/tex/context/base/mkiv/meta-nod.mkiv b/tex/context/base/mkiv/meta-nod.mkiv
new file mode 100644
index 000000000..5c7b3d503
--- /dev/null
+++ b/tex/context/base/mkiv/meta-nod.mkiv
@@ -0,0 +1,241 @@
+%D \module
+%D [ file=meta-nod,
+%D version=2016.11.23,
+%D title=\METAPOST\ Graphics,
+%D subtitle=Nodes,
+%D author={Alan Braslau and Hans Hagen},
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\unprotect
+
+\defineMPinstance
+ [nodes]
+ [\s!format=metafun,
+ \s!extensions=\v!yes,
+ \s!initializations=\v!yes,
+ \c!method=\s!double]
+
+\defineframed
+ [node]
+ [\c!frame=\v!off]
+
+\defineframed
+ [smallnode]
+ [node]
+ [\c!foregroundstyle=\small]
+
+% \startMPdefinitions{nodes}
+% loadmodule "node" ;
+% \stopMPdefinitions
+
+\protect
+
+%D This module provides a simple \TEX\ layer on top of the nodes library that
+%D is preloaded in \METAFUN. More information can be found in the manual and
+%D article.
+%D
+%D We strongly advise using the \METAPOST\ interface and only provide this \TEX\
+%D variant as a proof of concept. There are no plans to extend this module
+%D because we see no advantage in using a \TEX\ interface over a \METAPOST\
+%D one.
+%D
+%D \startbuffer
+%D \startnodes
+%D \placenode[0,0]{A}
+%D \placenode[1,0]{B}
+%D \connectnodes [0,1] [option=doublearrow]
+%D \stopnodes
+%D
+%D \startnodes[dx=2cm,rotation=45]
+%D \placenode[0,0]{A}
+%D \placenode[1,0]{B}
+%D \placenode[1,1]{C}
+%D \placenode[0,1]{D}
+%D \connectnodes [0,2] [option=doublearrow,label=a,offset=.05]
+%D \connectnodes [1,3] [option=doublearrows]
+%D \stopnodes
+%D
+%D \startnodes[dx=2cm,dy=2cm]
+%D \nodeMPcode{ahlength := 12pt ; ahangle := 30 ; ahvariant := 1 ;}
+%D \mpcode{ahlength := 12pt ; ahangle := 30 ; ahvariant := 1 ;}
+%D \placenode[0,1]{\node{A}}
+%D \placenode[1,1]{\node{B}}
+%D \placenode[0,0]{\node{C}}
+%D \placenode[1,0]{\node{D}}
+%D \connectnodes [0,3] [option=arrow,label={ }]
+%D \connectnodes [1,2] [option=arrow]
+%D \stopnodes
+%D \stopbuffer
+%D
+%D \typebuffer
+%D
+%D \startlinecorrection \getbuffer \stoplinecorrection
+
+\unprotect
+
+\installcorenamespace{metanodes}
+\installcorenamespace{metanodesoption}
+\installcorenamespace{metanodesalternative}
+\installcorenamespace{metanodesposition}
+
+% todo: maybe instances
+
+\installparameterhandler \??metanodes {metanodes}
+\installsetuphandler \??metanodes {metanodes}
+
+\setupmetanodes
+ [\c!option =,
+ \c!alternative =,
+ \c!offset =0,
+ \c!position =,
+ \c!label =,
+ \c!dx =2\emwidth,
+ \c!dy =2\emwidth,
+ \c!rotation =90,
+ \c!rulethickness =.5pt,
+ \c!command =]
+
+\newtoks\t_every_meta_nodes
+\newtoks\t_meta_nodes
+
+\let\stopnodes\relax
+
+\unexpanded\def\startnodes
+ {\dosingleempty\meta_nodes_start}
+
+\def\meta_nodes_start[#1]#2\stopnodes
+ {\hpack\bgroup
+ \iffirstargument
+ \setupcurrentmetanodes[#1]%
+ \fi
+ \edef\p_meta_option{\metanodesparameter\c!option}%
+ \edef\p_meta_alternative{\metanodesparameter\c!alternative}%
+ \the\t_every_meta_nodes
+ \t_meta_nodes\emptytoks
+ #2\removeunwantedspaces
+ % for alan, will be commented:
+ \writestatus{metanodes}{\detokenize\expandafter{\the\t_meta_nodes}}%
+ \startMPcode
+ mfun_node_init(%
+ \the\dimexpr\metanodesparameter\c!dx\relax,%
+ \the\dimexpr\metanodesparameter\c!dy\relax,%
+ \metanodesparameter\c!rotation%
+ ) ;
+ \the\t_meta_nodes ;
+ mfun_node_flush ;
+ \stopMPcode
+ \egroup}
+
+% \unexpanded\def\grph_nodes_node[#1,#2]#3%
+% {\etoksapp\t_meta_nodes{mfun_node_make(\number#1,\number#2,"\metanodesparameter\c!command{#3}");}}
+
+\unexpanded\def\grph_nodes_node
+ {\dodoubleempty\grph_nodes_node_two}
+
+\def\grph_nodes_node_two[#1][#2]%
+ {\grph_nodes_node_two_indeed[#1][#2]}
+
+\def\grph_nodes_node_two_indeed[#1,#2][#3]#4%
+ {\begingroup
+ \ifsecondargument
+ \setupcurrentmetanodes[#3]%
+ \fi
+ \edef\p_label{#4}%
+ \normalexpanded{\endgroup\noexpand\etoksapp\t_meta_nodes{%
+ mfun_node_make(\number#1,\number#2%
+ \ifx\p_label\empty \else
+ ,"\metanodesparameter\c!command{\p_label}"%
+ \fi
+ );%
+ }}}
+
+\appendtoks
+ \let\placenode\grph_nodes_node
+\to \t_every_meta_nodes
+
+\unexpanded\def\grph_nodes_fromto
+ {\dodoubleempty\grph_nodes_fromto_two}
+
+\def\grph_nodes_fromto_two[#1][#2]%
+ {\grph_nodes_fromto_two_indeed[#1][#2]}% get rid of {n,m}
+
+\letvalue{\??metanodesposition }\empty
+\setvalue{\??metanodesposition top}{.top}
+\setvalue{\??metanodesposition bottom}{.bot}
+\setvalue{\??metanodesposition left}{.lft}
+\setvalue{\??metanodesposition right}{.rt}
+\setvalue{\??metanodesposition upperleft}{.ulft}
+\setvalue{\??metanodesposition topleft}{.ulft}
+\setvalue{\??metanodesposition lefttop}{.ulft}
+\setvalue{\??metanodesposition upperright}{.urt}
+\setvalue{\??metanodesposition topright}{.urt}
+\setvalue{\??metanodesposition righttop}{.urt}
+\setvalue{\??metanodesposition lowerleft}{.llft}
+\setvalue{\??metanodesposition bottomleft}{.llft}
+\setvalue{\??metanodesposition leftbottom}{.llft}
+\setvalue{\??metanodesposition lowerright}{.lrt}
+\setvalue{\??metanodesposition bottomright}{.lrt}
+\setvalue{\??metanodesposition rightbottom}{.lrt}
+
+\def\grph_nodes_fromto_two_indeed[#1,#2][#3]% we can't group because etoksapp doesn't like that
+ {\begingroup
+ \ifsecondargument
+ \setupcurrentmetanodes[#3]%
+ \fi
+ \edef\p_label {\metanodesparameter\c!label}%
+ \edef\p_rulethickness{\metanodesparameter\c!rulethickness}%
+ \edef\p_command {\metanodesparameter\c!command}% better get an error early
+ \normalexpanded{\endgroup\noexpand\etoksapp\t_meta_nodes{%
+ \ifcsname\??metanodesalternative\metanodesparameter\c!alternative\endcsname
+ \lastnamedcs
+ \else
+ \csname\??metanodesalternative\endcsname
+ \fi
+ \space
+ mfun_nodes_fromto\begincsname\??metanodesposition\metanodesparameter\c!position\endcsname(%
+ \metanodesparameter\c!offset,%
+ \number#1,\number#2%
+ \ifx\p_label\empty \else
+ ,"\ifx\p_command\empty\p_label\else\p_command{\p_label}\fi"%
+ \fi
+ )%
+ \space
+ \ifcsname\??metanodesoption\metanodesparameter\c!option\endcsname
+ \lastnamedcs
+ \else
+ \csname\??metanodesoption\endcsname
+ \fi
+ \space
+ \ifx\p_rulethickness\empty \else
+ withpen pencircle scaled \the\dimexpr\p_rulethickness\relax
+ \fi
+ ;%
+ }}}
+
+\setvalue{\??metanodesalternative }{draw}
+\setvalue{\??metanodesalternative arrow}{drawarrow}
+\setvalue{\??metanodesalternative doublearrow}{drawdblarrow}
+\setvalue{\??metanodesalternative doublearrows}{drawdoublearrows}
+
+\setvalue{\??metanodesoption }{}
+\setvalue{\??metanodesoption dashed}{dashed evenly}
+\setvalue{\??metanodesoption dotted}{dashed withdots scaled .5}
+
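+% For the record, a rough (editorial) illustration of what ends up in the metapost
+% stream: \connectnodes[0,2][alternative=arrow,option=dashed,position=top,label=a]
+% appends something along the lines of
+%
+%   drawarrow mfun_nodes_fromto.top(0,0,2,"a") dashed evenly
+%     withpen pencircle scaled 0.5pt ;
+%
+% which gets flushed by \stopnodes.
+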
+\appendtoks
+ \let\connectnodes\grph_nodes_fromto
+\to \t_every_meta_nodes
+
+\unexpanded\def\grph_nodes_code#1%
+ {\etoksapp\t_meta_nodes{#1}}
+
+\appendtoks
+ \let\nodeMPcode\grph_nodes_code
+ \let\mpcode\grph_nodes_code
+\to \t_every_meta_nodes
+
+\protect \endinput
diff --git a/tex/context/base/mkiv/meta-pdf.lua b/tex/context/base/mkiv/meta-pdf.lua
index c17a2a4c7..4a185cebd 100644
--- a/tex/context/base/mkiv/meta-pdf.lua
+++ b/tex/context/base/mkiv/meta-pdf.lua
@@ -13,15 +13,19 @@ if not modules then modules = { } end modules ['meta-pdf'] = {
-- We can make it even more efficient if needed, but as we don't use this
-- code often in \MKIV\ it makes no sense.
+local tonumber = tonumber
local concat, unpack = table.concat, table.unpack
local gsub, find, byte, gmatch, match = string.gsub, string.find, string.byte, string.gmatch, string.match
local lpegmatch = lpeg.match
local round = math.round
local formatters, format = string.formatters, string.format
-local report_mptopdf = logs.reporter("graphics","mptopdf")
+local mplib = mplib
+local metapost = metapost
+local lpdf = lpdf
+local context = context
-local mplib, metapost, lpdf, context = mplib, metapost, lpdf, context
+local report_mptopdf = logs.reporter("graphics","mptopdf")
local texgetattribute = tex.getattribute
@@ -31,7 +35,7 @@ local pdfgraycode = lpdf.graycode
local pdfspotcode = lpdf.spotcode
local pdftransparencycode = lpdf.transparencycode
local pdffinishtransparencycode = lpdf.finishtransparencycode
-local pdfliteral = nodes.pool.pdfliteral
+----- pdfpageliteral = nodes.pool.pdfpageliteral
metapost.mptopdf = metapost.mptopdf or { }
local mptopdf = metapost.mptopdf
@@ -64,7 +68,7 @@ resetall()
-- -- comment hack
--
-- local function pdfcode(str)
--- context(pdfliteral(str))
+-- context(pdfpageliteral(str))
-- end
local pdfcode = context.pdfliteral
@@ -527,7 +531,7 @@ local captures_old = ( space + verbose + preamble )^0
local captures_new = ( space + verbose + procset + preamble )^0
local function parse(m_data)
- if find(m_data,"%%%%BeginResource: procset mpost") then
+ if find(m_data,"%%BeginResource: procset mpost",1,true) then
-- report_mptopdf("using sparse scanner, case 1")
lpegmatch(captures_new,m_data)
elseif find(m_data,"%%%%BeginProlog%s*%S+(.-)%%%%EndProlog") then
diff --git a/tex/context/base/mkiv/meta-pdf.mkiv b/tex/context/base/mkiv/meta-pdf.mkiv
index 14c97042e..38ffb72f8 100644
--- a/tex/context/base/mkiv/meta-pdf.mkiv
+++ b/tex/context/base/mkiv/meta-pdf.mkiv
@@ -11,7 +11,7 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-\registerctxluafile{meta-pdf}{1.003}
+\registerctxluafile{meta-pdf}{}
\writestatus{loading}{MetaPost Graphics / MPS to PDF}
diff --git a/tex/context/base/mkiv/meta-pdh.lua b/tex/context/base/mkiv/meta-pdh.lua
index 5040715c4..afd1576cb 100644
--- a/tex/context/base/mkiv/meta-pdh.lua
+++ b/tex/context/base/mkiv/meta-pdh.lua
@@ -30,7 +30,9 @@ local concat, format, find, gsub, gmatch = table.concat, string.format, string.f
local tostring, tonumber, select = tostring, tonumber, select
local lpegmatch = lpeg.match
-local metapost = metapost
+metapost = metapost or { }
+local metapost = metapost
+local context = context
metapost.mptopdf = metapost.mptopdf or { }
local mptopdf = metapost.mptopdf
@@ -98,7 +100,7 @@ function mptopdf.steps.strip() -- .3 per expr
end
local name, version = gmatch(preamble,"%%%%Creator: +(.-) +(.-) ")
mptopdf.version = tostring(version or "0")
- if find(preamble,"/hlw{0 dtransform") then
+ if find(preamble,"/hlw{0 dtransform",1,true) then
mptopdf.shortcuts = true
end
-- the boundingbox specification needs to come before data, well, not really
@@ -587,7 +589,7 @@ do
local captures_new = ( space + procset + preamble + verbose )^0
function mptopdf.parsers.lpeg()
- if find(mptopdf.data,"%%%%BeginResource: procset mpost") then
+ if find(mptopdf.data,"%%BeginResource: procset mpost",1,true) then
lpegmatch(captures_new,mptopdf.data)
else
lpegmatch(captures_old,mptopdf.data)
diff --git a/tex/context/base/mkiv/meta-pdh.mkiv b/tex/context/base/mkiv/meta-pdh.mkiv
index 3787a5638..f9eff73ca 100644
--- a/tex/context/base/mkiv/meta-pdh.mkiv
+++ b/tex/context/base/mkiv/meta-pdh.mkiv
@@ -29,7 +29,7 @@
%
% timings may differ now that we revamped the backend
-\registerctxluafile{meta-pdf}{1.003}
+\registerctxluafile{meta-pdf}{}
%D We will clean up the color mess later.
@@ -268,8 +268,8 @@
%D {0.001} is the first path and \type {0.010} the tenth. Since
%D \METAPOST strips trailing zeros, we have to padd the string.
-\newif\ifMPcmykcolors
-\newif\ifMPspotcolors
+% \newif\ifMPcmykcolors
+% \newif\ifMPspotcolors
%D Specials:
diff --git a/tex/context/base/mkiv/meta-tex.lua b/tex/context/base/mkiv/meta-tex.lua
index 71207975d..d66eab069 100644
--- a/tex/context/base/mkiv/meta-tex.lua
+++ b/tex/context/base/mkiv/meta-tex.lua
@@ -6,262 +6,161 @@ if not modules then modules = { } end modules ['meta-tex'] = {
license = "see context related readme files"
}
-local tostring = tostring
-local format, gsub, find, match = string.format, string.gsub, string.find, string.match
+local tostring, tonumber = tostring, tonumber
+local format = string.format
local formatters = string.formatters
local P, S, R, C, Cs, lpegmatch = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.Cs, lpeg.match
-metapost = metapost or { }
+metapost = metapost or { }
+local metapost = metapost
+local context = context
local implement = interfaces.implement
--- local left = P("[")
--- local right = P("]")
--- local space = P(" ")
--- local argument = left * C((1-right)^1) * right
--- local pattern = (argument + space)^0
+do
--- function metapost.sometxt(optional,str)
--- if optional == "" then
--- context.sometxta(str)
--- else
--- local one, two = lpegmatch(pattern,optional)
--- if two then
--- context.sometxtc(one,two,str)
--- elseif one then
--- context.sometxtb(one,str)
--- else
--- context.sometxta(str)
--- end
--- end
--- end
+ local pattern = Cs((P([[\"]]) + P([["]])/"\\quotedbl{}" + P(1))^0) -- or \char
-local pattern = Cs((P([[\"]]) + P([["]])/"\\quotedbl{}" + P(1))^0) -- or \char
-
-function metapost.escaped(str)
- context(lpegmatch(pattern,str))
-end
-
-implement {
- name = "metapostescaped",
- actions = metapost.escaped,
- arguments = "string"
-}
-
-local simplify = true
-
--- local function strip(n,e)
--- -- get rid of e(0)
--- -- get rid of e(+*)
--- e = gsub(e,"^+","")
--- -- remove leading zeros
--- e = gsub(e,"^([+-]*)0+(%d)","%1%2")
--- if not simplify then
--- -- take it as it is
--- elseif n == "1" then
--- return format("10^{%s}",e)
--- end
--- return format("%s\\times10^{%s}",n,e)
--- end
---
--- function metapost.format_n(fmt,...)
--- fmt = gsub(fmt,"@","%%")
--- local initial, hasformat, final = match(fmt,"^(.-)(%%.-[%a])(.-)$")
--- if hasformat then
--- str = format(fmt,...)
--- str = gsub(str,"(.-)e(.-)$",strip)
--- str = format("%s\\mathematics{%s}%s",initial,str,final)
--- elseif not find(fmt,"%%") then
--- str = format("%"..fmt,...)
--- str = gsub(str,"(.-)e(.-)$",strip)
--- str = format("\\mathematics{%s}",str)
--- end
--- context(str)
--- end
-
--- todo: proper lpeg
-
--- local function strip(n,e)
--- -- get rid of e(0)
--- -- get rid of e(+*)
--- e = gsub(e,"^+","")
--- -- remove leading zeros
--- e = gsub(e,"^([+-]*)0+(%d)","%1%2")
--- if not simplify then
--- -- take it as it is
--- elseif n == "1" then
--- return format("\\mathematics{10^{%s}}",e)
--- end
--- return format("\\mathematics{%s\\times10^{%s}}",n,e)
--- end
---
--- function metapost.format_n(fmt,...)
--- fmt = gsub(fmt,"@","%%")
--- if find(fmt,"%%") then
--- str = format(fmt,...)
--- else -- yes or no
--- str = format("%"..fmt,...)
--- end
--- str = gsub(str,"([%-%+]-[%.%d]+)e([%-%+]-[%.%d]+)",strip)
--- context(str)
--- end
---
--- function metapost.format_v(fmt,str)
--- metapost.format_n(fmt,metapost.untagvariable(str,false))
--- end
+ function metapost.escaped(str)
+ context(lpegmatch(pattern,str))
+ end
--- -- --
+ implement {
+ name = "metapostescaped",
+ actions = metapost.escaped,
+ arguments = "string"
+ }
-local number = C((S("+-")^0 * R("09","..")^1))
-local enumber = number * S("eE") * number
+end
-local cleaner = Cs((P("@@")/"@" + P("@")/"%%" + P(1))^0)
+do
-context = context or { exponent = function(...) print(...) end }
+ local simplify = true
+ local number = C((S("+-")^0 * R("09","..")^1))
+ local enumber = number * S("eE") * number
+ local cleaner = Cs((P("@@")/"@" + P("@")/"%%" + P(1))^0)
-function metapost.format_string(fmt,...)
- context(lpegmatch(cleaner,fmt),...)
-end
-
-function metapost.format_number(fmt,num)
- if not num then
- num = fmt
- fmt = "%e"
+ local function format_string(fmt,...)
+ context(lpegmatch(cleaner,fmt),...)
end
- local number = tonumber(num)
- if number then
- local base, exponent = lpegmatch(enumber,formatters[lpegmatch(cleaner,fmt)](number))
- if base and exponent then
- context.MPexponent(base,exponent)
+
+ local function format_number(fmt,num)
+ if not num then
+ num = fmt
+ fmt = "%e"
+ end
+ local number = tonumber(num)
+ if number then
+ local base, exponent = lpegmatch(enumber,formatters[lpegmatch(cleaner,fmt)](number))
+ if base and exponent then
+ context.MPexponent(base,exponent)
+ else
+ context(number)
+ end
else
- context(number)
+ context(tostring(num))
end
- else
- context(tostring(num))
end
-end
-
--- This is experimental and will change!
-
-function metapost.svformat(fmt,str)
- metapost.format_string(fmt,metapost.untagvariable(str,false))
-end
-function metapost.nvformat(fmt,str)
- metapost.format_number(fmt,metapost.untagvariable(str,false))
-end
-
-implement { name = "metapostformatted", actions = metapost.svformat, arguments = { "string", "string" } }
-implement { name = "metapostgraphformat", actions = metapost.nvformat, arguments = { "string", "string" } }
+ -- This is experimental and will change!
--- kind of new
+ metapost.format_string = format_string
+ metapost.format_number = format_number
-local f_exponent = formatters["\\MPexponent{%s}{%s}"]
+ function metapost.svformat(fmt,str)
+ format_string(fmt,metapost.untagvariable(str,false))
+ end
-local mpformatters = table.setmetatableindex(function(t,k)
- local v = formatters[lpegmatch(cleaner,k)]
- t[k] = v
- return v
-end)
+ function metapost.nvformat(fmt,str)
+ format_number(fmt,metapost.untagvariable(str,false))
+ end
-function metapost.texexp(num,bfmt,efmt)
- local number = tonumber(num)
- if number then
- local base, exponent = lpegmatch(enumber,format("%e",number))
- if base and exponent then
- if bfmt then
- -- base = formatters[lpegmatch(cleaner,bfmt)](base)
- base = mpformatters[bfmt](base)
- else
- base = format("%f",base)
- end
- if efmt then
- -- exponent = formatters[lpegmatch(cleaner,efmt)](exponent)
- exponent = mpformatters[efmt](exponent)
+ local f_exponent = formatters["\\MPexponent{%s}{%s}"]
+
+ -- can be a weak one: mpformatters
+
+ local mpformatters = table.setmetatableindex(function(t,k)
+ local v = formatters[lpegmatch(cleaner,k)]
+ t[k] = v
+ return v
+ end)
+
+ function metapost.texexp(num,bfmt,efmt)
+ local number = tonumber(num)
+ if number then
+ local base, exponent = lpegmatch(enumber,format("%e",number))
+ if base and exponent then
+ if bfmt then
+ -- base = formatters[lpegmatch(cleaner,bfmt)](base)
+ base = mpformatters[bfmt](base)
+ else
+ base = format("%f",base)
+ end
+ if efmt then
+ -- exponent = formatters[lpegmatch(cleaner,efmt)](exponent)
+ exponent = mpformatters[efmt](exponent)
+ else
+ exponent = format("%i",exponent)
+ end
+ return f_exponent(base,exponent)
+ elseif bfmt then
+ -- return formatters[lpegmatch(cleaner,bfmt)](number)
+ return mpformatters[bfmt](number)
else
- exponent = format("%i",exponent)
+ return number
end
- return f_exponent(base,exponent)
- elseif bfmt then
- -- return formatters[lpegmatch(cleaner,bfmt)](number)
- return mpformatters[bfmt](number)
else
- return number
+ return num
end
- else
- return num
end
-end
-
--- not in context a namespace
-
-if _LUAVERSION < 5.2 then
- utilities.strings.formatters.add(formatters,"texexp", [[texexp(...)]], "local texexp = metapost.texexp")
-else
- utilities.strings.formatters.add(formatters,"texexp", [[texexp(...)]], { texexp = metapost.texexp })
-end
--- print(string.formatters["%!3.3!texexp!"](10.4345E30))
--- print(string.formatters["%3!texexp!"](10.4345E30,"%2.3f","%2i"))
--- print(string.formatters["%2!texexp!"](10.4345E30,"%2.3f"))
--- print(string.formatters["%1!texexp!"](10.4345E30))
--- print(string.formatters["%!texexp!"](10.4345E30))
+ implement {
+ name = "metapostformatted",
+ actions = metapost.svformat,
+ arguments = { "string", "string" }
+ }
--- local function test(fmt,n)
--- logs.report("mp format test","fmt: %s, n: %s, result: %s, \\exponent{%s}{%s}",fmt,n,
--- formatters[lpegmatch(cleaner,fmt)](n),
--- lpegmatch(enumber,formatters[lpegmatch(cleaner,fmt)](n))
--- )
--- end
---
--- test("@j","1e-8")
--- test("@j",1e-8)
--- test("@j","1e+8")
--- test("@j","1e-10")
--- test("@j",1e-10)
--- test("@j","1e+10")
--- test("@j","1e-12")
--- test("@j","1e+12")
--- test("@j","1e-0")
--- test("@j","1e+0")
--- test("@j","1")
--- test("@j test","1")
--- test("@j","-1")
--- test("@j","1e-102")
--- test("@1.4j","1e+102")
--- test("@j","1.2e+102")
--- test("@j","1.23e+102")
--- test("@j","1.234e+102")
+ implement {
+ name = "metapostgraphformat",
+ actions = metapost.nvformat,
+ arguments = { "string", "string" }
+ }
-local f_textext = formatters[ [[textext("%s")]] ]
-local f_mthtext = formatters[ [[textext("\mathematics{%s}")]] ]
-local f_exptext = formatters[ [[textext("\mathematics{%s\times10^{%s}}")]] ]
+ if LUAVERSION < 5.2 then
+ utilities.strings.formatters.add(formatters,"texexp", [[texexp(...)]], "local texexp = metapost.texexp")
+ else
+ utilities.strings.formatters.add(formatters,"texexp", [[texexp(...)]], { texexp = metapost.texexp })
+ end
--- local cleaner = Cs((P("\\")/"\\\\" + P("@@")/"@" + P("@")/"%%" + P(1))^0)
+ local f_textext = formatters[ [[textext("%s")]] ]
+ local f_mthtext = formatters[ [[textext("\mathematics{%s}")]] ]
+ local f_exptext = formatters[ [[textext("\mathematics{%s\times10^{%s}}")]] ]
-local mpprint = mp.print
+ local mpprint = mp.print
-function mp.format(fmt,str) -- bah, this overloads mp.format in mlib-lua.lua
- fmt = lpegmatch(cleaner,fmt)
- mpprint(f_textext(formatters[fmt](metapost.untagvariable(str,false))))
-end
+ function mp.format(fmt,str) -- bah, this overloads mp.format in mlib-lua.lua
+ fmt = lpegmatch(cleaner,fmt)
+ mpprint(f_textext(formatters[fmt](metapost.untagvariable(str,false))))
+ end
-function mp.formatted(fmt,...) -- svformat
- fmt = lpegmatch(cleaner,fmt)
- mpprint(f_textext(formatters[fmt](...)))
-end
+ function mp.formatted(fmt,...) -- svformat
+ fmt = lpegmatch(cleaner,fmt)
+ mpprint(f_textext(formatters[fmt](...)))
+ end
-function mp.graphformat(fmt,num) -- nvformat
- fmt = lpegmatch(cleaner,fmt)
- local number = tonumber(num)
- if number then
- local base, exponent = lpegmatch(enumber,number)
- if base and exponent then
- mpprint(f_exptext(base,exponent))
+ function mp.graphformat(fmt,num) -- nvformat
+ fmt = lpegmatch(cleaner,fmt)
+ local number = tonumber(num)
+ if number then
+ local base, exponent = lpegmatch(enumber,number)
+ if base and exponent then
+ mpprint(f_exptext(base,exponent))
+ else
+ mpprint(f_mthtext(num))
+ end
else
- mpprint(f_mthtext(num))
+ mpprint(f_textext(tostring(num)))
end
- else
- mpprint(f_textext(tostring(num)))
end
+
end
diff --git a/tex/context/base/mkiv/meta-tex.mkiv b/tex/context/base/mkiv/meta-tex.mkiv
index 8d24f687c..03ad5ee39 100644
--- a/tex/context/base/mkiv/meta-tex.mkiv
+++ b/tex/context/base/mkiv/meta-tex.mkiv
@@ -11,7 +11,7 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-\registerctxluafile{meta-tex}{1.001}
+\registerctxluafile{meta-tex}{}
\unprotect
diff --git a/tex/context/base/mkiv/metatex.lus b/tex/context/base/mkiv/metatex.lus
deleted file mode 100644
index df7bc1914..000000000
--- a/tex/context/base/mkiv/metatex.lus
+++ /dev/null
@@ -1,9 +0,0 @@
-if not modules then modules = { } end modules ['metatex'] = {
- version = 1.001,
- comment = "companion to metatex.tex",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-return "luat-cod.lua"
diff --git a/tex/context/base/mkiv/metatex.tex b/tex/context/base/mkiv/metatex.tex
index b5f54c4ee..7c8a7ff01 100644
--- a/tex/context/base/mkiv/metatex.tex
+++ b/tex/context/base/mkiv/metatex.tex
@@ -22,144 +22,9 @@
%D A format is generated with the command;
%D
%D \starttyping
-%D luatools --make --compile metatex
+%D mtxrun --script metatex --make
%D \stoptyping
%D
-%D Remark: this is far from complete. We will gradually add
-%D more. Also, it's not yet clean what exactly will be part
-%D of it. This is a prelude to a configureable macro package.
-
-\catcode`\{=1 \catcode`\}=2 \catcode`\#=6
-
-\edef\metatexformat {\jobname}
-\edef\metatexversion{2007.04.03 13:01}
-
-\let\fmtname \metatexformat
-\let\fmtversion\metatexversion
-
-\ifx\normalinput\undefined \let\normalinput\input \fi
-
-\def\loadcorefile#1{\normalinput#1.tex \relax}
-\def\loadmarkfile#1{\normalinput#1.mkiv\relax}
-
-\loadmarkfile{syst-ini}
-
-\ifnum\luatexversion<60 % also change message
- \writestatus{!!!!}{Your luatex binary is too old, you need at least version 0.60!}
- \expandafter\end
-\fi
-
-\newtoks\metatexversiontoks \metatexversiontoks\expandafter{\metatexversion} % at the lua end
-
-\loadmarkfile{syst-pln} % plain tex initializations of internal registers (no further code)
-\loadmarkfile{syst-mes}
-
-\loadmarkfile{luat-cod} %
-\loadmarkfile{luat-bas} %
-\loadmarkfile{luat-lib} %
-
-% needs stripping:
-
-\loadmarkfile{catc-ini} % catcode table management
-\loadmarkfile{catc-act} % active character definition mechanisms
-\loadmarkfile{catc-def} % some generic catcode tables
-\loadmarkfile{catc-ctx} % a couple of context specific tables but expected by later modules
-\loadmarkfile{catc-sym} % some definitions related to \letter
-
-% helpers, maybe less
-
-\loadmarkfile{syst-aux} % a whole lot of auxiliary macros
-%loadmarkfile{syst-lua} % some helpers using lua instead
-%loadmarkfile{syst-con} % some rather basic conversions
-%loadmarkfile{syst-fnt}
-%loadmarkfile{syst-str}
-%loadmarkfile{syst-rtp}
-
-% not needed
-
-% \loadmarkfile{supp-fil}
-% \loadmarkfile{supp-dir}
-
-% characters
-
-\loadmarkfile{char-utf}
-\loadmarkfile{char-ini}
-\loadmarkfile{char-enc} % \registerctxluafile{char-enc}{1.001}
-
-% attributes
-
-\loadmarkfile{attr-ini}
-
-% nodes
-
-\loadmarkfile{node-ini}
-%loadmarkfile{node-fin}
-%loadmarkfile{node-par}
-
-% attributes, not needed:
-
-%loadmarkfile{attr-ini}
-
-% regimes
-
-% \loadmarkfile{regi-ini}
-% \loadcorefile{regi-syn}
-
-% languages
-
-% fonts
-
-% \loadcorefile{enco-ini.mkiv}
-% \loadcorefile{hand-ini.mkiv}
-
-\registerctxluafile{font-ini}{1.001}
-
-\registerctxluafile{node-fnt}{1.001}
-
-\registerctxluafile{font-enc}{1.001}
-\registerctxluafile{font-map}{1.001}
-\registerctxluafile{font-syn}{1.001}
-\registerctxluafile{font-tfm}{1.001}
-\registerctxluafile{font-afm}{1.001}
-\registerctxluafile{font-cid}{1.001}
-\registerctxluafile{font-ott}{1.001}
-\registerctxluafile{font-otf}{1.001}
-\registerctxluafile{font-otb}{1.001}
-\registerctxluafile{font-otn}{1.001}
-\registerctxluafile{font-ota}{1.001}
-\registerctxluafile{font-otp}{1.001}
-\registerctxluafile{font-otc}{1.001}
-%registerctxluafile{font-vf} {1.001}
-\registerctxluafile{font-def}{1.001}
-%registerctxluafile{font-ctx}{1.001}
-\registerctxluafile{font-xtx}{1.001}
-%registerctxluafile{font-fbk}{1.001}
-%registerctxluafile{font-ext}{1.001}
-\registerctxluafile{font-pat}{1.001}
-%registerctxluafile{font-chk}{1.001}
-
-%registerctxluafile{math-ini}{1.001}
-%registerctxluafile{math-dim}{1.001}
-%registerctxluafile{math-ent}{1.001}
-%registerctxluafile{math-ext}{1.001}
-%registerctxluafile{math-vfu}{1.001}
-%registerctxluafile{math-map}{1.001}
-%registerctxluafile{math-noa}{1.001}
-
-\registerctxluafile{task-ini}{1.001}
-
-%registerctxluafile{l-xml}{1.001} % needed for font database
-
-% why not ...
-
-\pdfoutput\plusone
-
-% too
-
-\appendtoks
- \ctxlua{statistics.savefmtstatus("\jobname","\metatexversion","metatex.tex")}% can become automatic
-\to \everydump
-
-% done
-
-\errorstopmode \dump \endinput
+%D For the moment this is a placeholder. Maybe some day ... The old file is kept
+%D as history/metatex/metatex.tex so I can pick up from there if needed.
diff --git a/tex/context/base/mkiv/mlib-ctx.lua b/tex/context/base/mkiv/mlib-ctx.lua
index 96eb27cbd..0ff28c1c4 100644
--- a/tex/context/base/mkiv/mlib-ctx.lua
+++ b/tex/context/base/mkiv/mlib-ctx.lua
@@ -6,12 +6,10 @@ if not modules then modules = { } end modules ['mlib-ctx'] = {
license = "see context related readme files",
}
--- for the moment we have the scanners here but they migh tbe moved to
--- the other modules
-
local type, tostring = type, tostring
local format, concat = string.format, table.concat
local settings_to_hash = utilities.parsers.settings_to_hash
+local formatters = string.formatters
local report_metapost = logs.reporter("metapost")
@@ -20,9 +18,8 @@ local stoptiming = statistics.stoptiming
local mplib = mplib
-metapost = metapost or {}
+metapost = metapost or { }
local metapost = metapost
-
local context = context
local setters = tokens.setters
@@ -87,10 +84,6 @@ function metapost.getextensions(instance,state)
end
end
--- function commands.getmpextensions(instance,state)
--- context(metapost.getextensions(instance,state))
--- end
-
implement {
name = "setmpextensions",
actions = metapost.setextensions,
@@ -139,7 +132,7 @@ implement {
end
}
--- metapost.variables = { } -- to be stacked
+-- metapost.variables = { } -- to be stacked
implement {
name = "mprunvar",
@@ -202,6 +195,39 @@ function metapost.graphic(specification)
metapost.graphic_base_pass(setmpsformat(specification))
end
+function metapost.startgraphic(t)
+ if not t then
+ t = { }
+ end
+ if not t.instance then
+ t.instance = metapost.defaultinstance
+ end
+ if not t.format then
+ t.format = metapost.defaultformat
+ end
+ if not t.method then
+ t.method = metapost.defaultmethod
+ end
+ if not t.definitions then
+ t.definitions = ""
+ end
+ t.data = { }
+ return t
+end
+
+function metapost.stopgraphic(t)
+ if t then
+ t.data = concat(t.data or { },"\n")
+ metapost.graphic(t)
+ t.data = ""
+ end
+end
+
+function metapost.tographic(t,f,s,...)
+ local d = t.data
+ d[#d+1] = s and formatters[f](s,...) or f
+end
+
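+-- A usage sketch (editorial; it relies on the defaults assigned above when no
+-- instance, format or method is given):
+--
+--   local t = metapost.startgraphic { }
+--   metapost.tographic(t,"draw fullcircle scaled %s ;","4cm")
+--   metapost.tographic(t,"draw origin withpen pencircle scaled 2mm ;")
+--   metapost.stopgraphic(t) -- concatenates t.data and passes it to metapost.graphic
+--
+-- tographic formats f with the trailing arguments when s is given and appends f
+-- as-is otherwise.
+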
implement {
name = "mpgraphic",
actions = metapost.graphic,
diff --git a/tex/context/base/mkiv/mlib-ctx.mkiv b/tex/context/base/mkiv/mlib-ctx.mkiv
index a7bb612c8..1f05b6ef8 100644
--- a/tex/context/base/mkiv/mlib-ctx.mkiv
+++ b/tex/context/base/mkiv/mlib-ctx.mkiv
@@ -16,10 +16,10 @@
\writestatus{loading}{MetaPost Library Graphics / Initializations}
-\registerctxluafile{mlib-run}{1.001}
-\registerctxluafile{mlib-ctx}{1.001}
-\registerctxluafile{mlib-lua}{1.001}
-\registerctxluafile{mlib-int}{1.001} % here ?
+\registerctxluafile{mlib-run}{}
+\registerctxluafile{mlib-ctx}{}
+\registerctxluafile{mlib-lua}{}
+\registerctxluafile{mlib-int}{} % here ?
\unprotect
diff --git a/tex/context/base/mkiv/mlib-int.lua b/tex/context/base/mkiv/mlib-int.lua
index 108002929..bd3ba213f 100644
--- a/tex/context/base/mkiv/mlib-int.lua
+++ b/tex/context/base/mkiv/mlib-int.lua
@@ -17,6 +17,8 @@ local mpcolor = attributes.colors.mpcolor
local emwidths = fonts.hashes.emwidths
local exheights = fonts.hashes.exheights
+local mpgetdimen = mp.getdimen
+
function mp.PaperHeight () mpprint(getdimen("paperheight") *factor) end
function mp.PaperWidth () mpprint(getdimen("paperwidth") *factor) end
function mp.PrintPaperHeight () mpprint(getdimen("printpaperheight") *factor) end
@@ -61,10 +63,15 @@ function mp.LayoutColumnWidth () mpprint(getdimen("layoutcolumnwidth") *fact
function mp.SpineWidth () mpprint(getdimen("spinewidth") *factor) end
function mp.PaperBleed () mpprint(getdimen("paperbleed") *factor) end
-function mp.PageNumber () mpprint(getcount("pageno")) end
function mp.RealPageNumber () mpprint(getcount("realpageno")) end
+function mp.LastPageNumber () mpprint(getcount("lastpageno")) end
+
+function mp.PageNumber () mpprint(getcount("pageno")) end
function mp.NOfPages () mpprint(getcount("lastpageno")) end
+function mp.SubPageNumber () mpprint(getcount("subpageno")) end
+function mp.NOfSubPages () mpprint(getcount("lastsubpageno")) end
+
function mp.CurrentColumn () mpprint(getcount("mofcolumns")) end
function mp.NOfColumns () mpprint(getcount("nofcolumns")) end
diff --git a/tex/context/base/mkiv/mlib-lua.lua b/tex/context/base/mkiv/mlib-lua.lua
index 9831efc20..5e6be614b 100644
--- a/tex/context/base/mkiv/mlib-lua.lua
+++ b/tex/context/base/mkiv/mlib-lua.lua
@@ -13,11 +13,12 @@ if not modules then modules = { } end modules ['mlib-lua'] = {
local type, tostring, select, loadstring = type, tostring, select, loadstring
local find, match, gsub, gmatch = string.find, string.match, string.gsub, string.gmatch
-local formatters = string.formatters
-local concat = table.concat
-local lpegmatch = lpeg.match
+local formatters = string.formatters
+local concat = table.concat
+local lpegmatch = lpeg.match
+local lpegpatterns = lpeg.patterns
-local P, S, Ct = lpeg.P, lpeg.S, lpeg.Ct
+local P, S, Ct, Cs, Cc, C = lpeg.P, lpeg.S, lpeg.Ct, lpeg.Cs, lpeg.Cc, lpeg.C
local report_luarun = logs.reporter("metapost","lua")
local report_message = logs.reporter("metapost")
@@ -67,7 +68,7 @@ local f_pair = formatters["(%.16f,%.16f)"]
local f_triplet = formatters["(%.16f,%.16f,%.16f)"]
local f_quadruple = formatters["(%.16f,%.16f,%.16f,%.16f)"]
-local function mpprint(...)
+local function mpprint(...) -- we can optimize for n=1
for i=1,select("#",...) do
local value = select(i,...)
if value ~= nil then
@@ -86,7 +87,44 @@ local function mpprint(...)
end
end
-mp.print = mpprint
+local r = P('%') / "percent"
+ + P('"') / "dquote"
+ + P('\n') / "crlf"
+ -- + P(' ') / "space"
+local a = Cc("&")
+local q = Cc('"')
+local p = Cs(q * (r * a)^-1 * (a * r * (P(-1) + a) + P(1))^0 * q)
+
+local function mpvprint(...) -- variable print
+ for i=1,select("#",...) do
+ local value = select(i,...)
+ if value ~= nil then
+ n = n + 1
+ local t = type(value)
+ if t == "number" then
+ buffer[n] = f_numeric(value)
+ elseif t == "string" then
+ buffer[n] = lpegmatch(p,value)
+ elseif t == "table" then
+ local m = #t
+ if m == 2 then
+ buffer[n] = f_pair(unpack(t))
+ elseif m == 3 then
+ buffer[n] = f_triplet(unpack(t))
+ elseif m == 4 then
+ buffer[n] = f_quadruple(unpack(t))
+ else -- error
+ buffer[n] = ""
+ end
+ else -- boolean or whatever
+ buffer[n] = tostring(value)
+ end
+ end
+ end
+end
+
+mp.print = mpprint
+mp.vprint = mpvprint
-- We had this:
--
@@ -204,7 +242,7 @@ local replacer = lpeg.replacer("@","%%")
function mp.fprint(fmt,...)
n = n + 1
- if not find(fmt,"%%") then
+ if not find(fmt,"%",1,true) then
fmt = lpegmatch(replacer,fmt)
end
buffer[n] = formatters[fmt](...)
@@ -213,12 +251,14 @@ end
local function mpquoted(fmt,s,...)
n = n + 1
if s then
- if not find(fmt,"%%") then
+ if not find(fmt,"%",1,true) then
fmt = lpegmatch(replacer,fmt)
end
- buffer[n] = '"' .. formatters[fmt](s,...) .. '"'
+ -- buffer[n] = '"' .. formatters[fmt](s,...) .. '"'
+ buffer[n] = lpegmatch(p,formatters[fmt](s,...))
elseif fmt then
- buffer[n] = '"' .. fmt .. '"'
+ -- buffer[n] = '"' .. fmt .. '"'
+ buffer[n] = lpegmatch(p,fmt)
else
-- something is wrong
end
@@ -230,8 +270,8 @@ function mp.n(t)
return type(t) == "table" and #t or 0
end
-local whitespace = lpeg.patterns.whitespace
-local newline = lpeg.patterns.newline
+local whitespace = lpegpatterns.whitespace
+local newline = lpegpatterns.newline
local setsep = newline^2
local comment = (S("#%") + P("--")) * (1-newline)^0 * (whitespace - setsep)^0
local value = (1-whitespace)^1 / tonumber
@@ -274,24 +314,6 @@ end
-- endfor ;
-- \stopMPpage
--- function metapost.runscript(code)
--- local f = loadstring(f_code(code))
--- if f then
--- local result = f()
--- if result then
--- local t = type(result)
--- if t == "number" then
--- return f_numeric(result)
--- elseif t == "string" then
--- return result
--- else
--- return tostring(result)
--- end
--- end
--- end
--- return ""
--- end
-
local cache, n = { }, 0 -- todo: when > n then reset cache or make weak
function metapost.runscript(code)
@@ -339,7 +361,7 @@ function metapost.runscript(code)
report_luarun("no result")
end
else
- report_luarun("no result, invalid code")
+ report_luarun("no result, invalid code: %s",code)
end
return ""
end
@@ -479,13 +501,15 @@ function mp.prefix(str)
mpquoted(match(str,"^(.-)[%d%[]") or str)
end
-function mp.dimensions(str)
- local n = 0
- for s in gmatch(str,"%[?%-?%d+%]?") do --todo: lpeg
- n = n + 1
- end
- mpprint(n)
-end
+-- function mp.dimension(str)
+-- local n = 0
+-- for s in gmatch(str,"%[?%-?%d+%]?") do --todo: lpeg
+-- n = n + 1
+-- end
+-- mpprint(n)
+-- end
+
+mp.dimension = lpeg.counter(P("[") * lpegpatterns.integer * P("]") + lpegpatterns.integer,mpprint)
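+-- presumably counting the bracketed indices in a variable name, so "a[1][2]b"
+-- feeds 2 to metapost, as the commented-out helper above did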
-- faster and okay as we don't have many variables but probably only
-- basename makes sense and even then it's not called that often
@@ -647,3 +671,46 @@ do
end
end
+
+do
+
+ local mpvprint = mp.vprint
+
+ local stores = { }
+
+ function mp.newstore(name)
+ stores[name] = { }
+ end
+
+ function mp.disposestore(name)
+ stores[name] = nil
+ end
+
+ function mp.tostore(name,key,value)
+ stores[name][key] = value
+ end
+
+ function mp.fromstore(name,key)
+ mpvprint(stores[name][key]) -- type specific
+ end
+
+ interfaces.implement {
+ name = "getMPstored",
+ arguments = { "string", "string" },
+ actions = function(name,key)
+ context(stores[name][key])
+ end
+ }
+
+end
+
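+-- A sketch of intended use at the metapost end, via the metafun lua.mp interface
+-- (editorial example, not taken from a test file):
+--
+--   lua.mp.newstore("mine") ;
+--   lua.mp.tostore("mine","scale",2.5) ;
+--   draw fullcircle scaled (lua.mp.fromstore("mine","scale") * cm) ;
+--
+-- at the tex end the implemented getMPstored fetches the same value back.
+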
+do
+
+ local mpprint = mp.print
+ local texmodes = tex.modes
+
+ function mp.processingmode(s)
+ mpprint(tostring(texmodes[s]))
+ end
+
+end
diff --git a/tex/context/base/mkiv/mlib-pdf.lua b/tex/context/base/mkiv/mlib-pdf.lua
index 0c2945316..75f810fb3 100644
--- a/tex/context/base/mkiv/mlib-pdf.lua
+++ b/tex/context/base/mkiv/mlib-pdf.lua
@@ -216,13 +216,14 @@ end
local function flushnormalpath(path, t, open)
local pth, ith, nt
+ local length = #path
if t then
nt = #t
else
t = { }
nt = 0
end
- for i=1,#path do
+ for i=1,length do
nt = nt + 1
pth = path[i]
if not ith then
@@ -242,7 +243,7 @@ local function flushnormalpath(path, t, open)
else
t[nt] = f_l(one.x_coord,one.y_coord)
end
- elseif #path == 1 then
+ elseif length == 1 then
-- special case .. draw point
local one = path[1]
nt = nt + 1
@@ -253,6 +254,7 @@ end
local function flushconcatpath(path, t, open)
local pth, ith, nt
+ local length = #path
if t then
nt = #t
else
@@ -261,7 +263,7 @@ local function flushconcatpath(path, t, open)
end
nt = nt + 1
t[nt] = f_cm(sx,rx,ry,sy,tx,ty)
- for i=1,#path do
+ for i=1,length do
nt = nt + 1
pth = path[i]
if not ith then
@@ -285,7 +287,7 @@ local function flushconcatpath(path, t, open)
else
t[nt] = f_l(mpconcat(one.x_coord,one.y_coord))
end
- elseif #path == 1 then
+ elseif length == 1 then
-- special case .. draw point
nt = nt + 1
local one = path[1]
@@ -576,7 +578,7 @@ function metapost.flush(result,flusher,askedfig)
result[#result+1] = open and "S" or "h S"
end
elseif objecttype == "both" then
- result[#result+1] = evenodd and "h B*" or "h B"-- B* = eo -- b includes closepath
+ result[#result+1] = evenodd and "h B*" or "h B" -- B* = eo -- b includes closepath
end
end
if transformed then
@@ -611,7 +613,7 @@ function metapost.flush(result,flusher,askedfig)
elseif objecttype == "outline" then
result[#result+1] = open and "S" or "h S"
elseif objecttype == "both" then
- result[#result+1] = evenodd and "h B*" or "h B"-- B* = eo -- b includes closepath
+ result[#result+1] = evenodd and "h B*" or "h B" -- B* = eo -- b includes closepath
end
if transformed then
result[#result+1] = "Q"
diff --git a/tex/context/base/mkiv/mlib-pdf.mkiv b/tex/context/base/mkiv/mlib-pdf.mkiv
index 5875c7635..147b67f74 100644
--- a/tex/context/base/mkiv/mlib-pdf.mkiv
+++ b/tex/context/base/mkiv/mlib-pdf.mkiv
@@ -16,7 +16,7 @@
% We use bit more code that needed because we want to limit the
% amount of boxing.
-\registerctxluafile{mlib-pdf}{1.001}
+\registerctxluafile{mlib-pdf}{}
%D Some code is shared between MPLIB and MPS. The following variables
%D are also available for introspection and other purposes.
@@ -31,7 +31,7 @@
\ifdefined\MPbox \else \newbox \MPbox \fi
-\def\setMPboundingbox#1#2#3#4% at some point we might pass them as base or scaled points
+\unexpanded\def\setMPboundingbox#1#2#3#4% at some point we might pass them as base or scaled points
{\global\MPllx #1\onebasepoint
\global\MPlly #2\onebasepoint
\global\MPurx #3\onebasepoint
@@ -39,7 +39,7 @@
\global\MPwidth \dimexpr\MPurx-\MPllx\relax
\global\MPheight\dimexpr\MPury-\MPlly\relax}
-\def\resetMPboundingbox
+\unexpanded\def\resetMPboundingbox
{\global\MPwidth \zeropoint
\global\MPheight\zeropoint
\global\MPllx \zeropoint
@@ -47,14 +47,14 @@
\global\MPurx \zeropoint
\global\MPury \zeropoint}
-\def\repositionMPboxindeed
+\unexpanded\def\repositionMPboxindeed
{\setbox\MPbox\hpack\bgroup
\kern-\MPllx
\raise-\MPlly
\box\MPbox
\egroup}
-\def\repositionMPbox
+\unexpanded\def\repositionMPbox
{\ifzeropt\MPllx
\ifzeropt\MPlly
% okay
@@ -65,7 +65,7 @@
\repositionMPboxindeed
\fi}
-\def\finalizeMPbox
+\unexpanded\def\finalizeMPbox
{\repositionMPbox
\setbox\MPbox\vpack to \MPheight\bgroup
\vfill
@@ -76,7 +76,7 @@
\wd\MPbox\MPwidth
\ht\MPbox\MPheight}
-\def\MPtextext#1#2#3#4#5% beware: we use a different method now (see mlib-pps)
+\unexpanded\def\MPtextext#1#2#3#4#5% beware: we use a different method now (see mlib-pps)
{\begingroup
\setbox\MPbox\hbox{\font\temp=#1\space at #2\onebasepoint \let\c\char \temp #3}% text
\MPllx-#4\onebasepoint
@@ -88,9 +88,9 @@
% MPLIB specific:
-\def\MPLIBtoPDF{\clf_mpflushliteral}
+\def\MPLIBtoPDF{\clf_mpflushliteral} % expanded
-\def\startMPLIBtoPDF#1#2#3#4%
+\unexpanded\def\startMPLIBtoPDF#1#2#3#4%
{\meta_process_graphic_figure_start
\dostarttagged\t!mpgraphic\empty
\naturalhpack attr \imageattribute \plusone \bgroup
@@ -100,7 +100,7 @@
% \forgetall % already done elsewhere
\noindent} % forces the proper cm in the backend
-\def\stopMPLIBtoPDF
+\unexpanded\def\stopMPLIBtoPDF
{\egroup
\finalizeMPbox
\box\MPbox
@@ -128,8 +128,8 @@
\let\normalstartMPLIBtoPDF\startMPLIBtoPDF
\let\normalstopMPLIBtoPDF \stopMPLIBtoPDF
-\def\directstartMPLIBtoPDF{\startTEXpage\normalstartMPLIBtoPDF}
-\def\directstopMPLIBtoPDF {\normalstopMPLIBtoPDF\stopTEXpage}
+\unexpanded\def\directstartMPLIBtoPDF{\startTEXpage\normalstartMPLIBtoPDF}
+\unexpanded\def\directstopMPLIBtoPDF {\normalstopMPLIBtoPDF\stopTEXpage}
\unexpanded\def\directMPgraphic
{\dodoublegroupempty\mlib_direct_graphic}
diff --git a/tex/context/base/mkiv/mlib-pps.lua b/tex/context/base/mkiv/mlib-pps.lua
index 9fc8fec35..bb5ce31e5 100644
--- a/tex/context/base/mkiv/mlib-pps.lua
+++ b/tex/context/base/mkiv/mlib-pps.lua
@@ -7,7 +7,7 @@ if not modules then modules = { } end modules ['mlib-pps'] = {
}
local format, gmatch, match, split = string.format, string.gmatch, string.match, string.split
-local tonumber, type, unpack = tonumber, type, unpack
+local tonumber, type, unpack, next = tonumber, type, unpack, next
local round, sqrt, min, max = math.round, math.sqrt, math.min, math.max
local insert, remove, concat = table.insert, table.remove, table.concat
local Cs, Cf, C, Cg, Ct, P, S, V, Carg = lpeg.Cs, lpeg.Cf, lpeg.C, lpeg.Cg, lpeg.Ct, lpeg.P, lpeg.S, lpeg.V, lpeg.Carg
diff --git a/tex/context/base/mkiv/mlib-pps.mkiv b/tex/context/base/mkiv/mlib-pps.mkiv
index a2eb44826..c9d181bf9 100644
--- a/tex/context/base/mkiv/mlib-pps.mkiv
+++ b/tex/context/base/mkiv/mlib-pps.mkiv
@@ -13,7 +13,7 @@
\unprotect
-\registerctxluafile{mlib-pps}{1.001}
+\registerctxluafile{mlib-pps}{}
%D Todo: catch nested graphics like external figures with dummies.
diff --git a/tex/context/base/mkiv/mlib-run.lua b/tex/context/base/mkiv/mlib-run.lua
index 93ce1fec2..18bc7e4da 100644
--- a/tex/context/base/mkiv/mlib-run.lua
+++ b/tex/context/base/mkiv/mlib-run.lua
@@ -29,7 +29,7 @@ approach is way faster than an external and processing time
nears zero.
--ldx]]--
-local type, tostring, tonumber = type, tostring, tonumber
+local type, tostring, tonumber, next = type, tostring, tonumber, next
local gsub, match, find = string.gsub, string.match, string.find
local striplines = utilities.strings.striplines
local concat, insert, remove = table.concat, table.insert, table.remove
diff --git a/tex/context/base/mkiv/mtx-context-arrange.tex b/tex/context/base/mkiv/mtx-context-arrange.tex
index a3e6e9db3..e970ac450 100644
--- a/tex/context/base/mkiv/mtx-context-arrange.tex
+++ b/tex/context/base/mkiv/mtx-context-arrange.tex
@@ -27,8 +27,8 @@
% --printformat : 2UP, etc
% --paperformat=spec : paper*print or paperxprint
%
-% example: context --extra=arrange --printformat=2UP --paperformat=A4*A3,landscape myfile
-%
+% example: context --extra=arrange --printformat=2UP --paperformat=A4*A3,landscape myfile
+% context --extra=arrange --printformat=xy --paperformat=A4*A2 --nx=2 --ny=2 myfile
% end help
\input mtx-context-common.tex
@@ -79,6 +79,20 @@
local printformat = document.arguments.printformat or ""
if printformat == "" then
printformat = "normal"
+ elseif string.find(printformat,"xy") then
+ if false then
+ context.setuplayout {
+ nx = document.arguments.nx or 1,
+ ny = document.arguments.ny or 1,
+ }
+ printformat = "XY,\\v!rotated"
+ else
+ context.setuppaper {
+ nx = document.arguments.nx or 1,
+ ny = document.arguments.ny or 1,
+ }
+ printformat = "XY"
+ end
elseif string.find(printformat,".*up") then
printformat = "2UP,\\v!rotated"
elseif string.find(printformat,".*down") then
@@ -107,7 +121,7 @@
local textwidth = arguments.textwidth or "0cm"
for i=1,noffiles do
local filename = files[i]
- if not string.find(filename,"^mtx%-context%-") then
+ if not string.find(file.basename(filename),"^mtx%-context%-") then
context.insertpages (
{ filename },
{ emptypages },
diff --git a/tex/context/base/mkiv/mtx-context-compare.tex b/tex/context/base/mkiv/mtx-context-compare.tex
new file mode 100644
index 000000000..ffa744013
--- /dev/null
+++ b/tex/context/base/mkiv/mtx-context-compare.tex
@@ -0,0 +1,113 @@
+%D \module
+%D [ file=mtx-context-compare,
+%D version=2015.07.14,
+%D title=\CONTEXT\ Extra Trickery,
+%D subtitle=Compare Files,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+% begin help
+%
+% usage: context --extra=compare [options] file-1 file-2
+%
+% example: context --extra=compare file1.pdf file-2.pdf
+%
+% end help
+
+\input mtx-context-common.tex
+
+\starttext
+
+\starttexdefinition unexpanded ShowBoth #1#2#3
+ \startTEXpage
+ \startoverlay
+ {\externalfigure[#1][page=#3]}
+ {\externalfigure[#2][page=#3]}
+ \stopoverlay
+ \stopTEXpage
+\stoptexdefinition
+
+\starttexdefinition unexpanded ShowPage #1#2
+ \startTEXpage
+ \externalfigure[#1][page=#2]
+ \stopTEXpage
+\stoptexdefinition
+
+
+\startluacode
+
+local report = logs.reporter("compare")
+
+local fileone = document.files[1] or ""
+local filetwo = document.files[2] or ""
+
+if fileone == "" or filetwo == "" then
+ report("provide two filenames")
+ os.exit()
+end
+
+if not lfs.isfile(fileone) then
+ report("unknown file %a",fileone)
+ os.exit()
+end
+
+if not lfs.isfile(filetwo) then
+ report("unknown file %a",filetwo)
+ os.exit()
+end
+
+local function check(name)
+ local fig = figures.push { name = name }
+ figures.identify()
+ figures.check()
+ local used = fig.used
+ figures.pop()
+ return used
+end
+
+local one = check(fileone)
+local two = check(filetwo)
+
+if not one then
+ report("invalid file %a",fileone)
+ os.exit()
+end
+
+if not two then
+ report("invalid file %a",filetwo)
+ os.exit()
+end
+
+local n_one = tonumber(one.pages)
+local n_two = tonumber(two.pages)
+
+if not n_one or not n_two then
+    report("cannot determine the number of pages (%s vs %s)",n_one or "?",n_two or "?")
+    os.exit()
+elseif n_one ~= n_two then
+    report("files have different nofpages (%s vs %s)",n_one,n_two)
+end
+
+if n_one > n_two then
+ for i=1,n_two do
+ context.ShowBoth(fileone,filetwo,i)
+ end
+ for i=n_two+1,n_one do
+ context.ShowPage(fileone,i)
+ end
+else
+ for i=1,n_one do
+ context.ShowBoth(fileone,filetwo,i)
+ end
+ for i=n_one+1,n_two do
+ context.ShowPage(filetwo,i)
+ end
+end
+
+\stopluacode
+
+\stoptext
+
+\endinput
diff --git a/tex/context/base/mkiv/mtx-context-domotica.tex b/tex/context/base/mkiv/mtx-context-domotica.tex
index 62e6e8786..83562ee30 100644
--- a/tex/context/base/mkiv/mtx-context-domotica.tex
+++ b/tex/context/base/mkiv/mtx-context-domotica.tex
@@ -33,6 +33,36 @@
%
% end help
+%D In case one wonders what domotica has to do with ConTeXt, here is the short
+%D story. One day I'll wrap up a long one.
+%D
+%D After years of keeping an eye on developments and techniques and being somewhat
+%D disappointed by experiments, I decided to settle on a local approach for simple
+%D domotica (criteria are: stability, full open source, decent scripting, future
+%D safe). Eventually I decided to buy a few (overpriced) hue zigbee hubs: one
+%D private and one for the office, so that I could create different lighting
+%D setups, automatically turn lights on and off, etc. Unfortunately
+%D those hubs are rather limited in functionality and performance, which is
+%D surprising for an otherwise mature product. So (we're speaking mid 2015) I wrote
+%D a couple of scripts in \LUA\ that would do the real magic, and only use the hub
+%D for controlling the individual lights, buttons and sensors. That way I could
+%D create complex arrangements (think of setups for working, reading, talking,
+%D either of not in parts or rooms) driven by the available buttons and motion
+%D sensors. I really needed multiple sensors and buttons per room, something (again
+%D surprisingly) not supported by the hub at that time. It seems that more than a
+%D year later functionality that I needed and wrote gets added stepwise to the hub:
+%D multiple sensors, multiple use of buttons, etc. Compared to free \TEX\
+%D developments such commercial products evolve slow.
+%D
+%D In addition to these hubs I bought some zwave devices for controlling heating and
+%D a few rf radio things for sunshades. For zwave I used the same approach: buy a
+%D decent hub (the nice popp hub) and control it via \LUA. In fact, I can now use
+%D one set of scripts to control a mix of technologies. However, when programming
+%D the lot, one needs to have an overview of devices and that is where this module
+%D comes into view. In fact, \LUATEX\ was already in view as I wrote the scripts in
+%D \LUA, using the \CONTEXT\ helper libraries. And the lot runs on a small low
+%D power (<10W) fitlet using stock \LUATEX\ as \LUA\ engine.
+
% --pattern="e:/domotica/open-zwave/open-zwave-master/config/**.xml"
\input mtx-context-common.tex
diff --git a/tex/context/base/mkiv/mtx-context-listing.tex b/tex/context/base/mkiv/mtx-context-listing.tex
index 41e468e1f..f7c3d2868 100644
--- a/tex/context/base/mkiv/mtx-context-listing.tex
+++ b/tex/context/base/mkiv/mtx-context-listing.tex
@@ -97,12 +97,15 @@
context.usemodule { "scite" }
end
- if #document.files > 0 then
+ local done = false
+ local files = document.files
+
+ if #files > 0 then
if document.arguments.sort then
- table.sort(document.files)
+ table.sort(files)
end
- for i=1,#document.files do
- local filename = document.files[i]
+ for i=1,#files do
+ local filename = files[i]
if not string.find(filename,"^mtx%-context%-") then
local pretty = document.arguments.pretty
if pretty == true then
@@ -129,9 +132,12 @@
else
context.typefile(filename)
end
+ done = true
end
end
- else
+ end
+
+ if not done then
context("no files given")
end
diff --git a/tex/context/base/mkiv/mtx-context-meaning.tex b/tex/context/base/mkiv/mtx-context-meaning.tex
new file mode 100644
index 000000000..88b81ef04
--- /dev/null
+++ b/tex/context/base/mkiv/mtx-context-meaning.tex
@@ -0,0 +1,87 @@
+%D \module
+%D [ file=mtx-context-meaning,
+%D version=2018.01.12,
+%D title=\CONTEXT\ Extra Trickery,
+%D subtitle=Show Meanings,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+% begin help
+%
+% usage: context --extra=meaning [options] list-of-commands
+%
+% --pattern : match given pattern
+%
+% example: context --extra=meaning --pattern=*paper*
+% context --extra=meaning setuplayout
+% context --extra=meaning setup_layout
+% context --extra=meaning --once --noconsole --nostatistics setuplayout
+% mtxrun --silent --script context --extra=meaning --once --noconsole --nostatistics setuplayout
+% mtxrun --script interface --meaning setuplayout
+%
+% end help
+
+\input mtx-context-common.tex
+
+\setupbodyfont
+ [dejavu]
+
+\def\showmeaning#1%
+ {\ctxlua{logs.pushtarget("both")}%
+ \writestatus{meaning}{\strippedcsname#1}%
+ \writestring{}%
+ \writestring{\expandafter\meaning\begincsname#1\endcsname}%
+ \ctxlua{logs.poptarget()}}
+
+\starttext
+
+\usemodule[setups-macros]
+
+\startluacode
+ local h = tex.hashtokens() -- not used below: showmeaning builds its own list via interfaces.macros.collect
+
+ local function showmeaning(str)
+ local h = interfaces.macros.collect(str)
+ context("pattern: ")
+ context.type(str)
+ context.blank()
+ for i=1,#h do
+ local hi = h[i]
+ context.type(hi)
+ context.par()
+ context.showmeaning(hi)
+ end
+ context.page()
+ end
+
+ local done = false
+ local pattern = document.arguments.pattern
+
+ if pattern then
+ pattern = { pattern }
+ else
+ pattern = document.files
+ end
+
+ if type(pattern) == "table" then
+ table.sort(pattern)
+ for i=1,#pattern do
+ local p = pattern[i]
+ if not string.find(p,"^mtx%-context%-") then
+ done = true
+ showmeaning(p)
+ end
+ end
+ end
+
+ if not done then
+ context("no search pattern given")
+ end
+\stopluacode
+
+\stoptext
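
The lookup above delegates the real work to interfaces.macros.collect, which turns a
pattern into a list of matching macro names that the loop then typesets one by one. A
rough, self-contained sketch of that collection step; the candidate list and the helper
name are made up for illustration:

    -- a rough sketch of pattern based collection; in the module the candidates
    -- come from interfaces.macros.collect, here the list is made up
    local function collectbypattern(names,pattern)
        local found = { }
        for i=1,#names do
            local name = names[i]
            if string.find(name,pattern) then
                found[#found+1] = name
            end
        end
        table.sort(found)
        return found
    end

    local hits = collectbypattern({ "setuplayout", "setuppapersize", "setupbodyfont" }, "paper")
    print(table.concat(hits," ")) -- setuppapersize
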
diff --git a/tex/context/base/mkiv/mtx-context-module.tex b/tex/context/base/mkiv/mtx-context-module.tex
new file mode 100644
index 000000000..f8b6d0a54
--- /dev/null
+++ b/tex/context/base/mkiv/mtx-context-module.tex
@@ -0,0 +1,166 @@
+%D \module
+%D [ file=mtx-context-module,
+%D version=2018.02.24, % very old stuff, now also as extra
+%D title=\CONTEXT\ Extra Trickery,
+%D subtitle=Module Typesetting,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+%D This is a \TEXEXEC\ feature that has been moved to \MKIV.
+
+% begin help
+%
+% usage: context --extra=module [options] list-of-files
+%
+% end help
+
+\input mtx-context-common.tex
+
+\usemodule[scite]
+\usemodule[module-basic]
+
+\dontcomplain
+
+\starttext
+
+\startluacode
+ local find, gsub, match, sub = string.find, string.gsub, string.match, string.sub
+ local formatters, strip, splitlines, is_empty = string.formatters, string.strip, string.splitlines, string.is_empty
+
+ local types = {
+ mkiv = "tex",
+ mkii = "tex",
+ cld = "lua",
+ lfg = "lua",
+ mpiv = "mp",
+ mpii = "mp",
+ }
+
+ local function process(inpname,filetype)
+ local data = io.loaddata(inpname)
+ if data and data ~= "" then
+ local result = { }
+ local skiplevel = 0
+ local indocument = false
+ local indefinition = false
+ local started = false
+ local settings = formatters["type=%s"](filetype)
+ local preamble, n = lpeg.match(lpeg.Cs((1-lpeg.patterns.newline^2)^1) * lpeg.Cp(),data)
+ local r = 0
+ if preamble then
+ preamble = match(preamble,"\\module.-%[(.-)%]")
+ if preamble then
+ preamble = gsub(preamble,"%%D *","")
+ preamble = gsub(preamble,"%%(.-)[\n\r]","")
+ preamble = gsub(preamble,"[\n\r]","")
+ preamble = strip(preamble)
+ settings = formatters["%s,%s"](settings,preamble)
+ data = sub(data,n,#data)
+ end
+ end
+ local lines = splitlines(data)
+ r = r + 1 ; result[r] = formatters["\\startmoduledocumentation[%s]"](settings)
+ for i=1,#lines do
+ local line = lines[i]
+ if find(line,"^%%D ") or find(line,"^%%D$") then
+ if skiplevel == 0 then
+ local someline = #line < 3 and "" or sub(line,4,#line)
+ if indocument then
+ r = r + 1 ; result[r] = someline
+ else
+ if indefinition then
+ r = r + 1 ; result[r] = "\\stopdefinition"
+ indefinition = false
+ end
+ if not indocument then
+ r = r + 1 ; result[r] = "\\startdocumentation"
+ end
+ r = r + 1 ; result[r] = someline
+ indocument = true
+ end
+ end
+ elseif find(line,"^%%M ") or find(line,"^%%M$") then
+ if skiplevel == 0 then
+ local someline = (#line < 3 and "") or sub(line,4,#line)
+ r = r + 1 ; result[r] = someline
+ end
+ elseif find(line,"^%%S B") then
+ skiplevel = skiplevel + 1
+ elseif find(line,"^%%S E") then
+ skiplevel = skiplevel - 1
+ elseif find(line,"^%%") then
+ -- nothing
+ elseif skiplevel == 0 then
+ inlocaldocument = indocument
+ inlocaldocument = false -- the second assignment wins, so local documentation mode stays off
+ local someline = line
+ if indocument then
+ r = r + 1 ; result[r] = "\\stopdocumentation"
+ indocument = false
+ end
+ if indefinition then
+ if is_empty(someline) then
+ r = r + 1 ; result[r] = "\\stopdefinition"
+ indefinition = false
+ else
+ r = r + 1 ; result[r] = someline
+ end
+ elseif not is_empty(someline) then
+ r = r + 1 ; result[r] = "\\startdefinition"
+ indefinition = true
+ if inlocaldocument then
+ -- nothing
+ else
+ r = r + 1 ; result[r] = someline
+ end
+ end
+ end
+ end
+ if indocument then
+ r = r + 1 ; result[r] = "\\stopdocumentation"
+ end
+ if indefinition then
+ r = r + 1 ; result[r] = "\\stopdefinition"
+ end
+ r = r + 1 ; result[r] = "\\stopmoduledocumentation"
+ result = table.concat(result,"\r")
+ buffers.assign("module",result)
+ context.getbuffer { "module" }
+ end
+ end
+
+ local pattern = document.arguments.pattern
+
+ if pattern then
+ document.files = dir.glob(pattern)
+ end
+
+ local done = false
+ local files = document.files
+
+ if #files > 0 then
+ if document.arguments.sort then
+ table.sort(files)
+ end
+ for i=1,#files do
+ local filename = files[i]
+ if not find(filename,"^mtx%-context%-") then
+ local suffix = file.extname(filename) or ""
+ process(filename,types[suffix] or suffix)
+ done = true
+ end
+ end
+ end
+
+ if not done then
+ context("no files given")
+ end
+
+\stopluacode
+
+\stoptext
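
The process function above is in essence a small state machine: %D lines become
documentation blocks, remaining source lines become definition blocks, %M lines pass
through, and %S B/%S E brackets mark regions to skip. A much reduced sketch of just the
%D classification (preamble handling, %M lines and %S skipping are left out), assuming
plain Lua strings as input:

    -- a much reduced sketch of the classification done in process(); the
    -- \module preamble, %M lines and %S skip brackets are left out
    local find, sub = string.find, string.sub

    local function wrap(lines)
        local result, r = { }, 0
        local indoc, indef = false, false
        for i=1,#lines do
            local line = lines[i]
            if find(line,"^%%D ") or find(line,"^%%D$") then
                if indef then
                    r = r + 1 ; result[r] = "\\stopdefinition" ; indef = false
                end
                if not indoc then
                    r = r + 1 ; result[r] = "\\startdocumentation" ; indoc = true
                end
                r = r + 1 ; result[r] = (#line < 3 and "") or sub(line,4,#line)
            elseif find(line,"^%%") then
                -- other comment lines are dropped
            else
                if indoc then
                    r = r + 1 ; result[r] = "\\stopdocumentation" ; indoc = false
                end
                if indef and line == "" then
                    r = r + 1 ; result[r] = "\\stopdefinition" ; indef = false
                elseif line ~= "" then
                    if not indef then
                        r = r + 1 ; result[r] = "\\startdefinition" ; indef = true
                    end
                    r = r + 1 ; result[r] = line
                end
            end
        end
        if indoc then r = r + 1 ; result[r] = "\\stopdocumentation" end
        if indef then r = r + 1 ; result[r] = "\\stopdefinition"    end
        return result
    end

    print(table.concat(wrap { "%D Some explanation.", "\\def\\foo{bar}" },"\n"))
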
diff --git a/tex/context/base/mkiv/mult-aux.lua b/tex/context/base/mkiv/mult-aux.lua
index 353b5e69c..bcd5ae5be 100644
--- a/tex/context/base/mkiv/mult-aux.lua
+++ b/tex/context/base/mkiv/mult-aux.lua
@@ -7,6 +7,7 @@ if not modules then modules = { } end modules ['mult-aux'] = {
}
local find = string.find
+local next = next
interfaces.namespaces = interfaces.namespaces or { }
local namespaces = interfaces.namespaces
diff --git a/tex/context/base/mkiv/mult-aux.mkiv b/tex/context/base/mkiv/mult-aux.mkiv
index a64e09305..96609c4f0 100644
--- a/tex/context/base/mkiv/mult-aux.mkiv
+++ b/tex/context/base/mkiv/mult-aux.mkiv
@@ -25,11 +25,13 @@
\writestatus{loading}{ConTeXt Multilingual Macros / Helpers}
-\registerctxluafile{mult-aux}{1.001}
+\registerctxluafile{mult-aux}{}
\unprotect
-\edef\??empty{\Uchar25} \letvalue{\Uchar25}\empty % cancel: dec:24 hex:18
+\edef\??empty{\Uchar25} \letvalue{\Uchar25}\empty % cancel: dec:24 hex:18
+
+%edef\s!parent{\Uchar29} \letvalue{\Uchar29}\empty % + inlining is ugly, a tiny bit faster, but negligible on a run
%D \starttyping
%D \unprotect
@@ -209,39 +211,22 @@
%
% \def\currenttest{oeps} \edef\hans{\detokenizedtestparameter{bagger}}\meaning\hans\par
% \def\currenttest{oeps} \edef\hans{\detokenizedtestparameter{reggab}}\meaning\hans\par
+%
+% slower: \def#3##1{\csname\ifcsname#1#2:##1\endcsname\expandafter\csstring\lastnamedcs\else\expandafter#5\csname#1#2:\s!parent\endcsname{##1}\fi\endcsname}%
+%
+% pre-expansion can be a bit faster but has hardly any effect on a normal run so let's go for
+% saving some memory
\def\mult_interfaces_detokenize{\expandafter\expandafter\expandafter\detokenize\expandafter\expandafter\expandafter}
-\unexpanded\def\mult_interfaces_install_parameter_handler#1#2#3#4#5#6#7#8#9% inlining \csname*\endcsname is more efficient (#3 and #6 only)
- {\ifx#2\relax\let#2\empty\fi % it is hardly faster but produces less expansion tracing
- %\def#3##1{\csname#4{#1#2}{##1}\endcsname}%
+\unexpanded\def\mult_interfaces_install_parameter_handler#1#2#3#4#5#6#7#8% inlining \csname*\endcsname is more efficient (#3 and #6 only)
+ {\ifx#2\relax\let#2\empty\fi % it is hardly faster but produces less expansion tracing
\def#3##1{\csname\ifcsname#1#2:##1\endcsname#1#2:##1\else\expandafter#5\csname#1#2:\s!parent\endcsname{##1}\fi\endcsname}%
\def#4##1##2{\ifcsname##1:##2\endcsname##1:##2\else\expandafter#5\csname##1:\s!parent\endcsname{##2}\fi}%
- %\def#5##1##2{\ifx##1\relax\s!empty\else#4{##1}{##2}\fi}% is {} needed around ##1 ?
\def#5##1##2{\ifx##1\relax\??empty\else#4{##1}{##2}\fi}% is {} needed around ##1 ?
\def#6##1##2{\csname\ifcsname#1##1:##2\endcsname#1##1:##2\else\expandafter#5\csname#1##1:\s!parent\endcsname{##2}\fi\endcsname}%
\def#7##1{\detokenize\expandafter\expandafter\expandafter{\csname#1#2:##1\endcsname}}% always root, no backtrack
- % \def#7##1{\mult_interfaces_detokenize{\csname#4{#1#2}{##1}\endcsname}}% compact version
- % \def#7##1{\mult_interfaces_detokenize{\csname\ifcsname#1#2:##1\endcsname#1#2:##1\else\expandafter#5\csname#1#2:\s!parent\endcsname{##1}\fi\endcsname}}%
-%% \def#8##1{\csname\ifcsname#1#2:##1\endcsname#1#2:##1\else\s!empty\fi\endcsname}%
- \def#8##1{\begincsname#1#2:##1\endcsname}%
- \def#9##1{\csname#1#2:##1\endcsname}} % can go when we use \begincsname
-
-% pre-expansion can be a bit faster but handly any effect on a normal run so let's go for
-% saving some memory
-%
-% \unexpanded\def\mult_interfaces_install_parameter_handler#1#2#3#4#5#6#7#8#9% inlining \csname*\endcsname is more efficient (#3 and #6 only)
-% {\ifx#2\relax\let#2\empty\fi % it is hardly faster but produces less expansion tracing
-% %\def#3##1{\csname#4{#1#2}{##1}\endcsname}%
-% \edef#3##1{\noexpand\csname\noexpand\ifcsname#1\noexpand#2:##1\endcsname#1\noexpand#2:##1\noexpand\else\noexpand\expandafter\noexpand#5\noexpand\csname#1\noexpand#2:\s!parent\endcsname{##1}\noexpand\fi\endcsname}%
-% \edef#4##1##2{\noexpand\ifcsname##1:##2\endcsname##1:##2\noexpand\else\noexpand\expandafter\noexpand#5\noexpand\csname##1:\s!parent\endcsname{##2}\noexpand\fi}%
-% \def #5##1##2{\ifx##1\relax\s!empty\else#4{##1}{##2}\fi}% is {} needed around ##1 ?
-% \edef#6##1##2{\noexpand\csname\noexpand\ifcsname#1##1:##2\endcsname#1##1:##2\noexpand\else\noexpand\expandafter\noexpand#5\noexpand\csname#1##1:\s!parent\endcsname{##2}\noexpand\fi\endcsname}%
-% \def#7##1{\detokenize\expandafter\expandafter\expandafter{\csname#1#2:##1\endcsname}}% always root, no backtrack
-% % \def#7##1{\mult_interfaces_detokenize{\csname#4{#1#2}{##1}\endcsname}}% compact version
-% % \def#7##1{\mult_interfaces_detokenize{\csname\ifcsname#1#2:##1\endcsname#1#2:##1\else\expandafter#5\csname#1#2:\s!parent\endcsname{##1}\fi\endcsname}}%
-% \edef#8##1{\noexpand\csname\noexpand\ifcsname#1\noexpand#2:##1\endcsname#1\noexpand#2:##1\noexpand\else\s!empty\noexpand\fi\endcsname}%
-% \edef#9##1{\noexpand\csname#1#2:##1\endcsname}}
+ \def#8##1{\begincsname#1#2:##1\endcsname}}
\unexpanded\def\installparameterhandler#1#2%
{\normalexpanded
@@ -253,13 +238,10 @@
\expandafter\noexpand\csname do#2parentparameter\endcsname % or : #2_parent_parameter
\expandafter\noexpand\csname named#2parameter\endcsname
\expandafter\noexpand\csname detokenized#2parameter\endcsname
- \expandafter\noexpand\csname strict#2parameter\endcsname % checked
- \expandafter\noexpand\csname direct#2parameter\endcsname}} % unchecked
+ \expandafter\noexpand\csname direct#2parameter\endcsname}} % strict#2parameter is gone
\unexpanded\def\mult_interfaces_install_root_parameter_handler#1#2#3%
{\def#2##1{\detokenize\expandafter\expandafter\expandafter{\csname#1:##1\endcsname}}% always root
- %\def#3##1{\csname\ifcsname#1:##1\endcsname#1:##1\else\s!empty\fi\endcsname}}
- %\def#3##1{\csname\ifcsname#1:##1\endcsname#1:##1\else\??empty\fi\endcsname}}
\def#3##1{\begincsname#1:##1\endcsname}}
\unexpanded\def\installrootparameterhandler#1#2%
@@ -628,7 +610,7 @@
{\def#3##1{\begincsname#1##1\endcsname}%
\def#4##1{\detokenize\expandafter\expandafter\expandafter{\csname#1##1\endcsname}}%
% \def#4##1{\mult_interfaces_detokenize{\csname\ifcsname#1#2:##1\endcsname#1#2:##1\else\expandafter#5\csname#1#2:\s!parent\endcsname{##1}\fi\endcsname}}%
- \def#5##1{\csname#1##1\endcsname}}
+ \def#5##1{\begincsname#1##1\endcsname}}
\unexpanded\def\installdirectparameterhandler#1#2%
{\normalexpanded
@@ -778,13 +760,13 @@
\newcount\c_mult_interfaces_n_of_namespaces
%def\v_interfaces_prefix_template{\number \c_mult_interfaces_n_of_namespaces>}
-\def\v_interfaces_prefix_template{\characters\c_mult_interfaces_n_of_namespaces>}
+%def\v_interfaces_prefix_template{\characters\c_mult_interfaces_n_of_namespaces>}
-\def\v_interfaces_prefix_template % consistently %03i>
- {\ifnum\c_mult_interfaces_n_of_namespaces<\plusten00\else\ifnum\c_mult_interfaces_n_of_namespaces<\plushundred0\fi\fi
- \number\c_mult_interfaces_n_of_namespaces>}
+%def\v_interfaces_prefix_template % consistently %03i>
+% {\ifnum\c_mult_interfaces_n_of_namespaces<\plusten00\else\ifnum\c_mult_interfaces_n_of_namespaces<\plushundred0\fi\fi
+% \number\c_mult_interfaces_n_of_namespaces>}
-\def\v_interfaces_prefix_template % consistently %03i>
+\def\v_interfaces_prefix_template
{\number\c_mult_interfaces_n_of_namespaces>}
\unexpanded\def\installnamespace#1% for modules and users
@@ -820,9 +802,8 @@
\letvalue\??dummy\empty
-%% \def\dummyparameter #1{\csname\??dummy\ifcsname\??dummy#1\endcsname#1\fi\endcsname}
\def\dummyparameter #1{\begincsname\??dummy#1\endcsname}
- \def\directdummyparameter#1{\csname\??dummy#1\endcsname}
+ \def\directdummyparameter#1{\begincsname\??dummy#1\endcsname}
\unexpanded\def\setdummyparameter #1{\expandafter\def\csname\??dummy#1\endcsname}
\unexpanded\def\letdummyparameter #1{\expandafter\let\csname\??dummy#1\endcsname}
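
The rewritten parameter handler above resolves a key by first trying the instance's own
namespace and, when that control sequence does not exist, following the \s!parent chain
until a value is found or the chain ends empty. The same lookup, expressed as a small
Lua sketch over plain tables; the table layout is invented for illustration, in TeX the
chain lives in the ...:parent control sequences:

    -- a small sketch of the parent chain lookup that the handler macros perform;
    -- the table layout is invented, in TeX the chain lives in ...:parent csnames
    local function parameter(namespace,key)
        while namespace do
            local value = namespace[key]
            if value ~= nil then
                return value
            end
            namespace = namespace.parent -- one step up the chain, can be nil
        end
        return "" -- the TeX side effectively ends up with \empty
    end

    local framed = {                  width = "fit", style = "normal" }
    local note   = { parent = framed,                style = "bold"   }

    print(parameter(note,"style")) -- bold : own value
    print(parameter(note,"width")) -- fit  : inherited from the parent
    print(parameter(note,"depth")) --      : nowhere in the chain, so empty
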
diff --git a/tex/context/base/mkiv/mult-chk.mkiv b/tex/context/base/mkiv/mult-chk.mkiv
index 9208a73e1..b3005b012 100644
--- a/tex/context/base/mkiv/mult-chk.mkiv
+++ b/tex/context/base/mkiv/mult-chk.mkiv
@@ -33,7 +33,7 @@
\unprotect
-\registerctxluafile{mult-chk}{1.001}
+\registerctxluafile{mult-chk}{}
\unexpanded\def\setvalidparameterkeys{\dodoubleargument\mult_checkers_set_valid_parameter_keys}
\unexpanded\def\addvalidparameterkeys{\dodoubleargument\mult_checkers_add_valid_parameter_keys}
diff --git a/tex/context/base/mkiv/mult-def.lua b/tex/context/base/mkiv/mult-def.lua
index 7cee595cd..925c22cd2 100644
--- a/tex/context/base/mkiv/mult-def.lua
+++ b/tex/context/base/mkiv/mult-def.lua
@@ -6589,6 +6589,18 @@ return {
["pe"]="حرفتنظیم",
["ro"]="aliniazacaracter",
},
+ ["alignmentleftwidth"]={
+ ["en"]="alignmentleftwidth",
+ },
+ ["alignmentrightwidth"]={
+ ["en"]="alignmentrightwidth",
+ },
+ ["alignmentleftsample"]={
+ ["en"]="alignmentleftsample",
+ },
+ ["alignmentrightsample"]={
+ ["en"]="alignmentrightsample",
+ },
["alignmentcharacter"]={
["cs"]="alignmentcharacter",
["de"]="alignmentcharacter",
@@ -7061,6 +7073,10 @@ return {
["pe"]="وضعیتپایین",
["ro"]="starejos",
},
+ ["break"]={
+ ["en"]="break",
+ ["nl"]="break",
+ },
["buffer"]={
["en"]="buffer",
["nl"]="buffer",
@@ -7813,6 +7829,7 @@ return {
},
["field"]={
["en"]="field",
+ ["nl"]="veld",
},
["fieldbackgroundcolor"]={
["cs"]="barvapozadipole",
@@ -7867,6 +7884,9 @@ return {
["filler"]={
["en"]="filler",
},
+ ["filter"]={
+ ["en"]="filter",
+ },
["filtercommand"]={
["en"]="filtercommand",
},
@@ -8183,6 +8203,10 @@ return {
["pe"]="تبدیلسر",
["ro"]="conversietitlu",
},
+ ["headseparator"]={
+ ["en"]="headseparator",
+ ["nl"]="kopscheider",
+ },
["header"]={
["cs"]="zahlavi",
["de"]="kopfzeile",
@@ -9264,6 +9288,11 @@ return {
},
["note"]={
["en"]="note",
+ ["nl"]="noot",
+ },
+ ["notes"]={
+ ["en"]="notes",
+ ["nl"]="noten",
},
["nr"]={
["cs"]="nr",
@@ -9834,6 +9863,10 @@ return {
["pe"]="شمارهقبلی",
["ro"]="numarprecedent",
},
+ ["print"]={
+ ["en"]="print",
+ ["nl"]="print",
+ },
["printable"]={
["cs"]="tisknutelne",
["de"]="druckbar",
@@ -10271,6 +10304,12 @@ return {
["pe"]="همانصفحه",
["ro"]="aceeasipagina",
},
+ ["leftsample"]={
+ ["en"]="leftsample",
+ },
+ ["rightsample"]={
+ ["en"]="rightsample",
+ },
["sample"]={
["cs"]="vzor",
["de"]="muster",
@@ -10450,6 +10489,18 @@ return {
["pe"]="فضایکناریقبل",
["ro"]="spatiulateralinainte",
},
+ ["sidespaceinbetween"]={
+ ["en"]="sidespaceinbetween",
+ ["nl"]="zijtussenwit",
+ },
+ ["spaceafterside"]={
+ ["en"]="spaceafterside",
+ ["nl"]="witnazij",
+ },
+ ["spacebeforeside"]={
+ ["en"]="spacebeforeside",
+ ["nl"]="witvoorzij",
+ },
["sign"]={
["cs"]="znak",
["de"]="zeichen",
@@ -10713,6 +10764,14 @@ return {
["pe"]="بست",
["ro"]="strut",
},
+ ["numberstrut"]={
+ ["en"]="numberstrut",
+ ["nl"]="nummerstrut",
+ },
+ ["titlestrut"]={
+ ["en"]="titlestrut",
+ ["nl"]="titelstrut",
+ },
["style"]={
["cs"]="pismeno",
["de"]="stil",
@@ -11032,6 +11091,15 @@ return {
["threshold"]={
["en"]="threshold",
},
+ ["sidethreshold"]={
+ ["en"]="sidethreshold",
+ },
+ ["displaythreshold"]={
+ ["en"]="displaythreshold",
+ },
+ ["inlinethreshold"]={
+ ["en"]="inlinethreshold",
+ },
["title"]={
["cs"]="titul",
["de"]="titel",
@@ -11052,6 +11120,10 @@ return {
["pe"]="رنگعنوان",
["ro"]="culoaretitlu",
},
+ ["simplecommand"]={
+ ["en"]="simplecommand",
+ ["nl"]="simpelcommando",
+ },
["titlecommand"]={
["cs"]="titlecommand",
["de"]="titlecommand",
@@ -13340,6 +13412,10 @@ return {
["pe"]="فوریه",
["ro"]="februarie",
},
+ ["field"]={
+ ["en"]="field",
+ ["nl"]="veld",
+ },
["figure"]={
["cs"]="obrazek",
["de"]="abbildung",
@@ -16351,6 +16427,10 @@ return {
["pe"]="ایست",
["ro"]="stopper",
},
+ ["nostopper"]={
+ ["en"]="nostopper",
+ ["nl"]="geenafsluiter",
+ },
["stretch"]={
["cs"]="natahnout",
["de"]="strecken",
diff --git a/tex/context/base/mkiv/mult-fun.lua b/tex/context/base/mkiv/mult-fun.lua
index df127eb5c..9b7062605 100644
--- a/tex/context/base/mkiv/mult-fun.lua
+++ b/tex/context/base/mkiv/mult-fun.lua
@@ -11,7 +11,7 @@ return {
-- "originlength", "tickstep ", "ticklength",
-- "autoarrows", "ahfactor",
-- "angleoffset", anglelength", anglemethod",
- "ahvariant", "ahdimple", "ahfactor",
+ "ahvariant", "ahdimple", "ahfactor", "ahscale",
"metapostversion",
"maxdimensions",
"drawoptionsfactor",
@@ -19,11 +19,12 @@ return {
"crossingscale", "crossingoption",
},
commands = {
- "loadmodule", "dispose", "nothing", "transparency", "tolist", "topath", "tocycle",
+ "loadfile", "loadimage", "loadmodule",
+ "dispose", "nothing", "transparency", "tolist", "topath", "tocycle",
--
"sqr", "log", "ln", "exp", "inv", "pow", "pi", "radian",
"tand", "cotd", "sin", "cos", "tan", "cot", "atan", "asin", "acos",
- "invsin", "invcos", "invtan", "acosh", "asinh", "sinh", "cosh",
+ "invsin", "invcos", "invtan", "acosh", "asinh", "sinh", "cosh", "tanh",
"zmod",
"paired", "tripled",
"unitcircle", "fulldiamond", "unitdiamond", "fullsquare", "unittriangle", "fulltriangle",
@@ -35,10 +36,10 @@ return {
"smoothed", "cornered", "superellipsed", "randomized", "randomizedcontrols", "squeezed", "enlonged", "shortened",
"punked", "curved", "unspiked", "simplified", "blownup", "stretched",
"enlarged", "leftenlarged", "topenlarged", "rightenlarged", "bottomenlarged",
- "crossed", "laddered", "randomshifted", "interpolated", "paralleled", "cutends", "peepholed",
+ "crossed", "laddered", "randomshifted", "interpolated", "perpendicular", "paralleled", "cutends", "peepholed",
"llenlarged", "lrenlarged", "urenlarged", "ulenlarged",
"llmoved", "lrmoved", "urmoved", "ulmoved",
- "rightarrow", "leftarrow", "centerarrow",
+ "rightarrow", "leftarrow", "centerarrow", "drawdoublearrows",
"boundingbox", "innerboundingbox", "outerboundingbox", "pushboundingbox", "popboundingbox",
"boundingradius", "boundingcircle", "boundingpoint",
"crossingunder", "insideof", "outsideof",
@@ -62,7 +63,7 @@ return {
"checkedbounds", "checkbounds", "strut", "rule",
"withmask", "bitmapimage",
"colordecimals", "ddecimal", "dddecimal", "ddddecimal", "colordecimalslist",
- "textext", "thetextext", "rawtextext", "textextoffset", "texbox", "thetexbox", "rawtexbox",
+ "textext", "thetextext", "rawtextext", "textextoffset", "texbox", "thetexbox", "rawtexbox", "istextext",
"verbatim",
"thelabel", "label",
"autoalign",
diff --git a/tex/context/base/mkiv/mult-ini.lua b/tex/context/base/mkiv/mult-ini.lua
index 19585a7fa..b899d25f2 100644
--- a/tex/context/base/mkiv/mult-ini.lua
+++ b/tex/context/base/mkiv/mult-ini.lua
@@ -9,7 +9,7 @@ if not modules then modules = { } end modules ['mult-ini'] = {
local format, gmatch, match, find, sub = string.format, string.gmatch, string.match, string.find, string.sub
local lpegmatch = lpeg.match
local serialize, concat = table.serialize, table.concat
-local rawget, type = rawget, type
+local rawget, type, tonumber, next = rawget, type, tonumber, next
local context = context
local commands = commands
diff --git a/tex/context/base/mkiv/mult-ini.mkiv b/tex/context/base/mkiv/mult-ini.mkiv
index 8fd0d9472..d66749d14 100644
--- a/tex/context/base/mkiv/mult-ini.mkiv
+++ b/tex/context/base/mkiv/mult-ini.mkiv
@@ -11,27 +11,25 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-%D This module is a stripped down version of \type {mult-ini.tex},
-%D which we keep around as \type {mult-kep.tex} for sentimental
-%D reasons. There you will find some more historic information.
+%D This module is a stripped down version of \type {mult-ini.tex}, which we keep
+%D around as \type {mult-kep.tex} for sentimental reasons. There you will find some
+%D more historic information.
\writestatus{loading}{ConTeXt Multilingual Macros / Initialization}
\unprotect
-\registerctxluafile{mult-ini}{1.001}
+\registerctxluafile{mult-ini}{}
%D \macros
%D [constanten,variabelen,commands]
%D {v!,c!,k!,s!,e!,m!,l!,r!,f!,p!,x!,y!}
%D
-%D In the system modules we introduced some prefixed constants,
-%D variables (both macros) and registers. Apart from a
-%D tremendous saving in terms of memory and a gain in speed we
-%D use from now on prefixes when possible for just another
-%D reason: consistency and multi||linguality. Systematically
-%D using prefixed macros enables us to implement a
-%D multi||lingual user interface. Redefining these next set of
+%D In the system modules we introduced some prefixed constants, variables (both
+%D macros) and registers. Apart from a tremendous saving in terms of memory and a
+%D gain in speed we use from now on prefixes when possible for just another reason:
+%D consistency and multi||linguality. Systematically using prefixed macros enables
+%D us to implement a multi||lingual user interface. Redefining this next set of
+%D prefixes therefore can have disastrous results.
%D
%D \startlinecorrection
@@ -50,9 +48,9 @@
%D \stoptable
%D \stoplinecorrection
%D
-%D In the single||lingual version we used \type{!}, \type{!!},
-%D \type{!!!} and \type{!!!!}. In the meantime some of these
-%D are obsolete (we had some 12 originally).
+%D In the single||lingual version we used \type {!}, \type {!!}, \type {!!!} and
+%D \type {!!!!}. In the meantime some of these are obsolete (we had some 12
+%D originally).
\def\c!prefix!{c!}
\def\k!prefix!{k!}
@@ -66,18 +64,16 @@
%D [constants,variables,commands]
%D {@@,??}
%D
-%D Variables generated by the system can be recognized on their
-%D prefix \type{@@}. They are composed of a command (class)
-%D specific tag, which can be recognized on \type{??}, and a
-%D system constant, which has the prefix \type{c!}. We'll se
-%D some more of this.
+%D Variables generated by the system can be recognized on their prefix \type {@@}.
+%D They are composed of a command (class) specific tag, which can be recognized on
+%D \type {??}, and a system constant, which has the prefix \type {c!}. We'll see
+%D some more of this.
\def\??prefix {??}
\def\@@prefix {@@}
-%D Just to be complete we repeat some of the already defined
-%D system constants here. Maybe their prefix \type{\s!} now
-%D falls into place.
+%D Just to be complete we repeat some of the already defined system constants here.
+%D Maybe their prefix \type {\s!} now falls into place.
\def\s!next {next} \def\s!default {default}
\def\s!dummy {dummy} \def\s!unknown {unknown}
@@ -90,26 +86,24 @@
\def\s!true {true}
\def\s!false {false}
-%D The word \type{height} takes 6~token memory cells. The
-%D control sequence \type{\height} on the other hand uses only
-%D one. Knowing this, we can improve the performance of \TEX,
-%D both is terms of speed and memory usage, by using control
+%D The word \type {height} takes 6~token memory cells. The control sequence \type
+%D {\height} on the other hand uses only one. Knowing this, we can improve the
+%D performance of \TEX, both in terms of speed and memory usage, by using control
%D sequences instead of the words written in full.
%D
-%D Where in the \ASCII\ file the second lines takes nine extra
-%D characters, \TEX\ saves us 13~tokens.
+%D Where in the \ASCII\ file the second line takes nine extra characters, \TEX\
+%D saves us 13~tokens.
%D
%D \starttyping
%D \hrule width 10pt height 2pt depth 1pt
%D \hrule \s!width 10pt \s!height 2pt \s!depth 1pt
%D \stoptyping
%D
-%D One condition is that we have defined \type {\s!height},
-%D \type {\s!width} and \type {\s!depth} as respectively
-%D \type {height}, \type {width} and \type {depth}. Using this
-%D scheme therefore only makes sense when a token sequence is
-%D used more than once. Savings like this should of course be
-%D implemented in english, just because \TEX\ is english.
+%D One condition is that we have defined \type {\s!height}, \type {\s!width} and
+%D \type {\s!depth} as respectively \type {height}, \type {width} and \type {depth}.
+%D Using this scheme therefore only makes sense when a token sequence is used more
+%D than once. Savings like this should of course be implemented in english, just
+%D because \TEX\ is english.
\def\s!width {width} \let\!!width \s!width % obsolete
\def\s!height{height} \let\!!height\s!height % obsolete
@@ -142,19 +136,16 @@
%D definemessageconstant,
%D definefileconstant}
%D
-%D The first part of this module is dedicated to dealing with
-%D multi||lingual constants and variables. When \CONTEXT\ grew
-%D bigger and bigger in terms of bytes and used string space,
-%D we switched to predefined constants. At the cost of more
-%D hash table entries, the macros not only becase more compact,
-%D they became much faster too. Maybe an even bigger advantage
-%D was that mispelling could no longer lead to problems. Even a
-%D multi||lingual interface became possible.
+%D The first part of this module is dedicated to dealing with multi||lingual
+%D constants and variables. When \CONTEXT\ grew bigger and bigger in terms of bytes
+%D and used string space, we switched to predefined constants. At the cost of more
+%D hash table entries, the macros not only became more compact, they became much
+%D faster too. Maybe an even bigger advantage was that misspelling could no longer
+%D lead to problems. Even a multi||lingual interface became possible.
%D
-%D Constants --- we'll introduce the concept of variables later
-%D on --- are preceded by a type specific prefix, followed by a
-%D \type{!}. To force consistency, we provide a few commands
-%D for defining such constants.
+%D Constants |<|we'll introduce the concept of variables later on|>| are preceded by
+%D a type specific prefix, followed by a \type {!}. To force consistency, we provide
+%D a few commands for defining such constants.
%D
%D \starttyping
%D \defineinterfaceconstant {name} {meaning}
@@ -192,10 +183,9 @@
\unexpanded\def\definesystemconstant #1{\expandafter\def\csname\s!prefix!#1\endcsname{#1}}
\unexpanded\def\definemessageconstant #1{\expandafter\def\csname\m!prefix!#1\endcsname{#1}}
-%D In a parameter driven system, some parameters are shared
-%D by more system components. In \CONTEXT\ we can distinguish
-%D parameters by a unique prefix. Such a prefix is defined
-%D with:
+%D In a parameter driven system, some parameters are shared by several system
+%D components. In \CONTEXT\ we can distinguish parameters by a unique prefix. Such a
+%D prefix is defined with:
%D
%D \starttyping
%D \definesystemvariable {name}
@@ -209,16 +199,14 @@
%D {selectinterface,
%D defaultinterface, currentinterface, currentresponses}
%D
-%D With \type{\selectinterface} we specify the language we are
-%D going to use. The system asks for the language wanted, and
-%D defaults to \type{\currentinterface} when we just give
-%D \type{enter}. By default the message system uses the
-%D current interface language, but \type{\currentresponses}
-%D can specify another language too.
+%D With \type {\selectinterface} we specify the language we are going to use. The
+%D system asks for the language wanted, and defaults to \type {\currentinterface}
+%D when we just give \type {enter}. By default the message system uses the current
+%D interface language, but \type {\currentresponses} can specify another language
+%D too.
%D
-%D Because we want to generate formats directly too, we do
-%D not ask for interface specifications when these are already
-%D defined (like in cont-nl.tex and alike).
+%D Because we want to generate formats directly too, we do not ask for interface
+%D specifications when these are already defined (like in cont-nl.tex and alike).
\ifdefined\defaultinterface
@@ -257,9 +245,8 @@
%D \macros
%D {startinterface}
%D
-%D Sometimes we want to define things only for specific
-%D interface languages. This can be done by means of the
-%D selector:
+%D Sometimes we want to define things only for specific interface languages. This
+%D can be done by means of the selector:
%D
%D \starttyping
%D \startinterface language
@@ -273,24 +260,22 @@
{\doifnot{#1}{all}{\doifnotinset\currentinterface{#1}{\gobbleuntil\stopinterface}}}
\let\stopinterface\relax
-
+
%D \macros
%D {startmessages,
%D getmessage,
%D showmessage,
%D makemessage}
%D
-%D A package as large as \CONTEXT\ can hardly function without
-%D a decent message mechanism. Due to its multi||lingual
-%D interface, the message subsystem has to be multi||lingual
-%D too. A major drawback of this feature is that we have to
-%D code messages. As a result, the source becomes less self
-%D documented. On the other hand, consistency will improve.
+%D A package as large as \CONTEXT\ can hardly function without a decent message
+%D mechanism. Due to its multi||lingual interface, the message subsystem has to be
+%D multi||lingual too. A major drawback of this feature is that we have to code
+%D messages. As a result, the source becomes less self-documented. On the other
+%D hand, consistency will improve.
%D
-%D Because the overhead in terms of entries in the (already
-%D exhausted) hash table has to be minimal, messages are packed
-%D in libraries. We can extract a message from such a library
-%D in three ways:
+%D Because the overhead in terms of entries in the (already exhausted) hash table
+%D has to be minimal, messages are packed in libraries. We can extract a message
+%D from such a library in three ways:
%D
%D \starttyping
%D \getmessage {library} {tag}
@@ -298,13 +283,11 @@
%D \makemessage {library} {tag} {data}
%D \stoptyping
%D
-%D The first command gets the message \type{tag} from the
-%D \type{library} specified. The other commands take an extra
-%D argument: a list of items to be inserted in the message
-%D text. While \type{\showmessage} shows the message at the
-%D terminal, the other commands generate the message as text.
-%D Before we explain the \type{data} argument, we give an
-%D example of a library.
+%D The first command gets the message \type {tag} from the \type {library}
+%D specified. The other commands take an extra argument: a list of items to be
+%D inserted in the message text. While \type {\showmessage} shows the message at the
+%D terminal, the other commands generate the message as text. Before we explain the
+%D \type {data} argument, we give an example of a library.
%D
%D \starttyping
%D \startmessages english library: alfa
@@ -332,12 +315,11 @@
%D something : second (and last) message to you
%D \stoptyping
%D
-%D As we can see, the title entry is shown with the message.
-%D The data fields are comma separated and are specified in the
-%D message text by \type{--}.
+%D As we can see, the title entry is shown with the message. The data fields are
+%D comma separated and are specified in the message text by \type {--}.
%D
-%D It is not required to define all messages in a library at
-%D once. We can add messages to a library in the following way:
+%D It is not required to define all messages in a library at once. We can add
+%D messages to a library in the following way:
%D
%D \starttyping
%D \startmessages english library: alfa
@@ -345,21 +327,17 @@
%D \stopmessages
%D \stoptyping
%D
-%D Because such definitions can take place in different
-%D modules, the system gives a warning when a tag occurs more
-%D than once. The first occurrence takes preference over later
-%D ones, so we had better use a save offset, as shown in the
-%D example. As we can see, the title field is specified only
-%D the first time!
+%D Because such definitions can take place in different modules, the system gives a
+%D warning when a tag occurs more than once. The first occurrence takes preference
+%D over later ones, so we had better use a safe offset, as shown in the example. As
+%D we can see, the title field is specified only the first time!
%D
-%D Because we want to check for duplicate tags, the macros
-%D are a bit more complicated than neccessary. The \NEWLINE\
-%D token is used as message separator.
+%D Because we want to check for duplicate tags, the macros are a bit more
+%D complicated than necessary. The \NEWLINE\ token is used as message separator.
+%D
+%D For internal purposes one can use \type {\setmessagetext}, which puts the message
+%D text asked for in \type {\currentmessagetext}.
%D
-%D For internal purposes one can use \type {\setmessagetext},
-%D which puts the message text asked for in \type
-%D {\currentmessagetext}.
-
%D These will become obsolete:
\unexpanded\def\startmessages #1 library: #2 %
@@ -399,17 +377,16 @@
%D \macros
%D {ifshowwarnings, ifshowmessages}
%D
-%D Sometimes displaying message can slow down processing
-%D considerably. We therefore introduce warnings. Users can
-%D turn of warnings and messages by saying:
+%D Sometimes displaying messages can slow down processing considerably. We therefore
+%D introduce warnings. Users can turn off warnings and messages by saying:
%D
%D \starttyping
%D \showwarningstrue
%D \showmessagestrue
%D \stoptyping
%D
-%D Turning off messages also turns off warnings, which is
-%D quote logical because they are less important.
+%D Turning off messages also turns off warnings, which is quite logical because they
+%D are less important.
% not yet mkiv
@@ -435,13 +412,12 @@
%D \macros
%D {dosetvalue,dosetevalue,dosetgvalue,dosetxvalue,docopyvalue,doresetvalue} % dogetvalue
%D
-%D We already defined these auxiliary macros in the system
-%D modules. Starting with this module however, we have to take
-%D multi||linguality a bit more serious.
-
-%D In due time, when we exclusively use the parameter handler code, we can drop
-%D the backmapping (\type{\c!k...}) and make \type {\c!c...} similar to
-%D \type {\v!...}. In that case we can simply the following setters.
+%D We already defined these auxiliary macros in the system modules. Starting with
+%D this module however, we have to take multi||linguality a bit more seriously.
+%D
+%D In due time, when we exclusively use the parameter handler code, we can drop the
+%D backmapping (\type{\c!k...}) and make \type {\c!c...} similar to \type {\v!...}.
+%D In that case we can simplify the following setters.
\unexpanded\def\doletvalue #1#2{\expandafter\let \csname#1\ifcsname\k!prefix!#2\endcsname\csname\k!prefix!#2\endcsname\else#2\fi\endcsname}
\unexpanded\def\dosetvalue #1#2{\expandafter\def \csname#1\ifcsname\k!prefix!#2\endcsname\csname\k!prefix!#2\endcsname\else#2\fi\endcsname}
@@ -476,16 +452,16 @@
\stopinterface
-%D We can now redefine some messages that will be
-%D introduced in the multi||lingual system module.
+%D We can now redefine some messages that will be introduced in the multi||lingual
+%D system module.
\unexpanded\def\showassignerror #1#2{\showmessage\m!check1{#1,#2}\waitonfatalerror}
\unexpanded\def\showargumenterror#1#2{\showmessage\m!check2{#1,#2}\waitonfatalerror}
\unexpanded\def\showdefinederror #1#2{\showmessage\m!check3{#1,#2}\waitonfatalerror}
-%D \CONTEXT\ is a parameter driven package. This means that
-%D users instruct the system by means of variables, values and
-%D keywords. These instructions take the form:
+%D \CONTEXT\ is a parameter driven package. This means that users instruct the
+%D system by means of variables, values and keywords. These instructions take the
+%D form:
%D
%D \starttyping
%D \setupsomething[some variable=some value, another one=a keyword]
@@ -497,13 +473,11 @@
%D \dosomething[this way,that way,no way]
%D \stoptyping
%D
-%D Because the same variables can occur in more than one setup
-%D command, we have to be able to distinguish them. This is
-%D achieved by assigning them a unique prefix.
+%D Because the same variables can occur in more than one setup command, we have to
+%D be able to distinguish them. This is achieved by assigning them a unique prefix.
%D
-%D Imagine a setup command for boxed text, that enables us to
-%D specify the height and width of the box. Behide the scenes
-%D the command
+%D Imagine a setup command for boxed text that enables us to specify the height and
+%D width of the box. Behind the scenes the command
%D
%D \starttyping
%D \setupbox [width=12cm, height=3cm]
@@ -517,16 +491,16 @@
%D \stoptyping
%D
%D while a similar command for specifying the page dimensions
-%D of an \cap{A4} page results in:
+%D of an \cap {A4} page results in:
%D
%D \starttyping
%D \ {21.0cm}
%D \ {27.9cm}
%D \stoptyping
%D
-%D The prefixes \type{} and \type{} are hidden from
-%D users and can therefore be language independant. Variables
-%D on the other hand, differ for each language:
+%D The prefixes \type {} and \type {} are hidden from users and can
+%D therefore be language independent. Variables, on the other hand, differ for each
+%D language:
%D
%D \starttyping
%D \ {}
@@ -534,28 +508,24 @@
%D \ {}
%D \stoptyping
%D
-%D In this example we can see that the assigned values or
-%D keywords are language dependant too. This will be a
-%D complication when defining multi||lingual setup files.
+%D In this example we can see that the assigned values or keywords are language
+%D dependent too. This will be a complication when defining multi||lingual setup
+%D files.
%D
-%D A third phenomena is that variables and values can have a
-%D similar meaning.
+%D A third phenomenon is that variables and values can have a similar meaning.
%D
%D \starttyping
%D \ {}
%D \ {12cm}
%D \stoptyping
%D
-%D A (minor) complication is that where in english we use
-%D \type{}, in dutch we find both \type{} and
-%D \type{}. This means that when we use some sort of
-%D translation table, we have to distinguish between the
-%D variables at the left side and the fixed values at the
-%D right.
+%D A (minor) complication is that where in english we use \type {}, in dutch
+%D we find both \type {} and \type {}. This means that when we use
+%D some sort of translation table, we have to distinguish between the variables at
+%D the left side and the fixed values at the right.
%D
-%D The same goes for commands that are composed of different
-%D user supplied and/or language specific elements. In english
-%D we can use:
+%D The same goes for commands that are composed of different user supplied and/or
+%D language specific elements. In english we can use:
%D
%D \starttyping
%D \
@@ -569,14 +539,13 @@
%D \
%D \stoptyping
%D
-%D These subtle differences automatically lead to a solution
-%D where variables, values, elements and other components have
-%D a similar logical name (used in macro's) but a different
-%D meaning (supplied by the user).
+%D These subtle differences automatically lead to a solution where variables,
+%D values, elements and other components have a similar logical name (used in
+%D macros) but a different meaning (supplied by the user).
%D
-%D Our solution is one in which the whole system is programmed
-%D in terms of identifiers with language specific meanings. In
-%D such an implementation, each fixed variable is available as:
+%D Our solution is one in which the whole system is programmed in terms of
+%D identifiers with language specific meanings. In such an implementation, each
+%D fixed variable is available as:
%D
%D \starttyping
%D \
@@ -594,75 +563,66 @@
%D \def\boxwidth{12cm}
%D \stoptyping
%D
-%D because we don't want to recode the source, a setup command
-%D in another language has to expand to this variable, so:
+%D because we don't want to recode the source, a setup command in another language
+%D has to expand to this variable, so:
%D
%D \starttyping
%D \setupblock[width=12cm]
%D \stoptyping
%D
-%D has to result in the definition of \type{\boxwidth} too.
-%D This method enables us to build compact, fast and readable
-%D code.
+%D has to result in the definition of \type {\boxwidth} too. This method enables us
+%D to build compact, fast and readable code.
%D
-%D An alternative method, which we considered using, uses a
-%D more indirect way. In this case, both calls generate a
-%D different variable:
+%D An alternative method, which we considered using, uses a more indirect way. In
+%D this case, both calls generate a different variable:
%D
%D \starttyping
%D \def\boxwidth {12cm}
%D \def\boxbreedte {12cm}
%D \stoptyping
%D
-%D And because we don't want to recode those megabytes of
-%D already developed code, this variable has to be called with
-%D something like:
+%D And because we don't want to recode those megabytes of already developed code,
+%D this variable has to be called with something like:
%D
%D \starttyping
%D \valueof\box\width
%D \stoptyping
%D
-%D where \type{\valueof} takes care of the translation of
-%D \type{width} or \type{breedte} to \type{width} and
-%D combining this with \type{box} to \type{\boxwidth}.
+%D where \type {\valueof} takes care of the translation of \type {width} or \type
+%D {breedte} to \type {width} and combining this with \type {box} to \type
+%D {\boxwidth}.
%D
-%D One advantage of this other scheme is that, within certain
-%D limits, we can implement an interface that can be switched
-%D to another language at will, while the current approach
-%D fixes the interface at startup. There are, by the way,
-%D other reasons too for not choosing this scheme. Switching
-%D user generated commands is for instance impossible and a
-%D dual interface would therefore give a strange mix of
-%D languages.
+%D One advantage of this other scheme is that, within certain limits, we can
+%D implement an interface that can be switched to another language at will, while
+%D the current approach fixes the interface at startup. There are, by the way, other
+%D reasons too for not choosing this scheme. Switching user generated commands is
+%D for instance impossible and a dual interface would therefore give a strange mix
+%D of languages.
%D
-%D Now let's work out the first scheme. Although the left hand
-%D of the assignment is a variable from the users point of
-%D view, it is a constant in terms of the system. Both
-%D \type{width} and \type{breedte} expand to \type{width}
-%D because in the source we only encounter \type{width}. Such
-%D system constants are presented as
+%D Now let's work out the first scheme. Although the left hand side of the assignment
+%D is a variable from the user's point of view, it is a constant in terms of the system.
+%D Both \type {width} and \type {breedte} expand to \type {width} because in the
+%D source we only encounter \type {width}. Such system constants are presented as
%D
%D \starttyping
%D \c!width
%D \stoptyping
%D
-%D This constant is always equivalent to \type{width}. As we
-%D can see, we use \type{c!} to mark this one as constant. Its
-%D dutch counterpart is:
+%D This constant is always equivalent to \type {width}. As we can see, we use \type
+%D {c!} to mark this one as constant. Its dutch counterpart is:
%D
%D \starttyping
%D breedte
%D \stoptyping
%D
-%D When we interpret a setup command each variable is
-%D translated to it's \type{c!} counterpart. This means that
-%D \type{breedte} and \type{width} expand to \type{breedte}
-%D and \type{\c!width} which both expand to \type{width}. That
-%D way user variables become system constants.
+%D When we interpret a setup command each variable is translated to its \type {c!}
+%D counterpart. This means that \type {breedte} and \type{width} expand to \type
+%D {breedte} and \type {\c!width} which both expand to \type {width}. That way user
+%D variables become system constants.
%D
-%D The interpretation is done by means of a general setup
-%D command \type{\getparameters} that we introduced in the
-%D system module. Let us define some simple setup command:
+%D The interpretation is done by means of a general setup command \type
+%D {\getparameters} that we introduced in the system module. Let us define some
+%D simple setup command:
%D
%D \starttyping
%D \unexpanded\def\setupbox[#1]%
@@ -675,27 +635,25 @@
%D \setupbox [width=3cm, height=1cm]
%D \stoptyping
%D
-%D Afterwards we have two variables \type{\@@bxwidth} and
-%D \type{\@@bxheight} which have the values \type{3cm} and
-%D \type{1cm} assigned. These variables are a combinatiom of
-%D the setup prefix \type{\??bx}, which expands to \type{@@bx}
-%D and the translated user supplied variables \type{width} and
-%D \type{height} or \type{breedte} and \type{hoogte},
-%D depending on the actual language. In dutch we just say:
+%D Afterwards we have two variables \type {\@@bxwidth} and \type {\@@bxheight} which
+%D have the values \type {3cm} and \type {1cm} assigned. These variables are a
+%D combination of the setup prefix \type {\??bx}, which expands to \type {@@bx} and
+%D the translated user supplied variables \type {width} and \type {height} or \type
+%D {breedte} and \type {hoogte}, depending on the actual language. In dutch we just
+%D say:
%D
%D \starttyping
%D \stelblokin [breedte=3cm,hoogte=1cm]
%D \stoptyping
%D
-%D and get ourselves \type{\@@bxwidth} and \type{\@@bxheight}
-%D too. In the source of \CONTEXT, we can recognize constants
-%D and variables on their leading \type{c!}, \type{v!} etc.,
-%D prefixes on \type{??} and composed variables on \type{@@}.
+%D and get ourselves \type {\@@bxwidth} and \type {\@@bxheight} too. In the source
+%D of \CONTEXT, we can recognize constants and variables on their leading \type
+%D {c!}, \type {v!} etc., prefixes on \type {??} and composed variables on \type
+%D {@@}.
%D
-%D We already saw that user supplied keywords need some
-%D special treatment too. This time we don't translate the
-%D keyword, but instead use in the source a variable which
-%D meaning depends on the interface language.
+%D We already saw that user supplied keywords need some special treatment too. This
+%D time we don't translate the keyword, but instead use in the source a variable
+%D whose meaning depends on the interface language.
%D
%D \starttyping
%D \v!left
@@ -711,16 +669,14 @@
%D \v!right=>\dosomethingontheright]
%D \stoptyping
%D
-%D Because variables like \type{\@@bxlocation} can have a lot
-%D of meanings, including tricky expandable tokens, we cannot
-%D translate this meaning when we compare. This means that
-%D \type{\@@bxlocation} can be \type{left} of \type{links} of
-%D whatever meaning suits the language. But because
-%D \type{\v!left} also has a meaning that suits the language,
-%D we are able to compare.
+%D Because variables like \type {\@@bxlocation} can have a lot of meanings,
+%D including tricky expandable tokens, we cannot translate this meaning when we
+%D compare. This means that \type {\@@bxlocation} can be \type {left} or \type
+%D {links} or whatever meaning suits the language. But because \type {\v!left} also
+%D has a meaning that suits the language, we are able to compare.
%D
-%D Although we know it sounds confusing we want to state two
-%D important characteristics of the interface as described:
+%D Although we know it sounds confusing we want to state two important
+%D characteristics of the interface as described:
%D
%D \startnarrower \em
%D user variables become system constants
@@ -731,11 +687,10 @@
%D \startnarrower \em
%D user constants (keywords) become system variables
%D \stopnarrower
-
-%D The \type {\c!internal} is a left over from the time that
-%D the user interface documents were not using a specification
-%D alongside a keyword specification but used a shared file in
-%D which case we need to go in both directions.
+%D
+%D The \type {\c!internal} is a leftover from the time when the user interface
+%D documents were not using a specification alongside a keyword specification but
+%D used a shared file in which case we need to go in both directions.
% temporary mkiv hack (we can best just store the whole table in memory)
@@ -750,10 +705,9 @@
%D \macros
%D {defineinterfaceconstant}
%D
-%D Next we redefine a previously defined macro to take care of
-%D interface translation too. It's a bit redundant, because
-%D in these situations we could use the c||version, but for
-%D documentation purposes the x||alternative comes in handy.
+%D Next we redefine a previously defined macro to take care of interface translation
+%D too. It's a bit redundant, because in these situations we could use the
+%D c||version, but for documentation purposes the x||alternative comes in handy.
\unexpanded\def\defineinterfaceconstant#1#2%
{\expandafter\def\csname\c!prefix!#1\endcsname{#2}}
@@ -761,16 +715,14 @@
%D \macros
%D {startelements}
%D
-%D Due to the object oriented nature of \CONTEXT, we also need
-%D to define the elements that are used to build commands.
+%D Due to the object oriented nature of \CONTEXT, we also need to define the
+%D elements that are used to build commands.
%D
-%D Such elements sometimes are the same in diferent
-%D languages, but mostly they differ. Things can get even
-%D confusing when we look at for instance the setup commands.
-%D In english we say \type{\setup}, but in dutch we
-%D have: \type{\stelin}. Such split elements are no
-%D problem, because we just define two elements. When no second
-%D part is needed, we use a \type{-}:
+%D Such elements sometimes are the same in different languages, but mostly they
+%D differ. Things can get even more confusing when we look at for instance the setup
+%D commands. In english we say \type{\setup}, but in dutch we have: \type
+%D {\stelin}. Such split elements are no problem, because we just define two
+%D elements. When no second part is needed, we use a \type {-}:
\unexpanded\def\setinterfaceelement#1#2%
{\clf_setinterfaceelement{#1}{#2}%
@@ -820,15 +772,18 @@
\stopinterface
-%D So much for the basic multi||lingual interface commands. The
-%D macro's can be enhanced with more testing facilities, but
-%D for the moment they suffice.
+%D So much for the basic multi||lingual interface commands. The macros can be
+%D enhanced with more testing facilities, but for the moment they suffice.
\ifdefined\zwnj \else \edef\zwnj{\directlua{utf.char(\number"200C)}} \fi % needed for cont-pe % maybe to char-utf.mkiv
\ifdefined\zwj \else \edef\zwj {\directlua{utf.char(\number"200D)}} \fi % needed for cont-pe % maybe to char-utf.mkiv
-%D Out of convenience we define the banners here. This might move
-%D to the \LUA\ end.
+
+%D \macros
+%D {contextversion, contextversionnumber, contextversionno,
+%D contextbanner, showcontextbanner, formatversion}
+%D
+%D Out of convenience we define the banners here. This might move to the \LUA\ end.
\def\contextbanner
{ConTeXt \space
@@ -837,28 +792,34 @@
int: \currentinterface/\currentresponses}
\unexpanded\def\showcontextbanner
- %{\writeline
- % \writestring\contextbanner
- % \writeline}
{\writestatus\m!system\empty
\writestatus\m!system\contextbanner
\writestatus\m!system\empty}
\edef\formatversion
- {\the\normalyear .\the\normalmonth.\the\normalday}
+ {\the\normalyear.\the\normalmonth.\the\normalday}
+
+\newcount\contextversionno
\ifx\contextversion\undefined
- \def\contextversion {unknown}
- \def\contextversionnumber{0}
-\else
- %\def\contextversionnumber#1.#2.#3 #4:#5\relax{#1\ifnum#2<10 0\fi#2\ifnum#3<10 0\fi#3 #4:#5}
- \def\contextversionnumber#1.#2.#3 #4:#5\relax{#1\ifnum#2<10 0\fi\purenumber{#2}\ifnum#3<10 0\fi\purenumber{#3} #4:#5}
- \edef\contextversionnumber{\expandafter\contextversionnumber\contextversion\relax\space\contextmark}
+ \edef\contextversion{\the\normalyear.\the\normalmonth.\the\normalday\space 00:00}
\fi
+% \def\contextversionnumber#1.#2.#3 #4:#5\relax{#1\ifnum#2<10 0\fi\purenumber{#2}\ifnum#3<10 0\fi\purenumber{#3} #4:#5}
+% \edef\contextversionnumber{\expandafter\contextversionnumber\contextversion\relax\space\contextmark}
+
+\unexpanded\def \contextversionnumber #1.#2.#3 #4:#5\relax{#1#2#3}
+ \contextversionno \expandafter\contextversionnumber\contextversion\relax
+ \edef \contextversionnumber {\the\contextversionno\space\contextmark}
+
+%D \macros
+%D {everydump}
+%D
+%D This one is only used when we generate the format.
+
\ifx\undefined\everydump
- \newtoks\everydump
- \def\dump{\the\everydump\normaldump}
+ \newtoks\everydump
+ \def\dump{\the\everydump\normaldump}
\fi
% \appendtoks \showcontextbanner \to \everydump
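
The \contextversionno count introduced in the hunk above packs the year.month.day part
of \contextversion into one comparable number: the delimited macro #1.#2.#3 #4:#5 simply
concatenates the three date fields and drops the time. A small Lua sketch of the same
conversion, with the version string made up for illustration:

    -- what \contextversionnumber/\contextversionno compute from the version
    -- string: "2018.01.12 10:15" becomes 20180112, the time part is dropped
    local function versionnumber(versionstring)
        local year, month, day = string.match(versionstring,"^(%d+)%.(%d+)%.(%d+)")
        return tonumber(year .. month .. day)
    end

    print(versionnumber("2018.01.12 10:15")) -- 20180112
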
diff --git a/tex/context/base/mkiv/mult-low.lua b/tex/context/base/mkiv/mult-low.lua
index 4501afefb..84b4189dc 100644
--- a/tex/context/base/mkiv/mult-low.lua
+++ b/tex/context/base/mkiv/mult-low.lua
@@ -29,7 +29,7 @@ return {
--
"bigskipamount", "medskipamount", "smallskipamount",
--
- "fmtname", "fmtversion", "texengine", "texenginename", "texengineversion",
+ "fmtname", "fmtversion", "texengine", "texenginename", "texengineversion", "texenginefunctionality",
"luatexengine", "pdftexengine", "xetexengine", "unknownengine",
-- "etexversion",
-- "pdftexversion", "pdftexrevision",
@@ -85,6 +85,7 @@ return {
--
"fontslantperpoint", "fontinterwordspace", "fontinterwordstretch", "fontinterwordshrink",
"fontexheight", "fontemwidth", "fontextraspace", "slantperpoint",
+ "mathexheight", "mathemwidth",
"interwordspace", "interwordstretch", "interwordshrink", "exheight", "emwidth", "extraspace",
"mathsupdisplay", "mathsupnormal", "mathsupcramped", "mathsubnormal", "mathsubcombined", "mathaxisheight",
"muquad",
@@ -123,14 +124,15 @@ return {
--
"luastringsep", "!!bs", "!!es",
--
- "lefttorightmark", "righttoleftmark",
+ "lefttorightmark", "righttoleftmark", "lrm", "rlm",
+ "bidilre", "bidirle", "bidipop", "bidilro", "bidirlo",
--
"breakablethinspace", "nobreakspace", "nonbreakablespace", "narrownobreakspace", "zerowidthnobreakspace",
"ideographicspace", "ideographichalffillspace",
"twoperemspace", "threeperemspace", "fourperemspace", "fiveperemspace", "sixperemspace",
- "figurespace", "punctuationspace", "hairspace",
+ "figurespace", "punctuationspace", "hairspace", "enquad", "emquad",
"zerowidthspace", "zerowidthnonjoiner", "zerowidthjoiner", "zwnj", "zwj",
- "optionalspace", "asciispacechar",
+ "optionalspace", "asciispacechar", "softhyphen",
--
"Ux", "eUx", "Umathaccents",
--
@@ -178,7 +180,7 @@ return {
--
"donetrue", "donefalse", "foundtrue", "foundfalse",
--
- "inlineordisplaymath","indisplaymath","forcedisplaymath","startforceddisplaymath","stopforceddisplaymath","reqno",
+ "inlineordisplaymath","indisplaymath","forcedisplaymath","startforceddisplaymath","stopforceddisplaymath","startpickupmath","stoppickupmath","reqno",
--
"mathortext",
--
@@ -243,6 +245,7 @@ return {
"doifelsenextoptional", "doifnextoptionalelse",
"doifelsenextoptionalcs", "doifnextoptionalcselse",
"doifelsefastoptionalcheck", "doiffastoptionalcheckelse",
+ "doifelsefastoptionalcheckcs", "doiffastoptionalcheckcselse",
"doifelsenextbgroup", "doifnextbgroupelse",
"doifelsenextbgroupcs", "doifnextbgroupcselse",
"doifelsenextparenthesis", "doifnextparenthesiselse",
@@ -260,6 +263,7 @@ return {
"doifelsecommon", "doifcommonelse", "doifcommon", "doifnotcommon",
"doifinstring", "doifnotinstring", "doifelseinstring", "doifinstringelse",
"doifelseassignment", "doifassignmentelse", "docheckassignment",
+ "doiftext", "doifelsetext", "doiftextelse", "doifnottext",
--
"tracingall", "tracingnone", "loggingall",
--
@@ -274,7 +278,7 @@ return {
"singleexpandafter", "doubleexpandafter", "tripleexpandafter",
--
"dontleavehmode", "removelastspace", "removeunwantedspaces", "keepunwantedspaces",
- "removepunctuation",
+ "removepunctuation", "ignoreparskip", "forcestrutdepth",
--
"wait", "writestatus", "define", "defineexpandable", "redefine",
--
@@ -312,7 +316,7 @@ return {
"gobbleoneargument", "gobbletwoarguments", "gobblethreearguments", "gobblefourarguments", "gobblefivearguments", "gobblesixarguments", "gobblesevenarguments", "gobbleeightarguments", "gobbleninearguments", "gobbletenarguments",
"gobbleoneoptional", "gobbletwooptionals", "gobblethreeoptionals", "gobblefouroptionals", "gobblefiveoptionals",
--
- "dorecurse", "doloop", "exitloop", "dostepwiserecurse", "recurselevel", "recursedepth", "dofastloopcs", "dowith",
+ "dorecurse", "doloop", "exitloop", "dostepwiserecurse", "recurselevel", "recursedepth", "dofastloopcs", "fastloopindex", "fastloopfinal", "dowith",
--
"newconstant", "setnewconstant", "setconstant", "setconstantvalue",
"newconditional", "settrue", "setfalse", "settruevalue", "setfalsevalue",
@@ -339,7 +343,7 @@ return {
--
"offinterlineskip", "oninterlineskip", "nointerlineskip",
--
- "strut", "halfstrut", "quarterstrut", "depthstrut", "setstrut", "strutbox", "strutht", "strutdp", "strutwd", "struthtdp", "begstrut", "endstrut", "lineheight",
+ "strut", "halfstrut", "quarterstrut", "depthstrut", "halflinestrut", "noheightstrut", "setstrut", "strutbox", "strutht", "strutdp", "strutwd", "struthtdp", "strutgap", "begstrut", "endstrut", "lineheight",
"leftboundary", "rightboundary", "signalcharacter",
--
"ordordspacing", "ordopspacing", "ordbinspacing", "ordrelspacing",
@@ -370,7 +374,8 @@ return {
--
"startimath", "stopimath", "normalstartimath", "normalstopimath",
"startdmath", "stopdmath", "normalstartdmath", "normalstopdmath",
- "normalsuperscript", "normalsubscript",
+ "normalsuperscript", "normalsubscript", "normalnosuperscript", "normalnosubscript",
+ "superscript", "subscript", "nosuperscript", "nosubscript",
--
"uncramped", "cramped", "triggermathstyle", "mathstylefont", "mathsmallstylefont", "mathstyleface", "mathsmallstyleface", "mathstylecommand", "mathpalette",
"mathstylehbox", "mathstylevbox", "mathstylevcenter", "mathstylevcenteredhbox", "mathstylevcenteredvbox",
@@ -411,7 +416,9 @@ return {
"rtlhbox", "ltrhbox", "rtlvbox", "ltrvbox", "rtlvtop", "ltrvtop",
"autodirhbox", "autodirvbox", "autodirvtop",
"leftorrighthbox", "leftorrightvbox", "leftorrightvtop",
- "lefttoright", "righttoleft","synchronizelayoutdirection","synchronizedisplaydirection","synchronizeinlinedirection",
+ "lefttoright", "righttoleft", "checkedlefttoright", "checkedrighttoleft",
+ "synchronizelayoutdirection","synchronizedisplaydirection","synchronizeinlinedirection",
+ "dirlre", "dirrle", "dirlro", "dirrlo",
--
"lesshyphens", "morehyphens", "nohyphens", "dohyphens",
--
@@ -421,10 +428,12 @@ return {
--
"nospace", "nospacing", "dospacing",
--
- "naturalhbox", "naturalvbox", "naturalhpack", "naturalvpack",
+ "naturalhbox", "naturalvbox", "naturalvtop", "naturalhpack", "naturalvpack",
--
"frule",
--
"compoundhyphenpenalty",
+ --
+ "start", "stop",
}
}
diff --git a/tex/context/base/mkiv/mult-mes.lua b/tex/context/base/mkiv/mult-mes.lua
index 979831726..1eb429763 100644
--- a/tex/context/base/mkiv/mult-mes.lua
+++ b/tex/context/base/mkiv/mult-mes.lua
@@ -490,6 +490,17 @@ return {
no = "there is nothing to split",
ro = "there is nothing to split",
},
+ ["floatblocks:14"] = {
+ en = "float locations will be forced to %a due to exporting",
+ nl = "blok plaatsing is altijd %a vanwege export",
+ },
+ ["floatblocks:15"] = {
+ en = "location %a changed to %a due to exporting",
+ nl = "plaatsing %a vervangen door %a vanwege export",
+ },
+ ["floatblocks:16"] = {
+ en = "anchor=orange, shift=red, box=green, dummy=blue, line=cyan, depth=magenta, prev=orange",
+ },
["floatblocks:1"] = {
en = "%a is empty",
},
@@ -514,14 +525,14 @@ return {
ro = "%a mutat",
},
["floatblocks:4"] = {
- cs = "%a umisteno",
- de = "%a plaziert",
- en = "%a placed",
- fr = "%a placé",
- it = "%a sistemato",
- nl = "%a geplaatst",
- no = "%a plassert",
- ro = "%a plasat",
+ cs = "%a umisteno (%s)",
+ de = "%a plaziert (%s)",
+ en = "%a placed (%s)",
+ fr = "%a placé (%s)",
+ it = "%a sistemato (%s)",
+ nl = "%a geplaatst (%s)",
+ no = "%a plassert (%s)",
+ ro = "%a plasat (%s)",
},
["floatblocks:5"] = {
cs = "poradi prizpusobeno",
diff --git a/tex/context/base/mkiv/mult-prm.lua b/tex/context/base/mkiv/mult-prm.lua
index 956f83636..601a615a0 100644
--- a/tex/context/base/mkiv/mult-prm.lua
+++ b/tex/context/base/mkiv/mult-prm.lua
@@ -1,17 +1,8 @@
--- the tex table has overlap
-
return {
["aleph"]={
- "AlephVersion",
"Alephminorversion",
"Alephrevision",
"Alephversion",
- "Omegaminorversion",
- "Omegarevision",
- "Omegaversion",
- "boxdir",
- "pagebottomoffset",
- "pagerightoffset",
},
["etex"]={
"botmarks",
@@ -85,9 +76,7 @@ return {
"Udelimiterover",
"Udelimiterunder",
"Uhextensible",
- "Uleft",
"Umathaccent",
---"Umathaccents",
"Umathaxis",
"Umathbinbinspacing",
"Umathbinclosespacing",
@@ -97,7 +86,6 @@ return {
"Umathbinordspacing",
"Umathbinpunctspacing",
"Umathbinrelspacing",
---"Umathbotaccent",
"Umathchar",
"Umathcharclass",
"Umathchardef",
@@ -115,7 +103,6 @@ return {
"Umathcloserelspacing",
"Umathcode",
"Umathcodenum",
- "Umathcodenumdef",
"Umathconnectoroverlapmin",
"Umathfractiondelsize",
"Umathfractiondenomdown",
@@ -137,6 +124,8 @@ return {
"Umathlimitbelowbgap",
"Umathlimitbelowkern",
"Umathlimitbelowvgap",
+ "Umathnolimitsubfactor",
+ "Umathnolimitsupfactor",
"Umathopbinspacing",
"Umathopclosespacing",
"Umathopenbinspacing",
@@ -210,12 +199,10 @@ return {
"Umathunderbarvgap",
"Umathunderdelimiterbgap",
"Umathunderdelimitervgap",
- "Umathnolimitsupfactor",
- "Umathnolimitsubfactor",
- "Umiddle",
+ "Unosubscript",
+ "Unosuperscript",
"Uoverdelimiter",
"Uradical",
- "Uright",
"Uroot",
"Uskewed",
"Uskewedwithdelims",
@@ -233,18 +220,19 @@ return {
"aligntab",
"attribute",
"attributedef",
- "hyphenpenaltymode",
+ "automaticdiscretionary",
"automatichyphenmode",
"automatichyphenpenalty",
- "automaticdiscretionary",
- "explicithyphenpenalty",
- "explicitdiscretionary",
+ "begincsname",
"bodydir",
- "boundary",
+ "bodydirection",
"boxdir",
+ "boxdirection",
+ "breakafterdirmode",
"catcodetable",
"clearmarks",
"copyfont",
+ "compoundhyphenmode",
"crampeddisplaystyle",
"crampedscriptscriptstyle",
"crampedscriptstyle",
@@ -254,19 +242,27 @@ return {
"dvifeedback",
"dvivariable",
"efcode",
- "hjcode",
- "firstvalidlanguage",
+ "etoksapp",
+ "etokspre",
+ "expanded",
+ "expandglyphsinfont",
+ "explicitdiscretionary",
+ "explicithyphenpenalty",
"fontid",
"formatname",
"gleaders",
- "hyphenationmin",
+ "hjcode",
"hyphenationbounds",
+ "hyphenationmin",
+ "hyphenpenaltymode",
"ifabsdim",
"ifabsnum",
+ "ifincsname",
"ifprimitive",
"ignoreligaturesinfont",
"initcatcodetable",
"insertht",
+ "lastnamedcs",
"lastsavedboxresourceindex",
"lastsavedimageresourceindex",
"lastsavedimageresourcepages",
@@ -278,57 +274,65 @@ return {
"letcharcode",
"letterspacefont",
"linedir",
+ "linedirection",
"localbrokenpenalty",
"localinterlinepenalty",
"localleftbox",
"localrightbox",
"lpcode",
"luaescapestring",
+ "luafunction",
"luatexbanner",
"luatexrevision",
"luatexversion",
- "luafunction",
+ "mathdelimitersmode",
"mathdir",
+ "mathdirection",
"mathdisplayskipmode",
"matheqnogapstep",
"mathitalicsmode",
"mathnolimitsmode",
"mathoption",
+ "mathpenaltiesmode",
+ "mathrulesfam",
+ "mathrulesmode",
"mathscriptsmode",
+ "mathscriptboxmode",
"mathstyle",
- "mathsurroundskip",
"mathsurroundmode",
- "mathrulesmode",
- "mathrulesfam",
- "noboundary",
- "nokerns",
+ "mathsurroundskip",
"nohrule",
+ "nokerns",
"noligs",
+ "normaldeviate",
"nospaces",
"novrule",
- "normaldeviate",
"outputbox",
"outputmode",
+ "pagebottomoffset",
"pagedir",
+ "pagedirection",
"pageheight",
- "pagebottomoffset",
"pageleftoffset",
"pagerightoffset",
"pagetopoffset",
"pagewidth",
"pardir",
+ "pardirection",
"pdfextension",
"pdffeedback",
"pdfvariable",
"postexhyphenchar",
"posthyphenchar",
+ "prebinoppenalty",
"predisplaygapfactor",
"preexhyphenchar",
"prehyphenchar",
+ "prerelpenalty",
"primitive",
"protrudechars",
- "protrusionboundary",
"pxdimen",
+ "quitvmode",
"randomseed",
"rightghost",
"rightmarginkern",
@@ -344,65 +348,29 @@ return {
"suppressfontnotfounderror",
"suppressifcsnameerror",
"suppresslongerror",
- "suppressoutererror",
"suppressmathparerror",
+ "suppressoutererror",
+ "suppressprimitiveerror",
"synctex",
"tagcode",
"textdir",
+ "textdirection",
+ "toksapp",
+ "tokspre",
"tracingfonts",
"uniformdeviate",
"useboxresource",
"useimageresource",
- "wordboundary",
- --
- "vpack",
- "hpack",
- "tpack",
- "csstring",
- "begincsname",
- "lastnamedcs",
- "toksapp",
- "tokspre",
- "etoksapp",
- "etokspre",
},
["omega"]={
- "OmegaVersion",
- "bodydir",
- "chardp",
- "charht",
- "charit",
- "charwd",
- "leftghost",
- "localbrokenpenalty",
- "localinterlinepenalty",
- "localleftbox",
- "localrightbox",
- "mathdir",
- "odelcode",
- "odelimiter",
- "omathaccent",
- "omathchar",
- "omathchardef",
- "omathcode",
- "oradical",
- "pagedir",
- "pageheight",
- "pagewidth",
- "pardir",
- "rightghost",
- "textdir",
+ "Omegaminorversion",
+ "Omegarevision",
+ "Omegaversion",
},
["pdftex"]={
- "efcode",
- "expanded",
- "ifincsname",
"ifpdfabsdim",
"ifpdfabsnum",
"ifpdfprimitive",
- "leftmarginkern",
- "letterspacefont",
- "lpcode",
"pdfadjustspacing",
"pdfannot",
"pdfcatalog",
@@ -415,8 +383,8 @@ return {
"pdfdest",
"pdfdestmargin",
"pdfdraftmode",
- "pdfeachlineheight",
"pdfeachlinedepth",
+ "pdfeachlineheight",
"pdfendlink",
"pdfendthread",
"pdffirstlineheight",
@@ -425,24 +393,22 @@ return {
"pdffontname",
"pdffontobjnum",
"pdffontsize",
- "pdfxformmargin",
"pdfgamma",
"pdfgentounicode",
"pdfglyphtounicode",
"pdfhorigin",
"pdfignoreddimen",
+ "pdfignoreunknownimages",
+ "pdfimageaddfilename",
"pdfimageapplygamma",
"pdfimagegamma",
"pdfimagehicolor",
"pdfimageresolution",
"pdfincludechars",
"pdfinclusioncopyfonts",
- "pdfignoreunknownimages",
"pdfinclusionerrorlevel",
- "pdfignoreunknownimages",
"pdfinfo",
"pdfinfoomitdate",
- "pdftrailerid",
"pdfinsertht",
"pdflastannot",
"pdflastlinedepth",
@@ -457,6 +423,7 @@ return {
"pdfliteral",
"pdfmapfile",
"pdfmapline",
+ "pdfmajorversion",
"pdfminorversion",
"pdfnames",
"pdfnoligatures",
@@ -472,9 +439,9 @@ return {
"pdfpageresources",
"pdfpagesattr",
"pdfpagewidth",
+ "pdfpkfixeddpi",
"pdfpkmode",
"pdfpkresolution",
- "pdfpkfixeddpi",
"pdfprimitive",
"pdfprotrudechars",
"pdfpxdimen",
@@ -491,188 +458,33 @@ return {
"pdfsetrandomseed",
"pdfstartlink",
"pdfstartthread",
+ "pdfsuppressoptionalinfo",
+ "pdfsuppressptexinfo",
"pdftexbanner",
"pdftexrevision",
"pdftexversion",
- "pdfsuppressptexinfo",
- "pdfsuppressoptionalinfo",
"pdfthread",
"pdfthreadmargin",
"pdftracingfonts",
"pdftrailer",
+ "pdftrailerid",
"pdfuniformdeviate",
"pdfuniqueresname",
"pdfvorigin",
"pdfxform",
"pdfxformattr",
+ "pdfxformmargin",
"pdfxformname",
"pdfxformresources",
"pdfximage",
- "quitvmode",
- "rightmarginkern",
- "rpcode",
- "tagcode",
},
["tex"]={
+ " ",
"-",
"/",
- "AlephVersion",
- "Alephminorversion",
- "Alephrevision",
- "Alephversion",
- "OmegaVersion",
- "Omegaminorversion",
- "Omegarevision",
- "Omegaversion",
- "Udelcode",
- "Udelcodenum",
- "Udelimiter",
- "Udelimiterover",
- "Udelimiterunder",
- "Uhextensible",
"Uleft",
- "Umathaccent",
---"Umathaccents",
- "Umathaxis",
- "Umathbinbinspacing",
- "Umathbinclosespacing",
- "Umathbininnerspacing",
- "Umathbinopenspacing",
- "Umathbinopspacing",
- "Umathbinordspacing",
- "Umathbinpunctspacing",
- "Umathbinrelspacing",
---"Umathbotaccent",
- "Umathchar",
- "Umathcharclass",
- "Umathchardef",
- "Umathcharfam",
- "Umathcharnum",
- "Umathcharnumdef",
- "Umathcharslot",
- "Umathclosebinspacing",
- "Umathcloseclosespacing",
- "Umathcloseinnerspacing",
- "Umathcloseopenspacing",
- "Umathcloseopspacing",
- "Umathcloseordspacing",
- "Umathclosepunctspacing",
- "Umathcloserelspacing",
- "Umathcode",
- "Umathcodenum",
- "Umathcodenumdef",
- "Umathconnectoroverlapmin",
- "Umathfractiondelsize",
- "Umathfractiondenomdown",
- "Umathfractiondenomvgap",
- "Umathfractionnumup",
- "Umathfractionnumvgap",
- "Umathfractionrule",
- "Umathinnerbinspacing",
- "Umathinnerclosespacing",
- "Umathinnerinnerspacing",
- "Umathinneropenspacing",
- "Umathinneropspacing",
- "Umathinnerordspacing",
- "Umathinnerpunctspacing",
- "Umathinnerrelspacing",
- "Umathlimitabovebgap",
- "Umathlimitabovekern",
- "Umathlimitabovevgap",
- "Umathlimitbelowbgap",
- "Umathlimitbelowkern",
- "Umathlimitbelowvgap",
- "Umathopbinspacing",
- "Umathopclosespacing",
- "Umathopenbinspacing",
- "Umathopenclosespacing",
- "Umathopeninnerspacing",
- "Umathopenopenspacing",
- "Umathopenopspacing",
- "Umathopenordspacing",
- "Umathopenpunctspacing",
- "Umathopenrelspacing",
- "Umathoperatorsize",
- "Umathopinnerspacing",
- "Umathopopenspacing",
- "Umathopopspacing",
- "Umathopordspacing",
- "Umathoppunctspacing",
- "Umathoprelspacing",
- "Umathordbinspacing",
- "Umathordclosespacing",
- "Umathordinnerspacing",
- "Umathordopenspacing",
- "Umathordopspacing",
- "Umathordordspacing",
- "Umathordpunctspacing",
- "Umathordrelspacing",
- "Umathoverbarkern",
- "Umathoverbarrule",
- "Umathoverbarvgap",
- "Umathoverdelimiterbgap",
- "Umathoverdelimitervgap",
- "Umathpunctbinspacing",
- "Umathpunctclosespacing",
- "Umathpunctinnerspacing",
- "Umathpunctopenspacing",
- "Umathpunctopspacing",
- "Umathpunctordspacing",
- "Umathpunctpunctspacing",
- "Umathpunctrelspacing",
- "Umathquad",
- "Umathradicaldegreeafter",
- "Umathradicaldegreebefore",
- "Umathradicaldegreeraise",
- "Umathradicalkern",
- "Umathradicalrule",
- "Umathradicalvgap",
- "Umathrelbinspacing",
- "Umathrelclosespacing",
- "Umathrelinnerspacing",
- "Umathrelopenspacing",
- "Umathrelopspacing",
- "Umathrelordspacing",
- "Umathrelpunctspacing",
- "Umathrelrelspacing",
- "Umathskewedfractionhgap",
- "Umathskewedfractionvgap",
- "Umathspaceafterscript",
- "Umathstackdenomdown",
- "Umathstacknumup",
- "Umathstackvgap",
- "Umathsubshiftdown",
- "Umathsubshiftdrop",
- "Umathsubsupshiftdown",
- "Umathsubsupvgap",
- "Umathsubtopmax",
- "Umathsupbottommin",
- "Umathsupshiftdrop",
- "Umathsupshiftup",
- "Umathsupsubbottommax",
- "Umathunderbarkern",
- "Umathunderbarrule",
- "Umathunderbarvgap",
- "Umathunderdelimiterbgap",
- "Umathunderdelimitervgap",
- "Umathnolimitsupfactor",
- "Umathnolimitsubfactor",
"Umiddle",
- "Uoverdelimiter",
- "Uradical",
"Uright",
- "Uroot",
- "Uskewed",
- "Uskewedwithdelims",
- "Ustack",
- "Ustartdisplaymath",
- "Ustartmath",
- "Ustopdisplaymath",
- "Ustopmath",
- "Usubscript",
- "Usuperscript",
- "Uunderdelimiter",
- "Uvextensible",
"above",
"abovedisplayshortskip",
"abovedisplayskip",
@@ -682,18 +494,8 @@ return {
"advance",
"afterassignment",
"aftergroup",
- "alignmark",
- "aligntab",
"atop",
"atopwithdelims",
- "attribute",
- "attributedef",
- "hyphenpenaltymode",
- "automatichyphenmode",
- "automatichyphenpenalty",
- "automaticdiscretionary",
- "explicithyphenpenalty",
- "explicitdiscretionary",
"badness",
"baselineskip",
"batchmode",
@@ -701,44 +503,25 @@ return {
"belowdisplayshortskip",
"belowdisplayskip",
"binoppenalty",
- "bodydir",
"botmark",
- "botmarks",
"boundary",
"box",
- "boxdir",
"boxmaxdepth",
"brokenpenalty",
"catcode",
- "catcodetable",
"char",
"chardef",
---"chardp",
---"charht",
---"charit",
---"charwd",
"cleaders",
- "clearmarks",
"closein",
"closeout",
- "clubpenalties",
"clubpenalty",
"copy",
- "copyfont",
"count",
"countdef",
"cr",
- "crampeddisplaystyle",
- "crampedscriptscriptstyle",
- "crampedscriptstyle",
- "crampedtextstyle",
"crcr",
"csname",
- "currentgrouplevel",
- "currentgrouptype",
- "currentifbranch",
- "currentiflevel",
- "currentiftype",
+ "csstring",
"day",
"deadcycles",
"def",
@@ -748,32 +531,20 @@ return {
"delimiter",
"delimiterfactor",
"delimitershortfall",
- "detokenize",
"dimen",
"dimendef",
- "dimexpr",
"directlua",
"discretionary",
"displayindent",
"displaylimits",
"displaystyle",
- "displaywidowpenalties",
"displaywidowpenalty",
"displaywidth",
"divide",
"doublehyphendemerits",
"dp",
"dump",
- "dviextension",
- "dvifeedback",
- "dvivariable",
- "eTeXVersion",
- "eTeXminorversion",
- "eTeXrevision",
- "eTeXversion",
"edef",
- "efcode",
- "hjcode",
"else",
"emergencystretch",
"end",
@@ -789,7 +560,6 @@ return {
"escapechar",
"everycr",
"everydisplay",
- "everyeof",
"everyhbox",
"everyjob",
"everymath",
@@ -798,35 +568,19 @@ return {
"exhyphenchar",
"exhyphenpenalty",
"expandafter",
- "expanded",
- "expandglyphsinfont",
"fam",
"fi",
"finalhyphendemerits",
"firstmark",
- "firstmarks",
+ "firstvalidlanguage",
"floatingpenalty",
"font",
- "fontchardp",
- "fontcharht",
- "fontcharic",
- "fontcharwd",
"fontdimen",
- "firstvalidlanguage",
- "fontid",
"fontname",
- "formatname",
"futurelet",
"gdef",
- "gleaders",
"global",
"globaldefs",
- "glueexpr",
- "glueshrink",
- "glueshrinkorder",
- "gluestretch",
- "gluestretchorder",
- "gluetomu",
"halign",
"hangafter",
"hangindent",
@@ -838,6 +592,7 @@ return {
"hfuzz",
"hoffset",
"holdinginserts",
+ "hpack",
"hrule",
"hsize",
"hskip",
@@ -846,94 +601,55 @@ return {
"hyphenation",
"hyphenchar",
"hyphenpenalty",
- "hyphenationmin",
- "hyphenationbounds",
"if",
- "ifabsdim",
- "ifabsnum",
"ifcase",
"ifcat",
- "ifcsname",
- "ifdefined",
"ifdim",
"ifeof",
"iffalse",
- "iffontchar",
"ifhbox",
"ifhmode",
- "ifincsname",
"ifinner",
"ifmmode",
"ifnum",
"ifodd",
- "ifpdfabsdim",
- "ifpdfabsnum",
- "ifpdfprimitive",
- "ifprimitive",
"iftrue",
"ifvbox",
"ifvmode",
"ifvoid",
"ifx",
- "ignoreligaturesinfont",
"ignorespaces",
"immediate",
"indent",
- "initcatcodetable",
"input",
"inputlineno",
"insert",
"insertpenalties",
- "interactionmode",
- "interlinepenalties",
"interlinepenalty",
"jobname",
"kern",
"language",
"lastbox",
"lastkern",
- "lastlinefit",
- "lastnodetype",
"lastpenalty",
- "lastsavedboxresourceindex",
- "lastsavedimageresourceindex",
- "lastsavedimageresourcepages",
"lastskip",
- "lastxpos",
- "lastypos",
- "latelua",
"lccode",
"leaders",
"left",
- "leftghost",
"lefthyphenmin",
- "leftmarginkern",
"leftskip",
"leqno",
"let",
- "letcharcode",
- "letterspacefont",
"limits",
- "linedir",
"linepenalty",
"lineskip",
"lineskiplimit",
- "localbrokenpenalty",
- "localinterlinepenalty",
- "localleftbox",
- "localrightbox",
"long",
"looseness",
"lower",
"lowercase",
- "lpcode",
- "luaescapestring",
- "luatexbanner",
- "luatexrevision",
- "luatexversion",
"mag",
"mark",
- "marks",
"mathaccent",
"mathbin",
"mathchar",
@@ -941,24 +657,12 @@ return {
"mathchoice",
"mathclose",
"mathcode",
- "mathdir",
- "mathdisplayskipmode",
- "matheqnogapstep",
"mathinner",
- "mathitalicsmode",
- "mathnolimitsmode",
"mathop",
"mathopen",
- "mathoption",
"mathord",
"mathpunct",
"mathrel",
- "mathrulesmode",
- "mathrulesfam",
- "mathscriptsmode",
- "mathstyle",
- "mathsurroundskip",
- "mathsurroundmode",
"mathsurround",
"maxdeadcycles",
"maxdepth",
@@ -971,225 +675,63 @@ return {
"moveleft",
"moveright",
"mskip",
- "muexpr",
"multiply",
"muskip",
"muskipdef",
- "mutoglue",
"newlinechar",
"noalign",
"noboundary",
"noexpand",
"noindent",
- "nokerns",
- "nohrule",
- "noligs",
- "nospaces",
- "novrule",
"nolimits",
- "nolocaldirs",
- "nolocalwhatsits",
"nonscript",
"nonstopmode",
- "normaldeviate",
"nulldelimiterspace",
"nullfont",
"number",
- "numexpr",
- "odelcode",
- "odelimiter",
- "omathaccent",
- "omathchar",
- "omathchardef",
- "omathcode",
"omit",
"openin",
"openout",
"or",
- "oradical",
"outer",
"output",
- "outputbox",
"outputpenalty",
"over",
"overfullrule",
"overline",
"overwithdelims",
- "pagebottomoffset",
"pagedepth",
- "pagedir",
- "pagediscards",
"pagefilllstretch",
"pagefillstretch",
"pagefilstretch",
"pagegoal",
- "pageheight",
- "pageleftoffset",
- "pagerightoffset",
"pageshrink",
"pagestretch",
- "pagetopoffset",
"pagetotal",
- "pagewidth",
"par",
- "pardir",
"parfillskip",
"parindent",
"parshape",
- "parshapedimen",
- "parshapeindent",
- "parshapelength",
"parskip",
"patterns",
"pausing",
- "pdfadjustspacing",
- "pdfannot",
- "pdfcatalog",
- "pdfcolorstack",
- "pdfcolorstackinit",
- "pdfcompresslevel",
- "pdfcopyfont",
- "pdfcreationdate",
- "pdfdecimaldigits",
- "pdfdest",
- "pdfdestmargin",
- "pdfdraftmode",
- "pdfeachlineheight",
- "pdfeachlinedepth",
- "pdfendlink",
- "pdfendthread",
- "pdfextension",
- "pdfvariable",
- "pdffirstlineheight",
- "pdffontattr",
- "pdffontexpand",
- "pdffontname",
- "pdffontobjnum",
- "pdffontsize",
- "pdfxformmargin",
- "pdfgamma",
- "pdfgentounicode",
- "pdfglyphtounicode",
- "pdfhorigin",
- "pdfignoreddimen",
- "pdfimageaddfilename",
- "pdfimageapplygamma",
- "pdfimagegamma",
- "pdfimagehicolor",
- "pdfimageresolution",
- "pdfincludechars",
- "pdfinclusioncopyfonts",
- "pdfinclusionerrorlevel",
- "pdfignoreunknownimages",
- "pdfinfo",
- "pdfinfoomitdate",
- "pdftrailerid",
- "pdfinsertht",
- "pdflastannot",
- "pdflastlinedepth",
- "pdflastlink",
- "pdflastobj",
- "pdflastxform",
- "pdflastximage",
- "pdflastximagepages",
- "pdflastxpos",
- "pdflastypos",
- "pdflinkmargin",
- "pdfliteral",
- "pdfmapfile",
- "pdfmapline",
- "pdfminorversion",
- "pdfnames",
- "pdfnoligatures",
- "pdfnormaldeviate",
- "pdfobj",
- "pdfobjcompresslevel",
- "pdfoutline",
- "pdfoutput",
- "pdfpageattr",
- "pdfpagebox",
- "pdfpageheight",
- "pdfpageref",
- "pdfpageresources",
- "pdfpagesattr",
- "pdfpagewidth",
- "pdfpkmode",
- "pdfpkresolution",
- "pdfpkfixeddpi",
- "pdfprimitive",
- "pdfprotrudechars",
- "pdfpxdimen",
- "pdfrandomseed",
- "pdfrefobj",
- "pdfrefxform",
- "pdfrefximage",
- "pdfreplacefont",
- "pdfrestore",
- "pdfretval",
- "pdfsave",
- "pdfsavepos",
- "pdfsetmatrix",
- "pdfsetrandomseed",
- "pdfstartlink",
- "pdfstartthread",
- "pdftexbanner",
- "pdftexrevision",
- "pdftexversion",
- "pdfsuppressptexinfo",
- "pdfsuppressoptionalinfo",
- "pdfthread",
- "pdfthreadmargin",
- "pdftracingfonts",
- "pdftrailer",
- "pdfuniformdeviate",
- "pdfuniqueresname",
- "pdfvorigin",
- "pdfxform",
- "pdfxformattr",
- "pdfxformname",
- "pdfxformresources",
- "pdfximage",
"penalty",
"postdisplaypenalty",
- "postexhyphenchar",
- "posthyphenchar",
- "predisplaydirection",
- "predisplaygapfactor",
"predisplaypenalty",
"predisplaysize",
- "preexhyphenchar",
- "prehyphenchar",
"pretolerance",
"prevdepth",
"prevgraf",
- "primitive",
- "protected",
- "protrudechars",
"protrusionboundary",
- "pxdimen",
- "quitvmode",
"radical",
"raise",
- "randomseed",
"read",
- "readline",
"relax",
"relpenalty",
"right",
- "rightghost",
"righthyphenmin",
- "rightmarginkern",
"rightskip",
"romannumeral",
- "rpcode",
- "saveboxresource",
- "saveimageresource",
- "savepos",
- "savecatcodetable",
- "savinghyphcodes",
- "savingvdiscards",
- "scantextokens",
- "scantokens",
"scriptfont",
"scriptscriptfont",
"scriptscriptstyle",
@@ -1197,46 +739,28 @@ return {
"scriptstyle",
"scrollmode",
"setbox",
- "setfontid",
"setlanguage",
- "setrandomseed",
- "shapemode",
"sfcode",
"shipout",
"show",
"showbox",
"showboxbreadth",
"showboxdepth",
- "showgroups",
- "showifs",
"showlists",
"showthe",
- "showtokens",
"skewchar",
"skip",
"skipdef",
---"skipexpr",
"spacefactor",
"spaceskip",
"span",
"special",
"splitbotmark",
- "splitbotmarks",
- "splitdiscards",
"splitfirstmark",
- "splitfirstmarks",
"splitmaxdepth",
"splittopskip",
"string",
- "suppressfontnotfounderror",
- "suppressifcsnameerror",
- "suppresslongerror",
- "suppressoutererror",
- "suppressmathparerror",
- "synctex",
"tabskip",
- "tagcode",
- "textdir",
"textfont",
"textstyle",
"the",
@@ -1247,39 +771,28 @@ return {
"toksdef",
"tolerance",
"topmark",
- "topmarks",
"topskip",
- "tracingassigns",
+ "tpack",
"tracingcommands",
- "tracingfonts",
- "tracinggroups",
- "tracingifs",
"tracinglostchars",
"tracingmacros",
- "tracingnesting",
"tracingonline",
"tracingoutput",
"tracingpages",
"tracingparagraphs",
"tracingrestores",
- "tracingscantokens",
"tracingstats",
"uccode",
"uchyph",
"underline",
- "unexpanded",
"unhbox",
"unhcopy",
- "uniformdeviate",
"unkern",
- "unless",
"unpenalty",
"unskip",
"unvbox",
"unvcopy",
"uppercase",
- "useboxresource",
- "useimageresource",
"vadjust",
"valign",
"vbadness",
@@ -1290,6 +803,7 @@ return {
"vfilneg",
"vfuzz",
"voffset",
+ "vpack",
"vrule",
"vsize",
"vskip",
@@ -1297,7 +811,6 @@ return {
"vss",
"vtop",
"wd",
- "widowpenalties",
"widowpenalty",
"wordboundary",
"write",
@@ -1305,17 +818,6 @@ return {
"xleaders",
"xspaceskip",
"year",
- --
- "vpack",
- "hpack",
- "tpack",
- "csstring",
- "begincsname",
- "lastnamedcs",
- "toksapp",
- "tokspre",
- "etoksapp",
- "etokspre",
},
["xetex"]={
"XeTeXversion",
diff --git a/tex/context/base/mkiv/mult-prm.mkiv b/tex/context/base/mkiv/mult-prm.mkiv
index e385341c2..1b9195f41 100644
--- a/tex/context/base/mkiv/mult-prm.mkiv
+++ b/tex/context/base/mkiv/mult-prm.mkiv
@@ -13,63 +13,95 @@
%D This file is only a helper for generating files that can be used in an
%D editor for syntax highlighting.
+% local all = table.load("mult-prm.lua")
+% local tex = table.tohash(all.tex)
+% for k, v in next, all do
+% if k ~= "tex" then
+% local h = table.tohash(v)
+% for k, v in next, h do
+% tex[k] = nil
+% end
+% all[k] = table.sortedkeys(h)
+% end
+% end
+% all.tex = table.sortedkeys(tex)
+% print((string.gsub(table.serialize(all,true),' "','\n "')))
+
\startluacode
context.starttext()
- function table.sorted(t)
- table.sort(t)
- return t
- end
-
local missing = {
tex = {
- "def", "catcode", "futurelet", "chardef",
- "voffset", "hoffset",
- "numexpr", "dimexpr",
- "write", "dump", "skipdef,"
},
etex = {
- "savingvdiscards",
- },
- pdftex = {
},
luatex = {
},
- aleph = {
- "AlephVersion", "Alephminorversion", "Alephrevision", "Alephversion",
+ pdftex = { -- maybe not complete
+ "ifpdfabsdim", "ifpdfabsnum", "ifpdfprimitive", "pdfadjustspacing",
+ "pdfannot", "pdfcatalog", "pdfcolorstack", "pdfcolorstackinit",
+ "pdfcompresslevel", "pdfcopyfont", "pdfcreationdate",
+ "pdfdecimaldigits", "pdfdest", "pdfdestmargin", "pdfdraftmode",
+ "pdfeachlinedepth", "pdfeachlineheight", "pdfendlink",
+ "pdfendthread", "pdffirstlineheight", "pdffontattr", "pdffontexpand",
+ "pdffontname", "pdffontobjnum", "pdffontsize", "pdfgamma",
+ "pdfgentounicode", "pdfglyphtounicode", "pdfhorigin",
+ "pdfignoreddimen", "pdfignoreunknownimages", "pdfimageaddfilename",
+ "pdfimageapplygamma", "pdfimagegamma", "pdfimagehicolor",
+ "pdfimageresolution", "pdfincludechars", "pdfinclusioncopyfonts",
+ "pdfinclusionerrorlevel", "pdfinfo", "pdfinfoomitdate",
+ "pdfinsertht", "pdflastannot", "pdflastlinedepth", "pdflastlink",
+ "pdflastobj", "pdflastxform", "pdflastximage", "pdflastximagepages",
+ "pdflastxpos", "pdflastypos", "pdflinkmargin", "pdfliteral",
+ "pdfmapfile", "pdfmapline", "pdfmajorversion", "pdfminorversion", "pdfnames",
+ "pdfnoligatures", "pdfnormaldeviate", "pdfobj",
+ "pdfobjcompresslevel", "pdfoutline", "pdfoutput", "pdfpageattr",
+ "pdfpagebox", "pdfpageheight", "pdfpageref", "pdfpageresources",
+ "pdfpagesattr", "pdfpagewidth", "pdfpkfixeddpi", "pdfpkmode",
+ "pdfpkresolution", "pdfprimitive", "pdfprotrudechars", "pdfpxdimen",
+ "pdfrandomseed", "pdfrefobj", "pdfrefxform", "pdfrefximage",
+ "pdfreplacefont", "pdfrestore", "pdfretval", "pdfsave", "pdfsavepos",
+ "pdfsetmatrix", "pdfsetrandomseed", "pdfstartlink", "pdfstartthread",
+ "pdfsuppressoptionalinfo", "pdfsuppressptexinfo", "pdftexbanner",
+ "pdftexrevision", "pdftexversion", "pdfthread", "pdfthreadmargin",
+ "pdftracingfonts", "pdftrailer", "pdftrailerid", "pdfuniformdeviate",
+ "pdfuniqueresname", "pdfvorigin", "pdfxform", "pdfxformattr",
+ "pdfxformmargin", "pdfxformname", "pdfxformresources", "pdfximage",
},
- omega = {
+ aleph = { -- we don't bother
+ "Alephminorversion", "Alephrevision", "Alephversion",
+ },
+ omega = { -- we don't bother
"Omegaminorversion", "Omegarevision", "Omegaversion",
- "omathcode", "odelcode", "omathchardef", "omathchar", "omathaccent", "odelimiter", "oradical",
},
- xetex = {
+ xetex = { -- we don't bother
"XeTeXversion",
},
- plain = {
- "TeX",
- "bgroup", "egroup", "endgraf", "space", "empty", "null",
- "newcount", "newdimen", "newskip", "newmuskip", "newbox", "newtoks", "newhelp", "newread", "newwrite", "newfam", "newlanguage", "newinsert", "newif",
- "maxdimen", "magstephalf", "magstep",
- "frenchspacing", "nonfrenchspacing", "normalbaselines", "obeylines", "obeyspaces", "raggedright", "ttraggedright",
- "thinspace", "negthinspace", "enspace", "enskip", "quad", "qquad",
- "smallskip", "medskip", "bigskip", "removelastskip", "topglue", "vglue", "hglue",
- "break", "nobreak", "allowbreak", "filbreak", "goodbreak", "smallbreak", "medbreak", "bigbreak",
- "line", "leftline", "rightline", "centerline", "rlap", "llap", "underbar", "strutbox", "strut",
- "cases", "matrix", "pmatrix", "bordermatrix", "eqalign", "displaylines", "eqalignno", "leqalignno",
- "pageno", "folio", "tracingall", "showhyphens", "fmtname", "fmtversion",
- "hphantom", "vphantom", "phantom", "smash",
- },
+ -- plain = {
+ -- "TeX",
+ -- "bgroup", "egroup", "endgraf", "space", "empty", "null",
+ -- "newcount", "newdimen", "newskip", "newmuskip", "newbox", "newtoks", "newhelp", "newread", "newwrite", "newfam", "newlanguage", "newinsert", "newif",
+ -- "maxdimen", "magstephalf", "magstep",
+ -- "frenchspacing", "nonfrenchspacing", "normalbaselines", "obeylines", "obeyspaces", "raggedright", "ttraggedright",
+ -- "thinspace", "negthinspace", "enspace", "enskip", "quad", "qquad",
+ -- "smallskip", "medskip", "bigskip", "removelastskip", "topglue", "vglue", "hglue",
+ -- "break", "nobreak", "allowbreak", "filbreak", "goodbreak", "smallbreak", "medbreak", "bigbreak",
+ -- "line", "leftline", "rightline", "centerline", "rlap", "llap", "underbar", "strutbox", "strut",
+ -- "cases", "matrix", "pmatrix", "bordermatrix", "eqalign", "displaylines", "eqalignno", "leqalignno",
+ -- "pageno", "folio", "tracingall", "showhyphens", "fmtname", "fmtversion",
+ -- "hphantom", "vphantom", "phantom", "smash",
+ -- },
}
local primitives = {
- tex = table.sorted( table.merged( missing.tex , table.fromhash(tex.primitives()) ) ),
- etex = table.sorted( table.merged( missing.etex , tex.extraprimitives('etex') ) ),
- pdftex = table.sorted( table.merged( missing.pdftex, tex.extraprimitives('pdftex') ) ),
- luatex = table.sorted( table.merged( missing.luatex, tex.extraprimitives('luatex') ) ),
- aleph = table.sorted( table.merged( missing.aleph , tex.extraprimitives('aleph') ) ),
- omega = table.sorted( table.merged( missing.omega , tex.extraprimitives('omega') ) ),
- xetex = table.sorted( table.merged( missing.xetex , { } ) ),
+ tex = table.sorted( table.merged( missing.tex , tex.extraprimitives("core","tex") ) ),
+ etex = table.sorted( table.merged( missing.etex , tex.extraprimitives("etex") ) ),
+ pdftex = table.sorted( table.merged( missing.pdftex, { } ) ),
+ luatex = table.sorted( table.merged( missing.luatex, tex.extraprimitives("luatex") ) ),
+ aleph = table.sorted( table.merged( missing.aleph , { } ) ),
+ omega = table.sorted( table.merged( missing.omega , { } ) ),
+ xetex = table.sorted( table.merged( missing.xetex , { } ) ),
}
-- table.remove(primitives.tex,1) -- get rid of \-
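
A minimal sketch of the regeneration recipe that the commented header and the \startluacode block above describe: query the engine for its primitive sets, defensively remove any overlap with the core tex list (as the commented recipe does), and serialize the result. It assumes a LuaTeX run with ConTeXt's table helpers (table.tohash, table.sortedkeys, table.serialize) and io.savedata available; the output filename is illustrative.

    -- sketch only, not the shipped generator
    local all = {
        tex    = tex.extraprimitives("core","tex"),
        etex   = tex.extraprimitives("etex"),
        luatex = tex.extraprimitives("luatex"),
    }
    local texonly = table.tohash(all.tex)
    for set, list in next, all do
        if set ~= "tex" then
            for i=1,#list do
                texonly[list[i]] = nil -- keep engine extensions out of the core list
            end
            all[set] = table.sortedkeys(table.tohash(list))
        end
    end
    all.tex = table.sortedkeys(texonly)
    io.savedata("mult-prm-new.lua",table.serialize(all,true))
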
diff --git a/tex/context/base/mkiv/mult-sys.mkiv b/tex/context/base/mkiv/mult-sys.mkiv
index bd3ff9b3d..7000eed7b 100644
--- a/tex/context/base/mkiv/mult-sys.mkiv
+++ b/tex/context/base/mkiv/mult-sys.mkiv
@@ -61,6 +61,7 @@
\definesystemconstant {gbenglish} \definesystemconstant {gb}
\definesystemconstant {german} \definesystemconstant {de}
\definesystemconstant {greek} \definesystemconstant {gr}
+\definesystemconstant {hebrew} \definesystemconstant {he}
\definesystemconstant {hungarian} \definesystemconstant {hu}
\definesystemconstant {italian} \definesystemconstant {it}
\definesystemconstant {japanese} \definesystemconstant {ja}
@@ -268,6 +269,7 @@
\definesystemconstant {internal}
\definesystemconstant {current}
\definesystemconstant {chain}
+\definesystemconstant {class}
% translating setups is asking for a mess so we keep them as-is:
@@ -302,8 +304,8 @@
\definesystemconstant {ucgreek}
\definesystemconstant {sygreek}
\definesystemconstant {italics}
-
-\definesystemconstant {run}
+\definesystemconstant {ligatures}
+\definesystemconstant {collapsing}
\definesystemconstant {default}
@@ -362,6 +364,8 @@
\definesystemconstant {kernpairs}
\definesystemconstant {mixedcolumn}
+\definesystemconstant {ampersand}
+
%definesystemconstant {property}
%definesystemconstant {overprint}
%definesystemconstant {layer}
@@ -477,6 +481,7 @@
\definemessageconstant {metapost}
\definemessageconstant {chemicals}
\definemessageconstant {publications}
+\definemessageconstant {backend}
%D When we use numbers and dimensions the same applies as with the keywords like
%D \type {width} and \type {plus} mentioned earlier.
@@ -628,47 +633,11 @@
%D The setup files for the language, font, color and special subsystems have a common
%D prefix. This means that we have at most three characters for unique filenames.
-\definefileconstant {colorprefix} {colo-}
-%definefileconstant {encodingprefix} {enco-}
-%definefileconstant {filterprefix} {filt-}
-\definefileconstant {fontprefix} {font-}
-%definefileconstant {handlingprefix} {hand-}
-%definefileconstant {javascriptprefix} {java-}
-%definefileconstant {languageprefix} {lang-}
-%definefileconstant {mathprefix} {math-}
-%definefileconstant {metapostprefix} {meta-}
-%definefileconstant {regimeprefix} {regi-}
-%definefileconstant {specialprefix} {spec-}
-\definefileconstant {symbolprefix} {symb-}
-\definefileconstant {typeprefix} {type-}
-%definefileconstant {xtagprefix} {xtag-}
-%definefileconstant {propprefix} {prop-}
-%definefileconstant {unicprefix} {unic-}
-%definefileconstant {sortprefix} {sort-}
-%definefileconstant {prettyprefix} {pret-}
-
-%definefileconstant {moduleprefix} {m-}
-%definefileconstant {styleprefix} {s-}
-%definefileconstant {xstyleprefix} {x-}
-%definefileconstant {privateprefix} {p-}
-%definefileconstant {thirdprefix} {t-}
-
-%definefileconstant {beforeprefix} {b-}
-%definefileconstant {afterprefix} {a-}
-
-% for old times sake:
-
-% \defineinterfaceconstant {x} {x}
-% \defineinterfaceconstant {xx} {xx}
-% \defineinterfaceconstant {em} {em} % will go
-
-% \defineinterfaceconstant {tf} {tf}
-% \defineinterfaceconstant {bf} {bf}
-% \defineinterfaceconstant {bs} {bs}
-% \defineinterfaceconstant {bi} {bi}
-% \defineinterfaceconstant {sl} {sl}
-% \defineinterfaceconstant {it} {it}
-% \defineinterfaceconstant {sc} {sc} % keep, used elsewhere (or define in mult-def)
+\definefileconstant {colo_run} {colo-run}
+\definefileconstant {font_run} {font-run}
+\definefileconstant {page_run} {page-run}
+\definefileconstant {symb_run} {symb-run}
+\definefileconstant {publ_tra} {publ-tra}
%D For figure inclusion we need(ed):
diff --git a/tex/context/base/mkiv/node-acc.lua b/tex/context/base/mkiv/node-acc.lua
index dccd7b7c0..03f6d7476 100644
--- a/tex/context/base/mkiv/node-acc.lua
+++ b/tex/context/base/mkiv/node-acc.lua
@@ -16,13 +16,11 @@ local tonut = nodes.tonut
local tonode = nodes.tonode
local getid = nuts.getid
-local getfield = nuts.getfield
local getattr = nuts.getattr
local getlist = nuts.getlist
local getchar = nuts.getchar
local getnext = nuts.getnext
-local setfield = nuts.setfield
local setattr = nuts.setattr
local setlink = nuts.setlink
local setchar = nuts.setchar
@@ -136,6 +134,8 @@ end)
--
-- tasks.appendaction("processors", "words", "nodes.injectspans")
--
+-- local pdfpageliteral = nuts.pool.pdfpageliteral
+--
-- local function injectspans(head)
-- local done = false
-- for n in traverse_nodes(tonut(head)) do
@@ -144,8 +144,8 @@ end)
-- local a = getattr(n,a_hyphenated)
-- if a then
-- local str = codes[a]
--- local b = new_pdfliteral(format("/Span << /ActualText %s >> BDC", lpdf.tosixteen(str)))
--- local e = new_pdfliteral("EMC")
+-- local b = pdfpageliteral(format("/Span << /ActualText %s >> BDC", lpdf.tosixteen(str)))
+-- local e = pdfpageliteral("EMC")
-- insert_before(head,n,b)
-- insert_after(head,n,e)
-- done = true
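
The commented-out injectspans code above now takes its literals from the node pool (nuts.pool.pdfpageliteral) instead of new_pdfliteral. A hedged, nuts-level sketch of that wrapping pattern; wrapactualtext is an illustrative name and the snippet is not part of the patch:

    -- sketch: bracket a node with /Span ActualText ... BDC and EMC page literals
    local nuts           = nodes.nuts
    local insert_before  = nuts.insert_before
    local insert_after   = nuts.insert_after
    local pdfpageliteral = nuts.pool.pdfpageliteral
    local format         = string.format

    local function wrapactualtext(head,n,str)
        local b = pdfpageliteral(format("/Span << /ActualText %s >> BDC",lpdf.tosixteen(str)))
        local e = pdfpageliteral("EMC")
        head = insert_before(head,n,b)
        head = insert_after(head,n,e)
        return head
    end
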
diff --git a/tex/context/base/mkiv/node-aux.lua b/tex/context/base/mkiv/node-aux.lua
index c6b276337..84567068b 100644
--- a/tex/context/base/mkiv/node-aux.lua
+++ b/tex/context/base/mkiv/node-aux.lua
@@ -10,7 +10,8 @@ if not modules then modules = { } end modules ['node-aux'] = {
local type, tostring = type, tostring
-local nodes, node = nodes, node
+local nodes = nodes
+local context = context
local utfvalues = utf.values
@@ -34,11 +35,12 @@ local getlist = nuts.getlist
local getfont = nuts.getfont
local getchar = nuts.getchar
local getattr = nuts.getattr
-local getfield = nuts.getfield
local getboth = nuts.getboth
local getcomponents = nuts.getcomponents
local getwidth = nuts.getwidth
local setwidth = nuts.setwidth
+local getboxglue = nuts.getboxglue
+local setboxglue = nuts.setboxglue
local setfield = nuts.setfield
local setattr = nuts.setattr
@@ -456,9 +458,8 @@ local function rehpack(n,width)
local size = width or getwidth(n)
local temp = hpack_nodes(head,size,"exactly")
setwidth(n,size)
- setfield(n,"glue_set", getfield(temp,"glue_set"))
- setfield(n,"glue_sign", getfield(temp,"glue_sign"))
- setfield(n,"glue_order",getfield(temp,"glue_order"))
+ local set, order, sign = getboxglue(temp)
+ setboxglue(n,set,order,sign)
setlist(temp)
flush_node(temp)
return n
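
The rehpack change above replaces three getfield/setfield calls on glue_set, glue_sign and glue_order with the paired accessors getboxglue and setboxglue. The same idiom, isolated as a small hedged sketch; copyglue is an illustrative name and the flush binding is assumed, not taken from this patch:

    -- sketch: transfer the glue state of a freshly packed copy onto a box
    local nuts       = nodes.nuts
    local hpack      = nuts.hpack
    local getlist    = nuts.getlist
    local setlist    = nuts.setlist
    local getwidth   = nuts.getwidth
    local getboxglue = nuts.getboxglue
    local setboxglue = nuts.setboxglue
    local flushnode  = nuts.flush_node or nuts.flush -- exact binding name assumed

    local function copyglue(box)
        local temp = hpack(getlist(box),getwidth(box),"exactly")
        local set, order, sign = getboxglue(temp)
        setboxglue(box,set,order,sign)
        setlist(temp)   -- detach the shared list before freeing the wrapper
        flushnode(temp)
        return box
    end
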
diff --git a/tex/context/base/mkiv/node-bck.lua b/tex/context/base/mkiv/node-bck.lua
index abb025b74..4ed5abe5e 100644
--- a/tex/context/base/mkiv/node-bck.lua
+++ b/tex/context/base/mkiv/node-bck.lua
@@ -26,7 +26,6 @@ local nodepool = nuts.pool
local tonode = nuts.tonode
local tonut = nuts.tonut
-local getfield = nuts.getfield
local getnext = nuts.getnext
local getprev = nuts.getprev
local getid = nuts.getid
diff --git a/tex/context/base/mkiv/node-bck.mkiv b/tex/context/base/mkiv/node-bck.mkiv
index 58edab668..6bfc43d6a 100644
--- a/tex/context/base/mkiv/node-bck.mkiv
+++ b/tex/context/base/mkiv/node-bck.mkiv
@@ -21,7 +21,7 @@
\unprotect
-\registerctxluafile{node-bck}{1.001}
+\registerctxluafile{node-bck}{}
% \backgroundvbox[green] {\input tufte } \par
% \backgroundvbox[blue] {\input ward } \par
diff --git a/tex/context/base/mkiv/node-dir.lua b/tex/context/base/mkiv/node-dir.lua
index 6ee5cd4b8..59564ac93 100644
--- a/tex/context/base/mkiv/node-dir.lua
+++ b/tex/context/base/mkiv/node-dir.lua
@@ -26,14 +26,14 @@ nodes.is_mirrored = allocate {
-- RTT = false,
}
-nodes.is_rotated = allocate {
+nodes.is_rotated = allocate { -- used
-- TLT = false,
-- TRT = false,
-- LTL = false,
RTT = true, ["+RTT"] = true,
}
-nodes.textdir_is_parallel = allocate {
+nodes.textdir_is_parallel = allocate { -- used
TLT = {
TLT = true, ["+TLT"] = true,
TRT = true, ["+TRT"] = true,
@@ -114,7 +114,7 @@ nodes.pardir_is_opposite = allocate {
},
}
-nodes.textdir_is_opposite = allocate {
+nodes.textdir_is_opposite = allocate { -- used
TLT = {
-- TLT = false,
TRT = true, ["+TRT"] = true,
@@ -168,7 +168,7 @@ nodes.glyphdir_is_opposite = allocate {
},
}
-nodes.pardir_is_equal = allocate {
+nodes.pardir_is_equal = allocate { -- used
TLT = {
TLT = true, ["+TLT"] = true,
TRT = true, ["+TRT"] = true,
@@ -195,7 +195,7 @@ nodes.pardir_is_equal = allocate {
},
}
-nodes.textdir_is_equal = allocate {
+nodes.textdir_is_equal = allocate { -- used
TLT = {
TLT = true, ["+TLT"] = true,
-- TRT = false,
@@ -222,7 +222,7 @@ nodes.textdir_is_equal = allocate {
},
}
-nodes.glyphdir_is_equal = allocate {
+nodes.glyphdir_is_equal = allocate { -- used
TLT = {
TLT = true, ["+TLT"] = true,
TRT = true, ["+TRT"] = true,
@@ -290,14 +290,14 @@ nodes.glyphdir_is_orthogonal = allocate {
-- RTT = false
}
-nodes.dir_is_pop = allocate {
+nodes.dir_is_pop = allocate { -- used
["-TRT"] = true,
["-TLT"] = true,
["-LTL"] = true,
["-RTT"] = true,
}
-nodes.dir_negation = allocate {
+nodes.dir_negation = allocate { -- used
["-TRT"] = "+TRT",
["-TLT"] = "+TLT",
["-LTL"] = "+LTL",
diff --git a/tex/context/base/mkiv/node-fin.lua b/tex/context/base/mkiv/node-fin.lua
index ffb2ae49e..975eb0bec 100644
--- a/tex/context/base/mkiv/node-fin.lua
+++ b/tex/context/base/mkiv/node-fin.lua
@@ -19,7 +19,6 @@ local nuts = nodes.nuts
local tonode = nuts.tonode
local tonut = nuts.tonut
-local getfield = nuts.getfield
local getnext = nuts.getnext
local getprev = nuts.getprev
local getid = nuts.getid
@@ -124,12 +123,6 @@ function nodes.installattributehandler(plugin)
return loadstripped(template)()
end
--- for the moment:
-
-local function copied(n)
- return copy_node(tonut(n))
-end
-
-- the injectors
local nsdata, nsnone, nslistwise, nsforced, nsselector, nstrigger
@@ -163,13 +156,13 @@ function states.finalize(namespace,attribute,head) -- is this one ok?
if id == hlist_code or id == vlist_code then
local content = getlist(head)
if content then
- local list = insert_node_before(content,content,copied(nsnone)) -- two return values
+ local list = insert_node_before(content,content,copy_node(nsnone)) -- two return values
if list ~= content then
setlist(head,list)
end
end
else
- head = insert_node_before(head,head,copied(nsnone))
+ head = insert_node_before(head,head,copy_node(nsnone))
end
return tonode(head), true, true
end
@@ -178,17 +171,15 @@ end
-- we need to deal with literals too (reset as well as oval)
-local function process(namespace,attribute,head,inheritance,default) -- one attribute
+local function process(attribute,head,inheritance,default) -- one attribute
local stack = head
local done = false
local check = false
local leader = nil
while stack do
local id = getid(stack)
- if id == glyph_code then
- check = true
- elseif id == disc_code then
- check = true -- no longer needed as we flatten replace
+ if id == glyph_code or id == disc_code then
+ check = true -- disc no longer needed as we flatten replace
elseif id == glue_code then
leader = getleader(stack)
if leader then
@@ -201,7 +192,7 @@ local function process(namespace,attribute,head,inheritance,default) -- one attr
if nstrigger and getattr(stack,nstrigger) then
local outer = getattr(stack,attribute)
if outer ~= inheritance then
- local list, ok = process(namespace,attribute,content,inheritance,outer)
+ local list, ok = process(attribute,content,inheritance,outer)
if content ~= list then
setlist(stack,list)
end
@@ -209,7 +200,7 @@ local function process(namespace,attribute,head,inheritance,default) -- one attr
done = true
end
else
- local list, ok = process(namespace,attribute,content,inheritance,default)
+ local list, ok = process(attribute,content,inheritance,default)
if content ~= list then
setlist(stack,list)
end
@@ -218,7 +209,7 @@ local function process(namespace,attribute,head,inheritance,default) -- one attr
end
end
else
- local list, ok = process(namespace,attribute,content,inheritance,default)
+ local list, ok = process(attribute,content,inheritance,default)
if content ~= list then
setlist(stack,list)
end
@@ -237,12 +228,12 @@ local function process(namespace,attribute,head,inheritance,default) -- one attr
if c then
if default and c == inheritance then
if current ~= default then
- head = insert_node_before(head,stack,copied(nsdata[default]))
+ head = insert_node_before(head,stack,copy_node(nsdata[default]))
current = default
done = true
end
elseif current ~= c then
- head = insert_node_before(head,stack,copied(nsdata[c]))
+ head = insert_node_before(head,stack,copy_node(nsdata[c]))
current = c
done = true
end
@@ -259,7 +250,7 @@ local function process(namespace,attribute,head,inheritance,default) -- one attr
if nstrigger and getattr(stack,nstrigger) then
local outer = getattr(stack,attribute)
if outer ~= inheritance then
- local list, ok = process(namespace,attribute,leader,inheritance,outer)
+ local list, ok = process(attribute,leader,inheritance,outer)
if leader ~= list then
setleader(stack,list)
end
@@ -267,7 +258,7 @@ local function process(namespace,attribute,head,inheritance,default) -- one attr
done = true
end
else
- local list, ok = process(namespace,attribute,leader,inheritance,default)
+ local list, ok = process(attribute,leader,inheritance,default)
if leader ~= list then
setleader(stack,list)
end
@@ -276,7 +267,7 @@ local function process(namespace,attribute,head,inheritance,default) -- one attr
end
end
else
- local list, ok = process(namespace,attribute,leader,inheritance,default)
+ local list, ok = process(attribute,leader,inheritance,default)
if leader ~= list then
setleader(stack,list)
end
@@ -290,12 +281,12 @@ local function process(namespace,attribute,head,inheritance,default) -- one attr
end
elseif default and inheritance then
if current ~= default then
- head = insert_node_before(head,stack,copied(nsdata[default]))
+ head = insert_node_before(head,stack,copy_node(nsdata[default]))
current = default
done = true
end
elseif current > 0 then
- head = insert_node_before(head,stack,copied(nsnone))
+ head = insert_node_before(head,stack,copy_node(nsnone))
current = 0
done = true
end
@@ -307,7 +298,7 @@ local function process(namespace,attribute,head,inheritance,default) -- one attr
end
states.process = function(namespace,attribute,head,default)
- local head, done = process(namespace,attribute,tonut(head),default)
+ local head, done = process(attribute,tonut(head),default)
return tonode(head), done
end
@@ -317,17 +308,16 @@ end
-- state changes while the main state stays the same (like two glyphs following
-- each other with the same color but different color spaces e.g. \showcolor)
-local function selective(namespace,attribute,head,inheritance,default) -- two attributes
+local function selective(attribute,head,inheritance,default) -- two attributes
+ -- local head = head
local stack = head
local done = false
local check = false
local leader = nil
while stack do
local id = getid(stack)
- if id == glyph_code then
- check = true
- elseif id == disc_code then
- check = true -- not needed when we flatten replace
+ if id == glyph_code or id == disc_code then
+ check = true -- disc no longer needed as we flatten replace
elseif id == glue_code then
leader = getleader(stack)
if leader then
@@ -340,7 +330,7 @@ local function selective(namespace,attribute,head,inheritance,default) -- two at
if nstrigger and getattr(stack,nstrigger) then
local outer = getattr(stack,attribute)
if outer ~= inheritance then
- local list, ok = selective(namespace,attribute,content,inheritance,outer)
+ local list, ok = selective(attribute,content,inheritance,outer)
if content ~= list then
setlist(stack,list)
end
@@ -348,7 +338,7 @@ local function selective(namespace,attribute,head,inheritance,default) -- two at
done = true
end
else
- local list, ok = selective(namespace,attribute,content,inheritance,default)
+ local list, ok = selective(attribute,content,inheritance,default)
if content ~= list then
setlist(stack,list)
end
@@ -357,7 +347,7 @@ local function selective(namespace,attribute,head,inheritance,default) -- two at
end
end
else
- local list, ok = selective(namespace,attribute,content,inheritance,default)
+ local list, ok = selective(attribute,content,inheritance,default)
if content ~= list then
setlist(stack,list)
end
@@ -377,7 +367,7 @@ local function selective(namespace,attribute,head,inheritance,default) -- two at
if default and c == inheritance then
if current ~= default then
local data = nsdata[default]
- head = insert_node_before(head,stack,copied(data[nsforced or getattr(stack,nsselector) or nsselector]))
+ head = insert_node_before(head,stack,copy_node(data[nsforced or getattr(stack,nsselector) or nsselector]))
current = default
if ok then
done = true
@@ -385,9 +375,11 @@ local function selective(namespace,attribute,head,inheritance,default) -- two at
end
else
local s = getattr(stack,nsselector)
+ -- local s = nsforced or getattr(stack,nsselector)
if current ~= c or current_selector ~= s then
local data = nsdata[c]
- head = insert_node_before(head,stack,copied(data[nsforced or getattr(stack,nsselector) or nsselector]))
+ head = insert_node_before(head,stack,copy_node(data[nsforced or s or nsselector]))
+ -- head = insert_node_before(head,stack,copy_node(data[s or nsselector]))
current = c
current_selector = s
if ok then
@@ -398,9 +390,9 @@ local function selective(namespace,attribute,head,inheritance,default) -- two at
if leader then
-- begin nested
if nstrigger and getattr(stack,nstrigger) then
- local outer = getatribute(stack,attribute)
+ local outer = getattr(stack,attribute)
if outer ~= inheritance then
- local list, ok = selective(namespace,attribute,leader,inheritance,outer)
+ local list, ok = selective(attribute,leader,inheritance,outer)
if leader ~= list then
setleader(stack,list)
end
@@ -408,7 +400,7 @@ local function selective(namespace,attribute,head,inheritance,default) -- two at
done = true
end
else
- local list, ok = selective(namespace,attribute,leader,inheritance,default)
+ local list, ok = selective(attribute,leader,inheritance,default)
if leader ~= list then
setleader(stack,list)
end
@@ -417,7 +409,7 @@ local function selective(namespace,attribute,head,inheritance,default) -- two at
end
end
else
- local list, ok = selective(namespace,attribute,leader,inheritance,default)
+ local list, ok = selective(attribute,leader,inheritance,default)
if leader ~= list then
setleader(stack,list)
end
@@ -431,12 +423,12 @@ local function selective(namespace,attribute,head,inheritance,default) -- two at
elseif default and inheritance then
if current ~= default then
local data = nsdata[default]
- head = insert_node_before(head,stack,copied(data[nsforced or getattr(stack,nsselector) or nsselector]))
+ head = insert_node_before(head,stack,copy_node(data[nsforced or getattr(stack,nsselector) or nsselector]))
current = default
done = true
end
elseif current > 0 then
- head = insert_node_before(head,stack,copied(nsnone))
+ head = insert_node_before(head,stack,copy_node(nsnone))
current, current_selector, done = 0, 0, true
end
check = false
@@ -447,8 +439,8 @@ local function selective(namespace,attribute,head,inheritance,default) -- two at
end
states.selective = function(namespace,attribute,head,default)
- local head, done = selective(namespace,attribute,tonut(head),default)
- return tonode(head), done
+ local head = selective(attribute,tonut(head),default)
+ return tonode(head), true
end
-- Ideally the next one should be merged with the previous but keeping it separate is
@@ -460,7 +452,7 @@ end
-- Todo: make a better stacker. Keep track (in attribute) about nesting level. Not
-- entirely trivial and a generic solution is nicer (compares to the exporter).
-local function stacked(namespace,attribute,head,default) -- no triggering, no inheritance, but list-wise
+local function stacked(attribute,head,default) -- no triggering, no inheritance, but list-wise
local stack = head
local done = false
local current = default or 0
@@ -485,16 +477,16 @@ local function stacked(namespace,attribute,head,default) -- no triggering, no in
if a and current ~= a and nslistwise[a] then -- viewerlayer / needs checking, see below
local p = current
current = a
- head = insert_node_before(head,stack,copied(nsdata[a]))
- local list = stacked(namespace,attribute,content,current) -- two return values
+ head = insert_node_before(head,stack,copy_node(nsdata[a]))
+ local list = stacked(attribute,content,current) -- two return values
if content ~= list then
setlist(stack,list)
end
- head, stack = insert_node_after(head,stack,copied(nsnone))
+ head, stack = insert_node_after(head,stack,copy_node(nsnone))
current = p
done = true
else
- local list, ok = stacked(namespace,attribute,content,current)
+ local list, ok = stacked(attribute,content,current)
if content ~= list then
setlist(stack,list) -- only if ok
end
@@ -503,7 +495,7 @@ local function stacked(namespace,attribute,head,default) -- no triggering, no in
end
end
else
- local list, ok = stacked(namespace,attribute,content,current)
+ local list, ok = stacked(attribute,content,current)
if content ~= list then
setlist(stack,list) -- only if ok
end
@@ -520,13 +512,13 @@ local function stacked(namespace,attribute,head,default) -- no triggering, no in
local a = getattr(stack,attribute)
if a then
if current ~= a then
- head = insert_node_before(head,stack,copied(nsdata[a]))
+ head = insert_node_before(head,stack,copy_node(nsdata[a]))
depth = depth + 1
current = a
done = true
end
if leader then
- local list, ok = stacked(namespace,attribute,content,current)
+ local list, ok = stacked(attribute,content,current)
if leader ~= list then
setleader(stack,list) -- only if ok
end
@@ -538,7 +530,7 @@ local function stacked(namespace,attribute,head,default) -- no triggering, no in
elseif default > 0 then
--
elseif current > 0 then
- head = insert_node_before(head,stack,copied(nsnone))
+ head = insert_node_before(head,stack,copy_node(nsnone))
depth = depth - 1
current = 0
done = true
@@ -548,20 +540,20 @@ local function stacked(namespace,attribute,head,default) -- no triggering, no in
stack = getnext(stack)
end
while depth > 0 do
- head = insert_node_after(head,stack,copied(nsnone))
+ head = insert_node_after(head,stack,copy_node(nsnone))
depth = depth - 1
end
return head, done
end
states.stacked = function(namespace,attribute,head,default)
- local head, done = stacked(namespace,attribute,tonut(head),default)
+ local head, done = stacked(attribute,tonut(head),default)
return tonode(head), done
end
-- experimental
-local function stacker(namespace,attribute,head,default) -- no triggering, no inheritance, but list-wise
+local function stacker(attribute,head,default) -- no triggering, no inheritance, but list-wise
-- nsbegin()
local stacked = false
@@ -589,15 +581,15 @@ local function stacker(namespace,attribute,head,default) -- no triggering, no in
elseif nslistwise then
local a = getattr(current,attribute)
if a and attrib ~= a and nslistwise[a] then -- viewerlayer
- head = insert_node_before(head,current,copied(nsdata[a]))
- local list = stacker(namespace,attribute,content,a)
+ head = insert_node_before(head,current,copy_node(nsdata[a]))
+ local list = stacker(attribute,content,a)
if list ~= content then
setlist(current,list)
end
done = true
- head, current = insert_node_after(head,current,copied(nsnone))
+ head, current = insert_node_after(head,current,copy_node(nsnone))
else
- local list, ok = stacker(namespace,attribute,content,attrib)
+ local list, ok = stacker(attribute,content,attrib)
if content ~= list then
setlist(current,list)
end
@@ -606,7 +598,7 @@ local function stacker(namespace,attribute,head,default) -- no triggering, no in
end
end
else
- local list, ok = stacker(namespace,attribute,content,default)
+ local list, ok = stacker(attribute,content,default)
if list ~= content then
setlist(current,list)
end
@@ -633,7 +625,7 @@ local function stacker(namespace,attribute,head,default) -- no triggering, no in
done = true
if leader then
-- tricky as a leader has to be a list so we cannot inject before
- local list, ok = stacker(namespace,attribute,leader,attrib)
+ local list, ok = stacker(attribute,leader,attrib)
if ok then
done = true
end
@@ -647,21 +639,19 @@ local function stacker(namespace,attribute,head,default) -- no triggering, no in
current = getnext(current)
end
-if stacked then
-
- local n = nsend()
- while n do
- head = insert_node_after(head,previous,tonut(n))
- n = nsend()
+ if stacked then
+ local n = nsend()
+ while n do
+ head = insert_node_after(head,previous,tonut(n))
+ n = nsend()
+ end
end
-end
-
return head, done
end
states.stacker = function(namespace,attribute,head,default)
- local head, done = stacker(namespace,attribute,tonut(head),default)
+ local head, done = stacker(attribute,tonut(head),default)
nsreset()
return tonode(head), done
end
diff --git a/tex/context/base/mkiv/node-fin.mkiv b/tex/context/base/mkiv/node-fin.mkiv
index 6c5bf17f1..4f1ff2aba 100644
--- a/tex/context/base/mkiv/node-fin.mkiv
+++ b/tex/context/base/mkiv/node-fin.mkiv
@@ -18,8 +18,8 @@
\unprotect
-\registerctxluafile{node-shp}{1.001}
-\registerctxluafile{node-fin}{1.001} % we might generalize this one
+\registerctxluafile{node-shp}{}
+\registerctxluafile{node-fin}{} % we might generalize this one
% we might have two variants at some point (efficiency)
diff --git a/tex/context/base/mkiv/node-fnt.lua b/tex/context/base/mkiv/node-fnt.lua
index 8aa088f88..f846f996d 100644
--- a/tex/context/base/mkiv/node-fnt.lua
+++ b/tex/context/base/mkiv/node-fnt.lua
@@ -56,7 +56,7 @@ local getfield = nuts.getfield
----- getdisc = nuts.getdisc
local setchar = nuts.setchar
local setlink = nuts.setlink
-local setfield = nuts.setfield
+local setnext = nuts.setnext
local setprev = nuts.setprev
local isglyph = nuts.isglyph -- unchecked
@@ -71,6 +71,9 @@ local disc_code = nodecodes.disc
local boundary_code = nodecodes.boundary
local word_boundary = nodes.boundarycodes.word
+local protect_glyphs = nuts.protect_glyphs
+local unprotect_glyphs = nuts.unprotect_glyphs
+
local setmetatableindex = table.setmetatableindex
-- some tests with using an array of dynamics[id] and processes[id] demonstrated
@@ -140,28 +143,28 @@ local ligaturing = nuts.ligaturing
local kerning = nuts.kerning
-- -- -- this will go away
-
-local disccodes = nodes.disccodes
-local explicit_code = disccodes.explicit
-local automatic_code = disccodes.automatic
-local expanders = nil
-
-function fonts.setdiscexpansion(v)
- if v == nil or v == true then
- expanders = languages and languages.expanders
- elseif type(v) == "table" then
- expanders = v
- else
- expanders = false
- end
-end
-
-function fonts.getdiscexpansion()
- return expanders and true or false
-end
-
-fonts.setdiscexpansion(true)
-
+--
+-- local disccodes = nodes.disccodes
+-- local explicit_code = disccodes.explicit
+-- local automatic_code = disccodes.automatic
+-- local expanders = nil
+--
+-- function fonts.setdiscexpansion(v)
+-- if v == nil or v == true then
+-- expanders = languages and languages.expanders
+-- elseif type(v) == "table" then
+-- expanders = v
+-- else
+-- expanders = false
+-- end
+-- end
+--
+-- function fonts.getdiscexpansion()
+-- return expanders and true or false
+-- end
+--
+-- fonts.setdiscexpansion(true)
+--
-- -- -- till here
local function start_trace(head)
@@ -193,9 +196,9 @@ local function stop_trace(u,usedfonts,a,attrfonts,b,basefonts,r,redundant,e,expa
report_fonts("dynamics: %s",a > 0 and concat(keys(attrfonts)," ") or "none")
report_fonts("built-in: %s",b > 0 and b or "none")
report_fonts("removed : %s",r > 0 and r or "none")
-if expanders then
- report_fonts("expanded: %s",e > 0 and e or "none")
-end
+ -- if expanders then
+ -- report_fonts("expanded: %s",e > 0 and e or "none")
+ -- end
report_fonts()
end
@@ -212,8 +215,10 @@ function handlers.characters(head,groupcode,size,packtype,direction)
local done = false
local variants = nil
local redundant = nil
- local none = false
local nuthead = tonut(head)
+ local lastfont = nil
+ local lastproc = nil
+ local lastnone = nil
local a, u, b, r, e = 0, 0, 0, 0, 0
@@ -224,54 +229,91 @@ function handlers.characters(head,groupcode,size,packtype,direction)
-- There is no gain in checking for a single glyph and then having a fast path. On the
-- metafun manual (with some 2500 single char lists) the difference is just noise.
+ local function protectnone()
+ protect_glyphs(firstnone,lastnone)
+ firstnone = nil
+ end
+
+ local function setnone(n)
+ if firstnone then
+ protectnone()
+ end
+ if basefont then
+ basefont[2] = getprev(n)
+ basefont = false
+ end
+ if not firstnone then
+ firstnone = n
+ end
+ lastnone = n
+ end
+
+ local function setbase(n)
+ if firstnone then
+ protectnone()
+ end
+ if force_basepass then
+ if basefont then
+ basefont[2] = getprev(n)
+ end
+ b = b + 1
+ basefont = { n, false }
+ basefonts[b] = basefont
+ end
+ end
+
+    local function setnode(n,font,attr) -- we could use prevfont and prevattr when we set them first
+ if firstnone then
+ protectnone()
+ end
+ if basefont then
+ basefont[2] = getprev(n)
+ basefont = false
+ end
+ if attr > 0 then
+ local used = attrfonts[font]
+ if not used then
+ used = { }
+ attrfonts[font] = used
+ end
+ if not used[attr] then
+ local fd = setfontdynamics[font]
+ if fd then
+ used[attr] = fd[attr]
+ a = a + 1
+ end
+ end
+ else
+ local used = usedfonts[font]
+ if not used then
+ lastfont = font
+ lastproc = fontprocesses[font]
+ if lastproc then
+ usedfonts[font] = lastproc
+ u = u + 1
+ end
+ end
+ end
+ end
+
for n in traverse_char(nuthead) do
local font = getfont(n)
- local attr = (none and prevattr) or getattr(n,0) or 0 -- zero attribute is reserved for fonts in context
+ -- local attr = (none and prevattr) or getattr(n,0) or 0 -- zero attribute is reserved for fonts in context
+ local attr = getattr(n,0) or 0 -- zero attribute is reserved for fonts in context
if font ~= prevfont or attr ~= prevattr then
prevfont = font
prevattr = attr
variants = fontvariants[font]
- none = fontmodes[font] == "none"
- if none then
- -- skip
- -- variants = false
- protect_glyph(n)
+ local fontmode = fontmodes[font]
+ if fontmode == "none" then
+ setnone(n)
+ elseif fontmode == "base" then
+ setbase(n)
else
- if basefont then
- basefont[2] = getprev(n)
- end
- if attr > 0 then
- local used = attrfonts[font]
- if not used then
- used = { }
- attrfonts[font] = used
- end
- if not used[attr] then
- local fd = setfontdynamics[font]
- if fd then
- used[attr] = fd[attr]
- a = a + 1
- elseif force_basepass then
- b = b + 1
- basefont = { n, false }
- basefonts[b] = basefont
- end
- end
- else
- local used = usedfonts[font]
- if not used then
- local fp = fontprocesses[font]
- if fp then
- usedfonts[font] = fp
- u = u + 1
- elseif force_basepass then
- b = b + 1
- basefont = { n, false }
- basefonts[b] = basefont
- end
- end
- end
+ setnode(n,font,attr)
end
+ elseif firstnone then
+ lastnone = n
end
if variants then
local char = getchar(n)
@@ -309,6 +351,10 @@ function handlers.characters(head,groupcode,size,packtype,direction)
end
end
+ if firstnone then
+ protectnone()
+ end
+
if force_boundaryrun then
-- we can inject wordboundaries and then let the hyphenator do its work
@@ -376,49 +422,35 @@ function handlers.characters(head,groupcode,size,packtype,direction)
local none = false
for n in traverse_char(r) do
local font = getfont(n)
- local attr = (none and prevattr) or getattr(n,0) or 0 -- zero attribute is reserved for fonts in context
+ local attr = getattr(n,0) or 0 -- zero attribute is reserved for fonts in context
if font ~= prevfont or attr ~= prevattr then
prevfont = font
prevattr = attr
- none = fontmodes[font] == "none" -- very unlikely that we run into disc nodes in none mode
- if none then
- -- skip
- -- variants = false
- protect_glyph(n)
- elseif attr > 0 then
- local used = attrfonts[font]
- if not used then
- used = { }
- attrfonts[font] = used
- end
- if not used[attr] then
- local fd = setfontdynamics[font]
- if fd then
- used[attr] = fd[attr]
- a = a + 1
- end
- end
+ local fontmode = fontmodes[font]
+ if fontmode == "none" then
+ setnone(n)
+ elseif fontmode == "base" then
+ setbase(n)
else
- local used = usedfonts[font]
- if not used then
- local fp = fontprocesses[font]
- if fp then
- usedfonts[font] = fp
- u = u + 1
- end
- end
+ setnode(n,font,attr)
end
+ elseif firstnone then
+ -- lastnone = n
+ lastnone = nil
end
-- we assume one font for now (and if there are more and we get into issues then
-- we can always remove the break)
break
end
- elseif expanders then
- local subtype = getsubtype(d)
- if subtype == automatic_code or subtype == explicit_code then
- expanders[subtype](d)
- e = e + 1
+ if firstnone then
+ protectnone()
end
+ -- elseif expanders then
+ -- local subtype = getsubtype(d)
+ -- if subtype == automatic_code or subtype == explicit_code then
+ -- expanders[subtype](d)
+ -- e = e + 1
+ -- end
end
end
@@ -432,11 +464,9 @@ function handlers.characters(head,groupcode,size,packtype,direction)
if u == 0 then
-- skip
elseif u == 1 then
- local font, processors = next(usedfonts)
- -- local attr = a == 0 and false or 0 -- 0 is the savest way
        local attr = a > 0 and 0 or false -- 0 is the safest way
- for i=1,#processors do
- local h, d = processors[i](head,font,attr,direction)
+ for i=1,#lastproc do
+ local h, d = lastproc[i](head,lastfont,attr,direction)
if d then
if h then
head = h
@@ -449,7 +479,7 @@ function handlers.characters(head,groupcode,size,packtype,direction)
        local attr = a > 0 and 0 or false -- 0 is the safest way
for font, processors in next, usedfonts do -- unordered
for i=1,#processors do
- local h, d = processors[i](head,font,attr,direction)
+ local h, d = processors[i](head,font,attr,direction,u)
if d then
if h then
head = h
@@ -478,7 +508,7 @@ function handlers.characters(head,groupcode,size,packtype,direction)
for font, dynamics in next, attrfonts do
for attribute, processors in next, dynamics do -- unordered, attr can switch in between
for i=1,#processors do
- local h, d = processors[i](head,font,attribute,direction)
+ local h, d = processors[i](head,font,attribute,direction,a)
if d then
if h then
head = h
@@ -549,19 +579,5 @@ function handlers.characters(head,groupcode,size,packtype,direction)
return head, true
end
-local d_protect_glyphs = nuts.protect_glyphs
-local d_unprotect_glyphs = nuts.unprotect_glyphs
-
-handlers.protectglyphs = function(n) return d_protect_glyphs (tonut(n)) end
-handlers.unprotectglyphs = function(n) return d_unprotect_glyphs(tonut(n)) end
-
--- function handlers.protectglyphs(h)
--- local h = tonut(h)
--- for n in traverse_id(disc_code,h) do
--- local pre, post, replace = getdisc(n)
--- if pre then d_protect_glyphs(pre) end
--- if post then d_protect_glyphs(post) end
--- if replace then d_protect_glyphs(replace) end
--- end
--- return d_protect_glyphs(h)
--- end
+handlers.protectglyphs = function(n) protect_glyphs (tonut(n)) return n, true end
+handlers.unprotectglyphs = function(n) unprotect_glyphs(tonut(n)) return n, true end
diff --git a/tex/context/base/mkiv/node-ini.lua b/tex/context/base/mkiv/node-ini.lua
index bdccf8cba..50da140ce 100644
--- a/tex/context/base/mkiv/node-ini.lua
+++ b/tex/context/base/mkiv/node-ini.lua
@@ -68,6 +68,7 @@ local allocate = utilities.storage.allocate
local formatcolumns = utilities.formatters.formatcolumns
local getsubtypes = node.subtypes
+local getvalues = node.values
-- local listcodes = allocate {
-- [0] = "unknown",
@@ -99,6 +100,13 @@ if not rulecodes[5] then
rulecodes[8] = "radical"
end
+-- local dircodes = mark(getsubtypes("dir"))
+
+dircodes = allocate {
+ [0] = "normal",
+ [1] = "cancel",
+}
+
-- local glyphcodes = allocate {
-- [0] = "character",
-- [1] = "glyph",
@@ -121,7 +129,7 @@ local glyphcodes = mark(getsubtypes("glyph"))
local disccodes = mark(getsubtypes("disc"))
--- local skipcodes = allocate {
+-- local gluecodes = allocate {
-- [ 0] = "userskip",
-- [ 1] = "lineskip",
-- [ 2] = "baselineskip",
@@ -150,7 +158,7 @@ local disccodes = mark(getsubtypes("disc"))
-- [103] = "gleaders",
-- }
-local skipcodes = mark(getsubtypes("glue"))
+local gluecodes = mark(getsubtypes("glue"))
-- local leadercodes = allocate {
-- [100] = "leaders",
@@ -276,12 +284,12 @@ local nodecodes = simplified(node.types())
local whatcodes = simplified(node.whatsits())
local usercodes = allocate {
- [ 97] = "attributes", -- a
+ [ 97] = "attribute", -- a
[100] = "number", -- d
[108] = "lua", -- l
- [110] = "nodes", -- n
+ [110] = "node", -- n
[115] = "string", -- s
- [116] = "tokens" -- t
+ [116] = "token" -- t
}
local noadoptions = allocate {
@@ -296,52 +304,93 @@ local noadoptions = allocate {
right = 0x14 + 0x08,
}
-skipcodes = allocate(swapped(skipcodes,skipcodes))
-boundarycodes = allocate(swapped(boundarycodes,boundarycodes))
-noadcodes = allocate(swapped(noadcodes,noadcodes))
-radicalcodes = allocate(swapped(radicalcodes,radicalcodes))
-nodecodes = allocate(swapped(nodecodes,nodecodes))
-whatcodes = allocate(swapped(whatcodes,whatcodes))
-listcodes = allocate(swapped(listcodes,listcodes))
-glyphcodes = allocate(swapped(glyphcodes,glyphcodes))
-kerncodes = allocate(swapped(kerncodes,kerncodes))
-penaltycodes = allocate(swapped(penaltycodes,penaltycodes))
-mathcodes = allocate(swapped(mathcodes,mathcodes))
-fillcodes = allocate(swapped(fillcodes,fillcodes))
-margincodes = allocate(swapped(margincodes,margincodes))
-disccodes = allocate(swapped(disccodes,disccodes))
-accentcodes = allocate(swapped(accentcodes,accentcodes))
-fencecodes = allocate(swapped(fencecodes,fencecodes))
-rulecodes = allocate(swapped(rulecodes,rulecodes))
-leadercodes = allocate(swapped(leadercodes,leadercodes))
-usercodes = allocate(swapped(usercodes,usercodes))
-noadoptions = allocate(swapped(noadoptions,noadoptions))
-
-nodes.skipcodes = skipcodes
-nodes.boundarycodes = boundarycodes
-nodes.noadcodes = noadcodes
-nodes.nodecodes = nodecodes
-nodes.whatcodes = whatcodes
-nodes.listcodes = listcodes
-nodes.glyphcodes = glyphcodes
-nodes.kerncodes = kerncodes
-nodes.penaltycodes = penaltycodes
-nodes.mathcodes = mathcodes
-nodes.fillcodes = fillcodes
-nodes.margincodes = margincodes
-nodes.disccodes = disccodes
-nodes.accentcodes = accentcodes
-nodes.radicalcodes = radicalcodes
-nodes.fencecodes = fencecodes
-nodes.rulecodes = rulecodes
-nodes.leadercodes = leadercodes
-nodes.usercodes = usercodes
-nodes.noadoptions = noadoptions
-
-nodes.gluecodes = skipcodes -- more official
-nodes.whatsitcodes = whatcodes -- more official
+-- local directionvalues = mark(getvalues("dir"))
+-- local gluevalues = mark(getvalues("glue"))
+-- local pdfliteralvalues = mark(getvalues("pdf_literal"))
+
+local dirvalues = allocate {
+ [0] = "TLT",
+ [1] = "TRT",
+ [2] = "LTL",
+ [3] = "RTT",
+}
+
+local gluevalues = allocate {
+ [0] = "normal",
+ [1] = "fi",
+ [2] = "fil",
+ [3] = "fill",
+ [4] = "filll",
+}
+
+local pdfliteralvalues = allocate {
+ [0] = "origin",
+ [1] = "page",
+ [2] = "always",
+ [3] = "raw",
+ [4] = "text",
+ [5] = "font",
+ [6] = "special",
+}
+
+gluecodes = allocate(swapped(gluecodes,gluecodes))
+dircodes = allocate(swapped(dircodes,dircodes))
+boundarycodes = allocate(swapped(boundarycodes,boundarycodes))
+noadcodes = allocate(swapped(noadcodes,noadcodes))
+radicalcodes = allocate(swapped(radicalcodes,radicalcodes))
+nodecodes = allocate(swapped(nodecodes,nodecodes))
+whatcodes = allocate(swapped(whatcodes,whatcodes))
+listcodes = allocate(swapped(listcodes,listcodes))
+glyphcodes = allocate(swapped(glyphcodes,glyphcodes))
+kerncodes = allocate(swapped(kerncodes,kerncodes))
+penaltycodes = allocate(swapped(penaltycodes,penaltycodes))
+mathcodes = allocate(swapped(mathcodes,mathcodes))
+fillcodes = allocate(swapped(fillcodes,fillcodes))
+margincodes = allocate(swapped(margincodes,margincodes))
+disccodes = allocate(swapped(disccodes,disccodes))
+accentcodes = allocate(swapped(accentcodes,accentcodes))
+fencecodes = allocate(swapped(fencecodes,fencecodes))
+rulecodes = allocate(swapped(rulecodes,rulecodes))
+leadercodes = allocate(swapped(leadercodes,leadercodes))
+usercodes = allocate(swapped(usercodes,usercodes))
+noadoptions = allocate(swapped(noadoptions,noadoptions))
+dirvalues = allocate(swapped(dirvalues,dirvalues))
+gluevalues = allocate(swapped(gluevalues,gluevalues))
+pdfliteralvalues = allocate(swapped(pdfliteralvalues,pdfliteralvalues))
+
+nodes.gluecodes = gluecodes
+nodes.dircodes = dircodes
+nodes.boundarycodes = boundarycodes
+nodes.noadcodes = noadcodes
+nodes.nodecodes = nodecodes
+nodes.whatcodes = whatcodes
+nodes.listcodes = listcodes
+nodes.glyphcodes = glyphcodes
+nodes.kerncodes = kerncodes
+nodes.penaltycodes = penaltycodes
+nodes.mathcodes = mathcodes
+nodes.fillcodes = fillcodes
+nodes.margincodes = margincodes
+nodes.disccodes = disccodes
+nodes.accentcodes = accentcodes
+nodes.radicalcodes = radicalcodes
+nodes.fencecodes = fencecodes
+nodes.rulecodes = rulecodes
+nodes.leadercodes = leadercodes
+nodes.usercodes = usercodes
+nodes.noadoptions = noadoptions
+nodes.dirvalues = dirvalues
+nodes.gluevalues = gluevalues
+nodes.pdfliteralvalues = pdfliteralvalues
+
+nodes.skipcodes = gluecodes -- more friendly
+nodes.directioncodes = dircodes -- more friendly
+nodes.whatsitcodes = whatcodes -- more official
nodes.marginkerncodes = margincodes
nodes.discretionarycodes = disccodes
+nodes.directionvalues = dirvalues -- more friendly
+nodes.skipvalues = gluevalues -- more friendly
+nodes.literalvalues = pdfliteralvalues -- more friendly
listcodes.row = listcodes.alignment
listcodes.column = listcodes.alignment
@@ -350,6 +399,8 @@ kerncodes.kerning = kerncodes.fontkern
kerncodes.italiccorrection = kerncodes.italiccorrection or 1 -- new
+pdfliteralvalues.direct = pdfliteralvalues.always
+
nodes.codes = allocate { -- mostly for listing
glue = skipcodes,
boundary = boundarycodes,
diff --git a/tex/context/base/mkiv/node-ini.mkiv b/tex/context/base/mkiv/node-ini.mkiv
index 369b06ab2..8f1079163 100644
--- a/tex/context/base/mkiv/node-ini.mkiv
+++ b/tex/context/base/mkiv/node-ini.mkiv
@@ -17,25 +17,26 @@
\newcount\filterstate \filterstate\plusone % hm, public
-\registerctxluafile{node-ini}{1.001}
-\registerctxluafile{node-met}{1.001}
-\registerctxluafile{node-nut}{1.001}
-\registerctxluafile{node-res}{1.001}
-\registerctxluafile{node-ppt}{1.001} % experimental
-\registerctxluafile{node-dir}{1.001}
-\registerctxluafile{node-aux}{1.001}
-\registerctxluafile{node-tst}{1.001}
-\registerctxluafile{node-tra}{1.001} % we might split it off (module)
-\registerctxluafile{node-snp}{1.001}
-\registerctxluafile{node-tsk}{1.001}
-\registerctxluafile{node-tex}{1.001}
-\registerctxluafile{node-pro}{1.001}
-\registerctxluafile{node-ser}{1.001}
-\registerctxluafile{node-ext}{1.001}
-\registerctxluafile{node-acc}{1.001} % experimental
-%registerctxluafile{node-prp}{1.001} % makes no sense (yet)
-\registerctxluafile{node-ppt}{1.001}
-\registerctxluafile{node-scn}{1.001}
+\registerctxluafile{node-ini}{}
+\registerctxluafile{node-met}{}
+\registerctxluafile{node-nut}{}
+\registerctxluafile{node-res}{}
+\registerctxluafile{node-ppt}{} % experimental
+\registerctxluafile{node-dir}{}
+\registerctxluafile{node-aux}{}
+\registerctxluafile{node-tst}{}
+\registerctxluafile{node-tra}{} % we might split it off (module)
+\registerctxluafile{node-snp}{}
+\registerctxluafile{node-tsk}{}
+\registerctxluafile{node-tex}{}
+\registerctxluafile{node-pro}{}
+\registerctxluafile{node-ser}{}
+\registerctxluafile{node-ext}{}
+\registerctxluafile{node-acc}{} % experimental
+%registerctxluafile{node-prp}{} % makes no sense (yet)
+\registerctxluafile{node-ppt}{}
+\registerctxluafile{node-scn}{}
+\registerctxluafile{node-syn}{}
\newcount\c_node_tracers_show_box % box number
diff --git a/tex/context/base/mkiv/node-ltp.lua b/tex/context/base/mkiv/node-ltp.lua
index 22a4799ad..865f69c2c 100644
--- a/tex/context/base/mkiv/node-ltp.lua
+++ b/tex/context/base/mkiv/node-ltp.lua
@@ -134,10 +134,10 @@ if not modules then modules = { } end modules ['node-par'] = {
]]--
+local tonumber = tonumber
local utfchar = utf.char
local write, write_nl = texio.write, texio.write_nl
local sub, formatters = string.sub, string.formatters
-local round, floor = math.round, math.floor
local insert, remove = table.insert, table.remove
-- local fonts, nodes, node = fonts, nodes, node -- too many locals
@@ -232,8 +232,8 @@ local setkern = nuts.setkern
local setdir = nuts.setdir
local setshift = nuts.setshift
local setwidth = nuts.setwidth
------ getheight = nuts.getheight
------ getdepth = nuts.getdepth
+----- setheight = nuts.setheight
+----- setdepth = nuts.setdepth
local slide_node_list = nuts.slide -- get rid of this, probably ok > 78.2
local find_tail = nuts.tail
@@ -282,9 +282,10 @@ local leaders_code = gluecodes.leaders
local localpar_code = nodecodes.localpar
-local kerning_code = kerncodes.kerning -- font kern
local userkern_code = kerncodes.userkern
local italickern_code = kerncodes.italiccorrection
+local fontkern_code = kerncodes.fontkern
+local accentkern_code = kerncodes.accentkern
local ligature_code = glyphcodes.ligature
@@ -344,8 +345,6 @@ local dir_pops = nodes.dir_is_pop
local dir_negations = nodes.dir_negation
local is_skipable = nuts.protrusion_skippable
-local a_fontkern = attributes.private('fontkern')
-
-- helpers --
-- It makes more sense to move the somewhat messy dir state tracking
@@ -774,7 +773,7 @@ local function add_to_width(line_break_dir,checked_expansion,s) -- split into tw
elseif id == kern_code then
local kern = getkern(s)
if kern ~= 0 then
- if checked_expansion and expand_kerns and (getsubtype(s) == kerning_code or getattr(a_fontkern)) then
+ if checked_expansion and expand_kerns and getsubtype(s) == fontkern_code then
local stretch, shrink = kern_stretch_shrink(s,kern)
if expand_kerns == "stretch" then
adjust_stretch = adjust_stretch + stretch
@@ -1491,7 +1490,7 @@ local function post_line_break(par)
break
elseif id == kern_code then
local subtype = getsubtype(next)
- if subtype ~= userkern_code and subtype ~= italickern_code and not getattr(next,a_fontkern) then
+ if subtype == fontkern_code or subtype == accentkern_code then
-- fontkerns and accent kerns as well as otf injections
break
end
@@ -2394,7 +2393,7 @@ function constructors.methods.basic(head,d)
local kern = getkern(current)
if kern ~= 0 then
active_width.size = active_width.size + kern
- if checked_expansion and expand_kerns and (getsubtype(current) == kerning_code or getattr(current,a_fontkern)) then
+ if checked_expansion and expand_kerns and getsubtype(current) == fontkern_code then
local stretch, shrink = kern_stretch_shrink(current,kern)
if expand_kerns == "stretch" then
active_width.adjust_stretch = active_width.adjust_stretch + stretch
@@ -2555,7 +2554,7 @@ do
write(target," ")
elseif id == kern_code then
local s = getsubtype(a)
- if s == userkern_code or s == italickern_code or getattr(a,a_fontkern) then
+ if s == fontkern_code or s == accentkern_code then
if verbose then
write(target,"[|]")
-- else
@@ -2850,7 +2849,7 @@ do
-- end
-- elseif id == kern_code then
-- local kern = getkern(current)
- -- if kern ~= 0 and getsubtype(current) == kerning_code then
+ -- if kern ~= 0 and getsubtype(current) == fontkern_code then
-- setkern(current,font_expand_ratio * kern)
-- end
-- end
@@ -2873,7 +2872,7 @@ do
-- end
-- elseif id == kern_code then
-- local kern = getkern(current)
- -- if kern ~= 0 and getsubtype(current) == kerning_code then
+ -- if kern ~= 0 and getsubtype(current) == fontkern_code then
-- setkern(current,font_expand_ratio * kern)
-- end
-- end
@@ -2971,7 +2970,7 @@ do
local kern = getkern(current)
if kern == 0 then
-- no kern
- elseif getsubtype(current) == kerning_code then -- check getkern(p)
+ elseif getsubtype(current) == fontkern_code then -- check getkern(p)
if cal_expand_ratio then
local stretch, shrink = kern_stretch_shrink(current,kern)
font_stretch = font_stretch + stretch
diff --git a/tex/context/base/mkiv/node-met.lua b/tex/context/base/mkiv/node-met.lua
index 9ebc8e411..12a9256bc 100644
--- a/tex/context/base/mkiv/node-met.lua
+++ b/tex/context/base/mkiv/node-met.lua
@@ -40,6 +40,11 @@ if not modules then modules = { } end modules ['node-nut'] = {
-- As lots of testing and experimenting was part of this project, I could not have
-- done without stacks of new \CD s and \DVD s. This time Porcupine Tree, No-Man
-- and Archive came to the rescue.
+--
+-- It all started with testing performance of:
+--
+-- node.getfield = metatable.__index
+-- node.setfield = metatable.__newindex
local type, select = type, select
local setmetatableindex = table.setmetatableindex
@@ -110,51 +115,22 @@ nodes.set_attribute = node.set_attribute
nodes.find_attribute = node.find_attribute
nodes.unset_attribute = node.unset_attribute
-nodes.protect_glyphs = node.protect_glyphs
nodes.protect_glyph = node.protect_glyph
+nodes.protect_glyphs = node.protect_glyphs
+nodes.unprotect_glyph = node.unprotect_glyph
nodes.unprotect_glyphs = node.unprotect_glyphs
nodes.kerning = node.kerning
nodes.ligaturing = node.ligaturing
nodes.mlist_to_hlist = node.mlist_to_hlist
-if not node.getwhd then
- local getfield = node.getfield
- function node.getwhd(n)
- return getfield(n,"width"), getfield(n,"height"), getfield(n,"depth")
- end
-end
-
-if not node.setwhd then
- local setfield = node.setfield
- function node.setwhd(n,w,h,d)
- setfield(n,"width",w or 0)
- setfield(n,"height",h or 0)
- setfield(n,"depth",d or 0)
- end
-end
-
-nodes.getwhd = node.getwhd
-nodes.setwhd = node.setwhd
-
nodes.effective_glue = node.effective_glue
nodes.getglue = node.getglue
nodes.setglue = node.setglue
nodes.is_zero_glue = node.is_zero_glue
--- if not gonuts or not node.getfield then
--- node.getfield = metatable.__index
--- node.setfield = metatable.__newindex
--- end
-
nodes.tonode = function(n) return n end
nodes.tonut = function(n) return n end
-local getfield = node.getfield
-local setfield = node.setfield
-
-local getattr = node.get_attribute
-local setattr = setfield
-
local n_getid = node.getid
local n_getlist = node.getlist
local n_getnext = node.getnext
@@ -162,30 +138,27 @@ local n_getprev = node.getprev
local n_getchar = node.getchar
local n_getfont = node.getfont
local n_getsubtype = node.getsubtype
-local n_setfield = node.setfield
local n_getfield = node.getfield
-local n_setattr = node.setattr
-local n_getattr = node.getattr
+local n_getattr = node.get_attribute
local n_getdisc = node.getdisc
local n_getleader = node.getleader
+local n_setfield = node.setfield
+local n_setattr = n_setfield
+
local n_setnext = node.setnext or -- always
function(c,n)
- setfield(c,"next",n)
+ n_setfield(c,"next",n)
end
local n_setprev = node.setprev or -- always
function(c,p)
- setfield(c,"prev",p)
+ n_setfield(c,"prev",p)
end
local n_setlist = node.setlist or -- always
function(c,l)
- setfield(c,"list",l)
+ n_setfield(c,"list",l)
end
local n_setlink = node.setlink or -- always
--- function(c1,c2)
--- if c1 then setfield(c1,"next",c2) end
--- if c2 then setfield(c2,"prev",c1) end
--- end
function(...)
-- not that fast but not used often anyway
local h = nil
@@ -194,8 +167,8 @@ local n_setlink = node.setlink or -- always
if not n then
-- go on
elseif h then
- setfield(h,"next",n)
- setfield(n,"prev",h)
+ n_setfield(h,"next",n)
+ n_setfield(n,"prev",h)
else
h = n
end
@@ -204,8 +177,8 @@ local n_setlink = node.setlink or -- always
end
local n_setboth = node.setboth or -- always
function(c,p,n)
- setfield(c,"prev",p)
- setfield(c,"next",n)
+ n_setfield(c,"prev",p)
+ n_setfield(c,"next",n)
end
nodes.setnext = n_setnext
@@ -230,6 +203,23 @@ nodes.getlist = n_getlist
nodes.getleader = n_getleader
nodes.getdisc = n_getdisc
+if not node.getwhd then
+ function node.getwhd(n)
+ return n_getfield(n,"width"), n_getfield(n,"height"), n_getfield(n,"depth")
+ end
+end
+
+if not node.setwhd then
+ function node.setwhd(n,w,h,d)
+ n_setfield(n,"width",w or 0)
+ n_setfield(n,"height",h or 0)
+ n_setfield(n,"depth",d or 0)
+ end
+end
+
+nodes.getwhd = node.getwhd
+nodes.setwhd = node.setwhd
+
nodes.is_char = node.is_char
nodes.ischar = node.is_char
@@ -670,38 +660,45 @@ end
nodes.keys = keys -- [id][subtype]
nodes.fields = nodefields -- (n)
--- temporary hack
+-- for the moment (pre 6380)
-if LUATEXVERSION <= 1.002 then
+if not nodes.unprotect_glyph then
- local get = tex.get
- local flush = node.free
+ local protect_glyph = nodes.protect_glyph
+ local protect_glyphs = nodes.protect_glyphs
+ local unprotect_glyph = nodes.unprotect_glyph
+ local unprotect_glyphs = nodes.unprotect_glyphs
- function tex.get(name,split)
- local s = get(name)
- if split == true then
- if s then
- local width = s.width
- local stretch = s.stretch
- local shrink = s.shrink
- local stretch_order = s.stretch_order
- local shrink_order = s.shrink_order
- flush(s)
- return width, stretch, shrink, stretch_order, shrink_order
- else
- return 0, 0, 0, 0, 0
- end
- elseif split == false then
- if s then
- local width = s.width
- flush(s)
- return width
- else
- return 0
- end
+ local getnext = nodes.getnext
+ local setnext = nodes.setnext
+
+ function nodes.protectglyphs(first,last)
+ if first == last then
+ return protect_glyph(first)
+ elseif last then
+ local nxt = getnext(last)
+ setnext(last)
+ local f, b = protect_glyphs(first)
+ setnext(last,nxt)
+ return f, b
+ else
+ return protect_glyphs(first)
+ end
+ end
+
+ function nodes.unprotectglyphs(first,last)
+ if first == last then
+ return unprotect_glyph(first)
+ elseif last then
+ local nxt = getnext(last)
+ setnext(last)
+ local f, b = unprotect_glyphs(first)
+ setnext(last,nxt)
+ return f, b
else
- return s
+ return unprotect_glyphs(first)
end
end
end
+
diff --git a/tex/context/base/mkiv/node-mig.mkiv b/tex/context/base/mkiv/node-mig.mkiv
index 143fd4466..010d009b3 100644
--- a/tex/context/base/mkiv/node-mig.mkiv
+++ b/tex/context/base/mkiv/node-mig.mkiv
@@ -40,7 +40,7 @@
\unprotect
-\registerctxluafile{node-mig}{1.001}
+\registerctxluafile{node-mig}{}
\newtoks\everyautomigratefootnotes
\newtoks\everyautomigratemarks
diff --git a/tex/context/base/mkiv/node-nut.lua b/tex/context/base/mkiv/node-nut.lua
index 7e5c3438b..3e9a08b48 100644
--- a/tex/context/base/mkiv/node-nut.lua
+++ b/tex/context/base/mkiv/node-nut.lua
@@ -115,106 +115,8 @@ nuts.tonut = tonut
nodes.tonode = tonode
nodes.tonut = tonut
--- getters
-
-if not direct.getwhd then
- local getfield = direct.getfield
- function direct.getwhd(n)
- return getfield(n,"width"), getfield(n,"height"), getfield(n,"depth")
- end
-end
-
-if not direct.setwhd then
- local setfield = direct.setfield
- function direct.setwhd(n,w,h,d)
- setfield(n,"width",w or 0)
- setfield(n,"height",h or 0)
- setfield(n,"depth",d or 0)
- end
-end
-
-if not direct.getcomponents then
-
- local getfield = direct.getfield
- local setfield = direct.setfield
- local setsubtype = direct.setsubtype
-
- local attributelist_code = nodecodes.attributelist
-
- function direct.getcomponents(n) return getfield(n,"components") end
- function direct.setcomponents(n,c) setfield(n,"components",c) end
- function direct.getkern(n) return getfield(n,"kern") end
- function direct.getwidth(n) return getfield(n,"width") end
- function direct.setwidth(n,w) return setfield(n,"width",w) end
- function direct.getheight(n) return getfield(n,"height") end
- function direct.setheight(n,h) return setfield(n,"height",h) end
- function direct.getdepth(n) return getfield(n,"depth") end
- function direct.setdepth(n,d) return setfield(n,"depth",d) end
- function direct.getshift(n) return getfield(n,"shift") end
- function direct.setshift(n,s) return setfield(n,"shift",s) end
- function direct.getpenalty(n) return getfield(n,"penalty") end
- function direct.setpenalty(n,p) setfield(n,"penalty",p) end
- function direct.getdir(n) return getfield(n,"dir") end
- function direct.setdir(n,p) setfield(n,"dir",p) end
- function direct.getlanguage(n) return getfield(n,"lang") end
- function direct.setlanguage(n,l) return setfield(n,"lang",l) end
- function direct.getattributelist(n) getfield(n,"attr") end
-
- function direct.getnucleus(n) return getfield(n,"nucleus") end
- function direct.setnucleus(n,p) return setfield(n,"nucleus",p) end
- function direct.getsup(n) return getfield(n,"sup") end
- function direct.setsup(n,p) return setfield(n,"sup",p) end
- function direct.getsub(n) return getfield(n,"sub") end
- function direct.setsub(n,p) return setfield(n,"sub",p) end
-
- function direct.setattributelist(n,a)
- if a and type(a) ~= attributelist_code then
- a = getfield(a,"attr")
- end
- setfield(n,"attr",a)
- end
-
- function direct.setkern(n,k,s)
- setfield(n,"kern",k)
- if s then
- setsubtype(n,s)
- end
- end
-
- function direct.setfont(n,f,c)
- setfield(n,"font",f)
- if c then
- setfield(n,"char",f)
- end
- end
-
- function direct.getoffsets(n)
- return getfield(n,"xoffset"), getfield(n,"yoffset")
- end
-
- function direct.setoffsets(n,x,y)
- if x then
- setfield(n,"xoffset",x)
- end
- if y then
- setfield(n,"yoffset",y)
- end
- end
-
-end
-
-if LUATEXVERSION < 1.005 then
- local getfield = direct.getfield
- function direct.getnucleus(n) return getfield(n,"nucleus") end
- function direct.getsub (n) return getfield(n,"sub") end
- function direct.getsup (n) return getfield(n,"sup") end
-end
-
--- if LUATEXVERSION < 1.004 then
--- local gc = direct.getcomponents
--- getcomponents = function(n) local c = gc(n) return c ~= 0 and c or nil end
--- end
-
+-- -- some tracing:
+--
-- local hash = table.setmetatableindex("number")
-- local ga = direct.get_attribute
-- function direct.get_attribute(n,a)
@@ -224,7 +126,7 @@ end
-- function nuts.reportattr()
-- inspect(hash)
-- end
-
+--
-- local function track(name)
-- local n = 0
-- local f = nuts[name]
@@ -236,13 +138,54 @@ end
-- return f(...)
-- end
-- end
-
+--
-- track("getfield")
--- setters
-
-- helpers
+if not direct.getfam then -- LUATEXVERSION < 1.070
+
+ local getfield = direct.getfield
+ local setfield = direct.setfield
+
+ direct.getfam = function(n) return getfield(n,"small_fam") end
+ direct.setfam = function(n,f) setfield(n,"small_fam",f) end
+
+end
+
+if not direct.getdirection then
+
+ local getdir = direct.getdir
+ local setdir = direct.setdir
+
+ direct.getdirection = function(n)
+ local d = getdir(n)
+ if d == "TLT" then return 0 end
+ if d == "+TLT" then return 0, false end
+ if d == "-TLT" then return 0, true end
+ if d == "TRT" then return 1 end
+ if d == "+TRT" then return 1, false end
+ if d == "-TRT" then return 1, true end
+ if d == "LTL" then return 2 end
+ if d == "+LTL" then return 2, false end
+ if d == "-LTL" then return 2, true end
+ if d == "RTT" then return 3 end
+ if d == "+RTT" then return 3, false end
+ if d == "-RTT" then return 3, true end
+ end
+
+ direct.setdirection = function(n,d,c)
+ if d == 0 then if c == true then setdir(n,"-TLT") elseif c == false then setdir(n,"+TLT") else setdir(n,"TLT") end
+ elseif d == 1 then if c == true then setdir(n,"-TRT") elseif c == false then setdir(n,"+TRT") else setdir(n,"TRT") end
+ elseif d == 2 then if c == true then setdir(n,"-LTL") elseif c == false then setdir(n,"+LTL") else setdir(n,"LTL") end
+ elseif d == 3 then if c == true then setdir(n,"-RTT") elseif c == false then setdir(n,"+RTT") else setdir(n,"RTT") end
+ else if c == true then setdir(n,"-TLT") elseif c == false then setdir(n,"+TLT") else setdir(n,"TLT") end end
+ end
+
+end
+
+local nuts = nodes.nuts
+
nuts.tostring = direct.tostring
nuts.copy = direct.copy
nuts.copy_node = direct.copy
@@ -288,8 +231,9 @@ nuts.has_attribute = direct.has_attribute
nuts.set_attribute = direct.set_attribute
nuts.unset_attribute = direct.unset_attribute
-nuts.protect_glyphs = direct.protect_glyphs
nuts.protect_glyph = direct.protect_glyph
+nuts.protect_glyphs = direct.protect_glyphs
+nuts.unprotect_glyph = direct.unprotect_glyph
nuts.unprotect_glyphs = direct.unprotect_glyphs
nuts.ligaturing = direct.ligaturing
nuts.kerning = direct.kerning
@@ -324,6 +268,8 @@ nuts.effective_glue = direct.effective_glue
nuts.getglue = direct.getglue
nuts.setglue = direct.setglue
+nuts.getboxglue = direct.getglue
+nuts.setboxglue = direct.setglue
nuts.getdisc = direct.getdisc
nuts.setdisc = direct.setdisc
@@ -352,6 +298,8 @@ nuts.getchar = direct.getchar
nuts.setchar = direct.setchar
nuts.getfont = direct.getfont
nuts.setfont = direct.setfont
+nuts.getfam = direct.getfam
+nuts.setfam = direct.setfam
nuts.getboth = direct.getboth
nuts.setboth = direct.setboth
@@ -387,6 +335,9 @@ nuts.setkern = direct.setkern
nuts.getdir = direct.getdir
nuts.setdir = direct.setdir
+nuts.getdirection = direct.getdirection
+nuts.setdirection = direct.setdirection
+
nuts.getpenalty = direct.getpenalty
nuts.setpenalty = direct.setpenalty
@@ -415,17 +366,17 @@ local d_setboth = direct.setboth
local d_getboth = direct.getboth
local function remove(head,current,free_too)
- local t = current
- head, current = d_remove_node(head,current)
- if not t then
- -- forget about it
- elseif free_too then
- d_flush_node(t)
- t = nil
- else
- d_setboth(t) -- (t,nil,nil)
+ if current then
+ local h, c = d_remove_node(head,current)
+ if free_too then
+ d_flush_node(current)
+ return h, c
+ else
+ d_setboth(current)
+ return h, c, current
+ end
end
- return head, current, t
+ return head, current
end
-- alias
@@ -446,12 +397,6 @@ function nuts.replace(head,current,new) -- no head returned if false
head, current, new = false, head, current
end
local prev, next = d_getboth(current)
--- if next then
--- d_setlink(new,next)
--- end
--- if prev then
--- d_setlink(prev,new)
--- end
if prev or next then
d_setlink(prev,new,next)
end
@@ -782,7 +727,7 @@ nodes.properties = {
}
------.set_properties_mode(true,false) -- shallow copy ... problem: in fonts we then affect the originals too
-direct.set_properties_mode(true,true) -- create metatable, slower but needed for font-inj.lua (unless we use an intermediate table)
+direct.set_properties_mode(true,true) -- create metatable, slower but needed for font-otj.lua (unless we use an intermediate table)
-- todo:
--
@@ -858,8 +803,113 @@ end
-- here:
-nodes.set_synctex_line = node.set_synctex_line
-nodes.set_synctex_tag = node.set_synctex_tag
-
nuts.get_synctex_fields = direct.get_synctex_fields
nuts.set_synctex_fields = direct.set_synctex_fields
+
+-- for now
+
+nodes.uses_font = node.uses_font
+nuts.uses_font = direct.uses_font
+
+if not nuts.uses_font then
+
+ local glyph_code = nodecodes.glyph
+ local getdisc = nuts.getdisc
+ local getfont = nuts.getfont
+ local traverse_id = nuts.traverse_id
+ local tonut = nodes.tonut
+
+ function nuts.uses_font(n,font)
+ local pre, post, replace = getdisc(n)
+ if pre then
+ -- traverse_char
+ for n in traverse_id(glyph_code,pre) do
+ if getfont(n) == font then
+ return true
+ end
+ end
+ end
+ if post then
+ for n in traverse_id(glyph_code,post) do
+ if getfont(n) == font then
+ return true
+ end
+ end
+ end
+ if replace then
+ for n in traverse_id(glyph_code,replace) do
+ if getfont(n) == font then
+ return true
+ end
+ end
+ end
+ return false
+ end
+
+ function nodes.uses_font(n,font)
+ return nuts.uses_font(tonut(n),font)
+ end
+
+end
+
+-- for the moment (pre 6380)
+
+if not nuts.unprotect_glyph then
+
+ local protect_glyph = nuts.protect_glyph
+ local protect_glyphs = nuts.protect_glyphs
+ local unprotect_glyph = nuts.unprotect_glyph
+ local unprotect_glyphs = nuts.unprotect_glyphs
+
+ local getnext = nuts.getnext
+ local setnext = nuts.setnext
+
+ function nuts.protectglyphs(first,last)
+ if first == last then
+ return protect_glyph(first)
+ elseif last then
+ local nxt = getnext(last)
+ setnext(last)
+ local f, b = protect_glyphs(first)
+ setnext(last,nxt)
+ return f, b
+ else
+ return protect_glyphs(first)
+ end
+ end
+
+ function nuts.unprotectglyphs(first,last)
+ if first == last then
+ return unprotect_glyph(first)
+ elseif last then
+ local nxt = getnext(last)
+ setnext(last)
+ local f, b = unprotect_glyphs(first)
+ setnext(last,nxt)
+ return f, b
+ else
+ return unprotect_glyphs(first)
+ end
+ end
+
+end
+
+if LUATEXFUNCTIONALITY < 6384 then -- LUATEXVERSION < 1.070
+
+ local getfield = nuts.getfield
+ local setfield = nuts.setfield
+
+ function nuts.getboxglue(n,glue_set,glue_order,glue_sign)
+ return
+ getfield(n,"glue_set"),
+ getfield(n,"glue_order"),
+ getfield(n,"glue_sign")
+ end
+
+ function nuts.setboxglue(n,glue_set,glue_order,glue_sign)
+ setfield(n,"glue_set", glue_set or 0)
+ setfield(n,"glue_order",glue_order or 0)
+ setfield(n,"glue_sign", glue_sign or 0)
+ end
+
+end
diff --git a/tex/context/base/mkiv/node-pag.mkiv b/tex/context/base/mkiv/node-pag.mkiv
index b8ed197fe..970939dcf 100644
--- a/tex/context/base/mkiv/node-pag.mkiv
+++ b/tex/context/base/mkiv/node-pag.mkiv
@@ -15,6 +15,6 @@
\unprotect
-\registerctxluafile{node-pag}{1.001}
+\registerctxluafile{node-pag}{}
\protect \endinput
diff --git a/tex/context/base/mkiv/node-pro.lua b/tex/context/base/mkiv/node-pro.lua
index 3251b0133..4509bac18 100644
--- a/tex/context/base/mkiv/node-pro.lua
+++ b/tex/context/base/mkiv/node-pro.lua
@@ -71,15 +71,16 @@ processors.enabled = true -- this will become a proper state (like trackers)
do
- local has_glyph = nodes.has_glyph
+ local has_glyph = nodes.has_glyph
+ local count_nodes = nodes.countall
function processors.pre_linebreak_filter(head,groupcode) -- ,size,packtype,direction
local found = force_processors or has_glyph(head)
if found then
if trace_callbacks then
- local before = nodes.count(head,true)
+ local before = count_nodes(head,true)
local head, done = actions(head,groupcode) -- ,size,packtype,direction
- local after = nodes.count(head,true)
+ local after = count_nodes(head,true)
if done then
tracer("pre_linebreak","changed",head,groupcode,before,after,true)
else
@@ -91,7 +92,7 @@ do
return done and head or true
end
elseif trace_callbacks then
- local n = nodes.count(head,false)
+ local n = count_nodes(head,false)
tracer("pre_linebreak","no chars",head,groupcode,n,n)
end
return true
@@ -101,9 +102,9 @@ do
local found = force_processors or has_glyph(head)
if found then
if trace_callbacks then
- local before = nodes.count(head,true)
+ local before = count_nodes(head,true)
local head, done = actions(head,groupcode,size,packtype,direction,attributes)
- local after = nodes.count(head,true)
+ local after = count_nodes(head,true)
if done then
tracer("hpack","changed",head,groupcode,before,after,true)
else
@@ -115,7 +116,7 @@ do
return done and head or true
end
elseif trace_callbacks then
- local n = nodes.count(head,false)
+ local n = count_nodes(head,false)
tracer("hpack","no chars",head,groupcode,n,n)
end
return true
@@ -167,7 +168,8 @@ end
do
- local actions = tasks.actions("finalizers") -- head, where
+ local actions = tasks.actions("finalizers") -- head, where
+ local count_nodes = nodes.countall
-- beware, these are packaged boxes so no first_glyph test
-- maybe some day a hash with valid groupcodes
@@ -178,9 +180,9 @@ do
function processors.post_linebreak_filter(head,groupcode)
if trace_callbacks then
- local before = nodes.count(head,true)
+ local before = count_nodes(head,true)
local head, done = actions(head,groupcode)
- local after = nodes.count(head,true)
+ local after = count_nodes(head,true)
if done then
tracer("post_linebreak","changed",head,groupcode,before,after,true)
else
diff --git a/tex/context/base/mkiv/node-ref.lua b/tex/context/base/mkiv/node-ref.lua
index b313a00b6..1ec77e83d 100644
--- a/tex/context/base/mkiv/node-ref.lua
+++ b/tex/context/base/mkiv/node-ref.lua
@@ -16,6 +16,7 @@ if not modules then modules = { } end modules ['node-ref'] = {
-- is grouplevel still used?
+local tonumber = tonumber
local concat = table.concat
local attributes, nodes, node = attributes, nodes, node
@@ -51,7 +52,6 @@ local tonode = nuts.tonode
local tonut = nuts.tonut
local getfield = nuts.getfield
-local setfield = nuts.setfield
local setlink = nuts.setlink
local setnext = nuts.setnext
local setprev = nuts.setprev
@@ -69,6 +69,7 @@ local getsubtype = nuts.getsubtype
local getwhd = nuts.getwhd
local getdir = nuts.getdir
local setshift = nuts.setshift
+local getboxglue = nuts.getboxglue
local hpack_list = nuts.hpack
local vpack_list = nuts.vpack
@@ -283,18 +284,16 @@ local function inject_range(head,first,last,reference,make,stack,parent,pardir,t
reference,pardir or "---",txtdir or "---",
tosequence(first,last,true),width,height,depth)
end
-if first == last and getid(parent) == vlist_code and getid(first) == hlist_code then
- if trace_areas then
- -- think of a button without \dontleavehmode in the mvl
- report_area("compensating for link in vlist")
- end
- setlink(result,getlist(first))
- setlist(first,result)
-else
- -- setlink(getprev(first),result)
- -- setlink(result,first)
- setlink(getprev(first),result,first)
-end
+ if first == last and getid(parent) == vlist_code and getid(first) == hlist_code then
+ if trace_areas then
+ -- think of a button without \dontleavehmode in the mvl
+ report_area("compensating for link in vlist")
+ end
+ setlink(result,getlist(first))
+ setlist(first,result)
+ else
+ setlink(getprev(first),result,first)
+ end
return head, last
end
else
@@ -320,10 +319,11 @@ local function inject_list(id,current,reference,make,stack,pardir,txtdir)
if prev and getid(prev) == glue_code and getsubtype(prev) == parfillskip_code then
width = dimensions(current,first,getprev(prev)) -- maybe not current as we already take care of it
else
+ local set, order, sign = getboxglue(current)
if moveright then
- width = width - getfield(first,"stretch") * getfield(current,"glue_set") * getfield(current,"glue_sign")
+ width = width - getfield(first,"stretch") * set * sign
end
- width = width - getfield(last,"stretch") * getfield(current,"glue_set") * getfield(current,"glue_sign")
+ width = width - getfield(last,"stretch") * set * sign
end
end
else
@@ -351,8 +351,6 @@ local function inject_list(id,current,reference,make,stack,pardir,txtdir)
setlist(current,result)
elseif moveright then -- brr no prevs done
-- result after first
- -- setlink(result,getnext(first))
- -- setlink(first,result)
setlink(first,result,getnext(first))
else
-- first after result
@@ -371,61 +369,30 @@ end
-- we need to do vlists differently
local function inject_areas(head,attribute,make,stack,done,skip,parent,pardir,txtdir) -- main
- if head then
- local current, first, last, firstdir, reference = head, nil, nil, nil, nil
- pardir = pardir or "==="
- txtdir = txtdir or "==="
- while current do
- local id = getid(current)
- if id == hlist_code or id == vlist_code then
- local r = getattr(current,attribute)
- -- test \goto{test}[page(2)] test \gotobox{test}[page(2)]
- -- test \goto{\TeX}[page(2)] test \gotobox{\hbox {x} \hbox {x}}[page(2)]
- -- if r and (not skip or r >) skip then -- maybe no > test
- -- inject_list(id,current,r,make,stack,pardir,txtdir)
- -- end
- if r then
- if not reference then
- reference, first, last, firstdir = r, current, current, txtdir
- elseif r == reference then
- -- same link
- last = current
- elseif (done[reference] or 0) == 0 then
- if not skip or r > skip then -- maybe no > test
- head, current = inject_range(head,first,last,reference,make,stack,parent,pardir,firstdir)
- reference, first, last, firstdir = nil, nil, nil, nil
- end
- else
- reference, first, last, firstdir = r, current, current, txtdir
- end
- done[r] = (done[r] or 0) + 1
- end
- local list = getlist(current)
- if list then
- local h, ok
- h, ok , pardir, txtdir = inject_areas(list,attribute,make,stack,done,r or skip or 0,current,pardir,txtdir)
- if h ~= current then
- setlist(current,h)
- end
- end
- if r then
- done[r] = done[r] - 1
- end
- elseif id == dir_code then
- txtdir = getdir(current)
- elseif id == localpar_code then
- pardir = getdir(current)
- elseif id == glue_code and getsubtype(current) == leftskip_code then -- any glue at the left?
- --
- else
- local r = getattr(current,attribute)
- if not r then
- -- just go on, can be kerns
- elseif not reference then
+ local first, last, firstdir, reference
+ if not pardir then
+ pardir = "==="
+ end
+    if not txtdir then
+ txtdir = "==="
+ end
+ local current = head
+ while current do
+ local id = getid(current)
+ if id == hlist_code or id == vlist_code then
+ local r = getattr(current,attribute)
+ -- test \goto{test}[page(2)] test \gotobox{test}[page(2)]
+ -- test \goto{\TeX}[page(2)] test \gotobox{\hbox {x} \hbox {x}}[page(2)]
+ -- if r and (not skip or r >) skip then -- maybe no > test
+ -- inject_list(id,current,r,make,stack,pardir,txtdir)
+ -- end
+ if r then
+ if not reference then
reference, first, last, firstdir = r, current, current, txtdir
elseif r == reference then
+ -- same link
last = current
- elseif (done[reference] or 0) == 0 then -- or id == glue_code and getsubtype(current) == right_skip_code
+ elseif (done[reference] or 0) == 0 then
if not skip or r > skip then -- maybe no > test
head, current = inject_range(head,first,last,reference,make,stack,parent,pardir,firstdir)
reference, first, last, firstdir = nil, nil, nil, nil
@@ -433,53 +400,89 @@ local function inject_areas(head,attribute,make,stack,done,skip,parent,pardir,tx
else
reference, first, last, firstdir = r, current, current, txtdir
end
+ done[r] = (done[r] or 0) + 1
end
- current = getnext(current)
- end
- if reference and (done[reference] or 0) == 0 then
- head = inject_range(head,first,last,reference,make,stack,parent,pardir,firstdir)
- end
- end
- return head, true, pardir, txtdir
-end
-
-local function inject_area(head,attribute,make,stack,done,parent,pardir,txtdir) -- singular !
- if head then
- pardir = pardir or "==="
- txtdir = txtdir or "==="
- local current = head
- while current do
- local id = getid(current)
- if id == hlist_code or id == vlist_code then
- local r = getattr(current,attribute)
- if r and not done[r] then
- done[r] = true
- inject_list(id,current,r,make,stack,pardir,txtdir)
+ local list = getlist(current)
+ if list then
+ local h, ok
+ h, ok, pardir, txtdir = inject_areas(list,attribute,make,stack,done,r or skip or 0,current,pardir,txtdir)
+ if h ~= current then
+ setlist(current,h)
end
- local list = getlist(current)
- if list then
- local h = inject_area(list,attribute,make,stack,done,current,pardir,txtdir)
- if h ~= current then
- setlist(current,h)
- end
+ end
+ if r then
+ done[r] = done[r] - 1
+ end
+ elseif id == glue_code and getsubtype(current) == leftskip_code then -- any glue at the left?
+ --
+ elseif id == dir_code then
+ txtdir = getdir(current)
+ elseif id == localpar_code then -- only test at begin
+ pardir = getdir(current)
+ else
+ local r = getattr(current,attribute)
+ if not r then
+ -- just go on, can be kerns
+ elseif not reference then
+ reference, first, last, firstdir = r, current, current, txtdir
+ elseif r == reference then
+ last = current
+ elseif (done[reference] or 0) == 0 then -- or id == glue_code and getsubtype(current) == right_skip_code
+ if not skip or r > skip then -- maybe no > test
+ head, current = inject_range(head,first,last,reference,make,stack,parent,pardir,firstdir)
+ reference, first, last, firstdir = nil, nil, nil, nil
end
- elseif id == dir_code then
- txtdir = getdir(current)
- elseif id == localpar_code then
- pardir = getdir(current)
else
- local r = getattr(current,attribute)
- if r and not done[r] then
- done[r] = true
- head, current = inject_range(head,current,current,r,make,stack,parent,pardir,txtdir)
- end
+ reference, first, last, firstdir = r, current, current, txtdir
end
- current = getnext(current)
end
+ current = getnext(current)
+ end
+ if reference and (done[reference] or 0) == 0 then
+ head = inject_range(head,first,last,reference,make,stack,parent,pardir,firstdir)
end
- return head, true
+ return head, true, pardir, txtdir
end
+-- local function inject_area(head,attribute,make,stack,done,parent,pardir,txtdir) -- singular !
+-- if not pardir then
+-- pardir = "==="
+-- end
+-- if not texdir then
+-- txtdir = "==="
+-- end
+-- local current = head
+-- while current do
+-- local id = getid(current)
+-- if id == hlist_code or id == vlist_code then
+-- local r = getattr(current,attribute)
+-- if r and not done[r] then
+-- done[r] = true
+-- inject_list(id,current,r,make,stack,pardir,txtdir)
+-- end
+-- local list = getlist(current)
+-- if list then
+-- local h = inject_area(list,attribute,make,stack,done,current,pardir,txtdir)
+-- if h ~= current then
+-- setlist(current,h)
+-- end
+-- end
+-- elseif id == dir_code then
+-- txtdir = getdir(current)
+-- elseif id == localpar_code then
+-- pardir = getdir(current)
+-- else
+-- local r = getattr(current,attribute)
+-- if r and not done[r] then
+-- done[r] = true
+-- head, current = inject_range(head,current,current,r,make,stack,parent,pardir,txtdir)
+-- end
+-- end
+-- current = getnext(current)
+-- end
+-- return head, true
+-- end
+
-- tracing: todo: use predefined colors
local register_color = colors.register
@@ -555,8 +558,6 @@ local function colorize(width,height,depth,n,reference,what,sr,offset)
local text = addstring(what,sr,shift)
if text then
local kern = new_kern(-getwidth(text))
- -- setlink(kern,text)
- -- setlink(text,rule)
setlink(kern,text,rule)
return kern
end
@@ -669,17 +670,9 @@ local function makereference(width,height,depth,reference) -- height and depth a
end
end
--- function nodes.references.handler(head)
--- if topofstack > 0 then
--- return inject_areas(head,attribute,makereference,stack,done)
--- else
--- return head, false
--- end
--- end
-
function nodes.references.handler(head)
- if topofstack > 0 then
- head = tonut(head)
+ if head and topofstack > 0 then
+ local head = tonut(head)
local head, done = inject_areas(head,attribute,makereference,stack,done)
return tonode(head), done
else
@@ -791,7 +784,7 @@ local function makedestination(width,height,depth,reference)
end
-- function nodes.destinations.handler(head)
--- if topofstack > 0 then
+-- if head and topofstack > 0 then
-- return inject_area(head,attribute,makedestination,stack,done) -- singular
-- else
-- return head, false
@@ -799,8 +792,8 @@ end
-- end
function nodes.destinations.handler(head)
- if topofstack > 0 then
- head = tonut(head)
+ if head and topofstack > 0 then
+ local head = tonut(head)
local head, done = inject_areas(head,attribute,makedestination,stack,done)
return tonode(head), done
else
@@ -808,7 +801,6 @@ function nodes.destinations.handler(head)
end
end
-
-- will move
function references.mark(reference,h,d,view)
diff --git a/tex/context/base/mkiv/node-res.lua b/tex/context/base/mkiv/node-res.lua
index 8b7ec1a62..a6211e80e 100644
--- a/tex/context/base/mkiv/node-res.lua
+++ b/tex/context/base/mkiv/node-res.lua
@@ -7,7 +7,6 @@ if not modules then modules = { } end modules ['node-res'] = {
}
local gmatch, format = string.gmatch, string.format
-local tonumber, round = tonumber, math.round
--[[ldx--
The next function is not that much needed but in we use
@@ -26,6 +25,7 @@ local skipcodes = nodes.skipcodes
local kerncodes = nodes.kerncodes
local rulecodes = nodes.rulecodes
local nodecodes = nodes.nodecodes
+local gluecodes = nodes.gluecodes
local boundarycodes = nodes.boundarycodes
local usercodes = nodes.usercodes
@@ -61,35 +61,37 @@ setmetatable(userids, {
-- nuts overload
-local nuts = nodes.nuts
-local nutpool = { }
-nuts.pool = nutpool
-
-local tonut = nuts.tonut
-local tonode = nuts.tonode
-
-local getbox = nuts.getbox
-local getfield = nuts.getfield
-local getid = nuts.getid
-local getlist = nuts.getlist
-local getglue = nuts.getglue
-
-local setfield = nuts.setfield
-local setchar = nuts.setchar
-local setlist = nuts.setlist
-local setwhd = nuts.setwhd
-local setglue = nuts.setglue
-local setdisc = nuts.setdisc
-local setfont = nuts.setfont
-local setkern = nuts.setkern
-local setpenalty = nuts.setpenalty
-local setdir = nuts.setdir
-local setshift = nuts.setshift
-local setwidth = nuts.setwidth
-
-local copy_nut = nuts.copy
-local new_nut = nuts.new
-local flush_nut = nuts.flush
+local nuts = nodes.nuts
+local nutpool = { }
+nuts.pool = nutpool
+
+local tonut = nuts.tonut
+local tonode = nuts.tonode
+
+local getbox = nuts.getbox
+local getid = nuts.getid
+local getlist = nuts.getlist
+local getglue = nuts.getglue
+
+local setfield = nuts.setfield
+local setchar = nuts.setchar
+local setlist = nuts.setlist
+local setwhd = nuts.setwhd
+local setglue = nuts.setglue
+local setdisc = nuts.setdisc
+local setfont = nuts.setfont
+local setkern = nuts.setkern
+local setpenalty = nuts.setpenalty
+local setdir = nuts.setdir
+local setdirection = nuts.setdirection
+local setshift = nuts.setshift
+local setwidth = nuts.setwidth
+local setsubtype = nuts.setsubtype
+local setleader = nuts.setleader
+
+local copy_nut = nuts.copy
+local new_nut = nuts.new
+local flush_nut = nuts.flush
-- at some point we could have a dual set (the overhead of tonut is not much larger than
-- metatable associations at the lua/c end esp if we also take assignments into account
@@ -162,11 +164,11 @@ local special = register_nut(new_nut("whatsit",whatsitcodes.special))
local user_node = new_nut("whatsit",whatsitcodes.userdefined)
local user_number = register_nut(copy_nut(user_node)) setfield(user_number, "type",usercodes.number)
-local user_nodes = register_nut(copy_nut(user_node)) setfield(user_nodes, "type",usercodes.nodes)
+local user_nodes = register_nut(copy_nut(user_node)) setfield(user_nodes, "type",usercodes.node)
local user_string = register_nut(copy_nut(user_node)) setfield(user_string, "type",usercodes.string)
-local user_tokens = register_nut(copy_nut(user_node)) setfield(user_tokens, "type",usercodes.tokens)
+local user_tokens = register_nut(copy_nut(user_node)) setfield(user_tokens, "type",usercodes.token)
----- user_lua = register_nut(copy_nut(user_node)) setfield(user_lua, "type",usercodes.lua) -- in > 0.95
-local user_attributes = register_nut(copy_nut(user_node)) setfield(user_attributes,"type",usercodes.attributes)
+local user_attributes = register_nut(copy_nut(user_node)) setfield(user_attributes,"type",usercodes.attribute)
local left_margin_kern = register_nut(new_nut("margin_kern",0))
local right_margin_kern = register_nut(new_nut("margin_kern",1))
@@ -179,10 +181,22 @@ local rightskip = register_nut(new_nut("glue",skipcodes.rightskip))
local temp = register_nut(new_nut("temp",0))
local noad = register_nut(new_nut("noad"))
+local delimiter = register_nut(new_nut("delim"))
+local fence = register_nut(new_nut("fence"))
+local submlist = register_nut(new_nut("sub_mlist"))
+local accent = register_nut(new_nut("accent"))
+local radical = register_nut(new_nut("radical"))
+local fraction = register_nut(new_nut("fraction"))
+local subbox = register_nut(new_nut("sub_box"))
+local mathchar = register_nut(new_nut("math_char"))
+local mathtextchar = register_nut(new_nut("math_text_char"))
+local choice = register_nut(new_nut("choice"))
local boundary = register_nut(new_nut("boundary",boundarycodes.user))
local wordboundary = register_nut(new_nut("boundary",boundarycodes.word))
+local cleader = register_nut(copy_nut(glue)) setsubtype(cleader,gluecodes.cleaders) setglue(cleader,0,65536,0,2,0)
+
-- the dir field needs to be set otherwise crash:
local rule = register_nut(new_nut("rule")) setdir(rule, "TLT")
@@ -329,6 +343,18 @@ function nutpool.textdir(dir)
return t
end
+function nutpool.direction(dir,swap)
+ local t = copy_nut(textdir)
+ if not dir then
+ -- just a l2r start node
+ elseif swap then
+ setdirection(t,dir,true)
+ else
+ setdirection(t,dir,false)
+ end
+ return t
+end
+
function nutpool.rule(width,height,depth,dir) -- w/h/d == nil will let them adapt
local n = copy_nut(rule)
if width or height or depth then
@@ -362,6 +388,17 @@ function nutpool.userrule(width,height,depth,dir) -- w/h/d == nil will let them
return n
end
+function nutpool.leader(width,list)
+ local n = copy_nut(cleader)
+ if width then
+ setwidth(n,width)
+ end
+ if list then
+ setleader(n,list)
+ end
+ return n
+end
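+
+-- A hypothetical usage sketch ("somelist" is an assumed, ready-made nut list): the
+-- resulting cleaders glue carries the list and can be given an explicit width.
+--
+--   local l = nutpool.leader(10*65536,somelist) -- a 10pt wide cleaders glue wrapping somelist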
+
function nutpool.latelua(code)
local n = copy_nut(latelua)
setfield(n,"string",code)
@@ -404,9 +441,19 @@ function nutpool.temp()
return copy_nut(temp)
end
-function nutpool.noad()
- return copy_nut(noad)
-end
+function nutpool.noad() return copy_nut(noad) end
+function nutpool.delimiter() return copy_nut(delimiter) end nutpool.delim = nutpool.delimiter
+function nutpool.fence() return copy_nut(fence) end
+function nutpool.submlist() return copy_nut(submlist) end
+function nutpool.noad() return copy_nut(noad) end
+function nutpool.fence() return copy_nut(fence) end
+function nutpool.accent() return copy_nut(accent) end
+function nutpool.radical() return copy_nut(radical) end
+function nutpool.fraction() return copy_nut(fraction) end
+function nutpool.subbox() return copy_nut(subbox) end
+function nutpool.mathchar() return copy_nut(mathchar) end
+function nutpool.mathtextchar() return copy_nut(mathtextchar) end
+function nutpool.choice() return copy_nut(choice) end
local function new_hlist(list,width,height,depth,shift)
local n = copy_nut(hlist)
diff --git a/tex/context/base/mkiv/node-rul.lua b/tex/context/base/mkiv/node-rul.lua
index 4ec651d3b..2b0368c2b 100644
--- a/tex/context/base/mkiv/node-rul.lua
+++ b/tex/context/base/mkiv/node-rul.lua
@@ -17,6 +17,9 @@ if not modules then modules = { } end modules ['node-rul'] = {
-- fill s withcolor .5white ;
-- draw boundingbox s withcolor yellow;
+local tonumber = tonumber
+
+local context = context
local attributes = attributes
local nodes = nodes
local properties = nodes.properties
@@ -28,13 +31,13 @@ local tonode = nuts.tonode
local tonut = nuts.tonut
local getfield = nuts.getfield
-local setfield = nuts.setfield
local setnext = nuts.setnext
local setprev = nuts.setprev
local setlink = nuts.setlink
local getnext = nuts.getnext
local getprev = nuts.getprev
local getid = nuts.getid
+local getdir = nuts.getdir
local getattr = nuts.getattr
local setattr = nuts.setattr
local getfont = nuts.getfont
@@ -57,6 +60,7 @@ local traverse_id = nuts.traverse_id
local list_dimensions = nuts.rangedimensions
local hpack_nodes = nuts.hpack
local current_attr = nuts.current_attr
+local copy_list = nuts.copy_list
local nodecodes = nodes.nodecodes
local rulecodes = nodes.rulecodes
@@ -82,6 +86,7 @@ local nodepool = nuts.pool
local new_rule = nodepool.rule
local new_userrule = nodepool.userrule
local new_kern = nodepool.kern
+local new_leader = nodepool.leader
local n_tostring = nodes.idstostring
local n_tosequence = nodes.tosequence
@@ -125,13 +130,16 @@ local rules = nodes.rules or { }
nodes.rules = rules
rules.data = rules.data or { }
+local nutrules = nuts.rules or { }
+nuts.rules = nutrules -- not that many
+
storage.register("nodes/rules/data", rules.data, "nodes.rules.data")
local data = rules.data
-- we implement user rules here as it takes less code this way
-local function userrule(t,noattributes)
+local function usernutrule(t,noattributes)
local r = new_userrule(t.width or 0,t.height or 0,t.depth or 0)
if noattributes == false or noattributes == nil then
-- avoid fuzzy ones
@@ -139,7 +147,13 @@ local function userrule(t,noattributes)
setattrlist(r,current_attr())
end
properties[r] = t
- return tonode(r)
+ return r
+end
+
+nutrules.userrule = usernutrule
+
+local function userrule(t,noattributes)
+ return tonode(usernutrule(t,noattributes))
end
rules.userrule = userrule
@@ -208,8 +222,19 @@ local trace_ruled = false trackers.register("nodes.rules", function(v) trace_
local report_ruled = logs.reporter("nodes","rules")
function rules.define(settings)
- data[#data+1] = settings
- context(#data)
+ local nofdata = #data+1
+ data[nofdata] = settings
+ local text = settings.text
+ if text then
+ local b = nuts.takebox(text)
+ if b then
+ nodepool.register(b)
+ settings.text = getlist(b)
+ else
+ settings.text = nil
+ end
+ end
+ context(nofdata)
end
local function flush_ruled(head,f,l,d,level,parent,strip) -- not that fast but acceptable for this purpose
@@ -242,7 +267,6 @@ local function flush_ruled(head,f,l,d,level,parent,strip) -- not that fast but a
local method = d.method
local empty = d.empty == v_yes
local offset = d.offset
- local continue = d.continue
local dy = d.dy
local order = d.order
local max = d.max
@@ -322,18 +346,28 @@ local function flush_ruled(head,f,l,d,level,parent,strip) -- not that fast but a
}
inject(tonut(r),w,ht,dp)
else
- for i=1,level do
- local ht = (offset+(i-1)*dy)*e + rulethickness - m
- local dp = -(offset+(i-1)*dy)*e + rulethickness + m
- local r = new_rule(w,ht,dp)
- if color then
- setattr(r,a_colormodel,colorspace)
- setattr(r,a_color,color)
- end
- if transparency then
- setattr(r,a_transparency,transparency)
+ local tx = d.text
+ if tx then
+ tx = copy_list(tx)
+ if d["repeat"] == v_yes then
+ tx = new_leader(w,tx)
end
+ local r = hpack_nodes(tx,w,"exactly")
inject(r,w,ht,dp)
+ else
+ for i=1,level do
+ local ht = (offset+(i-1)*dy)*e + rulethickness - m
+ local dp = -(offset+(i-1)*dy)*e + rulethickness + m
+ local r = new_rule(w,ht,dp)
+ if color then
+ setattr(r,a_colormodel,colorspace)
+ setattr(r,a_color,color)
+ end
+ if transparency then
+ setattr(r,a_transparency,transparency)
+ end
+ inject(r,w,ht,dp)
+ end
end
end
return head
@@ -617,6 +651,8 @@ implement {
{ "ta", "integer" },
{ "mp" },
{ "empty" },
+ { "text", "integer" },
+ { "repeat" },
}
}
}
diff --git a/tex/context/base/mkiv/node-rul.mkiv b/tex/context/base/mkiv/node-rul.mkiv
index bfdd17d30..643e93c42 100644
--- a/tex/context/base/mkiv/node-rul.mkiv
+++ b/tex/context/base/mkiv/node-rul.mkiv
@@ -71,7 +71,7 @@
%definesystemattribute[ruled]
%definesystemattribute[shifted]
-\registerctxluafile{node-rul}{1.001}
+\registerctxluafile{node-rul}{}
\installcorenamespace{bar}
\installcorenamespace{barindex}
@@ -108,8 +108,14 @@
\setuevalue\currentbar{\node_rules_direct{\currentbar}}%
\to \everydefinebar
+\newbox\b_node_rules
+
\unexpanded\def\node_rules_define
{\edef\p_node_rules_color{\barparameter\c!color}%
+ \edef\p_node_text{\barparameter\c!text}%
+ \ifx\p_node_text\empty\else
+ \setbox\b_node_rules\hbox{\p_node_text}%
+ \fi
\setevalue{\??barattribute\currentbar}{\number
\clf_definerule
continue {\barparameter\c!continue}%
@@ -125,6 +131,11 @@
offset \barparameter\c!offset\space % number
dy \barparameter\c!dy\space % number
empty {\barparameter\c!empty}%
+ \ifx\p_node_text\empty\else
+ % not that useful and efficient, more for testing something
+ text \b_node_rules
+ repeat {\barparameter\c!repeat}%
+ \fi
\relax}}
\unexpanded\def\node_rules_redefine#1%
diff --git a/tex/context/base/mkiv/node-scn.lua b/tex/context/base/mkiv/node-scn.lua
index 67a0badec..b294b3013 100644
--- a/tex/context/base/mkiv/node-scn.lua
+++ b/tex/context/base/mkiv/node-scn.lua
@@ -48,14 +48,13 @@ local spaceskip_code = gluecodes.spaceskip
local xspaceskip_code = gluecodes.xspaceskip
local leader_code = gluecodes.leaders
-local kerning_code = kerncodes.kern
+local fontkern_code = kerncodes.fontkern
local variables = interfaces.variables
local privateattributes = attributes.private
local a_runningtext = privateattributes('runningtext')
-local a_fontkern = privateattributes('fontkern')
local v_yes = variables.yes
local v_all = variables.all
@@ -173,7 +172,7 @@ local function processwords(attribute,data,flush,head,parent,skip) -- we have hl
if f then
l = n
end
- elseif id == kern_code and (getsubtype(n) == kerning_code or getattr(n,a_fontkern)) then
+ elseif id == kern_code and getsubtype(n) == fontkern_code then
if f then
l = n
end
@@ -276,7 +275,7 @@ local function processranges(attribute,flush,head,parent,depth,skip)
else
-- weird
end
- elseif id == kern_code and (getsubtype(n) == kerning_code or getattr(n,a_fontkern)) then
+ elseif id == kern_code and getsubtype(n) == fontkern_code then
if f then
l = n
end
diff --git a/tex/context/base/mkiv/node-ser.lua b/tex/context/base/mkiv/node-ser.lua
index f1be21f84..7ed2b8d00 100644
--- a/tex/context/base/mkiv/node-ser.lua
+++ b/tex/context/base/mkiv/node-ser.lua
@@ -106,6 +106,7 @@ nodes.ignorablefields = ignore
local function astable(n,sparse) -- not yet ok, might get obsolete anyway
n = tonode(n)
local f = getfields(n)
+ local t = { }
for i=1,#f do
local v = f[i]
local d = n[v]
diff --git a/tex/context/base/mkiv/node-shp.lua b/tex/context/base/mkiv/node-shp.lua
index 216e6462a..2e7a3529a 100644
--- a/tex/context/base/mkiv/node-shp.lua
+++ b/tex/context/base/mkiv/node-shp.lua
@@ -37,7 +37,6 @@ local tonode = nuts.tonode
local remove_node = nuts.remove
local traverse_nodes = nuts.traverse
-local getfield = nuts.getfield
local setfield = nuts.setfield
local setlink = nuts.setlink
local setprev = nuts.setprev
@@ -152,10 +151,10 @@ function handlers.cleanupbox(head)
return tonode(cleanup_flushed(tonut(head))), true
end
-local actions = tasks.actions("shipouts") -- no extra arguments
+local actions = tasks.actions("shipouts")
-function handlers.finalize(head) -- problem, attr loaded before node, todo ...
- return actions(head)
+function handlers.finalize(head,where) -- problem, attr loaded before node, todo ...
+ return actions(head,where)
end
-- handlers.finalize = actions
diff --git a/tex/context/base/mkiv/node-syn.lua b/tex/context/base/mkiv/node-syn.lua
index 1b8e07382..9d716c44a 100644
--- a/tex/context/base/mkiv/node-syn.lua
+++ b/tex/context/base/mkiv/node-syn.lua
@@ -6,23 +6,124 @@ if not modules then modules = { } end modules ['node-syn'] = {
license = "see context related readme files"
}
--- Because we have these fields in some node that are used by sunctex, I decided (because
--- some users seem to like that feature) to implement a variant that might work out better
--- for ConTeXt. This is experimental code. I don't use it myself so it will take a while
--- to mature. There will be some helpers that one can use in more complex situations like
--- included xml files.
+-- Because we have these fields in some nodes that are used by synctex, and because
+-- some users seem to like that feature, I decided to implement a variant that might
+-- work out better for ConTeXt. This is experimental code. I don't use it myself so
+-- it will take a while to mature. There will be some helpers that one can use in
+-- more complex situations like included xml files. Currently (somewhere else) we
+-- take care of valid files, that is: we prohibit access to files in the tree
+-- because we don't want users to mess up styles.
--
--- It is unclear how the output gets interpreted. For instance, we only need to be able to
--- go back to a place where text is entered, but still we need all that redundant box
--- wrapping.
+-- It is unclear how the output gets interpreted but by reverse engineering (and
+-- stripping) the file generated by generic synctex, I got there eventually. For
+-- instance, we only need to be able to go back to a place where text is entered,
+-- but still we need all that redundant box wrapping. Anyway, I was able to get a
+-- minimal output and cross my fingers that the parser used in editors is not
+-- changed in fundamental ways.
--
--- Possible optimizations: pack whole lines.
+-- I only tested SumatraPDF with SciTE, for which one needs the following
+-- configuration in the viewer:
+--
+-- InverseSearchCmdLine = c:\data\system\scite\wscite\scite.exe "%f" "-goto:%l" $
+--
+-- Unfortunately synctex always removes the files at the end and not at the start
+-- (this happens in synctexterminate) so we need to work around that by using an
+-- intermediate file. This is no big deal in context (which has a runner) but
+-- definitely not nice.
+--
+-- The visualizer code is only needed for testing so we don't use fancy colors or
+-- provide more detail. After all we're only interested in rendered source text
+-- anyway. We try to play safe, which sometimes means that we'd rather not go
+-- anywhere than go someplace wrong.
+--
+-- A previous version had a mode for exporting boxes and such but I removed that
+-- as it made no sense. Also, collecting output in a table was not faster than
+-- directly piping to the file, probably because the amount is not that large. We
+-- keep some left-overs commented.
+--
+-- A significant reduction in file size can be realized when reusing the same
+-- values. Actually that triggered the current approach in ConTeXt. In the latest
+-- synctex parser vertical positions can be repeated by an "=" sign but for some
+-- reason only for that field. It's probably trivial to do that for all of "w h d v
+-- h" but it currently not the case so I'll delay that till all are supported. (We
+-- could benefit a lot from such a repetition scheme but not much from a "v" alone
+-- which -alas- indicates that synctex is still mostly a latex targeted story.)
+--
+-- It's kind of hard to fight the parser because it really wants to go to some file
+-- but maybe some day I can figure it out. Some untagged text (in the pdf) somehow
+-- gets seen as part of the last box. Anonymous content is simply not part of the
+-- concept. Using a dummy name doesn't help either as the editor gets a signal to
+-- open that dummy. Even an empty filename doesn't work.
+--
+-- We output really simple and compact code, like:
+--
+-- SyncTeX Version:1
+-- Input:1:e:/tmp/oeps.tex
+-- Input:2:c:/data/develop/context/sources/klein.tex
+-- Output:pdf
+-- Magnification:1000
+-- Unit:1
+-- X Offset:0
+-- Y Offset:0
+-- Content:
+-- !160
+-- {1
+-- h0,0:0,0,0,0,0
+-- v0,0:0,55380990:39158276,55380990,0
+-- h2,1:4661756,9176901:27969941,655360,327680
+-- h2,2:4661756,10125967:26048041,655360,327680
+-- h2,3:30962888,10125967:1668809,655360,327680
+-- h2,3:4661756,11075033:23142527,655360,327680
+-- h2,4:28046650,11075033:4585047,655360,327680
+-- h2,4:4661756,12024099:22913954,655360,327680
+-- h2,5:27908377,12024099:4723320,655360,327680
+-- h2,5:4661756,12973165:22918783,655360,327680
+-- h2,6:27884864,12973165:4746833,655360,327680
+-- h2,6:4661756,13922231:18320732,655360,327680
+-- )
+-- ]
+-- !533
+-- }1
+-- Input:3:c:/data/develop/context/sources/ward.tex
+-- !57
+-- {2
+-- h0,0:0,0,0,0,0
+-- v0,0:0,55380990:39158276,55380990,0
+-- h3,1:4661756,9176901:18813145,655360,327680
+-- h3,2:23713999,9176901:8917698,655360,327680
+-- h3,2:4661756,10125967:10512978,655360,327680
+-- h3,3:15457206,10125967:17174491,655360,327680
+-- h3,3:4661756,11075033:3571223,655360,327680
+-- h3,4:8459505,11075033:19885281,655360,327680
+-- h3,5:28571312,11075033:4060385,655360,327680
+-- h3,5:4661756,12024099:15344870,655360,327680
+-- )
+-- ]
+-- !441
+-- }2
+-- !8
+-- Postamble:
+-- Count:22
+-- !23
+-- Post scriptum:
+--
+-- But for some reason, when the pdf file has some extra content (like page numbers)
+-- the main document is consulted. Bah. It would be nice to have a mode for *only*
+-- looking at marked areas. Somehow that doesn't work, but maybe it depends on the parser.
+--
+-- Supporting reusable objects doesn't make much sense as these are often graphics or
+-- ornamental. They cannot have hyperlinks etc. (at least not without some hackery
+-- which I'm not willing to do) so basically they are sort of useless for text.
local type, rawset = type, rawset
local concat = table.concat
local formatters = string.formatters
+local replacesuffix, suffixonly, nameonly = file.replacesuffix, file.suffix, file.nameonly
+local openfile, renamefile, removefile = io.open, os.rename, os.remove
+
+local report_system = logs.reporter("system")
-local trace = false trackers.register("system.syntex.visualize", function(v) trace = v end)
+local tex = tex
local nuts = nodes.nuts
local tonut = nuts.tonut
@@ -35,19 +136,18 @@ local getnext = nuts.getnext
local getwhd = nuts.getwhd
local getwidth = nuts.getwidth
local getsubtype = nuts.getsubtype
-local getattr = nuts.getattr
local nodecodes = nodes.nodecodes
local kerncodes = nodes.kerncodes
+local glyph_code = nodecodes.glyph
+local disc_code = nodecodes.disc
local glue_code = nodecodes.glue
+local penalty_code = nodecodes.penalty
local kern_code = nodecodes.kern
-local kern_disc = nodecodes.disc
-local rule_code = nodecodes.rule
------ math_code = nodecodes.math
+----- rule_code = nodecodes.rule
local hlist_code = nodecodes.hlist
local vlist_code = nodecodes.vlist
-local glyph_code = nodecodes.glyph
local fontkern_code = kerncodes.fontkern
local insert_before = nuts.insert_before
@@ -56,82 +156,131 @@ local insert_after = nuts.insert_after
local nodepool = nuts.pool
local new_latelua = nodepool.latelua
local new_rule = nodepool.rule
-local new_hlist = nodepool.hlist
+local new_kern = nodepool.kern
local getdimensions = nuts.dimensions
local getrangedimensions = nuts.rangedimensions
-local a_fontkern = attributes.private("fontkern")
-
local get_synctex_fields = nuts.get_synctex_fields
local set_synctex_fields = nuts.set_synctex_fields
-local set_syntex_tag = nodes.set_synctex_tag
-
-local getcount = tex.getcount
-local setcount = tex.setcount
+local set_synctex_line = tex.set_synctex_line
+local set_synctex_tag = tex.set_synctex_tag
+local force_synctex_tag = tex.force_synctex_tag
+local force_synctex_line = tex.force_synctex_line
+----- get_synctex_tag = tex.get_synctex_tag
+----- get_synctex_line = tex.get_synctex_line
+local set_synctex_mode = tex.set_synctex_mode
local getpos = function()
getpos = backends.codeinjections.getpos
return getpos()
end
-local f_glue = formatters["g%i,%i:%i,%i"]
-local f_glyph = formatters["x%i,%i:%i,%i"]
-local f_kern = formatters["k%i,%i:%i,%i:%i"]
-local f_rule = formatters["r%i,%i:%i,%i:%i,%i,%i"]
-local f_hlist = formatters["[%i,%i:%i,%i:%i,%i,%i"]
-local f_vlist = formatters["(%i,%i:%i,%i:%i,%i,%i"]
-local s_hlist = "]"
-local s_vlist = ")"
-local f_hvoid = formatters["h%i,%i:%i,%i:%i,%i,%i"]
-local f_vvoid = formatters["v%i,%i:%i,%i:%i,%i,%i"]
+local foundintree = resolvers.foundintree
+
+local eol = "\010"
+
+----- f_glue = formatters["g%i,%i:%i,%i\010"]
+----- f_glyph = formatters["x%i,%i:%i,%i\010"]
+----- f_kern = formatters["k%i,%i:%i,%i:%i\010"]
+----- f_rule = formatters["r%i,%i:%i,%i:%i,%i,%i\010"]
+----- f_form = formatters["f%i,%i,%i\010"]
+local z_hlist = "[0,0:0,0:0,0,0\010"
+local z_vlist = "(0,0:0,0:0,0,0\010"
+----- z_xform = "<0,0:0,0,0\010" -- or so
+local s_hlist = "]\010"
+local s_vlist = ")\010"
+----- s_xform = ">\010"
+local f_hlist_1 = formatters["h%i,%i:%i,%i:%i,%i,%i\010"]
+local f_hlist_2 = formatters["h%i,%i:%i,%s:%i,%i,%i\010"]
+local f_vlist_1 = formatters["v%i,%i:%i,%i:%i,%i,%i\010"]
+local f_vlist_2 = formatters["v%i,%i:%i,%s:%i,%i,%i\010"]
+
+local synctex = luatex.synctex or { }
+luatex.synctex = synctex
-local characters = fonts.hashes.characters
+-- status stuff
-local synctex = { }
-luatex.synctex = synctex
+local enabled = false
+local paused = 0
+local used = false
+local never = false
+
+-- get rid of overhead
+
+if tex.set_synctex_no_files then
+ tex.set_synctex_no_files(1)
+end
-- the file name stuff
local noftags = 0
local stnums = { }
-local sttags = table.setmetatableindex(function(t,name)
- noftags = noftags + 1
- t[name] = noftags
- stnums[noftags] = name
- return noftags
+local nofblocked = 0
+local blockedfilenames = { }
+local blockedsuffixes = {
+ mkii = true,
+ mkiv = true,
+ mkvi = true,
+ mkix = true,
+ mkxi = true,
+ -- lfg = true,
+}
+
+local sttags = table.setmetatableindex(function(t,name)
+ if blockedsuffixes[suffixonly(name)] then
+ -- Just so that I don't get the ones on my development tree.
+ nofblocked = nofblocked + 1
+ return 0
+ elseif blockedfilenames[nameonly(name)] then
+ -- So we can block specific files.
+ nofblocked = nofblocked + 1
+ return 0
+ elseif foundintree(name) then
+ -- One shouldn't edit styles etc this way.
+ nofblocked = nofblocked + 1
+ return 0
+ else
+ noftags = noftags + 1
+ t[name] = noftags
+ stnums[noftags] = name
+ return noftags
+ end
end)
-function synctex.setfilename(name)
- if set_syntex_tag and name then
- set_syntex_tag(sttags[name])
+function synctex.blockfilename(name)
+ blockedfilenames[nameonly(name)] = name
+end
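+
+-- A hedged usage sketch (the filename is made up): blocking maps a file onto tag 0 in
+-- the metatable above, so it never shows up in the synctex output.
+--
+--   luatex.synctex.blockfilename("myproject-style.tex")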
+
+function synctex.setfilename(name,line)
+ if paused == 0 and force_synctex_tag and name then
+ force_synctex_tag(sttags[name])
+ if line then
+ force_synctex_line(line)
+ end
end
end
function synctex.resetfilename()
- if set_syntex_tag then
- local name = luatex.currentfile()
- if name then
- set_syntex_tag(name)
- end
+ if paused == 0 and force_synctex_tag then
+ force_synctex_tag(0)
+ force_synctex_line(0)
end
end
-- the node stuff
-local result = { }
-local r = 0
-local f = nil
-local nofsheets = 0
-local nofobjects = 0
-local last = 0
-local filesdone = 0
-local enabled = false
-local compact = true
+local filehandle = nil
+local nofsheets = 0
+local nofobjects = 0
+local last = 0
+local filesdone = 0
+local tmpfile = false
+local logfile = false
local function writeanchor()
- local size = f:seek("end")
- f:write("!" .. (size-last) .. "\n")
+ local size = filehandle:seek("end")
+ filehandle:write("!",size-last,eol)
last = size
end
@@ -139,291 +288,349 @@ local function writefiles()
local total = #stnums
if filesdone < total then
for i=filesdone+1,total do
- f:write("Input:"..i..":"..stnums[i].."\n")
+ filehandle:write("Input:",i,":",stnums[i],eol)
end
filesdone = total
end
end
+local function makenames()
+ logfile = replacesuffix(tex.jobname,"synctex")
+ tmpfile = replacesuffix(logfile,"syncctx")
+end
+
local function flushpreamble()
- local jobname = tex.jobname
- stnums[0] = jobname
- f = io.open(file.replacesuffix(jobname,"syncctx"),"w")
- f:write("SyncTeX Version:1\n")
- f:write("Input:0:"..jobname.."\n")
- writefiles()
- f:write("Output:pdf\n")
- f:write("Magnification:1000\n")
- f:write("Unit:1\n")
- f:write("X Offset:0\n")
- f:write("Y Offset:0\n")
- f:write("Content:\n")
- flushpreamble = writefiles
+ makenames()
+ filehandle = openfile(tmpfile,"wb")
+ if filehandle then
+ filehandle:write("SyncTeX Version:1",eol)
+ writefiles()
+ filehandle:write("Output:pdf",eol)
+ filehandle:write("Magnification:1000",eol)
+ filehandle:write("Unit:1",eol)
+ filehandle:write("X Offset:0",eol)
+ filehandle:write("Y Offset:0",eol)
+ filehandle:write("Content:",eol)
+ flushpreamble = function()
+ writefiles()
+ return filehandle
+ end
+ else
+ enabled = false
+ end
+ return filehandle
+end
+
+function synctex.wrapup()
+ if tmpfile then
+ renamefile(tmpfile,logfile)
+ end
end
local function flushpostamble()
+ if not filehandle then
+ return
+ end
writeanchor()
- f:write("Postamble:\n")
- f:write("Count:"..nofobjects.."\n")
+ filehandle:write("Postamble:",eol)
+ filehandle:write("Count:",nofobjects,eol)
writeanchor()
- f:write("Post scriptum:\n")
- f:close()
+ filehandle:write("Post scriptum:",eol)
+ filehandle:close()
enabled = false
end
-local pageheight = 0 -- todo: set before we do this!
+-- local function doaction(action,t,l,w,h,d)
+-- local x, y = getpos()
+-- filehandle:write(action(t,l,x,tex.pageheight-y,w,h,d))
+-- nofobjects = nofobjects + 1
+-- end
+--
+-- local function noaction(action)
+-- filehandle:write(action)
+-- nofobjects = nofobjects + 1
+-- end
+--
+-- local function b_vlist(head,current,t,l,w,h,d)
+-- return insert_before(head,current,new_latelua(function() doaction(f_vlist,t,l,w,h,d) end))
+-- end
+--
+-- local function b_hlist(head,current,t,l,w,h,d)
+-- return insert_before(head,current,new_latelua(function() doaction(f_hlist,t,l,w,h,d) end))
+-- end
+--
+-- local function e_vlist(head,current)
+-- return insert_after(head,current,new_latelua(noaction(s_vlist)))
+-- end
+--
+-- local function e_hlist(head,current)
+-- return insert_after(head,current,new_latelua(noaction(s_hlist)))
+-- end
+--
+-- local function x_vlist(head,current,t,l,w,h,d)
+-- return insert_before(head,current,new_latelua(function() doaction(f_vlist_1,t,l,w,h,d) end))
+-- end
+--
+-- local function x_hlist(head,current,t,l,w,h,d)
+-- return insert_before(head,current,new_latelua(function() doaction(f_hlist_1,t,l,w,h,d) end))
+-- end
+--
+-- generic
+--
+-- local function doaction(t,l,w,h,d)
+-- local x, y = getpos()
+-- filehandle:write(f_hlist_1(t,l,x,tex.pageheight-y,w,h,d))
+-- nofobjects = nofobjects + 1
+-- end
+
+local x_hlist do
-local function b_hlist(head,current,t,l,w,h,d)
- return insert_before(head,current,new_latelua(function()
+ local function doaction_1(t,l,w,h,d)
local x, y = getpos()
- r = r + 1
- result[r] = f_hlist(t,l,x,tex.pageheight-y,w,h,d)
+ filehandle:write(f_hlist_1(t,l,x,tex.pageheight-y,w,h,d))
nofobjects = nofobjects + 1
- end))
-end
+ end
-local function b_vlist(head,current,t,l,w,h,d)
- return insert_before(head,current,new_latelua(function()
+ -- local lastx, lasty, lastw, lasth, lastd
+ --
+ -- local function doaction_2(t,l,w,h,d)
+ -- local x, y = getpos()
+ -- y = tex.pageheight-y
+ -- filehandle:write(f_hlist_2(t,l,
+ -- x == lastx and "=" or x,
+ -- y == lasty and "=" or y,
+ -- w == lastw and "=" or w,
+ -- h == lasth and "=" or h,
+ -- d == lastd and "=" or d
+ -- ))
+ -- lastx, lasty, lastw, lasth, lastd = x, y, w, h, d
+ -- nofobjects = nofobjects + 1
+ -- end
+ --
+ -- but ... only y is supported:
+
+ local lasty = false
+
+ local function doaction_2(t,l,w,h,d)
local x, y = getpos()
- r = r + 1
- result[r] = f_vlist(t,l,x,tex.pageheight-y,w,h,d)
+ y = tex.pageheight - y
+ filehandle:write(f_hlist_2(t,l,x,y == lasty and "=" or y,w,h,d))
+ lasty = y
nofobjects = nofobjects + 1
- end))
-end
+ end
-local function e_hlist(head,current)
- return insert_after(head,current,new_latelua(function()
- r = r + 1
- result[r] = s_hlist
- nofobjects = nofobjects + 1
- end))
-end
+ local doaction = doaction_1
-local function e_vlist(head,current)
- return insert_after(head,current,new_latelua(function()
- r = r + 1
- result[r] = s_vlist
- nofobjects = nofobjects + 1
- end))
-end
+ x_hlist = function(head,current,t,l,w,h,d)
+ return insert_before(head,current,new_latelua(function() doaction(t,l,w,h,d) end))
+ end
-local function x_hlist(head,current,t,l,w,h,d)
- return insert_before(head,current,new_latelua(function()
- local x, y = getpos()
- r = r + 1
- result[r] = f_hvoid(t,l,x,tex.pageheight-y,w,h,d)
- nofobjects = nofobjects + 1
- end))
-end
+ directives.register("system.synctex.compression", function(v)
+ doaction = tonumber(v) == 2 and doaction_2 or doaction_1
+ end)
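+
+    -- The directive is normally set from the TeX end, e.g. (assuming the standard
+    -- directives interface) \enabledirectives[system.synctex.compression=2], which
+    -- selects doaction_2 and its "=" shorthand for repeated vertical positions.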
-local function x_vlist(head,current,t,l,w,h,d)
- return insert_before(head,current,new_latelua(function()
- local x, y = getpos()
- r = r + 1
- result[r] = f_vvoid(t,l,x,tex.pageheight-y,w,h,d)
- nofobjects = nofobjects + 1
- end))
end
--- local function x_glyph(head,current,t,l)
--- return insert_before(head,current,new_latelua(function()
--- local x, y = getpos()
--- r = r + 1
--- result[r] = f_glyph(t,l,x,tex.pageheight-y)
--- nofobjects = nofobjects + 1
--- end))
--- end
+-- color is already handled so no colors
--- local function x_glue(head,current,t,l)
--- return insert_before(head,current,new_latelua(function()
--- local x, y = getpos()
--- r = r + 1
--- result[r] = f_glue(t,l,x,tex.pageheight-y)
--- nofobjects = nofobjects + 1
--- end))
--- end
+local collect = nil
+local fulltrace = false
+local trace = false
+local height = 10 * 65536
+local depth = 5 * 65536
+local traceheight = 32768
+local tracedepth = 32768
--- local function x_kern(head,current,t,l,k)
--- return insert_before(head,current,new_latelua(function()
--- local x, y = getpos()
--- r = r + 1
--- result[r] = f_kern(t,l,x,tex.pageheight-y,k)
--- nofobjects = nofobjects + 1
--- end))
--- end
+trackers.register("system.synctex.visualize", function(v)
+ trace = v
+ fulltrace = v == "real"
+end)
--- local function x_rule(head,current,t,l,w,h,d)
--- return insert_before(head,current,new_latelua(function()
--- local x, y = getpos()
--- r = r + 1
--- result[r] = f_rule(t,l,x,tex.pageheight-y,w,h,d)
--- nofobjects = nofobjects + 1
--- end))
--- end
+local function inject(head,first,last,tag,line)
+ local w, h, d = getdimensions(first,getnext(last))
+ if h < height then
+ h = height
+ end
+ if d < depth then
+ d = depth
+ end
+ if trace then
+ head = insert_before(head,first,new_rule(w,fulltrace and h or traceheight,fulltrace and d or tracedepth))
+ head = insert_before(head,first,new_kern(-w))
+ end
+ head = x_hlist(head,first,tag,line,w,h,d)
+ return head
+end
-local function collect(head,t,l)
+local function collect_min(head)
local current = head
while current do
local id = getid(current)
if id == glyph_code then
local first = current
local last = current
+ local tag = 0
+ local line = 0
while true do
- id = getid(current)
- if id == glyph_code or id == disc_code then
+ if id == glyph_code then
+ local tc, lc = get_synctex_fields(current)
+ if tc and tc > 0 then
+ tag = tc
+ line = lc
+ end
last = current
- elseif id == kern_code and (getsubtype(current) == fontkern_code or getattr(current,a_fontkern)) then
+ elseif id == disc_code or (id == kern_code and getsubtype(current) == fontkern_code) then
last = current
else
- if id == glue_code then
- -- we could go on when we're in the same t/l run
- local tc, lc = get_synctex_fields(current)
- if tc > 0 then
- t, l = tc, lc
- end
- id = nil -- so no test later on
- end
- local w, h, d = getdimensions(first,getnext(last))
- -- local w, h, d = getrangedimensions(head,first,getnext(last))
- if trace then
- -- color is already handled so no colors
- head = insert_before(head,first,new_hlist(new_rule(w,32768,32768)))
+ if tag > 0 then
+ head = inject(head,first,last,tag,line)
end
-if h < 655360 then
- h = 655360
-end
-if d < 327680 then
- d = 327680
-end
- head = x_hlist(head,first,t,l,w,h,d)
break
end
current = getnext(current)
- if not current then
- local w, h, d = getdimensions(first,getnext(last))
- -- local w, h, d = getrangedimensions(head,first,getnext(last))
- if trace then
- -- color is already handled so no colors
- head = insert_before(head,first,new_hlist(new_rule(w,32768,32768)))
+ if current then
+ id = getid(current)
+ else
+ if tag > 0 then
+ head = inject(head,first,last,tag,line)
end
-if h < 655360 then
- h = 655360
-end
-if d < 327680 then
- d = 327680
-end
- head = x_hlist(head,first,t,l,w,h,d)
return head
end
end
end
- if id == hlist_code then
+ -- pick up (as id can have changed)
+ if id == hlist_code or id == vlist_code then
local list = getlist(current)
- local tc, lc = get_synctex_fields(current)
- if tc > 0 then
- t, l = tc, lc
- end
- if compact then
- if list then
- local l = collect(list,t,l)
- if l ~= list then
- setlist(current,l)
- end
+ if list then
+ local l = collect(list)
+ if l ~= list then
+ setlist(current,l)
end
- else
- local w, h, d = getwhd(current)
- if w == 0 or (h == 0 and d == 0) then
- if list then
- local l = collect(list,t,l)
- if l ~= list then
- setlist(current,l)
+ end
+ end
+ current = getnext(current)
+ end
+ return head
+end
+
+local function inject(parent,head,first,last,tag,line)
+ local w, h, d = getrangedimensions(parent,first,getnext(last))
+ if h < height then
+ h = height
+ end
+ if d < depth then
+ d = depth
+ end
+ if trace then
+ head = insert_before(head,first,new_rule(w,fulltrace and h or traceheight,fulltrace and d or tracedepth))
+ head = insert_before(head,first,new_kern(-w))
+ end
+ head = x_hlist(head,first,tag,line,w,h,d)
+ return head
+end
+
+local function collect_max(head,parent)
+ local current = head
+ while current do
+ local id = getid(current)
+ if id == glyph_code then
+ local first = current
+ local last = current
+ local tag = 0
+ local line = 0
+ while true do
+ if id == glyph_code then
+ local tc, lc = get_synctex_fields(current)
+ if tc and tc > 0 then
+ if tag > 0 and (tag ~= tc or line ~= lc) then
+ head = inject(parent,head,first,last,tag,line)
+ first = current
end
+ tag = tc
+ line = lc
+ last = current
+ else
+ if tag > 0 then
+ head = inject(parent,head,first,last,tag,line)
+ tag = 0
+ end
+ first = nil
+ last = nil
end
- elseif list then
- -- head = b_hlist(head,current,t,l,w,h,d)
- head = b_hlist(head,current,0,0,w,h,d)
- local l = collect(list,t,l)
- if l ~= list then
- setlist(current,l)
+ elseif id == disc_code then
+ if not first then
+ first = current
end
- head, current = e_hlist(head,current)
- else
- -- head = x_hlist(head,current,t,l,w,h,d)
- head = x_hlist(head,current,0,0,w,h,d)
- end
- end
- elseif id == vlist_code then
- local list = getlist(current)
- local tc, lc = get_synctex_fields(current)
- if tc > 0 then
- t, l = tc, lc
- end
- if compact then
- if list then
- local l = collect(list,t,l)
- if l ~= list then
- setlist(current,l)
+ last = current
+ elseif id == kern_code and getsubtype(current) == fontkern_code then
+ if first then
+ last = current
end
- end
- else
- local w, h, d = getwhd(current)
- if w == 0 or (h == 0 and d == 0) then
- if list then
- local l = collect(list,t,l)
- if l ~= list then
- setlist(current,l)
+ elseif id == glue_code then
+ if tag > 0 then
+ local tc, lc = get_synctex_fields(current)
+ if tc and tc > 0 then
+ if tag ~= tc or line ~= lc then
+ head = inject(parent,head,first,last,tag,line)
+ tag = 0
+ break
+ end
+ else
+ head = inject(parent,head,first,last,tag,line)
+ tag = 0
+ break
end
+ else
+ tag = 0
+ break
end
- elseif list then
- -- head = b_vlist(head,current,t,l,w,h,d)
- head = b_vlist(head,current,0,0,w,h,d)
- local l = collect(list,t,l)
- if l ~= list then
- setlist(current,l)
+ id = nil -- so no test later on
+ elseif id == penalty_code then
+ -- go on (and be nice for math)
+ else
+ if tag > 0 then
+ head = inject(parent,head,first,last,tag,line)
+ tag = 0
end
- head, current = e_vlist(head,current)
+ break
+ end
+ current = getnext(current)
+ if current then
+ id = getid(current)
else
- -- head = x_vlist(head,current,t,l,w,h,d)
- head = x_vlist(head,current,0,0,w,h,d)
+ if tag > 0 then
+ head = inject(parent,head,first,last,tag,line)
+ end
+ return head
end
end
- elseif id == glue_code then
- local tc, lc = get_synctex_fields(current)
- if tc > 0 then
- t, l = tc, lc
+ end
+    -- pick up (as id can have changed)
+ if id == hlist_code or id == vlist_code then
+ local list = getlist(current)
+ if list then
+ local l = collect(list,current)
+ if l ~= list then
+ setlist(current,l)
+ end
end
- -- head = x_glue(head,current,t,l)
- -- elseif id == kern_code then
- -- local tc, lc = get_synctex_fields(current)
- -- if tc > 0 then
- -- t, l = tc, lc
- -- end
- -- -- local k = getwidth(current)
- -- -- if k ~= 0 then
- -- -- head = x_kern(head,current,t,l,k)
- -- -- end
- -- elseif id == rule_code then
- -- local tc, lc = get_synctex_fields(current)
- -- if tc > 0 then
- -- t, l = tc, lc
- -- end
- -- -- if t > 0 and l > 0 then
- -- -- local w, h, d = getwhd(current)
- -- -- head = x_rule(head,current,t,l,w,h,d)
- -- -- end
end
current = getnext(current)
end
return head
end
--- range of same numbers
+collect = collect_max
-function synctex.collect(head)
+function synctex.collect(head,where)
if enabled then
- result, r = { }, 0
- head = collect(tonut(head),0,0)
- return tonode(head), true
+ if where == "object" then
+ return head, false
+ else
+ local h = tonut(head)
+ h = collect(h,h)
+ return tonode(h), true
+ end
else
return head, false
end
@@ -431,74 +638,159 @@ end
-- also no solution for bad first file resolving in sumatra
-function synctex.flush()
+function synctex.start()
if enabled then
nofsheets = nofsheets + 1 -- could be realpageno
- flushpreamble()
- writeanchor()
- f:write("{"..nofsheets.."\n")
- if compact then
- f:write(f_vlist(0,0,0,0,tex.pagewidth,tex.pageheight,0))
- f:write("\n")
- end
- f:write(concat(result,"\n"))
- if compact then
- f:write("\n")
- f:write(s_vlist)
+ if flushpreamble() then
+ writeanchor()
+ filehandle:write("{",nofsheets,eol)
+ -- this seems to work:
+ local h = tex.pageheight
+ local w = tex.pagewidth
+ filehandle:write(z_hlist)
+ filehandle:write(f_vlist_1(0,0,0,h,w,h,0))
end
- f:write("\n")
+ end
+end
+
+function synctex.stop()
+ if enabled then
+ filehandle:write(s_vlist,s_hlist)
writeanchor()
- f:write("}"..nofsheets.."\n")
+ filehandle:write("}",nofsheets,eol)
nofobjects = nofobjects + 2
- result, r = { }, 0
end
end
+local enablers = { }
+local disablers = { }
+
+function synctex.registerenabler(f)
+ enablers[#enablers+1] = f
+end
+
+function synctex.registerdisabler(f)
+ disablers[#disablers+1] = f
+end
+
function synctex.enable()
- if not enabled and node.set_synctex_mode then
+ if not never and not enabled then
enabled = true
- node.set_synctex_mode(1)
- tex.normalsynctex = 0
- nodes.tasks.appendaction("shipouts", "after", "nodes.synctex.collect")
+ set_synctex_mode(3) -- we want details
+ if not used then
+ nodes.tasks.appendaction("shipouts", "after", "luatex.synctex.collect")
+ report_system("synctex functionality is enabled, expect 5-10 pct runtime overhead!")
+ used = true
+ end
+ for i=1,#enablers do
+ enablers[i](true)
+ end
+ end
+end
+
+function synctex.disable()
+ if enabled then
+ set_synctex_mode(0)
+ report_system("synctex functionality is disabled!")
+ enabled = false
+ for i=1,#disablers do
+ disablers[i](false)
+ end
end
end
function synctex.finish()
if enabled then
flushpostamble()
+ else
+ makenames()
+ removefile(logfile)
+ removefile(tmpfile)
end
end
+local filename = nil
+
+function synctex.pause()
+ paused = paused + 1
+ if enabled and paused == 1 then
+ set_synctex_mode(0)
+ end
+end
+
+function synctex.resume()
+ if enabled and paused == 1 then
+ set_synctex_mode(3)
+ end
+ paused = paused - 1
+end
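+
+-- Pause and resume are meant to be nested around content that should not be tagged;
+-- the object packing code pairs them at the TeX end (\synctexpause ... \synctexresume).
+-- A rough sketch at the Lua end:
+--
+--   synctex.pause()
+--   -- typeset or pack something that synctex should ignore
+--   synctex.resume()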
+
-- not the best place
luatex.registerstopactions(synctex.finish)
-nodes.tasks.appendaction("shipouts", "after", "luatex.synctex.collect")
-
--- moved here
+statistics.register("synctex tracing",function()
+ if used then
+ return string.format("%i referenced files, %i files ignored, %i objects flushed, logfile: %s",
+ noftags,nofblocked,nofobjects,logfile)
+ end
+end)
-local report_system = logs.reporter("system")
-local synctex = false
+local implement = interfaces.implement
+local variables = interfaces.variables
-directives.register("system.synctex", function(v)
- if v == "context" then
- luatex.synctex.enable()
- setcount("normalsynctex",0)
- synctex = true
+function synctex.setup(t)
+ if t.state == variables.never then
+ synctex.disable() -- just in case
+ never = true
+ return
+ end
+ if t.method == variables.max then
+ collect = collect_max
else
- v = tonumber(v) or (toboolean(v,true) and 1) or (v == "zipped" and 1) or (v == "unzipped" and -1) or 0
- setcount("normalsynctex",v)
- synctex = v ~= 0
+ collect = collect_min
end
- if synctex then
- report_system("synctex functionality is enabled (%s), expect runtime overhead!",tostring(v))
+ if t.state == variables.start then
+ synctex.enable()
else
- report_system("synctex functionality is disabled!")
+ synctex.disable()
end
-end)
+end
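+
+-- A hedged usage sketch at the Lua end (normally this is driven by the setupsynctex
+-- interface implemented below; the values are the usual interface variables):
+--
+--   luatex.synctex.setup { state = interfaces.variables.start, method = interfaces.variables.max }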
-statistics.register("synctex tracing",function()
- if synctex or getcount("normalsynctex") ~= 0 then
- return "synctex has been enabled (extra log file generated)"
- end
-end)
+implement {
+ name = "synctexblockfilename",
+ arguments = "string",
+ actions = synctex.blockfilename,
+}
+
+implement {
+ name = "synctexsetfilename",
+ arguments = "string",
+ actions = synctex.setfilename,
+}
+
+implement {
+ name = "synctexresetfilename",
+ actions = synctex.resetfilename,
+}
+
+implement {
+ name = "setupsynctex",
+ actions = synctex.setup,
+ arguments = {
+ {
+ { "state" },
+ { "method" },
+ },
+ },
+}
+
+implement {
+ name = "synctexpause",
+ actions = synctex.pause,
+}
+
+implement {
+ name = "synctexresume",
+ actions = synctex.resume,
+}
diff --git a/tex/context/base/mkiv/node-tra.lua b/tex/context/base/mkiv/node-tra.lua
index 8c79e0ab8..f12599866 100644
--- a/tex/context/base/mkiv/node-tra.lua
+++ b/tex/context/base/mkiv/node-tra.lua
@@ -11,6 +11,7 @@ if not modules then modules = { } end modules ['node-tra'] = {
might become a runtime module instead. This module will be cleaned up!
--ldx]]--
+local next = next
local utfchar = utf.char
local format, match, gmatch, concat, rep = string.format, string.match, string.gmatch, table.concat, string.rep
local lpegmatch = lpeg.match
@@ -38,7 +39,6 @@ local nuts = nodes.nuts
local tonut = nuts.tonut
local tonode = nuts.tonode
-local getfield = nuts.getfield
local getnext = nuts.getnext
local getprev = nuts.getprev
local getid = nuts.getid
@@ -298,34 +298,38 @@ nodes.showsimplelist = function(h,depth) showsimplelist(h,depth,0) end
local function listtoutf(h,joiner,textonly,last,nodisc)
local w = { }
+ local n = 0
while h do
local c, id = isglyph(h)
if c then
- w[#w+1] = c >= 0 and utfchar(c) or formatters["<%i>"](c)
+ n = n + 1 ; w[n] = c >= 0 and utfchar(c) or formatters["<%i>"](c)
if joiner then
- w[#w+1] = joiner
+ n = n + 1 ; w[n] = joiner
end
elseif id == disc_code then
local pre, pos, rep = getdisc(h)
if not nodisc then
- w[#w+1] = formatters["[%s|%s|%s]"] (
+ n = n + 1 ; w[n] = formatters["[%s|%s|%s]"] (
pre and listtoutf(pre,joiner,textonly) or "",
pos and listtoutf(pos,joiner,textonly) or "",
rep and listtoutf(rep,joiner,textonly) or ""
)
elseif rep then
- w[#w+1] = listtoutf(rep,joiner,textonly) or ""
+ n = n + 1 ; w[n] = listtoutf(rep,joiner,textonly) or ""
+ end
+ if joiner then
+ n = n + 1 ; w[n] = joiner
end
elseif textonly then
if id == glue_code then
if getwidth(h) > 0 then
- w[#w+1] = " "
+ n = n + 1 ; w[n] = " "
end
elseif id == hlist_code or id == vlist_code then
- w[#w+1] = "[]"
+ n = n + 1 ; w[n] = "[]"
end
else
- w[#w+1] = "[-]"
+ n = n + 1 ; w[n] = "[-]"
end
if h == last then
break
@@ -333,7 +337,7 @@ local function listtoutf(h,joiner,textonly,last,nodisc)
h = getnext(h)
end
end
- return concat(w)
+ return concat(w,"",1,(w[n] == joiner) and (n-1) or n)
end
function nodes.listtoutf(h,joiner,textonly,last,nodisc)
@@ -376,8 +380,7 @@ local f_z_f = formatters["%0.5Fpt minus %0.5F%s"]
local f_z_m = formatters["%0.5Fpt minus %0.5Fpt"]
local f_z_z = formatters["%0.5Fpt"]
-local tonut = nodes.tonut
-local getfield = nodes.nuts.getfield
+local tonut = nodes.tonut
local function nodetodimen(n)
n = tonut(n)
diff --git a/tex/context/base/mkiv/node-tsk.lua b/tex/context/base/mkiv/node-tsk.lua
index c33f0e9f4..1ce7ab1dc 100644
--- a/tex/context/base/mkiv/node-tsk.lua
+++ b/tex/context/base/mkiv/node-tsk.lua
@@ -386,7 +386,7 @@ tasks.new {
tasks.new {
name = "shipouts",
- arguments = 0,
+ arguments = 1,
-- nostate = true, -- maybe but only for main ones so little gain
processor = nodeprocessor,
sequence = {
diff --git a/tex/context/base/mkiv/node-tst.lua b/tex/context/base/mkiv/node-tst.lua
index 1109f28a3..7ad35bd71 100644
--- a/tex/context/base/mkiv/node-tst.lua
+++ b/tex/context/base/mkiv/node-tst.lua
@@ -26,7 +26,6 @@ local belowdisplayshortskip_code = skipcodes.belowdisplayshortskip
local nuts = nodes.nuts
-local getfield = nuts.getfield
local getnext = nuts.getnext
local getprev = nuts.getprev
local getid = nuts.getid
diff --git a/tex/context/base/mkiv/node-typ.lua b/tex/context/base/mkiv/node-typ.lua
index dea48cda8..021bfefc2 100644
--- a/tex/context/base/mkiv/node-typ.lua
+++ b/tex/context/base/mkiv/node-typ.lua
@@ -15,12 +15,10 @@ local nuts = nodes.nuts
local tonode = nuts.tonode
local tonut = nuts.tonut
-local setfield = nuts.setfield
local setlink = nuts.setlink
local setchar = nuts.setchar
local setattrlist = nuts.setattrlist
-local getfield = nuts.getfield
local getfont = nuts.getfont
local getattrlist = nuts.getattrlist
diff --git a/tex/context/base/mkiv/pack-box.mkiv b/tex/context/base/mkiv/pack-box.mkiv
index 8279fcd71..c53b4520c 100644
--- a/tex/context/base/mkiv/pack-box.mkiv
+++ b/tex/context/base/mkiv/pack-box.mkiv
@@ -1138,4 +1138,21 @@
\box\nextbox
\egroup}
+\unexpanded\def\overlayimage#1%
+ {\vpack to \overlayheight\bgroup
+ \vss
+ \hpack to \overlaywidth\bgroup
+ \hss
+ \externalfigure[#1][\c!factor=\v!min,\c!equalwidth=\overlaywidth,\c!equalheight=\overlayheight]%
+ \hss
+ \egroup
+ \vss
+ \egroup}
+
+\unexpanded\def\clippedoverlayimage#1%
+ {\clip
+ [\c!width=\overlaywidth,\c!height=\overlayheight]
+ {\overlayimage{#1}}}
+
+
\protect \endinput
diff --git a/tex/context/base/mkiv/pack-com.mkiv b/tex/context/base/mkiv/pack-com.mkiv
index 8abee1daf..59354208e 100644
--- a/tex/context/base/mkiv/pack-com.mkiv
+++ b/tex/context/base/mkiv/pack-com.mkiv
@@ -78,7 +78,7 @@
\newcount\c_pack_combinations_nesting % local
\newcount\c_pack_combinations_x % global
-\newcount\c_pack_combinations_n % global
+\newcount\c_pack_combinations_y % global
\newcount\c_pack_combinations_max % global
\newbox \b_pack_combinations_captions % global
\newbox \b_pack_combinations_temp % global
@@ -93,7 +93,7 @@
\initializeboxstack{\??combination temp}
\newcount\c_pack_combinations_x_saved
-\newcount\c_pack_combinations_n_saved
+\newcount\c_pack_combinations_y_saved
\newcount\c_pack_combinations_max_saved
\newbox \b_pack_combinations_captions_saved
\newbox \b_pack_combinations_temp_saved
@@ -106,7 +106,7 @@
{\advance\c_pack_combinations_nesting\plusone
\ifnum\c_pack_combinations_nesting>\plusone
\c_pack_combinations_x_saved \c_pack_combinations_x
- \c_pack_combinations_n_saved \c_pack_combinations_n
+ \c_pack_combinations_y_saved \c_pack_combinations_y
\c_pack_combinations_max_saved\c_pack_combinations_max
\setbox\b_pack_combinations_captions_saved\box\b_pack_combinations_captions
\setbox\b_pack_combinations_temp_saved \box\b_pack_combinations_temp
@@ -119,7 +119,7 @@
\def\pack_combinations_pop
{\ifnum\c_pack_combinations_nesting>\plusone
\global\c_pack_combinations_x \c_pack_combinations_x_saved
- \global\c_pack_combinations_n \c_pack_combinations_n_saved
+ \global\c_pack_combinations_y \c_pack_combinations_y_saved
\global\c_pack_combinations_max\c_pack_combinations_max_saved
\global\setbox\b_pack_combinations_captions\box\b_pack_combinations_captions_saved
\global\setbox\b_pack_combinations_temp \box\b_pack_combinations_temp_saved
@@ -204,7 +204,7 @@
% \unexpanded\def\stopcombination
% {\bgroup
% \scratchtoks{{}}%
-% \dorecurse\c_pack_combinations_n
+% \dorecurse\c_pack_combinations_y
% {\scratchtoks\expandafter{\the\scratchtoks{}{}}}%
% \expandafter\egroup\the\scratchtoks
% \egroup
@@ -219,7 +219,7 @@
\unexpanded\def\pack_common_caption_stop {\removeunwantedspaces\egroup}
\unexpanded\def\stopcombination
- {\bgroup\normalexpanded{\egroup{}\ntimes{{}{}}\c_pack_combinations_n}% brr
+ {\bgroup\normalexpanded{\egroup{}\ntimes{{}{}}\c_pack_combinations_y}% brr
\dostoptagged
\egroup
\pack_combinations_pop
@@ -271,15 +271,20 @@
\let\combination\empty % permits \combination{}{} handy for cld
\normalexpanded{\pack_combinations_start_indeed[\currentcombinationspec]}}
+\let\pack_combinations_check_x_y\relax
+
\unexpanded\def\pack_combinations_start_indeed[#1*#2*#3]%
{\global\c_pack_combinations_x#1\relax
- \global\c_pack_combinations_n#2\relax
+ \global\c_pack_combinations_y#2\relax
+ \setexpandedcombinationparameter\c!nx{\the\c_pack_combinations_x}% in case we access it
+ \setexpandedcombinationparameter\c!ny{\the\c_pack_combinations_y}% in case we access it
+ \pack_combinations_check_x_y
\dotagcombination
\global\setbox\b_pack_combinations_captions\emptybox
\global\c_pack_combinations_max\c_pack_combinations_x
- \multiply\c_pack_combinations_n\c_pack_combinations_x
+ \multiply\c_pack_combinations_y\c_pack_combinations_x
\tabskip\zeropoint
- \halign \ifx\p_width\v!fit\else to \p_width \fi \bgroup
+ \halign \ifx\p_width\v!fit\else to \p_width \fi \bgroup % repetitive preamble
\aligntab
\m_pack_combinations_leftfiller
\alignmark\alignmark
@@ -382,8 +387,8 @@
\m_pack_combinations_valigner{\box\b_pack_combinations_content}%
% we need to save the caption for a next alignment line
\pack_combinations_save_caption}%
- \ifnum\c_pack_combinations_n>\plusone
- \global\advance\c_pack_combinations_n\minusone
+ \ifnum\c_pack_combinations_y>\plusone
+ \global\advance\c_pack_combinations_y\minusone
\global\advance\c_pack_combinations_x\minusone
\ifcase\c_pack_combinations_x
\doubleexpandafter\pack_combinations_pickup_package_pair_a
@@ -486,19 +491,40 @@
\let\stopfloatcombination\relax
+\unexpanded\def\pack_combinations_float_hack_a#1%
+ {\strc_floats_build_box_separate_split{\getlocalfloat{#1}}%
+ \box\b_strc_floats_separate_content}
+
+\unexpanded\def\pack_combinations_float_hack_b#1%
+ {\box\b_strc_floats_separate_caption}
+
\def\pack_combinations_start_float[#1][#2]%
{\ifinsidefloat\else\dontleavehmode\fi % tricky, floatcombinations fail to align well otherwise
\vbox\bgroup
+ \strc_floats_build_box_separate_set
%\insidecolumnstrue % trick, forces no centering, todo: proper switch/feature
\postcenterfloatmethod\zerocount
\forcelocalfloats
\unexpanded\def\stopfloatcombination{\pack_combinations_stop_float{#1}}}
+\def\pack_combinations_float_check_x_y
+ {\ifnum\numexpr\c_pack_combinations_x*\c_pack_combinations_y\relax<\noflocalfloats\relax
+ \global\c_pack_combinations_x\noflocalfloats
+ \global\c_pack_combinations_y\plusone
+ \fi
+ \let\pack_combinations_check_x_y\relax}%
+
\def\pack_combinations_stop_float#1%
{\scratchtoks\emptytoks
\dorecurse\noflocalfloats
- {\appendetoks{\getlocalfloat{\recurselevel}}{}\to\scratchtoks}% brrr
- \expanded{\startcombination[#1]\the\scratchtoks}\stopcombination
+ {\appendetoks
+ {\pack_combinations_float_hack_a{\recurselevel}}%
+ {\pack_combinations_float_hack_b{\recurselevel}}%
+ \to\scratchtoks}% brrr
+ \let\pack_combinations_check_x_y\pack_combinations_float_check_x_y
+ \doifassignmentelse{#1}
+ {\expanded{\startcombination[#1]\the\scratchtoks}\stopcombination}%
+ {\expanded{\startcombination[#1]\the\scratchtoks}\stopcombination}%
\resetlocalfloats
\egroup}
@@ -771,25 +797,25 @@
\pairedboxparameter\c!inbetween
\box\b_pack_pairedboxes_second}
-\setvalue{\??pairedboxalign \v!left}% 0
+\setvalue{\??pairedboxalign\v!left}% 0
{\let\pack_pairedboxes_align_l\relax
\let\pack_pairedboxes_align_r\hss
\let\pack_pairedboxes_align_t\relax
\let\pack_pairedboxes_align_b\relax}
-\setvalue{\??pairedboxalign \v!right}% 1
+\setvalue{\??pairedboxalign\v!right}% 1
{\let\pack_pairedboxes_align_l\hss
\let\pack_pairedboxes_align_r\relax
\let\pack_pairedboxes_align_t\relax
\let\pack_pairedboxes_align_b\relax}
-\setvalue{\??pairedboxalign \v!high}% 2
+\setvalue{\??pairedboxalign\v!high}% 2
{\let\pack_pairedboxes_align_l\relax
\let\pack_pairedboxes_align_r\relax
\let\pack_pairedboxes_align_t\relax
\let\pack_pairedboxes_align_b\vss}
-\setvalue{\??pairedboxalign \v!low}% 3
+\setvalue{\??pairedboxalign\v!low}% 3
{\let\pack_pairedboxes_align_l\relax
\let\pack_pairedboxes_align_r\relax
\let\pack_pairedboxes_align_t\vss
diff --git a/tex/context/base/mkiv/pack-obj.lua b/tex/context/base/mkiv/pack-obj.lua
index cba0dcf8f..f68c1ba0c 100644
--- a/tex/context/base/mkiv/pack-obj.lua
+++ b/tex/context/base/mkiv/pack-obj.lua
@@ -27,6 +27,7 @@ local new_latelua = nuts.pool.latelua
local settexdimen = tokens.setters.dimen
local gettexbox = tokens.getters.box
+local settexbox = tokens.setters.box
local gettexdimen = tokens.getters.dimen
local gettexcount = tokens.getters.count
@@ -124,22 +125,37 @@ objects = {
n = 0,
}
-function objects.register(ns,id,b,referenced)
+local objects = objects
+
+function objects.register(ns,id,b,referenced,offset,mode)
objects.n = objects.n + 1
- nodes.handlers.finalize(gettexbox(b))
- data[ns][id] = {
- codeinjections.registerboxresource(b), -- a box number
- gettexdimen("objectoff"),
- referenced
- }
+ nodes.handlers.finalize(gettexbox(b),"object")
+ if mode == 0 then
+ -- tex
+ data[ns][id] = {
+ codeinjections.registerboxresource(b), -- a box number
+ offset,
+ referenced or false,
+ mode,
+ }
+ else
+ -- box (backend)
+ data[ns][id] = {
+ codeinjections.registerboxresource(b,offset), -- a box number
+ false,
+ referenced,
+ mode,
+ }
+ end
end
-function objects.restore(ns,id)
+function objects.restore(ns,id) -- why not just pass a box number here too (ok, we also set offset)
local d = data[ns][id]
if d then
local index = d[1]
local offset = d[2]
local status = d[3]
+ local mode = d[4]
local hbox = codeinjections.restoreboxresource(index) -- a nut !
if status then
local list = getlist(hbox)
@@ -149,10 +165,10 @@ function objects.restore(ns,id)
setlink(list,page)
end
setbox("objectbox",hbox)
- settexdimen("objectoff",offset)
+ settexdimen("objectoff",offset or 0)
else
setbox("objectbox",nil)
- settexdimen("objectoff",0)
+ settexdimen("objectoff",0) -- for good old times
end
end
@@ -161,7 +177,7 @@ function objects.dimensions(index)
if d then
return codeinjections.boxresourcedimensions(d[1])
else
- return 0, 0, 0
+ return 0, 0, 0, 0
end
end
@@ -184,13 +200,13 @@ end
implement {
name = "registerreferencedobject",
- arguments = { "string", "string", "integer", true },
+ arguments = { "string", "string", "integer", true, "dimension", "integer" },
actions = objects.register,
}
implement {
name = "registerobject",
- arguments = { "string", "string", "integer" },
+ arguments = { "string", "string", "integer", false, "dimension", "integer" },
actions = objects.register,
}
@@ -238,15 +254,15 @@ implement {
name = "getobjectdimensions",
arguments = { "string", "string" },
actions = function(ns,id)
- local o = data[ns][id]
- local w, h, d = 0, 0, 0
- if d then
- w, h, d = codeinjections.boxresourcedimensions(o[1])
+ local object = data[ns][id]
+ local w, h, d, o = 0, 0, 0, 0
+ if object then
+ w, h, d, o = codeinjections.boxresourcedimensions(object[1])
end
settexdimen("objectwd",w or 0)
settexdimen("objectht",h or 0)
settexdimen("objectdp",d or 0)
- settexdimen("objectoff",o[2])
+ settexdimen("objectoff",o or #objects > 2 and object[2] or 0)
end
}
diff --git a/tex/context/base/mkiv/pack-obj.mkiv b/tex/context/base/mkiv/pack-obj.mkiv
index 23608846d..aeb5cb4f4 100644
--- a/tex/context/base/mkiv/pack-obj.mkiv
+++ b/tex/context/base/mkiv/pack-obj.mkiv
@@ -15,7 +15,7 @@
\unprotect
-\registerctxluafile{pack-obj}{1.001}
+\registerctxluafile{pack-obj}{}
%D \macros
%D {setobject,getobject,ifinobject}
@@ -51,65 +51,79 @@
\newdimen\objectht \def\objectheight{\the\objectht}
\newdimen\objectdp \def\objectdepth {\the\objectdp}
-% If I have time I will use the unreferenced variant for e.g. mp reuse.
+%D If I have time I will use the unreferenced variant for e.g. mp reuse.
-% todo formmargin per obj
-
-\unexpanded\def\setreferencedobject #1#2{\begingroup\objectoff\objectoffset\inobjecttrue\dowithnextbox{\pack_objects_set_yes{#1}{#2}}}
-\unexpanded\def\settightreferencedobject #1#2{\begingroup\objectoff\zeropoint \inobjecttrue\dowithnextbox{\pack_objects_set_yes{#1}{#2}}}
-\unexpanded\def\setunreferencedobject #1#2{\begingroup\objectoff\objectoffset\inobjecttrue\dowithnextbox{\pack_objects_set_nop{#1}{#2}}}
-\unexpanded\def\settightunreferencedobject#1#2{\begingroup\objectoff\zeropoint \inobjecttrue\dowithnextbox{\pack_objects_set_nop{#1}{#2}}}
+\unexpanded\def\setreferencedobject #1#2{\begingroup\synctexpause\objectoff\objectoffset\inobjecttrue\dowithnextbox{\pack_objects_set_yes{#1}{#2}}}
+\unexpanded\def\settightreferencedobject #1#2{\begingroup\synctexpause\objectoff\zeropoint \inobjecttrue\dowithnextbox{\pack_objects_set_yes{#1}{#2}}}
+\unexpanded\def\setunreferencedobject #1#2{\begingroup\synctexpause\objectoff\objectoffset\inobjecttrue\dowithnextbox{\pack_objects_set_nop{#1}{#2}}}
+\unexpanded\def\settightunreferencedobject#1#2{\begingroup\synctexpause\objectoff\zeropoint \inobjecttrue\dowithnextbox{\pack_objects_set_nop{#1}{#2}}}
\let\setobject \setreferencedobject
\let\settightobject\settightreferencedobject
+%D We can get subtle differences in boundingboxes but both methods work ok.
+
+\newconstant\c_pack_objects_offset_mode % 0=tex 1=box
+
+\unexpanded\def\pack_objects_temp_check % this will go away
+ {\ifnum\texenginefunctionality<6401\relax
+ \c_pack_objects_offset_mode\zerocount
+ \fi}
+
\unexpanded\def\pack_objects_set_yes#1#2%
- {\ifdim\objectoff>\zeropoint\relax
- \pack_objects_package
+ {\pack_objects_temp_check % this will go away
+ \ifcase\c_pack_objects_offset_mode
+ \ifzeropt\objectoff
+ \pack_objects_package_nop\nextbox
+ \else
+ \pack_objects_package_yes\nextbox
+ \fi
\else
- \setbox\objectbox\box\nextbox
+ \pack_objects_package_nop\nextbox
\fi
- \clf_registerreferencedobject{#1}{#2}\objectbox
+ \clf_registerreferencedobject{#1}{#2}\objectbox\objectoff\c_pack_objects_offset_mode
+ \synctexresume
\endgroup}
\unexpanded\def\pack_objects_set_nop#1#2%
- {\ifdim\objectoff>\zeropoint\relax
- \pack_objects_package
+ {\pack_objects_temp_check % this will go away
+ \ifcase\c_pack_objects_offset_mode
+ \ifzeropt\objectoff
+ \pack_objects_package_nop\nextbox
+ \else
+ \pack_objects_package_yes\nextbox
+ \fi
\else
- \setbox\objectbox\box\nextbox
+ \pack_objects_package_nop\nextbox
\fi
- \clf_registerobject{#1}{#2}\objectbox
+ \clf_registerobject{#1}{#2}\objectbox\objectoff\c_pack_objects_offset_mode
+ \synctexresume
\endgroup}
-\def\pack_objects_package
- {\objectwd\dimexpr\wd\nextbox+2\objectoff\relax
- \objectht\dimexpr\ht\nextbox+ \objectoff\relax
- \objectdp\dimexpr\dp\nextbox+ \objectoff\relax
- \setbox\objectbox\hpack
- {\hskip\objectoff
- \box\nextbox}%
- \wd\objectbox\objectwd
- \ht\objectbox\objectht
- \dp\objectbox\objectdp}
+\def\pack_objects_package_nop#1% we pack because otherwise \ruledhbox is still tight
+ {\setbox\objectbox\hpack{\box#1}}
-\def\pack_objects_repackage
- {\objectwd\dimexpr\wd\objectbox-2\objectoff\relax
- \objectht\dimexpr\ht\objectbox- \objectoff\relax
- \objectdp\dimexpr\dp\objectbox- \objectoff\relax
+\def\pack_objects_package_yes#1%
+ {\objectwd\dimexpr\wd#1+2\objectoff\relax
+ \objectht\dimexpr\ht#1+ \objectoff\relax
+ \objectdp\dimexpr\dp#1+ \objectoff\relax
\setbox\objectbox\hpack
- {\hskip-\objectoff
- \box\objectbox}%
+ {\hskip\objectoff
+ \box#1}%
\wd\objectbox\objectwd
\ht\objectbox\objectht
\dp\objectbox\objectdp}
\unexpanded\def\getobject#1#2%
{\begingroup
+ \synctexpause
\clf_restoreobject{#1}{#2}%
- \ifdim\objectoff>\zeropoint
- \pack_objects_repackage
+ \ifzeropt\objectoff\else
+ \objectoff-\objectoff
+ \pack_objects_package_yes\objectbox
\fi
\box\objectbox
+ \synctexresume
\endgroup}
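A minimal numeric sketch (plain Lua, hypothetical helper) of what \pack_objects_package_yes and the restoring \getobject do with the offset: packing widens the box by twice the offset and raises height and depth by it, and repackaging with the negated offset undoes that exactly.

local function repackage(wd,ht,dp,offset)
    return wd + 2*offset, ht + offset, dp + offset
end

local wd, ht, dp = repackage(100,40,10,5) -- store with offset 5
print(wd, ht, dp)                         --> 110  45  15
print(repackage(wd,ht,dp,-5))             --> 100  40  10  (as grabbed)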
%D If needed one can ask for the dimensions of an object with:
@@ -118,8 +132,8 @@
%D \getobjectdimensions{class}{name}
%D \stoptyping
%D
-%D The results are reported in \type {\objectwidth}, \type
-%D {\objectheight} and \type {\objectdepth}.
+%D The results are reported in \type {\objectwidth}, \type {\objectheight} and \type
+%D {\objectdepth} as well as \type {\objectoffset}.
\unexpanded\def\getobjectdimensions#1#2%
{\clf_getobjectdimensions{#1}{#2}}
@@ -127,8 +141,7 @@
%D \macros
%D {doifobjectfoundelse,doifobjectreferencefoundelse}
%D
-%D To prevent redundant definition of objects, one can use
-%D the next tests:
+%D To prevent redundant definition of objects, one can use the next tests:
%D
%D \starttyping
%D \doifobjectfoundelse{class}{object}{do then}{do else}
diff --git a/tex/context/base/mkiv/pack-rul.lua b/tex/context/base/mkiv/pack-rul.lua
index 30eda7dd2..c9771546c 100644
--- a/tex/context/base/mkiv/pack-rul.lua
+++ b/tex/context/base/mkiv/pack-rul.lua
@@ -20,6 +20,8 @@ if not modules then modules = { } end modules ['pack-rul'] = {
local type = type
+local context = context
+
local hlist_code = nodes.nodecodes.hlist
local vlist_code = nodes.nodecodes.vlist
local box_code = nodes.listcodes.box
@@ -33,8 +35,6 @@ local implement = interfaces.implement
local nuts = nodes.nuts
-local getfield = nuts.getfield
-local setfield = nuts.setfield
local getnext = nuts.getnext
local getprev = nuts.getprev
local getlist = nuts.getlist
@@ -47,6 +47,8 @@ local getdir = nuts.getdir
local setshift = nuts.setshift
local setwidth = nuts.setwidth
local getwidth = nuts.getwidth
+local setboxglue = nuts.setboxglue
+local getboxglue = nuts.getboxglue
local hpack = nuts.hpack
local traverse_id = nuts.traverse_id
@@ -125,9 +127,8 @@ local function doreshapeframedbox(n)
local subtype = getsubtype(h)
if subtype == box_code or subtype == line_code then
local p = hpack(l,maxwidth,'exactly',getdir(h)) -- multiple return value
- setfield(h,"glue_set",getfield(p,"glue_set"))
- setfield(h,"glue_order",getfield(p,"glue_order"))
- setfield(h,"glue_sign",getfield(p,"glue_sign"))
+ local set, order, sign = getboxglue(p)
+ setboxglue(h,set,order,sign)
setlist(p)
flush_node(p)
elseif checkformath and subtype == equation_code then
diff --git a/tex/context/base/mkiv/pack-rul.mkiv b/tex/context/base/mkiv/pack-rul.mkiv
index eec7b8cb3..c208baaf0 100644
--- a/tex/context/base/mkiv/pack-rul.mkiv
+++ b/tex/context/base/mkiv/pack-rul.mkiv
@@ -18,7 +18,7 @@
%D packaging and expansion we also keep tracing reasonable. For instance, multiple
%D stacked backgrounds can slow down a run if not optimized this way.
-\registerctxluafile{pack-rul}{1.001}
+\registerctxluafile{pack-rul}{}
\unprotect
@@ -1127,6 +1127,12 @@
\let\delayedendstrut\relax
\let\delayedstrut \relax
+\let\localoffset\empty
+\let\localwidth \empty
+\let\localheight\empty
+\let\localformat\empty
+\let\localstrut \empty
+
\unexpanded\def\pack_framed_process_indeed
{\d_framed_frameoffset\framedparameter\c!frameoffset
\edef\p_framed_backgroundoffset{\framedparameter\c!backgroundoffset}%
@@ -1806,7 +1812,7 @@
\dp\b_framed_normal\scratchdimen
\hpack{\box\b_framed_normal}}
-\installframedlocator \v!lohi
+\installframedlocator \v!lohi % maybe also \v!center
{\pack_framed_locator_before\v!middle}
{\pack_framed_locator_after \v!middle}
@@ -2470,6 +2476,7 @@
%D \dontcomplain
%D
%D \startbuffer
+%D \unprotect
%D \vbox to \vsize
%D \bgroup
%D \startalignment[middle]
@@ -2503,6 +2510,7 @@
%D \vss
%D \stopalignment
%D \egroup
+%D \protect
%D \stopbuffer
%D
%D \getbuffer \page
diff --git a/tex/context/base/mkiv/page-box.mkvi b/tex/context/base/mkiv/page-box.mkvi
index fa85aef68..083a94390 100644
--- a/tex/context/base/mkiv/page-box.mkvi
+++ b/tex/context/base/mkiv/page-box.mkvi
@@ -133,6 +133,10 @@
\page_boxes_apply_negate#box%
\fi\fi}
+\let\p_page_layouts_scale\relax
+\let\p_page_layouts_sx \relax
+\let\p_page_layouts_sy \relax
+
\def\page_boxes_apply_scale#box%
{\edef\p_page_layouts_scale{\layoutparameter\c!scale}%
\ifdim\p_page_layouts_scale\points=\onepoint
diff --git a/tex/context/base/mkiv/page-brk.mkiv b/tex/context/base/mkiv/page-brk.mkiv
index b651cc8b4..e18c0d923 100644
--- a/tex/context/base/mkiv/page-brk.mkiv
+++ b/tex/context/base/mkiv/page-brk.mkiv
@@ -137,7 +137,7 @@
{\def\page_breaks_process[##1]{}}
\installpagebreakmethod \s!dummy
- {\page_otr_flush_all_floats
+ {\page_otr_command_flush_all_floats
\page_otr_command_next_page
\page_otr_insert_dummy_page}
@@ -150,11 +150,11 @@
\unexpanded\def\page_breaks_unknown % how often called ?
{\doifelseinstring{+}\page_breaks_current_option
- {\page_otr_flush_all_floats
+ {\page_otr_command_flush_all_floats
\page_otr_command_next_page
\dorecurse\page_breaks_current_option\page_otr_insert_dummy_page}
{\doifelsenumber\page_breaks_current_option
- {\page_otr_flush_all_floats
+ {\page_otr_command_flush_all_floats
\page_otr_command_next_page
\doloop
{\ifnum\userpageno<\page_breaks_current_option\relax
@@ -179,7 +179,7 @@
\installpagebreakmethod \v!yes
{\ifconditional\c_page_breaks_enabled
- \page_otr_flush_all_floats
+ \page_otr_command_flush_all_floats
\page_otr_command_next_page
\ifinsidecolumns % this will move to MUL
\page_otr_eject_page % otherwise sometimes no change
@@ -229,12 +229,12 @@
{\resetcurrentstructuremarkswithpage\page_otr_insert_dummy_page}
\installpagebreakmethod \v!left
- {\page_otr_flush_all_floats
+ {\page_otr_command_flush_all_floats
\page_otr_command_next_page_and_inserts
\doifbothsidesoverruled\donothing\page_reset_marks_and_insert_dummy\donothing}
\installpagebreakmethod \v!right
- {\page_otr_flush_all_floats
+ {\page_otr_command_flush_all_floats
\page_otr_command_next_page_and_inserts
\doifbothsidesoverruled\donothing\donothing\page_reset_marks_and_insert_dummy}
@@ -269,7 +269,7 @@
\fi}
\installpagebreakmethod \v!last
- {\page_otr_flush_all_floats
+ {\page_otr_command_flush_all_floats
\page_otr_command_next_page_and_inserts
\relax
\doifbothsidesoverruled
@@ -504,7 +504,7 @@
\ifdim\pagetotal<\pagegoal \relax
\getnoflines\pagegoal
\ifdim\dimexpr\page_check_amount-\noflines\lineheight\relax>-\lineheight
- \pagecheckparameter\c!before
+ \pagecheckerparameter\c!before
\penalty-\plustenthousand
\pagecheckerparameter\c!after
\else
diff --git a/tex/context/base/mkiv/page-cst.lua b/tex/context/base/mkiv/page-cst.lua
index 03707a312..8b8214ab3 100644
--- a/tex/context/base/mkiv/page-cst.lua
+++ b/tex/context/base/mkiv/page-cst.lua
@@ -8,8 +8,8 @@ if not modules then modules = { } end modules ["page-cst"] = {
-- todo: check what is used
-local next, type = next, type
-local ceil, floor, odd, round = math.ceil, math.floor, math.odd, math.round
+local next, type, tonumber = next, type, tonumber
+local ceil, odd, round = math.ceil, math.odd, math.round
local lower = string.lower
local copy = table.copy
@@ -42,8 +42,6 @@ local vpack = nuts.vpack
local flushlist = nuts.flush_list
----- removenode = nuts.remove
-local getfield = nuts.getfield
-local setfield = nuts.setfield
local setlink = nuts.setlink
local setlist = nuts.setlist
local setnext = nuts.setnext
@@ -238,7 +236,7 @@ function columnsets.define(t)
end
end
--
- texsetdimen("d_page_grid_column_width",dataset.width)
+ texsetdimen("d_page_grd_column_width",dataset.width)
--
setstate(dataset,true)
--
@@ -344,15 +342,15 @@ function columnsets.prepareflush(name)
columns[c] = new_vlist(column[1],widths[c],height,0) -- linedepth
end
--
- texsetcount("c_page_grid_first_column",firstcolumn)
- texsetcount("c_page_grid_last_column",lastcolumn)
+ texsetcount("c_page_grd_first_column",firstcolumn)
+ texsetcount("c_page_grd_last_column",lastcolumn)
end
function columnsets.flushcolumn(name,column)
local dataset = data[name]
local columns = dataset.columns
local packed = columns[column]
- setbox("b_page_grid_column",packed)
+ setbox("b_page_grd_column",packed)
columns[column] = nil
end
@@ -639,10 +637,10 @@ function columnsets.check(t)
r = dataset.currentrow
end
if c == 0 or r == 0 or c > nofcolumns or r > nofrows then
- texsetcount("c_page_grid_reserved_state",5)
+ texsetcount("c_page_grd_reserved_state",5)
return
end
--- report("checking width %p, height %p, depth %p, slot (%i,%i)",boxwidth,boxheight,boxdepth,c,r)
+ -- report("checking width %p, height %p, depth %p, slot (%i,%i)",boxwidth,boxheight,boxdepth,c,r)
local nr = ceil(boxheight/(lineheight+linedepth))
--
local action = methods[method]
@@ -668,9 +666,9 @@ function columnsets.check(t)
dataset.reserved_r = rfound
dataset.reserved_nc = nc
dataset.reserved_nr = nr
- texsetcount("c_page_grid_reserved_state",0)
- texsetdimen("d_page_grid_reserved_height",ht)
- texsetdimen("d_page_grid_reserved_width",wd)
+ texsetcount("c_page_grd_reserved_state",0)
+ texsetdimen("d_page_grd_reserved_height",ht)
+ texsetdimen("d_page_grd_reserved_width",wd)
-- report("using (%i,%i) x (%i,%i) @ (%p,%p)",cfound,rfound,nc,nr,wd,ht)
else
dataset.reserved_ht = false
@@ -679,9 +677,9 @@ function columnsets.check(t)
dataset.reserved_r = false
dataset.reserved_nc = false
dataset.reserved_nr = false
- texsetcount("c_page_grid_reserved_state",4)
- -- texsetdimen("d_page_grid_reserved_height",0)
- -- texsetdimen("d_page_grid_reserved_width",0)
+ texsetcount("c_page_grd_reserved_state",4)
+ -- texsetdimen("d_page_grd_reserved_height",0)
+ -- texsetdimen("d_page_grd_reserved_width",0)
-- report("no slot found")
end
end
@@ -768,7 +766,8 @@ end
-- while head do
-- local id = getid(head)
-- if id == hlist_code or id == vlist_code or id == rule_code then -- <= rule_code
--- used = used + getfield(head,"height") + getfield(head,"depth")
+-- local wd, ht, dp = getwhd(head)
+-- used = used + ht + dp
-- line = true
-- elseif id == glue_code then
-- if line then
@@ -860,7 +859,8 @@ end
-- local id = getid(head)
-- local hd = 0
-- if id == hlist_code or id == vlist_code or id == rule_code then -- <= rule_code
--- hd = getfield(head,"height") + getfield(head,"depth")
+-- local wd, ht, dp = getwhd(head)
+-- hd = ht + dp
-- elseif id == glue_code then
-- hd = getwidth(head)
-- elseif id == kern_code then
@@ -1130,14 +1130,14 @@ function columnsets.setvsize(name)
n = 0
end
local gap = n*(dataset.lineheight+dataset.linedepth)
- texsetdimen("d_page_grid_gap_height",gap)
+ texsetdimen("d_page_grd_gap_height",gap)
-- can be integrated
-- report("state %a, n %a, column %a, row %a",dataset.state,n,dataset.currentcolumn,dataset.currentrow)
end
function columnsets.sethsize(name)
local dataset = data[name]
- texsetdimen("d_page_grid_column_width",dataset.widths[dataset.currentcolumn])
+ texsetdimen("d_page_grd_column_width",dataset.widths[dataset.currentcolumn])
end
function columnsets.sethspan(name,span)
@@ -1150,7 +1150,7 @@ function columnsets.sethspan(name,span)
span = available
end
local width = dataset.spans[column][span]
- texsetdimen("d_page_grid_span_width",width)
+ texsetdimen("d_page_grd_span_width",width)
end
function columnsets.setlines(t)
@@ -1179,7 +1179,7 @@ end
-- state : repeat | start
-local ctx_page_grid_set_area = context.protected.page_grid_set_area
+local ctx_page_grd_set_area = context.protected.page_grd_set_area
function columnsets.flushareas(name)
local nofareas = #areas
@@ -1218,7 +1218,7 @@ function columnsets.flushareas(name)
local used = nofcolumns - overflow
left = dataset.spreads[column][used] + getdimen("backspace")
end
- ctx_page_grid_set_area(name,area.name,column,row,width,height,start,left) -- or via counters / dimens
+ ctx_page_grd_set_area(name,area.name,column,row,width,height,start,left) -- or via counters / dimens
if area.state ~= v_repeat then
area = nil
end
diff --git a/tex/context/base/mkiv/page-cst.mkiv b/tex/context/base/mkiv/page-cst.mkiv
index ed4512561..f6eede68a 100644
--- a/tex/context/base/mkiv/page-cst.mkiv
+++ b/tex/context/base/mkiv/page-cst.mkiv
@@ -18,7 +18,7 @@
\writestatus{loading}{ConTeXt Page Macros / Page Grids}
-\registerctxluafile{page-cst}{1.001}
+\registerctxluafile{page-cst}{}
\unprotect
@@ -71,41 +71,41 @@
}%
\to \everydefinepagegrid
-\newdimen \d_page_grid_column_width
-\newdimen \d_page_grid_max_height
-\newdimen \d_page_grid_max_width
-\newdimen \d_page_grid_distance
+\newdimen \d_page_grd_column_width
+\newdimen \d_page_grd_max_height
+\newdimen \d_page_grd_max_width
+\newdimen \d_page_grd_distance
-\newdimen \d_page_grid_reserved_height
-\newdimen \d_page_grid_reserved_width
-\newcount \c_page_grid_reserved_state
+\newdimen \d_page_grd_reserved_height
+\newdimen \d_page_grd_reserved_width
+\newcount \c_page_grd_reserved_state
-\newdimen \d_page_grid_gap_height
+\newdimen \d_page_grd_gap_height
-\newcount \c_page_grid_n_of_left
-\newcount \c_page_grid_n_of_right
-\newcount \c_page_grid_n_of_rows
-\newcount \c_page_grid_first_column
-\newcount \c_page_grid_last_column
+\newcount \c_page_grd_n_of_left
+\newcount \c_page_grd_n_of_right
+\newcount \c_page_grd_n_of_rows
+\newcount \c_page_grd_first_column
+\newcount \c_page_grd_last_column
-\newbox \b_page_grid_collected
-\newbox \b_page_grid_column_rest
-\newbox \b_page_grid_column
+\newbox \b_page_grd_collected
+\newbox \b_page_grd_column_rest
+\newbox \b_page_grd_column
-\unexpanded\def\setuppagegridlines{\doquadrupleempty\page_grid_setup_lines}
-\unexpanded\def\setuppagegridstart{\doquadrupleempty\page_grid_setup_start}
+\unexpanded\def\setuppagegridlines{\doquadrupleempty\page_grd_setup_lines}
+\unexpanded\def\setuppagegridstart{\doquadrupleempty\page_grd_setup_start}
-\def\page_grid_setup_lines[#1][#2][#3][#4]% id page col value
+\def\page_grd_setup_lines[#1][#2][#3][#4]% id page col value
{\clf_setcolumnsetlines{name {#1} page #2 column #3 value #4}}
-\def\page_grid_setup_start[#1][#2][#3][#4]% id page col value
+\def\page_grd_setup_start[#1][#2][#3][#4]% id page col value
{\clf_setcolumnsetstart{name {#1} page #2 column #3 value #4}}
-\unexpanded\def\page_grid_check
+\unexpanded\def\page_grd_check
{\dorecurse{\numexpr\pagegridparameter\c!nleft+\pagegridparameter\c!nright}
- {\page_grid_check_column{##1}}}
+ {\page_grd_check_column{##1}}}
-\unexpanded\def\page_grid_check_column#1%
+\unexpanded\def\page_grd_check_column#1%
{\chaintocurrentpagegrid{\currentpagegrid:#1}%
\edef\p_distance{\namedpagegridparameter{\currentpagegrid:#1}\c!distance}%
\edef\p_width {\namedpagegridparameter{\currentpagegrid:#1}\c!width}%
@@ -127,13 +127,13 @@
\unexpanded\def\startpagegrid
{\bgroup
- \dodoubleempty\page_grid_start}
+ \dodoubleempty\page_grd_start}
-\def\page_grid_start_dummy[#1][#2]%
- {\let\page_grid_stop\egroup}
+\def\page_grd_start_dummy[#1][#2]%
+ {\let\page_grd_stop\egroup}
-\def\page_grid_start[#1][#2]%
- {\let\page_grid_start\page_grid_start_dummy
+\def\page_grd_start[#1][#2]%
+ {\let\page_grd_start\page_grd_start_dummy
\ifsecondargument
\edef\currentpagegrid{#1}%
\setupcurrentpagegrid[#2]%
@@ -146,54 +146,54 @@
\let\currentpagegrid\empty
\fi\fi
\usepageparameter\pagegridparameter
- \c_page_grid_n_of_left \pagegridparameter\c!nleft\relax
- \c_page_grid_n_of_right\pagegridparameter\c!nright\relax
- \c_page_grid_n_of_rows \pagegridparameter\c!lines\relax
- \d_page_grid_max_width \pagegridparameter\c!maxwidth\relax
- \d_page_grid_max_height\pagegridparameter\c!maxheight\relax
- \d_page_grid_distance \pagegridparameter\c!distance\relax
+ \c_page_grd_n_of_left \pagegridparameter\c!nleft\relax
+ \c_page_grd_n_of_right\pagegridparameter\c!nright\relax
+ \c_page_grd_n_of_rows \pagegridparameter\c!lines\relax
+ \d_page_grd_max_width \pagegridparameter\c!maxwidth\relax
+ \d_page_grd_max_height\pagegridparameter\c!maxheight\relax
+ \d_page_grd_distance \pagegridparameter\c!distance\relax
%
- \ifcase\c_page_grid_n_of_rows
- \getrawnoflines{\dimexpr\d_page_grid_max_height-\strutheight+\topskip\relax}%
- \c_page_grid_n_of_rows\noflines
+ \ifcase\c_page_grd_n_of_rows
+ \getrawnoflines{\dimexpr\d_page_grd_max_height-\strutheight+\topskip\relax}%
+ \c_page_grd_n_of_rows\noflines
\fi
\edef\p_width{\pagegridparameter\c!width}%
\insidecolumnstrue % will be different flag in addition
\clf_resetcolumnset {
name {\currentpagegrid}
- nofrows \c_page_grid_n_of_rows
- nofleft \c_page_grid_n_of_left
- nofright \c_page_grid_n_of_right
+ nofrows \c_page_grd_n_of_rows
+ nofleft \c_page_grd_n_of_left
+ nofright \c_page_grd_n_of_right
lineheight \strutht
linedepth \strutdp
\ifx\p_width\v!auto
- % sets \d_page_grid_column_width
+ % sets \d_page_grd_column_width
\else
width \p_width
\fi
- distance \d_page_grid_distance
- maxwidth \d_page_grid_max_width
+ distance \d_page_grd_distance
+ maxwidth \d_page_grd_max_width
}%
%
- \page_grid_check
+ \page_grd_check
%
\clf_flushcolumnsetareas{\currentpagegrid}\relax
\setupoutputroutine[\s!pagegrid]%
- \page_grid_command_set_hsize
- \page_grid_command_set_vsize
+ \page_grd_command_set_hsize
+ \page_grd_command_set_vsize
}%\begingroup}
\unexpanded\def\stoppagegrid
- {\page_grid_stop}
+ {\page_grd_stop}
-\def\page_grid_stop
+\def\page_grd_stop
{\endgraf % needed, else wrong vsize in one par case
\vfill % otherwise weird \placenotes[endnotes]
- \page_otr_command_set_vsize % needed
+ \page_grd_command_set_vsize % needed
\penalty\c_page_otr_eject_penalty
- \page_grid_command_flush_page
+ \page_grd_command_flush_page
\page_otr_fill_and_eject_page
- \page_otr_command_set_vsize % needed
+ \page_grd_command_set_vsize % why here
\egroup
\page_otr_command_set_vsize
\page_otr_command_set_hsize}
@@ -215,17 +215,17 @@
\endgroup}
\unexpanded\def\setpagegrid
- {\dosingleempty\page_grid_set}
+ {\dosingleempty\page_grd_set}
-\unexpanded\def\page_grid_set[#1]%
+\unexpanded\def\page_grd_set[#1]%
{\begingroup
\letdummyparameter\c!c\zerocount
\letdummyparameter\c!r\zerocount
\letdummyparameter\c!option\v!none
\getdummyparameters[#1]%
- \dowithnextboxcs\page_grid_set_indeed\hbox}
+ \dowithnextboxcs\page_grd_set_indeed\hbox}
-\def\page_grid_set_indeed
+\def\page_grd_set_indeed
{\clf_checkcolumnset {
name {\currentpagegrid}
c \dummyparameter\c!c
@@ -233,16 +233,16 @@
box \nextbox
option {\dummyparameter\c!option}
}%
- \ifcase\c_page_grid_reserved_state
- \setbox\nextbox\vpack to \d_page_grid_reserved_height \bgroup
+ \ifcase\c_page_grd_reserved_state
+ \setbox\nextbox\vpack to \d_page_grd_reserved_height \bgroup
\vss
- \hpack to \d_page_grid_reserved_width \bgroup
+ \hpack to \d_page_grd_reserved_width \bgroup
\box\nextbox
\hss
\egroup
\vss
\egroup
- \wd\nextbox\d_page_grid_reserved_width
+ \wd\nextbox\d_page_grd_reserved_width
\clf_putincolumnset {
name {\currentpagegrid}
box \nextbox
@@ -250,31 +250,31 @@
\fi
\endgroup}
-\unexpanded\def\page_grid_command_set_vsize
+\unexpanded\def\page_grd_command_set_vsize
{\clf_setvsizecolumnset{\currentpagegrid}%
- \ifdim\d_page_grid_gap_height<\lineheight
- \page_grid_command_flush_page
+ \ifdim\d_page_grd_gap_height<\lineheight
+ \page_grd_command_flush_page
\page_otr_fill_and_eject_page
\fi
- \global\vsize\d_page_grid_gap_height
+ \global\vsize\d_page_grd_gap_height
\pagegoal\vsize}
-\unexpanded\def\page_grid_command_set_hsize
+\unexpanded\def\page_grd_command_set_hsize
{\clf_sethsizecolumnset{\currentpagegrid}%
- \hsize\d_page_grid_column_width
- \textwidth\d_page_grid_column_width}
+ \hsize\d_page_grd_column_width
+ \textwidth\d_page_grd_column_width}
-\unexpanded\def\page_grid_command_routine
+\unexpanded\def\page_grd_command_routine
{\ifvoid\normalpagebox \else
\clf_addtocolumnset{\currentpagegrid}\normalpagebox
\fi
- \page_grid_command_set_vsize
- \page_grid_command_flush_saved_floats
- \page_grid_command_set_vsize
- \ifdim\d_page_grid_gap_height<\lineheight
- \page_grid_command_flush_page
+ \page_grd_command_set_vsize
+ \page_grd_command_flush_saved_floats
+ \page_grd_command_set_vsize
+ \ifdim\d_page_grd_gap_height<\lineheight
+ \page_grd_command_flush_page
\fi
- \page_grid_command_set_vsize
+ \page_grd_command_set_vsize
\clf_flushcolumnsetrest {\currentpagegrid}\normalpagebox
\ifvoid\normalpagebox \else
\unvbox\normalpagebox
@@ -283,7 +283,7 @@
\installoutputroutine\synchronizepagegrid
{\ifvoid\normalpagebox\else
\clf_addtocolumnset{\currentpagegrid}\normalpagebox
- \page_grid_command_set_vsize
+ \page_grd_command_set_vsize
\clf_flushcolumnsetrest{\currentpagegrid}\normalpagebox
\ifvoid\normalpagebox \else
\unvbox\normalpagebox
@@ -292,82 +292,82 @@
% todo line numbers and marks
-\unexpanded\def\page_grid_command_flush_page_column#1%
+\unexpanded\def\page_grd_command_flush_page_column#1%
{\privatescratchcounter#1\relax
\clf_flushcolumnsetcolumn{\currentpagegrid}\privatescratchcounter
- \anch_mark_column_box\b_page_grid_column
- \page_marks_synchronize_column\c_page_grid_first_column\c_page_grid_last_column\privatescratchcounter\b_page_grid_column
- \ifnum\privatescratchcounter>\c_page_grid_n_of_left
- \advance\privatescratchcounter-\c_page_grid_n_of_left
- \page_lines_add_numbers_to_box\b_page_grid_column\privatescratchcounter\c_page_grid_n_of_right\plustwo
+ \anch_mark_column_box\b_page_grd_column
+ \page_marks_synchronize_column\c_page_grd_first_column\c_page_grd_last_column\privatescratchcounter\b_page_grd_column
+ \ifnum\privatescratchcounter>\c_page_grd_n_of_left
+ \advance\privatescratchcounter-\c_page_grd_n_of_left
+ \page_lines_add_numbers_to_box\b_page_grd_column\privatescratchcounter\c_page_grd_n_of_right\plustwo
\else
- \page_lines_add_numbers_to_box\b_page_grid_column\privatescratchcounter\c_page_grid_n_of_left\plustwo
+ \page_lines_add_numbers_to_box\b_page_grd_column\privatescratchcounter\c_page_grd_n_of_left\plustwo
\fi
\begingroup
\edef\currentpagegrid{\currentpagegrid:#1}%
- \inheritedpagegridframedbox\box\b_page_grid_column
+ \inheritedpagegridframedbox\box\b_page_grd_column
\endgroup}
-\unexpanded\def\page_grid_command_flush_page
+\unexpanded\def\page_grd_command_flush_page
{\deactivatecolor % puzzling, try ungrouped color \red or so
- \setbox\b_page_grid_collected\hpack\bgroup
+ \setbox\b_page_grd_collected\hpack\bgroup
\clf_preparecolumnsetflush{\currentpagegrid}%
\letpagegridparameter\c!region\currentpagegrid
\doifelse{\pagegridparameter\c!direction}\v!reverse
- {\dostepwiserecurse\c_page_grid_last_column\c_page_grid_first_column\minusone
- {\page_grid_command_flush_page_column{##1}%
+ {\dostepwiserecurse\c_page_grd_last_column\c_page_grd_first_column\minusone
+ {\page_grd_command_flush_page_column{##1}%
\ifnum##1>\plusone
\kern\namedpagegridparameter{\currentpagegrid:##1}\c!distance\relax
\fi}}%
- {\dostepwiserecurse\c_page_grid_first_column\c_page_grid_last_column\plusone
- {\page_grid_command_flush_page_column{##1}%
- \ifnum##1<\c_page_grid_last_column
+ {\dostepwiserecurse\c_page_grd_first_column\c_page_grd_last_column\plusone
+ {\page_grd_command_flush_page_column{##1}%
+ \ifnum##1<\c_page_grd_last_column
\kern\namedpagegridparameter{\currentpagegrid:##1}\c!distance\relax
\fi}}%
\clf_finishcolumnsetflush{\currentpagegrid}%
\egroup
- \page_otr_construct_and_shipout\box\b_page_grid_collected
+ \page_otr_construct_and_shipout\box\b_page_grd_collected\zerocount % three arguments
\clf_flushcolumnsetareas{\currentpagegrid}\relax
- \page_grid_command_flush_saved_floats}
+ \page_grd_command_flush_saved_floats}
% slow but robust
-\unexpanded\def\page_grid_command_next_progress
+\unexpanded\def\page_grd_command_next_progress
{\strut
- \page_otr_flush_all_floats
+ \page_grd_command_flush_all_floats
\page_otr_eject_page
\ifcase\clf_columnsetnoto\else
- \expandafter\page_grid_command_next_progress
+ \expandafter\page_grd_command_next_progress
\fi}
-\unexpanded\def\page_grid_command_handle_column
+\unexpanded\def\page_grd_command_handle_column
{\ifcase\clf_columnsetgoto{\currentpagegrid}{\page_breaks_columns_current_option}\relax\else
- \expandafter\page_grid_command_next_progress
+ \expandafter\page_grd_command_next_progress
\fi}
-\installcolumnbreakmethod\s!pagegrid\s!unknown {\page_grid_command_handle_column}
-\installcolumnbreakmethod\s!pagegrid\v!yes {\page_grid_command_handle_column}
+\installcolumnbreakmethod\s!pagegrid\s!unknown {\page_grd_command_handle_column}
+\installcolumnbreakmethod\s!pagegrid\v!yes {\page_grd_command_handle_column}
-\unexpanded\def\page_grid_command_next_page
+\unexpanded\def\page_grd_command_next_page
{\ifcase\clf_columnsetgoto{\currentpagegrid}{\v!page}\relax\else
- \page_grid_command_flush_page
+ \page_grd_command_flush_page
\fi}
-\unexpanded\def\page_grid_command_next_page_and_inserts
-% {\page_otr_eject_page_and_flush_inserts}
- {\page_otr_flush_all_floats
- \page_grid_command_next_page}
+\unexpanded\def\page_grd_command_next_page_and_inserts
+ {\page_grd_command_flush_all_floats
+ \page_grd_command_next_page}
-\let\page_grid_command_package_contents\page_one_command_package_contents
+\let\page_grd_command_flush_all_floats\page_one_command_flush_all_floats
+\let\page_grd_command_package_contents\page_one_command_package_contents
-\unexpanded\def\page_grid_command_flush_saved_floats
+\unexpanded\def\page_grd_command_flush_saved_floats
{\ifconditional\c_page_floats_flushing \else
\ifconditional\c_page_floats_some_waiting
- \page_grid_command_flush_saved_floats_indeed
+ \page_grd_command_flush_saved_floats_indeed
\fi
\fi}
-\unexpanded\def\page_grid_command_flush_saved_floats_indeed
+\unexpanded\def\page_grd_command_flush_saved_floats_indeed
{\page_floats_flush\s!text\plusone
\clf_checkcolumnset {
name {\currentpagegrid}
@@ -375,11 +375,11 @@
width \wd\floatbox
height \ht\floatbox
}%
- \ifcase\c_page_grid_reserved_state
- \page_grid_place_float_here_indeed
- \page_grid_command_set_vsize % needed
+ \ifcase\c_page_grd_reserved_state
+ \page_grd_place_float_here_indeed
+ \page_grd_command_set_vsize % needed
\ifconditional\c_page_floats_some_waiting
- \doubleexpandafter\page_grid_command_flush_saved_floats_indeed
+ \doubleexpandafter\page_grd_command_flush_saved_floats_indeed
\fi
\else
\page_floats_resave\s!text
@@ -387,48 +387,52 @@
% needs checking
-\unexpanded\def\page_grid_command_flush_floats
+\unexpanded\def\page_grd_command_flush_floats
{\wait\global\settrue\c_page_floats_flushing
\ifconditional\c_page_floats_some_waiting
\par
- \page_grid_command_flush_floats_indeed
+ \page_grd_command_flush_floats_indeed
\fi
\global\savednoffloats\zerocount
\global\setfalse\c_page_floats_some_waiting
\global\setfalse\c_page_floats_flushing}
-\def\page_grid_command_flush_floats_indeed % much in common with OTRSET
+\def\page_grd_command_flush_floats_indeed % much in common with OTRSET
{\wait\ifconditional\c_page_floats_some_waiting
\ifconditional\c_page_floats_pack_flushed
- \setfalse\c_page_floats_center_box % not needed as we do call directly
\page_floats_collect\s!text\hsize\emwidth
- \global\setbox\floatbox\hbox to \hsize
- {\hfil
- \dorecurse\nofcollectedfloats
- {\ifcase\columndirection % nog document wide
- \page_floats_flush\s!text\plusone
- \else
- \page_floats_flush\s!text{\the\numexpr\nofcollectedfloats-\recurselevel+1\relax}%
- \fi
- \ifdim\wd\floatbox>\makeupwidth % \hsize
- \hbox to \makeupwidth{\hss\box\floatbox\hss}%
- \else
- \box\floatbox
- \fi
- \ifnum\recurselevel<\nofcollectedfloats
- \hfil
- \fi}%
- \hfil}%
+ \ifcase\nofcollectedfloats
+ \page_floats_get
+ \else
+ \setfalse\c_page_floats_center_box % not needed as we do call directly
+ \global\setbox\floatbox\hbox to \hsize
+ {\hfil
+ \dorecurse\nofcollectedfloats
+ {\ifcase\columndirection % nog document wide
+ \page_floats_flush\s!text\plusone
+ \else
+ \page_floats_flush\s!text{\the\numexpr\nofcollectedfloats-\recurselevel+1\relax}%
+ \fi
+ \ifdim\wd\floatbox>\makeupwidth % \hsize
+ \hbox to \makeupwidth{\hss\box\floatbox\hss}%
+ \else
+ \box\floatbox
+ \fi
+ \ifnum\recurselevel<\nofcollectedfloats
+ \hfil
+ \fi}%
+ \hfil}%
+ \fi
\else
\page_floats_get
\fi
\doplacefloatbox
- \expandafter\page_grid_command_flush_floats_indeed
+ \expandafter\page_grd_command_flush_floats_indeed
\fi}
% so far
-\unexpanded\def\page_grid_command_check_if_float_fits
+\unexpanded\def\page_grd_command_check_if_float_fits
{\clf_checkcolumnset {
name {\currentpagegrid}
method {\floatmethod}
@@ -436,16 +440,16 @@
% r \zerocount
box \floatbox
}%
- \ifcase\c_page_grid_reserved_state
+ \ifcase\c_page_grd_reserved_state
\global\settrue\c_page_floats_room
\else
\global\setfalse\c_page_floats_room
\fi}
-\unexpanded\def\page_grid_place_float_here_indeed
- {\setbox\floatbox\vpack to \d_page_grid_reserved_height \bgroup
+\unexpanded\def\page_grd_place_float_here_indeed
+ {\setbox\floatbox\vpack to \d_page_grd_reserved_height \bgroup
\vss
- \hpack to \d_page_grid_reserved_width \bgroup
+ \hpack to \d_page_grd_reserved_width \bgroup
% \hss % no
\box\floatbox
\hss
@@ -457,7 +461,7 @@
box \floatbox
}}
-\def\page_grid_place_float_slot
+\def\page_grd_place_float_slot
{% safeguard
\ifx\floatmethod\empty
\let\floatmethod\v!here
@@ -466,7 +470,7 @@
\penalty\c_page_otr_eject_penalty
% push
\setbox\savedfloatbox\box\floatbox
- \page_grid_command_flush_saved_floats
+ \page_grd_command_flush_saved_floats
\setbox\floatbox\box\savedfloatbox
% pop
\ifconditional\c_page_floats_some_waiting
@@ -484,15 +488,15 @@
\fi
box \floatbox
}%
- \ifcase\c_page_grid_reserved_state
- \page_grid_place_float_here_indeed
+ \ifcase\c_page_grd_reserved_state
+ \page_grd_place_float_here_indeed
\else
\page_floats_save\s!text
\nonoindentation
\fi
\fi}
-\def\page_grid_place_float_fixed % todo: fallback on here
+\def\page_grd_place_float_fixed % todo: fallback on here
{\ifx\floatcolumn\empty
\let\floatmethod\v!here
\else\ifx\floatrow\empty
@@ -500,9 +504,9 @@
\else
\let\floatmethod\v!fixed
\fi\fi
- \page_grid_place_float_slot}
+ \page_grd_place_float_slot}
-\def\page_grid_place_float_force
+\def\page_grd_place_float_force
{% synchronize
\penalty\c_page_otr_eject_penalty
\clf_checkcolumnset {
@@ -510,30 +514,30 @@
method {\floatmethod}
box \floatbox
}%
- \ifcase\c_page_grid_reserved_state
- \page_grid_place_float_here_indeed
+ \ifcase\c_page_grd_reserved_state
+ \page_grd_place_float_here_indeed
\else
\page_floats_save\s!text
\nonoindentation
\fi}
-\def\page_grid_place_float_page {\page_grid_place_float_slot} % todo: fallback on here
+\def\page_grd_place_float_page {\page_grd_place_float_slot} % todo: fallback on here
-\def\page_grid_place_float_here {\let\floatmethod\v!here\page_grid_place_float_slot}
-\def\page_grid_place_float_top {\page_grid_place_float_slot}
-\def\page_grid_place_float_bottom{\page_grid_place_float_slot}
+\def\page_grd_place_float_here {\let\floatmethod\v!here\page_grd_place_float_slot}
+\def\page_grd_place_float_top {\page_grd_place_float_slot}
+\def\page_grd_place_float_bottom{\page_grd_place_float_slot}
-\installfloatmethod \s!pagegrid \v!here \page_grid_place_float_here
-\installfloatmethod \s!pagegrid \v!force \page_grid_place_float_force % todo
+\installfloatmethod \s!pagegrid \v!here \page_grd_place_float_here
+\installfloatmethod \s!pagegrid \v!force \page_grd_place_float_force % todo
%installfloatmethod \s!pagegrid \v!left
%installfloatmethod \s!pagegrid \v!right
%installfloatmethod \s!pagegrid \v!text
-\installfloatmethod \s!pagegrid \v!top \page_grid_place_float_top
-\installfloatmethod \s!pagegrid \v!bottom \page_grid_place_float_bottom
+\installfloatmethod \s!pagegrid \v!top \page_grd_place_float_top
+\installfloatmethod \s!pagegrid \v!bottom \page_grd_place_float_bottom
%installfloatmethod \s!pagegrid \v!auto
%installfloatmethod \s!pagegrid \v!margin
%installfloatmethod \s!pagegrid \v!opposite
-\installfloatmethod \s!pagegrid \v!page \page_grid_place_float_page
+\installfloatmethod \s!pagegrid \v!page \page_grd_place_float_page
%installfloatmethod \s!pagegrid \v!leftpage
%installfloatmethod \s!pagegrid \v!rightpage
%installfloatmethod \s!pagegrid \v!inmargin
@@ -546,54 +550,58 @@
%installfloatmethod \s!pagegrid \v!somewhere
%installfloatmethod \s!pagegrid \v!backspace
%installfloatmethod \s!pagegrid \v!cutspace
-\installfloatmethod \s!pagegrid \s!tblr \page_grid_place_float_slot
-\installfloatmethod \s!pagegrid \s!lrtb \page_grid_place_float_slot
-\installfloatmethod \s!pagegrid \s!tbrl \page_grid_place_float_slot
-\installfloatmethod \s!pagegrid \s!rltb \page_grid_place_float_slot
-\installfloatmethod \s!pagegrid \s!fxtb \page_grid_place_float_slot
-\installfloatmethod \s!pagegrid \s!btlr \page_grid_place_float_slot
-\installfloatmethod \s!pagegrid \s!lrbt \page_grid_place_float_slot
-\installfloatmethod \s!pagegrid \s!btrl \page_grid_place_float_slot
-\installfloatmethod \s!pagegrid \s!rlbt \page_grid_place_float_slot
-\installfloatmethod \s!pagegrid \s!fxbt \page_grid_place_float_slot
-\installfloatmethod \s!pagegrid \s!fixd \page_grid_place_float_fixed
+\installfloatmethod \s!pagegrid \s!tblr \page_grd_place_float_slot
+\installfloatmethod \s!pagegrid \s!lrtb \page_grd_place_float_slot
+\installfloatmethod \s!pagegrid \s!tbrl \page_grd_place_float_slot
+\installfloatmethod \s!pagegrid \s!rltb \page_grd_place_float_slot
+\installfloatmethod \s!pagegrid \s!fxtb \page_grd_place_float_slot
+\installfloatmethod \s!pagegrid \s!btlr \page_grd_place_float_slot
+\installfloatmethod \s!pagegrid \s!lrbt \page_grd_place_float_slot
+\installfloatmethod \s!pagegrid \s!btrl \page_grd_place_float_slot
+\installfloatmethod \s!pagegrid \s!rlbt \page_grd_place_float_slot
+\installfloatmethod \s!pagegrid \s!fxbt \page_grd_place_float_slot
+\installfloatmethod \s!pagegrid \s!fixd \page_grd_place_float_fixed
%
-\unexpanded\def\page_grid_command_side_float_output
+\unexpanded\def\page_grd_command_side_float_output
{} % nothing, reset anyway
-\unexpanded\def\page_grid_command_flush_side_floats
+\unexpanded\def\page_grd_command_flush_side_floats
{\page_sides_forget_floats}
-\unexpanded\def\page_grid_command_synchronize_side_floats
+\unexpanded\def\page_grd_command_synchronize_side_floats
{\page_sides_forget_floats}
-\unexpanded\def\page_grid_command_synchronize_hsize
- {\page_grid_command_set_hsize}
+\unexpanded\def\page_grd_command_synchronize_hsize
+ {\page_grd_command_set_hsize}
+
+\unexpanded\def\page_grd_command_flush_all_floats
+ {\page_one_command_flush_all_floats}
\defineoutputroutine
[\s!pagegrid]
- [\s!page_otr_command_routine =\page_grid_command_routine,
- \s!page_otr_command_package_contents =\page_grid_command_package_contents,
- \s!page_otr_command_set_vsize =\page_grid_command_set_vsize,
- \s!page_otr_command_set_hsize =\page_grid_command_set_hsize, % tricky, goes wrong
- \s!page_otr_command_next_page =\page_grid_command_next_page,
- \s!page_otr_command_next_page_and_inserts =\page_grid_command_next_page_and_inserts,
- \s!page_otr_command_synchronize_hsize =\page_grid_command_synchronize_hsize,
- % \s!page_otr_command_set_top_insertions =\page_grid_command_set_top_insertions,
- % \s!page_otr_command_set_bottom_insertions =\page_grid_command_set_bottom_insertions,
- % \s!page_otr_command_flush_top_insertions =\page_grid_command_flush_top_insertions,
- % \s!page_otr_command_flush_bottom_insertions =\page_grid_command_flush_bottom_insertions,
- % \s!page_otr_command_set_float_hsize =\page_grid_command_set_float_hsize,
- \s!page_otr_command_check_if_float_fits =\page_grid_command_check_if_float_fits,
- % \s!page_otr_command_flush_float_box =\page_grid_command_flush_float_box,
- \s!page_otr_command_synchronize_side_floats =\page_grid_command_synchronize_side_floats,
- \s!page_otr_command_side_float_output =\page_grid_command_side_float_output,
- \s!page_otr_command_flush_floats =\page_grid_command_flush_floats,
- \s!page_otr_command_flush_side_floats =\page_grid_command_flush_side_floats,
- \s!page_otr_command_flush_saved_floats =\page_grid_command_flush_saved_floats
- % \s!page_otr_command_flush_margin_blocks =\page_grid_command_flush_margin_blocks, % not used
+ [\s!page_otr_command_routine =\page_grd_command_routine,
+ \s!page_otr_command_package_contents =\page_grd_command_package_contents,
+ \s!page_otr_command_set_vsize =\page_grd_command_set_vsize,
+ \s!page_otr_command_set_hsize =\page_grd_command_set_hsize, % tricky, goes wrong
+ \s!page_otr_command_synchronize_hsize =\page_grd_command_synchronize_hsize,
+ \s!page_otr_command_next_page =\page_grd_command_next_page,
+ \s!page_otr_command_next_page_and_inserts =\page_grd_command_next_page_and_inserts,
+ % \s!page_otr_command_set_top_insertions =\page_grd_command_set_top_insertions,
+ % \s!page_otr_command_set_bottom_insertions =\page_grd_command_set_bottom_insertions,
+ % \s!page_otr_command_flush_top_insertions =\page_grd_command_flush_top_insertions,
+ % \s!page_otr_command_flush_bottom_insertions =\page_grd_command_flush_bottom_insertions,
+ \s!page_otr_command_check_if_float_fits =\page_grd_command_check_if_float_fits,
+ % \s!page_otr_command_set_float_hsize =\page_grd_command_set_float_hsize,
+ % \s!page_otr_command_flush_float_box =\page_grd_command_flush_float_box,
+ \s!page_otr_command_synchronize_side_floats =\page_grd_command_synchronize_side_floats,
+ \s!page_otr_command_side_float_output =\page_grd_command_side_float_output,
+ \s!page_otr_command_flush_floats =\page_grd_command_flush_floats,
+ \s!page_otr_command_flush_side_floats =\page_grd_command_flush_side_floats,
+ \s!page_otr_command_flush_saved_floats =\page_grd_command_flush_saved_floats,
+ \s!page_otr_command_flush_all_floats =\page_grd_command_flush_all_floats,
+ % \s!page_otr_command_flush_margin_blocks =\page_grd_command_flush_margin_blocks, % not used
]
% spans
@@ -613,18 +621,18 @@
\c!n=\plustwo,
\c!nlines=\zerocount,
\c!align=\v!normal,
- \c!width=\d_page_grid_span_width,
+ \c!width=\d_page_grd_span_width,
\c!indenting=,
\c!indentnext=\v!yes,
\c!default=\v!here,
\c!alternative=\v!a]
-\newdimen\d_page_grid_span_width
+\newdimen\d_page_grd_span_width
\unexpanded\def\startpagegridspan
- {\dotripleempty\page_grid_span_start}
+ {\dotripleempty\page_grd_span_start}
-\def\page_grid_span_start[#1][#2][#3]% [#3] gobbles space
+\def\page_grd_span_start[#1][#2][#3]% [#3] gobbles space
{\endgraf % else rubish output if forgotten
\synchronizepagegrid
\bgroup
@@ -652,9 +660,9 @@
\def\pagegridspanwidth#1% assumes equal distances
{\the\dimexpr
- #1\d_page_grid_column_width
- +#1\d_page_grid_distance
- - \d_page_grid_distance
+ #1\d_page_grd_column_width
+ +#1\d_page_grd_distance
+ - \d_page_grd_distance
\relax}
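The span width computed above is just n column widths plus the n-1 gaps between them; as a one-liner in plain Lua (hypothetical helper, assuming equal distances, as the macro does):

local function spanwidth(n,columnwidth,distance)
    return n*columnwidth + (n-1)*distance
end

print(spanwidth(3,100,10)) --> 320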
% areas
@@ -702,15 +710,15 @@
\to \everydefinepagegridarea
\unexpanded\def\setuppagegridareatext
- {\dodoubleargument\page_grid_set_area_text}
+ {\dodoubleargument\page_grd_set_area_text}
-\def\page_grid_set_area_text[#1][#2]%
+\def\page_grd_set_area_text[#1][#2]%
{\edef\currentpagegridarea{#1}%
\setpagegridareaparameter\c!text{#2}}
% maybe move the left/right correction to the tex end or the offset to lua
-\unexpanded\def\page_grid_set_area#1#2#3#4#5#6#7#8% can be optimized
+\unexpanded\def\page_grd_set_area#1#2#3#4#5#6#7#8% can be optimized
{\begingroup
\edef\currentpagegridarea{#2}%
\setpagegridareaparameter\c!width {#5\scaledpoint}%
@@ -753,7 +761,7 @@
\fi
\endgroup}
-\let\setpagegridarea\page_grid_set_area
+\let\setpagegridarea\page_grd_set_area
% state start | repeat
diff --git a/tex/context/base/mkiv/page-ffl.mkiv b/tex/context/base/mkiv/page-ffl.mkiv
new file mode 100644
index 000000000..5536371a7
--- /dev/null
+++ b/tex/context/base/mkiv/page-ffl.mkiv
@@ -0,0 +1,211 @@
+%D \module
+%D [ file=page-ffl,
+%D version=2018.01.04,
+%D title=\CONTEXT\ Page Macros,
+%D subtitle=Facing floats,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\writestatus{loading}{ConTeXt Page Macros / Facing floats}
+
+%D The code below comes from a module made for Thomas Schmitz and is now part of the
+%D core. A simple example is given here:
+%D
+%D \starttyping
+%D \definefacingfloat
+%D [whatever]
+%D
+%D \setupfacingfloat
+%D [whatever]
+%D [style=bold,
+%D color=white]
+%D
+%D \setupfacingfloat
+%D [whatever:left]
+%D [background=color,
+%D backgroundcolor=red]
+%D
+%D \setupfacingfloat
+%D [whatever:right]
+%D [background=color,
+%D backgroundcolor=green]
+%D
+%D \startfacingfloat[whatever]
+%D {\dorecurse{10}{\samplefile{tufte} }}
+%D {\dorecurse{10}{\samplefile{ward} }}
+%D {\dorecurse{10}{\samplefile{tufte} }}
+%D {\dorecurse{10}{\samplefile{ward} }}
+%D \stopfacingfloat
+%D
+%D \startfacingfloat[whatever]
+%D \startcontent \dorecurse{10}{\samplefile{tufte} } \stopcontent
+%D \startcontent \dorecurse{10}{\samplefile{ward} } \stopcontent
+%D \startcontent \dorecurse{10}{\samplefile{tufte} } \stopcontent
+%D \startcontent \dorecurse{10}{\samplefile{ward} } \stopcontent
+%D \stopfacingfloat
+%D
+%D \dorecurse{10}{\samplefile{sapolsky} }
+%D \stoptyping
+%D
+%D The idea is to flush related floats more or less in parallel.
+
+\unprotect
+
+\installcorenamespace {facingfloat}
+
+\installframedcommandhandler \??facingfloat {facingfloat} \??facingfloat
+
+\setupfacingfloat
+ [\c!spaceinbetween=\v!big,
+ \c!inbetween={\blank[\v!big]},
+ %\c!style,
+ %\c!color,
+ \c!page=\v!yes]
+
+\appendtoks
+ \ifx\currentfacingfloatparent\empty
+ \normalexpanded{\definefacingfloat[\currentfacingfloat:\v!left ][\currentfacingfloat]}%
+ \normalexpanded{\definefacingfloat[\currentfacingfloat:\v!right][\currentfacingfloat]}%
+ \fi
+\to \everydefinefacingfloat
+
+\newcount\c_strc_floats_saved
+\newcount\c_strc_floats_flushed
+
+\newbox\b_strc_floats_facing_l
+\newbox\b_strc_floats_facing_r
+
+\let\m_strc_floats_state\relax
+
+\def\strc_floats_facing_flush
+ {\ifnum\c_strc_floats_flushed<\c_strc_floats_saved
+ \global\advance\c_strc_floats_flushed\plusone
+ \floatingpenalty\zerocount
+ \insert\namedinsertionnumber\s!topfloat\bgroup
+ \forgetall
+ \ifconditional\c_page_one_top_of_insert
+ \ifconditional\c_page_one_correct_top_insert
+ \topskipcorrection % [xx] new: see icare topbleed
+ \kern-\lineskip
+ \par
+ \prevdepth\maxdimen
+ \fi
+ \fi
+ \directboxfromcache{\currentfacingfloat}{\number\c_strc_floats_flushed}%
+ \vskip\s_page_one_between_top_insert
+ \egroup
+ \ifnum\c_strc_floats_saved=\c_strc_floats_flushed
+ \global\c_strc_floats_saved \zerocount
+ \global\c_strc_floats_flushed\zerocount
+ \resetboxesincache{\currentfacingfloat}%
+ \fi
+ \fi}
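The flush step above injects one cached box per output routine run and resets the bookkeeping once everything has been flushed. A standalone Lua sketch of that saved/flushed counter pair (hypothetical names; the real code goes through the box cache and an insertion class):

local saved, flushed, cache = 0, 0, { }

local function put(b)      -- called while wrapping up the facing float
    saved = saved + 1
    cache[saved] = b
end

local function flushnext() -- called after each output routine
    if flushed < saved then
        flushed = flushed + 1
        local b = cache[flushed]
        if flushed == saved then
            saved, flushed, cache = 0, 0, { }
        end
        return b
    end
end

put("left 1") put("right 1")
print(flushnext()) --> left 1
print(flushnext()) --> right 1
print(flushnext()) --> nil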
+
+\unexpanded\def\strc_floats_facing_setup
+ {\edef\currentfacingfloat{\currentfacingfloat:\m_strc_floats_state}%
+ \usefacingfloatstyleandcolor\c!style\v!color}
+
+\unexpanded\def\strc_floats_facing_collect
+ {\ifx\m_strc_floats_state\v!left
+ \ifvoid\nextbox\else\ifzeropt\wd\nextbox\else
+ \ifvoid\b_strc_floats_facing_l
+ \setbox\b_strc_floats_facing_l\box\nextbox
+ \else
+ \setbox\b_strc_floats_facing_l\vbox\bgroup
+ \unvbox\b_strc_floats_facing_l
+ \facingfloatparameter\c!inbetween
+ \unvbox\nextbox
+ \egroup
+ \fi
+ \fi\fi
+ \let\m_strc_floats_state\v!right
+ \else\ifx\m_strc_floats_state\v!right
+ \ifvoid\nextbox\else\ifzeropt\wd\nextbox\else
+ \ifvoid\b_strc_floats_facing_r
+ \setbox\b_strc_floats_facing_r\box\nextbox
+ \else
+ \setbox\b_strc_floats_facing_r\vbox\bgroup
+ \unvbox\b_strc_floats_facing_r
+ \facingfloatparameter\c!inbetween
+ \unvbox\nextbox
+ \egroup
+ \fi
+ \fi\fi
+ \let\m_strc_floats_state\v!left
+ \else
+ \let\m_strc_floats_state\v!left
+ \fi\fi}
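The net effect of the collector above: successive content blocks end up alternating between the left and the right accumulator, starting at the left. A plain Lua sketch with hypothetical names:

local left, right, state = { }, { }, "left"

local function collect(block)
    if state == "left" then
        left[#left+1] = block
        state = "right"
    else
        right[#right+1] = block
        state = "left"
    end
end

for i=1,4 do collect("content " .. i) end
print(table.concat(left,", "))  --> content 1, content 3
print(table.concat(right,", ")) --> content 2, content 4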
+
+\unexpanded\def\strc_floats_facing_handle
+ {\strc_floats_facing_collect
+ \doifnextbgroupelse
+ \strc_floats_facing_handle_indeed
+ \strc_floats_wrap_up}
+
+\unexpanded\def\strc_floats_facing_handle_indeed
+ {\dowithnextboxcontent
+ \strc_floats_facing_setup
+ \strc_floats_facing_handle
+ \vbox}
+
+\unexpanded\def\startfacingfloat[#1]%
+ {\begingroup
+ % todo: \usepageparameter
+ \edef\p_page{\facingfloatparameter\c!page}%
+ \ifx\p_page\empty \else
+ \page[\p_page]%
+ \fi
+ %
+ \let\startcontent\bgroup
+ \let\stopcontent\egroup
+ \def\currentfacingfloat{#1}%
+ \strc_floats_facing_handle}
+
+\unexpanded\def\stopfacingfloat
+ {\endgroup}
+
+\unexpanded\def\strc_floats_wrap_up
+ {\edef\p_spaceinbetween{\facingfloatparameter\c!spaceinbetween}%
+ \ifx\p_spaceinbetween\empty
+ \scratchdimen\zeropoint
+ \else
+ \setbox\scratchbox\vbox{\directvspacing\p_spaceinbetween}%
+ \scratchdimen\htdp\scratchbox
+ \fi
+ \doloop{%
+ \strc_floats_facing_flush_wrap\b_strc_floats_facing_l\v!left
+ \strc_floats_facing_flush_wrap\b_strc_floats_facing_r\v!right
+ \ifvoid\b_strc_floats_facing_l\relax\ifvoid\b_strc_floats_facing_r\relax
+ \exitloop
+ \fi\fi}}
+
+\def\strc_floats_facing_flush_wrap#1#2%
+ {\ifvoid#1\relax
+ % todo
+ \else
+ \begingroup
+ \setbox\scratchbox\vsplit#1 upto \textheight
+ \setbox\scratchbox\hpack\bgroup
+ \edef\currentfacingfloat{\currentfacingfloat:#2}%
+ \inheritedfacingfloatframed{\box\scratchbox}%
+ \egroup
+ \ifdim\ht\scratchbox<\dimexpr\textheight-\scratchdimen\relax
+ \setbox\scratchbox\vbox{\box\scratchbox\directvspacing\p_spaceinbetween}%
+ \else
+ \setbox\scratchbox\vbox to \textheight{\box\scratchbox\vss}%
+ \fi
+ \global\advance\c_strc_floats_saved\plusone
+ \putboxincache{\currentfacingfloat}{\number\c_strc_floats_saved}\scratchbox
+ \endgroup
+ \fi}
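The wrap-up loop above repeatedly \vsplit's the accumulated left and right material to the text height and caches each chunk. The simplified Lua sketch below only shows the chunking idea, with plain numbers standing in for box heights (the real code also reserves room for the spaceinbetween glue):

local function chunk(heights,textheight)
    local pages, page, used = { }, { }, 0
    for i=1,#heights do
        local h = heights[i]
        if used + h > textheight and #page > 0 then
            pages[#pages+1] = page
            page, used = { }, 0
        end
        page[#page+1] = h
        used = used + h
    end
    if #page > 0 then
        pages[#pages+1] = page
    end
    return pages
end

for i,p in ipairs(chunk({ 30, 40, 50, 20, 60 },100)) do
    print(i,table.concat(p," + "))
end
--> 1  30 + 40
--> 2  50 + 20
--> 3  60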
+
+\appendtoks
+ \strc_floats_facing_flush
+\to \everyafteroutput
+
+\protect \endinput
diff --git a/tex/context/base/mkiv/page-flt.lua b/tex/context/base/mkiv/page-flt.lua
index 53780e420..41a35d47b 100644
--- a/tex/context/base/mkiv/page-flt.lua
+++ b/tex/context/base/mkiv/page-flt.lua
@@ -9,12 +9,17 @@ if not modules then modules = { } end modules ['page-flt'] = {
-- floats -> managers.floats
-- some functions are a tex/lua mix so we need a separation
+local next = next
+local tostring = tostring
local insert, remove = table.insert, table.remove
local find = string.find
+local abs = math.abs
-local trace_floats = false trackers.register("graphics.floats", function(v) trace_floats = v end) -- name might change
+local trace_floats = false trackers.register("floats.caching", function(v) trace_floats = v end)
+local trace_collecting = false trackers.register("floats.collecting", function(v) trace_collecting = v end)
-local report_floats = logs.reporter("structure","floats")
+local report_floats = logs.reporter("floats","caching")
+local report_collecting = logs.reporter("floats","collecting")
local C, S, P, lpegmatch = lpeg.C, lpeg.S, lpeg.P, lpeg.match
@@ -22,6 +27,7 @@ local C, S, P, lpegmatch = lpeg.C, lpeg.S, lpeg.P, lpeg.match
-- text page leftpage rightpage (todo: top, bottom, margin, order)
local setdimen = tex.setdimen
+local getdimen = tex.getdimen
local setcount = tex.setcount
local texsetbox = tex.setbox
local textakebox = nodes.takebox
@@ -78,14 +84,25 @@ function floats.pop()
end
end
-local function setdimensions(b)
- local w, h, d = 0, 0, 0
+local function setdimensions(t,b)
+ local bw, bh, bd = 0, 0, 0
+ local nw, nh, nd = 0, 0, 0
if b then
- w, h, d = b.width, b.height, b.depth
+ bw = b.width
+ bh = b.height
+ bd = b.depth
end
- setdimen("global","floatwidth", w)
- setdimen("global","floatheight", h+d)
- return w, h, d
+ if t then
+ nw = t.width or bw
+ nh = t.height or bh
+ nd = t.depth or bd
+ end
+ setdimen("global","floatwidth", bw)
+ setdimen("global","floatheight", bh+bd)
+ setdimen("global","naturalfloatwd", nw)
+ setdimen("global","naturalfloatht", nh)
+ setdimen("global","naturalfloatdp", nd)
+ return bw, bh, bd, nw, nh, nd
end
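A plain Lua sketch (hypothetical name, without the tex.setdimen calls) of the fallback implemented above: the stored natural size wins when present, otherwise the dimensions of the current box are used for both results.

local function dimensions(t,b)
    local bw, bh, bd = 0, 0, 0
    if b then
        bw, bh, bd = b.width, b.height, b.depth
    end
    local nw = t and t.width  or bw
    local nh = t and t.height or bh
    local nd = t and t.depth  or bd
    return bw, bh, bd, nw, nh, nd
end

print(dimensions({ width = 120 },{ width = 100, height = 40, depth = 10 }))
--> 100  40  10  120  40  10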
local function get(stack,n,bylabel)
@@ -115,9 +132,12 @@ function floats.save(which,data)
local stack = stacks[which]
noffloats = noffloats + 1
local t = {
- n = noffloats,
- data = data or { },
- box = b,
+ n = noffloats,
+ data = data or { },
+ width = getdimen("naturalfloatwd"),
+ height = getdimen("naturalfloatht"),
+ depth = getdimen("naturalfloatdp"),
+ box = b,
}
insert(stack,t)
-- inspect(stacks)
@@ -138,9 +158,11 @@ function floats.resave(which)
which = which or default
local stack = stacks[which]
local b = textakebox("floatbox")
+ if not b then
+ report_floats("resaved float is empty")
+ end
last.box = b
insert(stack,1,last)
--- inspect(stacks)
setcount("global","savednoffloats",#stacks[default])
if trace_floats then
report_floats("%s, category %a, number %a, slot %a width %p, height %p, depth %p","resaving",
@@ -155,15 +177,14 @@ end
function floats.flush(which,n,bylabel)
which = which or default
--- inspect(stacks)
local stack = stacks[which]
local t, b, n = get(stack,n or 1,bylabel)
if t then
if not b then
showmessage("floatblocks",1,t.n)
end
+ local w, h, d = setdimensions(t,b)
if trace_floats then
- local w, h, d = setdimensions(b) -- ?
report_floats("%s, category %a, number %a, slot %a width %p, height %p, depth %p","flushing",
which,t.n,n,w,h,d)
else
@@ -172,7 +193,7 @@ function floats.flush(which,n,bylabel)
texsetbox("floatbox",b)
last = remove(stack,n)
last.box = nil
- setcount("global","savednoffloats",#stacks[default]) -- default?
+ setcount("global","savednoffloats",#stacks[which]) -- default?
else
setdimensions()
end
@@ -183,7 +204,7 @@ function floats.consult(which,n)
local stack = stacks[which]
local t, b, n = get(stack,n)
if t then
- local w, h, d = setdimensions(b)
+ local w, h, d = setdimensions(t,b)
if trace_floats then
report_floats("%s, category %a, number %a, slot %a width %p, height %p, depth %p","consulting",
which,t.n,n,w,h,d)
@@ -198,16 +219,46 @@ function floats.consult(which,n)
end
function floats.collect(which,maxwidth,distance)
- which = which or default
- local stack = stacks[which]
- local n, m = #stack, 0
- for i=1,n do
+ local usedwhich = which or default
+ local stack = stacks[usedwhich]
+ local stacksize = #stack
+ local collected = 0
+ local maxheight = 0
+ local maxdepth = 0
+
+ local function register(rest,h,d)
+ collected = collected + 1
+ maxwidth = rest
+ if h > maxheight then
+ maxheight = h
+ end
+ if d > maxdepth then
+ maxdepth = d
+ end
+ end
+
+ for i=1,stacksize do
local t, b, n = get(stack,i)
if t then
- local w, h, d = setdimensions(b)
- if w + distance < maxwidth then
- m = m + 1
- maxwidth = maxwidth - w - distance
+ local w, h, d, nw = setdimensions(t,b)
+ -- we use the real width
+ w = nw
+ -- which could be an option
+ local rest = maxwidth - w - (1 == 1 and 0 or distance) -- the distance is effectively ignored here
+ local fits = rest > -10
+ if trace_collecting then
+ report_collecting("%s, category %a, number %a, slot %a width %p, rest %p, fit %a","collecting",
+ usedwhich,t.n,n,w,rest,fits)
+ end
+ if fits then
+ register(rest,h,d)
else
break
end
@@ -215,10 +266,8 @@ function floats.collect(which,maxwidth,distance)
break
end
end
- if m == 0 then
- m = 1
- end
- setcount("global","nofcollectedfloats",m)
+ setcount("global","nofcollectedfloats",collected)
+ setdimen("global","maxcollectedfloatstotal",maxheight+maxdepth)
end
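The collecting loop above takes floats in order for as long as their natural widths still fit into the available width, allowing a tiny negative slack (scaled points in the real code). A standalone sketch of just that fitting loop:

local function howmanyfit(widths,maxwidth)
    local collected = 0
    for i=1,#widths do
        local rest = maxwidth - widths[i]
        if rest > -10 then
            collected = collected + 1
            maxwidth  = rest
        else
            break
        end
    end
    return collected
end

print(howmanyfit({ 40, 30, 50 },100)) --> 2 (the third one no longer fits)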
function floats.getvariable(name,default)
diff --git a/tex/context/base/mkiv/page-flt.mkiv b/tex/context/base/mkiv/page-flt.mkiv
index c514496df..a06c90ec1 100644
--- a/tex/context/base/mkiv/page-flt.mkiv
+++ b/tex/context/base/mkiv/page-flt.mkiv
@@ -16,26 +16,43 @@
%D This module has code that previously was in other modules. There is
%D also float related code in \type {strc-flt.mkiv}.
-\registerctxluafile{page-flt}{1.001}
+\registerctxluafile{page-flt}{}
\unprotect
\ifdefined\s!topfloat \else \def\s!topfloat {topfloat} \fi
\ifdefined\s!bottomfloat \else \def\s!bottomfloat{bottomfloat} \fi
+\ifdefined\s!pagefloat \else \def\s!pagefloat {pagefloat} \fi
\defineinsertion[\s!topfloat]
\defineinsertion[\s!bottomfloat]
+\defineinsertion[\s!pagefloat]
\newdimen \d_page_floats_inserted_bottom
\newdimen \d_page_floats_inserted_top
+\newdimen \d_page_floats_inserted_page
\newcount \c_page_floats_n_of_top \c_page_floats_n_of_top \plustwo
\newcount \c_page_floats_n_of_bottom \c_page_floats_n_of_bottom\zerocount
+\newcount \c_page_floats_n_of_page \c_page_floats_n_of_page \plustwo
\newconstant\c_page_floats_insertions_topskip_mode % 1 = no topskip
-%def\page_floats_report_saved {\showmessage\m!floatblocks2{\the\totalnoffloats}}
-\def\page_floats_report_total {\showmessage\m!floatblocks4{\the\totalnoffloats}}
-\def\page_floats_report_flushed{\showmessage\m!floatblocks3{\the\numexpr\totalnoffloats-\savednoffloats\relax}}
+% \def\page_floats_report_saved
+% {\showmessage\m!floatblocks2
+% {\the\totalnoffloats}}
+
+\def\page_floats_report_total
+ {\showmessage\m!floatblocks4%
+ {\the\totalnoffloats
+ \ifx\floatlocationmethod\empty
+ \ifx\floatlocation\empty\else,\floatlocation\fi
+ \else
+ ,\floatlocationmethod
+ \fi}}
+
+\def\page_floats_report_flushed
+ {\showmessage\m!floatblocks3%
+ {\the\numexpr\totalnoffloats-\savednoffloats\relax}}
%D Extra float registers.
@@ -50,11 +67,12 @@
%D For the moment we keep this but they will become private too.
-\newcount\totalnoffloats % these will be redone ... handled at the lua end anyway
-\newcount\savednoffloats % these will be redone ... handled at the lua end anyway
-\newcount\nofcollectedfloats % communication channel
+\newcount\totalnoffloats % these will be redone ... handled at the lua end anyway
+\newcount\savednoffloats % these will be redone ... handled at the lua end anyway
+\newcount\nofcollectedfloats % communication channel
+\newdimen\maxcollectedfloatstotal % communication channel
-\newcount\noffloatinserts % these will be redone ... handled at the lua end anyway
+\newcount\noffloatinserts % these will be redone ... handled at the lua end anyway
\newbox \floattext
@@ -189,12 +207,14 @@
%D
%D First we reimplement some helpers.
+\def\page_floats_get_used_hsize{\hsize}
+
\unexpanded\def\page_floats_get
{\ifconditional\c_page_floats_some_waiting
\page_floats_flush\s!text\plusone
\ifconditional\c_page_floats_center_box
- \ifdim\wd\globalscratchbox<\hsize
- \global\setbox\floatbox\hpack to \hsize{\hss\box\floatbox\hss}%
+ \ifdim\wd\globalscratchbox<\page_floats_get_used_hsize
+ \global\setbox\floatbox\hpack to \page_floats_get_used_hsize{\hss\box\floatbox\hss}%
\else
% retain special alignments
\ifinsidecolumns
@@ -264,6 +284,8 @@
\doifnotinset\v!low\floatspecification\vfill}%
\page_otr_fill_and_eject_page}
+\let\m_page_otf_checked_page_float\relax
+
\unexpanded\def\page_floats_flush_page_floats % used in postpone
{\edef\m_page_otf_checked_page_float{\clf_checkedpagefloat}%
\ifx\m_page_otf_checked_page_float\empty
diff --git a/tex/context/base/mkiv/page-grd.mkiv b/tex/context/base/mkiv/page-grd.mkiv
deleted file mode 100644
index 4125eb46a..000000000
--- a/tex/context/base/mkiv/page-grd.mkiv
+++ /dev/null
@@ -1,106 +0,0 @@
-%D \module
-%D [ file=page-grd, % moved from page-ini
-%D version=2011.12.07, % 2000.10.20,
-%D title=\CONTEXT\ Page Macros,
-%D subtitle=Grids,
-%D author=Hans Hagen,
-%D date=\currentdate,
-%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
-%C
-%C This module is part of the \CONTEXT\ macro||package and is
-%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
-%C details.
-
-\writestatus{loading}{ConTeXt Page Macros / Grids}
-
-\unprotect
-
-\newconstant\c_page_grids_location
-\newconstant\c_page_grids_line_mode
-\newconstant\c_page_grids_lineno_mode
-\newconstant\c_page_grids_columns_mode
-
-\unexpanded\def\showgrid
- {\dosingleempty\page_grids_show}
-
-\def\page_grids_show[#1]%
- {\c_page_grids_location \plusone % downward compatible default
- \c_page_grids_line_mode \plusone
- \c_page_grids_lineno_mode \plusone
- \c_page_grids_columns_mode\plusone
- \processallactionsinset
- [#1]%
- [ \v!reset=>\c_page_grids_location \zerocount
- \c_page_grids_columns_mode\zerocount,
- \v!bottom=>\c_page_grids_location \plusone,
- \v!top=>\c_page_grids_location \plustwo,
- \v!none=>\c_page_grids_line_mode \zerocount,
- \v!all=>\c_page_grids_line_mode \plusone,
- \v!lines=>\c_page_grids_line_mode \plustwo,
- \v!frame=>\c_page_grids_line_mode \plusthree,
- \v!nonumber=>\c_page_grids_lineno_mode \zerocount,
- \v!right=>\c_page_grids_lineno_mode \plusone,
- \v!left=>\c_page_grids_lineno_mode \plustwo,
- \v!outer=>\c_page_grids_lineno_mode \plusthree,
- \v!columns=>\c_page_grids_columns_mode\plusone]% new option
- \ifcase\c_page_grids_location
- \let\page_grids_add_to_box\gobbleoneargument
- \else % 1=bottom 2=top
- \let\page_grids_add_to_box\page_grids_add_to_box_indeed
- \fi
- \ifcase\c_page_grids_columns_mode
- \let\page_grids_add_to_one\gobbleoneargument
- \let\page_grids_add_to_mix\gobbleoneargument
- \else
- \let\page_grids_add_to_one\page_grids_add_to_one_indeed
- \let\page_grids_add_to_mix\page_grids_add_to_mix_indeed
- \fi}
-
-% if really needed for speed we can cache the grid
-
-\let\page_grids_add_to_box\gobbleoneargument
-\let\page_grids_add_to_one\gobbleoneargument
-\let\page_grids_add_to_mix\gobbleoneargument
-
-\def\page_grids_add_to_box_indeed#1% to be checked for color and layer ..... use mp
- {\startcolor[layout:grid]%
- \resetvisualizers
- \gridboxlinemode \c_page_grids_line_mode
- \gridboxlinenomode\c_page_grids_lineno_mode
- \setgridbox\scratchbox\makeupwidth\textheight % todo: check color
- \global\setbox#1\hbox % global ?
- {\ifcase\c_page_grids_location\or\or\box#1\hskip-\makeupwidth\fi
- \begingroup % color
- \ifcase\layoutcolumns\else
- \gray
- \setlayoutcomponentattribute{\v!grid:\v!columns}%
- \hbox \layoutcomponentboxattribute to \makeupwidth
- {\dorecurse\layoutcolumns
- {\hskip\layoutcolumnwidth
- \ifnum\recurselevel<\layoutcolumns
- \vrule
- \s!height\ht\scratchbox
- \s!depth \dp\scratchbox
- \s!width \layoutcolumndistance
- \fi}}%
- \hskip-\makeupwidth
- \fi
- \setlayoutcomponentattribute{\v!grid:\v!lines}%
- \hbox \layoutcomponentboxattribute{\box\scratchbox}%
- \endgroup
- \ifcase\c_page_grids_location\or\hskip-\makeupwidth\box#1\fi}%
- \stopcolor}
-
-\def\page_grids_add_to_one_indeed#1%
- {\begingroup
- \resetvisualizers
- \global\setbox#1\vpack{\backgroundline[layout:one]{\box#1}}%
- \endgroup}
-
-\def\page_grids_add_to_mix_indeed#1%
- {\begingroup
- \resetvisualizers
- \global\setbox#1\vpack{\backgroundline[layout:mix]{\box#1}}%
- \endgroup}
-
-\protect \endinput
diff --git a/tex/context/base/mkiv/page-ini.lua b/tex/context/base/mkiv/page-ini.lua
new file mode 100644
index 000000000..17723c421
--- /dev/null
+++ b/tex/context/base/mkiv/page-ini.lua
@@ -0,0 +1,203 @@
+if not modules then modules = { } end modules ['page-ini'] = {
+ version = 1.001,
+ comment = "companion to page-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local tonumber, rawget, type, next = tonumber, rawget, type, next
+local match = string.match
+local sort, tohash, insert, remove = table.sort, table.tohash, table.insert, table.remove
+local settings_to_array, settings_to_hash = utilities.parsers.settings_to_array, utilities.parsers.settings_to_hash
+
+local texgetcount = tex.getcount
+
+local context = context
+local ctx_testcase = commands.testcase
+
+local data = table.setmetatableindex("table")
+local last = 0
+local pages = structures.pages
+local autolist = { }
+local report = logs.reporter("pages","mark")
+
+local trace = false trackers.register("pages.mark",function(v) trace = v end)
+
+function pages.mark(name,list)
+ local realpage = texgetcount("realpageno")
+ if not list or list == "" then
+ if trace then
+ report("marking current page %i as %a",realpage,name)
+ end
+ data[realpage][name] = true
+ return
+ end
+ if type(list) == "string" then
+ list = settings_to_array(list)
+ end
+ if type(list) == "table" then
+ for i=1,#list do
+ local page = list[i]
+ local sign = false
+ if type(page) == "string" then
+ local s, p = match(page,"([%+%-])(%d+)")
+ if s then
+ sign, page = s, p
+ end
+ end
+ page = tonumber(page)
+ if page then
+ if sign == "+" then
+ page = realpage + page
+ end
+ if sign == "-" then
+ report("negative page numbers are not supported")
+ else
+ if trace then
+ report("marking page %i as %a",page,name)
+ end
+ data[page][name] = true
+ end
+ end
+ end
+ else
+ if trace then
+ report("marking current page %i as %a",realpage,name)
+ end
+ data[realpage][name] = true
+ end
+end
+
+function pages.marked(name)
+ local realpage = texgetcount("realpageno")
+ for i=last,realpage-1 do
+ data[i] = nil
+ end
+ local pagedata = rawget(data,realpage)
+ return pagedata and pagedata[name]
+end
+
+local function toranges(marked)
+ local list = { }
+ local size = #marked
+ if size > 0 then
+ local first = marked[1]
+ local last = first
+ for i=2,size do
+ local page = marked[i]
+ if page > last + 1 then
+ list[#list+1] = { first, last }
+ first = page
+ end
+ last = page
+ end
+ list[#list+1] = { first, last }
+ end
+ return list
+end
+
+pages.toranges = toranges
+
+local function allmarked(list)
+ if list then
+ local collected = pages.collected
+ if collected then
+ if type(list) == "string" then
+ list = settings_to_hash(list)
+ elseif type(list) == "table" and #list > 0 then
+ list = tohash(list)
+ end
+ if type(list) == "table" then
+ local found = { }
+ for name in next, list do
+ for page, list in next, data do
+ if list[name] and collected[page] then
+ found[#found+1] = page
+ end
+ end
+ end
+ if #found > 0 then
+ sort(found)
+ if trace then
+ local ranges = toranges(found)
+ for i=1,#ranges do
+ local range = ranges[i]
+ local first = range[1]
+ local last = range[2]
+ if first == last then
+ report("marked page : %i",first)
+ else
+ report("marked range: %i upto %i",first,last)
+ end
+ end
+ end
+ return found
+ end
+ end
+ end
+ end
+end
+
+pages.allmarked = allmarked
+
+-- An alternative is to use an attribute and identify the state by parsing the node
+-- list but that's a bit overkill for a hardly used feature like this.
+
+luatex.registerpageactions(function()
+ local nofauto = #autolist
+ if nofauto > 0 then
+ local realpage = texgetcount("realpageno")
+ for i=1,nofauto do
+ local names = autolist[i]
+ for j=1,#names do
+ local name = names[j]
+ data[realpage][name] = true
+ if trace then
+ report("automatically marking page %i as %a",realpage,name)
+ end
+ end
+ end
+ end
+end)
+
+interfaces.implement {
+ name = "markpage",
+ arguments = { "string", "string" },
+ actions = pages.mark
+}
+
+interfaces.implement {
+ name = "doifelsemarkedpage",
+ arguments = "string",
+ actions = { pages.marked, ctx_testcase }
+}
+
+interfaces.implement {
+ name = "markedpages",
+ arguments = "string",
+ actions = function(name)
+ local t = allmarked(name)
+ if t then
+ context("%,t",t)
+ end
+ end
+}
+
+interfaces.implement {
+ name = "startmarkpages",
+ arguments = "string",
+ actions = function(name)
+ insert(autolist,settings_to_array(name))
+ end
+}
+
+interfaces.implement {
+ name = "stopmarkpages",
+ arguments = "string",
+ actions = function(name)
+ if #autolist > 0 then
+ remove(autolist)
+ end
+ end
+}
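For reference, the range collapsing that the new pages.toranges helper performs can be tried in isolation; this is a plain Lua sketch outside the ConTeXt namespaces, and the page numbers in the demo call are made up for illustration:

    -- same logic as pages.toranges above, kept free of ConTeXt dependencies
    local function toranges(marked)
        local list = { }
        local size = #marked
        if size > 0 then
            local first = marked[1]
            local last  = first
            for i=2,size do
                local page = marked[i]
                if page > last + 1 then
                    list[#list+1] = { first, last }
                    first = page
                end
                last = page
            end
            list[#list+1] = { first, last }
        end
        return list
    end

    -- { 1, 2, 3, 7, 9, 10 } collapses to { {1,3}, {7,7}, {9,10} }
    for _, range in ipairs(toranges { 1, 2, 3, 7, 9, 10 }) do
        print(range[1], range[2])
    end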
diff --git a/tex/context/base/mkiv/page-ini.mkiv b/tex/context/base/mkiv/page-ini.mkiv
index 6f6cb7180..dc94edf76 100644
--- a/tex/context/base/mkiv/page-ini.mkiv
+++ b/tex/context/base/mkiv/page-ini.mkiv
@@ -13,6 +13,8 @@
\writestatus{loading}{ConTeXt Page Macros / Initializations}
+\registerctxluafile{page-ini}{}
+
%D The \type {\processpage} command has become obsolete. It's original
%D purpose was to flush only parts of a document but nowadays we have
%D project structure and modes. This is just one of those very early
@@ -34,7 +36,7 @@
\newconditional\c_otr_shipout_enabled \settrue\c_otr_shipout_enabled
-\unexpanded\def\page_otr_shipout
+\unexpanded\def\page_otr_shipout % not used at all
{\ifconditional\c_otr_shipout_enabled
\expandafter\page_otr_shipout_yes
\else
@@ -65,17 +67,7 @@
\def\page_otr_flush_all_floats
{%\flushnotes already done
\ifconditional\c_page_floats_some_waiting
- \begingroup
- \c_page_floats_n_of_top\plusthousand
- \c_page_floats_n_of_bottom\zerocount
- % this is needed in case a float that has been stored
- % ends up at the current page; this border case occurs when
- % the calculated room is 'eps' smaller that the room available
- % when just flushing; so now we have (maybe optional):
- \pagebaselinecorrection % hm, needs checking, not needed when no floats
- % alas, this is tricky but needed (first surfaced in prikkels)
- \page_otr_command_flush_floats
- \endgroup
+ \page_otr_command_flush_all_floats
\fi}
\def\page_otr_insert_dummy_page
@@ -88,13 +80,57 @@
\fi}
\def\page_otr_flush_pending_content
- {\vskip\zeropoint\relax % brrr .. get rid of this
+ {\vskip\zeropoint\relax
\ifvoid\normalpagebox \else
\unvbox\normalpagebox
\penalty\outputpenalty
\fi}
-\def\page_otr_construct_and_shipout#1#2%
+\newconstant\c_page_postponed_mode % \c_page_postponed_mode\plusone
+\newbox \b_page_otr_saved
+
+\installoutputroutine\savepagecontent
+ {\global\setbox\b_page_otr_saved\box\normalpagebox}
+
+\unexpanded\def\flushpagecontent
+ {\ifvoid\b_page_otr_saved\else\unvbox\b_page_otr_saved\fi}
+
+% Test case: assumes one group level for output routine, todo:
+% a toks.
+%
+% \starttext
+%
+% \startbuffer[makeup]
+% \startpagemakeup[pagestate=start,page=no]
+% \color[red]{
+% \vfill
+% \dorecurse{3}{
+% \samplefile{klein}
+% \vfill
+% }
+% }
+% \stoppagemakeup
+% \stopbuffer
+%
+% \startcolor[green]
+% \dorecurse{10}{
+% \dontleavehmode{\bf 1:#1:} \samplefile{sapolsky}\par
+% }
+% \stopcolor
+%
+% \startpostponing % [+3]
+% \getbuffer[makeup]
+% \stoppostponing
+%
+% \startcolor[blue]
+% \dorecurse{25}{
+% \dontleavehmode{\bf 2:#1:} \samplefile{sapolsky}\par
+% }
+% \stopcolor
+%
+% \stoptext
+
+\def\page_otr_construct_and_shipout#1#2#3%
{\forgetall
\page_boxes_shipout{\page_boxes_constructed_page#1#2}% \hbox removed
\page_otr_flush_pending_content
@@ -110,7 +146,16 @@
\page_otr_check_for_pending_inserts
\page_floats_flush_page_floats % before postponed blocks
\page_spread_flush % defined later
- \page_postponed_blocks_flush}
+ \ifnum#3=\plusone
+ % this is tricky! we need to make sure we are in the output group
+ \ifnum\c_page_postponed_mode=\plusone
+ \aftergroup\page_postponed_blocks_flush
+ \else
+ \page_postponed_blocks_flush
+ \fi
+ \else
+ \page_postponed_blocks_flush
+ \fi}
% Can't we get rid of this hackery? It's used in some widgets
% stuff so probably not.
@@ -260,11 +305,49 @@
% \appendtoks
% \ifvoid\thispageinsert\else\hbox{\smashedbox\thispageinsert}\fi
% \to \everyshipout
+
+% not yet in i-* file
+
+\installcorenamespace{markedpage}
+
+\unexpanded\def\markpage
+ {\dodoubleempty\page_mark}
+
+\def\page_mark[#1][#2]%
+ {\clf_markpage{#1}{#2}}
+
+\def\markedpages[#1]% expandable
+ {\clf_markedpages{#1}}
+
+\unexpanded\def\doifelsemarkedpage#1%
+ {\clf_doifelsemarkedpage{#1}}
+
+\unexpanded\def\startmarkpages[#1]%
+ {\clf_startmarkpages{#1}}
+
+\unexpanded\def\stopmarkpages
+ {\clf_stopmarkpages}
+
+%D Experimental:
+
+\newconstant\c_page_force_strut_depth_trace_mode
+
+\installtextracker
+ {otr.forcestrutdepth}
+ {\c_page_force_strut_depth_trace_mode\plusone}
+ {\c_page_force_strut_depth_trace_mode\zerocount}
+
+\installoutputroutine\forcestrutdepth
+ {\clf_forcestrutdepth\normalpagebox\strutdp\c_page_force_strut_depth_trace_mode
+ \unvbox\normalpagebox}
+
+% maybe better:
%
-% %D Idea:
+% \installoutputroutine\doforcestrutdepth
+% {\clf_forcestrutdepth\normalpagebox\strutdp\c_page_force_strut_depth_trace_mode
+% \unvbox\normalpagebox}
%
-% \definemarkedpage[nobackgrounds]
-% \markpage[nobackgrounds]
-% \doifmarkedpageelse{nobackgrounds}
+% \unexpanded\def\forcestrutdepth
+% {\par\ifvmode\ifinner\else\doforcestrutdepth\fi\fi}
\protect \endinput
diff --git a/tex/context/base/mkiv/page-inj.mkvi b/tex/context/base/mkiv/page-inj.mkvi
index 03472fe55..e03569f23 100644
--- a/tex/context/base/mkiv/page-inj.mkvi
+++ b/tex/context/base/mkiv/page-inj.mkvi
@@ -20,7 +20,7 @@
\unprotect
-\registerctxluafile{page-inj}{1.001}
+\registerctxluafile{page-inj}{}
\installcorenamespace {pageinjection}
\installcorenamespace {pageinjectionalternative}
diff --git a/tex/context/base/mkiv/page-ins.mkiv b/tex/context/base/mkiv/page-ins.mkiv
index 09ba70b70..3b473343a 100644
--- a/tex/context/base/mkiv/page-ins.mkiv
+++ b/tex/context/base/mkiv/page-ins.mkiv
@@ -17,7 +17,7 @@
%D page builder. When multiple footnote classes were introduced, I decided to
%D isolate some of the functionality in a module.
-\registerctxluafile{page-ins}{1.001}
+\registerctxluafile{page-ins}{}
\unprotect
diff --git a/tex/context/base/mkiv/page-lay.mkiv b/tex/context/base/mkiv/page-lay.mkiv
index f2e39c660..7b80dadb9 100644
--- a/tex/context/base/mkiv/page-lay.mkiv
+++ b/tex/context/base/mkiv/page-lay.mkiv
@@ -125,7 +125,7 @@
\installswitchcommandhandler \??layout {layout} \??layout
\appendtoks
- \doifnothing{\strictlayoutparameter\c!state}{\letlayoutparameter\c!state\v!start}%
+ \doifnothing{\directlayoutparameter\c!state}{\letlayoutparameter\c!state\v!start}%
\to \everydefinelayout
\appendtoks
@@ -473,11 +473,23 @@
[\page_paper_the_paper_size{#1}]%
[\page_paper_the_print_size{#2}]}}
-\setvalue{\??layoutpaper\v!reset }{\global\setfalse\c_page_target_paper_landscape
- \global\setfalse\c_page_target_paper_mirror
- \global\setfalse\c_page_target_paper_negate
- \global\c_page_target_paper_orientation\uprotationangle
- \global\c_page_target_paper_reverse \uprotationangle}
+\unexpanded\def\page_paper_reset_paper
+ {\global\setfalse\c_page_target_paper_landscape
+ \global\setfalse\c_page_target_paper_mirror
+ \global\setfalse\c_page_target_paper_negate
+ \global\c_page_target_paper_orientation\uprotationangle
+ \global\c_page_target_paper_reverse \uprotationangle}
+
+\unexpanded\def\page_paper_reset_print
+ {\global\setfalse\c_page_target_print_landscape
+ \global\setfalse\c_page_target_print_mirror
+ \global\setfalse\c_page_target_print_negate
+ \global\c_page_target_print_orientation\uprotationangle
+ \global\c_page_target_print_reverse \uprotationangle}
+
+\letvalue{\??layoutpaper\v!reset}\page_paper_reset_paper
+\letvalue{\??layoutprint\v!reset}\page_paper_reset_print
+
\setvalue{\??layoutpaper\v!landscape }{\global\settrue\c_page_target_paper_landscape}
\setvalue{\??layoutpaper\v!mirrored }{\global\settrue\c_page_target_paper_mirror}
\setvalue{\??layoutpaper\v!negative }{\global\settrue\c_page_target_paper_negate}
@@ -490,11 +502,6 @@
\setvalue{\??layoutpaper\number\leftrotationangle }{\global\c_page_target_paper_orientation\leftrotationangle
\global\c_page_target_paper_reverse \rightrotationangle}
-\setvalue{\??layoutprint\v!reset }{\global\setfalse\c_page_target_print_landscape
- \global\setfalse\c_page_target_print_mirror
- \global\setfalse\c_page_target_print_negate
- \global\c_page_target_print_orientation\uprotationangle
- \global\c_page_target_print_reverse \uprotationangle}
\setvalue{\??layoutprint\v!landscape }{\global\settrue\c_page_target_print_landscape}
\setvalue{\??layoutprint\v!mirrored }{\global\settrue\c_page_target_print_mirror}
\setvalue{\??layoutprint\v!negative }{\global\settrue\c_page_target_print_negate}
@@ -519,18 +526,20 @@
\fi}
\unexpanded\def\page_paper_set_current_indeed[#1][#2]%
- {\edef\m_page_asked_paper{\v!reset,#1}% can be the restores
- \edef\m_page_asked_print{\v!reset,#2}%
+ {\edef\m_page_asked_paper{#1}% can be the restores
+ \edef\m_page_asked_print{#2}%
%
\page_paper_set_restore\m_page_asked_paper\m_page_asked_print
%
% locate page target
\let\currentlayouttarget\empty
+ \page_paper_reset_paper
\processcommacommand[\m_page_asked_paper]\page_paper_identify_target
\ifx\currentlayouttarget\empty
\let\currentlayouttarget\currentpage
\fi
\global\let\papersize\currentlayouttarget
+ \page_paper_reset_paper
\processcommacommand[\m_page_asked_paper]\page_paper_handle_page_option
\global\paperwidth \layouttargetparameter\c!width \relax
\global\paperheight\layouttargetparameter\c!height\relax
@@ -550,8 +559,10 @@
%
\page_paper_set_offsets
% locate paper target
+ \page_paper_reset_print
\processcommacommand[\m_page_asked_print]\page_paper_identify_target
\global\let\printpapersize\currentlayouttarget
+ \page_paper_reset_print
\processcommacommand[\m_page_asked_print]\page_paper_handle_print_option
\global\printpaperwidth \layouttargetparameter\c!width \relax
\global\printpaperheight\layouttargetparameter\c!height\relax
@@ -948,7 +959,7 @@
{\setgvalue{\??layoutalternative#1}{#2}}
\def\page_boxes_construct_content_default#1#2#3% targetbox flusher box
- {\setbox#1\vbox
+ {\setbox#1\vpack % was \vbox
{\offinterlineskip
\begingroup % needed ?
\uselayoutstyleandcolor\c!style\c!color
@@ -1114,7 +1125,7 @@
\setsystemmode\v!footer
\fi}
-\def\calculatevsizes % name will change
+\unexpanded\def\calculatevsizes % name will change
{\textheight\makeupheight
\doifelselayoutsomeline\v!header\compensatevsizeheader\donothing
\doifelselayoutsomeline\v!footer\compensatevsizefooter\donothing
@@ -1122,14 +1133,14 @@
\resetglobal
\page_otr_command_set_vsize}
-\def\calculateglobalvsizes % name will change
+\unexpanded\def\calculateglobalvsizes % name will change
{\global\textheight\makeupheight
\doifelselayoutsomeline\v!header\globalcompensatevsizeheader\donothing
\doifelselayoutsomeline\v!footer\globalcompensatevsizefooter\donothing
\page_layouts_set_modes
\page_otr_command_set_vsize}
-\def\calculatereducedvsizes % name will change
+\unexpanded\def\calculatereducedvsizes % name will change
{\textheight\makeupheight
\doifelselayoutsomeline\v!header\compensatevsizeheader\compensatevsizeheaderzero
\doifelselayoutsomeline\v!footer\compensatevsizefooter\compensatevsizefooterzero}
@@ -1142,7 +1153,7 @@
\def\compensatedinnermakeupmargin
{\dimexpr\ifconditional\innermakeupcompensation+\innermakeupmargin\else\zeropoint\fi\relax}
-\def\freezetextwidth % name will change % \makeupwidth may be set to \textwidth
+\unexpanded\def\freezetextwidth % name will change % \makeupwidth may be set to \textwidth
{\textwidth\makeupwidth % which is a tricky but valid value
\edef\m_currentlayout_text_width {\layoutparameter\c!textwidth }%
\edef\m_currentlayout_text_margin{\layoutparameter\c!textmargin}%
@@ -1159,7 +1170,7 @@
\global\advance\innermakeupwidth-\scratchdimen
\advance\textwidth-\scratchdimen} % local
-\def\calculatehsizes % name will change
+\unexpanded\def\calculatehsizes % name will change
{\freezetextwidth
\page_otr_command_set_hsize}
@@ -1343,12 +1354,18 @@
%D \showsetup{showlayout}
%D \showsetup{showsetups}
%D %showsetup{showmargins}
+%D %showsetup{showgrid}
-\fetchruntimecommand \showprint {page-run}
-\fetchruntimecommand \showframe {page-run}
-\fetchruntimecommand \showlayout {page-run}
-\fetchruntimecommand \showsetups {page-run}
-\fetchruntimecommand \showmargins {page-run}
+\fetchruntimecommand \showprint \f!page_run
+\fetchruntimecommand \showframe \f!page_run
+\fetchruntimecommand \showlayout \f!page_run
+\fetchruntimecommand \showsetups \f!page_run
+\fetchruntimecommand \showmargins \f!page_run
+\fetchruntimecommand \showgrid \f!page_run
+
+\glet\page_grids_add_to_box\gobbleoneargument
+\glet\page_grids_add_to_one\gobbleoneargument
+\glet\page_grids_add_to_mix\gobbleoneargument
%D The default dimensions are quite old and will not change.
%D The funny fractions were introduced when we went from fixed
@@ -1590,8 +1607,8 @@
\definepapersize
[doubleoversized]
- [ \c!width=\dimexpr \paperheight+\layouttargetparameter\c!distance\relax,
- \c!height=\dimexpr2\paperwidth +\layouttargetparameter\c!distance\relax]
+ [ \c!width=\dimexpr \paperwidth +\layouttargetparameter\c!distance\relax,
+ \c!height=\dimexpr2\paperheight+\layouttargetparameter\c!distance\relax]
\definepapersize
[doublewide]
@@ -1604,7 +1621,7 @@
\setuppapersize
[A4] % [samesized]
-%D A few goodies:
+%D A few goodies, first a full page layout:
\definelayout
[\v!page]
@@ -1631,9 +1648,24 @@
\c!lines=0,
\c!grid=\v!no]
+%D A quick and dirty one:
+
\definelayout
[\v!middle]
[\c!width=\v!middle,
\c!height=\v!middle]
+%D One for testing examples (5mm is often the non printable area):
+
+\definelayout
+ [\v!tight]
+
+\definelayout
+ [\v!tight]
+ [\v!page]
+ [\c!backspace=5mm,
+ \c!cutspace=5mm,
+ \c!topspace=5mm,
+ \c!bottomspace=5mm]
+
\protect \endinput
diff --git a/tex/context/base/mkiv/page-lin.lua b/tex/context/base/mkiv/page-lin.lua
index 8ec4ba5df..a6b6a12c4 100644
--- a/tex/context/base/mkiv/page-lin.lua
+++ b/tex/context/base/mkiv/page-lin.lua
@@ -71,7 +71,6 @@ local getattr = nuts.getattr
local setattr = nuts.setattr
local getlist = nuts.getlist
local getbox = nuts.getbox
-local getfield = nuts.getfield
----- getdir = nuts.getdir
----- getwidth = nuts.getwidth
local getheight = nuts.getheight
@@ -80,8 +79,6 @@ local getdepth = nuts.getdepth
local setprop = nuts.setprop
local getprop = nuts.getprop
-local setfield = nuts.setfield
-
local traverse_id = nuts.traverse_id
local traverse = nuts.traverse
local copy_node = nuts.copy
diff --git a/tex/context/base/mkiv/page-lin.mkvi b/tex/context/base/mkiv/page-lin.mkvi
index 2692087cc..4348d6770 100644
--- a/tex/context/base/mkiv/page-lin.mkvi
+++ b/tex/context/base/mkiv/page-lin.mkvi
@@ -37,7 +37,7 @@
%
% some line
-\registerctxluafile{page-lin}{1.001}
+\registerctxluafile{page-lin}{}
\definesystemattribute[linenumber] [public]
\definesystemattribute[linereference][public]
diff --git a/tex/context/base/mkiv/page-mak.mkvi b/tex/context/base/mkiv/page-mak.mkvi
index bbb7cc148..d64000542 100644
--- a/tex/context/base/mkiv/page-mak.mkvi
+++ b/tex/context/base/mkiv/page-mak.mkvi
@@ -201,6 +201,23 @@
\emptyhbox
\page}
+%D Probably obsolete (but used in one manual by Taco):
+
+\unexpanded\def\startcolumnmakeup % don't change
+ {\bgroup
+ \getrawnoflines\textheight % raw as we can have topskip
+ \setbox\scratchbox\vbox to \dimexpr\noflines\lineheight-\lineheight+\topskip\relax
+ \bgroup
+ \forgetall}
+
+\unexpanded\def\stopcolumnmakeup
+ {\egroup
+ \dp\scratchbox\zeropoint
+ \wd\scratchbox\textwidth
+ \box\scratchbox
+ \egroup
+ \page_otr_command_synchronize_hsize}
+
%D The text surrounding the main body text can be influenced
%D by setting their associated status variables. The
%D connection between them is made by the following macro
diff --git a/tex/context/base/mkiv/page-mix.lua b/tex/context/base/mkiv/page-mix.lua
index 524181c8e..107ac1410 100644
--- a/tex/context/base/mkiv/page-mix.lua
+++ b/tex/context/base/mkiv/page-mix.lua
@@ -8,8 +8,6 @@ if not modules then modules = { } end modules ["page-mix"] = {
-- inserts.getname(name)
--- getfield(l,"head") -> getlist
-
-- local node, tex = node, tex
-- local nodes, interfaces, utilities = nodes, interfaces, utilities
-- local trackers, logs, storage = trackers, logs, storage
@@ -19,13 +17,15 @@ if not modules then modules = { } end modules ["page-mix"] = {
local next, type = next, type
local concat = table.concat
-local ceil, floor = math.ceil, math.floor
+local ceil = math.ceil
local trace_state = false trackers.register("mixedcolumns.trace", function(v) trace_state = v end)
local trace_detail = false trackers.register("mixedcolumns.detail", function(v) trace_detail = v end)
local report_state = logs.reporter("mixed columns")
+local context = context
+
local nodecodes = nodes.nodecodes
local hlist_code = nodecodes.hlist
@@ -47,7 +47,6 @@ local flushnode = nuts.flush
local concatnodes = nuts.concat
local slidenodes = nuts.slide -- ok here as we mess with prev links intermediately
-local setfield = nuts.setfield
local setlink = nuts.setlink
local setlist = nuts.setlist
local setnext = nuts.setnext
@@ -57,7 +56,6 @@ local setwhd = nuts.setwhd
local setheight = nuts.setheight
local setdepth = nuts.setdepth
-local getfield = nuts.getfield
local getnext = nuts.getnext
local getprev = nuts.getprev
local getid = nuts.getid
@@ -71,6 +69,8 @@ local getpenalty = nuts.getpenalty
local getwidth = nuts.getwidth
local getheight = nuts.getheight
local getdepth = nuts.getdepth
+local traverse_id = nuts.traverse_id
+local traverse = nuts.traverse
local theprop = nuts.theprop
@@ -123,7 +123,7 @@ local function collectinserts(result,nxt,nxtid)
if nxtid == insert_code then
i = i + 1
result.i = i
- inserttotal = inserttotal + getheight(nxt) -- height includes depth
+ inserttotal = inserttotal + getheight(nxt) -- height includes depth (hm, still? needs checking)
local s = getsubtype(nxt)
local c = inserts[s]
if trace_detail then
@@ -267,6 +267,7 @@ local function preparesplit(specification) -- a rather large function
local height = 0
local depth = 0
local skip = 0
+ local handlenotes = specification.notes or false
local splitmethod = specification.splitmethod or false
if splitmethod == v_none then
splitmethod = false
@@ -278,10 +279,14 @@ local function preparesplit(specification) -- a rather large function
if nofcolumns == 0 then
nofcolumns = 1
end
- local preheight = specification.preheight or 0
- local extra = specification.extra or 0
- local maxheight = specification.maxheight
- local optimal = originalheight/nofcolumns
+ local preheight = specification.preheight or 0
+ local extra = specification.extra or 0
+ local maxheight = specification.maxheight
+ local optimal = originalheight/nofcolumns
+ local noteheight = specification.noteheight or 0
+
+ maxheight = maxheight - noteheight
+
if specification.balance ~= v_yes then
optimal = maxheight
end
@@ -630,7 +635,7 @@ local function preparesplit(specification) -- a rather large function
if trace_state then
report_state("%-8s > column %s, content: %s","line (1)",column,listtoutf(getlist(current),true,true))
end
- if more then
+ if more and handlenotes then
nxt, inserts, insertskips, nextskips, inserttotal = collectinserts(result,nxt,nxtid)
end
local state, skipped = checked(advance+inserttotal+insertskips,more and "line (2)" or "line only",lastlocked)
@@ -753,7 +758,7 @@ local function preparesplit(specification) -- a rather large function
specification.overflow = overflow
specification.discarded = discarded
- setlist(getbox(specification.box),nil)
+ setlist(getbox(specification.box))
return specification
end
@@ -776,9 +781,9 @@ local function finalize(result)
end
local t = r.tail
if t then
- setnext(t,nil)
+ setnext(t)
else
- setnext(h,nil)
+ setnext(h)
r.tail = h
end
for c, list in next, r.inserts do
@@ -786,11 +791,13 @@ local function finalize(result)
for i=1,#list do
local l = list[i]
local h = new_hlist()
+ local g = getlist(l)
t[i] = h
- setlist(h,getlist(l))
- local wd, ht, dp = getwhd(l)
- -- here ht is still ht + dp !
- setwhd(h,getwidth(h),ht,dp)
+ setlist(h,g)
+ local ht = getheight(l)
+ local dp = getdepth(l)
+ local wd = getwidth(g)
+ setwhd(h,wd,ht,dp)
setlist(l)
end
setprev(t[1]) -- needs checking
@@ -822,6 +829,23 @@ local function report_deltas(result,str)
report_state("%s, cycles %s, deltas % | t",str,result.cycle or 1,t)
end
+-- local function xxcollectinserts(h)
+-- local skips, total, order = 0, 0, 0
+-- print(h)
+-- if h then
+-- h = getlist(h)
+-- for n in traverse(h) do
+-- print(tonode(n))
+-- end
+-- for n in traverse_id(insert_code,h) do
+-- order = order + 1
+-- total = total + getheight(n)
+-- skips = skips + structures.notes.check_spacing(getsubtype(n),order)
+-- end
+-- end
+-- return skips, total
+-- end
+
local function setsplit(specification)
splitruns = splitruns + 1
if trace_state then
@@ -910,7 +934,6 @@ local function getsplit(result,n)
return s
end
end
-
if grid then
-- print(n,result.maxtotal,r.total,r.extra)
if isglobal then
@@ -962,13 +985,11 @@ local function getsplit(result,n)
end
for c, list in next, r.inserts do
-
local l = concatnodes(list)
for i=1,#list-1 do
setdepth(list[i],0)
end
local b = vpack(l) -- multiple arguments, todo: fastvpack
-
-- setbox("global",c,b)
setbox(c,b)
r.inserts[c] = nil
@@ -1025,6 +1046,7 @@ implement {
{ "box", "integer" },
{ "nofcolumns", "integer" },
{ "maxheight", "dimen" },
+ { "noteheight", "dimen" },
{ "step", "dimen" },
{ "cycles", "integer" },
{ "preheight", "dimen" },
@@ -1037,6 +1059,7 @@ implement {
{ "alternative" },
{ "internalgrid" },
{ "grid", "boolean" },
+ { "notes", "boolean" },
}
}
}
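The new noteheight and notes keys feed into the split budget computed in preparesplit above: when note handling is switched off at the TeX end, the measured note height is reserved up front and inserts are skipped while splitting. A rough plain Lua sketch of that bookkeeping, with illustrative numbers rather than real dimensions:

    -- the values here are assumptions for illustration; in practice they are sp dimensions
    local specification = {
        maxheight  = 100,   -- target column height
        noteheight = 12,    -- \totalnoteheight as measured at the TeX end
        notes      = false, -- false: notes are postponed, not split per column
    }

    local handlenotes = specification.notes or false
    local maxheight   = specification.maxheight - (specification.noteheight or 0)

    print(maxheight, handlenotes) -- 88   false: collectinserts is not called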
diff --git a/tex/context/base/mkiv/page-mix.mkiv b/tex/context/base/mkiv/page-mix.mkiv
index 7defece12..684ebc585 100644
--- a/tex/context/base/mkiv/page-mix.mkiv
+++ b/tex/context/base/mkiv/page-mix.mkiv
@@ -35,7 +35,7 @@
%
% \ctxlua{tex.setbox("global",0,node.hpack(nodes.pool.glyph("a",font.current())))}\box0
-\registerctxluafile{page-mix}{1.001}
+\registerctxluafile{page-mix}{}
\unprotect
@@ -84,6 +84,7 @@
\c!step=.25\lineheight, % needs some experimenting
%\c!splitmethod=\v!fixed, % will be default
\c!direction=\v!normal, % new (also todo in the new columnsets)
+ \c!notes=\v!yes,
\c!method=\ifinner\s!box\else\s!otr\fi] % automatic as suggested by WS
\let\startmixedcolumns\relax % defined later
@@ -123,6 +124,8 @@
\setnewconstant\c_page_mix_routine_balance \plusthree
\setnewconstant\c_page_mix_routine_error \plusfour
+\newconditional\c_page_mix_process_notes
+
%D The main environment is called as follows:
%D
%D \starttyping
@@ -183,7 +186,8 @@
\c!splitmethod=\v!none,
\c!grid=\v!tolerant,
\c!internalgrid=\v!halfline, % new, we may still revert to \v!line
- \c!balance=\v!yes]
+ \c!balance=\v!yes,
+ \c!notes=\v!no] % kind of hidden
% better
@@ -193,6 +197,17 @@
\c!grid=\v!yes,
\c!internalgrid=\v!line]
+% even better:
+
+\setupitemgroup
+ [\c!grid=\v!tolerant:10] % 10 pct tolerance in columns snapping
+
+\setupmixedcolumns
+ [\s!itemgroupcolumns]
+ [\c!grid=\itemgroupparameter\c!grid]
+
+% the fast hooks:
+
\unexpanded\def\strc_itemgroups_start_columns
{\startmixedcolumns[\s!itemgroupcolumns]} % we could have a fast one
@@ -239,7 +254,7 @@
\def\page_mix_routine_error
{\showmessage\m!columns3\empty
- \page_otr_construct_and_shipout\unvbox\normalpagebox}
+ \page_otr_construct_and_shipout\unvbox\normalpagebox\zerocount} % three arguments
%D Some settings (and actions) depend on the current output routine and setting the
%D hsize and vsize is among them. The calculation of the hsize is done elsewhere.
@@ -464,18 +479,29 @@
\let\page_mix_fast_columns_stop\relax
+\newtoks\t_page_mix_at_the_end
+
+\def\page_mix_finalize_columns
+ {\ifconditional\c_page_mix_process_notes \else
+ \global\t_page_mix_at_the_end{\stoppostponingnotes}%
+ \fi}
+
\unexpanded\def\page_mix_columns_stop_yes
- {\begincsname\??mixedcolumnsstop \currentmixedcolumnsmethod\endcsname % no \relax
+ {\begincsname\??mixedcolumnsstop\currentmixedcolumnsmethod\endcsname % no \relax
+ \page_mix_finalize_columns
\endgroup
\begincsname\??mixedcolumnsafter\currentmixedcolumnsmethod\endcsname\relax
\mixedcolumnsparameter\c!after\relax
\popmacro\currentmixedcolumnsmethod
- \popmacro\currentmixedcolumns}
+ \popmacro\currentmixedcolumns
+ \the\t_page_mix_at_the_end\global\t_page_mix_at_the_end\emptytoks}
\unexpanded\def\page_mix_columns_stop_nop
- {\endgroup
+ {\page_mix_finalize_columns
+ \endgroup
\popmacro\currentmixedcolumnsmethod
- \popmacro\currentmixedcolumns}
+ \popmacro\currentmixedcolumns
+ \the\t_page_mix_at_the_end\global\t_page_mix_at_the_end\emptytoks}
% \unexpanded\def\page_mix_columns_stop_yes
% {\begincsname\??mixedcolumnsstop \currentmixedcolumnsmethod\endcsname % no \relax
@@ -528,6 +554,11 @@
\leftskip \zeropoint
\rightskip\zeropoint
%
+ \doifelse{\mixedcolumnsparameter\c!notes}\v!yes\settrue\setfalse\c_page_mix_process_notes
+ \ifconditional\c_page_mix_process_notes \else
+ \startpostponingnotes
+ \fi
+ %
\d_page_mix_threshold\zeropoint
%
\d_page_mix_column_width\dimexpr(\d_page_mix_max_width-\d_page_mix_distance*\numexpr(\c_page_mix_n_of_columns-\plusone)\relax)/\c_page_mix_n_of_columns\relax
@@ -634,10 +665,16 @@
\unexpanded\def\page_mix_routine_construct#1%
{\d_page_mix_max_height\mixedcolumnsparameter\c!maxheight % can have changed due to header=high
+ \ifconditional\c_page_mix_process_notes
+ \totalnoteheight\zeropoint
+ \else
+ \settotalinsertionheight
+ \fi
\clf_mixsetsplit
box \b_page_mix_collected
nofcolumns \c_page_mix_n_of_columns
maxheight \d_page_mix_max_height
+ noteheight \totalnoteheight
step \d_page_mix_balance_step
cycles \c_page_mix_balance_cycles
preheight \d_page_mix_preceding_height
@@ -649,7 +686,8 @@
balance {#1}%
alternative {\mixedcolumnsparameter\c!alternative}%
internalgrid {\mixedcolumnsparameter\c!internalgrid}%
- grid \ifgridsnapping true\else false\fi
+ grid \ifgridsnapping tru\else fals\fi e %
+ notes \ifconditional\c_page_mix_process_notes tru\else fals\fi e %
\relax
\deadcycles\zerocount}
@@ -710,7 +748,13 @@
\fi}%
\fi
\egroup
- \egroup}
+ \hskip\d_page_mix_rightskip
+ \egroup
+ \wd\b_page_mix_collected\dimexpr
+ \d_page_mix_max_width
+ +\d_page_mix_rightskip
+ +\d_page_mix_leftskip
+ \relax }
\unexpanded\def\page_mix_command_package_column
{\page_mix_hbox to \d_page_mix_column_width \bgroup
@@ -719,7 +763,9 @@
\hskip-\d_page_mix_column_width
\vbox \bgroup
\hsize\d_page_mix_column_width
- \placenoteinserts
+ \ifconditional\c_page_mix_process_notes
+ \placenoteinserts
+ \fi
\egroup
\hss
\egroup}
@@ -731,7 +777,7 @@
\setbox\b_page_mix_collected\vpack{\unvbox\normalpagebox}% brrr we need to make a tight box (combine this in lua)
\page_mix_routine_construct\v!no
\page_mix_routine_package
- \page_otr_construct_and_shipout\box\b_page_mix_collected
+ \page_otr_construct_and_shipout\box\b_page_mix_collected\zerocount % three arguments
\clf_mixflushrest
\clf_mixcleanup
\egroup}
@@ -769,7 +815,7 @@
% 1 = we have stuff left, so flush and rebalance
%writestatus\m!columns{flush continue}%
\page_mix_routine_package
- \page_otr_construct_and_shipout\box\b_page_mix_collected
+ \page_otr_construct_and_shipout\box\b_page_mix_collected\zerocount % three arguments
\setbox\b_page_mix_collected\vpack{\clf_mixflushrest}% we could avoid this
\clf_mixcleanup
\ifdim\ht\b_page_mix_collected=\zeropoint
@@ -932,10 +978,10 @@
\unexpanded\def\page_mix_place_float_force
{\page_one_place_float_force}
-\unexpanded\def\page_mix_command_side_float_output % hm
- {\page_otr_construct_and_shipout\unvbox\normalpagebox}
+\unexpanded\def\page_mix_command_side_float_output
+ {\page_otr_construct_and_shipout\unvbox\normalpagebox\zerocount} % three arguments
-\unexpanded\def\page_mix_command_synchronize_side_floats % hm
+\unexpanded\def\page_mix_command_synchronize_side_floats
{\page_sides_forget_floats}
\unexpanded\def\page_mix_command_flush_side_floats
@@ -965,27 +1011,30 @@
%D We need to hook some handlers into the output routine and we define
%D a dedicated one:
+\let\page_mix_command_flush_all_floats\page_one_command_flush_all_floats
+
\defineoutputroutine
[\s!mixedcolumn]
[\s!page_otr_command_routine =\page_mix_command_routine,
\s!page_otr_command_package_contents =\page_mix_command_package_contents,
\s!page_otr_command_set_vsize =\page_mix_command_set_vsize,
\s!page_otr_command_set_hsize =\page_mix_command_set_hsize,
+ % \s!page_otr_command_synchronize_hsize =\page_mix_command_synchronize_hsize,
\s!page_otr_command_next_page =\page_mix_command_next_page,
\s!page_otr_command_next_page_and_inserts =\page_mix_command_next_page_and_inserts,
- % \s!page_otr_command_synchronize_hsize =\page_mix_command_synchronize_hsize,
% \s!page_otr_command_set_top_insertions =\page_mix_command_set_top_insertions,
% \s!page_otr_command_set_bottom_insertions =\page_mix_command_set_bottom_insertions,
% \s!page_otr_command_flush_top_insertions =\page_mix_command_flush_top_insertions,
% \s!page_otr_command_flush_bottom_insertions=\page_mix_command_flush_bottom_insertions,
- % \s!page_otr_command_set_float_hsize =\page_mix_command_set_float_hsize,
\s!page_otr_command_check_if_float_fits =\page_mix_command_check_if_float_fits,
+ % \s!page_otr_command_set_float_hsize =\page_mix_command_set_float_hsize,
\s!page_otr_command_flush_float_box =\page_mix_command_flush_float_box,
- \s!page_otr_command_synchronize_side_floats=\page_mix_command_synchronize_side_floats,
\s!page_otr_command_side_float_output =\page_mix_command_side_float_output,
+ \s!page_otr_command_synchronize_side_floats=\page_mix_command_synchronize_side_floats,
\s!page_otr_command_flush_floats =\page_mix_command_flush_floats,
\s!page_otr_command_flush_side_floats =\page_mix_command_flush_side_floats,
\s!page_otr_command_flush_saved_floats =\page_mix_command_flush_saved_floats,
+ \s!page_otr_command_flush_all_floats =\page_mix_command_flush_all_floats,
% \s!page_otr_command_flush_margin_blocks =\page_mix_command_flush_margin_blocks, % not used
\s!page_otr_command_test_column =\page_mix_command_test_column
]
diff --git a/tex/context/base/mkiv/page-mrk.mkiv b/tex/context/base/mkiv/page-mrk.mkiv
index 43116e84d..69746ea89 100644
--- a/tex/context/base/mkiv/page-mrk.mkiv
+++ b/tex/context/base/mkiv/page-mrk.mkiv
@@ -13,13 +13,19 @@
\writestatus{loading}{ConTeXt Page Macros / Cutmarks and Colorbars}
-%D This module depends on \METAPOST.
-
\unprotect
-% \def\pagecutmarksymbol {\the\realpageno}
-% \let\pagecutmarktoptext \empty
-% \let\pagecutmarkbottomtext \empty
+%D This module implements cutmarks and depends on \METAPOST.
+%D
+%D \starttyping
+%D \setuplayout[marking=on,location=middle]
+%D \setuplayout[marking=page,location=middle]
+%D \setuplayout[marking=empty,location=middle]
+%D \setuplayout[marking={color,page},location=middle]
+%D \setuplayout[marking={one,page},location=middle]
+%D \setuplayout[marking={two,page},location=middle]
+%D \setuplayout[marking={four,page},location=middle]
+%D \stoptyping
\def\pagecutmarklength {5mm}
\def\pagecutmarkoffset {3mm} % slightly larger than before, and now a fixed size
@@ -49,7 +55,8 @@
\startuniqueMPgraphic{print:lines}{w,h,l,o,x,y}
loadmodule "crop" ;
- page_marks_add_lines(\MPvar w,\MPvar h,\MPvar l,\MPvar o,\MPvar x,\MPvar y) ;
+ page_marks_add_lines(\MPvar w,\MPvar h,\MPvar l,\MPvar o,\MPvar x,\MPvar y,
+ \ifconditional\c_page_marks_add_page_lines tru\else fals\fi e) ;
\stopuniqueMPgraphic
\startuseMPgraphic{print:number}{w,h,l,o,n}
@@ -112,9 +119,7 @@
\scratchheight\ht#1%
\scratchdepth \dp#1%
\box#1\relax
- \ifconditional\c_page_marks_add_page_lines
- \page_marks_add_lines
- \fi
+ \page_marks_add_lines
\egroup}
\def\page_marks_add_more_indeed#1%
@@ -163,28 +168,29 @@
% \let\pagecutmarkbottomtext\extrapagecutmarkbottomtext}
\installpagecutmark\v!on
- {\settrue\c_page_marks_add_page_lines
+ {%settrue\c_page_marks_add_page_lines
\settrue\c_page_marks_add_more_lines
\settrue\c_page_marks_add_more_number}
-\installpagecutmark\v!page % todo
- {\settrue\c_page_marks_add_more_lines
+\installpagecutmark\v!page
+ {\settrue\c_page_marks_add_page_lines
+ \settrue\c_page_marks_add_more_lines
\settrue\c_page_marks_add_more_number}
\installpagecutmark\v!empty
- {\settrue\c_page_marks_add_page_lines
+ {%settrue\c_page_marks_add_page_lines
\settrue\c_page_marks_add_more_lines}
\installpagecutmark\v!color
- {\settrue\c_page_marks_add_page_lines
+ {%settrue\c_page_marks_add_page_lines
\settrue\c_page_marks_add_more_lines
\settrue\c_page_marks_add_more_color
\settrue\c_page_marks_add_more_marking
\settrue\c_page_marks_add_more_number}
-\installpagecutmark\v!one {\global\c_page_marks_max\plusone}
-\installpagecutmark\v!two {\global\c_page_marks_max\plustwo}
-\installpagecutmark\v!four{\global\c_page_marks_max\plusfour}
+\installpagecutmark\v!one {\global\c_page_marks_max\plusone} % first page
+\installpagecutmark\v!two {\global\c_page_marks_max\plustwo} % first two pages
+\installpagecutmark\v!four{\global\c_page_marks_max\plusfour} % first four pages
\unexpanded\def\page_marks_set#1%
{\begincsname\??layoutmarking#1\endcsname}
diff --git a/tex/context/base/mkiv/page-mul.mkiv b/tex/context/base/mkiv/page-mul.mkiv
index fcad2c4c6..51af24b6a 100644
--- a/tex/context/base/mkiv/page-mul.mkiv
+++ b/tex/context/base/mkiv/page-mul.mkiv
@@ -15,6 +15,8 @@
% !!! there are some issues with hsize an vsize as well as flushing
% !!! floats but this module will be redone anyway
+%
+% can have some vpack and hpack
\writestatus{loading}{ConTeXt Page Macros / Simple Multi Column}
@@ -84,7 +86,7 @@
{\page_one_place_float_force}
\unexpanded\def\page_mul_command_side_float_output
- {\page_otr_construct_and_shipout\unvbox\normalpagebox}
+ {\page_otr_construct_and_shipout\unvbox\normalpagebox\zerocount} % three arguments
\unexpanded\def\page_mul_command_flush_side_floats
{\page_sides_forget_floats}
@@ -339,7 +341,7 @@
\def\page_mul_routine_error
{\showmessage\m!columns3\empty
- \page_otr_construct_and_shipout\unvbox\normalpagebox}
+ \page_otr_construct_and_shipout\unvbox\normalpagebox\zerocount} % three arguments
%D When we leave the multi||column mode, we have to process the not yet shipped out
%D part of the columns. When we don't balance, we simply force a continuous output,
@@ -856,7 +858,7 @@
{\ht\currentcolumnbox\d_page_mul_balance_target}% redundant
\fi
\setbox\b_page_mul_preceding\vbox{\page_mul_flush_packaged_columns_continued}%
- \page_otr_construct_and_shipout\box\b_page_mul_preceding
+ \page_otr_construct_and_shipout\box\b_page_mul_preceding\zerocount % three arguments
\page_otr_command_set_hsize
\page_otr_command_set_vsize
\page_mul_flush_floats
@@ -1818,6 +1820,7 @@
\let\page_mul_command_package_contents\page_one_command_package_contents
\let\page_mul_command_flush_float_box \page_one_command_flush_float_box
+\let\page_mul_command_flush_all_floats\page_one_command_flush_all_floats
\defineoutputroutine
[\s!multicolumn]
@@ -1825,21 +1828,22 @@
\s!page_otr_command_package_contents =\page_mul_command_package_contents,
\s!page_otr_command_set_vsize =\page_mul_command_set_vsize,
\s!page_otr_command_set_hsize =\page_mul_command_set_hsize,
+ % \s!page_otr_command_synchronize_hsize =\page_mul_command_synchronize_hsize,
\s!page_otr_command_next_page =\page_mul_command_next_page,
\s!page_otr_command_next_page_and_inserts =\page_mul_command_next_page_and_inserts,
- % \s!page_otr_command_synchronize_hsize =\page_mul_command_synchronize_hsize,
% \s!page_otr_command_set_top_insertions =\page_mul_command_set_top_insertions,
% \s!page_otr_command_set_bottom_insertions =\page_mul_command_set_bottom_insertions,
\s!page_otr_command_flush_top_insertions =\page_mul_command_flush_top_insertions,
% \s!page_otr_command_flush_bottom_insertions=\page_mul_command_flush_bottom_insertions,
- % \s!page_otr_command_set_float_hsize =\page_mul_command_set_float_hsize,
\s!page_otr_command_check_if_float_fits =\page_mul_command_check_if_float_fits,
+ % \s!page_otr_command_set_float_hsize =\page_mul_command_set_float_hsize,
\s!page_otr_command_flush_float_box =\page_mul_command_flush_float_box,
- \s!page_otr_command_synchronize_side_floats=\page_mul_command_synchronize_side_floats,
\s!page_otr_command_side_float_output =\page_mul_command_side_float_output,
+ \s!page_otr_command_synchronize_side_floats=\page_mul_command_synchronize_side_floats,
\s!page_otr_command_flush_floats =\page_mul_command_flush_floats,
\s!page_otr_command_flush_side_floats =\page_mul_command_flush_side_floats,
\s!page_otr_command_flush_saved_floats =\page_mul_command_flush_saved_floats,
+ \s!page_otr_command_flush_all_floats =\page_mul_command_flush_all_floats,
% \s!page_otr_command_flush_margin_blocks =\page_mul_command_flush_margin_blocks, % not used
\s!page_otr_command_test_column =\page_mul_command_test_column
]
diff --git a/tex/context/base/mkiv/page-one.mkiv b/tex/context/base/mkiv/page-one.mkiv
index 9bd6951f3..35233247b 100644
--- a/tex/context/base/mkiv/page-one.mkiv
+++ b/tex/context/base/mkiv/page-one.mkiv
@@ -1,4 +1,4 @@
- %D \module
+%D \module
%D [ file=page-one,
%D version=2000.10.20,
%D title=\CONTEXT\ Page Macros,
@@ -192,7 +192,7 @@
\egroup}
\unexpanded\def\page_one_command_side_float_output
- {\page_otr_construct_and_shipout\unvbox\normalpagebox}
+ {\page_otr_construct_and_shipout\unvbox\normalpagebox\plusone} % three arguments, we need to be in the output group
\unexpanded\def\page_one_command_routine
{\page_sides_output_routine}
@@ -365,29 +365,33 @@
\def\page_one_command_flush_floats_indeed % much in common with OTRSET
{\ifconditional\c_page_floats_some_waiting
\ifconditional\c_page_floats_pack_flushed
- \setfalse\c_page_floats_center_box % not needed as we do call directly
\page_floats_collect\s!text\hsize\emwidth
- %% no longer (interferes with footnotes):
- %%
- %% \page_one_command_set_vsize % test 2011.06.24.001
- %%
- \global\setbox\floatbox\hbox to \hsize
- {\hfil
- \dorecurse\nofcollectedfloats
- {\ifcase\columndirection % nog document wide
- \page_floats_flush\s!text\plusone
- \else
- \page_floats_flush\s!text{\the\numexpr\nofcollectedfloats-\recurselevel+1\relax}%
- \fi
- \ifdim\wd\floatbox>\makeupwidth % \hsize
- \hbox to \makeupwidth{\hss\box\floatbox\hss}%
- \else
- \box\floatbox
- \fi
- \ifnum\recurselevel<\nofcollectedfloats
- \hfil
- \fi}%
- \hfil}%
+ \ifcase\nofcollectedfloats
+ \page_floats_get
+ \else
+ \setfalse\c_page_floats_center_box % not needed as we do call directly
+ %% no longer (interferes with footnotes):
+ %%
+ %% \page_one_command_set_vsize % test 2011.06.24.001
+ %%
+ \global\setbox\floatbox\hbox to \hsize
+ {\hfil
+ \dorecurse\nofcollectedfloats
+ {\ifcase\columndirection % not yet document wide
+ \page_floats_flush\s!text\plusone
+ \else
+ \page_floats_flush\s!text{\the\numexpr\nofcollectedfloats-\recurselevel+1\relax}%
+ \fi
+ \ifdim\wd\floatbox>\makeupwidth % \hsize
+ \hpack to \makeupwidth{\hss\box\floatbox\hss}%
+ \else
+ \box\floatbox
+ \fi
+ \ifnum\recurselevel<\nofcollectedfloats
+ \hfil
+ \fi}%
+ \hfil}%
+ \fi
\else
\page_floats_get
\fi
@@ -423,20 +427,21 @@
\fi
% should be an option
\endgroup
- \dimen0\dimexpr\pagetotal+\floatheight+\d_strc_floats_top-\pageshrink\relax
- \dimen2\pagegoal
+ \scratchdimenone\dimexpr\pagetotal+\floatheight+\d_strc_floats_top-\pageshrink\relax
+ \scratchdimentwo\pagegoal
\relax % needed
\ifcase\c_page_one_float_method
% method 0 : raw
\or
% method 1 : safe
- \dimen2 .99\pagegoal
+ % too fuzzy as it can change and for a high page it's a lot : \scratchdimentwo .99\pagegoal
+ \advance\scratchdimentwo -\strutdp
\or
% method 2 : tight
- \advance\dimen0 -\onepoint
+ \advance\scratchdimenone -\onepoint
\fi
\relax % really needed ! ! ! !
- \ifdim\dimen0>\dimen2
+ \ifdim\scratchdimenone>\scratchdimentwo
\global\setfalse\c_page_floats_room
\else
\global\settrue\c_page_floats_room
@@ -456,7 +461,6 @@
\fi\fi
\fi}
-
\def\page_one_place_float_here_indeed
{\ifgridsnapping
% otherwise real bad outcome
@@ -596,27 +600,43 @@
\unexpanded\def\page_one_command_test_page
{\testpage}
+\unexpanded\def\page_one_command_flush_all_floats
+ {\ifconditional\c_page_floats_some_waiting
+ \begingroup
+ \c_page_floats_n_of_top\plusthousand
+ \c_page_floats_n_of_bottom\zerocount
+ % this is needed in case a float that has been stored
+ % ends up at the current page; this border case occurs when
+ % the calculated room is 'eps' smaller that the room available
+ % when just flushing; so now we have (maybe optional):
+ \pagebaselinecorrection % hm, needs checking, not needed when no floats
+ % alas, this is tricky but needed (first surfaced in prikkels)
+ \page_otr_command_flush_floats
+ \endgroup
+ \fi}
+
\defineoutputroutine
[\s!singlecolumn]
[\s!page_otr_command_routine =\page_one_command_routine,
\s!page_otr_command_package_contents =\page_one_command_package_contents,
\s!page_otr_command_set_vsize =\page_one_command_set_vsize,
\s!page_otr_command_set_hsize =\page_one_command_set_hsize,
+ % \s!page_otr_command_synchronize_hsize =\page_one_command_synchronize_hsize,
\s!page_otr_command_next_page =\page_one_command_next_page,
\s!page_otr_command_next_page_and_inserts =\page_one_command_next_page_and_inserts,
- % \s!page_otr_command_synchronize_hsize =\page_one_command_synchronize_hsize,
\s!page_otr_command_set_top_insertions =\page_one_command_set_top_insertions,
\s!page_otr_command_set_bottom_insertions =\page_one_command_set_bottom_insertions,
\s!page_otr_command_flush_top_insertions =\page_one_command_flush_top_insertions,
\s!page_otr_command_flush_bottom_insertions=\page_one_command_flush_bottom_insertions,
- % \s!page_otr_command_set_float_hsize =\page_one_command_set_float_hsize,
\s!page_otr_command_check_if_float_fits =\page_one_command_check_if_float_fits,
+ % \s!page_otr_command_set_float_hsize =\page_one_command_set_float_hsize,
\s!page_otr_command_flush_float_box =\page_one_command_flush_float_box,
- \s!page_otr_command_synchronize_side_floats=\page_one_command_synchronize_side_floats,
\s!page_otr_command_side_float_output =\page_one_command_side_float_output,
+ \s!page_otr_command_synchronize_side_floats=\page_one_command_synchronize_side_floats,
\s!page_otr_command_flush_floats =\page_one_command_flush_floats,
\s!page_otr_command_flush_side_floats =\page_one_command_flush_side_floats,
\s!page_otr_command_flush_saved_floats =\page_one_command_flush_saved_floats,
+ \s!page_otr_command_flush_all_floats =\page_one_command_flush_all_floats,
\s!page_otr_command_flush_margin_blocks =\page_one_command_flush_margin_blocks,
\s!page_otr_command_test_column =\page_one_command_test_page
]
diff --git a/tex/context/base/mkiv/page-otr.mkvi b/tex/context/base/mkiv/page-otr.mkvi
index ec38a06c1..ebaf17ebc 100644
--- a/tex/context/base/mkiv/page-otr.mkvi
+++ b/tex/context/base/mkiv/page-otr.mkvi
@@ -19,7 +19,7 @@
% When issuing two \par\penalty-\plustenthousand's, only the first
% triggers the otr. Is this an obscure feature or an optimization?
-\registerctxluafile{page-otr}{1.001}
+\registerctxluafile{page-otr}{}
\unprotect
@@ -52,7 +52,7 @@
\to \everyswitchoutputroutine
\unexpanded\def\page_otr_specifics_preset#name%
- {\edef\page_otr_specifics_command{\strictoutputroutineparameter{#name}}% no inheritance of commands
+ {\edef\page_otr_specifics_command{\directoutputroutineparameter{#name}}% no inheritance of commands
\ifx\page_otr_specifics_command\empty
\writestatus{\currentoutputroutine}{- \expandafter\strippedcsname\csname#name\endcsname}%
\expandafter\let\csname#name\endcsname\relax
@@ -62,7 +62,7 @@
\fi}
\unexpanded\def\page_otr_specifics_preset_normal#name%
- {\edef\page_otr_specifics_command{\strictoutputroutineparameter{#name}}% no inheritance of commands
+ {\edef\page_otr_specifics_command{\directoutputroutineparameter{#name}}% no inheritance of commands
\ifx\page_otr_specifics_command\empty
\expandafter\let\csname#name\endcsname\relax
\else
@@ -70,7 +70,7 @@
\fi}
\unexpanded\def\page_otr_specifics_preset_traced#name%
- {\edef\page_otr_specifics_command{\strictoutputroutineparameter{#name}}% no inheritance of commands
+ {\edef\page_otr_specifics_command{\directoutputroutineparameter{#name}}% no inheritance of commands
\ifx\page_otr_specifics_command\empty
\writestatus{\currentoutputroutine}{preset: - \expandafter\strippedcsname\csname#name\endcsname}%
\expandafter\let\csname#name\endcsname\relax
@@ -114,8 +114,9 @@
c:\number\mofcolumns,\space
v:\the\vsize ,\space
g:\the\pagegoal ,\space
- t:\the\pagetotal ,\space
+ t:\the\pagetotal
\ifdim\pagetotal>\pagegoal
+ ,\space
d:\the\dimexpr\pagetotal-\pagegoal\relax
\fi}}
@@ -166,12 +167,32 @@
#content\relax
\the\everyafteroutput}}
+% Just as fuzzy (and in 'one' we are okay with \aftergroup anyway):
+%
+% \ifdefined\everybeforeoutputgroup \else \newtoks\everybeforeoutputgroup \fi
+% \ifdefined\everyafteroutputgroup \else \newtoks\everyafteroutputgroup \fi
+%
+% \def\page_otf_set_engine_output_routine#content%
+% {\the\everybeforeoutputgroup
+% \global\output
+% {\inotrtrue
+% \the\everybeforeoutput
+% #content\relax
+% \the\everyafteroutput
+% \aftergroup\the\aftergroup\everyafteroutputgroup}}
+%
+% \appendtoks
+% \ifnum\c_page_postponed_mode=\plusone
+% \page_postponed_blocks_flush % and then not in \page_otr_construct_and_shipout
+% \fi
+% \to \everyafteroutputgroup
+
\page_otf_set_engine_output_routine\page_otr_triggered_output_routine
-\installoutputroutine\synchronizeoutput
+\installoutputroutine\synchronizeoutput % use \triggerpagebuilder instead
{\ifvoid\normalpagebox\else
\unvbox\normalpagebox
- \pagediscards
+ % not \pagediscards as it does more harm than good
\fi}
\installoutputroutine\discardpage
@@ -238,7 +259,9 @@
{\writestatus\m!system{Sorry, you're not done yet, so no goodbye!}}
%D We define a few constants because that (1) provides some checking
-%D and (2) is handier when aligning definitions (checks nicer).
+%D and (2) is handier when aligning definitions (checks nicer). Most
+%D routines will use hardcoded names but sometimes we want to adapt,
+%D which is why we have these:
\definesystemconstant{page_otr_command_routine}
\definesystemconstant{page_otr_command_package_contents}
@@ -259,12 +282,14 @@
\definesystemconstant{page_otr_command_flush_floats}
\definesystemconstant{page_otr_command_flush_side_floats}
\definesystemconstant{page_otr_command_flush_saved_floats}
+\definesystemconstant{page_otr_command_flush_all_floats}
\definesystemconstant{page_otr_command_flush_margin_blocks}
\definesystemconstant{page_otr_command_test_column}
\definesystemconstant{singlecolumn}
\definesystemconstant{multicolumn} % will move
\definesystemconstant{columnset} % will move
+\definesystemconstant{pagecolumn} % will move
\defineoutputroutinecommand
[\s!page_otr_command_routine,
@@ -286,6 +311,7 @@
\s!page_otr_command_flush_floats,
\s!page_otr_command_flush_side_floats,
\s!page_otr_command_flush_saved_floats,
+ \s!page_otr_command_flush_all_floats,
\s!page_otr_command_flush_margin_blocks,
\s!page_otr_command_test_column]
diff --git a/tex/context/base/mkiv/page-pcl.mkiv b/tex/context/base/mkiv/page-pcl.mkiv
new file mode 100644
index 000000000..53d9f781d
--- /dev/null
+++ b/tex/context/base/mkiv/page-pcl.mkiv
@@ -0,0 +1,812 @@
+%D \module
+%D [ file=page-pcl,
+%D version=2017.11.08,
+%D title=\CONTEXT\ Page Macros,
+%D subtitle=Page Columns,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\writestatus{loading}{ConTeXt Page Macros / Page Columns}
+
+%D This is very experimental code! We took a bit from the mixed columns and single
+%D column page code. This one works acceptably with floats and is meant for whole
+%D double column documents. We don't balance (yet). Footnotes are per column. One
+%D can have side floats too. No balancing or other fancy features.
+%D
+%D Don't use this in production! Although the main approach will stay, there might
+%D be changes in the way floats are dealt with. Not much testing has been done, but
+%D as we stay close to the single column mode we expect most things to just work.
+%D Only floats are the (usual) pain. Backgrounds, line numbering, etc.\ not tested
+%D either.
+
+\unprotect
+
+\definemeasure[onecolumn] [\columnwidth]
+\definemeasure[twocolumns] [\dimexpr\plustwo \columnwidth+ \columndistance\relax]
+\definemeasure[threecolumns][\dimexpr\plusthree\columnwidth+\plustwo \columndistance\relax]
+\definemeasure[fourcolumns] [\dimexpr\plusfour \columnwidth+\plusthree\columndistance\relax]
+
+\newcount\c_page_col_n_of_columns \c_page_col_n_of_columns\plusone
+\newcount\c_page_col_current \c_page_col_current \plusone
+\newdimen\d_page_col_distance
+\newdimen\d_page_col_max_height
+\newdimen\d_page_col_max_width
+%newdimen\d_page_col_balance_step
+\newdimen\d_page_col_column_width
+
+\newdimen\d_page_col_top_height
+\newdimen\d_page_col_top_width
+
+\newdimen\d_page_col_available
+\newdimen\d_page_col_sofar
+
+%D We need to step over empty columns.
+
+\unexpanded\def\page_col_command_next_page
+ {\page_col_eject_page}
+
+\unexpanded\def\page_col_column
+ {\page_otr_eject_page}
+
+\unexpanded\def\page_col_eject_page
+ {\begingroup
+ \scratchcountertwo \realpageno
+ \page_otr_eject_page
+ \scratchcounterone \c_page_col_current
+ \scratchcounterthree\zerocount
+ \doloop{%
+ \ifnum\scratchcounterthree>\plushundred
+ % too many attempts
+ \exitloop
+ \else\ifnum\realpageno>\scratchcountertwo
+ % we advanced at least one page so we're done
+ \exitloop
+ \else
+ \ifnum\scratchcounterone=\c_page_col_current
+ \dontleavehmode\null
+ \fi
+ \page_otr_eject_page
+ \scratchcounterone\c_page_col_current
+ \advance\scratchcounterthree\plusone
+ \fi\fi
+ }%
+ \endgroup}
+
+%D \unknown
+
+\unexpanded\def\page_col_command_next_page_and_inserts
+ {\page_otr_eject_page_and_flush_inserts}
+
+%D \unknown
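+%D
+%D The next macro sets the running \type {\hsize} to one column width. It also
+%D (globally) records \type {\d_page_col_available}, the room left from the current
+%D column up to the last one (columns plus gaps), and \type {\d_page_col_sofar},
+%D the width taken up by the other columns and the gaps.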
+
+\unexpanded\def\page_col_command_set_hsize
+ {\global\hsize\d_page_col_column_width\relax
+ \global\d_page_col_available\dimexpr
+ \numexpr\c_page_col_n_of_columns-\c_page_col_current+\plusone\relax\d_page_col_column_width
+ + \numexpr\c_page_col_n_of_columns-\c_page_col_current \relax\d_page_col_distance
+ \relax
+ \global\d_page_col_sofar
+ \ifnum\c_page_col_n_of_columns=\plusone
+ \zeropoint
+ \else
+ \numexpr\c_page_col_n_of_columns-\plusone\relax
+ \dimexpr\d_page_col_column_width+\d_page_col_distance\relax
+ \fi}
+
+%D \unknown
+
+\unexpanded\def\page_col_command_set_vsize % \page_one_command_set_vsize minus the pagegoal setting
+ {\ifgridsnapping
+ \ifcase\layoutlines
+ \getrawnoflines\textheight
+ \else
+ \noflines\layoutlines
+ \fi
+ \global\vsize\noflines\openlineheight
+ \else
+ \global\vsize\textheight
+ \fi}
+
+%D \unknown
+
+\unexpanded\def\page_col_command_package_contents#1#2% \box \unvbox % this one will be redone (checked)
+ {\bgroup
+ \setbox\b_page_one_contents\vbox to \textheight
+ {\page_one_registered_text_area_a#1#2}%
+ \page_one_command_package_show_state
+ \ht\b_page_one_contents\textheight
+ \page_one_registered_text_area_b
+ {\box\b_page_one_contents}%
+ \egroup}
+
+\unexpanded\def\page_col_command_package_contents_one#1#2% \box \unvbox % this one will be redone (checked)
+ {\bgroup
+ \forgetall
+ % see one for comments as it is similar
+ \strc_notes_check_if_bottom_present
+ \d_page_one_natural_depth\dp#2\relax
+ \setbox\b_page_one_contents\vbox to \textheight
+ {\page_col_command_flush_top_insertions
+ \page_one_registered_text_area_a#1#2%
+ \hsize\d_page_col_column_width
+ \ifgridsnapping
+ \vskip\dimexpr\openstrutdepth-\d_page_one_natural_depth\relax
+ \prevdepth\openstrutdepth
+ \page_col_command_flush_bottom_insertions
+ \vfil
+ \else\ifcase\bottomraggednessmode
+ % ragged (default)
+ \vskip\dimexpr\openstrutdepth-\d_page_one_natural_depth\relax
+ \prevdepth\openstrutdepth
+ \page_col_command_flush_bottom_insertions
+ \vfil
+ \or
+ % align (normal)
+ \page_col_command_flush_bottom_insertions
+ \or
+ % baseline
+ \kern\dimexpr\maxdepth-\d_page_one_natural_depth\relax
+ \page_col_command_flush_bottom_insertions
+ \fi\fi
+ \fakepagenotes}%
+ \page_one_command_package_show_state
+ \ifconditional\c_notes_bottom_present
+ \ifgridsnapping
+ \ifcase\layoutlines
+ \getrawnoflines\textheight
+ \else
+ \noflines\layoutlines
+ \fi
+ \scratchoffset\dimexpr\numexpr\noflines-\plusone\relax\lineheight+\topskip\relax
+ \else
+ \scratchoffset\ht\b_page_one_contents
+ \fi
+ \setbox\b_page_one_bottom_notes\hpack
+ {\checksinglecolumnfootnotes % ?
+ \hsize\d_page_col_column_width
+ \setupnotes[\c!width=\textwidth]%
+ \lower\scratchoffset\vbox{\placebottomnotes\par\kern\zeropoint}}%
+ \ht\b_page_one_contents \zeropoint
+ \wd\b_page_one_contents \zeropoint
+ \ht\b_page_one_bottom_notes\zeropoint
+ \wd\b_page_one_bottom_notes\zeropoint
+ \wd\b_page_one_bottom_notes\d_page_col_column_width
+ \page_one_registered_text_area_b
+ {\vpack to \textheight
+ {\hbox{\box\b_page_one_contents\box\b_page_one_bottom_notes}}}%
+ \else
+ \ht\b_page_one_contents\textheight
+ \wd\b_page_one_contents\d_page_col_column_width
+ \page_one_registered_text_area_b
+ {\box\b_page_one_contents}%
+ \fi
+ \egroup}
+
+%D \unknown
+
+\unexpanded\def\page_col_command_side_float_output
+ {% % %
+ \ifvoid\namedinsertionnumber\s!topfloat\else
+ \scratchwidth\wd\namedinsertionnumber\s!topfloat
+ \ifdim\scratchwidth>\d_page_col_top_width
+ \global\d_page_col_top_width \scratchwidth
+ \fi
+ \global\d_page_col_top_height\ht\namedinsertionnumber\s!topfloat
+ \fi
+ % % %
+ \setbox\scratchbox\vbox\bgroup
+ \page_col_command_package_contents_one\unvbox\normalpagebox
+ \egroup
+ \putboxincache\s!pagecolumn{\number\c_page_col_current}\scratchbox
+ \ifnum\c_page_col_current=\c_page_col_n_of_columns
+ \page_col_routine_package
+ \page_otr_construct_and_shipout\box\normalpagebox\plusone
+ \global\c_page_col_current\plusone
+ \global\d_page_col_top_height\zeropoint
+ \global\d_page_col_top_width\zeropoint
+ % \page_col_command_flush_top_insertions
+ % \page_col_command_flush_floats
+ \else
+ \ifdim\d_page_col_top_width>\zeropoint
+ \ifdim\dimexpr\d_page_col_top_width>\d_page_col_sofar\relax
+ \begingroup
+ \floatingpenalty\zerocount
+ \insert\namedinsertionnumber\s!topfloat\bgroup
+ \vbox to \d_page_col_top_height{\vss}
+ % can be an option
+ \page_col_command_flush_top_insertions
+ \page_col_command_flush_floats
+ % so far till option
+ \egroup
+ \endgroup
+ \fi
+ \fi
+ \global\advance\c_page_col_current\plusone
+ \fi
+ %
+ \page_col_command_set_vsize
+ \page_col_command_set_hsize}
+
+% use \currentmixedcolumns instead of \recurselevel
+
+\def\page_col_routine_package_step
+ {% needs packaging anyway
+ \getboxfromcache{\s!pagecolumn}{\number\recurselevel}\scratchbox
+ \page_lines_add_numbers_to_box\scratchbox\recurselevel\c_page_col_n_of_columns\plusone % new
+ \page_marks_synchronize_column\plusone\c_page_col_n_of_columns\recurselevel\scratchbox
+ % backgrounds
+ \anch_mark_column_box\scratchbox
+ \mixedcolumnseparatorheight\ht\scratchbox
+ \mixedcolumnseparatordepth \dp\scratchbox
+ \inheritedpagecolumnsframedbox\recurselevel\scratchbox}
+
+\def\page_col_routine_package
+ {\global\setbox\normalpagebox\hbox to \makeupwidth\bgroup
+ \edef\p_separator{\pagecolumnsparameter\c!separator}%
+ \pagecolumnseparatorwidth\d_page_col_distance
+ \edef\p_direction{\pagecolumnsparameter\c!direction}%
+ \ifx\p_direction\v!reverse
+ \dostepwiserecurse\c_page_col_n_of_columns\plusone\minusone
+ {\page_col_routine_package_step
+ \ifnum\recurselevel>\plusone
+ \page_col_routine_package_separate
+ \fi}%
+ \else
+ \dorecurse\c_page_col_n_of_columns
+ {\page_col_routine_package_step
+ \ifnum\recurselevel<\c_page_col_n_of_columns
+ \page_col_routine_package_separate
+ \fi}%
+ \fi
+ \egroup
+ \resetboxesincache{\s!pagecolumn}}
+
+%D \unknown
+
+% \unexpanded\def\page_col_command_check_if_float_fits
+% {\ifconditional\c_page_floats_not_permitted
+% \global\setfalse\c_page_floats_room
+% %\else\ifabsdim\dimexpr\d_page_col_available-\naturalfloatwd\relax<\onepoint
+% \else\ifdim\dimexpr\d_page_col_available-\naturalfloatwd\relax>-\onepoint
+% \global\settrue\c_page_floats_room
+% \else
+% \global\setfalse\c_page_floats_room
+% \fi\fi
+% \ifconditional\c_page_floats_room
+% \begingroup
+% \scratchdimen\dimexpr\pagetotal+\lineheight\relax
+% \ifdim\scratchdimen>\pagegoal
+% \goodbreak
+% \ifdim\dimexpr\d_page_col_available-\naturalfloatwd\relax>\onepoint
+% \global\setfalse\c_page_floats_room
+% \else
+% \global\settrue\c_page_floats_room
+% \fi
+% \fi
+% \endgroup
+% \fi
+% \ifconditional\c_page_floats_room
+% \ifdim\pagetotal>\zeropoint
+% \scratchdimenone\dimexpr\pagetotal+\floatheight+\d_strc_floats_top-\pageshrink\relax
+% \scratchdimentwo\pagegoal
+% \relax % needed
+% \ifcase\c_page_one_float_method
+% % method 0 : raw
+% \or
+% % method 1 : safe
+% \advance\scratchdimentwo -\strutdp
+% \or
+% % method 2 : tight
+% \advance\scratchdimenone -\onepoint
+% \fi
+% \relax % really needed ! ! ! !
+% \ifdim\scratchdimenone>\scratchdimentwo
+% \global\setfalse\c_page_floats_room
+% \fi
+% \fi
+% \ifconditional\c_page_floats_room
+% \global\setbox\floatbox\hpack to \d_page_col_available{\hss\box\floatbox\hss}%
+% \fi
+% \fi}
+
+\unexpanded\def\page_col_command_check_if_float_fits
+ {\ifconditional\c_page_floats_not_permitted
+ % forget about it anyway
+ \global\setfalse\c_page_floats_room
+ \else
+ % first we check the current column
+% \ifdim\dimexpr\d_page_col_width-\naturalfloatwd\relax>-\onepoint
+ \ifdim\dimexpr\hsize-\naturalfloatwd\relax>-\onepoint
+ \global\settrue\c_page_floats_room
+ \else
+ \global\setfalse\c_page_floats_room
+ \fi
+ \ifconditional\c_page_floats_room
+ % we fit in the column but do we have room
+ \ifdim\dimexpr\pagetotal+\lineheight\relax>\pagegoal
+ % try again later
+ \goodbreak
+ \fi
+ \ifdim\pagetotal>\zeropoint
+ \scratchdimenone\dimexpr\pagetotal+\floatheight+\d_strc_floats_top-\pageshrink\relax
+ \scratchdimentwo\pagegoal
+ \relax % needed
+ \ifcase\c_page_one_float_method
+ % method 0 : raw
+ \or
+ % method 1 : safe
+ \advance\scratchdimentwo -\strutdp
+ \or
+ % method 2 : tight
+ \advance\scratchdimenone -\onepoint
+ \fi
+ \relax % really needed ! ! ! !
+ \ifdim\scratchdimenone>\scratchdimentwo
+ % there is no room, give up
+ \global\setfalse\c_page_floats_room
+ % now we can decide on a top float
+% \fi
+ \else
+% \ifconditional\c_page_floats_room
+% \global\setbox\floatbox\hpack to \d_page_col_float_available{\hss\box\floatbox\hss}%
+% \fi
+ \fi
+ \fi
+ \fi
+ \fi}
+
+%D \unknown
+
+\def\page_col_set_float_pack_hsize
+ {\ifnum\c_page_col_current=\c_page_col_n_of_columns
+ \c_page_col_current\plusone
+ \else
+ \advance\c_page_col_current\plusone
+ \fi
+ \page_col_command_set_hsize
+ \hsize\d_page_col_available}
+
+\unexpanded\def\page_col_command_flush_floats
+ {\global\settrue\c_page_floats_flushing
+ \ifconditional\c_page_floats_some_waiting
+ \par
+ \page_col_set_float_pack_hsize
+ \page_col_command_flush_floats_indeed
+ \fi
+ \global\savednoffloats\zerocount
+ \global\setfalse\c_page_floats_some_waiting
+ \global\setfalse\c_page_floats_flushing}
+
+\def\page_floats_show_pack_state_indeed#1%
+ {\llap{\smash{\backgroundline[black]{\strut\smallinfofont\white#1\space\the\nofcollectedfloats\space of\space\the\savednoffloats:\the\hsize}}\hskip.25\emwidth}}
+
+\installtextracker
+ {floats.collecting}
+ {\let\page_floats_show_pack_state\page_floats_show_pack_state_indeed}
+ {\let\page_floats_show_pack_state\gobbleoneargument}
+
+\let\page_floats_show_pack_state\gobbleoneargument
+
+\def\page_col_command_flush_floats_indeed % much in common with OTRSET
+ {\ifconditional\c_page_floats_some_waiting
+ \ifconditional\c_page_floats_pack_flushed
+ \setfalse\c_page_floats_center_box % not needed as we do call directly
+ %
+ \page_floats_collect\s!text\hsize\emwidth
+ %
+ \ifnum\nofcollectedfloats=\plusone
+ \ifdim\naturalfloatwd>\hsize
+ \nofcollectedfloats\zerocount
+ \fi
+ \fi
+ \ifnum\nofcollectedfloats>\zerocount
+ \global\setbox\floatbox\hpack to \hsize
+ {\page_floats_show_pack_state F%
+ \hfil
+ \dorecurse\nofcollectedfloats
+                       {\ifcase\columndirection % still document wide
+ \page_floats_flush\s!text\plusone
+ \else
+ \page_floats_flush\s!text{\the\numexpr\nofcollectedfloats-\recurselevel+1\relax}%
+ \fi
+ % this could happen at the lua end instead
+ \scratchdimen\dimexpr\wd\floatbox-\naturalfloatwd\relax
+ \ifdim\scratchdimen<\zeropoint
+ \global\setbox\floatbox\hpack spread -\scratchdimen{\hss\box\floatbox\hss}%
+ \fi
+ %
+ \ifdim\wd\floatbox>\textwidth % \hsize
+ \hpack to \textwidth{\hss\box\floatbox\hss}% \textwidth
+ \else
+ \box\floatbox
+ \fi
+ \ifnum\recurselevel<\nofcollectedfloats
+ \hfil
+ \fi}%
+ \hfil}%
+ \doplacefloatbox
+ % \page_one_insert_top_float
+ \doubleexpandafter\page_col_command_flush_floats_indeed
+ \else
+ % todo
+ \fi
+ \else
+ \page_floats_get
+ % \page_one_insert_top_float
+ \doplacefloatbox
+ \doubleexpandafter\page_col_command_flush_floats_indeed
+ \fi
+ \fi}
+
+\unexpanded\def\page_col_command_flush_saved_floats % like one
+ {\global\d_page_floats_inserted_top\zeropoint
+ \global\d_page_floats_inserted_bottom\zeropoint
+ \ifconditional\c_page_floats_flushing \else
+ \page_col_command_set_top_insertions
+ \page_col_command_set_bottom_insertions
+ \ifconditional\c_page_floats_some_waiting
+ \doif{\rootfloatparameter\c!cache}\v!no\page_col_command_flush_floats % could be _otr_
+ \else\ifconditional\c_page_margin_blocks_present
+ \page_col_command_flush_floats
+ \fi\fi
+ \fi}
+
+\unexpanded\def\page_col_command_set_top_insertions
+ {\bgroup
+ \ifconditional\c_page_floats_some_waiting
+ \noffloatinserts\zerocount
+ \let\totaltopinserted\!!zeropoint
+ \page_col_set_float_pack_hsize
+ \page_col_command_set_top_insertions_indeed
+ \ifnum\rootfloatparameter\c!nbottom=\zerocount
+ \ifnum\rootfloatparameter\c!nlines>\zerocount
+ \ifdim\totaltopinserted>\zeropoint\relax
+ \ifdim\dimexpr\rootfloatparameter\c!nlines\lineheight+\totaltopinserted\relax>\textheight
+ \showmessage\m!floatblocks8{\rootfloatparameter\c!nlines}%
+                    \page_otr_fill_and_eject_page % was triple: vfilll
+ \fi
+ \fi
+ \fi
+ \fi
+ \fi
+ \egroup}
+
+\def\d_page_col_collected_top_float_height % pseudo
+ {\dimexpr
+ \d_page_floats_inserted_top +
+ \maxcollectedfloatstotal +
+ \ifdim\d_strc_floats_top>\d_strc_floats_bottom
+ \d_strc_floats_top
+ \else
+ \d_strc_floats_bottom
+ \fi
+ \relax}
+
+\def\page_col_command_set_top_insertions_indeed
+ {\ifnum\noffloatinserts<\c_page_floats_n_of_top
+ \ifcase\savednoffloats
+ \let\page_col_command_set_top_insertions_indeed\relax
+ \else
+ \page_floats_collect\s!text\hsize\emwidth
+ \ifdim\d_page_col_collected_top_float_height<\textheight
+ \global\setbox\floatbox\hpack to \hsize
+ {\page_floats_show_pack_state T%
+ \hfil
+ \dorecurse\nofcollectedfloats
+                       {\ifcase\columndirection % still document wide
+ \page_floats_flush\s!text\plusone
+ \else
+ \page_floats_flush\s!text{\the\numexpr\nofcollectedfloats-\recurselevel+1\relax}%
+ \fi
+ % this could happen at the lua end instead
+ \scratchdimen\dimexpr\wd\floatbox-\naturalfloatwd\relax
+ \ifdim\scratchdimen<\zeropoint
+ \global\setbox\floatbox\hpack spread -\scratchdimen{\hss\box\floatbox\hss}%
+ \fi
+ %
+ \ifdim\wd\floatbox>\makeupwidth % \hsize
+ \hpack to \makeupwidth{\hss\box\floatbox\hss}%
+ \else
+ \box\floatbox
+ \fi
+ \ifnum\recurselevel<\nofcollectedfloats
+ \hfil
+ \fi}%
+ \hfil}%
+ \page_one_prepare_top_float
+ \xdef\totaltopinserted{\the\d_page_floats_inserted_top}%
+ \page_one_insert_top_float
+ \ifconditional\c_page_floats_some_waiting
+ \advance\noffloatinserts \plusone
+ \else
+ \noffloatinserts\c_page_floats_n_of_top\relax
+ \fi
+ \page_floats_report_flushed
+ \else
+ \let\page_col_command_set_top_insertions_indeed\relax
+ \fi
+ \fi
+ \else
+ \ifconditional\c_page_floats_some_waiting
+ \showmessage\m!floatblocks6{\the\c_page_floats_n_of_top}%
+ \fi
+ \let\page_col_command_set_top_insertions_indeed\relax
+ \fi
+ \page_col_command_set_top_insertions_indeed}
+
+\let\page_col_command_flush_top_insertions \page_one_command_flush_top_insertions
+\let\page_col_command_flush_bottom_insertions\page_one_command_flush_bottom_insertions
+
+%let\page_col_command_set_top_insertions \page_one_command_set_top_insertions
+\let\page_col_command_set_bottom_insertions \page_one_command_set_bottom_insertions
+
+%let\page_col_command_set_top_insertions_indeed \page_one_command_set_top_insertions
+\let\page_col_command_set_bottom_insertions_indeed \page_one_command_set_bottom_insertions
+
+\let\page_col_command_flush_float_box \page_one_command_flush_float_box
+\let\page_col_command_synchronize_side_floats \page_one_command_synchronize_side_floats
+\let\page_col_command_flush_side_floats \page_one_command_flush_side_floats
+\let\page_col_command_flush_margin_blocks \page_one_command_flush_margin_blocks
+\let\page_col_command_test_page \page_one_command_test_page
+
+%D The separator code is more or less the same as in mixed columns, but we need to
+%D compensate for the top floats, so we keep it commented out for now.
+
+\newdimen\pagecolumnseparatorheight
+\newdimen\pagecolumnseparatordepth
+\newdimen\pagecolumnseparatorwidth
+
+% \installcorenamespace{pagecolumnsseparator}
+%
+% \unexpanded\def\installpagecolumnseparator#1#2%
+% {\setvalue{\??pagecolumnsseparator#1}{#2}}
+%
+% \installpagecolumnseparator\v!rule
+% {\vrule
+% \s!width \pagecolumnsparameter\c!rulethickness
+% \s!height\pagecolumnseparatorheight
+% \s!depth \pagecolumnseparatordepth
+% \relax}
+%
+% \def\page_col_routine_package_separate
+% {\ifcsname\??pagecolumnsseparator\p_separator\endcsname
+% \page_col_command_inject_separator
+% \else
+% \hss
+% \fi}
+%
+% \unexpanded\def\page_col_command_inject_separator
+% {\begingroup
+% \setbox\scratchbox\hbox to \zeropoint \bgroup
+% \hss
+% \starttextproperties
+% \usepagecolumnscolorparameter\c!rulecolor
+% \begincsname\??pagecolumnsseparator\p_separator\endcsname % was \c!rule
+% \stoptextproperties
+% \hss
+% \egroup
+% \ht\scratchbox\zeropoint
+% \dp\scratchbox\zeropoint
+% \hss
+% \box\scratchbox
+% \hss
+% \endgroup}
+
+\def\page_col_routine_package_separate
+ {\hss}
+
+%D \unknown
+
+\unexpanded\def\page_col_command_routine
+ {\page_sides_output_routine}
+
+% % not:
+%
+% \unexpanded\def\page_col_command_routine
+% {\ifconditional\c_page_sides_short
+% \page_sides_output_routine_yes_column
+% \else
+% \page_sides_output_routine_nop_column
+% \fi}
+%
+% \let\page_sides_output_routine_nop_column\page_sides_output_routine
+%
+% \def\page_sides_output_routine_yes_column % this might become the main one too
+% {\unvbox\normalpagebox % bah, and the discards?
+% %\page_col_column
+% \column % \page
+% % why was this \global\holdinginserts\zerocount
+% \global\setfalse\c_page_sides_short}
+
+\let\page_col_command_flush_all_floats\relax
+
+%D \unknown
+
+\defineoutputroutine
+ [\s!pagecolumn]
+ [\s!page_otr_command_routine =\page_col_command_routine,
+ \s!page_otr_command_package_contents =\page_col_command_package_contents,
+ \s!page_otr_command_set_vsize =\page_col_command_set_vsize,
+ \s!page_otr_command_set_hsize =\page_col_command_set_hsize,
+ % \s!page_otr_command_synchronize_hsize =\page_col_command_synchronize_hsize, % not done
+ \s!page_otr_command_next_page =\page_col_command_next_page,
+ \s!page_otr_command_next_page_and_inserts =\page_col_command_next_page_and_inserts,
+ \s!page_otr_command_set_top_insertions =\page_col_command_set_top_insertions,
+ \s!page_otr_command_set_bottom_insertions =\page_col_command_set_bottom_insertions,
+ \s!page_otr_command_flush_top_insertions =\page_col_command_flush_top_insertions,
+ \s!page_otr_command_flush_bottom_insertions=\page_col_command_flush_bottom_insertions,
+ \s!page_otr_command_check_if_float_fits =\page_col_command_check_if_float_fits,
+ % \s!page_otr_command_set_float_hsize =\page_col_command_set_float_hsize, % not done
+ \s!page_otr_command_flush_float_box =\page_col_command_flush_float_box,
+ \s!page_otr_command_side_float_output =\page_col_command_side_float_output,
+ \s!page_otr_command_synchronize_side_floats=\page_col_command_synchronize_side_floats,
+ \s!page_otr_command_flush_floats =\page_col_command_flush_floats,
+ \s!page_otr_command_flush_side_floats =\page_col_command_flush_side_floats,
+ \s!page_otr_command_flush_saved_floats =\page_col_command_flush_saved_floats,
+ \s!page_otr_command_flush_all_floats =\page_col_command_flush_all_floats,
+ \s!page_otr_command_flush_margin_blocks =\page_col_command_flush_margin_blocks,
+ \s!page_otr_command_test_column =\page_col_command_test_page
+ ]
+
+%D \unknown
+
+\installfloatmethod \s!pagecolumn \v!here \page_one_place_float_here
+\installfloatmethod \s!pagecolumn \v!force \page_one_place_float_force
+\installfloatmethod \s!pagecolumn \v!left \page_one_place_float_left
+\installfloatmethod \s!pagecolumn \v!right \page_one_place_float_right
+\installfloatmethod \s!pagecolumn \v!text \page_one_place_float_text
+\installfloatmethod \s!pagecolumn \v!top \page_one_place_float_top
+\installfloatmethod \s!pagecolumn \v!bottom \page_one_place_float_bottom
+\installfloatmethod \s!pagecolumn \v!auto \page_one_place_float_auto
+\installfloatmethod \s!pagecolumn \v!margin \page_one_place_float_margin
+\installfloatmethod \s!pagecolumn \v!opposite \page_one_place_float_face
+\installfloatmethod \s!pagecolumn \v!page \page_one_place_float_page
+\installfloatmethod \s!pagecolumn \v!leftpage \page_one_place_float_leftpage
+\installfloatmethod \s!pagecolumn \v!rightpage \page_one_place_float_rightpage
+\installfloatmethod \s!pagecolumn \v!inmargin \page_one_place_float_inmargin
+\installfloatmethod \s!pagecolumn \v!inleft \page_one_place_float_leftmargin
+\installfloatmethod \s!pagecolumn \v!inright \page_one_place_float_rightmargin
+\installfloatmethod \s!pagecolumn \v!leftmargin \page_one_place_float_leftmargin
+\installfloatmethod \s!pagecolumn \v!rightmargin \page_one_place_float_rightmargin
+\installfloatmethod \s!pagecolumn \v!leftedge \page_one_place_float_leftedge
+\installfloatmethod \s!pagecolumn \v!rightedge \page_one_place_float_rightedge
+\installfloatmethod \s!pagecolumn \v!somewhere \page_one_place_float_somewhere
+\installfloatmethod \s!pagecolumn \v!backspace \page_one_place_float_backspace
+\installfloatmethod \s!pagecolumn \v!cutspace \page_one_place_float_cutspace
+%installfloatmethod \s!pagecolumn \s!tblr \page_one_place_float_top
+%installfloatmethod \s!pagecolumn \s!lrtb \page_one_place_float_top
+%installfloatmethod \s!pagecolumn \s!tbrl \page_one_place_float_top
+%installfloatmethod \s!pagecolumn \s!fxtb \page_one_place_float_top
+%installfloatmethod \s!pagecolumn \s!rltb \page_one_place_float_top
+%installfloatmethod \s!pagecolumn \s!btlr \page_one_place_float_bottom
+%installfloatmethod \s!pagecolumn \s!lrbt \page_one_place_float_bottom
+%installfloatmethod \s!pagecolumn \s!btrl \page_one_place_float_bottom
+%installfloatmethod \s!pagecolumn \s!rlbt \page_one_place_float_bottom
+%installfloatmethod \s!pagecolumn \s!fxbt \page_one_place_float_bottom
+%installfloatmethod \s!pagecolumn \s!fixd \page_one_place_float_force
+
+\installfloatmethod \s!pagecolumn \v!local \somelocalfloat
+
+%D The main interface:
+
+\installcorenamespace{pagecolumns}
+
+\installframedcommandhandler \??pagecolumns {pagecolumns} \??pagecolumns
+
+\setuppagecolumns
+ [\c!distance=1.5\bodyfontsize,
+ \c!n=\plustwo,
+ %\c!align=, % inherit (also replaces tolerance)
+ %\c!before=,
+ %\c!after=,
+ %\c!separator=\v!none,
+ %\c!setups=,
+ %\c!balance=\v!no,
+ %\c!blank={\v!line,\v!fixed}, yes or no
+ \c!frame=\v!off,
+ \c!strut=\v!no,
+ \c!offset=\v!overlay,
+ %\c!maxheight=\textheight,
+ \c!maxwidth=\makeupwidth,
+ %\c!grid=\v!tolerant,
+ %\c!internalgrid=\v!line,
+ \c!direction=\v!normal]
+
+\let\startpagecolumns\relax % defined later
+\let\stoppagecolumns \relax % defined later
+
+\appendtoks % could become an option
+ \setuevalue{\e!start\currentpagecolumns}{\startpagecolumns[\currentpagecolumns]}%
+ \setuevalue{\e!stop \currentpagecolumns}{\stoppagecolumns}%
+\to \everydefinepagecolumns
+
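+%D A minimal usage sketch (the instance name, the values and the sample content
+%D are only illustrations, they are not part of this module):
+%D
+%D \starttyping
+%D \definepagecolumns[twocols][n=2]
+%D
+%D \starttwocols
+%D     \dorecurse{10}{\input ward \par}
+%D     \placefigure{A caption.}{\framed[height=3cm]{example}}
+%D     \dorecurse{10}{\input ward \par}
+%D \stoptwocols
+%D
+%D % or, without defining an instance first:
+%D
+%D \startpagecolumns[n=3,distance=2em]
+%D     \dorecurse{10}{\input ward \par}
+%D \stoppagecolumns
+%D \stoptyping
+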
+\ifdefined \columnwidth \else \newdimen\columnwidth \fi
+\ifdefined \columndistance \else \newdimen\columndistance \fi
+
+\unexpanded\def\startpagecolumns
+ {\page
+ \begingroup
+ \begingroup
+ \dosingleempty\page_col_start}
+
+\unexpanded\def\page_col_start[#1]%
+ {\doifelseassignment{#1}%
+ {\let\currentpagecolumns\empty
+ \setuppagecolumns[#1]}%
+ {\edef\currentpagecolumns{#1}}%
+ \c_page_col_n_of_columns\pagecolumnsparameter\c!n\relax
+ \ifnum\c_page_col_n_of_columns>\plusone
+ \expandafter\page_col_start_yes
+ \else
+ \expandafter\page_col_start_nop
+ \fi} % public
+
+\unexpanded\def\page_col_start_yes
+ {\d_page_col_distance \pagecolumnsparameter\c!distance\relax
+ % \d_page_col_max_height \pagecolumnsparameter\c!maxheight
+ \d_page_col_max_width \pagecolumnsparameter\c!maxwidth
+ % \d_page_col_balance_step \pagecolumnsparameter\c!step
+ \c_page_col_current \plusone
+ %
+ \d_page_col_column_width\dimexpr(\d_page_col_max_width-\d_page_col_distance*\numexpr(\c_page_col_n_of_columns-\plusone)\relax)/\c_page_col_n_of_columns\relax
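+    % i.e. column width = (max width - (n - 1) * distance) / n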
+ %
+ \columnwidth \d_page_col_column_width
+ \columndistance \d_page_col_distance
+ %
+ \nopenalties
+ %
+ % \insidecolumnstrue % NO!
+ %
+ \let\column\page_col_column
+ %
+ \def\page_floats_get_used_hsize{\makeupwidth} % a bit of a hack
+ %
+ \usealignparameter \pagecolumnsparameter
+ \useblankparameter \pagecolumnsparameter
+ % \useprofileparameter\pagecolumnsparameter
+ %
+ \usemixedcolumnscolorparameter\c!color
+ %
+ \setupnotes[\c!width=\textwidth]%
+ %
+ \usesetupsparameter\pagecolumnsparameter
+ %
+ \setupoutputroutine[\s!pagecolumn]%
+ %
+ \setupfloats[\c!ntop=\plusthousand]%
+ % \setupfloats[\c!nbottom=\plusthousand]%
+ %
+ \page_col_command_set_vsize
+ \page_col_command_set_hsize
+ %
+ \nofcolumns\c_page_mix_n_of_columns
+ %
+ \unexpanded\def\page_col_start[##1]%
+ {\page_col_start_nop}%
+ %
+ \let\stoppagecolumns\page_col_stop_yes}
+
+\unexpanded\def\page_col_start_nop
+ {\nofcolumns\c_page_mix_n_of_columns
+ \let\stoppagecolumns\page_col_stop_nop}
+
+\unexpanded\def\page_col_stop_yes
+ {\column % \page_otr_eject_page
+ \page
+ \endgroup
+ % \setupoutputroutine[\s!singlecolumn]%
+ \page_otr_command_set_vsize
+ \page_otr_command_set_hsize
+ \page
+ \endgroup}
+
+\unexpanded\def\page_col_stop_nop
+ {\page
+ \endgroup
+ \endgroup}
+
+\protect \endinput
diff --git a/tex/context/base/mkiv/page-pst.mkiv b/tex/context/base/mkiv/page-pst.mkiv
index f12663f66..746356495 100644
--- a/tex/context/base/mkiv/page-pst.mkiv
+++ b/tex/context/base/mkiv/page-pst.mkiv
@@ -13,7 +13,7 @@
\writestatus{loading}{ConTeXt Page Macros / Postponing}
-\registerctxluafile{page-pst}{1.001}
+\registerctxluafile{page-pst}{}
\unprotect
@@ -69,10 +69,15 @@
% officially we should flush again after a flush as there can be new future pages
% but that will be looked into when we run into it
+\c_page_postponed_mode\plusone
+
\unexpanded\def\page_postponed_blocks_flush_indeed
{\begingroup
\setsystemmode\v!postponing
\settrue\c_page_postponed_busy
+ \ifnum\c_page_postponed_mode=\plusone
+ \savepagecontent
+ \fi
\pagetotal\zeropoint % here? still needed? (was after flush pagefloats)
\the\everytopofpage\relax
%\flushrestfloats
@@ -84,6 +89,9 @@
% page {123}
\relax
\page_otr_command_flush_floats % new but potential dangerous, maybe we need a classification
+ \ifnum\c_page_postponed_mode=\plusone
+ \flushpagecontent
+ \fi
\endgroup} % of blocks: with and without flush
\unexpanded\def\page_postponed_blocks_flush
diff --git a/tex/context/base/mkiv/page-run.lua b/tex/context/base/mkiv/page-run.lua
new file mode 100644
index 000000000..c0a2022b2
--- /dev/null
+++ b/tex/context/base/mkiv/page-run.lua
@@ -0,0 +1,188 @@
+if not modules then modules = { } end modules ['page-run'] = {
+ version = 1.001,
+ comment = "companion to page-run.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local format, concat = string.format, table.concat
+local todimen = number.todimen
+local texdimen = tex.dimen
+
+local function asdimen(name,unit)
+ return todimen(texdimen[name],unit,"%0.4f") -- 4 is more than enough, even 3 would be okay
+end
+
+local function checkedoptions(options)
+ if type(options) == "table" then
+ return options
+ elseif not options or options == "" then
+ options = "pt,cm"
+ end
+ options = utilities.parsers.settings_to_hash(options)
+ local n = 4
+ for k, v in table.sortedhash(options) do
+ local m = tonumber(k)
+ if m then
+ n = m
+ end
+ end
+ options.n = n
+ return options
+end
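+
+-- A sketch of what checkedoptions is expected to return, assuming the usual
+-- behaviour of utilities.parsers.settings_to_hash (keys mapped to true):
+--
+--   checkedoptions("pt,cm,6") -- { pt = true, cm = true, ["6"] = true, n = 6 }
+--   checkedoptions("")        -- { pt = true, cm = true, n = 4 }
+--
+-- A bare number among the options ends up as n, the page count used by showlayout.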
+
+function commands.showlayoutvariables(options)
+
+ options = checkedoptions(options)
+
+ local dimensions = { "pt", "bp", "cm", "mm", "dd", "cc", "pc", "nd", "nc", "sp", "in" }
+
+ local n = 0
+ for i=1,#dimensions do
+ if options[dimensions[i]] then
+ n = n + 1
+ end
+ end
+
+ if n == 0 then
+ options.pt = true
+ n = 1
+ end
+
+ local function showdimension(name)
+ context.NC()
+ context.tex(interfaces.interfacedcommand(name))
+ context.NC()
+ for i=1,#dimensions do
+ local d = dimensions[i]
+ if options[d] then
+ context("%s%s",asdimen(name,d),d)
+ context.NC()
+ end
+ end
+ context.NR()
+ end
+
+ local function showmacro(name)
+ context.NC()
+ context.tex(interfaces.interfacedcommand(name))
+ context.NC()
+ context.getvalue(name)
+ context.NC()
+ context.NR()
+ end
+
+ local function reportdimension(name)
+ local result = { }
+ for i=1,#dimensions do
+ local d = dimensions[i]
+ if options[d] then
+ result[#result+1] = format("%12s%s",asdimen(name,d),d)
+ end
+ end
+ commands.writestatus("layout",format("%-24s %s",interfaces.interfacedcommand(name),concat(result," ")))
+ end
+
+ if tex.count.textlevel == 0 then
+
+ -- especially for Luigi:
+
+ reportdimension("paperheight")
+ reportdimension("paperwidth")
+ reportdimension("printpaperheight")
+ reportdimension("printpaperwidth")
+ reportdimension("topspace")
+ reportdimension("backspace")
+ reportdimension("makeupheight")
+ reportdimension("makeupwidth")
+ reportdimension("topheight")
+ reportdimension("topdistance")
+ reportdimension("headerheight")
+ reportdimension("headerdistance")
+ reportdimension("textheight")
+ reportdimension("footerdistance")
+ reportdimension("footerheight")
+ reportdimension("bottomdistance")
+ reportdimension("bottomheight")
+ reportdimension("leftedgewidth")
+ reportdimension("leftedgedistance")
+ reportdimension("leftmarginwidth")
+ reportdimension("leftmargindistance")
+ reportdimension("textwidth")
+ reportdimension("rightmargindistance")
+ reportdimension("rightmarginwidth")
+ reportdimension("rightedgedistance")
+ reportdimension("rightedgewidth")
+ reportdimension("bodyfontsize")
+ reportdimension("lineheight")
+
+ else
+
+ context.starttabulate { "|l|" .. string.rep("Tr|",n) }
+
+ showdimension("paperheight")
+ showdimension("paperwidth")
+ showdimension("printpaperheight")
+ showdimension("printpaperwidth")
+ showdimension("topspace")
+ showdimension("backspace")
+ showdimension("makeupheight")
+ showdimension("makeupwidth")
+ showdimension("topheight")
+ showdimension("topdistance")
+ showdimension("headerheight")
+ showdimension("headerdistance")
+ showdimension("textheight")
+ showdimension("footerdistance")
+ showdimension("footerheight")
+ showdimension("bottomdistance")
+ showdimension("bottomheight")
+ showdimension("leftedgewidth")
+ showdimension("leftedgedistance")
+ showdimension("leftmarginwidth")
+ showdimension("leftmargindistance")
+ showdimension("textwidth")
+ showdimension("rightmargindistance")
+ showdimension("rightmarginwidth")
+ showdimension("rightedgedistance")
+ showdimension("rightedgewidth")
+ context.NR()
+ showdimension("bodyfontsize")
+ showdimension("lineheight")
+ context.NR()
+ showmacro("strutheightfactor")
+ showmacro("strutdepthfactor")
+ showmacro("topskipfactor")
+ showmacro("maxdepthfactor")
+
+ context.stoptabulate()
+
+ end
+
+end
+
+function commands.showlayout(options)
+
+ options = checkedoptions(options)
+
+ if tex.count.textlevel == 0 then
+
+ commands.showlayoutvariables(options)
+
+ else
+
+ context.page()
+ context.bgroup()
+ context.showframe()
+ context.setuplayout { marking = interfaces.variables.on }
+ for i=1,(options.n or 4) do
+ commands.showlayoutvariables(options)
+ context.page()
+ end
+ context.egroup()
+
+ end
+
+end
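+
+-- commands.showlayout is reached from page-run.mkiv via \ctxcommand{showlayout("...")}
+-- (the \showlayout[...] macro); commands.showlayoutvariables is also called from it.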
+
diff --git a/tex/context/base/mkiv/page-run.mkiv b/tex/context/base/mkiv/page-run.mkiv
index 9adcb23c7..19adfaa9c 100644
--- a/tex/context/base/mkiv/page-run.mkiv
+++ b/tex/context/base/mkiv/page-run.mkiv
@@ -13,10 +13,11 @@
\writestatus{loading}{ConTeXt Page Macros / Runtime Macros}
+\registerctxluafile{page-run}{}
+
\unprotect
-\gdef\showprint
- {\dotripleempty\page_layouts_show_print}
+\unexpanded\gdef\showprint{\dotripleempty\page_layouts_show_print}
\gdef\page_layouts_show_print[#1][#2][#3]% only english
{\setbuffer[crap]%
@@ -70,189 +71,9 @@
% {\showprint[mirrored] [mirrored] [location=middle]} {\type{mirrored}\break\type{mirrored}}
% \stopcombination
-% maybe we will have page-run.lua
-
-\startluacode
-local format, concat = string.format, table.concat
-local todimen = number.todimen
-local texdimen = tex.dimen
-
-local function asdimen(name,unit)
- return todimen(texdimen[name],unit,"%0.4f") -- 4 is more than enough, even 3 would be okay
-end
-
-local function checkedoptions(options)
- if type(options) == "table" then
- return options
- elseif not options or options == "" then
- options = "pt,cm"
- end
- options = utilities.parsers.settings_to_hash(options)
- local n = 4
- for k, v in table.sortedhash(options) do
- local m = tonumber(k)
- if m then
- n = m
- end
- end
- options.n = n
- return options
-end
-
-function commands.showlayoutvariables(options)
-
- options = checkedoptions(options)
-
- local dimensions = { "pt", "bp", "cm", "mm", "dd", "cc", "pc", "nd", "nc", "sp", "in" }
-
- local n = 0
- for i=1,#dimensions do
- if options[dimensions[i]] then
- n = n + 1
- end
- end
-
- if n == 0 then
- options.pt = true
- n = 1
- end
-
- local function showdimension(name)
- context.NC()
- context.tex(interfaces.interfacedcommand(name))
- context.NC()
- for i=1,#dimensions do
- local d = dimensions[i]
- if options[d] then
- context("%s%s",asdimen(name,d),d)
- context.NC()
- end
- end
- context.NR()
- end
-
- local function showmacro(name)
- context.NC()
- context.tex(interfaces.interfacedcommand(name))
- context.NC()
- context.getvalue(name)
- context.NC()
- context.NR()
- end
-
- local function reportdimension(name)
- local result = { }
- for i=1,#dimensions do
- local d = dimensions[i]
- if options[d] then
- result[#result+1] = format("%12s%s",asdimen(name,d),d)
- end
- end
- commands.writestatus("layout",format("%-24s %s",interfaces.interfacedcommand(name),concat(result," ")))
- end
-
- if tex.count.textlevel == 0 then
-
- -- especially for Luigi:
-
- reportdimension("paperheight")
- reportdimension("paperwidth")
- reportdimension("printpaperheight")
- reportdimension("printpaperwidth")
- reportdimension("topspace")
- reportdimension("backspace")
- reportdimension("makeupheight")
- reportdimension("makeupwidth")
- reportdimension("topheight")
- reportdimension("topdistance")
- reportdimension("headerheight")
- reportdimension("headerdistance")
- reportdimension("textheight")
- reportdimension("footerdistance")
- reportdimension("footerheight")
- reportdimension("bottomdistance")
- reportdimension("bottomheight")
- reportdimension("leftedgewidth")
- reportdimension("leftedgedistance")
- reportdimension("leftmarginwidth")
- reportdimension("leftmargindistance")
- reportdimension("textwidth")
- reportdimension("rightmargindistance")
- reportdimension("rightmarginwidth")
- reportdimension("rightedgedistance")
- reportdimension("rightedgewidth")
- reportdimension("bodyfontsize")
- reportdimension("lineheight")
-
- else
-
- context.starttabulate { "|l|" .. string.rep("Tr|",n) }
-
- showdimension("paperheight")
- showdimension("paperwidth")
- showdimension("printpaperheight")
- showdimension("printpaperwidth")
- showdimension("topspace")
- showdimension("backspace")
- showdimension("makeupheight")
- showdimension("makeupwidth")
- showdimension("topheight")
- showdimension("topdistance")
- showdimension("headerheight")
- showdimension("headerdistance")
- showdimension("textheight")
- showdimension("footerdistance")
- showdimension("footerheight")
- showdimension("bottomdistance")
- showdimension("bottomheight")
- showdimension("leftedgewidth")
- showdimension("leftedgedistance")
- showdimension("leftmarginwidth")
- showdimension("leftmargindistance")
- showdimension("textwidth")
- showdimension("rightmargindistance")
- showdimension("rightmarginwidth")
- showdimension("rightedgedistance")
- showdimension("rightedgewidth")
- context.NR()
- showdimension("bodyfontsize")
- showdimension("lineheight")
- context.NR()
- showmacro("strutheightfactor")
- showmacro("strutdepthfactor")
- showmacro("topskipfactor")
- showmacro("maxdepthfactor")
-
- context.stoptabulate()
-
- end
-
-end
-
-function commands.showlayout(options)
-
- options = checkedoptions(options)
-
- if tex.count.textlevel == 0 then
-
- commands.showlayoutvariables(options)
-
- else
-
- context.page()
- context.bgroup()
- context.showframe()
- context.setuplayout { marking = interfaces.variables.on }
- for i=1,(options.n or 4) do
- commands.showlayoutvariables(options)
- context.page()
- end
- context.egroup()
-
- end
-
-end
-\stopluacode
+\unexpanded\gdef\showframe {\dodoubleempty\page_layouts_show_frame}
+\unexpanded\gdef\showsetups{\dosingleempty\page_layouts_show_setups}
+\unexpanded\gdef\showlayout{\dosingleempty\page_layouts_show_layout}
\gdef\page_layouts_show_frame[#1][#2]%
{\ifsecondargument
@@ -293,10 +114,6 @@ end
\gdef\page_layouts_show_layout[#1]%
{\ctxcommand{showlayout("#1")}}
-\unexpanded\gdef\showframe {\dodoubleempty\page_layouts_show_frame }
-\unexpanded\gdef\showsetups{\dosingleempty\page_layouts_show_setups}
-\unexpanded\gdef\showlayout{\dosingleempty\page_layouts_show_layout}
-
\unexpanded\gdef\showmargins
{\starttabulate
\NC asynchrone \NC \doifelseoddpage {odd} {even} \NC \NR
@@ -307,4 +124,94 @@ end
\NC inner margin \NC \the\innermarginwidth \NC \NR
\stoptabulate}
+%D Page grids:
+
+\newconstant\c_page_grids_location
+\newconstant\c_page_grids_line_mode
+\newconstant\c_page_grids_lineno_mode
+\newconstant\c_page_grids_columns_mode
+
+\unexpanded\gdef\showgrid
+ {\dosingleempty\page_grids_show}
+
+\gdef\page_grids_show[#1]%
+ {\c_page_grids_location \plusone % downward compatible default
+ \c_page_grids_line_mode \plusone
+ \c_page_grids_lineno_mode \plusone
+ \c_page_grids_columns_mode\plusone
+ \processallactionsinset
+ [#1]%
+ [ \v!reset=>\c_page_grids_location \zerocount
+ \c_page_grids_columns_mode\zerocount,
+ \v!bottom=>\c_page_grids_location \plusone,
+ \v!top=>\c_page_grids_location \plustwo,
+ \v!none=>\c_page_grids_line_mode \zerocount,
+ \v!all=>\c_page_grids_line_mode \plusone,
+ \v!lines=>\c_page_grids_line_mode \plustwo,
+ \v!frame=>\c_page_grids_line_mode \plusthree,
+ \v!nonumber=>\c_page_grids_lineno_mode \zerocount,
+ \v!right=>\c_page_grids_lineno_mode \plusone,
+ \v!left=>\c_page_grids_lineno_mode \plustwo,
+ \v!outer=>\c_page_grids_lineno_mode \plusthree,
+ \v!columns=>\c_page_grids_columns_mode\plusone]% new option
+ \ifcase\c_page_grids_location
+ \let\page_grids_add_to_box\gobbleoneargument
+ \else % 1=bottom 2=top
+ \let\page_grids_add_to_box\page_grids_add_to_box_indeed
+ \fi
+ \ifcase\c_page_grids_columns_mode
+ \let\page_grids_add_to_one\gobbleoneargument
+ \let\page_grids_add_to_mix\gobbleoneargument
+ \else
+ \let\page_grids_add_to_one\page_grids_add_to_one_indeed
+ \let\page_grids_add_to_mix\page_grids_add_to_mix_indeed
+ \fi}
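+
+%D A usage sketch (the keywords are the ones handled above):
+%D
+%D \starttyping
+%D \showgrid[top,lines,outer,columns]
+%D \stoptyping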
+
+% if really needed for speed we can cache the grid
+
+\glet\page_grids_add_to_box\gobbleoneargument
+\glet\page_grids_add_to_one\gobbleoneargument
+\glet\page_grids_add_to_mix\gobbleoneargument
+
+\gdef\page_grids_add_to_box_indeed#1% to be checked for color and layer ..... use mp
+ {\startcolor[layout:grid]%
+ \resetvisualizers
+ \gridboxlinemode \c_page_grids_line_mode
+ \gridboxlinenomode\c_page_grids_lineno_mode
+ \setgridbox\scratchbox\makeupwidth\textheight % todo: check color
+ \global\setbox#1\hbox % global ?
+ {\ifcase\c_page_grids_location\or\or\box#1\hskip-\makeupwidth\fi
+ \begingroup % color
+ \ifcase\layoutcolumns\else
+ \gray
+ \setlayoutcomponentattribute{\v!grid:\v!columns}%
+ \hbox \layoutcomponentboxattribute to \makeupwidth
+ {\dorecurse\layoutcolumns
+ {\hskip\layoutcolumnwidth
+ \ifnum\recurselevel<\layoutcolumns
+ \vrule
+ \s!height\ht\scratchbox
+ \s!depth \dp\scratchbox
+ \s!width \layoutcolumndistance
+ \fi}}%
+ \hskip-\makeupwidth
+ \fi
+ \setlayoutcomponentattribute{\v!grid:\v!lines}%
+ \hbox \layoutcomponentboxattribute{\box\scratchbox}%
+ \endgroup
+ \ifcase\c_page_grids_location\or\hskip-\makeupwidth\box#1\fi}%
+ \stopcolor}
+
+\gdef\page_grids_add_to_one_indeed#1%
+ {\begingroup
+ \resetvisualizers
+ \global\setbox#1\vpack{\backgroundline[layout:one]{\box#1}}%
+ \endgroup}
+
+\gdef\page_grids_add_to_mix_indeed#1%
+ {\begingroup
+ \resetvisualizers
+ \global\setbox#1\vpack{\backgroundline[layout:mix]{\box#1}}%
+ \endgroup}
+
\protect \endinput
diff --git a/tex/context/base/mkiv/page-set.mkiv b/tex/context/base/mkiv/page-set.mkiv
index 2cea31e1c..fb6f607a1 100644
--- a/tex/context/base/mkiv/page-set.mkiv
+++ b/tex/context/base/mkiv/page-set.mkiv
@@ -14,6 +14,8 @@
% not yet mkiv! probably namespace issues with localframed (only one left)
%
% getnoflines vs getrawnoflines
+%
+% can have some vpack and hpack
% This is a rather old mechanism and we can best keep it as it is. If it gets
% replaced by a more modern solution, it will be an extra mechanism. So, we
@@ -410,7 +412,7 @@
\letcolumnsetparameter\c!height\!!heighta
\letcolumnsetparameter\c!lines \empty
\letcolumnsetparameter\c!region\currentcolumnset
- \setbox\scratchbox\hbox\inheritedcolumnsetframed{}% maybe \fastlocalframed
+ \setbox\scratchbox\hpack\inheritedcolumnsetframed{}% maybe \fastlocalframed
\wd\scratchbox\d_page_set_local_hsize
\ht\scratchbox\!!heighta
\ifcase\columndirection
@@ -558,14 +560,14 @@
{\ifdim\ht\OTRfinalpagebox=\textheight
\bgroup
\ifcase\OTRSETbalancemethod
- \page_otr_construct_and_shipout\box\OTRfinalpagebox
+ \page_otr_construct_and_shipout\box\OTRfinalpagebox\zerocount % three arguments
\else\ifdim\OTRSETbalht>\zeropoint
% catch a bordercase
\scratchdimen\OTRSETbalht
\advance\scratchdimen\lineheight\relax
\ifdim\scratchdimen>\textheight
% full page
- \page_otr_construct_and_shipout\box\OTRfinalpagebox
+ \page_otr_construct_and_shipout\box\OTRfinalpagebox\zerocount % three arguments
\else
% same page
\global\setbox\OTRfinalpagebox \iftracecolumnset\ruledvbox\else\vbox\fi to \OTRSETbalht
@@ -574,7 +576,7 @@
\snaptogrid\vbox{\vskip\OTRSETbalht}% hack
\fi
\else
- \page_otr_construct_and_shipout\box\OTRfinalpagebox
+ \page_otr_construct_and_shipout\box\OTRfinalpagebox\zerocount % three arguments
\fi \fi
\globallet\OTRSETbalht\zeropoint
\egroup
@@ -612,7 +614,7 @@
\ifinotr % else problems with floats, see extreme
     \pagegoal\vsize % not needed, unless inside otr
\fi
- \synchronizeoutput % fails on example
+ \synchronizeoutput % fails on example, try \triggerpagebuilder instead
% \allowbreak % hm
\fi
\synchronizenotes
@@ -659,7 +661,7 @@
\installcolumnbreakmethod \s!columnset \v!local
{\OTRSETcolumnhbreak
- \page_otr_flush_all_floats
+ \page_set_command_flush_all_floats
\page_otr_eject_page
% no \page_set_command_set_hsize, can be mid smaller (like tabulate)
% also, this one should be executed at the outer level
@@ -2596,27 +2598,31 @@
\fi
\fi}
+\unexpanded\def\page_set_command_flush_all_floats
+ {\page_one_command_flush_all_floats}
+
\defineoutputroutine
[\s!columnset]
[\s!page_otr_command_routine =\page_set_command_routine,
\s!page_otr_command_package_contents =\page_set_command_package_contents,
\s!page_otr_command_set_vsize =\page_set_command_set_vsize,
% \s!page_otr_command_set_hsize =\page_one_command_set_hsize, % tricky, goes wrong
+ \s!page_otr_command_synchronize_hsize =\page_set_command_synchronize_hsize,
\s!page_otr_command_next_page =\page_set_command_next_page,
\s!page_otr_command_next_page_and_inserts =\page_set_command_next_page_and_inserts,
- \s!page_otr_command_synchronize_hsize =\page_set_command_synchronize_hsize,
\s!page_otr_command_set_top_insertions =\page_set_command_set_top_insertions,
\s!page_otr_command_set_bottom_insertions =\page_set_command_set_bottom_insertions,
\s!page_otr_command_flush_top_insertions =\page_set_command_flush_top_insertions,
\s!page_otr_command_flush_bottom_insertions =\page_set_command_flush_bottom_insertions,
- \s!page_otr_command_set_float_hsize =\page_set_command_set_float_hsize,
\s!page_otr_command_check_if_float_fits =\page_set_command_check_if_float_fits,
+ \s!page_otr_command_set_float_hsize =\page_set_command_set_float_hsize,
\s!page_otr_command_flush_float_box =\page_set_command_flush_float_box,
- \s!page_otr_command_synchronize_side_floats =\page_set_command_synchronize_side_floats,
\s!page_otr_command_side_float_output =\page_set_command_side_float_output,
+ \s!page_otr_command_synchronize_side_floats =\page_set_command_synchronize_side_floats,
\s!page_otr_command_flush_floats =\page_set_command_flush_floats,
\s!page_otr_command_flush_side_floats =\page_set_command_flush_side_floats,
- \s!page_otr_command_flush_saved_floats =\page_set_command_flush_saved_floats
+ \s!page_otr_command_flush_saved_floats =\page_set_command_flush_saved_floats,
+ \s!page_otr_command_flush_all_floats =\page_set_command_flush_all_floats,
% \s!page_otr_command_flush_margin_blocks =\page_set_command_flush_margin_blocks, % not used
]
diff --git a/tex/context/base/mkiv/page-sid.mkiv b/tex/context/base/mkiv/page-sid.mkiv
index f32a443be..5742ce8c4 100644
--- a/tex/context/base/mkiv/page-sid.mkiv
+++ b/tex/context/base/mkiv/page-sid.mkiv
@@ -15,31 +15,32 @@
\unprotect
-% These macro deal with side floats. We started with Daniel Comenetz macros as published
-% in TUGBoat Volume 14 (1993), No.\ 1: Anchored Figures at Either Margin. I extended and
-% patched the macros to suite our needs which results in a messy module. Therefore, this
-% module badly needs an update because it's now a mixture of old and new macros.
-
-% Interesting cases where it goes wrong:
-%
-% \placefigure[left]{}{} \dorecurse{3}{\input ward } {\par} \input ward
-%
-% Here we get an unwanted carried over hangindent and parindent. A solution is to associate
-% it with the local par node instead. This is something to discuss with Taco as it could be
-% a new luatex/mkiv feature: explicitly set par properties.
-
-% Maybe I should just rewrite the lot.
+%D These macros deal with side floats. We started with Daniel Comenetz's macros as
+%D published in TUGBoat Volume 14 (1993), No.\ 1: Anchored Figures at Either Margin.
+%D I extended and patched the macros to suit our needs, which results in a messy
+%D module.
+%D
+%D A complication is that we need to deal with spacing differently before and after
+%D the float. Also, whitespace can interfere, as does the prevdepth. There is no real
+%D universal solution. So, by now not much is left of that code, if only because we
+%D need to match the \CONTEXT\ spacing module, because we have more placement options
+%D and control, and because the math hackery is not suitable for \CONTEXT\ anyway.
+%D
+%D This code has been redone many times because we kept running into spacing issues
+%D and it's not that much fun (or rewarding). It's probably the module that made
+%D me go into distraction mode most often (like watching amusing Walk of The
+%D Earth, sophisticated Massive Attack video clips, impressive Davie504 movies
+%D and so on).
\newdimen \d_page_sides_height % includes the topskip
\newdimen \d_page_sides_width
\newdimen \d_page_sides_hsize
\newdimen \d_page_sides_vsize
-\newdimen \d_page_sides_vsize_reset \d_page_sides_vsize_reset -\onepoint % could be a \let
+\newdimen \d_page_sides_vsize_reset
\newdimen \d_page_sides_progress
\newdimen \d_page_sides_page_total
-\newdimen \d_page_sides_saved_depth
-\newbox \b_page_sides_bottom
+%newbox \b_page_sides_bottom
\newcount \c_page_sides_lines_done
\newcount \c_page_sides_checks_done
@@ -47,12 +48,9 @@
\newcount \c_page_sides_n_of_hang
\newconstant \c_page_sides_float_type
-\newconditional \c_page_sides_short % needs checking .. best write this from scratch
+\newconditional \c_page_sides_short
\newconditional \c_page_sides_flag
-\newconditional \c_page_sides_r_eq % messy, needs checking anyway
-\newconditional \c_page_sides_l_eq % messy, needs checking anyway
-
\newdimen \d_page_sides_shift
\newdimen \d_page_sides_extrashift
\newdimen \d_page_sides_leftshift
@@ -62,10 +60,43 @@
\newdimen \d_page_sides_maximum
\newdimen \d_page_sides_topskip
\newdimen \d_page_sides_bottomskip
+\newdimen \d_page_sides_midskip
\newdimen \d_page_sides_downshift
+\newdimen \d_page_sides_pagetotal
+\newdimen \d_page_sides_topoffset
+\newdimen \d_page_sides_bottomoffset
+\newdimen \d_page_sides_toptotal
+\newdimen \d_page_sides_bottomtotal
+
+\newconstant \c_page_sides_align
+\newconstant \c_page_sides_skipmode
+\newconstant \c_page_sides_tolerance
+
+\newconstant \c_page_sides_method % sort of obsolete
+
+\newdimen \d_page_sides_progression
-\setnewconstant \c_page_sides_method \plusone % 0=raw 1=safe (.99) 2=tight (-1pt)
-\setnewconstant \c_page_sides_align \zerocount
+\newif \iftracesidefloats % public (might change)
+
+%D Defaults:
+
+\d_page_sides_vsize_reset -\onepoint
+%d_page_sides_vsize_reset \zeropoint % could be an option, needs testing
+
+%D We have some basic (and colorful) tracing:
+
+\def\page_sides_floats_legend
+ {\showmessage\m!floatblocks{16}\empty
+ \glet\page_sides_floats_legend\relax}
+
+\installtextracker{floats.anchoring}
+ {\page_sides_floats_legend
+ \tracesidefloatstrue}
+ {\tracesidefloatsfalse}
+
+%D The horizontal shifts depend on the location: left or right in the text, margin
+%D or edge. These shifts are rather stable and don't interfere with the page flow
+%D as much as the vertical ones do.
\def\page_sides_process_float_backspace {\global\c_page_sides_float_type\plusone \page_sides_handle_float}
\def\page_sides_process_float_leftedge {\global\c_page_sides_float_type\plustwo \page_sides_handle_float}
@@ -77,14 +108,13 @@
\def\page_sides_process_float_cutspace {\global\c_page_sides_float_type\pluseight\page_sides_handle_float}
\def\page_sides_process_float_margin {\global\c_page_sides_float_type\pluseight\page_sides_handle_float}
-\let\logsidefloat\relax
-
-\newif\iftracesidefloats % public (might change)
-
-% end of public variables
-
-\def\page_sides_insert_info
- {\page_floats_report_total}
+\def\page_sides_check_horizontal_skips
+ {\ifcase\c_page_sides_skipmode
+ \or % high
+ \or % low
+ \or % fit
+ \global\d_strc_floats_margin\zeropoint
+ \fi}
\def\page_sides_apply_horizontal_shift
{\ifdim\d_page_sides_maximum>\zeropoint
@@ -185,275 +215,14 @@
+\compensatedinnermakeupmargin
\relax
\fi
- \ifdim\d_page_sides_rightskip>\zeropoint \global\advance\d_page_sides_rightskip\rightskip \fi
- \ifdim\d_page_sides_leftskip >\zeropoint \global\advance\d_page_sides_leftskip \leftskip \fi}
-
-\def\page_sides_flush_floats
- {\par
- \ifnum\dimexpr\d_page_sides_vsize-\pagetotal\relax>\zeropoint
- \page_sides_flush_floats_indeed
+ \ifdim\d_page_sides_rightskip>\zeropoint
+ \global\advance\d_page_sides_rightskip\rightskip
\fi
- % also here if used at all \global\holdinginserts\zerocount
- \global\d_page_sides_vsize\d_page_sides_vsize_reset
- \global\setfalse\c_page_sides_short
- \global\setfalse\c_page_sides_flag}
-
-\unexpanded\def\doifelsesidefloat
- {\par
- \ifnum\dimexpr\d_page_sides_vsize-\pagetotal\relax>\zeropoint
- \expandafter\firstoftwoarguments
- \else
- \expandafter\secondoftwoarguments
- \fi}
-
-\let\doifsidefloatelse\doifelsesidefloat
-
-% \def\page_sides_flush_floats_indeed
-% {\global\advance\d_page_sides_vsize-\d_page_sides_bottomskip
-% \begingroup
-% \let\page_sides_flush_floats\relax
-% \forgetall
-% \doloop
-% {\strut
-% \iftracesidefloats
-% \color[darkgray]{\ruledhbox{\strut\kern\d_page_sides_width}}%
-% \fi
-% \par
-% % \ifdim\dimexpr\d_page_sides_vsize-\pagetotal\relax>\zeropoint
-% \ifdim\dimexpr\d_page_sides_vsize-\d_page_sides_bottomskip-\pagetotal\relax>\zeropoint
-% \ifnum\recurselevel>\plushundred % safeguard, sort of deadcycles
-% \exitloop
-% \fi
-% \else
-% \exitloop
-% \fi}%
-% \endgroup
-% \ifdim\parskip>\zeropoint % why this test ?
-% \ifdim\d_page_sides_bottomskip>\parskip
-% % \nowhitespace
-% % \vskip\d_page_sides_bottomskip
-% \blank[\v!nowhite,\the\dimexpr\d_page_sides_bottomskip]
-% \fi
-% \else
-% \blank[\the\d_page_sides_bottomskip]% new, so needs checking
-% \fi}
-
-\installcorenamespace{sidefloatsteps}
-
-\setvalue{\??sidefloatsteps\v!line }{\strut}
-\setvalue{\??sidefloatsteps\v!big }{\strut}
-\setvalue{\??sidefloatsteps\v!medium}{\halfstrut}
-\setvalue{\??sidefloatsteps\v!small }{\quarterstrut}
-\setvalue{\??sidefloatsteps\v!depth }{\depthstrut}
-
-% we don't officially know what kind of float we flush
-
-\def\page_sides_flush_floats_indeed
- {\global\advance\d_page_sides_vsize-\d_page_sides_bottomskip
- \begingroup
- \let\page_sides_flush_floats\relax
- \edef\m_pages_strut{\executeifdefined{\??sidefloatsteps\rootfloatparameter\c!step}\strut}%
- \forgetall
- \doloop
- {\iftracesidefloats
- \dontleavehmode
- \ruledhpack{\m_pages_strut\kern\d_page_sides_width}%
- \else
- \m_pages_strut
- \fi
- \par
- \nointerlineskip
- % \ifdim\dimexpr\d_page_sides_vsize-\d_page_sides_bottomskip-\pagetotal\relax>\zeropoint
- \ifdim\dimexpr\d_page_sides_vsize-\pagetotal\relax>\zeropoint
- \ifnum\recurselevel>\plushundred % safeguard, sort of deadcycles
- \exitloop
- \fi
- \else
- \exitloop
- \fi}%
- \endgroup
- \ifdim\parskip>\zeropoint % why this test ?
- \ifdim\d_page_sides_bottomskip>\parskip
- % \nowhitespace
- % \vskip\d_page_sides_bottomskip
-% \blank[\v!nowhite,\the\dimexpr\d_page_sides_bottomskip]
- \blank[\v!nowhite,\rootfloatparameter\c!sidespaceafter]
- \fi
- \else
-% \blank[\the\d_page_sides_bottomskip]% new, so needs checking
- \blank[\rootfloatparameter\c!sidespaceafter]% new, so needs checking
- \fi}
-
-% alternative method (unsnapped)
-%
-% \def\page_sides_flush_floats_indeed
-% {\privatescratchdimen\dimexpr\d_page_sides_vsize-\d_page_sides_bottomskip-\pagetotal\relax
-% \ifdim\parskip>\zeropoint % why this test ?
-% \ifdim\privatescratchdimen>\parskip
-% \blank[\v!nowhite,\the\privatescratchdimen] % better in stages
-% \fi
-% \else
-% \blank[\the\privatescratchdimen]
-% \fi}
-
-\def\page_sides_check_floats_after_par
- {\page_sides_check_floats_indeed
- \ifdim\oldpagetotal=\pagetotal \else
- \global\let\page_sides_check_floats\page_sides_check_floats_indeed
- \page_sides_flush_floats
- \global\c_page_sides_n_of_lines\zerocount % here !
+ \ifdim\d_page_sides_leftskip >\zeropoint
+ \global\advance\d_page_sides_leftskip \leftskip
\fi}
-\unexpanded\def\page_sides_flush_floats_after_par
- {\xdef\oldpagetotal{\the\pagetotal}%
- \global\let\page_sides_check_floats\page_sides_check_floats_after_par}
-
-\unexpanded\def\page_sides_forget_floats
- {\global\d_page_sides_vsize\d_page_sides_vsize_reset
- \global\c_page_sides_n_of_lines\zerocount
- % also here if used at all \global\holdinginserts\zerocount
- \global\setfalse\c_page_sides_short
- \global\setfalse\c_page_sides_flag}
-
-\let\page_otr_sides_pop_penalties \relax
-\let\page_otr_sides_push_penalties\relax
-
-% \def\page_otr_sides_push_penalties % needed ? and right
-% {\widowpenalty\plusone
-% \clubpenalty\plustwo
-% \brokenpenalty\plusone
-% \let\page_otr_sides_push_penalties\relax
-% \edef\page_otr_sides_pop_penalties
-% {\widowpenalty \the\widowpenalty
-% \clubpenalty \the\clubpenalty
-% \brokenpenalty\the\brokenpenalty
-% \let\page_otr_sides_pop_penalties\relax}}
-%
-% shouldn't that be:
-%
-% \def\page_otr_sides_push_penalties % needed?
-% {\let\page_otr_sides_push_penalties\relax
-% \edef\page_otr_sides_pop_penalties
-% {\widowpenalty \the\widowpenalty
-% \clubpenalty \the\clubpenalty
-% \brokenpenalty\the\brokenpenalty
-% \let\page_otr_sides_pop_penalties\relax}%
-% \widowpenalty\plusone
-% \clubpenalty\plustwo
-% \brokenpenalty\plusone}
-
-\def\page_sides_restore_output_penalty
- {\ifnum\outputpenalty=\plustenthousand \else
- \penalty\outputpenalty
- \fi}
-
-\def\page_sides_output_routine
- {\ifconditional\c_page_sides_short
- \page_sides_output_routine_yes
- \else
- \page_sides_output_routine_nop
- \fi}
-
-\def\page_sides_output_routine_nop
- {\page_otr_command_side_float_output % otr specific
- \global\d_page_sides_vsize\d_page_sides_vsize_reset
- \global\c_page_sides_n_of_lines\zerocount
- % we can end up in fig lock, so \global\holdinginserts\zerocount here too ?
- \page_otr_sides_pop_penalties}
-
-\def\page_sides_output_routine_yes % we need to rework this ... add pagediscards and such
- {\unvbox\normalpagebox % bah, and the discards?
-% \pagediscards
- \setbox\b_page_sides_bottom\lastbox
- \ifdim\wd\b_page_sides_bottom>\d_page_sides_hsize
- \penalty-201 % hm, i really need to write this from scatch
- \box\b_page_sides_bottom
- \else\ifvoid\b_page_sides_bottom
- \else
- \page_sides_restore_left_indent
- \ifdim\wd\b_page_sides_bottom<\d_page_sides_hsize
- \parskip\zeropoint
- %\noindent
- \ifinner\else
- \vadjust{\penalty\minusone}%
- \fi
- \ifconditional\c_page_sides_l_eq
- \global\setfalse\c_page_sides_l_eq
- \else
- \global\advance\d_page_sides_hsize -\wd\b_page_sides_bottom
- \ifconditional\c_page_sides_r_eq
- \global\setfalse\c_page_sides_r_eq
- \else
- \global\divide\d_page_sides_hsize \plustwo
- \fi
- \hskip\d_page_sides_hsize % \kern
- \fi
- \fi
- \box\b_page_sides_bottom
- \page_sides_restore_output_penalty
- \fi\fi
- % why was this \global\holdinginserts\zerocount
- \global\setfalse\c_page_sides_short}
-
-\def\page_sides_restore_left_indent
- {\relax
- \ifnum\c_page_sides_float_type>\plusfour \else
- \parskip\zeropoint % here ?
- \ifdim\d_page_sides_width>\zeropoint % new, see prikkels
- \noindent
- \ifinner \else
- \vadjust{\penalty\minusone}%
- \fi
- \hskip\d_page_sides_width\relax % \kern
- %\else
- % we have a margin or edge float
- \fi
- \fi}
-
-% The clean way:
-%
-% \global\setbox\floatbox\hbox
-% {\ifcase\c_page_sides_float_type
-% \vbox{#1}%
-% \or % 1
-% \kern\d_page_sides_leftshift
-% \kern\d_page_sides_shift
-% \vbox{#1}%
-% \kern-\d_page_sides_extrashift
-% \or % 2
-% \kern\d_page_sides_shift
-% \vbox{#1}%
-% \kern-\d_page_sides_extrashift
-% \or % 3
-% \kern\d_page_sides_shift
-% \vbox{#1}%
-% \kern-\d_page_sides_extrashift
-% \or % 4
-% \kern\d_page_sides_leftshift
-% \kern\d_page_sides_shift
-% \vbox{#1\removedepth}%
-% \kern-\d_page_sides_extrashift
-% \or % 5
-% \kern-\d_page_sides_extrashift
-% \vbox{#1}%
-% \kern\d_page_sides_shift
-% \kern\d_page_sides_rightshift
-% \or % 6
-% \kern-\d_page_sides_extrashift
-% \vbox{#1}%
-% \kern\d_page_sides_shift
-% \or % 7
-% \kern-\d_page_sides_extrashift
-% \vbox{#1}%
-% \kern\d_page_sides_shift
-% \or % 8
-% \kern-\d_page_sides_extrashift
-% \vbox{#1}%
-% \kern\d_page_sides_shift
-% \kern\d_page_sides_rightshift
-% \fi}
-%
-% The compact way:
+%D Shifts get applied to the float box:
\def\page_sides_relocate_float#1%
{\global\setbox\floatbox\hpack
@@ -479,67 +248,347 @@
\kern\d_page_sides_rightshift
\fi\fi}}
+%D The vertical skips are a nightmare and this mechanism is about as complex
+%D as it gets.
+
+\def\page_sides_check_vertical_skips
+ {\ifdim\d_page_sides_topskip <\zeropoint\d_page_sides_topskip \zeropoint\fi
+ \ifdim\d_page_sides_bottomskip<\zeropoint\d_page_sides_bottomskip\zeropoint\fi
+ \ifdim\d_page_sides_midskip <\zeropoint\d_page_sides_midskip \zeropoint\fi
+ %
+ \global\d_page_sides_toptotal \dimexpr\d_page_sides_topskip +\d_page_sides_topoffset \relax
+ \global\d_page_sides_bottomtotal\dimexpr\d_page_sides_bottomskip+\d_page_sides_bottomoffset\relax
+ \ifcase\c_page_sides_skipmode
+ \or % high
+ \global\d_page_sides_toptotal \d_page_sides_topoffset
+ \or % low
+ \global\d_page_sides_bottomtotal\d_page_sides_bottomoffset
+ \or % fit
+ \global\d_page_sides_toptotal \d_page_sides_topoffset
+ \global\d_page_sides_bottomtotal\d_page_sides_bottomoffset
+ \fi}
+
+%D These shifts get (selectively) applied with a bit of optional tracing.
+
+\def\page_sides_apply_vertical_shift_normal
+ {\global\setbox\floatbox\hpack % why extra box
+ {\vpack
+ {\forgetall
+ \hsize\wd\floatbox
+ \vskip\privatescratchdimen
+ \offinterlineskip
+ \box\floatbox
+ % somehow we need this \scratchbox magic, but at least it's the same as the
+ % tracer now
+ \setbox\scratchbox\emptybox
+ \wd\scratchbox\hsize
+ \ht\scratchbox\d_page_sides_bottomtotal
+ \box\scratchbox
+ \vskip-\d_page_sides_bottomtotal
+ \ifnum\c_page_sides_align=\plusfive
+ \vskip-\lineheight
+ \fi}}}
+
+\def\page_sides_apply_vertical_shift_traced
+ {\global\setbox\floatbox\hpack % why extra box
+ {\backgroundline[trace:r]{\ruledhpack{\vpack
+ {\forgetall
+ \hsize\wd\floatbox
+ \vskip\privatescratchdimen
+ \offinterlineskip
+ \backgroundline
+ [trace:g]%
+ {\ruledhpack{\box\floatbox}}%
+ \par
+ \blackrule
+ [\c!color=trace:s,%
+ \c!height=\d_page_sides_bottomtotal,%
+ \c!depth=\zeropoint,%
+ \c!width=\hsize]%
+ \vskip-\d_page_sides_bottomtotal
+ \ifnum\c_page_sides_align=\plusfive
+ \vskip-\lineheight
+ \fi}}}}}
+
\def\page_sides_apply_vertical_shift
{\ifnum\c_page_sides_align=\plusfour
\getnoflines{\ht\floatbox}%
- \privatescratchdimen\dimexpr\noflines\lineheight-\strutdepth\relax
- \getrawnoflines\d_page_sides_topskip
+ \privatescratchdimen\dimexpr\noflines\lineheight-\strutdp\relax
+ \getrawnoflines\d_page_sides_toptotal
\advance\privatescratchdimen\noflines\lineheight
- % todo: maybe rounding problem here
- % \global\setbox\floatbox\hbox{\lower\lineheight\box\floatbox}%
- \global\setbox\floatbox\hpack{\lower\strutdepth\box\floatbox}%
+ \page_sides_force_depth
\ht\floatbox\privatescratchdimen
\dp\floatbox\zeropoint
\fi
\ifcase\c_page_sides_align \else
- \global\d_page_sides_topskip\zeropoint
+ \global\d_page_sides_toptotal\zeropoint
\fi
\privatescratchdimen
\ifnum\c_page_sides_float_type<\plusfour
- \d_page_sides_topskip
+ \d_page_sides_toptotal
\else\ifnum\c_page_sides_float_type>\plusfive
- \d_page_sides_topskip
+ \d_page_sides_toptotal
\else
\zeropoint
\fi\fi
% the top of the box is at the previous baseline
\ifcase\c_page_sides_align
% 0 normal
- \advance\privatescratchdimen\strutdepth % or \openstrutdepth
+ \advance\privatescratchdimen\strutdp % or \openstrutdepth
\or % 1 height
- \advance\privatescratchdimen\strutdepth % or \openstrutdepth
+ \advance\privatescratchdimen\strutdp % or \openstrutdepth
\or % 2 line
\or % 3 depth
- \advance\privatescratchdimen\lineheight % or \openlineheight
- \advance\privatescratchdimen\strutdepth % or \openstrutdepth
+ \advance\privatescratchdimen\lineheight % or \openlineheight
+ \advance\privatescratchdimen\strutdp % or \openstrutdepth
\or % 4 grid
\privatescratchdimen\zeropoint
\or
- \advance\privatescratchdimen\strutheight % or \openstrutheight
+ \advance\privatescratchdimen\strutht % or \openstrutheight
\fi
% new
\global\c_page_sides_lines_done\zerocount
\ifnum\c_page_sides_n_of_lines>\zerocount
\advance\privatescratchdimen\c_page_sides_n_of_lines\lineheight
\fi
- \global\setbox\floatbox\hpack % why extra box
- {\vbox
- {\vskip\privatescratchdimen
- \nointerlineskip
- \box\floatbox
- \ifnum\c_page_sides_align=\plusfive \vskip-\lineheight \fi}}%
+ \iftracesidefloats
+ \page_sides_apply_vertical_shift_traced % uses \privatescratchdimen
+ \else
+ \page_sides_apply_vertical_shift_normal % uses \privatescratchdimen
+ \fi
\ifnum\c_page_sides_float_type<\plusfour
- \global\d_page_sides_topskip\zeropoint
+ \global\d_page_sides_toptotal\zeropoint
\else\ifnum\c_page_sides_float_type>\plusfive
- \global\d_page_sides_topskip\zeropoint
+ \global\d_page_sides_toptotal\zeropoint
\fi\fi
\global\d_page_sides_downshift\zeropoint}
-\def\page_sides_place_float
- {\ifnum\c_page_sides_float_type=\plusfour \kern\d_page_sides_topskip \fi
- \ifnum\c_page_sides_float_type=\plusfive \kern\d_page_sides_topskip \fi
- \d_page_sides_saved_depth\prevdepth
+%D We have a few virtual dimensions. I'm not sure what to do with \type
+%D {\pagedepth} and \type {\pageshrink} in the next two. If we ever need
+%D them, they will become options.
+
+\def\e_page_sides_flush_criterium
+ {\dimexpr
+ \d_page_sides_vsize
+ -\d_page_sides_bottomtotal
+ -\pagetotal
+ \relax}
+
+\def\e_page_sides_room_criterium
+ {\dimexpr
+ \d_page_sides_vsize
+ -\d_page_sides_bottomtotal % added here too
+ -\pagetotal
+ \relax}
+
+%D In order to get consistent spacing we force a strutdepth unless the
+%D preceding material has more depth than that already. This way anchoring
+%D becomes predictable.
+
+\unexpanded\def\page_sides_force_depth
+ {\iftracesidefloats
+ \begingroup
+ \c_page_force_strut_depth_trace_mode\plusone
+ \forcestrutdepth
+ \endgroup
+ \else
+ \forcestrutdepth
+ \fi}
+
+\def\page_sides_flush_floats
+ {\par
+ \ifdim\e_page_sides_flush_criterium>\zeropoint
+ \page_sides_flush_floats_progress
+ \page_sides_flush_floats_after_next
+ \fi
+ \page_sides_flush_floats_reset}
+
+\def\page_sides_flush_floats_text
+ {\par
+ % what with \c_anch_backgrounds_text_level>\plusone
+ \ifdim\e_page_sides_flush_criterium>\zeropoint
+ \page_sides_flush_floats_progress
+ \page_sides_flush_floats_after_none
+ \fi
+ \page_sides_flush_floats_reset}
+
+\def\page_sides_flush_floats_reset
+ {\global\d_page_sides_vsize\d_page_sides_vsize_reset
+ % also here if used at all \global\holdinginserts\zerocount
+ \global\setfalse\c_page_sides_short
+ \global\setfalse\c_page_sides_flag
+ \global\c_page_sides_checks_done\zerocount}
+
+\def\page_sides_flush_floats_after_none % we force a flush
+ {\ifdim\d_page_sides_midskip>\zeropoint
+ \blank[\the\d_page_sides_midskip]
+ \fi
+ \ignoreparskip
+ \blank[\v!disable]}
+
+\def\page_sides_flush_floats_after_next % we have two successive ones
+ {\ifdim\d_page_sides_bottomskip>\zeropoint
+ \blank[\the\d_page_sides_bottomskip]
+ \fi
+ \ignoreparskip
+ \blank[\v!disable]}
+
+%D A rudimentary checker:
+
+\unexpanded\def\doifelsesidefloat
+ {\par
+ \ifdim\e_page_sides_room_criterium>\zeropoint % -\pagedepth
+ \expandafter\firstoftwoarguments
+ \else
+ \expandafter\secondoftwoarguments
+ \fi}
+
+\let\doifsidefloatelse\doifelsesidefloat
+
+%D Sometimes we need to fill up the space alongside a side float and this
+%D is where we define the helpers. A user can enforce a smaller step. We use
+%D large steps when possible.
+
+\installcorenamespace{sidefloatsteps}
+
+\setvalue{\??sidefloatsteps\v!line }{\strut}
+\setvalue{\??sidefloatsteps\v!big }{\strut}
+\setvalue{\??sidefloatsteps\v!medium}{\halflinestrut} % was \halfstrut
+\setvalue{\??sidefloatsteps\v!small }{\noheightstrut} % was \quarterstrut
+
+\def\page_sides_flush_floats_tracer
+ {\dontleavehmode
+ \ruledhpack\bgroup\backgroundline[trace:b]{%
+ \llap{\smash{\vrule\s!width4\points\s!height.4\points\s!depth.4\points}}%
+ \ifnum\recurselevel=\plusone
+ \llap{\smash{\smallinfofont\the\scratchdimen}\hskip.5\leftmargindistance}%
+ \else\ifodd\recurselevel
+ \llap{\smash{\smallinfofont\recurselevel}\hskip.5\leftmargindistance}%
+ \fi\fi
+ \page_sides_flush_floats_normal
+ \kern\hsize
+ \egroup}}
+
+\def\page_sides_flush_floats_normal
+ {\ifdim\scratchdimen>\htdp\strutbox
+ \strut
+ \else
+ \m_pages_strut
+ \fi}
+
+\def\page_sides_flush_floats_progress
+ {\begingroup
+ \page_sides_force_depth
+ \parskip\zeropoint
+ \let\page_sides_flush_floats\relax
+ \edef\m_pages_strut
+ {\ifcsname\??sidefloatsteps\rootfloatparameter\c!step\endcsname
+ \lastnamedcs
+ \else
+ \noheightstrut
+ \fi}%
+ \forgetall
+ \offinterlineskip
+ \doloop
+ {\scratchdimen\e_page_sides_flush_criterium
+ \ifdim\scratchdimen>\onepoint % good enough, can become configurable
+ \ifnum\recurselevel>\plushundred % safeguard, sort of deadcycles
+ \exitloop
+ \else\iftracesidefloats
+ \page_sides_flush_floats_tracer\par
+ \else
+ \page_sides_flush_floats_normal\par
+ \fi\fi
+ \else
+ \page_sides_force_depth
+ \exitloop
+ \fi}%
+ \endgroup}
+
+%D We force a parskip and ignore it afterwards. We can nil it by setting the
+%D \type {spacebeforeside} parameter. We can have a leading blank so we need
+%D to make sure that we use blank to inject the parskip and then ignore
+%D the one injected by the engine.
+
+\def\page_sides_inject_before
+ {\page_sides_force_depth
+ \ifdim\parskip>\zeropoint
+ \ifdim\parskip>\d_strc_floats_top
+ \ifdim\d_strc_floats_top>\zeropoint
+ \ignoreparskip
+ \blank[\v!white]%
+ \else
+ \checkedblank[\rootfloatparameter\c!spacebeforeside]%
+ \fi
+ \else
+ \checkedblank[\rootfloatparameter\c!spacebeforeside]%
+ \fi
+ \else
+ \checkedblank[\rootfloatparameter\c!spacebeforeside]%
+ \fi}
+
+%D We are now done with \type {spacebefore} and the parskip is already
+%D injected. The dummy line makes sure that we anchor properly and it
+%D can also serve as a tracer.
+
+\def\page_sides_inject_dummy_line_normal
+ {\hpack to \availablehsize{\strut\hss}}
+
+\def\page_sides_inject_dummy_line_traced
+ {\ruledhpack to \availablehsize{\backgroundline[trace:c]{\page_sides_inject_dummy_line_normal}}}
+
+\def\page_sides_inject_dummy_lines
+ {\par
\nointerlineskip
+ \dontleavehmode
+ \iftracesidefloats
+ \page_sides_inject_dummy_line_traced
+ \else
+ \page_sides_inject_dummy_line_normal
+ \fi
+ \vskip-\dimexpr\lineheight+\strutdp\relax
+ \ignoreparskip
+ \blank[\v!samepage]
+ \blank[\v!disable]}
+
+%D Checkers:
+
+\def\page_sides_check_floats_after_par
+ {\page_sides_check_floats_indeed
+ \ifdim\d_page_sides_pagetotal=\pagetotal \else
+ \global\let\page_sides_check_floats\page_sides_check_floats_indeed
+ \page_sides_flush_floats
+ \global\c_page_sides_n_of_lines\zerocount % here !
+ \fi}
+
+\unexpanded\def\page_sides_flush_floats_after_par
+ {\global\d_page_sides_pagetotal\pagetotal
+ \global\let\page_sides_check_floats\page_sides_check_floats_after_par}
+
+\unexpanded\def\page_sides_forget_floats
+ {\global\d_page_sides_vsize\d_page_sides_vsize_reset
+ \global\c_page_sides_n_of_lines\zerocount
+ % also here if used at all \global\holdinginserts\zerocount
+ \global\setfalse\c_page_sides_short
+ \global\setfalse\c_page_sides_flag}
+
+%D Here comes the output routine. We either go the fast route or we use the
+%D normal one (stored in \type {\page_otr_command_side_float_output}). We no
+%D longer have the fuzzy code with penalties, indentation and such around.
+
+\def\page_sides_output_routine
+ {\page_otr_command_side_float_output
+ \ifconditional\c_page_sides_short
+ \global\setfalse\c_page_sides_short
+ \else
+ \global\d_page_sides_vsize\d_page_sides_vsize_reset
+ \global\c_page_sides_n_of_lines\zerocount
+ \fi}
+
+\def\page_sides_place_float
+ {\ifnum\c_page_sides_float_type=\plusfour \kern\d_page_sides_toptotal \fi
+ \ifnum\c_page_sides_float_type=\plusfive \kern\d_page_sides_toptotal \fi
\ifgridsnapping
\page_sides_place_float_grid
\else
@@ -548,21 +597,20 @@
\par
\kern-\d_page_sides_height
\penalty10001 % oeps, this will change
- \normalbaselines
- \prevdepth\d_page_sides_saved_depth}
+ \normalbaselines}
\def\page_sides_place_float_normal
{\page_sides_push_float_inline\firstofoneargument}
-% The following needs some more work .. consider this a quick hack. We
-% probably need an mkiv hanging grid option.
+%D The following needs some more work .. consider this a quick hack. We probably
+%D need an mkiv hanging grid option.
\def\page_sides_place_snap_to_grid#1%
{\edef\p_grid{\floatparameter\c!grid}%
\ifx\p_grid\empty\else
\snaptogrid[\p_grid]%
\fi
- \hbox{#1}}
+ \hpack{#1}}
\def\page_sides_place_float_grid
{\getrawnoflines\d_page_sides_height % raw ?
@@ -575,10 +623,11 @@
{\begingroup
\reseteverypar % needed !
\parskip\zeropoint % needed !
+ \nointerlineskip
\page_sides_set_skips
- \page_sides_insert_info
+ \page_floats_report_total
\relax
- \lefttoright % \textdir TLT\relax % or \ifconditional\displaylefttoright below (more work)
+ \lefttoright
\strc_floats_mark_par_as_free
\ifcase\c_page_sides_float_type
% invalid
@@ -612,6 +661,7 @@
\def\page_sides_analyse_space
{\global\settrue\c_page_sides_flag
+ \page_sides_force_depth
\global\d_page_sides_page_total \pagetotal % global
\ifnum\c_page_sides_float_type<\plusfour
\global\d_page_sides_width \zeropoint
@@ -624,79 +674,53 @@
\global\d_page_sides_width\zeropoint
\fi
\global\d_page_sides_hsize \dimexpr\hsize-\d_page_sides_width\relax
- \global\d_page_sides_height\dimexpr\ht\floatbox+\dp\floatbox+\d_page_sides_topskip\relax
+ \global\d_page_sides_height\dimexpr\ht\floatbox+\dp\floatbox+\d_page_sides_toptotal\relax
\global\d_page_sides_vsize \dimexpr\d_page_sides_height+\d_page_sides_page_total\relax
- \dimen0\d_page_sides_vsize
- \dimen2\pagegoal
- \relax
- \ifcase\c_page_sides_method
- % method 0 : raw
+ \scratchdimenone\d_page_sides_vsize
+ \scratchdimentwo\pagegoal
+ \ifcase\c_page_sides_tolerance
+ \ifcase\c_page_sides_method
+ % method 0 : raw
+ \or
+ % method 1 : safe (default)
+ \advance\scratchdimentwo -\strutdp
+ \or
+ % method 2 : tight (grid default)
+ \advance\scratchdimenone -\onepoint
+ \fi
\or
- % method 1 : safe
- \dimen2 .99\pagegoal
+ % tolerant
+ \advance\scratchdimentwo -.5\strutdp
\or
- % method 2 : tight
- \advance\dimen0 -\onepoint
+ % verytolerant
+ % \advance\scratchdimenone -\onepoint (maybe)
+ \else
+ \advance\scratchdimentwo -\strutdp
\fi
- \relax % really needed ! ! ! !
- \ifdim\dimen0>\dimen2
+ % how about \pagedepth
+ \ifdim\scratchdimenone>\scratchdimentwo
\global\setfalse\c_page_floats_room
\else
- \ifdim\dimexpr\pagegoal-\d_page_sides_vsize\relax<\d_page_sides_bottomskip
- % just weird: \global\advance\d_page_sides_vsize \dimen0
+ \ifdim\dimexpr\pagegoal-\d_page_sides_vsize\relax<\d_page_sides_bottomtotal
+ % just weird: \global\advance\d_page_sides_vsize \scratchdimenone
\global\settrue\c_page_sides_short
- \page_otr_sides_push_penalties
% why was this \global\holdinginserts\plusone
\else
- \global\advance\d_page_sides_vsize \d_page_sides_bottomskip
+ \global\advance\d_page_sides_vsize \d_page_sides_bottomtotal % wins over inbetween
\global\setfalse\c_page_sides_short
\fi
\global\settrue\c_page_floats_room
\fi}
-% \def\page_sides_prepare_space
-% {\par
-% % no longer needed \whitespace
-% \begingroup
-% \forgetall
-% \reseteverypar
-% \verticalstrut
-% \vskip-\struttotal
-% \endgroup}
-
-% \installtextracker
-% {sidefloats.anchor}
-% {\let\page_sides_anchor\page_sides_anchor_yes}
-% {\let\page_sides_anchor\page_sides_anchor_nop}
-%
-% \def\page_sides_anchor_yes
-% {\darkred
-% \hskip-5\emwidth
-% \vrule\s!height.05\exheight\s!depth.05\exheight\s!width10\emwidth}
-%
-% \def\page_sides_anchor_nop
-% {\strut}
-%
-% \let\page_sides_anchor\page_sides_anchor_nop
-%
-% \def\page_sides_prepare_space
-% {\par
-% \begingroup
-% \reseteverypar
-% \dontleavehmode\hpack to \zeropoint{\page_sides_anchor\hss\strut}%
-% \vskip-\parskip
-% \vskip-\struttotal
-% \inhibitblank
-% \endgroup}
-
-\def\page_sides_prepare_space
- {\fakenextstrutline
- \inhibitblank}
+%D As we have no clear end of one or more paragraphs, we only have pre-float
+%D skips.
\def\page_sides_handle_float#1% grid (4) is rather experimental
- {\page_sides_prepare_space
+ {\page_sides_check_horizontal_skips
+ \page_sides_check_vertical_skips
\page_sides_apply_horizontal_shift
\page_sides_check_previous_float
+ \page_sides_inject_before
\page_sides_inject_dummy_lines
\page_sides_relocate_float{#1}%
\page_sides_apply_vertical_shift
@@ -704,11 +728,18 @@
\ifconditional\c_page_floats_room \else
\page_otr_fill_and_eject_page
\page_sides_analyse_space
+ %\page_sides_inject_before
\page_sides_inject_dummy_lines
\fi
\page_sides_place_float
\page_sides_check_floats_reset
- \ignorespaces} % not really needed
+ \page_sides_wrapup}
+
+\def\page_sides_wrapup
+ {% we need to do this aftergroup
+ \ignoreparskip \aftergroup\ignoreparskip
+ \ignorespaces \aftergroup\ignorespaces
+ }
\def\page_sides_check_floats_indeed
{\page_sides_analyse_progress
@@ -717,16 +748,44 @@
\else
\page_sides_check_floats_reset
\fi
- \parskip\s_spac_whitespace_parskip}
+ \parskip\s_spac_whitespace_parskip} % not needed
\let\page_sides_check_floats\page_sides_check_floats_indeed
+\unexpanded\def\page_sides_check_floats_tracer
+ {\begingroup
+ \dontleavehmode
+ \ifnum\c_page_sides_float_type>\plusfour
+ \rlap
+ {\hskip\availablehsize % d_page_sides_width % kern
+ \color[trace:o]%
+ {\rlap{\kern.25\bodyfontsize\showstruts\strut}%
+ \vrule\s!height.5\points\s!depth.5\points\s!width\d_page_sides_width}}%
+ \else
+ \hskip-\d_page_sides_width % kern
+ \color[trace:o]%
+ {\vrule\s!height.5\points\s!depth.5\points\s!width\d_page_sides_width
+ \llap{\showstruts\strut\kern.25\bodyfontsize}}%
+ \fi
+ \endgroup}
+
\unexpanded\def\page_sides_check_floats_set
- {\privatescratchdimen\dimexpr\d_page_sides_progress+\strutht-\roundingeps\relax
- \c_page_sides_n_of_hang\privatescratchdimen
- \divide\c_page_sides_n_of_hang \baselineskip\relax
+ {\edef\p_sidethreshold{\floatparameter\c!sidethreshold}%
+ \ifx\p_sidethreshold\v!old
+ \d_page_sides_progression\dimexpr\d_page_sides_progress+\strutht-\roundingeps\relax
+ \c_page_sides_n_of_hang\d_page_sides_progression
+ \divide\c_page_sides_n_of_hang \baselineskip\relax
+ \else
+ \d_page_sides_progression
+ \ifx\p_sidethreshold\empty
+ \d_page_sides_progress
+ \else
+ \dimexpr\d_page_sides_progress-\p_sidethreshold\relax
+ \fi
+ \getnoflines\d_page_sides_progression
+ \c_page_sides_n_of_hang\noflines
+ \fi
\ifnum\c_page_sides_n_of_hang>\zerocount
- % new from here (needs test case, old code)
\ifcase\c_page_sides_n_of_lines
\else
\ifcase\c_page_sides_lines_done
@@ -737,40 +796,35 @@
\global\advance\c_page_sides_n_of_lines-\privatescratchcounter
\fi
\fi
- % hm, when do we get the parshape branch? needs testing
\ifnum\c_page_sides_n_of_lines>\zerocount
\privatescratchtoks\emptytoks
\privatescratchcounter\c_page_sides_n_of_lines
\privatescratchdimen\dimexpr\hsize-\d_page_sides_width\relax
\dorecurse\c_page_sides_n_of_lines
- {\appendtoks \zeropoint \hsize \to \privatescratchtoks}%
+ {\toksapp\privatescratchtoks{\zeropoint\hsize}}%
\ifnum\c_page_sides_n_of_hang>\c_page_sides_n_of_lines
\advance\c_page_sides_n_of_hang -\c_page_sides_n_of_lines\relax
\advance\privatescratchcounter\c_page_sides_n_of_hang
\dorecurse\c_page_sides_n_of_hang % weird, shouldn't that be scratchcounter
{\ifnum\c_page_sides_float_type>\plusfour
- \appendtoks \zeropoint \privatescratchdimen \to \privatescratchtoks
+ \toksapp\privatescratchtoks{\zeropoint\privatescratchdimen}%
\else
- \appendtoks \d_page_sides_width\privatescratchdimen \to \privatescratchtoks
+ \toksapp\privatescratchtoks{\d_page_sides_width\privatescratchdimen}%
\fi}%
\fi
\parshape
\numexpr\privatescratchcounter+\plusone\relax
- \the\privatescratchtoks % \privatescratchcounter
- \zeropoint \hsize % \plusone
+ \the\privatescratchtoks
+ \zeropoint \hsize
\relax
\else
- % new till here
\hangindent \ifnum\c_page_sides_float_type>\plusfour -\fi\d_page_sides_width
\hangafter-\c_page_sides_n_of_hang
\fi
\fi
\global\advance\c_page_sides_checks_done \plusone
\iftracesidefloats
- \hskip-\d_page_sides_width % kern
- \color[darkgray]%
- {\vrule\s!height.5\points\s!depth.5\points\s!width\d_page_sides_width
- \llap{\showstruts\strut\kern.25\bodyfontsize}}%
+ \page_sides_check_floats_tracer
\fi}
\unexpanded\def\page_sides_check_floats_reset
@@ -792,72 +846,21 @@
\fi
\fi}
-\def\page_sides_inject_dummy_lines
- {\begingroup
- \privatescratchcounter\pageshrink
- \divide\privatescratchcounter \baselineskip
- \advance\privatescratchcounter \plusone
- \parskip\zeropoint
- \dorecurse\privatescratchcounter{\hpack to \hsize{}}%
- \kern-\privatescratchcounter\baselineskip
- \penalty\zerocount
- \endgroup}
-
-% Display math
-%
-% If we need this, it will be redone but redefining
-% these macros does not really work out in the current
-% situation.
+% \def\adjustsidefloatdisplaylines % public, will change
+% {\aftergroup\page_sides_adjust_display_lines}
%
-% \ifx\normalleqno\undefined
-%
-% \let\floatrighteqo=\eqno
-% \let\floatleftleqo=\leqno
-%
-% \else
-%
-% \let\floatrighteqo=\normaleqno
-% \let\floatleftleqo=\normalleqno
-%
-% \fi
-%
-% \ifx\normaleqno\undefined
-%
-% \def\normaleqno
-% {\ifconditional\c_page_sides_short
-% \global\settrue\c_page_sides_r_eq
-% \fi
-% \floatrighteqo}
-%
-% \else
-%
-% \def\eqno
-% {\ifconditional\c_page_sides_short
-% \global\settrue\c_page_sides_r_eq
-% \fi
-% \floatrighteqo}
-%
-% \fi
-
-\def\adjustsidefloatdisplaylines % public, will change
- {\aftergroup\page_sides_adjust_display_lines}
-
-\def\page_sides_adjust_display_lines
- {\par
- \noindent
- \ignorespaces}
-
-% We need to hook it into the other otr's. This code will be adapted
-% once we rename the callers.
-
-% \def as they can be redefined!
+% \def\page_sides_adjust_display_lines
+% {\par
+% \noindent
+% \ignorespaces}
-% some will become obsolete
+%D We need to hook it into the other otr's. This code will be adapted once we rename
+%D the callers. We use \type {\def} as they can be redefined! Some will become obsolete.
-\def\checksidefloat {\page_sides_check_floats}
-\def\flushsidefloats {\page_sides_flush_floats}
-\def\flushsidefloatsafterpar{\page_sides_flush_floats_after_par}
-\def\forgetsidefloats {\page_sides_forget_floats}
-%def\synchronizesidefloats {\page_sides_synchronize_floats}
+\unexpanded\def\checksidefloat {\page_sides_check_floats}
+\unexpanded\def\flushsidefloats {\page_sides_flush_floats_text}
+\unexpanded\def\flushsidefloatsafterpar{\page_sides_flush_floats_after_par}
+\unexpanded\def\forgetsidefloats {\page_sides_forget_floats}
+%unexpanded\def\synchronizesidefloats {\page_sides_synchronize_floats}
\protect \endinput
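
The side float code above keeps its vertical bookkeeping in a handful of global
dimension registers. The following free-standing Lua sketch only illustrates that
arithmetic: the names merely echo the \d_page_sides_* registers, dimensions are
scaled points, and nothing here is ConTeXt API.

    -- illustrative only: mirrors \page_sides_check_vertical_skips, the flush
    -- criterium and the fill loop in \page_sides_flush_floats_progress

    local onepoint = 65536 -- one TeX point in scaled points

    -- negative user skips are clipped to zero; skipmode high/low/fit drops the
    -- user skip on one or both sides, keeping only the offset
    local function totals(spec)
        local topskip     = math.max(spec.topskip or 0, 0)
        local bottomskip  = math.max(spec.bottomskip or 0, 0)
        local toptotal    = topskip    + (spec.topoffset or 0)
        local bottomtotal = bottomskip + (spec.bottomoffset or 0)
        if spec.skipmode == "high" then
            toptotal = spec.topoffset or 0
        elseif spec.skipmode == "low" then
            bottomtotal = spec.bottomoffset or 0
        elseif spec.skipmode == "fit" then
            toptotal    = spec.topoffset or 0
            bottomtotal = spec.bottomoffset or 0
        end
        return toptotal, bottomtotal
    end

    -- how much of the float still sticks out below the material typeset so far
    local function flushcriterium(vsize, bottomtotal, pagetotal)
        return vsize - bottomtotal - pagetotal
    end

    -- add dummy (strut) lines until the float no longer sticks out; 'step' is
    -- the height of the chosen strut, one point is the hard coded "good enough"
    -- threshold, and the counter guards against runaway loops
    local function fillalongside(state, step)
        local added = 0
        while flushcriterium(state.vsize, state.bottomtotal, state.pagetotal) > onepoint do
            state.pagetotal = state.pagetotal + step
            added = added + 1
            if added > 100 then
                break
            end
        end
        return added
    end

On the TeX side the loop injects real (empty) strut lines so that \pagetotal
grows; here the same effect is only simulated by bumping a counter.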
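Likewise, the hanging shape that \page_sides_check_floats_set installs around a
float can be pictured as a list of indent/width pairs. The sketch below is a
hedged illustration with invented field names; the real code falls back to
\hangindent and \hangafter when no full-width carry-over lines are involved.

    -- illustrative only: the parshape branch of \page_sides_check_floats_set
    -- expressed as a plain data structure; dimensions are scaled points

    local function sideshape(spec)
        -- spec.hsize      : current text width
        -- spec.floatwidth : width reserved for the side float
        -- spec.usedlines  : full width lines carried over from a previous float
        -- spec.hanglines  : lines that still have to run alongside the float
        -- spec.rightside  : true when the float hangs at the right hand side
        local shape  = { }
        local narrow = spec.hsize - spec.floatwidth
        for i = 1, spec.usedlines or 0 do
            shape[#shape+1] = { indent = 0, width = spec.hsize }   -- full width first
        end
        for i = 1, spec.hanglines or 0 do
            if spec.rightside then
                shape[#shape+1] = { indent = 0, width = narrow }   -- text keeps the left edge
            else
                shape[#shape+1] = { indent = spec.floatwidth, width = narrow }
            end
        end
        shape[#shape+1] = { indent = 0, width = spec.hsize }       -- the rest is full width again
        return shape
    end

    -- a float next to three narrowed lines in a 300pt column:
    -- sideshape { hsize = 300*65536, floatwidth = 60*65536, hanglines = 3 }
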
diff --git a/tex/context/base/mkiv/page-str.mkiv b/tex/context/base/mkiv/page-str.mkiv
index b6b8d1cce..57a465603 100644
--- a/tex/context/base/mkiv/page-str.mkiv
+++ b/tex/context/base/mkiv/page-str.mkiv
@@ -29,7 +29,7 @@
%D
%D Remark: marknotes are gone, at least for a while.
-\registerctxluafile{page-str}{1.001}
+\registerctxluafile{page-str}{}
\unprotect
diff --git a/tex/context/base/mkiv/page-txt.mkvi b/tex/context/base/mkiv/page-txt.mkvi
index b67e3aa74..b043b60c7 100644
--- a/tex/context/base/mkiv/page-txt.mkvi
+++ b/tex/context/base/mkiv/page-txt.mkvi
@@ -482,6 +482,8 @@
\c!rightstyle\c!rightcolor\c!rightwidth{#one}}%
\fi\fi\fi}
+\let\currentlayoutelement\relax
+
\unexpanded\def\resettextcontent
{\dotripleempty\page_layouts_reset_text_content}
@@ -502,6 +504,8 @@
%D The placement of a whole line is handled by the next two macros. These are hooked
%D into the general purpose token list registers mentioned before.
+\let\currentlayouttextline\relax
+
\def\page_layouts_place_text_line_indeed#vertical#height%
{\let\currentlayouttextline#vertical%
\ifdim#height>\zeropoint\relax % prevents pagenumbers when zero height
@@ -790,7 +794,7 @@
\ifx\p_strc_pagenumbers_width\empty
\box\scratchbox\tfskip
\else
- \hbox to \p_strc_pagenumbers_width{\box\scratchbox\hss}%
+ \hpack to \p_strc_pagenumbers_width{\box\scratchbox\hss}%
\fi
\fi
\endgroup}
@@ -803,7 +807,7 @@
\ifx\p_strc_pagenumbers_width\empty
\tfskip\box\scratchbox
\else
- \hbox to \p_strc_pagenumbers_width{\hss\box\scratchbox}%
+ \hpack to \p_strc_pagenumbers_width{\hss\box\scratchbox}%
\fi
\fi
\endgroup}
@@ -818,7 +822,7 @@
\def\page_layouts_insert_elements
{\ifcase\pageornamentstate
- \page_layouts_place_elements_indeed
+ \page_layouts_place_elements_indeed % we could have a special flag for always ignored
\fi}
\def\page_layouts_place_elements_indeed
diff --git a/tex/context/base/mkiv/publ-aut.lua b/tex/context/base/mkiv/publ-aut.lua
index 6ad8b1296..91d38a8d9 100644
--- a/tex/context/base/mkiv/publ-aut.lua
+++ b/tex/context/base/mkiv/publ-aut.lua
@@ -13,8 +13,8 @@ end
local lpeg = lpeg
-local type, next, tostring = type, next, tostring
-local concat = table.concat
+local type, next, tostring, tonumber = type, next, tostring, tonumber
+local concat, sortedhash = table.concat, table.sortedhash
local utfsub = utf.sub
local formatters = string.formatters
@@ -123,19 +123,22 @@ end
local authormap = allocate()
publications.authormap = authormap
-local function splitauthor(author)
- local detail = cache[author]
- if detail then
- return detail
- end
- local remapped = authormap[author]
- if remapped then
- report("remapping %a to %a",author,remapped)
- local detail = cache[remapped]
+local function splitauthor(author,justsplit)
+ local detail, remapped
+ if not justsplit then
+ detail = cache[author]
if detail then
- cache[author] = detail
return detail
end
+ remapped = authormap[author]
+ if remapped then
+ report("remapping %a to %a",author,remapped)
+ local detail = cache[remapped]
+ if detail then
+ cache[author] = detail
+ return detail
+ end
+ end
end
local author = remapped or author
local firstnames, vons, surnames, initials, juniors, options
@@ -253,8 +256,10 @@ local function splitauthor(author)
if initials and #initials > 0 then detail.initials = initials end
if juniors and #juniors > 0 then detail.juniors = juniors end
if options and next(options) then detail.options = options end
- cache[author] = detail
- nofhits = nofhits + 1
+ if not justsplit then
+ cache[author] = detail
+ nofhits = nofhits + 1
+ end
return detail
end
@@ -293,8 +298,8 @@ local function splitauthorstring(str)
return authors
end
-publications.splitoneauthor = splitauthor
-publications.splitauthor = splitauthorstring
+publications.splitoneauthor = splitauthor
+publications.splitauthor = splitauthorstring
local function the_initials(initials,symbol,connector)
if not symbol then
@@ -307,6 +312,7 @@ local function the_initials(initials,symbol,connector)
for i=1,#initials do
local initial = initials[i]
if type(initial) == "table" then
+ -- J.-J.
local set, s = { }, 0
for i=1,#initial do
if i > 1 then
@@ -317,6 +323,7 @@ local function the_initials(initials,symbol,connector)
end
r = r + 1 ; result[r] = concat(set)
else
+ -- J.
r = r + 1 ; result[r] = initial .. symbol
end
end
@@ -330,17 +337,20 @@ local ctx_btxsetfirstnames = context.btxsetfirstnames
local ctx_btxsetvons = context.btxsetvons
local ctx_btxsetsurnames = context.btxsetsurnames
local ctx_btxsetjuniors = context.btxsetjuniors
-local ctx_btxciteauthorsetup = context.btxciteauthorsetup
-local ctx_btxlistauthorsetup = context.btxlistauthorsetup
local ctx_btxsetauthorvariant = context.btxsetauthorvariant
+
local ctx_btxstartauthor = context.btxstartauthor
local ctx_btxstopauthor = context.btxstopauthor
+local ctx_btxciteauthorsetup = context.btxciteauthorsetup
+local ctx_btxlistauthorsetup = context.btxlistauthorsetup
+
local concatstate = publications.concatstate
local f_invalid = formatters[""]
-local currentauthordata = nil
-local currentauthorsymbol = nil
+local currentauthordata = nil
+local currentauthorsymbol = nil
+local currentauthorconnector = nil
local manipulators = typesetters.manipulators
local splitmanipulation = manipulators.splitspecification
@@ -359,7 +369,7 @@ local function value(i,field)
end
implement { name = "btxcurrentfirstnames", arguments = "integer", actions = function(i) local v = value(i,"firstnames") if v then context(concat(v," ")) end end }
-implement { name = "btxcurrentinitials", arguments = "integer", actions = function(i) local v = value(i,"initials") if v then context(concat(the_initials(v,currentauthorsymbol))) end end }
+implement { name = "btxcurrentinitials", arguments = "integer", actions = function(i) local v = value(i,"initials") if v then context(concat(the_initials(v,currentauthorsymbol,currentauthorconnector))) end end }
implement { name = "btxcurrentjuniors", arguments = "integer", actions = function(i) local v = value(i,"juniors") if v then context(concat(v," ")) end end }
implement { name = "btxcurrentsurnames", arguments = "integer", actions = function(i) local v = value(i,"surnames") if v then context(concat(v," ")) end end }
implement { name = "btxcurrentvons", arguments = "integer", actions = function(i) local v = value(i,"vons") if v then context(concat(v," ")) end end }
@@ -380,7 +390,7 @@ local function btxauthorfield(i,field)
context(applymanipulation(manipulator,value) or value)
end
elseif field == "initials" then
- context(concat(the_initials(value,currentauthorsymbol)))
+ context(concat(the_initials(value,currentauthorsymbol,currentauthorconnector)))
else
context(concat(value," "))
end
@@ -408,6 +418,7 @@ local function btxauthor(dataset,tag,field,settings)
local etallast = etaloption[v_last]
local combiner = settings.combiner
local symbol = settings.symbol
+ local connector = settings.connector
local index = settings.index
if not combiner or combiner == "" then
combiner = "normal"
@@ -421,8 +432,9 @@ local function btxauthor(dataset,tag,field,settings)
else
etallast = false
end
- currentauthordata = split
- currentauthorsymbol = symbol
+ currentauthordata = split
+ currentauthorsymbol = symbol
+ currentauthorconnector = connector
local function oneauthor(i,last,justone)
local author = split[i]
@@ -508,6 +520,7 @@ implement {
{ "etaldisplay" },
{ "etaloption" },
{ "symbol" },
+ { "connector" },
}
}
}
@@ -530,6 +543,9 @@ local collapsers = allocate { }
publications.authorcollapsers = collapsers
+-- making a constructor doesn't make the code nicer as the_initials is an
+-- exception
+
local function default(author) -- one author
local hash = author.hash
if hash then
@@ -707,8 +723,101 @@ authorhashers.normalshort = function(authors)
end
end
-authorhashers.normalinverted = authorhashers.normal
-authorhashers.invertedshort = authorhashers.normalshort
+local sequentialhash = function(authors)
+ if type(authors) == "table" then
+ local n = #authors
+ if n == 0 then
+ return ""
+ end
+ local result = { }
+ local nofresult = 0
+ for i=1,n do
+ local author = authors[i]
+ local vons = author.vons
+ local surnames = author.surnames
+ local firstnames = author.firstnames
+ local juniors = author.juniors
+ if firstnames and #firstnames > 0 then
+ for j=1,#firstnames do
+ nofresult = nofresult + 1
+ result[nofresult] = firstnames[j]
+ end
+ end
+ if vons and #vons > 0 then
+ for j=1,#vons do
+ nofresult = nofresult + 1
+ result[nofresult] = vons[j]
+ end
+ end
+ if surnames and #surnames > 0 then
+ for j=1,#surnames do
+ nofresult = nofresult + 1
+ result[nofresult] = surnames[j]
+ end
+ end
+ if juniors and #juniors > 0 then
+ for j=1,#juniors do
+ nofresult = nofresult + 1
+ result[nofresult] = juniors[j]
+ end
+ end
+ end
+ return concat(result," ")
+ else
+ return authors
+ end
+end
+
+local sequentialshorthash = function(authors)
+ if type(authors) == "table" then
+ local n = #authors
+ if n == 0 then
+ return ""
+ end
+ local result = { }
+ local nofresult = 0
+ for i=1,n do
+ local author = authors[i]
+ local vons = author.vons
+ local surnames = author.surnames
+ local initials = author.initials
+ local juniors = author.juniors
+ if initials and #initials > 0 then
+ initials = the_initials(initials)
+ for j=1,#initials do
+ nofresult = nofresult + 1
+ result[nofresult] = initials[j]
+ end
+ end
+ if vons and #vons > 0 then
+ for j=1,#vons do
+ nofresult = nofresult + 1
+ result[nofresult] = vons[j]
+ end
+ end
+ if surnames and #surnames > 0 then
+ for j=1,#surnames do
+ nofresult = nofresult + 1
+ result[nofresult] = surnames[j]
+ end
+ end
+ if juniors and #juniors > 0 then
+ for j=1,#juniors do
+ nofresult = nofresult + 1
+ result[nofresult] = juniors[j]
+ end
+ end
+ end
+ return concat(result," ")
+ else
+ return authors
+ end
+end
+
+authorhashers.sequential = sequentialhash
+authorhashers.sequentialshort = sequentialshorthash
+authorhashers.normalinverted = authorhashers.normal
+authorhashers.invertedshort = authorhashers.normalshort
local p_clean = Cs ( (
P("\\btxcmd") / "" -- better keep the argument
@@ -868,6 +977,31 @@ implement {
name = "btxremapauthor",
arguments = { "string", "string" },
actions = function(k,v)
- publications.authormap[k] = v
+ local a = { splitauthor(k,true) }
+ local s1 = sequentialhash(a)
+ local s2 = sequentialshorthash(a)
+ if not authormap[k] then
+ authormap[k] = v
+ report("%a mapped onto %a",k,v)
+ end
+ if not authormap[s1] then
+ authormap[s1] = v
+ report("%a mapped onto %a, derived from %a",s1,v,k)
+ end
+ if not authormap[s2] then
+ authormap[s2] = v
+ report("%a mapped onto %a, derived from %a",s2,v,k)
+ end
+ end
+}
+
+implement {
+ name = "btxshowauthorremapping",
+ actions = function(k,v)
+ report("start author remapping")
+ for k, v in sortedhash(authormap) do
+ report(" %s => %s",k,v)
+ end
+ report("stop author remapping")
end
}
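
The new connector argument to the_initials handles composite first names:
initials stored as a sub-table (for instance Jean-Jacques) get the symbol after
each letter and the connector in between. A minimal stand-alone sketch of that
rendering, not the actual implementation:

    -- illustrative only: rendering initials with a symbol and a connector

    local function render_initials(initials, symbol, connector)
        symbol    = symbol    or "."
        connector = connector or "-"
        local result = { }
        for i = 1, #initials do
            local initial = initials[i]
            if type(initial) == "table" then
                local parts = { }
                for j = 1, #initial do
                    if j > 1 then
                        parts[#parts+1] = connector
                    end
                    parts[#parts+1] = initial[j] .. symbol
                end
                result[#result+1] = table.concat(parts)
            else
                result[#result+1] = initial .. symbol
            end
        end
        return table.concat(result, " ")
    end

    print(render_initials({ { "J", "J" }, "R" })) --> J.-J. R.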
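The rewritten btxremapauthor registers not only the literal key but also two
keys derived from the split author (full names and initials), presumably so
that differently formatted occurrences of the same name hit the same mapping.
Below is a free-standing Lua sketch of that derivation; the split table is
written out by hand and none of this is the actual publ-aut code.

    -- illustrative only: deriving extra remap keys from a split author

    local function sequentialkey(author, short)
        local result = { }
        local function add(list)
            for i = 1, #(list or { }) do
                result[#result+1] = list[i]
            end
        end
        add(short and author.initials or author.firstnames)
        add(author.vons)
        add(author.surnames)
        add(author.juniors)
        return table.concat(result, " ")
    end

    local authormap = { }

    local function remapauthor(key, target, detail)
        for _, k in ipairs { key, sequentialkey(detail), sequentialkey(detail, true) } do
            if not authormap[k] then -- the first registration wins, as in the patch
                authormap[k] = target
            end
        end
    end

    local detail = {
        firstnames = { "Donald", "Ervin" },
        initials   = { "D.", "E." },
        surnames   = { "Knuth" },
    }

    remapauthor("Knuth, Donald Ervin", "Knuth, D.E.", detail)
    -- registers "Knuth, Donald Ervin", "Donald Ervin Knuth" and "D. E. Knuth"
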
diff --git a/tex/context/base/mkiv/publ-dat.lua b/tex/context/base/mkiv/publ-dat.lua
index 8eb923407..310df82f3 100644
--- a/tex/context/base/mkiv/publ-dat.lua
+++ b/tex/context/base/mkiv/publ-dat.lua
@@ -24,7 +24,7 @@ end
local lower, find, sub = string.lower, string.find, string.sub
local concat, copy, tohash = table.concat, table.copy, table.tohash
-local next, type, rawget = next, type, rawget
+local next, type, rawget, tonumber = next, type, rawget, tonumber
local utfchar = utf.char
local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
local textoutf = characters and characters.tex.toutf
@@ -138,14 +138,15 @@ local virtuals = allocate {
}
local defaulttypes = allocate {
- author = "author",
- editor = "author",
- publisher = "author",
- page = "pagenumber",
- pages = "pagenumber",
- keywords = "keyword",
- doi = "url",
- url = "url",
+ author = "author",
+ editor = "author",
+ translator = "author",
+ -- publisher = "author",
+ page = "pagenumber",
+ pages = "pagenumber",
+ keywords = "keyword",
+ doi = "url",
+ url = "url",
}
local defaultsets = allocate {
@@ -265,11 +266,13 @@ local specifications = setmetatableindex(function(t,name)
local fullname = resolvers.findfile(filename) or ""
if fullname == "" then
report("no data definition file %a for %a",filename,name)
+ t[name] = default
return default
end
local specification = table.load(fullname)
if not specification then
report("invalid data definition file %a for %a",fullname,name)
+ t[name] = default
return default
end
--
@@ -320,7 +323,7 @@ end
function publications.parenttag(dataset,tag)
if not dataset or not tag then
report("error in specification, dataset %a, tag %a",dataset,tag)
- elseif find(tag,"%+") then
+ elseif find(tag,"+",1,true) then
local tags = lpegmatch(p_splitter,tag)
local parent = tags[1]
local current = datasets[dataset]
@@ -862,7 +865,7 @@ do
publications.loadbibdata(dataset,data,fullname,kind)
end
- function loaders.lua(dataset,filename) -- if filename is a table we load that one
+ function loaders.lua(dataset,filename,loader) -- if filename is a table we load that one
local current, data, fullname
if type(filename) == "table" then
current = datasets[dataset]
@@ -873,7 +876,7 @@ do
return
end
current = datasets[dataset]
- data = table.load(fullname)
+ data = (loader or table.load)(fullname)
end
if data then
local luadata = current.luadata
@@ -888,6 +891,10 @@ do
end
end
+ function loaders.json(dataset,filename)
+ loaders.lua(dataset,filename,utilities.json.load)
+ end
+
function loaders.buffer(dataset,name) -- if filename is a table we load that one
local current = datasets[dataset]
local barename = file.removesuffix(name)
diff --git a/tex/context/base/mkiv/publ-imp-apa.lua b/tex/context/base/mkiv/publ-imp-apa.lua
index a725bf22f..65dee0717 100644
--- a/tex/context/base/mkiv/publ-imp-apa.lua
+++ b/tex/context/base/mkiv/publ-imp-apa.lua
@@ -84,11 +84,11 @@ local categories = specification.categories
categories.article = {
sets = {
- author = { "author", "editor", "title" },
+ author = { "author", "organization", "editor", "title" },
doi = generic.doi,
},
required = {
- "author"
+ "author", -- a set
},
optional = {
"withauthor", "translator",
@@ -96,6 +96,13 @@ categories.article = {
"subtitle", "type", "file",
"journal", "volume", "number", "pages",
"doi", "note",
+ -- APA ignores this:
+ --
+ -- "month",
+ --
+ -- fields defined in jabref but presently ignored:
+ --
+ -- "issn",
},
}
@@ -123,7 +130,7 @@ categories.newspaper = categories.magazine
categories.periodical = {
sets = {
- author = { "editor", "publisher" },
+ author = { "editor", "publisher", "organization", },
doi = generic.doi,
},
required = {
@@ -175,6 +182,7 @@ categories.book = {
"editionset", "series",
"address",
"doi", "note",
+ "abstract",
},
}
@@ -182,7 +190,7 @@ categories.book = {
categories.inbook = {
sets = {
- author = { "author", "editor", "publisher", "title", },
+ author = { "author", "organization", "editor", "publisher", "title", },
ineditor = { "editor" },
editionset = generic.editionset,
doi = generic.doi,
@@ -436,6 +444,7 @@ categories.unpublished = {
categories.electronic = {
sets = {
doi = generic.doi,
+ author = { "author", "organization", },
},
required = {
"title"
diff --git a/tex/context/base/mkiv/publ-imp-apa.mkvi b/tex/context/base/mkiv/publ-imp-apa.mkvi
index 182802331..cd78a8799 100644
--- a/tex/context/base/mkiv/publ-imp-apa.mkvi
+++ b/tex/context/base/mkiv/publ-imp-apa.mkvi
@@ -310,7 +310,8 @@
\c!etallimit=5,
\c!etaldisplay=1, % TODO: when 2-4, show all first time, etaldisplay subsequently...
\c!authorconversion=\v!name,
- \c!sorttype=normal,
+ \c!sorttype=normal, % \v!normal ?
+ \c!style=,
\c!compress=\v!yes, % note that cite sorts only work with compress=yes.
\c!separator:names:3={\btxcomma\btxlabeltext{and}\space}, % not \textampersand
\c!separator:names:4={\btxspace\btxlabeltext{and}\space}] % not \textampersand
@@ -344,6 +345,18 @@
[apa:\s!cite:author]
[apa:\s!cite]
+\definebtx
+ [apa:\s!cite:editor]
+ [apa:\s!cite:author]
+
+\definebtx
+ [apa:\s!cite:translator]
+ [apa:\s!cite:author]
+
+\definebtx
+ [apa:\s!cite:organization]
+ [apa:\s!cite]
+
\definebtx
[apa:\s!cite:authoryear]
[apa:\s!cite:author]
@@ -562,7 +575,7 @@
apa:Editors=Éditeurs,
apa:Volume=Volume,
apa:Volumes=Volumes,
- apa:nd={s.d.} % sans date
+ apa:nd={s.d.}, % sans date
apa:supplement=Supplément,
apa:MotionPicture={Film cinématographique},
apa:Writer=Scénariste,
@@ -668,7 +681,7 @@
% used in publ-imp-page.mkvi
-\startsetups [btx:apa:list:page-or-pages]
+\startsetups btx:apa:list:page-or-pages
\ifx\currentbtxlastpage\empty
\btxlabeltext{p}
\else
@@ -828,6 +841,14 @@
\btxdoifelse {year} {
\btxflush{year}
\btxflushsuffix
+ \btxdoif {month} { % month and day optional in publ-imp-apa.lua
+ \btxcomma
+ \btxflush{month}
+ \btxdoif {day} {
+ \btxspace
+ \btxflush{day}
+ }
+ }
} {
\fastsetup{btx:apa:nd}
% Hans: the following won't work but should.
@@ -843,7 +864,11 @@
\starttexdefinition unexpanded btx:apa:author-or-editor #author
\btxdoifelse {#author} {
- \btxflush{#author}
+ \btxstartstyleandcolor[apa:\s!list:#author]
+ \btxusecommand[apa:\s!list:#author]{
+ \btxflush{#author}
+ }
+ \btxstopstyleandcolor
% use \processaction [] [] here?
\doifelse {\btxfoundname{#author}} {editor} {
\btxleftparenthesis
@@ -889,7 +914,11 @@
\btxrightparenthesis
\removeunwantedspaces
\btxparameter{\c!separator:names:3}
- \btxflush{director}
+ \btxstartstyleandcolor[apa:\s!list:director]
+ \btxusecommand[apa:\s!list:director]{
+ \btxflush{director}
+ }
+ \btxstopstyleandcolor
\btxleftparenthesis
\btxsingularorplural {director} {
\btxlabeltext{apa:Director}
@@ -904,7 +933,11 @@
\btxleftparenthesis
\btxlabeltext{with}
\btxspace
- \btxflush{withauthor}
+ \btxstartstyleandcolor[apa:\s!list:withauthor]
+ \btxusecommand[apa:\s!list:withauthor]{
+ \btxflush{withauthor}
+ }
+ \btxstopstyleandcolor
\btxrightparenthesis
}
} {
@@ -950,6 +983,16 @@
}
\stoptexdefinition
+\starttexdefinition unexpanded btx:apa:organization-if-not-author
+ \btxdoif {organization} {
+ \doifnot {\btxfoundname{author}} {organization} {
+ \btxspace
+ \btxflush{organization}
+ \btxcomma
+ }
+ }
+\stoptexdefinition
+
% TODO: The title is terminated with period. However,
% we probably don't want this before the parenthesis.
@@ -1165,21 +1208,25 @@
\removeunwantedspaces
\stoptexdefinition
+\starttexdefinition unexpanded btx:apa:type
+ \btxdoif {type} {
+ \btxleftbracket
+ \btxflush{type}
+ \btxrightbracketperiod
+ }
+\stoptexdefinition
+
% Then setups, by category
% An article from a journal
-% Required fields: author or editor or title, journal, (year).
+% Required fields: author or organization or editor or title, journal, (year).
% Optional fields: volume, number, pages, type, doi, url, note.
% Note that bibtex (and tools) do not include editor (e.g. special issue or section)
\startsetups btx:apa:list:article
\texdefinition{btx:apa:authoryear}
\texdefinition{btx:apa:title-if-not-placed}
- \btxdoif {type} {
- \btxleftbracket
- \btxflush{type}
- \btxrightbracketperiod
- }
+ \texdefinition{btx:apa:type}
\texdefinition{btx:apa:journal-volume-number-pages}
\texdefinition{btx:apa:url-doi-note}
\stopsetups
@@ -1230,6 +1277,7 @@
\startsetups btx:apa:list:book
\texdefinition{btx:apa:authoryear}
\texdefinition{btx:apa:title-if-not-placed}
+ \texdefinition{btx:apa:type}
\texdefinition{btx:apa:editionset}
\texdefinition{btx:apa:wherefrom-publisher}
\texdefinition{btx:apa:url-doi-note}
@@ -1247,6 +1295,7 @@
\startsetups btx:apa:list:inbook
\texdefinition{btx:apa:authoryear}
\texdefinition{btx:apa:title-if-not-placed}
+ \texdefinition{btx:apa:type}
\texdefinition{btx:apa:editor-in}
\texdefinition{btx:apa:editionset}
\texdefinition{btx:apa:wherefrom-publisher}
@@ -1280,11 +1329,7 @@
\texdefinition{btx:apa:title-if-not-placed}
\texdefinition{btx:apa:editor-in}
\texdefinition{btx:apa:editionset}
- \btxdoif {organization} {
- \btxspace
- \btxflush{organization}
- \btxcomma
- }
+ \texdefinition{btx:apa:organization-if-not-author}
\texdefinition{btx:apa:wherefrom-publisher}
\texdefinition{btx:apa:url-doi-note}
\stopsetups
@@ -1386,7 +1431,11 @@
\btxflush{nationality}
}
\btxspace
- \btxlabeltext{patent}
+ \btxdoifelse{type}{
+ \btxflush{type}
+ }{
+ \btxlabeltext{patent}
+ }
\btxdoif {number} {
\btxspace
\btxlabeltext{apa:number}
@@ -1411,11 +1460,8 @@
\startsetups btx:apa:list:electronic
\texdefinition{btx:apa:authoryear}
\texdefinition{btx:apa:title-if-not-placed}
- \btxdoif {organization} {
- \btxspace
- \btxflush{organization}
- \btxperiod
- }
+ \texdefinition{btx:apa:type}
+ \texdefinition{btx:apa:organization-if-not-author}
\btxdoif {howpublished} {
\btxspace
\btxflush{howpublished}
@@ -1466,11 +1512,7 @@
\btxperiod
}
}
- \btxdoif {type} {
- \btxleftbracket
- \btxflush{type}
- \btxrightbracketperiod
- }
+ \texdefinition{btx:apa:type}
\texdefinition{btx:apa:wherefrom-publisher}
\texdefinition{btx:apa:url-doi-note}
\stopsetups
@@ -1483,7 +1525,7 @@
\fastsetup{btx:apa:list:book}
\stopsetups
-% Use this type when nothing else fits.
+% Use this category when nothing else fits.
% Required fields: none.
% Optional fields: author, title, howpublished, month, year, note.
diff --git a/tex/context/base/mkiv/publ-imp-aps.lua b/tex/context/base/mkiv/publ-imp-aps.lua
index c15ffe918..f5b33c777 100644
--- a/tex/context/base/mkiv/publ-imp-aps.lua
+++ b/tex/context/base/mkiv/publ-imp-aps.lua
@@ -412,6 +412,10 @@ categories.unpublished = {
-- like misc below but includes organization.
categories.electronic = {
+ sets = {
+ author = { "author", "collaboration", "organization", },
+ howpublished = { "howpublished", "doi", "url", },
+ },
required = {
"title"
},
@@ -420,8 +424,8 @@ categories.electronic = {
"year", "month",
"author",
"collaboration",
- "address",
"organization",
+ "address",
"howpublished",
"doi", "url", "note",
},
@@ -430,6 +434,10 @@ categories.electronic = {
-- use this type when nothing else fits.
categories.misc = {
+ sets = {
+ author = { "author", "collaboration", },
+ howpublished = { "howpublished", "doi", "url", },
+ },
required = {
-- nothing is really important here
},
diff --git a/tex/context/base/mkiv/publ-imp-aps.mkvi b/tex/context/base/mkiv/publ-imp-aps.mkvi
index e9cbd7aaf..4180b6491 100644
--- a/tex/context/base/mkiv/publ-imp-aps.mkvi
+++ b/tex/context/base/mkiv/publ-imp-aps.mkvi
@@ -282,6 +282,10 @@
[aps:\s!cite:author]
[aps:\s!cite]
+\definebtx
+ [aps:\s!cite:organization]
+ [aps:\s!cite]
+
\definebtx
[aps:\s!cite:authoryear]
[aps:\s!cite:author]
@@ -551,9 +555,9 @@
\startsetups [btx:aps:list:page-or-pages]
\ifx\currentbtxlastpage\empty
- \btxlabeltext{p}.
+ \btxlabeltext{p}
\else
- \btxlabeltext{pp}.
+ \btxlabeltext{pp}
\fi
\btxnbsp
\stopsetups
@@ -651,6 +655,16 @@
\btxcomma
\stoptexdefinition
+\starttexdefinition unexpanded btx:aps:organization-if-not-author
+ \btxdoif {organization} {
+ \doifnot {\btxfoundname{author}} {organization} {
+ \btxspace
+ \btxflush{organization}
+ \btxcomma
+ }
+ }
+\stoptexdefinition
+
\starttexdefinition unexpanded btx:aps:editor-in
\btxdoif {booktitle} {
\btxlabeltext{In}
@@ -903,6 +917,7 @@
\texdefinition{btx:aps:title}
\texdefinition{btx:aps:editor-in}
\texdefinition{btx:aps:editionset}
+    %\texdefinition{btx:aps:organization-if-not-author}% first check publ-imp-aps.lua
\btxdoif {organization} {
\btxspace
\btxflush{organization}
@@ -1035,11 +1050,7 @@
\startsetups btx:aps:list:electronic
\texdefinition{btx:aps:author}
\texdefinition{btx:aps:title}
- \btxdoif {organization} {
- \btxspace
- \btxflush{organization}
- \btxcomma
- }
+ \texdefinition{btx:aps:organization-if-not-author}
\btxdoif {howpublished} {
\texdefinition{btx:aps:doi-url} {\btxspace\btxflush{howpublished}}
}
diff --git a/tex/context/base/mkiv/publ-imp-cite.mkvi b/tex/context/base/mkiv/publ-imp-cite.mkvi
index be23326b8..8fe96429d 100644
--- a/tex/context/base/mkiv/publ-imp-cite.mkvi
+++ b/tex/context/base/mkiv/publ-imp-cite.mkvi
@@ -62,23 +62,25 @@
\stopsetups
\startsetups btx:cite:lefttext
- \ifx\currentbtxlefttext\empty
- \else
- \setbtxparameterset{\s!cite}{lefttext}
- \btxparameter\c!left
- \currentbtxlefttext
- \btxparameter\c!right
+ \ifx\currentbtxlefttext\empty \else
+ \startbtxrunningstyleandcolor
+ \setbtxparameterset{\s!cite}{lefttext}
+ \btxparameter\c!left
+ \currentbtxlefttext
+ \btxparameter\c!right
+ \stopbtxrunningstyleandcolor
\fi
\stopsetups
\startsetups btx:cite:righttext
- \ifx\currentbtxrighttext\empty
- \else
- \setbtxparameterset{\s!cite}{righttext}
- \btxparameter\c!left
- \currentbtxrighttext
- \btxparameter\c!right
- \fi
+ \ifx\currentbtxrighttext\empty \else
+ \startbtxrunningstyleandcolor
+ \setbtxparameterset{\s!cite}{righttext}
+ \btxparameter\c!left
+ \currentbtxrighttext
+ \btxparameter\c!right
+ \stopbtxrunningstyleandcolor
+ \fi
\stopsetups
\startsetups btx:cite:invalid
@@ -86,44 +88,17 @@
{\tt <\currentbtxreference>}
\stopsetups
-\starttexdefinition unexpanded btx:cite:concat
- \btxparameter{\c!separator:\number\currentbtxconcat}
-\stoptexdefinition
+\startsetups btx:cite:concat
+ \startbtxrunningstyleandcolor
+ \btxparameter{\c!separator:\number\currentbtxconcat}
+ \stopbtxrunningstyleandcolor
+\stopsetups
% when we have an author-year combination, the first and seconds is not
% fields data but something more complex (that itself calls for a setup)
-% \startsetups btx:cite:normal
-% \texdefinition{\s!btx:\s!cite:concat}
-% \currentbtxlefttext
-% \ifx\currentbtxfirst\empty
-% \fastsetup{\s!btx:\s!cite:\s!empty}
-% \else
-% \texdefinition {\s!btx:\s!cite:inject} {
-% \btxcitereference
-% \btxusecommand [\currentbtxspecification:cite:\currentbtxcitealternative] {
-% \currentbtxfirst
-% }
-% }
-% \ifx\currentbtxsecond\empty
-% \btxflushsuffix
-% \else
-% \btxparameter\c!inbetween
-% \texdefinition {\s!btx:\s!cite:inject} {
-% \btxusecommand [\currentbtxspecification:cite:\currentbtxcitealternative] {
-% \currentbtxsecond
-% }
-% }
-% % quite likely an error
-% \btxflushsuffix
-% \fi
-% \fi
-% \currentbtxrighttext
-% \stopsetups
-
\startsetups btx:cite:normal
- \texdefinition{\s!btx:\s!cite:concat}
- %\currentbtxlefttext
+ \fastsetup{\s!btx:\s!cite:concat}
\fastsetup{\s!btx:\s!cite:lefttext}
\ifx\currentbtxfirst\empty
\fastsetup{\s!btx:\s!cite:\s!empty}
@@ -136,21 +111,6 @@
}
}
\else
- % \texdefinition {\s!btx:\s!cite:inject} {
- % \btxcitereference
- % \btxusecommand [\currentbtxspecification:cite:\currentbtxcitealternative] {
- % \currentbtxfirst
- % }
- % }
- % \btxparameter\c!inbetween
- % \texdefinition {\s!btx:\s!cite:inject} {
- % \btxusecommand [\currentbtxspecification:cite:\currentbtxcitealternative] {
- % \currentbtxsecond
- % }
- % }
- % \btxflushsuffix
- %
- % probably better:
\texdefinition {\s!btx:\s!cite:inject} {
\btxcitereference
\btxusecommand [\currentbtxspecification:cite:\currentbtxcitealternative] {
@@ -161,13 +121,11 @@
}
}
\fi\fi
- %\currentbtxrighttext
\fastsetup{\s!btx:\s!cite:righttext}
\stopsetups
\startsetups btx:cite:range
- \texdefinition{\s!btx:\s!cite:concat}
- %\currentbtxlefttext
+ \fastsetup{\s!btx:\s!cite:concat}
\fastsetup{\s!btx:\s!cite:lefttext}
\ifx\currentbtxfirst\empty
\fastsetup{\s!btx:\s!cite:\s!empty}
@@ -184,15 +142,13 @@
}
}
\fi
- %\currentbtxrighttext
\fastsetup{\s!btx:\s!cite:righttext}
\stopsetups
% somehow related to keywords:
\startsetups btx:cite:listelement
- \texdefinition{\s!btx:\s!cite:concat}
- %\currentbtxlefttext
+ \fastsetup{\s!btx:\s!cite:concat}
\fastsetup{\s!btx:\s!cite:lefttext}
\ifx\currentbtxfirst\empty
\fastsetup{\s!btx:\s!cite:\s!empty}
@@ -202,16 +158,13 @@
\currentbtxfirst
}
\fi
- %\currentbtxrighttext
\fastsetup{\s!btx:\s!cite:righttext}
\stopsetups
\startsetups \s!btx:\s!cite:entry
- \texdefinition{\s!btx:\s!cite:concat}
- %\currentbtxlefttext
+ \fastsetup{\s!btx:\s!cite:concat}
\fastsetup{\s!btx:\s!cite:lefttext}
\btxhandleciteentry
- %\currentbtxrighttext
\fastsetup{\s!btx:\s!cite:righttext}
\stopsetups
@@ -279,6 +232,9 @@
\startsetups \s!btx:\s!cite:pages
\fastsetup{\s!btx:\s!cite:range}
\stopsetups
+\startsetups \s!btx:\s!cite:organization
+ \fastsetup{\s!btx:\s!cite:range}
+\stopsetups
% is the next one used?
% Yes, bibtex is a mess and one can have pages or sometimes page
diff --git a/tex/context/base/mkiv/publ-imp-default.lua b/tex/context/base/mkiv/publ-imp-default.lua
index 61b08f30c..f77121c16 100644
--- a/tex/context/base/mkiv/publ-imp-default.lua
+++ b/tex/context/base/mkiv/publ-imp-default.lua
@@ -101,6 +101,7 @@ return {
"title",
"keywords",
"journal", "volume", "number", "pages",
+ "note",
},
},
book = {
@@ -109,15 +110,16 @@ return {
editionset = { "edition", "volume", "number" },
},
required = {
- "author", -- a set
"title",
"year",
},
optional = {
+ "author", -- a set
"subtitle",
"keywords",
"publisher", "address",
"editionset",
+ "note",
},
},
},
diff --git a/tex/context/base/mkiv/publ-imp-default.mkvi b/tex/context/base/mkiv/publ-imp-default.mkvi
index 564bfcf4e..22638c900 100644
--- a/tex/context/base/mkiv/publ-imp-default.mkvi
+++ b/tex/context/base/mkiv/publ-imp-default.mkvi
@@ -46,7 +46,7 @@
[\s!default:\s!cite]
[\s!default]
[\c!alternative=num,
- [\c!compress=\v!yes,
+ \c!compress=\v!yes,
\c!sorttype=normal,
\c!authorconversion=\v!name]
diff --git a/tex/context/base/mkiv/publ-imp-list.mkvi b/tex/context/base/mkiv/publ-imp-list.mkvi
index 68ccaef01..b00c4bd40 100644
--- a/tex/context/base/mkiv/publ-imp-list.mkvi
+++ b/tex/context/base/mkiv/publ-imp-list.mkvi
@@ -13,8 +13,34 @@
\unprotect
-\starttexdefinition unexpanded btx:list:inject #content
- \ifconditional\btxinteractive
+\starttexdefinition unexpanded btx:list:inject:text #content
+ \ifconditional\btxinteractivetext
+ \ifx\currentbtxinternal\empty
+ #content
+ \else
+ \goto {
+ #content
+ } [
+ \s!internal(\currentbtxinternal)
+ ]
+ \fi
+ \else
+ #content
+ \fi
+\stoptexdefinition
+
+\startsetups \s!btx:\s!list:text
+ \texdefinition {\s!btx:\s!list:inject:text} {
+ \btxflushlisttext
+ \ifx\currentbtxcombis\empty \else
+ \btxrenderingparameter\c!separator
+ \btxflushlistcombis
+ \fi
+ }
+\stopsetups
+
+\starttexdefinition unexpanded btx:list:inject:number #content
+ \ifconditional\btxinteractivenumber
\ifx\currentbtxinternal\empty
#content
\else
@@ -33,64 +59,39 @@
\space
\stoptexdefinition
-\startsetups[\s!btx:\s!list:\s!page]
- \texdefinition{\s!btx:\s!list:concat}
- \texdefinition{\s!btx:\s!list:inject} {
- % real pagenumber: todo, userpage
- \currentbtxfirst
- % order
- \high{\currentbtxsecond}
- }
-\stopsetups
-
\startsetups[\s!btx:\s!list:\s!numbering]
- \texdefinition {\s!btx:\s!list:inject} {
+ \texdefinition {\s!btx:\s!list:inject:number} {
\currentbtxfirst
\btxparameter\c!stopper
}
\stopsetups
-% Hans: can the following setups be condensed to one using some variable?
-
-\startsetups[\s!btx:\s!list:\s!numbering:default]
- \btxstartstyleandcolor [\currentbtxspecification:\s!list:\s!numbering:default]
- \btxusecommand[\currentbtxspecification:\s!list:\s!numbering:default] {
+\starttexdefinition unexpanded btx:list:numbering #variant
+ \btxstartstyleandcolor [\currentbtxspecification:\s!list:\s!numbering:#variant]
+ \btxusecommand[\currentbtxspecification:\s!list:\s!numbering:#variant] {
\fastsetup{\s!btx:\s!list:\s!numbering}
}
\btxstopstyleandcolor
+\stoptexdefinition
+
+\startsetups[\s!btx:\s!list:\s!numbering:default]
+ \texdefinition {\s!btx:\s!list:\s!numbering} {default}
\stopsetups
\startsetups[\s!btx:\s!list:\s!numbering:num]
- \btxstartstyleandcolor [\currentbtxspecification:\s!list:\s!numbering:num]
- \btxusecommand[\currentbtxspecification:\s!list:\s!numbering:num] {
- \fastsetup{\s!btx:\s!list:\s!numbering}
- }
- \btxstopstyleandcolor
+ \texdefinition {\s!btx:\s!list:\s!numbering} {num}
\stopsetups
\startsetups[\s!btx:\s!list:\s!numbering:short]
- \btxstartstyleandcolor [\currentbtxspecification:\s!list:\s!numbering:short]
- \btxusecommand [\currentbtxspecification:\s!list:\s!numbering:short] {
- \fastsetup{\s!btx:\s!list:\s!numbering}
- \btxflushsuffix
- }
- \btxstopstyleandcolor
+ \texdefinition {\s!btx:\s!list:\s!numbering} {short}
\stopsetups
\startsetups[\s!btx:\s!list:\s!numbering:tag]
- \btxstartstyleandcolor [\currentbtxspecification:\s!list:\s!numbering:tag]
- \btxusecommand [\currentbtxspecification:\s!list:\s!numbering:tag] {
- \fastsetup{\s!btx:\s!list:\s!numbering}
- }
- \btxstopstyleandcolor
+ \texdefinition {\s!btx:\s!list:\s!numbering} {tag}
\stopsetups
\startsetups[\s!btx:\s!list:\s!numbering:index]
- \btxstartstyleandcolor [\currentbtxspecification:\s!list:\s!numbering:index]
- \btxusecommand [\currentbtxspecification:\s!list:\s!numbering:index] {
- \fastsetup{\s!btx:\s!list:\s!numbering}
- }
- \btxstopstyleandcolor
+ \texdefinition {\s!btx:\s!list:\s!numbering} {index}
\stopsetups
\protect
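
The change above folds five nearly identical numbering setups into one
definition that takes the variant as an argument. The same refactoring pattern
in plain Lua, purely as an illustration of the idea (names invented):

    -- illustrative only: one parameterized constructor instead of copy-pasted variants

    local function makenumbering(variant)
        return function(entry)
            -- the shared numbering logic lives in one place; the variant only
            -- selects which style/color/command hooks get consulted
            return string.format("[%s] %s", variant, entry.number)
        end
    end

    local numbering = { }
    for _, variant in ipairs { "default", "num", "short", "tag", "index" } do
        numbering[variant] = makenumbering(variant)
    end

    -- numbering.short { number = "12" } --> "[short] 12"
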
diff --git a/tex/context/base/mkiv/publ-imp-page.mkvi b/tex/context/base/mkiv/publ-imp-page.mkvi
index 2d9040b10..5bcfe6ea3 100644
--- a/tex/context/base/mkiv/publ-imp-page.mkvi
+++ b/tex/context/base/mkiv/publ-imp-page.mkvi
@@ -24,7 +24,7 @@
\startsetups [\s!btx:\s!list:\s!page]
\fastsetup{\s!btx:\s!list:\s!page:concat}
\fastsetup{\s!btx:\currentbtxspecification:\s!list:page-or-pages}
- \ifconditional\btxinteractive
+ \ifconditional\btxinteractivepage
\goto {
\currentbtxfirstpage
} [
diff --git a/tex/context/base/mkiv/publ-ini.lua b/tex/context/base/mkiv/publ-ini.lua
index 52642c89d..038a15d5a 100644
--- a/tex/context/base/mkiv/publ-ini.lua
+++ b/tex/context/base/mkiv/publ-ini.lua
@@ -57,6 +57,8 @@ local enhancers = publications.enhancers
local tracers = publications.tracers or { }
publications.tracers = tracers
+local setmacro = interfaces.setmacro -- todo
+local setcounter = tex.setcounter -- todo
local variables = interfaces.variables
local v_local = variables["local"]
@@ -77,10 +79,6 @@ local v_dataset = variables.dataset
local conditionals = tex.conditionals
-local logsnewline = logs.newline
-local logspushtarget = logs.pushtarget
-local logspoptarget = logs.poptarget
-
local isdefined = tex.isdefined
----- basicsorter = sorters.basicsorter -- (a,b)
@@ -109,7 +107,6 @@ local ctx_doifnot = commands.doifnot
local ctx_gobbletwoarguments = context.gobbletwoarguments
local ctx_btxhandlelistentry = context.btxhandlelistentry
-local ctx_btxhandlelisttextentry = context.btxhandlelisttextentry
local ctx_btxhandlecombientry = context.btxhandlecombientry
local ctx_btxchecklistentry = context.btxchecklistentry
@@ -119,13 +116,8 @@ local ctx_btxsetnumber = context.btxsetnumber
local ctx_btxsetlanguage = context.btxsetlanguage
local ctx_btxsetcombis = context.btxsetcombis
local ctx_btxsetcategory = context.btxsetcategory
-local ctx_btxcitesetup = context.btxcitesetup
-local ctx_btxsubcitesetup = context.btxsubcitesetup
-local ctx_btxnumberingsetup = context.btxnumberingsetup
-local ctx_btxpagesetup = context.btxpagesetup
local ctx_btxsetfirst = context.btxsetfirst
local ctx_btxsetsecond = context.btxsetsecond
------ ctx_btxsetthird = context.btxsetthird
local ctx_btxsetsuffix = context.btxsetsuffix
local ctx_btxsetinternal = context.btxsetinternal
local ctx_btxsetlefttext = context.btxsetlefttext
@@ -133,12 +125,31 @@ local ctx_btxsetrighttext = context.btxsetrighttext
local ctx_btxsetbefore = context.btxsetbefore
local ctx_btxsetafter = context.btxsetafter
local ctx_btxsetbacklink = context.btxsetbacklink
-local ctx_btxsetcount = context.btxsetcount
-local ctx_btxsetconcat = context.btxsetconcat
-local ctx_btxsetfirstpage = context.btxsetfirstpage
-local ctx_btxsetlastpage = context.btxsetlastpage
local ctx_btxsetfirstinternal = context.btxsetfirstinternal
local ctx_btxsetlastinternal = context.btxsetlastinternal
+local ctx_btxsetauthorfield = context.btxsetauthorfield
+
+-- local ctx_btxsetdataset = function(s) setmacro("currentbtxdataset", s) end -- context.btxsetdataset
+-- local ctx_btxsettag = function(s) setmacro("currentbtxtag", s) end -- context.btxsettag
+-- local ctx_btxsetnumber = function(s) setmacro("currentbtxnumber", s) end -- context.btxsetnumber
+-- local ctx_btxsetlanguage = function(s) setmacro("currentbtxlanguage", s) end -- context.btxsetlanguage
+-- local ctx_btxsetcombis = function(s) setmacro("currentbtxcombis", s) end -- context.btxsetcombis
+-- local ctx_btxsetcategory = function(s) setmacro("currentbtxcategory", s) end -- context.btxsetcategory
+-- local ctx_btxsetfirst = function(s) setmacro("currentbtxfirst", s) end -- context.btxsetfirst
+-- local ctx_btxsetsecond = function(s) setmacro("currentbtxsecond", s) end -- context.btxsetsecond
+-- local ctx_btxsetsuffix = function(s) setmacro("currentbtxsuffix", s) end -- context.btxsetsuffix
+-- local ctx_btxsetinternal = function(s) setmacro("currentbtxinternal", s) end -- context.btxsetinternal
+-- local ctx_btxsetlefttext = function(s) setmacro("currentbtxlefttext", s) end -- context.btxsetlefttext
+-- local ctx_btxsetrighttext = function(s) setmacro("currentbtxrighttext", s) end -- context.btxsetrighttext
+-- local ctx_btxsetbefore = function(s) setmacro("currentbtxbefore", s) end -- context.btxsetbefore
+-- local ctx_btxsetafter = function(s) setmacro("currentbtxafter", s) end -- context.btxsetafter
+-- local ctx_btxsetbacklink = function(s) setmacro("currentbtxbacklink", s) end -- context.btxsetbacklink
+-- local ctx_btxsetfirstinternal = function(s) setmacro("currentbtxfirstinternal", s) end -- context.btxsetfirstinternal
+-- local ctx_btxsetlastinternal = function(s) setmacro("currentbtxlastinternal", s) end -- context.btxsetlastinternal
+
+local ctx_btxsetfirstpage = context.btxsetfirstpage
+local ctx_btxsetlastpage = context.btxsetlastpage
+
local ctx_btxstartcite = context.btxstartcite
local ctx_btxstopcite = context.btxstopcite
local ctx_btxstartciteauthor = context.btxstartciteauthor
@@ -149,12 +160,22 @@ local ctx_btxstartlistentry = context.btxstartlistentry
local ctx_btxstoplistentry = context.btxstoplistentry
local ctx_btxstartcombientry = context.btxstartcombientry
local ctx_btxstopcombientry = context.btxstopcombientry
-local ctx_btxlistsetup = context.btxlistsetup
+
local ctx_btxflushauthor = context.btxflushauthor
+
local ctx_btxsetnoflistentries = context.btxsetnoflistentries
local ctx_btxsetcurrentlistentry = context.btxsetcurrentlistentry
local ctx_btxsetcurrentlistindex = context.btxsetcurrentlistindex
+local ctx_btxsetcount = context.btxsetcount
+local ctx_btxsetconcat = context.btxsetconcat
+
+local ctx_btxcitesetup = context.btxcitesetup
+local ctx_btxsubcitesetup = context.btxsubcitesetup
+local ctx_btxnumberingsetup = context.btxnumberingsetup
+local ctx_btxpagesetup = context.btxpagesetup
+local ctx_btxlistsetup = context.btxlistsetup
+
local trialtypesetting = context.trialtypesetting
languages.data = languages.data or { }
@@ -199,15 +220,13 @@ statistics.register("publications load time", function()
end
end)
-luatex.registerstopactions(function()
- local done = false
+logs.registerfinalactions(function()
+ local done = false
+ local unknown = false
for name, dataset in sortedhash(datasets) do
for command, n in sortedhash(dataset.commands) do
if not done then
- logspushtarget("logfile")
- logsnewline()
- report("start used btx commands")
- logsnewline()
+ logs.startfilelogging(report,"used btx commands")
done = true
end
if isdefined[command] then
@@ -216,14 +235,23 @@ luatex.registerstopactions(function()
report("%-20s %-20s % 5i %s",name,command,n,"KNOWN")
else
report("%-20s %-20s % 5i %s",name,command,n,"unknown")
+ unknown = true
end
end
end
if done then
- logsnewline()
- report("stop used btx commands")
- logsnewline()
- logspoptarget()
+ logs.stopfilelogging()
+ end
+ if unknown and logs.loggingerrors() then
+ logs.starterrorlogging(report,"unknown btx commands")
+ for name, dataset in sortedhash(datasets) do
+ for command, n in sortedhash(dataset.commands) do
+ if not isdefined[command] and not isdefined[upper(command)] then
+ report("%-20s %-20s % 5i %s",name,command,n,"unknown")
+ end
+ end
+ end
+ logs.stoperrorlogging()
end
end)
@@ -334,7 +362,7 @@ local listtolist = nil
do
- local initialize = nil
+ local initialize = nil -- we delay
initialize = function(t)
usedentries = allocate { }
@@ -342,86 +370,75 @@ do
listtocite = allocate { }
listtolist = allocate { }
local names = { }
- local internals = structures.references.internals
local p_collect = (C(R("09")^1) * Carg(1) / function(s,entry) listtocite[tonumber(s)] = entry end + P(1))^0
local nofunique = 0
local nofreused = 0
- for i=1,#internals do
- local entry = internals[i]
- if entry then
- local metadata = entry.metadata
- if metadata then
- local kind = metadata.kind
- if kind == "full" then
- -- reference (in list)
- local userdata = entry.userdata
- if userdata then
- local tag = userdata.btxref
- if tag then
- local set = userdata.btxset or v_default
- local s = usedentries[set]
- if s then
- local u = s[tag]
- if u then
- u[#u+1] = entry
- else
- s[tag] = { entry }
- end
- nofreused = nofreused + 1
- else
- usedentries[set] = { [tag] = { entry } }
- nofunique = nofunique + 1
- end
- -- alternative: collect prev in group
- local bck = userdata.btxbck
- if bck then
- lpegmatch(p_collect,bck,1,entry) -- for s in string.gmatch(bck,"[^ ]+") do listtocite[tonumber(s)] = entry end
- local lst = tonumber(userdata.btxlst)
- if lst then
- listtolist[lst] = entry
- end
+ -- local internals = references.sortedinternals -- todo: when we need it more than once
+ -- for i=1,#internals do -- but currently we don't do this when not
+ -- local entry = internals[i] -- needed anyway so ...
+ local internals = structures.references.internals
+ for i, entry in sortedhash(internals) do
+ local metadata = entry.metadata
+ if metadata then
+ local kind = metadata.kind
+ if kind == "full" then
+ -- reference (in list)
+ local userdata = entry.userdata
+ if userdata then
+ local tag = userdata.btxref
+ if tag then
+ local set = userdata.btxset or v_default
+ local s = usedentries[set]
+ if s then
+ local u = s[tag]
+ if u then
+ u[#u+1] = entry
else
- local int = tonumber(userdata.btxint)
- if int then
- listtocite[int] = entry
- end
+ s[tag] = { entry }
end
- local detail = datasets[set].details[tag]
--- todo: these have to be pluggable
- if detail then
- local author = detail.author
- if author then
- for i=1,#author do
- local a = author[i]
- local s = a.surnames
- if s then
- local c = concat(s,"+")
- local n = names[c]
- if n then
- n[#n+1] = a
- break
- else
- names[c] = { a }
- end
+ nofreused = nofreused + 1
+ else
+ usedentries[set] = { [tag] = { entry } }
+ nofunique = nofunique + 1
+ end
+ -- alternative: collect prev in group
+ local int = tonumber(userdata.btxint)
+ if int then
+ listtocite[int] = entry
+ end
+ local detail = datasets[set].details[tag]
+ -- todo: these have to be pluggable
+ if detail then
+ local author = detail.author
+ if author then
+ for i=1,#author do
+ local a = author[i]
+ local s = a.surnames
+ if s then
+ local c = concat(s,"+")
+ local n = names[c]
+ if n then
+ n[#n+1] = a
+ break
+ else
+ names[c] = { a }
end
end
end
end
end
end
- elseif kind == "btx" or kind == "userdata" then -- will go: kind == "userdata"
- -- list entry (each cite)
- local userdata = entry.userdata
- if userdata then
- local int = tonumber(userdata.btxint)
- if int then
- citetolist[int] = entry
- end
+ end
+ elseif kind == "btx" then
+ -- list entry (each cite)
+ local userdata = entry.userdata
+ if userdata then
+ local int = tonumber(userdata.btxint)
+ if int then
+ citetolist[int] = entry
end
end
end
- else
- -- weird
end
end
for k, v in sortedhash(names) do
@@ -440,7 +457,7 @@ do
end
end
if trace_detail then
- report("%s unique bibentries: %s reused entries",nofunique,nofreused)
+ report("%s unique references, %s reused entries",nofunique,nofreused)
end
initialize = nil
end
@@ -471,7 +488,7 @@ end
local findallused do
local reported = { }
- local finder = publications.finder
+ ----- finder = publications.finder
findallused = function(dataset,reference,internal,forcethem)
local current = datasets[dataset]
@@ -481,6 +498,7 @@ local findallused do
local todo = { }
local okay = { } -- only if mark
local allused = usedentries[dataset] or { }
+ -- local allused = usedentries[dataset] -- only test
local luadata = current.luadata
local details = current.details
local ordered = current.ordered
@@ -585,45 +603,69 @@ local findallused do
end
end
end
- else
- if find then
- tags = { }
- for i=1,#ordered do
- local entry = ordered[i]
- if find(entry) then
- local tag = entry.tag
- local parent = details[tag].parent
- if parent then
- tag = parent
- end
- tags[#tags+1] = tag
- todo[tag] = true
- end
- end
- if #tags == 0 and not reported[reference] then
- tags[1] = reference
- reported[reference] = true
- end
- else
- for i=1,#tags do
- local tag = tags[i]
+ elseif find then
+ tags = { }
+ for i=1,#ordered do
+ local entry = ordered[i]
+ if find(entry) then
+ local tag = entry.tag
local parent = details[tag].parent
if parent then
tag = parent
- tags[i] = tag
- end
- if luadata[tag] then
- todo[tag] = true
- elseif not reported[tag] then
- reported[tag] = true
- report_cite("non-existent entry %a in %a",tag,dataset)
end
+ tags[#tags+1] = tag
+ todo[tag] = true
+ -- okay[#okay+1] = entry -- only test
+ end
+ end
+ if #tags == 0 and not reported[reference] then
+ tags[1] = reference
+ reported[reference] = true
+ end
+ else
+ for i=1,#tags do
+ local tag = tags[i]
+ local parent = details[tag].parent
+ if parent then
+ tag = parent
+ tags[i] = tag
+ end
+ local entry = luadata[tag]
+ if entry then
+ todo[tag] = true
+ -- okay[#okay+1] = entry -- only test
+ elseif not reported[tag] then
+ reported[tag] = true
+ report_cite("non-existent entry %a in %a",tag,dataset)
end
end
end
return okay, todo, tags
end
+ local firstoftwoarguments = context.firstoftwoarguments
+ local secondoftwoarguments = context.secondoftwoarguments
+
+ implement {
+ name = "btxdoifelsematches",
+ arguments = { "string", "string", "string" },
+ actions = function(dataset,tag,expression)
+ local find = publications.finder(dataset,expression)
+ local okay = false
+ if find then
+ local d = datasets[dataset]
+ if d then
+ local e = d.luadata[tag]
+ if e and find(e) then
+ firstoftwoarguments()
+ return
+ end
+ end
+ end
+ secondoftwoarguments()
+ end
+ }
+
end
local function unknowncite(reference)
@@ -1591,6 +1633,8 @@ end
-- lists
+local renderings = { } --- per dataset
+
do
publications.lists = publications.lists or { }
@@ -1604,8 +1648,6 @@ do
-- per rendering
- local renderings = { } --- per dataset
-
setmetatableindex(renderings,function(t,k)
local v = {
list = { },
@@ -1795,6 +1837,7 @@ do
end
local method = specification.method or v_none
local ignored = specification.ignored or ""
+ local filter = specification.filter or ""
rendering.method = method
rendering.ignored = ignored ~= "" and settings_to_set(ignored) or nil
rendering.list = { }
@@ -1819,6 +1862,24 @@ do
end
filtermethod(dataset,rendering,keyword)
local list = rendering.list
+ if list and filter ~= "" then
+ local find = publications.finder(dataset,filter)
+ if find then
+ local luadata = datasets[dataset].luadata
+ local matched = 0
+ for i=1,#list do
+ local found = list[i]
+ local entry = luadata[found[1]]
+ if find(entry) then
+ matched = matched + 1
+ list[matched] = found
+ end
+ end
+ for i=#list,matched + 1,-1 do
+ list[i] = nil
+ end
+ end
+ end
ctx_btxsetnoflistentries(list and #list or 0)
end
@@ -2115,7 +2176,12 @@ do
end
end
- function lists.flushentry(dataset,i,textmode)
+ function lists.flushtag(dataset,i)
+ local li = renderings[dataset].list[i]
+ ctx_btxsettag(li and li[1] or "")
+ end
+
+ function lists.flushentry(dataset,i)
local rendering = renderings[dataset]
local list = rendering.list
local li = list[i]
@@ -2140,6 +2206,18 @@ do
ctx_btxsetcategory(entry.category or "unknown")
ctx_btxsettag(tag)
ctx_btxsetnumber(n)
+ --
+ local citation = citetolist[n]
+ if citation then
+ local references = citation.references
+ if references then
+ local internal = references.internal
+ if internal and internal > 0 then
+ ctx_btxsetinternal(internal)
+ end
+ end
+ end
+ --
if language then
ctx_btxsetlanguage(language)
end
@@ -2163,11 +2241,7 @@ do
ctx_btxsetsuffix(authorsuffix)
end
rendering.userdata = userdata
- if textmode then
- ctx_btxhandlelisttextentry()
- else
- ctx_btxhandlelistentry()
- end
+ ctx_btxhandlelistentry()
ctx_btxstoplistentry()
--
-- context(function()
@@ -2234,6 +2308,7 @@ do
{ "repeated" },
{ "ignored" },
{ "group" },
+ { "filter" },
}
}
}
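
On the TeX end the new key surfaces as a filter option on a rendering, pruning the collected list with a publ-fnd finder; a minimal sketch (the rendering name and the field expression are invented, and the expression syntax is assumed to match the finder used elsewhere in this patch):

\starttyping
\definebtxrendering
  [filtered]
  [filter={author:knuth}]

\placebtxrendering[filtered]
\stoptyping
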
@@ -2256,6 +2331,12 @@ do
arguments = { "string", "integer" }
}
+ implement {
+ name = "btxflushlisttag",
+ actions = lists.flushtag,
+ arguments = { "string", "integer" }
+ }
+
implement {
name = "btxflushlistcombi",
actions = lists.flushcombi,
@@ -2281,10 +2362,44 @@ do
local citevariants = { }
publications.citevariants = citevariants
+ local function btxvalidcitevariant(dataset,variant)
+ local citevariant = rawget(citevariants,variant)
+ if citevariant then
+ return citevariant
+ end
+ local s = datasets[dataset]
+ if s then
+ s = s.specifications
+ end
+ if s then
+ for k, v in sortedhash(s) do
+ s = k
+ break
+ end
+ end
+ if s then
+ s = specifications[s]
+ end
+ if s then
+ s = s.types
+ end
+ if s then
+ variant = s[variant]
+ if variant then
+ citevariant = rawget(citevariants,variant)
+ end
+ if citevariant then
+ return citevariant
+ end
+ end
+ return citevariants.default
+ end
+
local function btxhandlecite(specification)
local dataset = specification.dataset or v_default
local reference = specification.reference
local variant = specification.variant
+ --
if not variant or variant == "" then
variant = "default"
end
@@ -2323,7 +2438,9 @@ do
--
ctx_btxsetdataset(dataset)
--
- citevariants[variant](specification) -- we always fall back on default
+ local citevariant = btxvalidcitevariant(dataset,variant)
+ --
+ citevariant(specification) -- we always fall back on default
end
local function btxhandlenocite(specification)
@@ -2449,24 +2566,26 @@ do
for i=1,#source do
local entry = source[i]
local current = entry.sortkey -- so we need a sortkey !
- if entry.suffix then
- if not first then
- first, last, firstr, lastr = current, current, entry, entry
- else
- flushrange()
- first, last, firstr, lastr = current, current, entry, entry
- end
- else
- if not first then
- first, last, firstr, lastr = current, current, entry, entry
- elseif current == last + 1 then
- last, lastr = current, entry
+ if type(current) == "number" then
+ if entry.suffix then
+ if not first then
+ first, last, firstr, lastr = current, current, entry, entry
+ else
+ flushrange()
+ first, last, firstr, lastr = current, current, entry, entry
+ end
else
- flushrange()
- first, last, firstr, lastr = current, current, entry, entry
+ if not first then
+ first, last, firstr, lastr = current, current, entry, entry
+ elseif current == last + 1 then
+ last, lastr = current, entry
+ else
+ flushrange()
+ first, last, firstr, lastr = current, current, entry, entry
+ end
end
+ tags[#tags+1] = entry.tag
end
- tags[#tags+1] = entry.tag
end
if first and last then
flushrange()
@@ -2519,15 +2638,14 @@ do
local setter = specification.setter
local compressor = specification.compressor
local method = specification.method
+ local varfield = specification.varfield
--
local reference = publications.parenttag(dataset,reference)
--
local found, todo, list = findallused(dataset,reference,internal,method == v_text or method == v_always) -- also when not in list
--
--- inspect(found)
--- inspect(todo)
--- inspect(list)
if not found or #found == 0 then
+-- if not list or #list == 0 then
report("no entry %a found in dataset %a",reference,dataset)
elseif not setup then
report("invalid reference for %a",reference)
@@ -2549,6 +2667,7 @@ do
language = ldata.language,
dataset = dataset,
tag = tag,
+ varfield = varfield,
-- combis = entry.userdata.btxcom,
-- luadata = ldata,
}
@@ -2579,6 +2698,11 @@ do
ctx_btxsettag(tag)
ctx_btxsetcategory(entry.category or "unknown")
--
+ local language = entry.language
+ if language then
+ ctx_btxsetlanguage(language)
+ end
+ --
if lefttext then local text = lefttext [i] ; if text and text ~= "" then ctx_btxsetlefttext (text) end end
if righttext then local text = righttext[i] ; if text and text ~= "" then ctx_btxsetrighttext(text) end end
if before then local text = before [i] ; if text and text ~= "" then ctx_btxsetbefore (text) end end
@@ -2589,11 +2713,12 @@ do
local bl = listtocite[currentcitation]
if bl then
-- we refer to a coming list entry
- ctx_btxsetinternal(bl.references.internal or "")
+ bl = bl.references.internal
else
-- we refer to a previous list entry
- ctx_btxsetinternal(entry.internal or "")
+ bl = entry.internal
end
+ ctx_btxsetinternal(bl and bl > 0 and bl or "")
end
local language = entry.language
if language then
@@ -2665,11 +2790,15 @@ do
local function simplegetter(first,last,field,specification)
local value = first[field]
if value then
- ctx_btxsetfirst(value)
- if last then
- ctx_btxsetsecond(last[field])
+ if type(value) == "string" then
+ ctx_btxsetfirst(value)
+ if last then
+ ctx_btxsetsecond(last[field])
+ end
+ return true
+ else
+ report("missing data type definition for %a",field)
end
- return true
end
end
@@ -2982,11 +3111,6 @@ do
return keysorter(b,a)
end
- local currentbtxciteauthor = function()
- context.currentbtxciteauthor()
- return true -- needed?
- end
-
local function authorcompressor(found,specification)
-- HERE
if specification.sorttype == v_normal then
@@ -3080,10 +3204,6 @@ do
ctx_btxstartciteauthor()
local tag = entry.tag
ctx_btxsettag(tag)
- -- local currentcitation = markcite(entry.dataset,tag)
- -- ctx_btxsetbacklink(currentcitation)
- -- local bl = listtocite[currentcitation]
- -- ctx_btxsetinternal(bl and bl.references.internal or "")
ctx_btxsetfirst(entry[key] or "") -- f_missing(tag)
if suffix then
ctx_btxsetsuffix(entry.suffix)
@@ -3098,9 +3218,13 @@ do
local partialinteractive = false
+ local currentbtxciteauthor = function()
+ context.currentbtxciteauthorbyfield()
+ return true -- needed?
+ end
+
local function authorgetter(first,last,key,specification) -- only first
- -- ctx_btxsetfirst(first.author) -- unformatted
- -- ctx_btxsetfirst(currentbtxciteauthor) -- formatter (much slower)
+ ctx_btxsetauthorfield(first.varfield or "author")
if first.type == "author" then
ctx_btxsetfirst(currentbtxciteauthor) -- formatter (much slower)
else
@@ -3127,15 +3251,16 @@ do
return true
end
- -- author
+ -- author (the varfield hack is for editor and translator i.e author type)
local function setter(data,dataset,tag,entry)
- data.author, data.field, data.type = getcasted(dataset,tag,"author")
+ data.author, data.field, data.type = getcasted(dataset,tag,data.varfield or "author")
data.sortkey = text and lpegmatch(numberonly,text)
data.authorhash = getdetail(dataset,tag,"authorhash") -- todo let getcasted return
end
local function getter(first,last,_,specification)
+ ctx_btxsetauthorfield(specification.varfield or "author")
if first.type == "author" then
ctx_btxsetfirst(currentbtxciteauthor) -- formatter (much slower)
else
@@ -3150,6 +3275,7 @@ do
setup = "author",
setter = setter,
getter = getter,
+ varfield = presets.variant or "author",
compressor = authorcompressor,
})
end
@@ -3239,7 +3365,10 @@ do
local function btxlistvariant(dataset,block,tag,variant,listindex)
local action = listvariants[variant] or listvariants.default
if action then
- action(dataset,block,tag,variant,tonumber(listindex) or 0)
+ listindex = tonumber(listindex)
+ if listindex then
+ action(dataset,block,tag,variant,listindex)
+ end
end
end
@@ -3299,33 +3428,6 @@ do
ctx_btxnumberingsetup(variant or "short")
end
- function listvariants.page(dataset,block,tag,variant,listindex)
- local rendering = renderings[dataset]
- local specification = rendering.list[listindex]
- for i=3,#specification do
- local backlink = tonumber(specification[i])
- if backlink then
- local citation = citetolist[backlink]
- if citation then
- local references = citation.references
- if references then
- local internal = references.internal
- local realpage = references.realpage
- if internal and realpage then
- ctx_btxsetconcat(i-2)
- ctx_btxsetfirst(realpage)
- ctx_btxsetsecond(backlink)
- if trace_detail then
- report("expanding %a list setup %a","page",variant)
- end
- ctx_btxlistsetup(variant)
- end
- end
- end
- end
- end
- end
-
end
-- a helper
@@ -3336,7 +3438,7 @@ do
-- local lpegmatch = lpeg.match
local splitter = lpeg.tsplitat(":")
- interfaces.implement {
+ implement {
name = "checkinterfacechain",
arguments = { "string", "string" },
actions = function(str,command)
diff --git a/tex/context/base/mkiv/publ-ini.mkiv b/tex/context/base/mkiv/publ-ini.mkiv
index 5f9aaa692..9f970547d 100644
--- a/tex/context/base/mkiv/publ-ini.mkiv
+++ b/tex/context/base/mkiv/publ-ini.mkiv
@@ -45,15 +45,15 @@
\writestatus{loading}{ConTeXt Publication Support / Initialization}
-\registerctxluafile{publ-dat}{1.001}
-\registerctxluafile{publ-ini}{1.001}
-\registerctxluafile{publ-sor}{1.001}
-\registerctxluafile{publ-aut}{1.001}
-\registerctxluafile{publ-usr}{1.001}
-\registerctxluafile{publ-oth}{1.001} % this could become an option
-\registerctxluafile{publ-fnd}{1.001} % new method (for the moment only local)
-\registerctxluafile{publ-jrn}{1.001}
-\registerctxluafile{publ-reg}{1.001}
+\registerctxluafile{publ-dat}{}
+\registerctxluafile{publ-ini}{}
+\registerctxluafile{publ-sor}{}
+\registerctxluafile{publ-aut}{}
+\registerctxluafile{publ-usr}{}
+\registerctxluafile{publ-oth}{} % this could become an option
+\registerctxluafile{publ-fnd}{} % new method (for the moment only local)
+\registerctxluafile{publ-jrn}{}
+\registerctxluafile{publ-reg}{}
\unprotect
@@ -68,7 +68,6 @@
\def\s!btxset {btxset}
\def\s!btxref {btxref}
\def\s!btxint {btxint}
-\def\s!btxbck {btxbck}
\def\s!btxltx {btxltx}
\def\s!btxrtx {btxrtx}
\def\s!btxatx {btxatx}
@@ -224,8 +223,7 @@
\installcommandhandler \??btxregister {btxregister} \??btxregister
\installcommandhandler \??btxrendering {btxrendering} \??btxrendering
-\let\currentbtxcitealternative\empty
-
+\let\currentbtxcitealternative \empty
\let\currentbtxspecificationfallback\empty
\unexpanded\def\setbtxparameterset#1#2%
@@ -356,15 +354,6 @@
\unexpanded\def\definebtxcommand#1% {body} #1..#n{body}
{\setuvalue{\??btxcommand\csstring#1}}%
-% tracing
-
-\installtextracker
- {publications.crosslinks}
- {\let\btx_trace_list_cross\strc_references_tracer}
- {\let\btx_trace_list_cross\gobbletwoarguments}
-
-\let\btx_trace_list_cross\gobbletwoarguments
-
% access
\let\currentbtxtag \empty
@@ -424,13 +413,11 @@
%D Variables:
\let\currentbtxbacklink \empty \unexpanded\def\btxsetbacklink {\def\currentbtxbacklink}
-\let\currentbtxbacktrace \empty \unexpanded\def\btxsetbacktrace {\def\currentbtxbacktrace}
\let\currentbtxcategory \empty \unexpanded\def\btxsetcategory {\def\currentbtxcategory}
\let\currentbtxcombis \empty \unexpanded\def\btxsetcombis {\def\currentbtxcombis}
\let\currentbtxdataset \empty \unexpanded\def\btxsetdataset {\def\currentbtxdataset}
\let\currentbtxfirst \empty \unexpanded\def\btxsetfirst {\def\currentbtxfirst}
\let\currentbtxsecond \empty \unexpanded\def\btxsetsecond {\def\currentbtxsecond}
-%let\currentbtxthird \empty \unexpanded\def\btxsetthird {\def\currentbtxthird}
\let\currentbtxsuffix \empty \unexpanded\def\btxsetsuffix {\def\currentbtxsuffix}
\let\currentbtxinternal \empty \unexpanded\def\btxsetinternal {\def\currentbtxinternal}
\let\currentbtxlefttext \empty \unexpanded\def\btxsetlefttext {\def\currentbtxlefttext}
@@ -440,13 +427,15 @@
\let\currentbtxlanguage \empty \unexpanded\def\btxsetlanguage {\def\currentbtxlanguage}
\let\currentbtxtag \empty \unexpanded\def\btxsettag {\def\currentbtxtag}
\let\currentbtxnumber \empty \unexpanded\def\btxsetnumber {\def\currentbtxnumber}
-\let\currentbtxauthorvariant\v!normal \unexpanded\def\btxsetauthorvariant{\def\currentbtxauthorvariant}
+\let\currentbtxfirstinternal\empty \unexpanded\def\btxsetfirstinternal{\def\currentbtxfirstinternal}
+\let\currentbtxlastinternal \empty \unexpanded\def\btxsetlastinternal {\def\currentbtxlastinternal}
-\let\currentbtxfirstnames \empty \unexpanded\def\btxsetfirstnames{\let\currentbtxfirstnames\currentbtxfirstnames_indeed}
-\let\currentbtxinitials \empty \unexpanded\def\btxsetinitials {\let\currentbtxinitials \currentbtxinitials_indeed }
-\let\currentbtxjuniors \empty \unexpanded\def\btxsetjuniors {\let\currentbtxjuniors \currentbtxjuniors_indeed }
-\let\currentbtxsurnames \empty \unexpanded\def\btxsetsurnames {\let\currentbtxsurnames \currentbtxsurnames_indeed }
-\let\currentbtxvons \empty \unexpanded\def\btxsetvons {\let\currentbtxvons \currentbtxvons_indeed }
+\let\currentbtxauthorvariant\v!normal \unexpanded\def\btxsetauthorvariant{\def\currentbtxauthorvariant}
+\let\currentbtxfirstnames \empty \unexpanded\def\btxsetfirstnames {\let\currentbtxfirstnames\currentbtxfirstnames_indeed}
+\let\currentbtxinitials \empty \unexpanded\def\btxsetinitials {\let\currentbtxinitials \currentbtxinitials_indeed }
+\let\currentbtxjuniors \empty \unexpanded\def\btxsetjuniors {\let\currentbtxjuniors \currentbtxjuniors_indeed }
+\let\currentbtxsurnames \empty \unexpanded\def\btxsetsurnames {\let\currentbtxsurnames \currentbtxsurnames_indeed }
+\let\currentbtxvons \empty \unexpanded\def\btxsetvons {\let\currentbtxvons \currentbtxvons_indeed }
\newconstant\currentbtxoverflow \unexpanded\def\btxsetoverflow #1{\currentbtxoverflow #1\relax}
\newconstant\currentbtxconcat \unexpanded\def\btxsetconcat #1{\currentbtxconcat #1\relax}
@@ -461,10 +450,8 @@
\unexpanded\def\currentbtxsurnames_indeed {\clf_btxcurrentsurnames \numexpr\currentbtxauthorindex\relax}
\unexpanded\def\currentbtxvons_indeed {\clf_btxcurrentvons \numexpr\currentbtxauthorindex\relax}
-\let\currentbtxfirstpage \empty \unexpanded\def\btxsetfirstpage #1{\def\currentbtxfirstpage{\btx_page_number{#1}}}
-\let\currentbtxlastpage \empty \unexpanded\def\btxsetlastpage #1{\def\currentbtxlastpage {\btx_page_number{#1}}}
-\let\currentbtxfirstinternal\empty \unexpanded\def\btxsetfirstinternal {\def\currentbtxfirstinternal}
-\let\currentbtxlastinternal \empty \unexpanded\def\btxsetlastinternal {\def\currentbtxlastinternal}
+\let\currentbtxfirstpage \empty \unexpanded\def\btxsetfirstpage#1{\def\currentbtxfirstpage{\btx_page_number{#1}}}
+\let\currentbtxlastpage \empty \unexpanded\def\btxsetlastpage #1{\def\currentbtxlastpage {\btx_page_number{#1}}}
\def\currentbtxauthorvariant{normal}
@@ -477,7 +464,6 @@
\let\currentbtxbefore \empty
\let\currentbtxafter \empty
\let\currentbtxbacklink \empty
- \let\currentbtxbacktrace\empty
\let\currentbtxlanguage \empty
\let\currentbtxsuffix \empty
%\let\currentbtxdataset \empty % will always be set
@@ -494,7 +480,6 @@
\let \currentbtxbefore \empty
\let \currentbtxafter \empty
\let \currentbtxbacklink \empty
- \let \currentbtxbacktrace \empty % not used here
\let \currentbtxlanguage \empty
%\let \currentbtxdataset \empty % will always be set, beware of local reset ~
%\let \currentbtxtag \empty % will always be set, beware of local reset ~
@@ -576,6 +561,7 @@
[\c!prefixstopper=:,
\c!state=\v!start,
\c!alternative=a,
+ \c!interaction=\v!none,
%\c!alternative=\v!paragraph,
%\c!width=\v!auto,
%\c!distance=\emwidth,
@@ -635,7 +621,7 @@
\btxparameter\c!right}%
\endgroup}
-\unexpanded\def\btxpagesetup#1% there will b eno left|right|command|style at this inner level
+\unexpanded\def\btxpagesetup#1% there will be no left|right|command|style at this inner level
{\begingroup
\publ_fast_setup\plusfive\s!list\s!page
\endgroup
@@ -653,6 +639,22 @@
% end of page stuff
+\unexpanded\def\btxflushlisttext
+ {\begingroup
+ \usebtxstyleandcolor\c!style\c!color
+ \ignorespaces
+ \publ_fast_setup\plusfour\s!list\currentbtxcategory
+ \removeunwantedspaces
+ \endgroup}
+
+\unexpanded\def\btxflushlistcombis
+ {\begingroup
+ \processcommacommand[\currentbtxcombis]\btx_entry_inject_combi % maybe in lua
+ \endgroup}
+
+\def\btx_entry_inject_list_text
+ {\publ_fast_setup\plusfour\s!list\s!text}
+
\unexpanded\def\btx_entry_inject
{\begingroup
 \redoconvertfont % see (**) in strc-lst, this will become a configuration option
@@ -661,19 +663,7 @@
\ifconditional\c_btx_list_texts
\currentbtxbefore
\fi
- \begingroup
- \usebtxstyleandcolor\c!style\c!color
- \ignorespaces
- \publ_fast_setup\plusfour\s!list\currentbtxcategory
- \removeunwantedspaces
- \endgroup
- \ifx\currentbtxcombis\empty \else
- \btxrenderingparameter\c!separator
- % maybe move this loop to lua
- \begingroup
- \processcommacommand[\currentbtxcombis]\btx_entry_inject_combi
- \endgroup
- \fi
+ \btx_entry_inject_list_text
\ifconditional\c_btx_list_pages
\btx_entry_inject_pages
\fi
@@ -723,11 +713,12 @@
% \removeunwantedspaces
% \endgroup}
-\unexpanded\def\completebtxrendering{\dodoubleempty\publ_place_list_complete}
\unexpanded\def\placebtxrendering {\dodoubleempty\publ_place_list_standard}
+\unexpanded\def\completebtxrendering{\dodoubleempty\publ_place_list_complete}
+\unexpanded\def\flushbtxrendering {\dodoubleempty\publ_place_list_special }
-\let\completelistofpublications\completebtxrendering
-\let\placelistofpublications \placebtxrendering
+\let\completelistofpublications\completebtxrendering % for old times sake
+\let\placelistofpublications \placebtxrendering % for old times sake
\newtoks\everybtxlistrendering
@@ -743,6 +734,8 @@
%
\iflocation
\letinteractionparameter\c!style\empty
+% \letinteractionparameter\c!color\empty
+% \letinteractionparameter\c!contrastcolor\empty
\fi
\to \everybtxlistrendering
@@ -774,27 +767,27 @@
\let\btxdoifsameaspreviouselse \btxdoifelsesameasprevious
\let\btxdoifcombiinlistelse \btxdoifelsecombiinlist
-\def\publ_place_list_indeed#1[#2][#3]%
+\def\publ_place_list_indeed#1#2[#3][#4]%
{\begingroup
\ifsecondargument
% [rendering] [settings]
- \edef\currentbtxrendering{#2}%
- \setupcurrentbtxrendering[#3]%
+ \edef\currentbtxrendering{#3}%
+ \setupcurrentbtxrendering[#4]%
\edef\p_specification{\btxrenderingparameter\c!specification}%
\ifx\p_specification\empty\else
\let\currentbtxspecification\p_specification
\fi
\else\iffirstargument
- \doifelseassignment{#2}
+ \doifelseassignment{#3}
{% [settings]
\let\currentbtxrendering\currentbtxspecification
- \setupcurrentbtxrendering[#2]%
+ \setupcurrentbtxrendering[#3]%
\edef\p_specification{\btxrenderingparameter\c!specification}%
\ifx\p_specification\empty\else
\let\currentbtxspecification\p_specification
\let\currentbtxrendering\currentbtxspecification % tricky
\fi}
- {\edef\currentbtxrendering{#2}%
+ {\edef\currentbtxrendering{#3}%
\edef\p_specification{\btxrenderingparameter\c!specification}%
\ifx\p_specification\empty\else
\let\currentbtxspecification\p_specification
@@ -815,13 +808,11 @@
\ifx\currentbtxrendering\empty
\setbtxrendering % hm
\fi
- \btxrenderingparameter\c!before
\edef\currentbtxdataset{\btxrenderingparameter\c!dataset}%
\uselanguageparameter\btxdatasetparameter % new
\setbtxlist
\the\everystructurelist
\the\everysetupbtxlistplacement
- \forgetall
% why not pass this with collect .. todo
% here we just collect items
\clf_btxcollectlistentries
@@ -835,40 +826,135 @@
repeated {\btxrenderingparameter\c!repeat}%
ignored {\btxrenderingparameter\c!ignore}%
group {\btxrenderingparameter\c!group}%
+ filter {\btxrenderingparameter\c!filter}%
\relax
\ifnum\nofbtxlistentries>\zerocount
- \startpacked[\v!blank]%
- % sorting and so
- \clf_btxpreparelistentries{\currentbtxdataset}% could be put in collect
- % next we analyze the width
- \ifx\currentbtxnumbering\empty \else
- \edef\p_width{\listparameter\c!width}%
- \ifx\p_width\v!auto
- \setbox\scratchbox\vbox \bgroup
- \settrialtypesetting
- \clf_btxfetchlistentries{\currentbtxdataset}%
- \egroup
- \d_publ_number_width\wd\scratchbox
- \letlistparameter\c!width\d_publ_number_width
+ \forgetall
+ \btxrenderingparameter\c!before
+ \ifconditional#2\relax
+ \edef\p_command{\btxrenderingparameter\c!command}%
+ \ifx\p_command\empty
+ \edef\p_setups{\btxrenderingparameter\c!setups}%
+ \ifx\p_setups\empty
+ \else
+ \directsetup{\p_setups}%
+ \fi
+ \else
+ \expandafter\p_command\expandafter{\number\nofbtxlistentries}\relax
\fi
+ \else
+ \startpacked[\v!blank]%
+ % sorting and so
+ \clf_btxpreparelistentries{\currentbtxdataset}% could be put in collect
+ % next we analyze the width
+ \ifx\currentbtxnumbering\empty \else
+ \edef\p_width{\listparameter\c!width}%
+ \ifx\p_width\v!auto
+ \setbox\scratchbox\vbox \bgroup
+ \settrialtypesetting
+ \clf_btxfetchlistentries{\currentbtxdataset}%
+ \egroup
+ \d_publ_number_width\wd\scratchbox
+ \letlistparameter\c!width\d_publ_number_width
+ \fi
+ \fi
+ \doifelse{\listparameter\c!prefix}\v!yes\settrue\setfalse\c_publ_prefixed
+ % this actually typesets them, we loop here as otherwise the whole
+ % bunch gets flushed at once
+ \dorecurse\nofbtxlistentries
+ {\let\currentbtxlistentry\recurselevel
+ \clf_btxflushlistentry{\currentbtxdataset}\currentbtxlistentry\relax}%
+ \stoppacked
\fi
- \doifelse{\listparameter\c!prefix}\v!yes\settrue\setfalse\c_publ_prefixed
- % this actually typesets them, we loop here as otherwise the whole
- % bunch gets flushed at once
- \dorecurse\nofbtxlistentries
- {\let\currentbtxlistentry\recurselevel
- \clf_btxflushlistentry{\currentbtxdataset}\currentbtxlistentry\relax}%
- \stoppacked
+ \btxrenderingparameter\c!after
\fi
- \btxrenderingparameter\c!after
- \global\advance\btxblock\plusone
\ifconditional#1\relax
\stopnamedsection
\fi
+ \global\advance\btxblock\plusone
\endgroup}
-\def\publ_place_list_complete{\publ_place_list_indeed\conditionaltrue}
-\def\publ_place_list_standard{\publ_place_list_indeed\conditionalfalse}
+\def\publ_place_list_standard{\publ_place_list_indeed\conditionalfalse\conditionalfalse}
+\def\publ_place_list_complete{\publ_place_list_indeed\conditionaltrue \conditionalfalse}
+\def\publ_place_list_special {\publ_place_list_indeed\conditionalfalse\conditionaltrue}
+
+%D This is somewhat special (for Alan of course):
+%D
+%D \starttyping
+%D % #1 is number of entries
+%D
+%D \starttexdefinition unexpanded btx:for:alan:wrapper #1
+%D \bTABLE
+%D % we can have a command or setups
+%D \flushbtxentries[command=\texdefinition{btx:for:alan:content}]
+%D \eTABLE
+%D \stoptexdefinition
+%D
+%D % #1 is tag
+%D
+%D \starttexdefinition unexpanded btx:for:alan:content #1
+%D \bTR
+%D \bTD
+%D \btxsettag{#1}
+%D \btxfield{name}
+%D \eTD
+%D \eTR
+%D \stoptexdefinition
+%D
+%D % we can have a command or setups
+%D
+%D \flushbtxrendering [method=dataset,command=\texdefinition{btx:for:alan:wrapper}]
+%D \stoptyping
+%D
+%D Because we want to be ungrouped we use a special loop construct.
+
+\unexpanded\def\btxsetlisttag#1%
+ {\clf_btxflushlisttag{\currentbtxdataset}#1\relax}
+
+\newcount\c_btx_list_index
+\let\m_btx_list_action\empty
+
+\def\publ_flush_list_step_command
+ {\btxsetlisttag{\c_btx_list_index}
+ \expandafter\m_btx_list_action\expandafter{\currentbtxtag}%
+ \ifnum\c_btx_list_index<\nofbtxlistentries
+ \advance\c_btx_list_index\plusone
+ \expandafter\publ_flush_list_step_command
+ \else
+ \glet\m_btx_list_action\relax
+ \fi}
+
+\def\publ_flush_list_step_setup
+ {\btxsetlisttag{\c_btx_list_index}
+ \directsetup{\m_btx_list_action}%
+ \ifnum\c_btx_list_index<\nofbtxlistentries
+ \advance\c_btx_list_index\plusone
+ \expandafter\publ_flush_list_step_setup
+ \else
+ \glet\m_btx_list_action\relax
+ \fi}
+
+\unexpanded\def\flushbtxentries[#1]%
+ {\begingroup
+ \getdummyparameters[\c!command=,\c!setups=,#1]%
+ \xdef\m_btx_list_action{\dummyparameter\c!command}%
+ \ifx\m_btx_list_action\empty
+ \xdef\m_btx_list_action{\dummyparameter\c!setups}%
+ \ifx\m_btx_list_action\empty
+ \endgroup
+ \c_btx_list_index\zerocount
+ \else
+ \endgroup
+ \c_btx_list_index\plusone
+ \doubleexpandafter\publ_flush_list_step_setup
+ \fi
+ \else
+ \endgroup
+ \c_btx_list_index\plusone
+ \expandafter\publ_flush_list_step_command
+ \fi}
+
+%D So far.
\def\currentbtxblock{\number\btxblock}
@@ -924,7 +1010,7 @@
\unexpanded\def\btx_reference_indeed
{\begingroup
- % redundantm will go away:
+ % redundant will go away:
\setbtxparameterset{\c!list:\s!numbering}\currentbtxnumbering
%
\ifx\currentbtxnumbering\empty
@@ -976,16 +1062,16 @@
\endgroup}
\def\btx_list_reference_inject_now
- {\btx_trace_list_cross\empty\currentbtxbacktrace
- \strc_references_direct_full_user
+ {\strc_references_direct_full_user
{\ifx\currentbtxdataset\v!default\else\s!btxset=\currentbtxdataset,\fi%
\s!btxref=\currentbtxtag,%
\s!btxspc=\currentbtxspecification,%
\s!btxlst=\number\c_btx_list_reference,% check if needed
- %\ifx\currentbtxcombis\empty\else\s!btxcom={\currentbtxcombis},\fi%
- \ifx\currentbtxbefore\empty\else\s!btxbtx={\currentbtxbefore},\fi%
- \ifx\currentbtxafter \empty\else\s!btxatx={\currentbtxafter },\fi%
- \ifx\currentbtxbacklink\currentbtxbacktrace\s!btxint=\currentbtxbacklink\else\s!btxbck=\currentbtxbacktrace\fi}%
+ %\ifx\currentbtxcombis \empty\else\s!btxcom={\currentbtxcombis},\fi%
+ \ifx\currentbtxbefore \empty\else\s!btxbtx={\currentbtxbefore},\fi%
+ \ifx\currentbtxafter \empty\else\s!btxatx={\currentbtxafter },\fi%
+ \ifx\currentbtxbacklink\empty\else\s!btxint=\number\currentbtxbacklink\fi
+ }%
{\s!btx::\v!list::\number\c_btx_list_reference}%
{\currentbtxnumber}}
@@ -998,27 +1084,26 @@
\iftrialtypesetting \else
\ifx\currentbtxbacklink\empty
% can be made empty when combining author / year
- \else
+ \else\ifnum\currentbtxbacklink>\zerocount
\btx_cite_reference_inject_indeed
\settrue\c_btx_cite_reference_injected
- \fi
+ \fi\fi
\fi
\fi}
\newtoks\t_btx_reference_inject
\def\btx_cite_reference_inject_indeed
- {\btx_trace_list_cross\currentbtxbacklink\empty
- \the\t_btx_reference_inject
+ {\the\t_btx_reference_inject
\strc_lists_inject_direct % todo: make like \btx_list_reference_inject_now with { }
[\s!btx]%
[\c!type=\s!btx]% \c!location=\v!none
[\ifx\currentbtxdataset\v!default\else\s!btxset=\currentbtxdataset,\fi%
\s!btxref=\currentbtxtag,%
- %\ifx\currentbtxcombis\empty\else\s!btxcom={\currentbtxcombis},\fi%
- \ifx\currentbtxbefore\empty\else\s!btxbtx={\currentbtxbefore},\fi%
- \ifx\currentbtxafter \empty\else\s!btxatx={\currentbtxafter },\fi%
- \s!btxint=\number\currentbtxbacklink
+ %\ifx\currentbtxcombis \empty\else\s!btxcom={\currentbtxcombis},\fi%
+ \ifx\currentbtxbefore \empty\else\s!btxbtx={\currentbtxbefore},\fi%
+ \ifx\currentbtxafter \empty\else\s!btxatx={\currentbtxafter },\fi%
+ \ifx\currentbtxbacklink\empty\else\s!btxint=\number\currentbtxbacklink,\fi
\ifx\currentbtxciteuservariables\empty\else,\currentbtxciteuservariables\fi]}
\def\currentbtxuservariable #1{\clf_btxuservariable {\currentbtxdataset}{#1}}
@@ -1055,6 +1140,9 @@
\def\btx_remap_author[#1][#2]%
{\clf_btxremapauthor{#1}{#2}}
+\unexpanded\def\btxshowauthorremapping
+ {\clf_btxshowauthorremapping}
+
\unexpanded\def\btxflushauthor
{\doifelsenextoptionalcs\btx_flush_author_yes\btx_flush_author_nop}
@@ -1084,6 +1172,7 @@
etaldisplay {\btxparameter\c!etaldisplay}%
etaloption {\btxparameter\c!etaloption}%
symbol {\btxparameter{\c!stopper:initials}}%
+ connector {\btxparameter{\c!connector:initials}}%
}%
\relax
\endgroup}
@@ -1096,7 +1185,11 @@
\unexpanded\def\btxflushauthorinverted {\btx_flush_author{inverted}} % #1
\unexpanded\def\btxflushauthorinvertedshort{\btx_flush_author{invertedshort}} % #1
-\unexpanded\def\currentbtxciteauthor % always author
+\let\currentbtxauthorfield\s!author
+
+\unexpanded\def\btxsetauthorfield#1{\edef\currentbtxauthorfield{#1}}
+
+\unexpanded\def\currentbtxciteauthorbyfield
{\begingroup
%\setbtxparameterset\s!cite\s!author
% the alternatives inherit from cite:author
@@ -1105,7 +1198,7 @@
\clf_btxauthor
{\currentbtxdataset}%
{\currentbtxtag}%
- {\s!author}%
+ {\currentbtxauthorfield}%
{%
combiner {\btxparameter\c!authorconversion}%
kind {cite}%
@@ -1117,6 +1210,10 @@
\relax
\endgroup}
+\unexpanded\def\currentbtxciteauthor
+ {\let\currentbtxauthorfield\s!author
+ \currentbtxciteauthorbyfield} % always author
+
\unexpanded\def\btxstartauthor#1#2#3% a state > 0 signals that some authors can clash
{\begingroup
\currentbtxauthorindex#1\relax
@@ -1140,43 +1237,50 @@
% \btxflushauthor{author}
% \btxflushauthor{editor}
-%
-% \btxflushauthor[name]{author}
-% \btxflushauthor[normal]{author}
-% \btxflushauthor[normalshort]{author}
-% \btxflushauthor[inverted]{author}
-% \btxflushauthor[invertedshort]{author}
% Interaction
+%
+% Because we have more complex entries in lists we don't use the normal list
+% interaction features.
\newconditional\btxinteractive
-\newconditional\btx_interactive
+\newconditional\btxinteractivenumber
+\newconditional\btxinteractivetext
+\newconditional\btxinteractivepage
+
+\let\currentbtxinteraction\empty
+
+\installcorenamespace{btxinteraction}
-% of maybe modes?
+\setvalue{\??btxinteraction\v!number}{\settrue\btxinteractivenumber}
+\setvalue{\??btxinteraction\v!text }{\settrue\btxinteractivetext}
+\setvalue{\??btxinteraction\v!page }{\settrue\btxinteractivepage}
+\setvalue{\??btxinteraction\v!all }{\settrue\btxinteractivenumber
+ \settrue\btxinteractivetext
+ \settrue\btxinteractivepage}
+
+% \setupbtx[interaction=page] % or text or number or all
+% \setupbtxrendering[pagestate=start]
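
A minimal usage sketch of the hint in the two comment lines above, making only the page numbers in a rendered list interactive (no names assumed beyond those comments):

\starttyping
\setupbtx
  [interaction=page]
\setupbtxrendering
  [pagestate=start]
\placebtxrendering
\stoptyping
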
\appendtoks
\iflocation
- \edef\p_interaction{\btxparameter\c!interaction}%
- \ifx\p_interaction\v!stop
+ \edef\currentbtxinteraction{\btxparameter\c!interaction}%
+ \ifx\currentbtxinteraction\v!stop
\setfalse\btxinteractive
\else
+ \let\structurelistlocation\empty
\settrue\btxinteractive
- \ifx\p_interaction\v!all
- \settrue\btx_interactive
- \else
- \setfalse\btx_interactive
- \fi
+ \begincsname\??btxinteraction\currentbtxinteraction\endcsname
\fi
\else
\setfalse\btxinteractive
- \setfalse\btx_interactive
\fi
\to \everysetupbtxlistplacement
\appendtoks
\iflocation
- \edef\p_interaction{\btxparameter\c!interaction}%
- \ifx\p_interaction\v!stop
+ \edef\currentbtxinteraction{\btxparameter\c!interaction}%
+ \ifx\currentbtxinteraction\v!stop
\setfalse\btxinteractive
\else
\settrue\btxinteractive
@@ -1223,6 +1327,7 @@
\let\p_publ_cite_righttext\empty
\let\currentbtxciteuservariables\empty
+\let\currentbtxcitealternative \empty
\unexpanded\def\btxhybridcite % so one can alias the old
{\dontleavehmode
@@ -1262,6 +1367,11 @@
\let\currentbtxcitetag\p_reference
\edef\currentbtxciteuservariables{#2}%
\fi
+ \edef\p_specification{\dummyparameter\c!specification}%
+ \ifx\p_specification\empty
+ \else
+ \let\currentbtxspecification\p_specification
+ \fi
\edef\p_alternative{\dummyparameter\c!alternative}%
\ifx\p_alternative\empty
\setbtxparametersetroot\s!cite
@@ -1330,6 +1440,7 @@
\def\publ_cite_handle_variant_indeed[#1]%
{\letbtxparameter\c!alternative\currentbtxcitealternative
\edef\currentbtxreference{#1}%
+ \saverunningstyleandcolor
\usebtxstyleandcolor\c!style\c!color
\uselanguageparameter\btxdatasetparameter % new
\btxparameter\c!left
@@ -1393,8 +1504,9 @@
\unexpanded\def\citation {\doifelsenextoptionalcs\btxlistcitation \btxdirectlistcite}
\unexpanded\def\nocitation{\doifelsenextoptionalcs\btxhiddencitation\btxdirecthiddencite}
-\let\cite \citation
-\let\nocite\nocitation
+\let\cite \citation
+\let\nocite \nocitation
+\let\usecitation\nocitation
\unexpanded\def\publ_entry_citation {\doifelsenextoptionalcs\btxlistcitation \btxdirectlistcite}
\unexpanded\def\publ_entry_nocitation{\doifelsenextoptionalcs\btxhiddencitation\btxdirecthiddencite}
@@ -1551,6 +1663,7 @@
\unexpanded\def\btxstartsubcite#1%
{\begingroup
\btx_reset_cite % todo: limited set
+ % \saverunningstyleandcolor % let's see when Alan needs it
\def\currentbtxcitealternative{#1}%
\setbtxparameterset\s!cite\currentbtxcitealternative
\usebtxstyleandcolor\c!style\c!color
@@ -1783,6 +1896,19 @@
\unexpanded\def\btxusecommand[#1]#2% using #2 permits space after []
{\namedbtxparameter{#1}\c!command{#2}}
+\unexpanded\def\startbtxrunningstyleandcolor
+ {\dontleavehmode
+ \begingroup
+ \restorerunningstyleandcolor}
+
+\unexpanded\def\stopbtxrunningstyleandcolor
+ {\endgroup}
+
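
These wrappers restore the style and color saved with \saverunningstyleandcolor earlier in this patch; a sketch of how a cite or list setup might use them to typeset part of an entry in the surrounding running style (the setup name is invented):

\starttyping
\startsetups[btx:cite:runningtitle]
    \startbtxrunningstyleandcolor
        \currentbtxfirst
    \stopbtxrunningstyleandcolor
\stopsetups
\stoptyping
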
+%D Maybe handy:
+
+\unexpanded\def\btxdoifelsematches#1#2#3%
+ {\clf_btxdoifelsematches{#1}{#2}{#3}}
+
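
A usage sketch of this helper (the dataset name, tag and expression are invented; the expression syntax is assumed to be that of the publ-fnd finder):

\starttyping
\btxdoifelsematches{default}{knuth1984}{author:knuth}
  {matched}
  {not matched}
\stoptyping
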
%D Defaults:
\setupbtxrendering
@@ -1801,7 +1927,7 @@
\c!pagestate=\v!stop,
\c!textstate=\v!start,
\c!width=\v!auto,
- \c!separator={\btxsemicolon},
+ \c!separator={\removepunctuation;\space},
\c!distance=1.5\emwidth]
 % Quite some interpunction and labels are the same or at least consistent within
@@ -1860,4 +1986,13 @@
\loadbtxdefinitionfile
[\s!default]
+%D Delayed loading:
+
+\fetchruntimecommand \showbtxdatasetfields \f!publ_tra
+\fetchruntimecommand \showbtxdatasetcompleteness \f!publ_tra
+\fetchruntimecommand \showbtxdatasetauthors \f!publ_tra
+\fetchruntimecommand \showbtxhashedauthors \f!publ_tra
+\fetchruntimecommand \showbtxfields \f!publ_tra
+\fetchruntimecommand \showbtxtables \f!publ_tra
+
\protect
diff --git a/tex/context/base/mkiv/publ-tra.lua b/tex/context/base/mkiv/publ-tra.lua
index b3d40be61..81bbc2fd3 100644
--- a/tex/context/base/mkiv/publ-tra.lua
+++ b/tex/context/base/mkiv/publ-tra.lua
@@ -103,7 +103,7 @@ function tracers.showdatasetcompleteness(settings)
local preamble = { "|lTBw(5em)|lBTp(10em)|plT|" }
- local function identified(tag,category,crossref,index)
+ local function do_identified(tag,category,crossref,index)
ctx_NC() ctx_monobold(index)
ctx_NC() ctx_monobold(category)
ctx_NC() if crossref then
@@ -114,7 +114,7 @@ function tracers.showdatasetcompleteness(settings)
ctx_NC() ctx_NR()
end
- local function required(done,foundfields,key,value,indirect)
+ local function do_required(done,found,key,value,indirect)
ctx_NC() if not done then ctx_monobold("required") end
ctx_NC() context(key)
ctx_NC()
@@ -131,11 +131,11 @@ function tracers.showdatasetcompleteness(settings)
context("\\darkred\\tttf [missing value]")
end
ctx_NC() ctx_NR()
- foundfields[key] = nil
+ found[key] = nil
return done or true
end
- local function optional(done,foundfields,key,value,indirect)
+ local function do_optional(done,found,key,value,indirect)
ctx_NC() if not done then ctx_monobold("optional") end
ctx_NC() context(key)
ctx_NC()
@@ -146,11 +146,11 @@ function tracers.showdatasetcompleteness(settings)
ctx_verbatim(value)
end
ctx_NC() ctx_NR()
- foundfields[key] = nil
+ found[key] = nil
return done or true
end
- local function special(done,key,value)
+ local function do_special(done,key,value)
ctx_NC() if not done then ctx_monobold("special") end
ctx_NC() context(key)
ctx_NC() if value then ctx_verbatim(value) end
@@ -158,7 +158,7 @@ function tracers.showdatasetcompleteness(settings)
return done or true
end
- local function extra(done,key,value)
+ local function do_extra(done,key,value)
ctx_NC() if not done then ctx_monobold("extra") end
ctx_NC() context(key)
ctx_NC() if value then ctx_verbatim(value) end
@@ -168,84 +168,112 @@ function tracers.showdatasetcompleteness(settings)
if next(luadata) then
for tag, entry in sortedhash(luadata) do
- local category = entry.category
- local fields = categories[category]
- local foundfields = { }
+ local category = entry.category
+ local fields = categories[category]
+ local found = { }
+ local flushed = { }
for k, v in next, entry do
- foundfields[k] = true
+ found[k] = true
end
ctx_starttabulate(preamble)
- identified(tag,category,entry.crossref,entry.index)
+ do_identified(tag,category,entry.crossref,entry.index)
ctx_FL()
if fields then
- local requiredfields = fields.required
- local sets = fields.sets or { }
- local done = false
- if requiredfields then
- for i=1,#requiredfields do
- local r = requiredfields[i]
+ local required = fields.required
+ local sets = fields.sets or { }
+ local done = false
+ if required then
+ for i=1,#required do
+ local r = required[i]
local r = sets[r] or r
if type(r) == "table" then
local okay = false
for i=1,#r do
local ri = r[i]
- if rawget(entry,ri) then
- done = required(done,foundfields,ri,entry[ri])
- okay = true
- elseif entry[ri] then
- done = required(done,foundfields,ri,entry[ri],true)
- okay = true
+ if not flushed[ri] then
+ -- already done
+ if rawget(entry,ri) then
+ done = do_required(done,found,ri,entry[ri])
+ okay = true
+ flushed[ri] = true
+ elseif entry[ri] then
+ done = do_required(done,found,ri,entry[ri],true)
+ okay = true
+ flushed[ri] = true
+ end
end
end
- if not okay then
- done = required(done,foundfields,table.concat(r," {\\letterbar} "))
+ if not okay and not flushed[r] then
+ done = do_required(done,found,concat(r," {\\letterbar} "))
+ flushed[r] = true
end
elseif rawget(entry,r) then
- done = required(done,foundfields,r,entry[r])
+ if not flushed[r] then
+ done = do_required(done,found,r,entry[r])
+ flushed[r] = true
+ end
elseif entry[r] then
- done = required(done,foundfields,r,entry[r],true)
+ if not flushed[r] then
+ done = do_required(done,found,r,entry[r],true)
+ flushed[r] = true
+ end
else
- done = required(done,foundfields,r)
+ if not flushed[r] then
+ done = do_required(done,found,r)
+ flushed[r] = true
+ end
end
end
end
- local optionalfields = fields.optional
- local done = false
- if optionalfields then
- for i=1,#optionalfields do
- local o = optionalfields[i]
+ local optional = fields.optional
+ local done = false
+ if optional then
+ for i=1,#optional do
+ local o = optional[i]
local o = sets[o] or o
if type(o) == "table" then
for i=1,#o do
local oi = o[i]
- if rawget(entry,oi) then
- done = optional(done,foundfields,oi,entry[oi])
- elseif entry[oi] then
- done = optional(done,foundfields,oi,entry[oi],true)
+ if not flushed[oi] then
+ if rawget(entry,oi) then
+ done = do_optional(done,found,oi,entry[oi])
+ flushed[oi] = true
+ elseif entry[oi] then
+ done = do_optional(done,found,oi,entry[oi],true)
+ flushed[oi] = true
+ end
end
end
elseif rawget(entry,o) then
- done = optional(done,foundfields,o,entry[o])
+ if not flushed[o] then
+ done = do_optional(done,found,o,entry[o])
+ flushed[o] = true
+ end
elseif entry[o] then
- done = optional(done,foundfields,o,entry[o],true)
+ if not flushed[o] then
+ done = do_optional(done,found,o,entry[o],true)
+ flushed[o] = true
+ end
end
end
end
end
local done = false
- for k, v in sortedhash(foundfields) do
+ for k, v in sortedhash(found) do
if privates[k] then
-- skip
- elseif specials[k] then
- done = special(done,k,entry[k])
+ elseif specials[k] and not flushed[k] then
+ done = do_special(done,k,entry[k])
+ flushed[k] = true
end
end
local done = false
- for k, v in sortedhash(foundfields) do
+ for k, v in sortedhash(found) do
if privates[k] then
-- skip
- elseif not specials[k] then
- done = extra(done,k,entry[k])
+ elseif not specials[k] and not flushed[k] then
+ done = do_extra(done,k,entry[k])
+ flushed[k] = true
end
end
ctx_stoptabulate()
diff --git a/tex/context/base/mkiv/publ-tra.mkiv b/tex/context/base/mkiv/publ-tra.mkiv
index 6ef86ca59..e5f23259b 100644
--- a/tex/context/base/mkiv/publ-tra.mkiv
+++ b/tex/context/base/mkiv/publ-tra.mkiv
@@ -16,18 +16,18 @@
\writestatus{loading}{ConTeXt Publication Support / Tracing}
-\registerctxluafile{publ-tra}{1.001}
+\registerctxluafile{publ-tra}{}
\unprotect
-\unexpanded\def\showbtxdatasetfields {\dosingleempty\publ_show_dataset_fields}
-\unexpanded\def\showbtxdatasetcompleteness{\dosingleempty\publ_show_dataset_completeness}
-\unexpanded\def\showbtxdatasetauthors {\dosingleempty\publ_show_dataset_authors}
-\unexpanded\def\showbtxhashedauthors {\dosingleempty\publ_show_hashed_authors}
-\unexpanded\def\showbtxfields {\dosingleempty\publ_show_fields}
-\unexpanded\def\showbtxtables {\dosingleempty\publ_show_tables}
+\unexpanded\gdef\showbtxdatasetfields {\dosingleempty\publ_show_dataset_fields}
+\unexpanded\gdef\showbtxdatasetcompleteness{\dosingleempty\publ_show_dataset_completeness}
+\unexpanded\gdef\showbtxdatasetauthors {\dosingleempty\publ_show_dataset_authors}
+\unexpanded\gdef\showbtxhashedauthors {\dosingleempty\publ_show_hashed_authors}
+\unexpanded\gdef\showbtxfields {\dosingleempty\publ_show_fields}
+\unexpanded\gdef\showbtxtables {\dosingleempty\publ_show_tables}
-\def\publ_show_dataset_whatever#1[#2]%
+\gdef\publ_show_dataset_whatever#1[#2]%
{\begingroup
\letdummyparameter\c!specification\currentbtxspecification
\setdummyparameter\c!dataset {\currentbtxdataset}%
@@ -46,11 +46,11 @@
}}%
\endgroup}
-\def\publ_show_dataset_fields {\publ_show_dataset_whatever{showbtxdatasetfields}}
-\def\publ_show_dataset_completeness{\publ_show_dataset_whatever{showbtxdatasetcompleteness}}
-\def\publ_show_dataset_authors {\publ_show_dataset_whatever{showbtxdatasetauthors}}
+\gdef\publ_show_dataset_fields {\publ_show_dataset_whatever{showbtxdatasetfields}}
+\gdef\publ_show_dataset_completeness{\publ_show_dataset_whatever{showbtxdatasetcompleteness}}
+\gdef\publ_show_dataset_authors {\publ_show_dataset_whatever{showbtxdatasetauthors}}
-\def\publ_show_fields[#1]%
+\gdef\publ_show_fields[#1]%
{\begingroup
\setdummyparameter\c!rotation{90}%
\doifelseassignment{#1}%
@@ -65,12 +65,12 @@
}}%
\endgroup}
-\def\publ_show_tables[#1]%
+\gdef\publ_show_tables[#1]%
{\begingroup
\ctxcommand{showbtxtables{}}%
\endgroup}
-\def\publ_show_hashed_authors[#1]%
+\gdef\publ_show_hashed_authors[#1]%
{\ctxcommand{showbtxhashedauthors{}}}
\protect
diff --git a/tex/context/base/mkiv/regi-ini.lua b/tex/context/base/mkiv/regi-ini.lua
index be8fa1b1c..7691e8765 100644
--- a/tex/context/base/mkiv/regi-ini.lua
+++ b/tex/context/base/mkiv/regi-ini.lua
@@ -12,6 +12,8 @@ if not modules then modules = { } end modules ['regi-ini'] = {
runtime.
--ldx]]--
+-- Todo: use regi-imp*.lua instead
+
local commands, context = commands, context
local utfchar = utf.char
@@ -134,6 +136,9 @@ end
setmetatableindex(mapping, loadregime)
setmetatableindex(backmapping,loadreverse)
+regimes.mapping = mapping
+regimes.backmapping = backmapping
+
local function fromregime(regime,line)
if line and #line > 0 then
local map = mapping[regime and synonyms[regime] or regime or currentregime]
@@ -251,6 +256,22 @@ end
regimes.push = push
regimes.pop = pop
+function regimes.list()
+ local name = resolvers.findfile("regi-ini.lua") or ""
+ local okay = { }
+ if name ~= "" then
+ local list = dir.glob(file.join(file.dirname(name),"regi-*.lua"))
+ for i=1,#list do
+ local base = file.basename(list[i]) -- compare names, not full paths
+ if base ~= "regi-ini.lua" then
+ okay[#okay+1] = match(base,"regi%-(.-)%.lua")
+ end
+ end
+ table.sort(okay)
+ end
+ return okay
+end
+
if sequencers then
sequencers.prependaction(textlineactions,"system","regimes.process")
diff --git a/tex/context/base/mkiv/regi-ini.mkiv b/tex/context/base/mkiv/regi-ini.mkiv
index 6190393d8..532a4db0b 100644
--- a/tex/context/base/mkiv/regi-ini.mkiv
+++ b/tex/context/base/mkiv/regi-ini.mkiv
@@ -11,7 +11,7 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-\registerctxluafile{regi-ini}{1.001}
+\registerctxluafile{regi-ini}{}
\unprotect
diff --git a/tex/context/base/mkiv/scrn-bar.mkvi b/tex/context/base/mkiv/scrn-bar.mkvi
index efb1a005b..7634398f8 100644
--- a/tex/context/base/mkiv/scrn-bar.mkvi
+++ b/tex/context/base/mkiv/scrn-bar.mkvi
@@ -201,7 +201,7 @@
\setupblackrules[\c!width=\scratchdimenone]%
\setbox\scratchbox\hbox to \d_scrn_bar_width
{\hskip2\emwidth
- \setbox\scratchbox\hbox{\blackrule[\c!color=\interactionbarparameter\c!backgroundcolor]}%
+ \setbox\scratchbox\hpack{\blackrule[\c!color=\interactionbarparameter\c!backgroundcolor]}%
\dorecurse\scratchcounter
{\hss\normalexpanded{\directgotodumbbox{\copy\scratchbox}[page(\the\numexpr\recurselevel*\lastpage/\scratchcounter\relax)]}}%
\hss
@@ -288,7 +288,7 @@
\d_scrn_bar_height \interactionbarparameter\c!height
\d_scrn_bar_depth \interactionbarparameter\c!depth
\scratchdimenone\d_scrn_bar_width
- \noindent\hbox{\scrn_bar_goto_x{\hskip\d_scrn_bar_distance}}%
+ \noindent\hbox{\scrn_bar_goto_x{\hskip\d_scrn_bar_distance}}% \hpack ?
}\fi}
\def\scrn_bar_alternative_e
@@ -302,7 +302,7 @@
\ifdim\scratchdimenone<\d_scrn_bar_distance
\scrn_bar_alternative_f
\else
- \noindent\hbox to \d_scrn_bar_width{\scrn_bar_goto_x{\hss}\unskip}%
+ \noindent\hbox to \d_scrn_bar_width{\scrn_bar_goto_x{\hss}\unskip}% \hpack ?
\fi
}\fi}
diff --git a/tex/context/base/mkiv/scrn-but.mkvi b/tex/context/base/mkiv/scrn-but.mkvi
index 65b0d2c4e..8bbd6eeda 100644
--- a/tex/context/base/mkiv/scrn-but.mkvi
+++ b/tex/context/base/mkiv/scrn-but.mkvi
@@ -16,7 +16,7 @@
\writestatus{loading}{ConTeXt Screen Macros / Buttons}
-\registerctxluafile{scrn-but}{1.001}
+\registerctxluafile{scrn-but}{}
\unprotect
@@ -75,13 +75,17 @@
{\iffirstargument
\setupcurrentbutton[#settings]%
\fi
- \scrn_button_make
- \buttonparameter
- \inheritedbuttonframed
- \letbuttonparameter
- \setbuttonparameter
- {#text}%
- {#action}%
+ % no \dontleavehmode as it will force a vbox to \hsize which then
+ % spoils a tight box
+ \hpack\bgroup
+ \scrn_button_make
+ \buttonparameter
+ \inheritedbuttonframed
+ \letbuttonparameter
+ \setbuttonparameter
+ {#text}%
+ {#action}%
+ \egroup
\endgroup}
\def\scrn_button_direct_ignore[#settings]#text[#destination]%
@@ -550,7 +554,7 @@
\setbox\b_scrn_menu\hbox to \d_scrn_menu_used_width
{\ifx\currentinteractionmenustate\v!empty \else
\interactionmenuparameter\c!left
- \scrn_menu_package_vertical{\strictinteractionmenuparameter\c!menu}%
+ \scrn_menu_package_vertical{\directinteractionmenuparameter\c!menu}%
\interactionmenuparameter\c!right
\fi}%
\edef\currentinteractionmenudistance{\interactionmenuparameter\c!distance}%
@@ -571,7 +575,7 @@
{\ifx\currentinteractionmenustate\v!none \else
\scrn_menu_top_align
\interactionmenuparameter\c!before
- \scrn_menu_package_horizontal{\strictinteractionmenuparameter\c!menu}%
+ \scrn_menu_package_horizontal{\directinteractionmenuparameter\c!menu}%
\interactionmenuparameter\c!after
\scrn_menu_bottom_align
\fi}%
@@ -784,7 +788,7 @@
\edef\currentinteractionmenu{#tag}%
\doif{\interactionmenuparameter\c!state}\v!local
{\letinteractionmenuparameter\c!state\v!start
- \strictinteractionmenuparameter\c!menu}%
+ \directinteractionmenuparameter\c!menu}%
\endgroup}
%D Direct call (todo):
diff --git a/tex/context/base/mkiv/scrn-fld.mkvi b/tex/context/base/mkiv/scrn-fld.mkvi
index d69e7beb9..38c4e1461 100644
--- a/tex/context/base/mkiv/scrn-fld.mkvi
+++ b/tex/context/base/mkiv/scrn-fld.mkvi
@@ -21,7 +21,7 @@
\unprotect
-\registerctxluafile{scrn-fld}{1.001}
+\registerctxluafile{scrn-fld}{}
%D In \MKII\ we had to cheat a bit with setups in order not to run
%D out of memory with thousands of fields, which we happen to need at
diff --git a/tex/context/base/mkiv/scrn-hlp.mkvi b/tex/context/base/mkiv/scrn-hlp.mkvi
index 7466e5687..97c930669 100644
--- a/tex/context/base/mkiv/scrn-hlp.mkvi
+++ b/tex/context/base/mkiv/scrn-hlp.mkvi
@@ -18,7 +18,7 @@
%D upgraded and usage has changed a bit. We also use some
%D \LUA\ magic in order to avoid multiple passes.
-\registerctxluafile{scrn-hlp}{1.001}
+\registerctxluafile{scrn-hlp}{}
%D Using help boils down to plugging the placement macro
%D someplace visible, like:
@@ -114,7 +114,7 @@
\fi}
\def\scrn_help_register#text%
- {\setbox\b_scrn_help_box\hbox{\inheritedhelpframed{#text}}%
+ {\setbox\b_scrn_help_box\hbox{\inheritedhelpframed{#text}}% \hpack ?
\definesymbol
[\currenthelpname]
[\copy\b_scrn_help_box]%
diff --git a/tex/context/base/mkiv/scrn-ini.mkvi b/tex/context/base/mkiv/scrn-ini.mkvi
index 93dde6962..e1be43f37 100644
--- a/tex/context/base/mkiv/scrn-ini.mkvi
+++ b/tex/context/base/mkiv/scrn-ini.mkvi
@@ -15,7 +15,7 @@
\unprotect
-\registerctxluafile{scrn-ini}{1.001}
+\registerctxluafile{scrn-ini}{}
%D There is no interaction at all unless enabled by saying:
%D
diff --git a/tex/context/base/mkiv/scrn-pag.lua b/tex/context/base/mkiv/scrn-pag.lua
index bd65e53d9..4f77a7d24 100644
--- a/tex/context/base/mkiv/scrn-pag.lua
+++ b/tex/context/base/mkiv/scrn-pag.lua
@@ -41,6 +41,7 @@ implement {
{ "artoffset", "dimen" },
{ "trimoffset", "dimen" },
{ "copies", "integer" },
+ { "print", "string" }, -- , tohash
}
}
}
diff --git a/tex/context/base/mkiv/scrn-pag.mkvi b/tex/context/base/mkiv/scrn-pag.mkvi
index b7e056e2b..d96d8d3c1 100644
--- a/tex/context/base/mkiv/scrn-pag.mkvi
+++ b/tex/context/base/mkiv/scrn-pag.mkvi
@@ -15,7 +15,7 @@
\writestatus{loading}{ConTeXt Screen Macros / Pages}
-\registerctxluafile{scrn-pag}{1.001}
+\registerctxluafile{scrn-pag}{}
\unprotect
@@ -228,6 +228,7 @@
bleedoffset \canvasbleedoffset
artoffset \canvasartoffset
copies \numexpr\interactionscreenparameter\c!copies\relax
+ print {\interactionscreenparameter\c!print}%
\relax
%\global\let\scrn_canvas_synchronize_simple \relax
\global\let\scrn_canvas_synchronize_complex\relax}
diff --git a/tex/context/base/mkiv/scrn-ref.mkvi b/tex/context/base/mkiv/scrn-ref.mkvi
index c165e9131..bace13312 100644
--- a/tex/context/base/mkiv/scrn-ref.mkvi
+++ b/tex/context/base/mkiv/scrn-ref.mkvi
@@ -13,7 +13,7 @@
\writestatus{loading}{ConTeXt Screen Macros / References}
-\registerctxluafile{scrn-ref}{1.001}
+\registerctxluafile{scrn-ref}{}
\unprotect
@@ -29,7 +29,7 @@
\to \everysetupinteraction
\def\scrn_reference_enable_page_destinations % no reset
- {\clf_setinnerreferencemethod{\interactionparameter\c!page}}
+ {\clf_setreferencelinkmethod{\interactionparameter\c!page}}
\setupinteraction % start fit page and reset form
[\c!page=\v!no,
diff --git a/tex/context/base/mkiv/scrn-wid.lua b/tex/context/base/mkiv/scrn-wid.lua
index 3ce904349..636d547da 100644
--- a/tex/context/base/mkiv/scrn-wid.lua
+++ b/tex/context/base/mkiv/scrn-wid.lua
@@ -6,6 +6,23 @@ if not modules then modules = { } end modules ['scrn-wid'] = {
license = "see context related readme files"
}
+-- Support for interactive features is handled elsewhere. Now that is some mess! In
+-- the early days one had media features like sound and movies that were easy to set
+-- up. Then at some point renditions came around which were more work and somewhat
+-- unreliable. Now, both mechanisms are obsolete and replaced by rich media which is
+-- a huge mess and has no real concept of what media are supported. There's flash
+-- cq. shockwave (basically obsolete too), and for instance mp4 needs to be handled
+-- by a swf player, and there's u3d which somehow has its own specification. One
+-- would expect native support for video and audio to be on par with browsers but
+-- alas ... pdf has lost the battle with html here due to a few decades of
+-- instability and changing support. So far we could keep up and even were ahead,
+-- but I wonder if we should keep doing that. As we can't trust support for media,
+-- we are better off not embedding anything and just using a hyperlink to an external
+-- resource. No sane person will create media rich pdfs as long as it's that
+-- unpredictable. Just look at the specification and viewer preferences and decide.
+
+local next = next
+
interactions = interactions or { }
local interactions = interactions
@@ -223,7 +240,7 @@ function soundclips.insert(tag)
end
implement {
- name = registersoundclip,
+ name = "registersoundclip",
actions = soundclips.register,
arguments = {
{
@@ -234,7 +251,7 @@ implement {
}
implement {
- name = insertsoundclip,
+ name = "insertsoundclip",
actions = soundclips.insert,
arguments = {
{
diff --git a/tex/context/base/mkiv/scrn-wid.mkvi b/tex/context/base/mkiv/scrn-wid.mkvi
index f19da57f7..c74125c72 100644
--- a/tex/context/base/mkiv/scrn-wid.mkvi
+++ b/tex/context/base/mkiv/scrn-wid.mkvi
@@ -13,7 +13,7 @@
\writestatus{loading}{ConTeXt Interaction Macros / Widgets}
-\registerctxluafile{scrn-wid}{1.001}
+\registerctxluafile{scrn-wid}{}
% todo: expansion in comments (default is expanded)
% todo: check renderings ... acrobat crashes too easily on missing one
@@ -86,7 +86,7 @@
%\c!method=, % \v!hidden = not in menu
%\c!buffer=
\c!symbol=,
- \c!distance=1em,
+ \c!distance=\emwidth,
\c!width=\v!fit,
\c!height=\v!fit,
\c!depth=\v!fit,
@@ -223,6 +223,12 @@
tag {\currentattachment}%
registered {\currentattachmentregistered}%
method {\v!hidden}%
+ title {\attachmentparameter\c!title}%
+ subtitle {\attachmentparameter\c!subtitle}%
+ author {\attachmentparameter\c!author}%
+ file {\attachmentparameter\c!file}%
+ name {\attachmentparameter\c!name}%
+ buffer {\attachmentparameter\c!buffer}%
\relax}
\def\scrn_attachment_place
@@ -243,7 +249,7 @@
{\global\setbox\b_scrn_attachment_collect\hbox\bgroup
\ifvoid\b_scrn_attachment_collect\else
\box\b_scrn_attachment_collect
- \hskip\attachmentparameter\c!distance
+ \hskip\attachmentparameter\c!distance\relax
\fi
#content%
\egroup}
@@ -366,7 +372,7 @@
\setupcomment
[\c!state=\v!start,
- \c!distance=1em,
+ \c!distance=\emwidth,
\c!color=\interactionparameter\c!color,
\c!space=\v!no,
\c!symbol=,
@@ -499,7 +505,7 @@
{\global\setbox\b_scrn_comment_collect\hbox\bgroup
\ifvoid\b_scrn_comment_collect\else
\box\b_scrn_comment_collect
- \hskip\commentparameter\c!distance
+ \hskip\commentparameter\c!distance\relax
\fi
\box\b_scrn_comment_link
\egroup}
@@ -592,7 +598,7 @@
option {#option}%
\relax
\let\objectoffset\zeropoint
- \setobject{IRO}{#tag}\hbox{\box\nextbox}%
+ \setobject{IRO}{#tag}\hpack{\box\nextbox}%
\egroup}%
\hbox}
@@ -649,7 +655,8 @@
% \handlereferenceactions{\renderingwindowparameter\c!closepageaction}\dosetuprenderingclosepageaction
\letrenderingwindowparameter\c!offset\v!overlay
\inheritedrenderingwindowframed
- {\vfill
+ {\vfilll
+ \dontleavehmode
\clf_insertrenderingwindow
label {\currentrendering}%
width \d_scrn_rendering_width
@@ -696,7 +703,7 @@
% \iflocation
% \edef\currentlinkedlist{#1}%
% \ifcsname\??lk\currentlinkedlist\s!parent\endcsname
-% \hskip\linkedlistparameter\c!distance
+% \hskip\linkedlistparameter\c!distance\relax
% \clf_addlinklistelement{\currentlinkedlist}%
% \expanded{\ctxlatecommand{enhancelinkedlist("\currentlinkedlist",\currentlink)}}% can also be done at the lua end
% \dogotosomepage {\??lk\currentlinkedlist}\gotobegincharacter \firstlink
diff --git a/tex/context/base/mkiv/scrp-cjk.lua b/tex/context/base/mkiv/scrp-cjk.lua
index d2ec201ca..b31dc335a 100644
--- a/tex/context/base/mkiv/scrp-cjk.lua
+++ b/tex/context/base/mkiv/scrp-cjk.lua
@@ -32,7 +32,6 @@ local getid = nuts.getid
local getattr = nuts.getattr
local getsubtype = nuts.getsubtype
local getwidth = nuts.getwidth
-local getfield = nuts.getfield
local setchar = nuts.setchar
diff --git a/tex/context/base/mkiv/scrp-ini.lua b/tex/context/base/mkiv/scrp-ini.lua
index 02b289137..d5cad643f 100644
--- a/tex/context/base/mkiv/scrp-ini.lua
+++ b/tex/context/base/mkiv/scrp-ini.lua
@@ -9,7 +9,7 @@ if not modules then modules = { } end modules ['scrp-ini'] = {
-- We need to rewrite this a bit ... rather old code ... will be done when japanese
-- is finished.
-local attributes, nodes, node = attributes, nodes, node
+local tonumber, next = tonumber, next
local trace_analyzing = false trackers.register("scripts.analyzing", function(v) trace_analyzing = v end)
local trace_injections = false trackers.register("scripts.injections", function(v) trace_injections = v end)
@@ -22,6 +22,10 @@ local report_splitting = logs.reporter("scripts","splitting")
local utfbyte, utfsplit = utf.byte, utf.split
local gmatch = string.gmatch
+local attributes = attributes
+local nodes = nodes
+local context = context
+
local texsetattribute = tex.setattribute
local nodecodes = nodes.nodecodes
diff --git a/tex/context/base/mkiv/scrp-ini.mkiv b/tex/context/base/mkiv/scrp-ini.mkiv
index cd060c02b..8f28f505d 100644
--- a/tex/context/base/mkiv/scrp-ini.mkiv
+++ b/tex/context/base/mkiv/scrp-ini.mkiv
@@ -13,10 +13,10 @@
% here we collect code from other places (was organized differently)
-\registerctxluafile{scrp-ini}{1.001}
-\registerctxluafile{scrp-cjk}{1.001}
-\registerctxluafile{scrp-eth}{1.001}
-\registerctxluafile{scrp-tha}{1.001}
+\registerctxluafile{scrp-ini}{}
+\registerctxluafile{scrp-cjk}{}
+\registerctxluafile{scrp-eth}{}
+\registerctxluafile{scrp-tha}{}
\definesystemattribute[scriptinjection][public]
\definesystemattribute[scriptsplitting][public]
diff --git a/tex/context/base/mkiv/sort-ini.mkiv b/tex/context/base/mkiv/sort-ini.mkiv
index b7b490255..d9f9628d5 100644
--- a/tex/context/base/mkiv/sort-ini.mkiv
+++ b/tex/context/base/mkiv/sort-ini.mkiv
@@ -13,7 +13,7 @@
\writestatus{loading}{ConTeXt Sorting Macros / Initialization}
-\registerctxluafile{sort-ini}{1.001}
-\registerctxluafile{sort-lan}{1.001}
+\registerctxluafile{sort-ini}{}
+\registerctxluafile{sort-lan}{}
\endinput
diff --git a/tex/context/base/mkiv/sort-lan.lua b/tex/context/base/mkiv/sort-lan.lua
index 21aabf3eb..1aa173d1b 100644
--- a/tex/context/base/mkiv/sort-lan.lua
+++ b/tex/context/base/mkiv/sort-lan.lua
@@ -710,11 +710,11 @@ definitions["it"] = {
entries = {
["a"] = "a", ["á"] = "a", ["b"] = "b", ["c"] = "c", ["d"] = "d",
["e"] = "e", ["é"] = "e", ["è"] = "e", ["f"] = "f", ["g"] = "g",
- ["h"] = "h", ["i"] = "i", ["í"] = "i", ["ì"] = "i", ["j"] = "i",
+ ["h"] = "h", ["i"] = "i", ["í"] = "i", ["ì"] = "i", ["j"] = "j",
["k"] = "k", ["l"] = "l", ["m"] = "m", ["n"] = "n", ["o"] = "o",
["ó"] = "o", ["ò"] = "o", ["p"] = "p", ["q"] = "q", ["r"] = "r",
["s"] = "s", ["t"] = "t", ["u"] = "u", ["ú"] = "u", ["ù"] = "u",
- ["v"] = "u", ["w"] = "w", ["x"] = "x", ["y"] = "y", ["z"] = "z",
+ ["v"] = "v", ["w"] = "w", ["x"] = "x", ["y"] = "y", ["z"] = "z",
},
orders = {
"a", "á", "b", "c", "d", "e", "é", "è", "f", "g",
diff --git a/tex/context/base/mkiv/spac-adj.mkiv b/tex/context/base/mkiv/spac-adj.mkiv
index 936e00624..d29d15c17 100644
--- a/tex/context/base/mkiv/spac-adj.mkiv
+++ b/tex/context/base/mkiv/spac-adj.mkiv
@@ -18,7 +18,7 @@
% Very nasty but needed for margin stuff inside colored
% paragraphs. Obsolete for while .
-\registerctxluafile{spac-adj}{1.001}
+\registerctxluafile{spac-adj}{}
\definesystemattribute [graphicvadjust] [public]
diff --git a/tex/context/base/mkiv/spac-ali.lua b/tex/context/base/mkiv/spac-ali.lua
index bc77090cf..640478d34 100644
--- a/tex/context/base/mkiv/spac-ali.lua
+++ b/tex/context/base/mkiv/spac-ali.lua
@@ -18,7 +18,6 @@ local nodepool = nuts.pool
local tonode = nuts.tonode
local tonut = nuts.tonut
-local getfield = nuts.getfield
local getnext = nuts.getnext
local getprev = nuts.getprev
local getid = nuts.getid
diff --git a/tex/context/base/mkiv/spac-ali.mkiv b/tex/context/base/mkiv/spac-ali.mkiv
index af02f76ae..fbcf45caa 100644
--- a/tex/context/base/mkiv/spac-ali.mkiv
+++ b/tex/context/base/mkiv/spac-ali.mkiv
@@ -27,7 +27,7 @@
%
% but that also means myoption gets frozen due to caching.
-\registerctxluafile{spac-ali}{1.001}
+\registerctxluafile{spac-ali}{}
\definesystemattribute[realign] [public] % might be combined with the next one
\definesystemattribute[alignstate][public] % will make a single attributes for several states
@@ -61,17 +61,15 @@
\newtoks\everyresetalign % todo
-% We will not use bodydir and pagedir so we disable them. That way we get
-% normal hyperlink support.
+%D We will not use bodydir and pagedir so we disable them. That way we get
+%D normal hyperlink support. We went back on it (too hard to fake \type {\the}).
-\let\spac_directions_normal_body_dir\normalbodydir
-\let\spac_directions_normal_page_dir\normalpagedir
+\unexpanded\def\syst_fatal_dir_error#1%
+ {\writestatus{fatal error}{\string#1\space is forbidden}%
+ \wait}
-\unexpanded\def\bodydir{\afterassignment\spac_directions_bodydir_indeed\spac_directions_normal_body_dir} \let\normalbodydir\bodydir
-\unexpanded\def\pagedir{\afterassignment\spac_directions_pagedir_indeed\spac_directions_normal_page_dir} \let\normalpagedir\pagedir
-
-\def\spac_directions_bodydir_indeed{\spac_directions_normal_body_dir TLT\relax}
-\def\spac_directions_pagedir_indeed{\spac_directions_normal_page_dir TLT\relax}
+\def\pagedir {\syst_fatal_dir_error\pagedir} \let\normalpagedir\pagedir
+\def\bodydir {\syst_fatal_dir_error\bodydir} \let\normalbodydir\bodydir
% This will become a more advanced layout controller soon:
@@ -105,59 +103,45 @@
\textdir TRT\relax
\pardir TRT\relax}
-\newconditional\c_spac_auto_line_dir \settrue\c_spac_auto_line_dir
-
-\ifdefined\linedir
-
- \unexpanded\def\spac_directions_lefttoright_hmode
- {\ifconditional\c_spac_auto_line_dir\linedir\else\textdir\fi TLT\relax % linedir keeps subtype
- \setfalse\inlinerighttoleft}
-
- \unexpanded\def\spac_directions_righttoleft_hmode
- {\ifconditional\c_spac_auto_line_dir\linedir\else\textdir\fi TRT\relax % linedir keeps subtype
- \setfalse\inlinelefttoright}
-
-\else % keep this as reference
-
- \unexpanded\def\spac_directions_lefttoright_hmode
- {\settrue\inlinelefttoright
- \textdir TLT\relax}
-
- \unexpanded\def\spac_directions_righttoleft_hmode
- {\textdir TRT\relax
- \setfalse\inlinelefttoright}
-
- \unexpanded\def\spac_directions_lefttoright_hmode
- {\ifconditional\c_spac_auto_line_dir
- \ifzeropt\lastskip
- \textdir TLT\relax
- \else
- \scratchskip\lastskip
- \unskip
- \textdir TLT\relax
- \hskip\scratchskip
- \fi
- \else
- \textdir TLT\relax
- \fi
- \setfalse\inlinerighttoleft}
-
- \unexpanded\def\spac_directions_righttoleft_hmode
- {\ifconditional\c_spac_auto_line_dir
- \ifzeropt\lastskip
- \textdir TRT\relax
- \else
- \scratchskip\lastskip
- \unskip
- \textdir TRT\relax
- \hskip\scratchskip
- \fi
- \else
- \textdir TRT\relax
- \fi
- \setfalse\inlinelefttoright}
+% % keep this as reference
+%
+% \unexpanded\def\spac_directions_lefttoright_hmode
+% {\textdir TLT\relax
+% \settrue\inlinelefttoright}
+%
+% \unexpanded\def\spac_directions_righttoleft_hmode
+% {\textdir TRT\relax
+% \setfalse\inlinelefttoright}
+%
+% \unexpanded\def\spac_directions_lefttoright_hmode
+% {\ifzeropt\lastskip
+% \textdir TLT\relax
+% \else
+% \scratchskip\lastskip
+% \unskip
+% \textdir TLT\relax
+% \hskip\scratchskip
+% \fi
+% \settrue\inlinelefttoright}
+%
+% \unexpanded\def\spac_directions_righttoleft_hmode
+% {\ifzeropt\lastskip
+% \textdir TRT\relax
+% \else
+% \scratchskip\lastskip
+% \unskip
+% \textdir TRT\relax
+% \hskip\scratchskip
+% \fi
+% \setfalse\inlinelefttoright}
-\fi
+\unexpanded\def\spac_directions_lefttoright_hmode
+ {\linedir TLT\relax % linedir keeps subtype of skip
+ \settrue\inlinelefttoright}
+
+\unexpanded\def\spac_directions_righttoleft_hmode
+ {\linedir TRT\relax % linedir keeps subtype of skip
+ \setfalse\inlinelefttoright}
% \def\currentdirectionparameters
% {\ifconditional\inlinelefttoright \else
@@ -194,6 +178,38 @@
\unexpanded\def\synchronizeinlinedirection
{\textdir T\ifconditional\inlinelefttoright L\else R\fi T\relax}
+\unexpanded\def\checkedlefttoright
+ {\ifvmode
+ \spac_directions_lefttoright_vmode
+ \else
+ \spac_directions_lefttoright_hmode_checked
+ \fi}
+
+\unexpanded\def\checkedrighttoleft
+ {\ifvmode
+ \spac_directions_righttoleft_vmode
+ \else
+ \spac_directions_righttoleft_hmode_checked
+ \fi}
+
+\unexpanded\def\spac_directions_lefttoright_hmode_checked
+ {\ifconditional\inlinelefttoright\else
+ \lefttoright
+ \fi}
+
+\unexpanded\def\spac_directions_righttoleft_hmode_checked
+ {\ifconditional\inlinelefttoright
+ \righttoleft
+ \fi}
+
+\installcorenamespace{bidi}
+
+\letvalue{\??bidi\v!left }\checkedlefttoright \letvalue{\??bidi l2r}\checkedlefttoright
+\letvalue{\??bidi\v!right}\checkedrighttoleft \letvalue{\??bidi r2l}\checkedrighttoleft
+
+\unexpanded\def\usebidiparameter#1%
+ {\begincsname\??bidi#1\c!bidi\endcsname}
+
% maybe some day:
%
% \newcount\postdirpenalty % \zerocount
@@ -791,10 +807,20 @@
\setvalue{\??aligncommand\v!extremestretch }{\toksapp\t_spac_align_collected{\spac_align_set_extreme_stretch}}
\setvalue{\??aligncommand \v!final}{\c_spac_align_state_par_fill\plusone}
+\setvalue{\??aligncommand1*\v!final}{\c_spac_align_state_par_fill\plusone}
\setvalue{\??aligncommand2*\v!final}{\c_spac_align_state_par_fill\plustwo} % hardcoded multiplier
\setvalue{\??aligncommand3*\v!final}{\c_spac_align_state_par_fill\plusthree}
\setvalue{\??aligncommand4*\v!final}{\c_spac_align_state_par_fill\plusfour}
+% a one-shot (only useful in e.g. framed, also needs tolerance and stretch)
+
+\setvalue{\??aligncommand \v!more}{\toksapp\t_spac_align_collected{\looseness\plusone}}
+\setvalue{\??aligncommand1*\v!more}{\toksapp\t_spac_align_collected{\looseness\plusone}}
+\setvalue{\??aligncommand2*\v!more}{\toksapp\t_spac_align_collected{\looseness\plustwo}}
+
+% \setvalue{\??aligncommand ... }{\toksapp\t_spac_align_collected{\nopenalties}}
+% \setvalue{\??aligncommand ... }{\toksapp\t_spac_align_collected{\setdefaultpenalties}}
+
\definehspace [\v!final] [\emspaceamount]
\def\spac_align_flush_parfill
@@ -963,13 +989,14 @@
\dontleavehmode
\begingroup
\inleftmargin{\vsmash{\infofont\framed[\c!align=\v!right]{\thepardata}}}%
- \endgroup}
+ \endgroup
+ \let\showpardata\relax}
\unexpanded\def\startshowpardata
{\begingroup
\showstruts
\tracingparagraphs\maxdimen
- \appendtoksonce\showpardata\let\showpardata\relax\to\everypar}
+ \appendtoksonce\showpardata\to\everypar}
\unexpanded\def\stopshowpardata
{\endgraf
diff --git a/tex/context/base/mkiv/spac-chr.lua b/tex/context/base/mkiv/spac-chr.lua
index fe402ed87..0fa639f92 100644
--- a/tex/context/base/mkiv/spac-chr.lua
+++ b/tex/context/base/mkiv/spac-chr.lua
@@ -30,16 +30,17 @@ local nuts = nodes.nuts
local tonode = nuts.tonode
local tonut = nuts.tonut
-local getfield = nuts.getfield
-local setfield = nuts.setfield
local getnext = nuts.getnext
local getprev = nuts.getprev
local getattr = nuts.getattr
local setattr = nuts.setattr
+local getlang = nuts.getlang
+local setchar = nuts.setchar
local setattrlist = nuts.setattrlist
local getfont = nuts.getfont
local getchar = nuts.getchar
local setsubtype = nuts.setsubtype
+local setdisc = nuts.setdisc
local isglyph = nuts.isglyph
local setcolor = nodes.tracers.colors.set
@@ -48,6 +49,9 @@ local insert_node_before = nuts.insert_before
local insert_node_after = nuts.insert_after
local remove_node = nuts.remove
local traverse_id = nuts.traverse_id
+local traverse_char = nuts.traverse_char
+
+local copy_node = nuts.copy
local tasks = nodes.tasks
@@ -56,12 +60,15 @@ local new_penalty = nodepool.penalty
local new_glue = nodepool.glue
local new_kern = nodepool.kern
local new_rule = nodepool.rule
+local new_disc = nodepool.disc
local nodecodes = nodes.nodecodes
local skipcodes = nodes.skipcodes
-local glyph_code = nodecodes.glyph
+local disccodes = nodes.disccodes
-local space_skip_code = skipcodes["spaceskip"]
+local glyph_code = nodecodes.glyph
+local space_skip_code = skipcodes.spaceskip
+local explicit_code = disccodes.explicit
local chardata = characters.data
local is_punctuation = characters.is_punctuation
@@ -136,6 +143,7 @@ local function nbsp(head,current)
setsubtype(current,space_skip_code)
else
head, current = inject_nobreak_space(0x00A0,head,current,para.space,para.spacestretch,para.spaceshrink)
+ setsubtype(current,space_skip_code)
end
return head, current
end
@@ -148,16 +156,37 @@ function characters.replacenbsp(head,original)
return head, current
end
+-- function characters.replacenbspaces(head)
+-- for current in traverse_id(glyph_code,head) do
+-- if getchar(current) == 0x00A0 then
+-- local h = nbsp(head,current)
+-- if h then
+-- head = remove_node(h,current,true)
+-- end
+-- end
+-- end
+-- return head
+-- end
+
function characters.replacenbspaces(head)
- for current in traverse_id(glyph_code,head) do
+ local head = tonut(head)
+ local wipe = false
+ for current in traverse_id(glyph_code,head) do -- can be anytime so no traverse_char
if getchar(current) == 0x00A0 then
+ if wipe then
+ head = remove_node(head,wipe,true)
+ wipe = false
+ end
local h = nbsp(head,current)
if h then
- head = remove_node(h,current,true)
+ wipe = current
end
end
end
- return head
+ if wipe then
+ head = remove_node(head,wipe,true)
+ end
+ return tonode(head)
end
-- This initialization might move someplace else if we need more of it. The problem is that
@@ -219,6 +248,10 @@ local methods = {
return nbsp(head,current)
end,
+ [0x00AD] = function(head,current) -- softhyphen
+ return insert_node_after(head,current,languages.explicithyphen(current))
+ end,
+
[0x2000] = function(head,current) -- enquad
return inject_quad_space(0x2000,head,current,1/2)
end,
@@ -272,9 +305,11 @@ local methods = {
end,
[0x205F] = function(head,current) -- math thinspace
- return inject_nobreak_space(0x205F,head,current,fontparameters[getfont(current)].space/8)
+ return inject_nobreak_space(0x205F,head,current,4*fontquads[getfont(current)]/18)
end,
+ -- The next one is also a bom so maybe only when we have glyphs around it
+
-- [0xFEFF] = function(head,current) -- zerowidthnobreakspace
-- return head, current
-- end,
@@ -283,29 +318,64 @@ local methods = {
characters.methods = methods
-function characters.handler(head) -- todo: use traverse_id
- head = tonut(head)
- local current = head
- local done = false
- while current do
- local char, id = isglyph(current)
+-- function characters.handler(head) -- todo: use traverse_id
+-- head = tonut(head)
+-- local current = head
+-- local done = false
+-- while current do
+-- local char, id = isglyph(current)
+-- if char then
+-- local next = getnext(current)
+-- local method = methods[char]
+-- if method then
+-- if trace_characters then
+-- report_characters("replacing character %C, description %a",char,lower(chardata[char].description))
+-- end
+-- local h = method(head,current)
+-- if h then
+-- head = remove_node(h,current,true)
+-- end
+-- done = true
+-- end
+-- current = next
+-- else
+-- current = getnext(current)
+-- end
+-- end
+-- return tonode(head), done
+-- end
+
+-- for current, char, font in traverse_char_data(head) will save 0.015 on a 300 page doc
+
+-- this also works ok in math as we run over glyphs and these stay glyphs ... not sure
+-- about scripts and such but that is not important anyway ... some day we can consider
+-- special definitions in math
+
+function characters.handler(head)
+ local head = tonut(head)
+ local wipe = false
+ local done = false
+ for current in traverse_char(head) do
+ local char = getchar(current)
if char then
- local next = getnext(current)
local method = methods[char]
if method then
+ if wipe then
+ head = remove_node(head,wipe,true)
+ wipe = false
+ end
if trace_characters then
report_characters("replacing character %C, description %a",char,lower(chardata[char].description))
end
local h = method(head,current)
if h then
- head = remove_node(h,current,true)
+ wipe = current
end
done = true
end
- current = next
- else
- current = getnext(current)
end
end
+ if wipe then
+ head = remove_node(head,wipe,true)
+ end
return tonode(head), done
end
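
Both rewritten loops above (characters.replacenbspaces and characters.handler) rely on the same deferred-removal idiom: a glyph that has just been handled is not removed while the traversal still points at it; it is remembered in wipe and only freed on the next hit, or after the loop. A minimal runnable sketch of that idiom on a hypothetical doubly linked list (unlink and wants_replacement stand in for nuts.remove and the per-character methods table):

    local function unlink(head,n)
        if n.prev then n.prev.next = n.next else head = n.next end
        if n.next then n.next.prev = n.prev end
        return head
    end

    local function handler(head,wants_replacement)
        local wipe    = false
        local current = head
        while current do
            if wants_replacement(current) then
                if wipe then
                    head = unlink(head,wipe) -- safe: we are already past it
                    wipe = false
                end
                wipe = current -- defer: the iterator still points here
            end
            current = current.next
        end
        if wipe then
            head = unlink(head,wipe) -- flush the last pending removal
        end
        return head
    end
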
diff --git a/tex/context/base/mkiv/spac-chr.mkiv b/tex/context/base/mkiv/spac-chr.mkiv
index c4aadd49f..ed2cb47f9 100644
--- a/tex/context/base/mkiv/spac-chr.mkiv
+++ b/tex/context/base/mkiv/spac-chr.mkiv
@@ -15,7 +15,7 @@
\unprotect
-\registerctxluafile{spac-chr}{1.001}
+\registerctxluafile{spac-chr}{}
\definesystemattribute[characters][public]
@@ -56,8 +56,13 @@
% \nobreakspace {\begingroup\setalignstateattribute\normalUchar"00A0\endgroup} % 1 = left
\edef\nobreakspace {\normalUchar"00A0} % space
-\edef\ideographicspace {\normalUchar"2000} % quad/2
-\edef\ideographichalffillspace{\normalUchar"2001} % quad
+\edef\softhyphen {\normalUchar"00AD} % softhyphen
+\edef\enquad {\normalUchar"2000} % quad/2
+\edef\emquad {\normalUchar"2001} % quad
+
+\let\ideographicspace \enquad
+\let\ideographichalffillspace \emquad
+
\edef\twoperemspace {\normalUchar"2002} % quad/2
% % "2003 % quad == \quad == \hskip\emwidth
\edef\threeperemspace {\normalUchar"2004} % quad/3
@@ -79,9 +84,9 @@
\udef\zerowidthnobreakspace {\penalty\plustenthousand\kern\zeropoint}
\udef\fiveperemspace {\hskip\dimexpr\emwidth/5\relax}
-\let\zwnj\zerowidthnonjoiner
-\let\zwj \zerowidthjoiner
-\let\nbsp\nobreakspace
+\let\zwnj \zerowidthnonjoiner
+\let\zwj \zerowidthjoiner
+\let\nbsp \nobreakspace
\chardef\optionalspace"1F % will be space unless before punctuation
diff --git a/tex/context/base/mkiv/spac-grd.mkiv b/tex/context/base/mkiv/spac-grd.mkiv
index 899b6e890..73c6e0dd1 100644
--- a/tex/context/base/mkiv/spac-grd.mkiv
+++ b/tex/context/base/mkiv/spac-grd.mkiv
@@ -303,18 +303,7 @@
% This is new (and experimental) and might replace some of the above. beware it doesn't always work
% out well, e.g. when used grouped and such (e.g. before display math doesn't work out well).
-\unexpanded\def\spac_fake_next_line_new
- {\par
- \begingroup
- \reseteverypar
- \dontleavehmode\hpack{\strut}\par
- \clf_fakenextstrutline
- \ifdim\pagetotal>\lineheight
- \pagetotal\dimexpr\pagetotal-\lineheight\relax
- \fi
- \endgroup}
-
-% \unexpanded\def\spac_fake_next_line_old
+% \unexpanded\def\fakenextstrutline
% {\par
% \begingroup
% \reseteverypar
@@ -323,7 +312,24 @@
% \vskip-\struttotal
% \endgroup}
-%let\fakenextstrutline\spac_fake_next_line_old
-\let\fakenextstrutline\spac_fake_next_line_new
+% \unexpanded\def\fakenextstrutline
+% {\par
+% \begingroup
+% \reseteverypar
+% \dontleavehmode\hpack{\strut}\par
+% \clf_fakenextstrutline
+% \ifdim\pagetotal>\lineheight
+% \pagetotal\dimexpr\pagetotal-\lineheight\relax
+% \fi
+% \endgroup}
+
+% \unexpanded\def\fakenextstrutline
+% {\par
+% \begingroup
+% \reseteverypar
+% \forgetall
+% \dontleavehmode\hpack{\strut}\par
+% \clf_removelastline
+% \endgroup}
\protect \endinput
diff --git a/tex/context/base/mkiv/spac-hor.mkiv b/tex/context/base/mkiv/spac-hor.mkiv
index 405abcb5d..ce747a202 100644
--- a/tex/context/base/mkiv/spac-hor.mkiv
+++ b/tex/context/base/mkiv/spac-hor.mkiv
@@ -15,7 +15,7 @@
\unprotect
-\registerctxluafile{spac-hor}{1.001}
+\registerctxluafile{spac-hor}{}
\let \parfillrightskip \parfillskip
\newskip\parfillleftskip
@@ -381,9 +381,9 @@
%D \macros
%D {frenchspacing,nonfrenchspacing}
%D
-%D Somehow \type{\frenchspacing} can lead to hyphenation between
-%D dashes so we now have \type {\newfrenchspacing} (moved from
-%D \type {syst-chr}).
+%D Somehow \type{\frenchspacing} can lead to hyphenation between dashes so we now
+%D have \type {\newfrenchspacing} (moved from \type {syst-chr}). Maybe it's not
+%D needed any more.
%D Hm ... todo:
@@ -515,7 +515,7 @@
\fi
\ifdefined\softhyphen \else
- \let\softhyphen\-
+ \let\softhyphen\explicitdiscretionary
\fi
\cldcontext{"\string\\unexpanded\string\\def\string\\\string\n{\string\\space}"}
@@ -680,6 +680,7 @@
\global \s_spac_narrower_middle \zeropoint
\global \s_spac_narrower_right \zeropoint\relax}
\installnarrowermethod \v!none {}
+\installnarrowermethod \v!reverse {} % never seen
\unexpanded\def\spac_narrower_start#1%
{\begingroup
@@ -693,19 +694,45 @@
\spac_narrower_start_apply{\narrowerparameter\v!default}%
\fi}
+\newskip\s_spac_narrower_left_last
+\newskip\s_spac_narrower_right_last
+\newconditional\s_spac_narrower_last_swap
+
\def\spac_narrower_start_apply#1%
{\narrowerparameter\c!before
\global\s_spac_narrower_left \zeropoint
\global\s_spac_narrower_right \zeropoint
\global\s_spac_narrower_middle\zeropoint
- \normalexpanded{\processcommalistwithparameters[#1]}\spac_narrower_initialize
- \advance\leftskip \dimexpr\s_spac_narrower_left +\s_spac_narrower_middle\relax
- \advance\rightskip\dimexpr\s_spac_narrower_right+\s_spac_narrower_middle\relax
+ \edef\askednarrower{#1}
+ \ifx\askednarrower\v!reverse
+ \ifconditional\s_spac_narrower_last_swap
+ \leftskip \s_spac_narrower_right_last
+ \rightskip\s_spac_narrower_left_last
+ \setfalse\s_spac_narrower_last_swap
+ \else
+ \leftskip \s_spac_narrower_left_last
+ \rightskip\s_spac_narrower_right_last
+ \settrue\s_spac_narrower_last_swap
+ \fi
+ \else
+ \normalexpanded{\processcommalistwithparameters[\askednarrower]}\spac_narrower_initialize
+ \advance\leftskip \dimexpr\s_spac_narrower_left +\s_spac_narrower_middle\relax
+ \advance\rightskip\dimexpr\s_spac_narrower_right+\s_spac_narrower_middle\relax
+ \fi
\seteffectivehsize}
\unexpanded\def\spac_narrower_stop
{\narrowerparameter\c!after
- \endgroup}
+ \normalexpanded{%
+ \endgroup
+ \s_spac_narrower_left_last \the\leftskip \relax
+ \s_spac_narrower_right_last\the\rightskip\relax
+ \ifconditional\s_spac_narrower_last_swap
+ \setfalse\s_spac_narrower_last_swap
+ \else
+ \settrue\s_spac_narrower_last_swap
+ \fi
+ }}
\unexpanded\def\startnarrower
{\dosingleempty\spac_narrower_start_basic}
@@ -822,14 +849,16 @@
\forgetbothskips
\to \everyforgetall
-\unexpanded\def\forgetparskip
- {\s_spac_whitespace_parskip\zeropoint
- \parskip\zeropoint
- \let\v_spac_whitespace_current\v!none}
-
-\appendtoks
- \forgetparskip
-\to \everyforgetall
+% in spac-ver.mkiv
+%
+% \unexpanded\def\forgetparskip
+% {\s_spac_whitespace_parskip\zeropoint
+% \parskip\zeropoint
+% \let\v_spac_whitespace_current\v!none}
+%
+% \appendtoks
+% \forgetparskip
+% \to \everyforgetall
%D Tolerance (can also be set with align):
@@ -1013,19 +1042,22 @@
%D adapted to \type {\hspace}:
\unexpanded\def\textormathspace #1#2#3{\ifmmode\mskip#1#2\else\kern #1\hspaceamount\empty{#3}\fi\relax}
+\unexpanded\def\textormathspacecommand #1#2#3{\ifmmode\mskip#1#2\else#3\fi\relax}
\unexpanded\def\breakabletextormathspace#1#2#3{\ifmmode\mskip#1#2\else\hskip#1\hspaceamount\empty{#3}\fi\relax}
\newmuskip\hairmuskip \hairmuskip=.15mu
\unexpanded\def\hairspace {\textormathspace+\hairmuskip{.5}}
\unexpanded\def\thinspace {\textormathspace+\thinmuskip 1}
-\unexpanded\def\medspace {\textormathspace+\medmuskip 2}
+%unexpanded\def\medspace {\textormathspace+\medmuskip 2} % 4/18 em
\unexpanded\def\thickspace {\textormathspace+\thickmuskip3}
\unexpanded\def\neghairspace {\textormathspace-\thinmuskip{.5}}
\unexpanded\def\negthinspace {\textormathspace-\thinmuskip 1}
\unexpanded\def\negmedspace {\textormathspace-\medmuskip 2}
\unexpanded\def\negthickspace{\textormathspace-\thickmuskip3}
+\unexpanded\edef\medspace {\textormathspacecommand+\medmuskip{\Uchar"205F}}
+
% needed for unicode:
%unexpanded\def\breakablethinspace {\breakabletextormathspace+\thinmuskip1}
@@ -1043,10 +1075,10 @@
%unexpanded\def\zerowidthnobreakspace {\penalty\plustenthousand\kern\zeropoint}
%unexpanded\def\zerowidthspace {\hskip\zeropoint}
-\definehspace[.5][.1250\emspaceamount] % could also be [.1250\spaceamount]
-\definehspace[1] [.1667\emspaceamount]
-\definehspace[2] [.2222\emspaceamount]
-\definehspace[3] [.2777\emspaceamount]
+\definehspace[.5][.1250\emwidth] % hair
+\definehspace[1] [.1667\emwidth] % thin
+\definehspace[2] [.2222\emwidth] % med
+\definehspace[3] [.2777\emwidth] % thick
\let \, \thinspace
\let \: \medspace
diff --git a/tex/context/base/mkiv/spac-prf.lua b/tex/context/base/mkiv/spac-prf.lua
index 841e5d271..a28f30593 100644
--- a/tex/context/base/mkiv/spac-prf.lua
+++ b/tex/context/base/mkiv/spac-prf.lua
@@ -57,8 +57,8 @@ local getshift = nuts.getshift
local getwidth = nuts.getwidth
local getheight = nuts.getheight
local getdepth = nuts.getdepth
+local getboxglue = nuts.getboxglue
-local setfield = nuts.setfield
local setlink = nuts.setlink
local setlist = nuts.setlist
local setattr = nuts.setattr
@@ -118,9 +118,7 @@ local function getprofile(line,step)
return
end
- local glue_sign = getfield(line,"glue_sign")
- local glue_order = getfield(line,"glue_order")
- local glue_set = getfield(line,"glue_set")
+ local glue_set, glue_order, glue_sign = getboxglue(line)
local heights = { }
local depths = { }
@@ -474,8 +472,7 @@ local function inject(top,bot,amount) -- todo: look at penalties
setattr(glue,a_profilemethod,0)
setattr(glue,a_visual,getattr(top,a_visual))
--
- setlink(glue,bot)
- setlink(top,glue)
+ setlink(top,glue,bot)
end
methods[v_none] = function()
diff --git a/tex/context/base/mkiv/spac-prf.mkvi b/tex/context/base/mkiv/spac-prf.mkvi
index 8d150f58d..9a00da395 100644
--- a/tex/context/base/mkiv/spac-prf.mkvi
+++ b/tex/context/base/mkiv/spac-prf.mkvi
@@ -21,7 +21,7 @@
\unprotect
-\registerctxluafile{spac-prf}{1.001}
+\registerctxluafile{spac-prf}{}
\definesystemattribute[profilemethod][public]
diff --git a/tex/context/base/mkiv/spac-ver.lua b/tex/context/base/mkiv/spac-ver.lua
index 2f0191e6a..288630a5d 100644
--- a/tex/context/base/mkiv/spac-ver.lua
+++ b/tex/context/base/mkiv/spac-ver.lua
@@ -30,8 +30,6 @@ if not modules then modules = { } end modules ['spac-ver'] = {
-- todo: strip baselineskip around display math
--- todo: getglue(n,false) instead of getfield
-
local next, type, tonumber = next, type, tonumber
local gmatch, concat = string.gmatch, table.concat
local ceil, floor = math.ceil, math.floor
@@ -42,7 +40,6 @@ local todimen = string.todimen
local formatters = string.formatters
local nodes = nodes
-local node = node
local trackers = trackers
local attributes = attributes
local context = context
@@ -54,7 +51,11 @@ local texgetcount = tex.getcount
local texgetdimen = tex.getdimen
local texset = tex.set
local texsetdimen = tex.setdimen
+local texsetcount = tex.setcount
local texnest = tex.nest
+local texgetbox = tex.getbox
+
+local buildpage = tex.triggerbuildpage
local variables = interfaces.variables
local implement = interfaces.implement
@@ -72,8 +73,8 @@ local v_noheight = variables.noheight
local v_nodepth = variables.nodepth
local v_line = variables.line
local v_halfline = variables.halfline
-local v_line_m = "-" .. variables.line
-local v_halfline_m = "-" .. variables.halfline
+local v_line_m = "-" .. v_line
+local v_halfline_m = "-" .. v_halfline
local v_first = variables.first
local v_last = variables.last
local v_top = variables.top
@@ -118,8 +119,6 @@ local nuts = nodes.nuts
local tonode = nuts.tonode
local tonut = nuts.tonut
-local getfield = nuts.getfield
-local setfield = nuts.setfield
local getnext = nuts.getnext
local setlink = nuts.setlink
local getprev = nuts.getprev
@@ -150,6 +149,7 @@ local find_node_tail = nuts.tail
local flush_node = nuts.flush_node
local traverse_nodes = nuts.traverse
local traverse_nodes_id = nuts.traverse_id
+local insert_node_after = nuts.insert_after
local insert_node_before = nuts.insert_before
local remove_node = nuts.remove
local count_nodes = nuts.countall
@@ -171,6 +171,7 @@ local new_rule = nodepool.rule
local nodecodes = nodes.nodecodes
local skipcodes = nodes.skipcodes
+local penaltycodes = nodes.penaltycodes
local penalty_code = nodecodes.penalty
local kern_code = nodecodes.kern
@@ -180,16 +181,21 @@ local hlist_code = nodecodes.hlist
local vlist_code = nodecodes.vlist
local localpar_code = nodecodes.localpar
-local userskip_code = skipcodes.userskip
-local lineskip_code = skipcodes.lineskip
-local baselineskip_code = skipcodes.baselineskip
-local parskip_code = skipcodes.parskip
+local linebreak_code = penaltycodes.linebreakpenalty
+
+local userskip_code = skipcodes.userskip
+local lineskip_code = skipcodes.lineskip
+local baselineskip_code = skipcodes.baselineskip
+local parskip_code = skipcodes.parskip
+local topskip_code = skipcodes.topskip
+local splittopskip_code = skipcodes.splittopskip
+
local abovedisplayskip_code = skipcodes.abovedisplayskip
local belowdisplayskip_code = skipcodes.belowdisplayskip
local abovedisplayshortskip_code = skipcodes.abovedisplayshortskip
local belowdisplayshortskip_code = skipcodes.belowdisplayshortskip
-local topskip_code = skipcodes.topskip
-local splittopskip_code = skipcodes.splittopskip
+
+local properties = nodes.properties.data
local vspacing = builders.vspacing or { }
builders.vspacing = vspacing
@@ -695,20 +701,22 @@ local function snap_topskip(current,method)
return w, 0
end
-local categories = allocate {
- [0] = 'discard',
- [1] = 'largest',
- [2] = 'force' ,
- [3] = 'penalty',
- [4] = 'add' ,
- [5] = 'disable',
- [6] = 'nowhite',
- [7] = 'goback',
- [8] = 'together', -- not used (?)
- [9] = 'overlay',
- [10] = 'notopskip',
+local categories = {
+ [0] = "discard",
+ [1] = "largest",
+ [2] = "force",
+ [3] = "penalty",
+ [4] = "add",
+ [5] = "disable",
+ [6] = "nowhite",
+ [7] = "goback",
+ [8] = "packed",
+ [9] = "overlay",
+ [10] = "enable",
+ [11] = "notopskip",
}
+categories = allocate(table.swapped(categories,categories))
vspacing.categories = categories
function vspacing.tocategories(str)
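
The categories table above is now run through table.swapped into itself, so one table maps both ways: number to name for tracing and reporting, and name to number for lookups like categories.packed used a bit further down. A small plain-Lua sketch of the effect, assuming table.swapped(t,t) merges the inverse mapping back into the same table:

    local categories = { [0] = "discard", [8] = "packed", [10] = "enable" }

    local inverse = { }
    for k,v in pairs(categories) do inverse[v] = k end -- name -> number
    for k,v in pairs(inverse)    do categories[k] = v end

    -- both directions now work on the same table:
    -- categories[8]     --> "packed"
    -- categories.packed --> 8
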
@@ -740,14 +748,16 @@ storage.register("builders/vspacing/data/skip", vspacingdata.skip, "builders.vsp
do -- todo: interface.variables and properties
- local P, C, R, S, Cc = lpeg.P, lpeg.C, lpeg.R, lpeg.S, lpeg.Cc
+ local P, C, R, S, Cc, Cs = lpeg.P, lpeg.C, lpeg.R, lpeg.S, lpeg.Cc, lpeg.Cs
vspacing.fixed = false
local map = vspacingdata.map
local skip = vspacingdata.skip
- local multiplier = C(S("+-")^0 * R("09")^1) * P("*")
+ local sign = S("+-")^0
+ local multiplier = C(sign * R("09")^1) * P("*")
+ local singlefier = Cs(sign * Cc(1))
local separator = S(", ")
local category = P(":") * C((1-separator)^1)
local keyword = C((1-category-separator)^1)
@@ -773,6 +783,7 @@ do -- todo: interface.variables and properties
local ctx_flushblankhandling = context.flushblankhandling
local ctx_addpredefinedblankskip = context.addpredefinedblankskip
local ctx_addaskedblankskip = context.addaskedblankskip
+ local ctx_setblankpacked = context.setblankpacked
local ctx_pushlogger = context.pushlogger
local ctx_startblankhandling = context.startblankhandling
@@ -781,6 +792,8 @@ do -- todo: interface.variables and properties
local pattern = nil
+ local packed = categories.packed
+
local function handler(amount, keyword, detail)
if not keyword then
report_vspacing("unknown directive %a",s)
@@ -794,7 +807,9 @@ do -- todo: interface.variables and properties
ctx_flexibleblankskip()
elseif keyword == k_category then
local category = tonumber(detail)
- if category then
+ if category == packed then
+ ctx_setblankpacked()
+ elseif category then
ctx_setblankcategory(category)
ctx_flushblankhandling()
end
@@ -820,7 +835,7 @@ do -- todo: interface.variables and properties
end
end
- local splitter = ((multiplier + Cc(1)) * keyword * (category + Cc(false))) / handler
+ local splitter = ((multiplier + singlefier) * keyword * (category + Cc(false))) / handler
pattern = (splitter + separator^1)^0
function vspacing.analyze(str)
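
The splitter above now accepts a blank directive with or without an explicit multiplier: multiplier handles forms like 2*big, while the new singlefier supplies a default count of 1. A self-contained sketch of the same grammar, assuming plain Lua with the lpeg module available (the real pattern additionally feeds the three captures into the handler above):

    local lpeg = lpeg or require("lpeg")
    local P, C, R, S, Cc, Cs = lpeg.P, lpeg.C, lpeg.R, lpeg.S, lpeg.Cc, lpeg.Cs

    local sign       = S("+-")^0
    local multiplier = C(sign * R("09")^1) * P("*")
    local singlefier = Cs(sign * Cc(1)) -- no explicit count: default to 1
    local separator  = S(", ")
    local category   = P(":") * C((1 - separator)^1)
    local keyword    = C((1 - category - separator)^1)

    local splitter   = (multiplier + singlefier) * keyword * (category + Cc(false))

    print(splitter:match("2*big"))      --> 2   big        false
    print(splitter:match("samepage:3")) --> 1   samepage   3
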
@@ -951,14 +966,14 @@ function vspacing.snapbox(n,how)
end
else
local h, d, ch, cd, lines, extra = snap_hlist("box",box,sv,ht,dp)
-setprop(box,"snapper",{
- ht = h,
- dp = d,
- ch = ch,
- cd = cd,
- extra = extra,
- current = current,
-})
+ setprop(box,"snapper",{
+ ht = h,
+ dp = d,
+ ch = ch,
+ cd = cd,
+ extra = extra,
+ current = current,
+ })
setwhd(box,wd,ch,cd)
if trace_vsnapping then
report_snapper("box list snapped from (%p,%p) to (%p,%p) using method %a (%s) for %a (%s lines): %s",
@@ -983,689 +998,702 @@ end
-- We can register and copy the rule instead.
-local w, h, d = 0, 0, 0
------ w, h, d = 100*65536, 65536, 65536
+do
+
+ local w, h, d = 0, 0, 0
+ ----- w, h, d = 100*65536, 65536, 65536
-local function forced_skip(head,current,width,where,trace) -- looks old ... we have other tricks now
- if head == current then
- if getsubtype(head) == baselineskip_code then
- width = width - getwidth(head)
+ local function forced_skip(head,current,width,where,trace) -- looks old ... we have other tricks now
+ if head == current then
+ if getsubtype(head) == baselineskip_code then
+ width = width - getwidth(head)
+ end
end
+ if width == 0 then
+ -- do nothing
+ elseif where == "after" then
+ head, current = insert_node_after(head,current,new_rule(w,h,d))
+ head, current = insert_node_after(head,current,new_kern(width))
+ head, current = insert_node_after(head,current,new_rule(w,h,d))
+ else
+ local c = current
+ head, current = insert_node_before(head,current,new_rule(w,h,d))
+ head, current = insert_node_before(head,current,new_kern(width))
+ head, current = insert_node_before(head,current,new_rule(w,h,d))
+ current = c
+ end
+ if trace then
+ report_vspacing("inserting forced skip of %p",width)
+ end
+ return head, current
end
- if width == 0 then
- -- do nothing
- elseif where == "after" then
- head, current = insert_node_after(head,current,new_rule(w,h,d))
- head, current = insert_node_after(head,current,new_kern(width))
- head, current = insert_node_after(head,current,new_rule(w,h,d))
- else
- local c = current
- head, current = insert_node_before(head,current,new_rule(w,h,d))
- head, current = insert_node_before(head,current,new_kern(width))
- head, current = insert_node_before(head,current,new_rule(w,h,d))
- current = c
- end
- if trace then
- report_vspacing("inserting forced skip of %p",width)
- end
- return head, current
-end
--- penalty only works well when before skip
+ -- penalty only works well when before skip
-local discard = 0
-local largest = 1
-local force = 2
-local penalty = 3
-local add = 4
-local disable = 5
-local nowhite = 6
-local goback = 7
-local together = 8 -- not used (?)
-local overlay = 9
-local enable = 10
+ local discard = categories.discard
+ local largest = categories.largest
+ local force = categories.force
+ local penalty = categories.penalty
+ local add = categories.add
+ local disable = categories.disable
+ local nowhite = categories.nowhite
+ local goback = categories.goback
+ local packed = categories.packed
+ local overlay = categories.overlay
+ local enable = categories.enable
+ local notopskip = categories.notopskip
--- [whatsits][hlist][glue][glue][penalty]
+ -- [whatsits][hlist][glue][glue][penalty]
-local special_penalty_min = 32250
-local special_penalty_max = 35000
-local special_penalty_xxx = 0
+ local special_penalty_min = 32250
+ local special_penalty_max = 35000
+ local special_penalty_xxx = 0
--- this is rather messy and complex: we want to make sure that successive
--- header don't break but also make sure that we have at least a decent
--- break when we have succesive ones (often when testing)
+ -- this is rather messy and complex: we want to make sure that successive
+ -- headers don't break but also make sure that we have at least a decent
+ -- break when we have successive ones (often when testing)
--- todo: mark headers as such so that we can recognize them
+ -- todo: mark headers as such so that we can recognize them
-local specialmethods = { }
-local specialmethod = 1
+ local specialmethods = { }
+ local specialmethod = 1
-local properties = nodes.properties.data
-
-specialmethods[1] = function(pagehead,pagetail,start,penalty)
- --
- if not pagehead or penalty < special_penalty_min or penalty > special_penalty_max then
- return
- end
- local current = pagetail
- --
- -- nodes.showsimplelist(pagehead,0)
- --
- if trace_specials then
- report_specials("checking penalty %a",penalty)
- end
- while current do
- local id = getid(current)
- if id == penalty_code then
- local p = properties[current]
- if p then
- local p = p.special_penalty
- if not p then
- if trace_specials then
- report_specials(" regular penalty, continue")
- end
- elseif p == penalty then
- if trace_specials then
- report_specials(" context penalty %a, same level, overloading",p)
- end
- return special_penalty_xxx
- elseif p > special_penalty_min and p < special_penalty_max then
- if penalty < p then
+ specialmethods[1] = function(pagehead,pagetail,start,penalty)
+ --
+ if not pagehead or penalty < special_penalty_min or penalty > special_penalty_max then
+ return
+ end
+ local current = pagetail
+ --
+ -- nodes.showsimplelist(pagehead,0)
+ --
+ if trace_specials then
+ report_specials("checking penalty %a",penalty)
+ end
+ while current do
+ local id = getid(current)
+ if id == penalty_code then
+ local p = properties[current]
+ if p then
+ local p = p.special_penalty
+ if not p then
if trace_specials then
- report_specials(" context penalty %a, lower level, overloading",p)
+ report_specials(" regular penalty, continue")
end
- return special_penalty_xxx
- else
+ elseif p == penalty then
if trace_specials then
- report_specials(" context penalty %a, higher level, quitting",p)
+ report_specials(" context penalty %a, same level, overloading",p)
end
- return
- end
- elseif trace_specials then
- report_specials(" context penalty %a, higher level, continue",p)
- end
- else
- local p = getpenalty(current)
- if p < 10000 then
- -- assume some other mechanism kicks in so we seem to have content
- if trace_specials then
- report_specials(" regular penalty %a, quitting",p)
+ return special_penalty_xxx
+ elseif p > special_penalty_min and p < special_penalty_max then
+ if penalty < p then
+ if trace_specials then
+ report_specials(" context penalty %a, lower level, overloading",p)
+ end
+ return special_penalty_xxx
+ else
+ if trace_specials then
+ report_specials(" context penalty %a, higher level, quitting",p)
+ end
+ return
+ end
+ elseif trace_specials then
+ report_specials(" context penalty %a, higher level, continue",p)
end
- break
else
- if trace_specials then
- report_specials(" regular penalty %a, continue",p)
+ local p = getpenalty(current)
+ if p < 10000 then
+ -- assume some other mechanism kicks in so we seem to have content
+ if trace_specials then
+ report_specials(" regular penalty %a, quitting",p)
+ end
+ break
+ else
+ if trace_specials then
+ report_specials(" regular penalty %a, continue",p)
+ end
end
end
end
+ current = getprev(current)
end
- current = getprev(current)
- end
- -- none found, so no reson to be special
- if trace_specials then
- if pagetail then
- report_specials(" context penalty, discarding, nothing special")
- else
- report_specials(" context penalty, discarding, nothing preceding")
+ -- none found, so no reason to be special
+ if trace_specials then
+ if pagetail then
+ report_specials(" context penalty, discarding, nothing special")
+ else
+ report_specials(" context penalty, discarding, nothing preceding")
+ end
end
+ return special_penalty_xxx
end
- return special_penalty_xxx
-end
--- specialmethods[2] : always put something before and use that as to-be-changed
---
--- we could inject a vadjust to force a recalculation .. a mess
---
--- So, the next is far from robust and okay but for the moment this overlaying
--- has to do. Always test this with the examples in spec-ver.mkvi!
-
-local function check_experimental_overlay(head,current)
- local p = nil
- local c = current
- local n = nil
- local function overlay(p,n,mvl)
- local p_wd, p_ht, p_dp = getwhd(p)
- local n_wd, n_ht, n_dp = getwhd(n)
- local skips = 0
- --
- -- We deal with this at the tex end .. we don't see spacing .. enabling this code
- -- is probably harmless but then we need to test it.
- --
- local c = getnext(p)
- while c and c ~= n do
- local id = getid(c)
- if id == glue_code then
- skips = skips + getwidth(c)
- elseif id == kern_code then
- skips = skips + getkern(c)
+ -- This will be replaced after 0.80+ when we have a more robust look-back and
+ -- can look at the bigger picture.
+
+ -- todo: look back and when a special is there before a list is seen, keep the penalty
+
+ -- we now look back a lot, way too often
+
+ -- userskip
+ -- lineskip
+ -- baselineskip
+ -- parskip
+ -- abovedisplayskip
+ -- belowdisplayskip
+ -- abovedisplayshortskip
+ -- belowdisplayshortskip
+ -- topskip
+ -- splittopskip
+
+ -- we could inject a vadjust to force a recalculation .. a mess
+ --
+ -- So, the next is far from robust and okay but for the moment this overlaying
+ -- has to do. Always test this with the examples in spac-ver.mkvi!
+
+ local function check_experimental_overlay(head,current)
+ local p = nil
+ local c = current
+ local n = nil
+ local function overlay(p,n,mvl)
+ local p_wd, p_ht, p_dp = getwhd(p)
+ local n_wd, n_ht, n_dp = getwhd(n)
+ local skips = 0
+ --
+ -- We deal with this at the tex end .. we don't see spacing .. enabling this code
+ -- is probably harmless but then we need to test it.
+ --
+ -- we could calculate this before we call
+ --
+ -- problem: prev list and next list can be unconnected
+ --
+ local c = getnext(p)
+ local l = c
+ while c and c ~= n do
+ local id = getid(c)
+ if id == glue_code then
+ skips = skips + getwidth(c)
+ elseif id == kern_code then
+ skips = skips + getkern(c)
+ end
+ l = c
+ c = getnext(c)
end
- c = getnext(c)
- end
- --
- local delta = n_ht + skips + p_dp
- texsetdimen("global","d_spac_overlay",-delta) -- for tracing
- local k = new_kern(-delta)
- if n_ht > p_ht then
+ local c = getprev(n)
+ while c and c ~= n and c ~= l do
+ local id = getid(c)
+ if id == glue_code then
+ skips = skips + getwidth(c)
+ elseif id == kern_code then
+ skips = skips + getkern(c)
+ end
+ c = getprev(c)
+ end
+ --
+ local delta = n_ht + skips + p_dp
+ texsetdimen("global","d_spac_overlay",-delta) -- for tracing
-- we should adapt pagetotal ! (need a hook for that) .. now we have the wrong pagebreak
- setheight(p,n_ht)
- end
- insert_node_before(head,n,k)
- if p == head then
- head = k
- end
- if trace_vspacing then
- report_vspacing("overlaying, prev height: %p, prev depth: %p, next height: %p, skips: %p, move up: %p",p_ht,p_dp,n_ht,skips,delta)
+ local k = new_kern(-delta)
+ head = insert_node_before(head,n,k)
+ if n_ht > p_ht then
+ local k = new_kern(n_ht-p_ht)
+ head = insert_node_before(head,p,k)
+ end
+ if trace_vspacing then
+ report_vspacing("overlaying, prev height: %p, prev depth: %p, next height: %p, skips: %p, move up: %p",p_ht,p_dp,n_ht,skips,delta)
+ end
+ return remove_node(head,current,true)
end
- return remove_node(head,current,true)
- end
- -- goto next line
- while c do
- local id = getid(c)
- if id == glue_code or id == penalty_code or id == kern_code then
- -- skip (actually, remove)
- c = getnext(c)
- elseif id == hlist_code then
- n = c
- break
- else
- break
- end
- end
- if n then
- -- we have a next line, goto prev line
- c = current
+ -- goto next line
while c do
local id = getid(c)
- if id == glue_code or id == penalty_code then
- c = getprev(c)
+ if id == glue_code or id == penalty_code or id == kern_code then
+ -- skip (actually, remove)
+ c = getnext(c)
elseif id == hlist_code then
- p = c
+ n = c
break
else
break
end
end
- if not p then
- if a_snapmethod == a_snapvbox then
- -- quit, we're not on the mvl
- else
- local c = tonut(texlists.page_head)
- while c and c ~= n do
- local id = getid(c)
- if id == hlist_code then
- p = c
- end
- c = getnext(c)
+ if n then
+ -- we have a next line, goto prev line
+ c = current
+ while c do
+ local id = getid(c)
+ if id == glue_code or id == penalty_code then -- kern ?
+ c = getprev(c)
+ elseif id == hlist_code then
+ p = c
+ break
+ else
+ break
end
- if p and p ~= n then
- return overlay(p,n,true)
+ end
+ if not p then
+ if a_snapmethod == a_snapvbox then
+ -- quit, we're not on the mvl
+ else
+ -- inefficient when we're at the end of a page
+ local c = tonut(texlists.page_head)
+ while c and c ~= n do
+ local id = getid(c)
+ if id == hlist_code then
+ p = c
+ end
+ c = getnext(c)
+ end
+ if p and p ~= n then
+ return overlay(p,n,true)
+ end
end
+ elseif p ~= n then
+ return overlay(p,n,false)
end
- elseif p ~= n then
- return overlay(p,n,false)
end
+ -- in fact, we could try again later ... so then no remove (a few tries)
+ return remove_node(head, current, true)
end
- -- in fact, we could try again later ... so then no remove (a few tries)
- return remove_node(head, current, true)
-end
-
--- This will be replaced after 0.80+ when we have a more robust look-back and
--- can look at the bigger picture.
-
--- todo: look back and when a special is there before a list is seen penalty keep ut
-
--- we now look back a lot, way too often
-
--- userskip
--- lineskip
--- baselineskip
--- parskip
--- abovedisplayskip
--- belowdisplayskip
--- abovedisplayshortskip
--- belowdisplayshortskip
--- topskip
--- splittopskip
-
-local experiment = true directives.register("vspacing.experiment",function(v) experiment = v end)
-
-local function collapser(head,where,what,trace,snap,a_snapmethod) -- maybe also pass tail
- if trace then
- reset_tracing(head)
- end
- local current, oldhead = head, head
- local glue_order, glue_data, force_glue = 0, nil, false
- local penalty_order, penalty_data, natural_penalty, special_penalty = 0, nil, nil, nil
- local parskip, ignore_parskip, ignore_following, ignore_whitespace, keep_together = nil, false, false, false, false
- local lastsnap = nil
- --
- -- todo: keep_together: between headers
- --
- local pagehead = nil
- local pagetail = nil
- local function getpagelist()
- if not pagehead then
- pagehead = texlists.page_head
- if pagehead then
- pagehead = tonut(pagehead)
- pagetail = find_node_tail(pagehead) -- no texlists.page_tail yet-- no texlists.page_tail yet
- end
- end
- end
- --
- local function compensate(n)
- local g = 0
- while n and getid(n) == glue_code do
- g = g + getwidth(n)
- n = getnext(n)
+ local function collapser(head,where,what,trace,snap,a_snapmethod) -- maybe also pass tail
+ if trace then
+ reset_tracing(head)
end
- if n then
- local p = getprop(n,"snapper")
- if p then
- local extra = p.extra
- if extra and extra < 0 then -- hm, extra can be unset ... needs checking
- local h = p.ch -- getheight(n)
- -- maybe an extra check
- -- if h - extra < g then
- setheight(n,h-2*extra)
- p.extra = 0
- if trace_vsnapping then
- report_snapper("removed extra space at top: %p",extra)
- end
- -- end
+ local current, oldhead = head, head
+ local glue_order, glue_data, force_glue = 0, nil, false
+ local penalty_order, penalty_data, natural_penalty, special_penalty = 0, nil, nil, nil
+ local parskip, ignore_parskip, ignore_following, ignore_whitespace, keep_together = nil, false, false, false, false
+ local lastsnap = nil
+ --
+ -- todo: keep_together: between headers
+ --
+ local pagehead = nil
+ local pagetail = nil
+
+ local function getpagelist()
+ if not pagehead then
+ pagehead = texlists.page_head
+ if pagehead then
+ pagehead = tonut(pagehead)
+ pagetail = find_node_tail(pagehead) -- no texlists.page_tail yet
end
end
- return n
end
- end
- --
- local function removetopsnap()
- getpagelist()
- if pagehead then
- local n = pagehead and compensate(pagehead)
- if n and n ~= pagetail then
- local p = getprop(pagetail,"snapper")
+ --
+ local function compensate(n)
+ local g = 0
+ while n and getid(n) == glue_code do
+ g = g + getwidth(n)
+ n = getnext(n)
+ end
+ if n then
+ local p = getprop(n,"snapper")
if p then
- local e = p.extra
- if e and e < 0 then
- local t = texget("pagetotal")
- if t > 0 then
- local g = texget("pagegoal") -- 1073741823 is signal
- local d = g - t
- if d < -e then
- local penalty = new_penalty(1000000)
- setlink(penalty,head)
- head = penalty
- report_snapper("force pagebreak due to extra space at bottom: %p",e)
+ local extra = p.extra
+ if extra and extra < 0 then -- hm, extra can be unset ... needs checking
+ local h = p.ch -- getheight(n)
+ -- maybe an extra check
+ -- if h - extra < g then
+ setheight(n,h-2*extra)
+ p.extra = 0
+ if trace_vsnapping then
+ report_snapper("removed extra space at top: %p",extra)
end
- end
+ -- end
end
end
+ return n
end
- elseif head then
- compensate(head)
end
- end
- --
- local function getavailable()
- getpagelist()
- if pagehead then
- local t = texget("pagetotal")
- if t > 0 then
- local g = texget("pagegoal")
- return g - t
+ --
+ local function removetopsnap()
+ getpagelist()
+ if pagehead then
+ local n = pagehead and compensate(pagehead)
+ if n and n ~= pagetail then
+ local p = getprop(pagetail,"snapper")
+ if p then
+ local e = p.extra
+ if e and e < 0 then
+ local t = texget("pagetotal")
+ if t > 0 then
+ local g = texget("pagegoal") -- 1073741823 is signal
+ local d = g - t
+ if d < -e then
+ local penalty = new_penalty(1000000)
+ setlink(penalty,head)
+ head = penalty
+ report_snapper("force pagebreak due to extra space at bottom: %p",e)
+ end
+ end
+ end
+ end
+ end
+ elseif head then
+ compensate(head)
end
end
- return false
- end
- --
- local function flush(why)
- if penalty_data then
- local p = new_penalty(penalty_data)
- if trace then
- trace_done("flushed due to " .. why,p)
- end
- if penalty_data >= 10000 then -- or whatever threshold?
- local prev = getprev(current)
- if getid(prev) == glue_code then -- maybe go back more, or maybe even push back before any glue
- -- tricky case: spacing/grid-007.tex: glue penalty glue
- head = insert_node_before(head,prev,p)
- else
- head = insert_node_before(head,current,p)
+ --
+ local function getavailable()
+ getpagelist()
+ if pagehead then
+ local t = texget("pagetotal")
+ if t > 0 then
+ local g = texget("pagegoal")
+ return g - t
end
- else
- head = insert_node_before(head,current,p)
- end
- -- if penalty_data > special_penalty_min and penalty_data < special_penalty_max then
- local props = properties[p]
- if props then
- props.special_penalty = special_penalty or penalty_data
- else
- properties[p] = {
- special_penalty = special_penalty or penalty_data
- }
end
- -- end
+ return false
end
- if glue_data then
- if force_glue then
+ --
+ local function flush(why)
+ if penalty_data then
+ local p = new_penalty(penalty_data)
if trace then
- trace_done("flushed due to forced " .. why,glue_data)
+ trace_done("flushed due to " .. why,p)
end
- head = forced_skip(head,current,getwidth(glue_data,width),"before",trace)
- flush_node(glue_data)
- else
- local width, stretch, shrink = getglue(glue_data)
- if width ~= 0 then
- if trace then
- trace_done("flushed due to non zero " .. why,glue_data)
+ if penalty_data >= 10000 then -- or whatever threshold?
+ local prev = getprev(current)
+ if getid(prev) == glue_code then -- maybe go back more, or maybe even push back before any glue
+ -- tricky case: spacing/grid-007.tex: glue penalty glue
+ head = insert_node_before(head,prev,p)
+ else
+ head = insert_node_before(head,current,p)
end
- head = insert_node_before(head,current,glue_data)
- elseif stretch ~= 0 or shrink ~= 0 then
+ else
+ head = insert_node_before(head,current,p)
+ end
+ -- if penalty_data > special_penalty_min and penalty_data < special_penalty_max then
+ local props = properties[p]
+ if props then
+ props.special_penalty = special_penalty or penalty_data
+ else
+ properties[p] = {
+ special_penalty = special_penalty or penalty_data
+ }
+ end
+ -- end
+ end
+ if glue_data then
+ if force_glue then
if trace then
- trace_done("flushed due to stretch/shrink in" .. why,glue_data)
+ trace_done("flushed due to forced " .. why,glue_data)
end
- head = insert_node_before(head,current,glue_data)
- else
- -- report_vspacing("needs checking (%s): %p",skipcodes[getsubtype(glue_data)],w)
+ head = forced_skip(head,current,getwidth(glue_data),"before",trace)
flush_node(glue_data)
+ else
+ local width, stretch, shrink = getglue(glue_data)
+ if width ~= 0 then
+ if trace then
+ trace_done("flushed due to non zero " .. why,glue_data)
+ end
+ head = insert_node_before(head,current,glue_data)
+ elseif stretch ~= 0 or shrink ~= 0 then
+ if trace then
+ trace_done("flushed due to stretch/shrink in" .. why,glue_data)
+ end
+ head = insert_node_before(head,current,glue_data)
+ else
+ -- report_vspacing("needs checking (%s): %p",skipcodes[getsubtype(glue_data)],w)
+ flush_node(glue_data)
+ end
end
end
- end
+ if trace then
+ trace_node(current)
+ end
+ glue_order, glue_data, force_glue = 0, nil, false
+ penalty_order, penalty_data, natural_penalty = 0, nil, nil
+ parskip, ignore_parskip, ignore_following, ignore_whitespace = nil, false, false, false
+ end
+ --
+ if trace_vsnapping then
+ report_snapper("global ht/dp = %p/%p, local ht/dp = %p/%p",
+ texgetdimen("globalbodyfontstrutheight"),
+ texgetdimen("globalbodyfontstrutdepth"),
+ texgetdimen("bodyfontstrutheight"),
+ texgetdimen("bodyfontstrutdepth")
+ )
+ end
if trace then
- trace_node(current)
+ trace_info("start analyzing",where,what)
end
- glue_order, glue_data, force_glue = 0, nil, false
- penalty_order, penalty_data, natural_penalty = 0, nil, nil
- parskip, ignore_parskip, ignore_following, ignore_whitespace = nil, false, false, false
- end
- --
- if trace_vsnapping then
- report_snapper("global ht/dp = %p/%p, local ht/dp = %p/%p",
- texgetdimen("globalbodyfontstrutheight"),
- texgetdimen("globalbodyfontstrutdepth"),
- texgetdimen("bodyfontstrutheight"),
- texgetdimen("bodyfontstrutdepth")
- )
- end
- if trace then
- trace_info("start analyzing",where,what)
- end
- if snap and where == "page" then
- removetopsnap()
- end
- while current do
- local id = getid(current)
- if id == hlist_code or id == vlist_code then
- -- needs checking, why so many calls
- if snap then
- lastsnap = nil
- local list = getlist(current)
- local s = getattr(current,a_snapmethod)
- if not s then
- -- if trace_vsnapping then
- -- report_snapper("mvl list not snapped")
- -- end
- elseif s == 0 then
- if trace_vsnapping then
- report_snapper("mvl %a not snapped, already done: %s",nodecodes[id],listtoutf(list))
- end
- else
- local sv = snapmethods[s]
- if sv then
- -- check if already snapped
- local done = list and already_done(id,list,a_snapmethod)
- if done then
- -- assume that the box is already snapped
- if trace_vsnapping then
- local w, h, d = getwhd(current)
- report_snapper("mvl list already snapped at (%p,%p): %s",h,d,listtoutf(list))
- end
- else
- local h, d, ch, cd, lines, extra = snap_hlist("mvl",current,sv,false,false)
- lastsnap = {
- ht = h,
- dp = d,
- ch = ch,
- cd = cd,
- extra = extra,
- current = current,
- }
- setprop(current,"snapper",lastsnap)
- if trace_vsnapping then
- report_snapper("mvl %a snapped from (%p,%p) to (%p,%p) using method %a (%s) for %a (%s lines): %s",
- nodecodes[id],h,d,ch,cd,sv.name,sv.specification,where,lines,listtoutf(list))
+ if snap and where == "page" then
+ removetopsnap()
+ end
+ while current do
+ local id = getid(current)
+ if id == hlist_code or id == vlist_code then
+ -- needs checking, why so many calls
+ if snap then
+ lastsnap = nil
+ local list = getlist(current)
+ local s = getattr(current,a_snapmethod)
+ if not s then
+ -- if trace_vsnapping then
+ -- report_snapper("mvl list not snapped")
+ -- end
+ elseif s == 0 then
+ if trace_vsnapping then
+ report_snapper("mvl %a not snapped, already done: %s",nodecodes[id],listtoutf(list))
+ end
+ else
+ local sv = snapmethods[s]
+ if sv then
+ -- check if already snapped
+ local done = list and already_done(id,list,a_snapmethod)
+ if done then
+ -- assume that the box is already snapped
+ if trace_vsnapping then
+ local w, h, d = getwhd(current)
+ report_snapper("mvl list already snapped at (%p,%p): %s",h,d,listtoutf(list))
+ end
+ else
+ local h, d, ch, cd, lines, extra = snap_hlist("mvl",current,sv,false,false)
+ lastsnap = {
+ ht = h,
+ dp = d,
+ ch = ch,
+ cd = cd,
+ extra = extra,
+ current = current,
+ }
+ setprop(current,"snapper",lastsnap)
+ if trace_vsnapping then
+ report_snapper("mvl %a snapped from (%p,%p) to (%p,%p) using method %a (%s) for %a (%s lines): %s",
+ nodecodes[id],h,d,ch,cd,sv.name,sv.specification,where,lines,listtoutf(list))
+ end
end
+ elseif trace_vsnapping then
+ report_snapper("mvl %a not snapped due to unknown snap specification: %s",nodecodes[id],listtoutf(list))
end
- elseif trace_vsnapping then
- report_snapper("mvl %a not snapped due to unknown snap specification: %s",nodecodes[id],listtoutf(list))
+ setattr(current,a_snapmethod,0)
end
- setattr(current,a_snapmethod,0)
+ else
+ --
end
- else
- --
- end
- -- tex.prevdepth = 0
- flush("list")
- current = getnext(current)
- elseif id == penalty_code then
- -- natural_penalty = getpenalty(current)
- -- if trace then
- -- trace_done("removed penalty",current)
- -- end
- -- head, current = remove_node(head, current, true)
- current = getnext(current)
- elseif id == kern_code then
- if snap and trace_vsnapping and getkern(current) ~= 0 then
- report_snapper("kern of %p kept",getkern(current))
- end
- flush("kern")
- current = getnext(current)
- elseif id == glue_code then
- local subtype = getsubtype(current)
- if subtype == userskip_code then
- local sc = getattr(current,a_skipcategory) -- has no default, no unset (yet)
- local so = getattr(current,a_skiporder) or 1 -- has 1 default, no unset (yet)
- local sp = getattr(current,a_skippenalty) -- has no default, no unset (yet)
- if sp and sc == penalty then
- if where == "page" then
- getpagelist()
- local p = specialmethods[specialmethod](pagehead,pagetail,current,sp)
- if p then
- -- todo: other tracer
- --
- -- if trace then
- -- trace_skip("previous special penalty %a is changed to %a using method %a",sp,p,specialmethod)
- -- end
- special_penalty = sp
- sp = p
+ -- tex.prevdepth = 0
+ flush("list")
+ current = getnext(current)
+ elseif id == penalty_code then
+ -- natural_penalty = getpenalty(current)
+ -- if trace then
+ -- trace_done("removed penalty",current)
+ -- end
+ -- head, current = remove_node(head, current, true)
+ current = getnext(current)
+ elseif id == kern_code then
+ if snap and trace_vsnapping and getkern(current) ~= 0 then
+ report_snapper("kern of %p kept",getkern(current))
+ end
+ flush("kern")
+ current = getnext(current)
+ elseif id == glue_code then
+ local subtype = getsubtype(current)
+ if subtype == userskip_code then
+ local sc = getattr(current,a_skipcategory) -- has no default, no unset (yet)
+ local so = getattr(current,a_skiporder) or 1 -- has 1 default, no unset (yet)
+ local sp = getattr(current,a_skippenalty) -- has no default, no unset (yet)
+ if sp and sc == penalty then
+ if where == "page" then
+ getpagelist()
+ local p = specialmethods[specialmethod](pagehead,pagetail,current,sp)
+ if p then
+ -- todo: other tracer
+ --
+ -- if trace then
+ -- trace_skip("previous special penalty %a is changed to %a using method %a",sp,p,specialmethod)
+ -- end
+ special_penalty = sp
+ sp = p
+ end
end
- end
- if not penalty_data then
- penalty_data = sp
- elseif penalty_order < so then
- penalty_order, penalty_data = so, sp
- elseif penalty_order == so and sp > penalty_data then
- penalty_data = sp
- end
- if trace then
- trace_skip("penalty in skip",sc,so,sp,current)
- end
- head, current = remove_node(head, current, true)
- elseif not sc then -- if not sc then
- if glue_data then
- if trace then
- trace_done("flush",glue_data)
+ if not penalty_data then
+ penalty_data = sp
+ elseif penalty_order < so then
+ penalty_order, penalty_data = so, sp
+ elseif penalty_order == so and sp > penalty_data then
+ penalty_data = sp
end
- head = insert_node_before(head,current,glue_data)
if trace then
- trace_natural("natural",current)
+ trace_skip("penalty in skip",sc,so,sp,current)
end
- current = getnext(current)
- else
- -- not look back across head
- -- todo: prev can be whatsit (latelua)
- local previous = getprev(current)
- if previous and getid(previous) == glue_code and getsubtype(previous) == userskip_code then
- local pwidth, pstretch, pshrink, pstretch_order, pshrink_order = getglue(previous)
- local cwidth, cstretch, cshrink, cstretch_order, cshrink_order = getglue(current)
- if pstretch_order == 0 and pshrink_order == 0 and cstretch_order == 0 and cshrink_order == 0 then
- setglue(previous,pwidth + cwidth, pstretch + cstretch, pshrink + cshrink)
- if trace then
- trace_natural("removed",current)
- end
- head, current = remove_node(head, current, true)
- if trace then
- trace_natural("collapsed",previous)
+ head, current = remove_node(head, current, true)
+ elseif not sc then -- if not sc then
+ if glue_data then
+ if trace then
+ trace_done("flush",glue_data)
+ end
+ head = insert_node_before(head,current,glue_data)
+ if trace then
+ trace_natural("natural",current)
+ end
+ current = getnext(current)
+ else
+ -- do not look back across head
+ -- todo: prev can be whatsit (latelua)
+ local previous = getprev(current)
+ if previous and getid(previous) == glue_code and getsubtype(previous) == userskip_code then
+ local pwidth, pstretch, pshrink, pstretch_order, pshrink_order = getglue(previous)
+ local cwidth, cstretch, cshrink, cstretch_order, cshrink_order = getglue(current)
+ if pstretch_order == 0 and pshrink_order == 0 and cstretch_order == 0 and cshrink_order == 0 then
+ setglue(previous,pwidth + cwidth, pstretch + cstretch, pshrink + cshrink)
+ if trace then
+ trace_natural("removed",current)
+ end
+ head, current = remove_node(head, current, true)
+ if trace then
+ trace_natural("collapsed",previous)
+ end
+ else
+ if trace then
+ trace_natural("filler",current)
+ end
+ current = getnext(current)
end
else
if trace then
- trace_natural("filler",current)
+ trace_natural("natural (no prev)",current)
end
current = getnext(current)
end
- else
+ end
+ glue_order, glue_data = 0, nil
+ elseif sc == disable or sc == enable then
+ local next = getnext(current)
+ if next then
+ ignore_following = sc == disable
if trace then
- trace_natural("natural (no prev)",current)
+ trace_skip(sc == disable and "disable" or "enable",sc,so,sp,current)
end
- current = getnext(current)
+ head, current = remove_node(head, current, true)
+ else
+ current = next
end
- end
- glue_order, glue_data = 0, nil
- elseif sc == disable or sc == enable then
- local next = getnext(current)
- if not experiment or next then
- ignore_following = sc == disable
+ elseif sc == packed then
if trace then
- trace_skip(sc == disable and "disable" or "enable",sc,so,sp,current)
+ trace_skip("packed",sc,so,sp,current)
end
+ -- can't happen !
head, current = remove_node(head, current, true)
- else
- current = next
- end
- elseif sc == together then
- local next = getnext(current)
- if not experiment or next then
- keep_together = true
+ elseif sc == nowhite then
+ local next = getnext(current)
+ if next then
+ ignore_whitespace = true
+ head, current = remove_node(head, current, true)
+ else
+ current = next
+ end
+ elseif sc == discard then
if trace then
- trace_skip("together",sc,so,sp,current)
+ trace_skip("discard",sc,so,sp,current)
end
head, current = remove_node(head, current, true)
- else
- current = next
- end
- elseif sc == nowhite then
- local next = getnext(current)
- if not experiment or next then
- ignore_whitespace = true
- head, current = remove_node(head, current, true)
- else
- current = next
- end
- elseif sc == discard then
- if trace then
- trace_skip("discard",sc,so,sp,current)
- end
- head, current = remove_node(head, current, true)
- elseif sc == overlay then
- -- todo (overlay following line over previous
- if trace then
- trace_skip("overlay",sc,so,sp,current)
- end
- -- beware: head can actually be after the affected nodes as
- -- we look back ... some day head will the real head
- head, current = check_experimental_overlay(head,current,a_snapmethod)
- elseif ignore_following then
- if trace then
- trace_skip("disabled",sc,so,sp,current)
- end
- head, current = remove_node(head, current, true)
- elseif not glue_data then
- if trace then
- trace_skip("assign",sc,so,sp,current)
- end
- glue_order = so
- head, current, glue_data = remove_node(head, current)
- elseif glue_order < so then
- if trace then
- trace_skip("force",sc,so,sp,current)
- end
- glue_order = so
- flush_node(glue_data)
- head, current, glue_data = remove_node(head, current)
- elseif glue_order == so then
- -- is now exclusive, maybe support goback as combi, else why a set
- if sc == largest then
- local cw = getwidth(current)
- local gw = getwidth(glue_data)
- if cw > gw then
- if trace then
- trace_skip("largest",sc,so,sp,current)
- end
- flush_node(glue_data)
- head, current, glue_data = remove_node(head,current)
- else
- if trace then
- trace_skip("remove smallest",sc,so,sp,current)
- end
- head, current = remove_node(head, current, true)
+ elseif sc == overlay then
+ -- todo: overlay following line over previous
+ if trace then
+ trace_skip("overlay",sc,so,sp,current)
end
- elseif sc == goback then
+ -- beware: head can actually be after the affected nodes as
+ -- we look back ... some day head will be the real head
+ head, current = check_experimental_overlay(head,current,a_snapmethod)
+ elseif ignore_following then
if trace then
- trace_skip("goback",sc,so,sp,current)
+ trace_skip("disabled",sc,so,sp,current)
end
- flush_node(glue_data)
- head, current, glue_data = remove_node(head,current)
- elseif sc == force then
- -- last one counts, some day we can provide an accumulator and largest etc
- -- but not now
+ head, current = remove_node(head, current, true)
+ elseif not glue_data then
if trace then
- trace_skip("force",sc,so,sp,current)
+ trace_skip("assign",sc,so,sp,current)
end
- flush_node(glue_data)
+ glue_order = so
head, current, glue_data = remove_node(head, current)
- elseif sc == penalty then
+ elseif glue_order < so then
if trace then
- trace_skip("penalty",sc,so,sp,current)
+ trace_skip("force",sc,so,sp,current)
end
+ glue_order = so
flush_node(glue_data)
- glue_data = nil
- head, current = remove_node(head, current, true)
- elseif sc == add then
- if trace then
- trace_skip("add",sc,so,sp,current)
+ head, current, glue_data = remove_node(head, current)
+ elseif glue_order == so then
+ -- is now exclusive, maybe support goback as combi, else why a set
+ if sc == largest then
+ local cw = getwidth(current)
+ local gw = getwidth(glue_data)
+ if cw > gw then
+ if trace then
+ trace_skip("largest",sc,so,sp,current)
+ end
+ flush_node(glue_data)
+ head, current, glue_data = remove_node(head,current)
+ else
+ if trace then
+ trace_skip("remove smallest",sc,so,sp,current)
+ end
+ head, current = remove_node(head, current, true)
+ end
+ elseif sc == goback then
+ if trace then
+ trace_skip("goback",sc,so,sp,current)
+ end
+ flush_node(glue_data)
+ head, current, glue_data = remove_node(head,current)
+ elseif sc == force then
+ -- last one counts, some day we can provide an accumulator and largest etc
+ -- but not now
+ if trace then
+ trace_skip("force",sc,so,sp,current)
+ end
+ flush_node(glue_data)
+ head, current, glue_data = remove_node(head, current)
+ elseif sc == penalty then
+ if trace then
+ trace_skip("penalty",sc,so,sp,current)
+ end
+ flush_node(glue_data)
+ glue_data = nil
+ head, current = remove_node(head, current, true)
+ elseif sc == add then
+ if trace then
+ trace_skip("add",sc,so,sp,current)
+ end
+ local cwidth, cstretch, cshrink = getglue(current)
+ local gwidth, gstretch, gshrink = getglue(glue_data)
+ setglue(glue_data,gwidth + cwidth, gstretch + cstretch, gshrink + cshrink)
+ -- todo: order
+ head, current = remove_node(head, current, true)
+ else
+ if trace then
+ trace_skip("unknown",sc,so,sp,current)
+ end
+ head, current = remove_node(head, current, true)
end
- local cwidth, cstretch, cshrink = getglue(current)
- local gwidth, gstretch, gshrink = getglue(glue_data)
- setglue(old,gwidth + cwidth, gstretch + cstretch, gshrink + cshrink)
- -- toto: order
- head, current = remove_node(head, current, true)
else
if trace then
trace_skip("unknown",sc,so,sp,current)
end
head, current = remove_node(head, current, true)
end
- else
- if trace then
- trace_skip("unknown",sc,so,sp,current)
+ if sc == force then
+ force_glue = true
end
- head, current = remove_node(head, current, true)
- end
- if sc == force then
- force_glue = true
- end
- elseif subtype == lineskip_code then
- if snap then
- local s = getattr(current,a_snapmethod)
- if s and s ~= 0 then
- setattr(current,a_snapmethod,0)
- setwidth(current,0)
- if trace_vsnapping then
- report_snapper("lineskip set to zero")
+ elseif subtype == lineskip_code then
+ if snap then
+ local s = getattr(current,a_snapmethod)
+ if s and s ~= 0 then
+ setattr(current,a_snapmethod,0)
+ setwidth(current,0)
+ if trace_vsnapping then
+ report_snapper("lineskip set to zero")
+ end
+ else
+ if trace then
+ trace_skip("lineskip",sc,so,sp,current)
+ end
+ flush("lineskip")
end
else
if trace then
@@ -1673,21 +1701,21 @@ local function collapser(head,where,what,trace,snap,a_snapmethod) -- maybe also
end
flush("lineskip")
end
- else
- if trace then
- trace_skip("lineskip",sc,so,sp,current)
- end
- flush("lineskip")
- end
- current = getnext(current)
- elseif subtype == baselineskip_code then
- if snap then
- local s = getattr(current,a_snapmethod)
- if s and s ~= 0 then
- setattr(current,a_snapmethod,0)
- setwidth(current,0)
- if trace_vsnapping then
- report_snapper("baselineskip set to zero")
+ current = getnext(current)
+ elseif subtype == baselineskip_code then
+ if snap then
+ local s = getattr(current,a_snapmethod)
+ if s and s ~= 0 then
+ setattr(current,a_snapmethod,0)
+ setwidth(current,0)
+ if trace_vsnapping then
+ report_snapper("baselineskip set to zero")
+ end
+ else
+ if trace then
+ trace_skip("baselineskip",sc,so,sp,current)
+ end
+ flush("baselineskip")
end
else
if trace then
@@ -1695,53 +1723,53 @@ local function collapser(head,where,what,trace,snap,a_snapmethod) -- maybe also
end
flush("baselineskip")
end
- else
- if trace then
- trace_skip("baselineskip",sc,so,sp,current)
- end
- flush("baselineskip")
- end
- current = getnext(current)
- elseif subtype == parskip_code then
- -- parskip always comes later
- if ignore_whitespace then
- if trace then
- trace_natural("ignored parskip",current)
- end
- head, current = remove_node(head, current, true)
- elseif glue_data then
- local w = getwidth(current)
- if (w ~= 0) and (w > getwidth(glue_data)) then
- glue_data = current
+ current = getnext(current)
+ elseif subtype == parskip_code then
+ -- parskip always comes later
+ if ignore_whitespace then
if trace then
- trace_natural("taking parskip",current)
+ trace_natural("ignored parskip",current)
+ end
+ head, current = remove_node(head, current, true)
+ elseif glue_data then
+ local w = getwidth(current)
+ if (w ~= 0) and (w > getwidth(glue_data)) then
+ glue_data = current
+ if trace then
+ trace_natural("taking parskip",current)
+ end
+ head, current = remove_node(head, current)
+ else
+ if trace then
+ trace_natural("removed parskip",current)
+ end
+ head, current = remove_node(head, current, true)
end
- head, current = remove_node(head, current)
else
if trace then
- trace_natural("removed parskip",current)
+ trace_natural("honored parskip",current)
end
- head, current = remove_node(head, current, true)
+ head, current, glue_data = remove_node(head, current)
end
- else
- if trace then
- trace_natural("honored parskip",current)
+ elseif subtype == topskip_code or subtype == splittopskip_code then
+ local next = getnext(current)
+ if next and getattr(next,a_skipcategory) == notopskip then
+ nuts.setglue(current) -- zero
end
- head, current, glue_data = remove_node(head, current)
- end
- elseif subtype == topskip_code or subtype == splittopskip_code then
- local next = getnext(current)
- if next and getattr(next,a_skipcategory) == 10 then -- no top skip
- nuts.setglue(current) -- zero
- end
- if snap then
- local s = getattr(current,a_snapmethod)
- if s and s ~= 0 then
- setattr(current,a_snapmethod,0)
- local sv = snapmethods[s]
- local w, cw = snap_topskip(current,sv)
- if trace_vsnapping then
- report_snapper("topskip snapped from %p to %p for %a",w,cw,where)
+ if snap then
+ local s = getattr(current,a_snapmethod)
+ if s and s ~= 0 then
+ setattr(current,a_snapmethod,0)
+ local sv = snapmethods[s]
+ local w, cw = snap_topskip(current,sv)
+ if trace_vsnapping then
+ report_snapper("topskip snapped from %p to %p for %a",w,cw,where)
+ end
+ else
+ if trace then
+ trace_skip("topskip",sc,so,sp,current)
+ end
+ flush("topskip")
end
else
if trace then
@@ -1749,198 +1777,201 @@ local function collapser(head,where,what,trace,snap,a_snapmethod) -- maybe also
end
flush("topskip")
end
- else
+ current = getnext(current)
+ elseif subtype == abovedisplayskip_code and remove_math_skips then
+ --
if trace then
- trace_skip("topskip",sc,so,sp,current)
+ trace_skip("above display skip (normal)",sc,so,sp,current)
end
- flush("topskip")
- end
- current = getnext(current)
- elseif subtype == abovedisplayskip_code and remove_math_skips then
- --
- if trace then
- trace_skip("above display skip (normal)",sc,so,sp,current)
- end
- flush("above display skip (normal)")
- current = getnext(current)
- --
- elseif subtype == belowdisplayskip_code and remove_math_skips then
- --
- if trace then
- trace_skip("below display skip (normal)",sc,so,sp,current)
- end
- flush("below display skip (normal)")
- current = getnext(current)
- --
- elseif subtype == abovedisplayshortskip_code and remove_math_skips then
- --
- if trace then
- trace_skip("above display skip (short)",sc,so,sp,current)
- end
- flush("above display skip (short)")
- current = getnext(current)
- --
- elseif subtype == belowdisplayshortskip_code and remove_math_skips then
- --
- if trace then
- trace_skip("below display skip (short)",sc,so,sp,current)
- end
- flush("below display skip (short)")
- current = getnext(current)
- --
- else -- other glue
- if snap and trace_vsnapping then
- local w = getwidth(current)
- if w ~= 0 then
- report_snapper("glue %p of type %a kept",w,skipcodes[subtype])
+ flush("above display skip (normal)")
+ current = getnext(current)
+ --
+ elseif subtype == belowdisplayskip_code and remove_math_skips then
+ --
+ if trace then
+ trace_skip("below display skip (normal)",sc,so,sp,current)
end
+ flush("below display skip (normal)")
+ current = getnext(current)
+ --
+ elseif subtype == abovedisplayshortskip_code and remove_math_skips then
+ --
+ if trace then
+ trace_skip("above display skip (short)",sc,so,sp,current)
+ end
+ flush("above display skip (short)")
+ current = getnext(current)
+ --
+ elseif subtype == belowdisplayshortskip_code and remove_math_skips then
+ --
+ if trace then
+ trace_skip("below display skip (short)",sc,so,sp,current)
+ end
+ flush("below display skip (short)")
+ current = getnext(current)
+ --
+ else -- other glue
+ if snap and trace_vsnapping then
+ local w = getwidth(current)
+ if w ~= 0 then
+ report_snapper("glue %p of type %a kept",w,skipcodes[subtype])
+ end
+ end
+ if trace then
+ trace_skip(formatters["glue of type %a"](subtype),sc,so,sp,current)
+ end
+ flush("some glue")
+ current = getnext(current)
end
- if trace then
- trace_skip(formatters["glue of type %a"](subtype),sc,so,sp,current)
- end
- flush("some glue")
+ else
+ flush(formatters["node with id %a"](id))
current = getnext(current)
end
- else
- flush(formatters["node with id %a"](id))
- current = getnext(current)
end
- end
- if trace then
- trace_info("stop analyzing",where,what)
- end
- -- if natural_penalty and (not penalty_data or natural_penalty > penalty_data) then
- -- penalty_data = natural_penalty
- -- end
- if trace and (glue_data or penalty_data) then
- trace_info("start flushing",where,what)
- end
- local tail
- if penalty_data then
- tail = find_node_tail(head)
- local p = new_penalty(penalty_data)
if trace then
- trace_done("result",p)
+ trace_info("stop analyzing",where,what)
+ end
+ -- if natural_penalty and (not penalty_data or natural_penalty > penalty_data) then
+ -- penalty_data = natural_penalty
+ -- end
+ if trace and (glue_data or penalty_data) then
+ trace_info("start flushing",where,what)
+ end
+ local tail
+ if penalty_data then
+ tail = find_node_tail(head)
+ local p = new_penalty(penalty_data)
+ if trace then
+ trace_done("result",p)
+ end
+ setlink(tail,p)
+ -- if penalty_data > special_penalty_min and penalty_data < special_penalty_max then
+ local props = properties[p]
+ if props then
+ props.special_penalty = special_penalty or penalty_data
+ else
+ properties[p] = {
+ special_penalty = special_penalty or penalty_data
+ }
+ end
+ -- end
end
- setlink(tail,p)
- -- if penalty_data > special_penalty_min and penalty_data < special_penalty_max then
- local props = properties[p]
- if props then
- props.special_penalty = special_penalty or penalty_data
+ if glue_data then
+ if not tail then tail = find_node_tail(head) end
+ if trace then
+ trace_done("result",glue_data)
+ end
+ if force_glue then
+ head, tail = forced_skip(head,tail,getwidth(glue_data),"after",trace)
+ flush_node(glue_data)
+ glue_data = nil
+ elseif tail then
+ setlink(tail,glue_data)
else
- properties[p] = {
- special_penalty = special_penalty or penalty_data
- }
+ head = glue_data
end
- -- end
- end
- if glue_data then
- if not tail then tail = find_node_tail(head) end
+ texnest[texnest.ptr].prevdepth = 0 -- appending to the list bypasses tex's prevdepth handler
+ end
if trace then
- trace_done("result",glue_data)
- end
- if force_glue then
- head, tail = forced_skip(head,tail,getwidth(glue_data),"after",trace)
- flush_node(glue_data)
- glue_data = nil
- elseif tail then
- setlink(tail,glue_data)
- else
- head = glue_data
+ if glue_data or penalty_data then
+ trace_info("stop flushing",where,what)
+ end
+ show_tracing(head)
+ if oldhead ~= head then
+ trace_info("head has been changed from %a to %a",nodecodes[getid(oldhead)],nodecodes[getid(head)])
+ end
end
- texnest[texnest.ptr].prevdepth = 0 -- appending to the list bypasses tex's prevdepth handler
+ return head, true
end
- if trace then
- if glue_data or penalty_data then
- trace_info("stop flushing",where,what)
- end
- show_tracing(head)
- if oldhead ~= head then
- trace_info("head has been changed from %a to %a",nodecodes[getid(oldhead)],nodecodes[getid(head)])
+
+ -- alignment after_output end box new_graf vmode_par hmode_par insert penalty before_display after_display
+ -- \par -> vmode_par
+ --
+ -- status.best_page_break
+ -- tex.lists.best_page_break
+ -- tex.lists.best_size (natural size to best_page_break)
+ -- tex.lists.least_page_cost (badness of best_page_break)
+ -- tex.lists.page_head
+ -- tex.lists.contrib_head
+
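+ -- a minimal sketch, assuming plain luatex node access, of walking the contribution
+ -- list (tex.lists.contrib_head) that pagehandler below gets handed:
+ --
+ -- local n = tex.lists.contrib_head
+ -- while n do
+ -- print(node.type(n.id), n.subtype)
+ -- n = n.next
+ -- end
+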
+ -- do
+
+ local stackhead, stacktail, stackhack = nil, nil, false
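+ -- glue-only contributions get buffered in this stack until a node comes along that forces a flush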
+
+ local function report(message,where,lst)
+ if lst and where then
+ report_vspacing(message,where,count_nodes(lst,true),nodeidstostring(lst))
+ else
+ report_vspacing(message,count_nodes(lst,true),nodeidstostring(lst))
end
end
- return head, true
-end
--- alignment after_output end box new_graf vmode_par hmode_par insert penalty before_display after_display
--- \par -> vmode_par
---
--- status.best_page_break
--- tex.lists.best_page_break
--- tex.lists.best_size (natural size to best_page_break)
--- tex.lists.least_page_cost (badness of best_page_break)
--- tex.lists.page_head
--- tex.lists.contrib_head
-
-local stackhead, stacktail, stackhack = nil, nil, false
-
-local function report(message,where,lst)
- if lst and where then
- report_vspacing(message,where,count_nodes(lst,true),nodeidstostring(lst))
- else
- report_vspacing(message,count_nodes(lst,true),nodeidstostring(lst))
- end
-end
+ -- ugly code: we get partial lists (check if this stack is still okay) ... and we run
+ -- into temp nodes (sigh)
--- ugly code: we get partial lists (check if this stack is still okay) ... and we run
--- into temp nodes (sigh)
-
-function vspacing.pagehandler(newhead,where)
- -- local newhead = texlists.contrib_head
- if newhead then
- newhead = tonut(newhead)
- local newtail = find_node_tail(newhead) -- best pass that tail, known anyway
- local flush = false
- stackhack = true -- todo: only when grid snapping once enabled
- -- todo: fast check if head = tail
- for n in traverse_nodes(newhead) do -- we could just look for glue nodes
- local id = getid(n)
- if id ~= glue_code then
- flush = true
- elseif getsubtype(n) == userskip_code then
- if getattr(n,a_skipcategory) then
- stackhack = true
- else
+ function vspacing.pagehandler(newhead,where)
+ -- local newhead = texlists.contrib_head
+ if newhead then
+ newhead = tonut(newhead)
+ local newtail = find_node_tail(newhead) -- best pass that tail, known anyway
+ local flush = false
+ stackhack = true -- todo: only when grid snapping once enabled
+ -- todo: fast check if head = tail
+ for n in traverse_nodes(newhead) do -- we could just look for glue nodes
+ local id = getid(n)
+ if id ~= glue_code then
flush = true
+ else
+ local subtype = getsubtype(n)
+ if subtype == userskip_code then
+ if getattr(n,a_skipcategory) then
+ stackhack = true
+ else
+ flush = true
+ end
+ elseif subtype == parskip_code then
+ -- if where == new_graf then ... end
+ if texgetcount("c_spac_vspacing_ignore_parskip") > 0 then
+-- texsetcount("c_spac_vspacing_ignore_parskip",0)
+ setglue(n)
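+ -- zero the parskip glue; the count itself gets cleared once, after this loop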
+ -- maybe removenode
+ end
+ end
end
- else
- -- tricky
end
- end
- if flush then
- if stackhead then
- if trace_collect_vspacing then report("%s > appending %s nodes to stack (final): %s",where,newhead) end
- setlink(stacktail,newhead)
- newhead = stackhead
- stackhead, stacktail = nil, nil
- end
- if stackhack then
- stackhack = false
- if trace_collect_vspacing then report("%s > processing %s nodes: %s",where,newhead) end
- -- texlists.contrib_head = collapser(newhead,"page",where,trace_page_vspacing,true,a_snapmethod)
- newhead = collapser(newhead,"page",where,trace_page_vspacing,true,a_snapmethod)
- else
- if trace_collect_vspacing then report("%s > flushing %s nodes: %s",where,newhead) end
- -- texlists.contrib_head = newhead
- end
- return tonode(newhead)
- else
- if stackhead then
- if trace_collect_vspacing then report("%s > appending %s nodes to stack (intermediate): %s",where,newhead) end
- setlink(stacktail,newhead)
+ texsetcount("c_spac_vspacing_ignore_parskip",0)
+ if flush then
+ if stackhead then
+ if trace_collect_vspacing then report("%s > appending %s nodes to stack (final): %s",where,newhead) end
+ setlink(stacktail,newhead)
+ newhead = stackhead
+ stackhead, stacktail = nil, nil
+ end
+ if stackhack then
+ stackhack = false
+ if trace_collect_vspacing then report("%s > processing %s nodes: %s",where,newhead) end
+ -- texlists.contrib_head = collapser(newhead,"page",where,trace_page_vspacing,true,a_snapmethod)
+ newhead = collapser(newhead,"page",where,trace_page_vspacing,true,a_snapmethod)
+ else
+ if trace_collect_vspacing then report("%s > flushing %s nodes: %s",where,newhead) end
+ -- texlists.contrib_head = newhead
+ end
+ return tonode(newhead)
else
- if trace_collect_vspacing then report("%s > storing %s nodes in stack (initial): %s",where,newhead) end
- stackhead = newhead
+ if stackhead then
+ if trace_collect_vspacing then report("%s > appending %s nodes to stack (intermediate): %s",where,newhead) end
+ setlink(stacktail,newhead)
+ else
+ if trace_collect_vspacing then report("%s > storing %s nodes in stack (initial): %s",where,newhead) end
+ stackhead = newhead
+ end
+ stacktail = newtail
+ -- texlists.contrib_head = nil
+ -- newhead = nil
end
- stacktail = newtail
- -- texlists.contrib_head = nil
- -- newhead = nil
end
+ return nil
end
- return nil
-end
-
-do
local ignore = table.tohash {
"split_keep",
@@ -1982,28 +2013,132 @@ end
do
- local outer = texnest[0]
- local reset = true
- local trace = false
- local report = logs.reporter("vspacing")
-
- directives.register("vspacing.resetprevdepth",function(v) reset = v end)
- trackers.register ("vspacing.resetprevdepth",function(v) trace = v end)
-
- function vspacing.resetprevdepth()
- if reset then
- local head = texlists.hold_head
- local skip = 0
- while head and head.id == insert_code do
- head = head.next
- skip = skip + 1
- end
+ local outer = texnest[0]
+ local enabled = true
+ local count = true
+ local trace = false
+ local report = logs.reporter("vspacing")
+
+ trackers.register("vspacing.synchronizepage",function(v)
+ trace = v
+ end)
+
+ directives.register("vspacing.synchronizepage",function(v)
+ if v == true or v == "count" then
+ enabled = true
+ count = true
+ elseif v == "first" then
+ enabled = true
+ count = false
+ else
+ enabled = false
+ count = false
+ end
+ end)
+
+ -- hm, check the old one
+
+ -- function vspacing.synchronizepage()
+ -- if enabled then
+ -- local head = texlists.hold_head
+ -- local skip = 0
+ -- while head and head.id == insert_code do
+ -- head = head.next
+ -- skip = skip + 1
+ -- end
+ -- if head then
+ -- outer.prevdepth = 0
+ -- end
+ -- if trace then
+ -- report("prevdepth %s at page %i, skipped %i, value %p",
+ -- head and "reset" or "kept",texgetcount("realpageno"),skip,outer.prevdepth)
+ -- end
+ -- end
+ -- end
+
+ local ignoredepth = -65536000
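+ -- -65536000 sp is -1000pt, the classic \prevdepth ignore value: at or below it tex adds no baselineskip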
+
+ function vspacing.synchronizepage()
+ if enabled then
+ local newdepth = outer.prevdepth
+ local olddepth = newdepth
+ local oldlines = outer.prevgraf
+ local newlines = 0
+ local boxfound = false
+ local head = texlists.contrib_head
if head then
- outer.prevdepth = 0
+ local tail = find_node_tail(tonut(head))
+ while tail do
+ local id = getid(tail)
+ if id == hlist_code then
+ if not boxfound then
+ newdepth = getdepth(tail)
+ boxfound = true
+ end
+ newlines = newlines + 1
+ if not count then
+ break
+ end
+ elseif id == vlist_code then
+ if not boxfound then
+ newdepth = getdepth(tail)
+ boxfound = true
+ end
+ break
+ elseif id == glue_code then
+ local subtype = getsubtype(tail)
+ if not (subtype == baselineskip_code or subtype == lineskip_code) then
+ break
+ elseif boxfound and not count then
+ break
+ end
+ elseif id == penalty_code then
+ if boxfound and not count then
+ break
+ end
+ else
+ -- ins, mark, kern, rule, boundary, whatsit
+ break
+ end
+ tail = getprev(tail)
+ end
end
+ if boxfound then
+ -- what if newdepth ...
+ else
+ texset("prevdepth",ignoredepth)
+ outer.prevdepth = ignoredepth
+ end
+ texset("prevgraf", newlines)
+ outer.prevgraf = newlines
if trace then
- report("prevdepth %s at page %i, skipped %i, value %p",
- head and "reset" or "kept",texgetcount("realpageno"),skip,outer.prevdepth)
+ report("page %i, prevdepth %p (last depth %p), prevgraf %i (from %i), %sboxes",
+ texgetcount("realpageno"),olddepth,newdepth,oldlines,newlines,boxfound and "" or "no ")
+ end
+ end
+ end
+
+ local trace = false
+
+ trackers.register("vspacing.forcestrutdepth",function(v) trace = v end)
+
+ function vspacing.forcestrutdepth(n,depth,trace_mode)
+ local box = texgetbox(n)
+ if box then
+ box = tonut(box)
+ local head = getlist(box)
+ if head then
+ local tail = find_node_tail(head)
+ if tail and getid(tail) == hlist_code then
+ local dp = getdepth(tail)
+ if dp < depth then
+ setdepth(tail,depth)
+ outer.prevdepth = depth
+ if trace or trace_mode > 0 then
+ nuts.setvisual(tail,"depth")
+ end
+ end
+ end
end
end
end
@@ -2022,8 +2157,15 @@ do
}
implement {
- name = "resetprevdepth",
- actions = vspacing.resetprevdepth,
+ name = "synchronizepage",
+ actions = vspacing.synchronizepage,
+ scope = "private"
+ }
+
+ implement {
+ name = "forcestrutdepth",
+ arguments = { "integer", "dimension", "integer" },
+ actions = vspacing.forcestrutdepth,
scope = "private"
}
@@ -2069,16 +2211,47 @@ do
arguments = { "string", "string" }
}
- local remove_node = nodes.remove
- local find_node_tail = nodes.tail
+ -- local remove_node = nodes.remove
+ -- local find_node_tail = nodes.tail
+ --
+ -- interfaces.implement {
+ -- name = "fakenextstrutline",
+ -- actions = function()
+ -- local head = texlists.page_head
+ -- if head then
+ -- local head = remove_node(head,find_node_tail(head),true)
+ -- texlists.page_head = head
+ -- buildpage()
+ -- end
+ -- end
+ -- }
interfaces.implement {
- name = "fakenextstrutline",
+ name = "removelastline",
actions = function()
local head = texlists.page_head
if head then
- local head = remove_node(head,find_node_tail(head),true)
- texlists.page_head = head
+ local tail = find_node_tail(head)
+ if tail then
+ -- maybe check for hlist subtype 1
+ local head = remove_node(head,tail,true)
+ texlists.page_head = head
+ buildpage()
+ end
+ end
+ end
+ }
+
+ interfaces.implement {
+ name = "showpagelist", -- will improve
+ actions = function()
+ local head = texlists.page_head
+ if head then
+ print("start")
+ while head do
+ print(" " .. tostring(head))
+ head = head.next
+ end
end
end
}
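The samepage categories defined further down in spac-ver.mkiv encode their level in a reserved penalty band (base 32250, step 10, twenty levels plus one per section definition); the collapser above remembers such a value as special_penalty. A minimal Lua sketch of that encoding, with hypothetical helper names, assuming only those two constants:

    local base, step = 32250, 10              -- \c_spac_vspacing_special_base / _step below
    local function samepage_penalty(level)    -- hypothetical helper, not part of the patch
        return base + step * level
    end
    local function samepage_level(penalty)    -- inverse mapping, also hypothetical
        return (penalty - base) / step
    end
    print(samepage_penalty(3))                -- 32280
    print(samepage_level(32450))              -- 20
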
diff --git a/tex/context/base/mkiv/spac-ver.mkiv b/tex/context/base/mkiv/spac-ver.mkiv
index 229963997..b71e28219 100644
--- a/tex/context/base/mkiv/spac-ver.mkiv
+++ b/tex/context/base/mkiv/spac-ver.mkiv
@@ -15,7 +15,7 @@
\unprotect
-\registerctxluafile{spac-ver}{1.001}
+\registerctxluafile{spac-ver}{}
% todo: use usernodes ?
@@ -61,7 +61,7 @@
\newif\iflocalinterlinespace
-\newskip\s_spac_vspacing_temp \s_spac_vspacing_temp\bigskipamount
+\newskip \s_spac_vspacing_temp \s_spac_vspacing_temp\bigskipamount
\def\skipfactor {.75}
\def\skipgluefactor{.25}
@@ -527,7 +527,6 @@
\unexpanded\def\spac_lines_start_correction[#1]%
{\edef\m_spac_lines_around{#1}%
- % todo: play with \fakenextstrutline
\spac_lines_action_around
\d_spac_prevdepth\prevdepth
\spac_lines_initialize_corrections
@@ -1050,6 +1049,24 @@
\box\scratchbox
\endgroup}
+\unexpanded\def\halflinestrut
+ {\relax
+ \dontleavehmode
+ \begingroup
+ \setbox\scratchbox\copy\strutbox
+ \ht\scratchbox\dimexpr\strutht-.5\strutht-.5\strutdp\relax
+ \box\scratchbox
+ \endgroup}
+
+\unexpanded\def\noheightstrut
+ {\relax
+ \dontleavehmode
+ \begingroup
+ \setbox\scratchbox\copy\strutbox
+ \ht\scratchbox\zeropoint
+ \box\scratchbox
+ \endgroup}
+
%D Sometimes a capstrut comes in handy
%D
%D \starttabulate[|Tl|l|l|]
@@ -1291,7 +1308,7 @@
\let\normaloffinterlineskip\offinterlineskip % knuth's original
\appendtoks
- \ifvmode\clf_resetprevdepth\fi % a nasty hack (tested for a while now)
+ \ifvmode\clf_synchronizepage\fi % a nasty hack (tested for a while now)
\to \everyafteroutput
%D My own one:
@@ -1355,17 +1372,21 @@
\global\let\restoreinterlinepenalty\spac_penalties_restore
\fi}
-\newif\ifgridsnapping % to be sure
-
-\def\defaultwidowpenalty {2000} % was: 1000
-\def\defaultclubpenalty {2000} % was: 800
-\def\defaultdisplaywidowpenalty {50}
-\def\defaultbrokenpenalty {100}
-
-\def\defaultgridwidowpenalty {0}
-\def\defaultgridclubpenalty {0}
-\def\defaultgriddisplaywidowpenalty {0}
-\def\defaultgridbrokenpenalty {0}
+\def\defaultdisplaywidowpenalty {50}
+\def\defaultwidowpenalty {2000} % was: 1000
+\def\defaultclubpenalty {2000} % was: 800
+\def\defaultbrokenpenalty {100}
+\def\defaultdoublehyphendemerits {10000}
+\def\defaultfinalhyphendemerits {5000}
+\def\defaultadjdemerits {10000}
+
+\def\defaultgriddisplaywidowpenalty {0}
+\def\defaultgridwidowpenalty {0}
+\def\defaultgridclubpenalty {0}
+\def\defaultgridbrokenpenalty {0}
+\def\defaultgriddoublehyphendemerits{10000} % always was so
+\def\defaultgridfinalhyphendemerits {5000} % always was so
+\def\defaultgridadjdemerits {10000} % always was so
\unexpanded\def\nopenalties
{\widowpenalty \zerocount
@@ -1390,10 +1411,13 @@
\directsetup{\systemsetupsprefix\s!reset}
- \widowpenalty \defaultwidowpenalty
- \clubpenalty \defaultclubpenalty
- \displaywidowpenalty\defaultdisplaywidowpenalty
- \brokenpenalty \defaultbrokenpenalty
+ \widowpenalty \defaultwidowpenalty
+ \clubpenalty \defaultclubpenalty
+ \displaywidowpenalty \defaultdisplaywidowpenalty
+ \brokenpenalty \defaultbrokenpenalty
+ \doublehyphendemerits\defaultdoublehyphendemerits
+ \finalhyphendemerits \defaultfinalhyphendemerits
+ \adjdemerits \defaultadjdemerits
\stopsetups
@@ -1401,10 +1425,13 @@
\directsetup{\systemsetupsprefix\s!reset}
- \widowpenalty \defaultgridwidowpenalty
- \clubpenalty \defaultgridclubpenalty
- \displaywidowpenalty\defaultgriddisplaywidowpenalty
- \brokenpenalty \defaultgridbrokenpenalty
+ \widowpenalty \defaultgridwidowpenalty
+ \clubpenalty \defaultgridclubpenalty
+ \displaywidowpenalty \defaultgriddisplaywidowpenalty
+ \brokenpenalty \defaultgridbrokenpenalty
+ \doublehyphendemerits\defaultgriddoublehyphendemerits
+ \finalhyphendemerits \defaultgridfinalhyphendemerits
+ \adjdemerits \defaultgridadjdemerits
\stopsetups
@@ -1414,9 +1441,12 @@
\directsetup{\systemsetupsprefix\s!reset}
- \setpenalties\widowpenalties\plustwo\maxdimen
- \setpenalties\clubpenalties \plustwo\maxdimen
- \brokenpenalty \maxdimen
+ \setpenalties \widowpenalties \plustwo \maxdimen
+ \setpenalties \clubpenalties \plustwo \maxdimen
+ \brokenpenalty \maxdimen
+ \doublehyphendemerits \defaultdoublehyphendemerits
+ \finalhyphendemerits \defaultfinalhyphendemerits
+ \adjdemerits \defaultadjdemerits
\stopsetups
@@ -1515,6 +1545,17 @@
{%\gridsnappingtrue
\begincsname\??gridsnappers#1\endcsname}
+% maybe:
+%
+% \def\spac_grids_snap_value_set#1%
+% {%\gridsnappingtrue
+% \ifcsname\??gridsnappers#1\endcsname
+% \lastnamedcs
+% \else
+% \definegridsnapping[#1][#1]%
+% \begincsname\??gridsnappers#1\endcsname
+% \fi}
+
\def\spac_grids_snap_value_auto#1%
{\ifcsname\??gridsnappers#1\endcsname
\lastnamedcs
@@ -1545,35 +1586,41 @@
% min centers a box rounded downwards
% max centers a box rounded upwards
-%D We're not downward compatible with \MKII !
+%D We're not downward compatible with \MKII ! Not yet in interface file:
+
+\definegridsnapping[\v!normal] [\v!maxheight,\v!maxdepth,\v!strut]
+\definegridsnapping[\v!standard] [\v!maxheight,\v!maxdepth,\v!strut]
+\definegridsnapping[\v!yes] [\v!maxheight,\v!maxdepth,\v!strut]
-\definegridsnapping[\v!normal] [\v!maxheight,\v!maxdepth,\v!strut]
-\definegridsnapping[\v!standard] [\v!maxheight,\v!maxdepth,\v!strut]
-\definegridsnapping[\v!yes] [\v!maxheight,\v!maxdepth,\v!strut]
+\definegridsnapping[\v!strict] [\v!maxdepth:0.8,\v!maxheight:0.8,\v!strut]
+\definegridsnapping[\v!tolerant] [\v!maxdepth:1.2,\v!maxheight:1.2,\v!strut]
+\definegridsnapping[\v!verytolerant] [\v!maxdepth:1.4,\v!maxheight:1.4,\v!strut]
-\definegridsnapping[\v!strict] [\v!maxdepth:0.8,\v!maxheight:0.8,\v!strut]
-\definegridsnapping[\v!tolerant] [\v!maxdepth:1.2,\v!maxheight:1.2,\v!strut]
+\definegridsnapping[\v!tolerant:10] [\v!maxdepth:1.1,\v!maxheight:1.1,\v!strut] % 10 pct tolerance
+\definegridsnapping[\v!tolerant:20] [\v!maxdepth:1.2,\v!maxheight:1.2,\v!strut] % 20 pct tolerance
+\definegridsnapping[\v!tolerant:30] [\v!maxdepth:1.3,\v!maxheight:1.3,\v!strut] % 30 pct tolerance
+\definegridsnapping[\v!tolerant:40] [\v!maxdepth:1.4,\v!maxheight:1.4,\v!strut] % 40 pct tolerance
-\definegridsnapping[\v!top] [\v!minheight,\v!maxdepth,\v!strut]
-\definegridsnapping[\v!bottom] [\v!maxheight,\v!mindepth,\v!strut]
-\definegridsnapping[\v!both] [\v!minheight,\v!mindepth,\v!strut]
+\definegridsnapping[\v!top] [\v!minheight,\v!maxdepth,\v!strut]
+\definegridsnapping[\v!bottom] [\v!maxheight,\v!mindepth,\v!strut]
+\definegridsnapping[\v!both] [\v!minheight,\v!mindepth,\v!strut]
-\definegridsnapping[\v!broad] [\v!maxheight,\v!maxdepth,\v!strut,0.8] % maybe 0.85
-\definegridsnapping[\v!fit] [\v!maxheight,\v!maxdepth,\v!strut,1.2] % tight 0.15
+\definegridsnapping[\v!broad] [\v!maxheight,\v!maxdepth,\v!strut,0.8] % maybe 0.85
+\definegridsnapping[\v!fit] [\v!maxheight,\v!maxdepth,\v!strut,1.2] % tight 0.15
-\definegridsnapping[\v!first] [\v!first]
-\definegridsnapping[\v!last] [\v!last]
-\definegridsnapping[\v!high] [\v!minheight,\v!maxdepth,\v!none]
-\definegridsnapping[\v!one] [\v!minheight,\v!mindepth]
-\definegridsnapping[\v!low] [\v!maxheight,\v!mindepth,\v!none]
-\definegridsnapping[\v!none] [\v!none]
-\definegridsnapping[\v!line] [\v!line]
-\definegridsnapping[\v!strut] [\v!strut]
-\definegridsnapping[\v!box] [\v!box]
-\definegridsnapping[\v!min] [\v!min]
-\definegridsnapping[\v!max] [\v!max]
+\definegridsnapping[\v!first] [\v!first]
+\definegridsnapping[\v!last] [\v!last]
+\definegridsnapping[\v!high] [\v!minheight,\v!maxdepth,\v!none]
+\definegridsnapping[\v!one] [\v!minheight,\v!mindepth]
+\definegridsnapping[\v!low] [\v!maxheight,\v!mindepth,\v!none]
+\definegridsnapping[\v!none] [\v!none]
+\definegridsnapping[\v!line] [\v!line]
+\definegridsnapping[\v!strut] [\v!strut]
+\definegridsnapping[\v!box] [\v!box]
+\definegridsnapping[\v!min] [\v!min]
+\definegridsnapping[\v!max] [\v!max]
-\definegridsnapping[\v!middle] [\v!maxheight,\v!maxdepth] % used in placement
+\definegridsnapping[\v!middle] [\v!maxheight,\v!maxdepth] % used in placement
\definegridsnapping[\v!math] [\v!maxdepth:1.05,\v!maxheight:1.05,\v!strut] % experimental, maybe 1.1
\definegridsnapping[\v!math:\v!line] [\v!math,\v!line,\v!split]
@@ -1615,8 +1662,6 @@
\newif\ifgridsnapping
-\let\showgridsnapping\relax
-
%unexpanded\def\moveongrid {\dosingleempty\spac_grids_move_on}
\unexpanded\def\snaptogrid {\dosingleempty\spac_grids_snap_to}
\unexpanded\def\placeongrid{\dosingleempty\spac_grids_place_on}
@@ -1656,7 +1701,7 @@
\fi
\doifelsenothing{#1}{\spac_grids_snap_value_set\v!normal}{\spac_grids_snap_value_set{#1}}%
\clf_vspacingsnap\nextbox\attribute\snapmethodattribute\relax
- \ifvbox\nextbox\vbox\else\hbox\fi attr \snapmethodattribute \zerocount {\box\nextbox}% pack ?
+ \ifvbox\nextbox\vbox\else\hbox\fi attr \snapmethodattribute \zerocount {\box\nextbox}% *pack ?
\egroup}
\def\spac_grids_check_nop
@@ -1911,6 +1956,7 @@
\newconditional\c_space_vspacing_done
\newconditional\c_space_vspacing_fixed
+\newconditional\c_space_ignore_parskip
\appendtoks
\s_spac_vspacing_temp\zeropoint
@@ -1933,6 +1979,9 @@
\relax
\to \everyafterblankhandling
+\unexpanded\def\setblankpacked
+ {\settrue\c_space_ignore_parskip}
+
\unexpanded\def\setblankcategory#1%
{\settrue\c_space_vspacing_done
\attribute\skipcategoryattribute#1\relax}
@@ -1971,6 +2020,7 @@
\def\dostartblankhandling
{\begingroup
\setfalse\c_space_vspacing_done
+ \setfalse\c_space_ignore_parskip
\the\everybeforeblankhandling}
\unexpanded\def\stopblankhandling
@@ -1978,7 +2028,11 @@
\ifconditional\c_space_vspacing_done
\vskip\s_spac_vspacing_temp
\fi
- \endgroup}
+ \ifconditional\c_space_ignore_parskip
+ \endgroup\ignoreparskip
+ \else
+ \endgroup
+ \fi}
\unexpanded\def\flushblankhandling
{\the\everyafterblankhandling
@@ -2117,10 +2171,6 @@
\definevspacingamount[\v!height] [\strutht] [\bodyfontstrutheight]
\definevspacingamount[\v!depth] [\strutdp] [\bodyfontstrutdepth]
-\definevspacingamount[-\v!line] [-\openlineheight] [-\bodyfontlineheight]
-\definevspacingamount[-\v!halfline] [-.5\openlineheight] [-.5\bodyfontlineheight]
-\definevspacingamount[-\v!quarterline][-.25\openlineheight][-.25\bodyfontlineheight]
-
\definevspacingamount[\v!standard] [.75\openlineheight] [.75\openlineheight] % mkii compatible
\def\bodyfontwhitespace
@@ -2169,23 +2219,35 @@
\definevspacing[\v!disable] [category:5]
\definevspacing[\v!nowhite] [category:6]
\definevspacing[\v!back] [category:7]
-% together [category:8]
+\definevspacing[\v!packed] [category:8] % noparskip (kind of special)
\definevspacing[\v!overlay] [category:9]
\definevspacing[\v!enable] [category:10]
+%definevspacing[\v!noparskip] [category:8]
+%definevspacing[\v!notopskip] [category:11]
+
\definevspacing[\v!weak] [order:0]
\definevspacing[\v!strong] [order:100]
\definevspacing[\s!default] [\v!white] % was big for a while
-% \dorecurse{10} % todo: other values < 4000
-% {\normalexpanded{\definevspacing[\v!samepage-\recurselevel][penalty:\the\numexpr4000+250*\recurselevel\relax]}}
-
\newcount\c_spac_vspacing_special_base \c_spac_vspacing_special_base = 32250 % 4000
\newcount\c_spac_vspacing_special_step \c_spac_vspacing_special_step = 10 % 250
-\dorecurse{10}
- {\normalexpanded{\definevspacing[\v!samepage-\recurselevel][penalty:\the\numexpr\c_spac_vspacing_special_base+\c_spac_vspacing_special_step*\recurselevel\relax]}}
+\unexpanded\def\spac_vspacing_define_same_page#1%
+ {\normalexpanded{\definevspacing
+ [\v!samepage-#1]%
+ [penalty:\the\numexpr\c_spac_vspacing_special_base+\c_spac_vspacing_special_step*#1\relax]}}
+
+\dorecurse{20}{\spac_vspacing_define_same_page\recurselevel}
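+% i.e. samepage-1 ... samepage-20 get penalties 32260 ... 32450 (base 32250, step 10)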
+
+\ifdefined\everydefinesection
+ \appendtoks
+ \spac_vspacing_define_same_page\maxstructuredepth
+ \to \everydefinesection
+\else
+ % the order has changed so this code has to move to strf-def.mkiv then
+\fi
\definevspacing[\v!default] [\v!big] % todo: needs to adapt to \setupblank
\definevspacing[\v!before] [\v!default] % but we need to avoid circular references
@@ -2454,20 +2516,74 @@
\prevdepth\zeropoint
-% not ok, so we need to figure out another way to fix this messy prevdepth-across-page issue
-% as encountered in forced blank skips (see lua code)
-%
-% \appendtoks
-% \ifvmode\clf_resetprevdepth\fi
-% \to \everyafteroutput
-%
-% this should only happen when there is nothing left over (how to detemine that) .. testcase:
-%
-% \dorecurse{41}{line\par}
-% \starttyping
-% line 1
-% line 2
-% line 3
-% \stoptyping
+%D Helper:
+
+\unexpanded\def\checkedblank[#1]%
+ {\edef\p_blank{#1}%
+ \ifx\p_blank\empty
+ % ignore
+ \else\ifx\p_blank\v!none
+ % ignore
+ \else
+ \blank[\p_blank]%
+ \fi\fi}
+
+% \unexpanded\def\applycheckedblankwith#1#2#3%
+% {\edef\p_blank{#2}%
+% \ifx\p_blank\empty
+% % ignore
+% \else\ifx\p_blank\v!none
+% % ignore
+% \else
+% \edef\p_before{#1}%
+% \edef\p_after {#3}%
+% \blank[\ifx\p_before\empty\else\p_before,\fi\p_blank\ifx\p_after\empty\else,\p_after\fi]%
+% \fi\fi}
+
+% \setupwhitespace[line]
+% \prerollblank[2*line] \the\prerolledblank
+% \prerollblank[-2*line] \the\prerolledblank
+
+\newskip\prerolledblank
+
+\unexpanded\def\prerollblank[#1]%
+ {\begingroup
+ \edef\p_blank{#1}%
+ \ifx\p_blank\empty
+ \global\prerolledblank\zeropoint
+ \else\ifx\p_blank\v!none
+ \global\prerolledblank\zeropoint
+ \else
+ % don't mess with \parskip here!
+ \scratchskip\plusten\lineheight
+ \setbox\scratchbox\vbox
+ {\vskip\scratchskip
+ \kern\zeropoint
+ \blank[\p_blank]}%
+ % \dimexpr doesn't work well with skips
+ \advance\scratchskip-\ht\scratchbox
+ \global\prerolledblank-\scratchskip
+ \fi\fi
+ \endgroup}
+
+\newcount\c_spac_vspacing_ignore_parskip
+
+% \setupwhitespace[line]
+% \setuphead[subject][after={\blank[packed]},style=\bfb]
+% \subject{foo}
+% test \par
+% test \par
+% \blank[packed] % \ignoreparskip
+% test \par
+% test \par
+% \ignoreparskip
+% test \par
+% test \par
+% \setuphead[subject][after={\blank[nowhite]},style=\bfb]
+% \subject{foo}
+% test \par
+% test \par
+
+\unexpanded\def\ignoreparskip{\c_spac_vspacing_ignore_parskip\plusone}
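+% sets the count that the lua pagehandler checks (and then clears) to zero the next parskip glue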
\protect \endinput
diff --git a/tex/context/base/mkiv/status-files.pdf b/tex/context/base/mkiv/status-files.pdf
index 56ab3aa69..1351546fd 100644
Binary files a/tex/context/base/mkiv/status-files.pdf and b/tex/context/base/mkiv/status-files.pdf differ
diff --git a/tex/context/base/mkiv/status-lua.pdf b/tex/context/base/mkiv/status-lua.pdf
index 85a976f79..15354a5ea 100644
Binary files a/tex/context/base/mkiv/status-lua.pdf and b/tex/context/base/mkiv/status-lua.pdf differ
diff --git a/tex/context/base/mkiv/status-mkiv.lua b/tex/context/base/mkiv/status-mkiv.lua
deleted file mode 100644
index ab1419c98..000000000
--- a/tex/context/base/mkiv/status-mkiv.lua
+++ /dev/null
@@ -1,7441 +0,0 @@
-return {
- ["core"]={
- {
- ["category"]="mkvi",
- ["filename"]="font-gds",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="cont-run",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="font-sel",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="grph-pat",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="grph-rul",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="lang-rep",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="luat-usr",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="math-mis",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="math-rad",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="page-cst",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="page-inj",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="page-lin",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="publ-imp-author",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="publ-imp-cite",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="publ-imp-commands",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="publ-imp-default",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="publ-imp-definitions",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="publ-imp-list",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="publ-imp-page",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="spac-flr",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="spac-prf",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="strc-not",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="toks-map",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="toks-tra",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="typo-chr",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="typo-inj",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="typo-lig",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="typo-lin",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="typo-par",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="typo-wrp",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="syst-ini",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="norm-ctx",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="syst-pln",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="syst-mes",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="luat-cod",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="maybe combine (3)",
- ["filename"]="luat-bas",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="maybe combine (3)",
- ["filename"]="luat-lib",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="catc-ini",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="forward dependency",
- ["filename"]="catc-act",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="catc-def",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="catc-ctx",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="catc-sym",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="only needed for mkii xml parser",
- ["filename"]="catc-xml",
- ["loading"]="module",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="maybe combine (1)",
- ["filename"]="cldf-ini",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="syst-aux",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="maybe combine (1)",
- ["filename"]="syst-lua",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="maybe combine (1)",
- ["filename"]="syst-con",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="maybe combine (1)",
- ["filename"]="syst-fnt",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="maybe combine (1)",
- ["filename"]="syst-rtp",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["comment"]="maybe combine (2)",
- ["filename"]="file-ini",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["comment"]="maybe combine (2)",
- ["filename"]="file-res",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="file-lib",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="supp-dir",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="char-ini",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="char-utf",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="forward dependency",
- ["filename"]="char-act",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="mult-ini",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="mult-sys",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="mult-aux",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="mult-def",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="mult-chk",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="mult-dim",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="cldf-int",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="luat-ini",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="toks-ini",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="attr-ini",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="attr-mkr",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="code might move from here",
- ["filename"]="core-ini",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="might need more redoing",
- ["filename"]="core-env",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="in due time more might move to here",
- ["filename"]="layo-ini",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="maybe this becomes a runtime module",
- ["filename"]="node-ini",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="maybe use context.generics/context.sprint here",
- ["filename"]="cldf-bas",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="might need more redoing",
- ["filename"]="node-fin",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="needs integration and configuration",
- ["filename"]="node-mig",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="typo-bld",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="typo-sus",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="node-pag",
- ["loading"]="never",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="back-ini",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="attr-col",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="attr-lay",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="attr-neg",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="attr-eff",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="needs more usage",
- ["filename"]="trac-tex",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="trac-deb",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="trac-ctx",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="supp-box",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="supp-ran",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="will be moved to the math-* modules",
- ["filename"]="supp-mat",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="will grow",
- ["filename"]="typo-ini",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="file-syn",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="file-mod",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="core-con",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="cont-fil",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="cont-nop",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="cont-yes",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="regi-ini",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="enco-ini",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="hand-ini",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="lang-ini",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="namespace should be languages",
- ["filename"]="lang-lab",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="lang-hyp",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="unic-ini",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="core-uti",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="maybe rename to core-two",
- ["filename"]="core-two",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="core-dat",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="colo-ini",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="colo-ext",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="colo-grp",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="node-bck",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="pack-cut",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="lang-mis",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="lang-url",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="lang-def",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="lang-hyp",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="lang-frq",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="lang-frd",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="lang-wrd",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["comment"]="might need more redoing",
- ["filename"]="file-job",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="symb-ini",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="sort-ini",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="pack-mis",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="pack-rul",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="endpar experimental code",
- ["filename"]="pack-mrl",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="pack-bck",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="pack-fen",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="lxml-ini",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="lxml-sor",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="typo-prc",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="strc-ini",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="strc-tag",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="this module might go away when code has been moved",
- ["filename"]="strc-doc",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="still some rough edges",
- ["filename"]="strc-num",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="strc-mar",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="strc-sbe",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="strc-lst",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="some of the local current and synchronization macros will be renamed",
- ["filename"]="strc-sec",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="strc-pag",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="(support for) setups might get improved",
- ["filename"]="strc-ren",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="this module might go away",
- ["filename"]="strc-xml",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="strc-def",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["comment"]="some more low level names might change",
- ["filename"]="strc-ref",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="use setups for rendering",
- ["filename"]="strc-reg",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="strc-lev",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="maybe some tuning is needed / will happen",
- ["filename"]="spac-ali",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="probably needs some more work",
- ["filename"]="spac-hor",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="maybe some changes will happen",
- ["filename"]="spac-ver",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="could be improved if needed",
- ["filename"]="spac-lin",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="this needs to be checked occasionally",
- ["filename"]="spac-pag",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="spac-par",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="spac-def",
- ["loading"]="always",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["comment"]="needs thinking and redoing",
- ["filename"]="spac-grd",
- ["loading"]="always",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["comment"]="in transition",
- ["filename"]="anch-pos",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["comment"]="maybe change locationattribute names",
- ["filename"]="scrn-ini",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["comment"]="namespace needs checking",
- ["filename"]="scrn-ref",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="will change when we have objects at lua end",
- ["filename"]="pack-obj",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="strc-itm",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["comment"]="maybe more common counter code here and setups need to be improved",
- ["filename"]="strc-con",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="strc-des",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["comment"]="(interactive) coupling is not yet working",
- ["filename"]="strc-enu",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="strc-ind",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="needs to be adapted when strc-con/des/enu changes",
- ["filename"]="strc-lab",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="strc-syn",
- ["loading"]="always",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["comment"]="a funny mix",
- ["filename"]="core-sys",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="page-var",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="page-otr",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="code might end up elsewhere",
- ["filename"]="page-ini",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="dealing with insertions might change",
- ["filename"]="page-ins",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="page-fac",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="otr commands will be redone",
- ["filename"]="page-brk",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="helpers for columns",
- ["filename"]="page-col",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="room for improvement and extension",
- ["filename"]="page-inf",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="page-grd",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="will be extended when columns are redone",
- ["filename"]="page-flt",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="page-bck",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="page-not",
- ["loading"]="always",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["comment"]="can probably be improved",
- ["filename"]="page-one",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="page-lay",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="page-box",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["comment"]="a few things left",
- ["filename"]="page-txt",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="page-sid",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["comment"]="in due time we need a further cleanup",
- ["filename"]="strc-flt",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="page-pst",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["comment"]="might be extended",
- ["filename"]="page-mbk",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="will be reimplemented",
- ["filename"]="page-mul",
- ["loading"]="always",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["comment"]="work in progress",
- ["filename"]="page-mix",
- ["loading"]="always",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["comment"]="will be reimplemented",
- ["filename"]="page-set",
- ["loading"]="always",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="pack-lyr",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="pack-pos",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="page-mak",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="might get extended",
- ["filename"]="page-par",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="typo-pag",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="typo-mar",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="typo-itm",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="check other modules for buffer usage",
- ["filename"]="buff-ini",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="check obsolete processbuffer",
- ["filename"]="buff-ver",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["comment"]="experimental code",
- ["filename"]="buff-par",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="symb-imp-cc",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="buff-imp-default",
- ["loading"]="indirect",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="buff-imp-escaped",
- ["loading"]="indirect",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="buff-imp-nested",
- ["loading"]="indirect",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="strc-blk",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="page-imp",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["comment"]="maybe some extensions and delayed loading, needs checking",
- ["filename"]="page-sel",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="page-com",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["comment"]="namespace needs checking",
- ["filename"]="scrn-pag",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["comment"]="functionality needs checking",
- ["filename"]="scrn-wid",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["comment"]="namespace needs checking",
- ["filename"]="scrn-but",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["comment"]="namespace needs checking",
- ["filename"]="scrn-bar",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="strc-bkm",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="typo-tal",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="somewhat weird",
- ["filename"]="tabl-com",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="unchecked",
- ["filename"]="tabl-pln",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="tabl-tab",
- ["loading"]="always",
- ["status"]="pending",
- },
- {
- ["category"]="mkiv",
- ["comment"]="can probably be improved (names and such)",
- ["filename"]="tabl-tbl",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="frozen functionaly so no drastic cleanup",
- ["filename"]="tabl-ntb",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="tabl-mis",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="tabl-nte",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="will be redone when needed",
- ["filename"]="tabl-ltb",
- ["loading"]="always",
- ["status"]="pending",
- },
- {
- ["category"]="mkiv",
- ["comment"]="will be adapted when needed (and rest is done)",
- ["filename"]="tabl-tsp",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["comment"]="namespace needs checking",
- ["filename"]="tabl-xtb",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["comment"]="only when natural tables need a replacement",
- ["filename"]="tabl-xnt",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="java-ini",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["comment"]="namespace needs checking",
- ["filename"]="scrn-fld",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["comment"]="namespace needs checking",
- ["filename"]="scrn-hlp",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="char-enc",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="font-lib",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="font-fil",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="font-var",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="font-fea",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="font-mat",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["comment"]="needs occasional checking and upgrading",
- ["filename"]="font-ini",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="font-sym",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="font-sty",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="font-set",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="font-emp",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="font-col",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="font-pre",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="font-unk",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="likely this will become a module",
- ["filename"]="font-tra",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="this could become a module",
- ["filename"]="font-chk",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="this one might be merged",
- ["filename"]="font-uni",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="font-col",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="font-aux",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="typo-lan",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="this is work in progress",
- ["filename"]="lxml-css",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="spac-chr",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="work in progress",
- ["filename"]="blob-ini",
- ["loading"]="always",
- ["status"]="pending",
- },
- {
- ["category"]="mkiv",
- ["filename"]="trac-jus",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="trac-vis",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="typo-cln",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="typo-spa",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="do we keep the style and color or not",
- ["filename"]="typo-krn",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="typo-itc",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="maybe singular setup",
- ["filename"]="typo-dir",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="typo-brk",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="typo-cap",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="typo-dig",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="typo-rep",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["comment"]="maybe there will be a nicer interface",
- ["filename"]="typo-txt",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="typo-drp",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="typo-fln",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="type-ini",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-set",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-def",
- ["loading"]="type-set",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-fbk",
- ["loading"]="type-set",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-lua",
- ["loading"]="type-set",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-one",
- ["loading"]="type-set",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-otf",
- ["loading"]="type-set",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-siz",
- ["loading"]="type-set",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="placeholder to prevent other loading",
- ["filename"]="type-tmf",
- ["loading"]="never",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="scrp-ini",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="this module is obsolete",
- ["filename"]="prop-ini",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="mlib-ctx",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="metapost code is always evolving",
- ["filename"]="meta-ini",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="code used in a project",
- ["filename"]="meta-lua",
- ["loading"]="experimental",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="meta-fnt",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="meta-tex",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="maybe this one will be merged",
- ["filename"]="meta-fun",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="might get updated when mp code gets cleaned up",
- ["filename"]="meta-pag",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="meta-grd",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="page-mrk",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="page-flw",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="page-spr",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="will be made better",
- ["filename"]="page-plg",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="needs more work (and thinking)",
- ["filename"]="page-str",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="in transition",
- ["filename"]="anch-pgr",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["comment"]="in transition",
- ["filename"]="anch-bck",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="will stay experimental for a while",
- ["filename"]="anch-tab",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="anch-bar",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="interesting old mechanism to keep around (module?)",
- ["filename"]="anch-snc",
- ["loading"]="always",
- ["status"]="pending",
- },
- {
- ["category"]="mkiv",
- ["filename"]="math-ini",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="this file might merge into others",
- ["filename"]="math-pln",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="math-for",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="eventually this will be split and spread",
- ["filename"]="math-def",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="will be checked and improved",
- ["filename"]="math-ali",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="needs testing",
- ["filename"]="math-arr",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="math-stc",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="math-acc",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="at least for the moment",
- ["filename"]="math-frc",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="math-scr",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="math-int",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="code get replaced (by autodelimiters)",
- ["filename"]="math-del",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="math-fen",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="code might move to here",
- ["filename"]="math-inl",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="code might move to here",
- ["filename"]="math-dis",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="phys-dim",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="some more functionality will end up here",
- ["filename"]="strc-mat",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="chem-ini",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="chem-str",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="typo-scr",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="maybe some cleanup is needed",
- ["filename"]="node-rul",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["comment"]="needs testing",
- ["filename"]="font-sol",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="strc-not",
- ["loading"]="always",
- ["status"]="todo",
- },
- {
- ["category"]="mkvi",
- ["comment"]="will be extended as part of crited",
- ["filename"]="strc-lnt",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="pack-com",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="typo-del",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="grph-trf",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="grph-inc",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="grph-fig",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="grph-raw",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="pack-box",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="pack-bar",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="page-app",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="meta-fig",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="more or less obsolete",
- ["filename"]="lang-spa",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="bibl-bib",
- ["loading"]="on demand",
- ["status"]="pending",
- },
- {
- ["category"]="mkiv",
- ["filename"]="bibl-tra",
- ["loading"]="on demand",
- ["status"]="pending",
- },
- {
- ["category"]="mkiv",
- ["comment"]="not needed",
- ["filename"]="meta-xml",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="cont-log",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="task-ini",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="cldf-ver",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="cldf-com",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="core-ctx",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="will always be messy",
- ["filename"]="core-def",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="object related code might move or change",
- ["filename"]="back-pdf",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="no code, just an example of usage",
- ["filename"]="back-swf",
- ["loading"]="never",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="no code, just an example of usage",
- ["filename"]="back-u3d",
- ["loading"]="never",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="mlib-pdf",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="mlib-pps",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="meta-pdf",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="might need more work",
- ["filename"]="grph-epd",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="some parameters might move from export to backend",
- ["filename"]="back-exp",
- ["loading"]="always",
- ["status"]="okay",
- },
- },
- ["extras"]={
- {
- ["category"]="tex",
- ["comment"]="add-on for mtx-context",
- ["filename"]="mtx-context-arrange",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="tex",
- ["comment"]="add-on for mtx-context",
- ["filename"]="mtx-context-combine",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="tex",
- ["comment"]="add-on for mtx-context",
- ["filename"]="mtx-context-common",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="tex",
- ["comment"]="add-on for mtx-context",
- ["filename"]="mtx-context-ideas",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="tex",
- ["comment"]="add-on for mtx-context",
- ["filename"]="mtx-context-listing",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="tex",
- ["comment"]="add-on for mtx-context",
- ["filename"]="mtx-context-markdown",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="tex",
- ["comment"]="add-on for mtx-context",
- ["filename"]="mtx-context-select",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="tex",
- ["comment"]="add-on for mtx-context",
- ["filename"]="mtx-context-timing",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- },
- ["implementations"]={
- {
- ["category"]="mkiv",
- ["filename"]="symb-imp-fontawesome",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-imp-ebgaramond",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-imp-gentium",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-imp-ipaex",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-imp-lato",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-imp-libertinus",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-imp-mathdigits",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-imp-minion",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-imp-opendyslexic",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-imp-source",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="buff-imp-tex",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="buff-imp-mp",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="buff-imp-lua",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="buff-imp-xml",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="buff-imp-parsed-xml",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="meta-imp-grid",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="meta-imp-mat",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="meta-imp-outlines",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="meta-imp-tab",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="publ-imp-apa",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="publ-imp-aps",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="colo-imp-crayola",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="colo-imp-rainbow",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="colo-imp-ral",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="colo-imp-dem",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="colo-imp-ema",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="colo-imp-rgb",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="colo-imp-x11",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="colo-imp-xwi",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="java-imp-exa",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="java-imp-fil",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="java-imp-fld",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="java-imp-rhh",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="java-imp-stp",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="meta-imp-clp",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="meta-imp-dum",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="meta-imp-fen",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="meta-imp-mis",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="meta-imp-nav",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="meta-imp-pre",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="meta-imp-txt",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="symb-imp-cow",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="symb-imp-eur",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="symb-imp-jmn",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="symb-imp-mis",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="symb-imp-mvs",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="symb-imp-nav",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-imp-antykwa",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-imp-antykwapoltawskiego",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-imp-asana",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-imp-averia",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-imp-buy",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-imp-cambria",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-imp-charter",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-imp-cleartype",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-imp-computer-modern-unicode",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-imp-cow",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-imp-dejavu",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-imp-euler",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-imp-ghz",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-imp-hgz",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-imp-husayni",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-imp-hvmath",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-imp-inconsolata",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-imp-informal",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-imp-iwona",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-imp-kurier",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-imp-latinmodern",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-imp-liberation",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-imp-libertine",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-imp-lmnames",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-imp-lucida-opentype",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-imp-lucida-typeone",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-imp-mathdesign",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-imp-mathtimes",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-imp-mscore",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-imp-osx",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-imp-postscript",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-imp-punknova",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-imp-texgyre",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-imp-unfonts",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-imp-xits",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="type-imp-xitsbidi",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="publ-ini",
- ["loading"]="always",
- ["status"]="pending",
- },
- {
- ["category"]="mkiv",
- ["filename"]="publ-old",
- ["loading"]="always",
- ["status"]="pending",
- },
- {
- ["category"]="mkiv",
- ["filename"]="publ-tra",
- ["loading"]="always",
- ["status"]="pending",
- },
- {
- ["category"]="mkiv",
- ["filename"]="publ-usr",
- ["loading"]="always",
- ["status"]="pending",
- },
- {
- ["category"]="mkiv",
- ["filename"]="publ-xml",
- ["loading"]="always",
- ["status"]="pending",
- },
- },
- ["lua"]={
- {
- ["category"]="lua",
- ["filename"]="anch-pgr",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="bibl-tst",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="char-fio",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="cldf-prs",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="cldf-scn",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="cldf-stp",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="cont-run",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-cff",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-cft",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-dsp",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-gbn",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-hsh",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-mps",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-nod",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-ocl",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-odk",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-odv",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-off",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-one",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-onr",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-osd",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-otj",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-otl",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-oto",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-otr",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-ots",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-oup",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-sel",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-shp",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-ttf",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-web",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-xtx",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="good-ctx",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="good-gen",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="good-ini",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="good-mth",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="grph-con",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="grph-mem",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="grph-pat",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="grph-rul",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="l-gzip",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="l-lua",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="l-package",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="l-sandbox",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-cnt",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-frq-de",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-frq-en",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-frq-nl",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-frq-pt",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-rep",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="luat-usr",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lxml-ini",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="math-dir",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="mlib-int",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="mlib-lua",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="node-ltp",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="node-scn",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="node-met",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="node-nut",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="page-cst",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="page-inj",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="publ-imp-apa",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="publ-imp-aps",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="publ-imp-default",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="publ-imp-replacements",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="publ-jrn",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="publ-reg",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="publ-sor",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="regi-ibm",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="regi-pdfdoc",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="scrp-tha",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="spac-prf",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="toks-map",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="toks-tra",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="trac-par",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="typo-chr",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="typo-duc",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="typo-inj",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="typo-lin",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="typo-tal",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="typo-wrp",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="util-fil",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="util-lib-imp-gm",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="util-lib-imp-gs",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="util-ran",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="util-sac",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="util-sbx",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="util-sci",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="util-soc",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="util-sql-imp-client",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="util-sql-imp-library",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="util-sql-imp-sqlite",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="util-sql-imp-swiglib",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="util-sql-loggers",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="util-sql-sessions",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="util-sql-tickets",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="util-sql-tracers",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="util-sql-users",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="util-you",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="m-escrito",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="s-characters-properties",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="s-languages-words",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="s-xml-analyzers",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="x-math-svg",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="anch-pos",
- ["loading"]="anch-pos",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="attr-col",
- ["loading"]="attr-col",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="attr-eff",
- ["loading"]="attr-eff",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="attr-ini",
- ["loading"]="attr-ini",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="attr-lay",
- ["loading"]="attr-lay",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="attr-neg",
- ["loading"]="attr-neg",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="attr-mkr",
- ["loading"]="attr-mkr",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="experimental code, maybe some will move elsewhere",
- ["filename"]="back-exp",
- ["loading"]="back-exp",
- ["status"]="pending",
- },
- {
- ["category"]="lua",
- ["filename"]="back-ini",
- ["loading"]="back-ini",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="back-pdf",
- ["loading"]="back-pdf",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="bibl-bib",
- ["loading"]="on demand",
- },
- {
- ["category"]="lua",
- ["filename"]="bibl-tra",
- ["loading"]="on demand",
- },
- {
- ["category"]="lua",
- ["filename"]="blob-ini",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="buff-imp-default",
- ["loading"]="buff-imp-default",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="buff-imp-escaped",
- ["loading"]="buff-imp-escaped",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="todo: colorization and nesting as in scite",
- ["filename"]="buff-imp-lua",
- ["loading"]="buff-imp-lua",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="todo: colorization and nesting as in scite",
- ["filename"]="buff-imp-mp",
- ["loading"]="buff-imp-mp",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="buff-imp-nested",
- ["loading"]="buff-imp-nested",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="buff-imp-parsed-xml",
- ["loading"]="buff-imp-parsed-xml",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="todo: colorization and nesting as in scite",
- ["filename"]="buff-imp-tex",
- ["loading"]="buff-imp-tex",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="todo: colorization and nesting as in scite",
- ["filename"]="buff-imp-xml",
- ["loading"]="buff-imp-xml",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="buff-ini",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="buff-par",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="maybe we will provide a few more (nesting) methods",
- ["filename"]="buff-ver",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="catc-ini",
- ["loading"]="catc-ini",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="char-cjk",
- ["loading"]="char-ini",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="char-def",
- ["loading"]="char-ini",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="maybe dataonly",
- ["filename"]="char-enc",
- ["loading"]="char-enc",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="char-ent",
- ["loading"]="char-ent",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="maybe move blocks table to separate (dataonly) file",
- ["filename"]="char-ini",
- ["loading"]="char-ini",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="char-map",
- ["loading"]="char-ini",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="char-tex",
- ["loading"]="char-ini",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="char-utf",
- ["loading"]="char-utf",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="chem-ini",
- ["loading"]="chem-ini",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="chem-str",
- ["loading"]="chem-str",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="will be extended and can be optimized if needed",
- ["filename"]="cldf-bas",
- ["loading"]="cldf-bas",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="might change or even go away",
- ["filename"]="cldf-com",
- ["loading"]="cldf-com",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="cldf-ini",
- ["loading"]="cldf-ini",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="cldf-int",
- ["loading"]="cldf-int",
- ["status"]="pending",
- },
- {
- ["category"]="lua",
- ["comment"]="maybe this code can be redone more efficiently/robust",
- ["filename"]="cldf-ver",
- ["loading"]="cldf-ver",
- ["status"]="pending",
- },
- {
- ["category"]="lua",
- ["comment"]="also used in mtx-*",
- ["filename"]="colo-icc",
- ["loading"]="colo-ini",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="colo-ini",
- ["loading"]="colo-ini",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="this code might move to a module",
- ["filename"]="colo-run",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="core-con",
- ["loading"]="core-con",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="core-ctx",
- ["loading"]="core-ctx",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="core-dat",
- ["loading"]="core-dat",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="maybe abusing the tex namespace is wrong",
- ["filename"]="core-env",
- ["loading"]="core-env",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="core-sys",
- ["loading"]="core-sys",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["commands"]="this is in fact replaced by core-dat",
- ["filename"]="core-two",
- ["loading"]="core-two",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="some code will move to better places",
- ["filename"]="core-uti",
- ["loading"]="core-uti",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="data-aux",
- ["loading"]="luat-lib",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="data-bin",
- ["loading"]="luat-lib",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="data-con",
- ["loading"]="luat-lib",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="data-crl",
- ["loading"]="never",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="data-ctx",
- ["loading"]="luat-lib",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="data-env",
- ["loading"]="luat-lib",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="data-exp",
- ["loading"]="luat-lib",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="data-fil",
- ["loading"]="luat-lib",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="data-gen",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="data-ini",
- ["loading"]="luat-lib",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="data-inp",
- ["loading"]="luat-lib",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="data-lst",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="data-lua",
- ["loading"]="luat-lib",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="data-met",
- ["loading"]="luat-lib",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="data-out",
- ["loading"]="luat-lib",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="data-pre",
- ["loading"]="luat-lib",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="data-res",
- ["loading"]="luat-lib",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="data-sch",
- ["loading"]="luat-lib",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="data-tex",
- ["loading"]="luat-lib",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="data-tmf",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="data-tmp",
- ["loading"]="luat-lib",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="data-tre",
- ["loading"]="luat-lib",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="data-use",
- ["loading"]="luat-lib",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="data-vir",
- ["loading"]="luat-lib",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="data-zip",
- ["loading"]="luat-lib",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="file-ini",
- ["loading"]="file-ini",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="file-job",
- ["loading"]="file-job",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="file-lib",
- ["loading"]="file-lib",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="file-mod",
- ["loading"]="file-mod",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="file-res",
- ["loading"]="file-res",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="file-syn",
- ["loading"]="file-syn",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-afm",
- ["loading"]="font-lib",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-afk",
- ["loading"]="font-lib",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="only used in luatex-fonts",
- ["filename"]="font-age",
- ["loading"]="never",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-agl",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="needs some documentation in usage",
- ["filename"]="font-aux",
- ["loading"]="font-lib",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="move more to the commands namespace",
- ["filename"]="font-chk",
- ["loading"]="font-chk",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-cid",
- ["loading"]="font-lib",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-col",
- ["loading"]="font-col",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-con",
- ["loading"]="font-lib",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="will be improved over time",
- ["filename"]="font-ctx",
- ["loading"]="font-lib",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-def",
- ["loading"]="font-lib",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="part of this code is obsolete",
- ["filename"]="font-enc",
- ["loading"]="font-lib",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="needs documentation at the tex end",
- ["filename"]="font-enh",
- ["loading"]="font-lib",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="maybe some data tables can be be external",
- ["filename"]="font-ext",
- ["loading"]="font-lib",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="okay but can be improved",
- ["filename"]="font-fbk",
- ["loading"]="font-lib",
- ["status"]="pending",
- },
- {
- ["category"]="lua",
- ["filename"]="font-ini",
- ["loading"]="font-lib",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-inj",
- ["loading"]="font-lib",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-ldr",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-log",
- ["loading"]="font-lib",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="currently rather minimalistic",
- ["filename"]="font-lua",
- ["loading"]="font-lib",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="the lum file support will be dropped / no map files anyway",
- ["filename"]="font-map",
- ["loading"]="font-lib",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-mis",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="when more scripts are supported we might end up with imp files",
- ["filename"]="font-ota",
- ["loading"]="font-lib",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-otb",
- ["loading"]="font-lib",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-otc",
- ["loading"]="font-lib",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-otd",
- ["loading"]="font-lib",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-otf",
- ["loading"]="font-lib",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-oth",
- ["loading"]="font-lib",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-oti",
- ["loading"]="font-lib",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-otn",
- ["loading"]="font-lib",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-otp",
- ["loading"]="font-lib",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-ott",
- ["loading"]="font-lib",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="is mostly replaced by lfg files",
- ["filename"]="font-pat",
- ["loading"]="font-lib",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-sol",
- ["loading"]="font-sol",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="also loaded on demand",
- ["filename"]="font-syn",
- ["loading"]="font-lib",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-tfm",
- ["loading"]="font-lib",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-trt",
- ["loading"]="font-lib",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="font-vf",
- ["loading"]="font-lib",
- ["status"]="pending",
- },
- {
- ["category"]="lua",
- ["filename"]="grph-epd",
- ["loading"]="grph-epd",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="grph-fil",
- ["loading"]="grph-inc",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="grph-inc",
- ["loading"]="grph-inc",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="grph-raw",
- ["loading"]="grph-raw",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="grph-swf",
- ["loading"]="grph-swf",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="grph-u3d",
- ["loading"]="grph-u3d",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="experiment with graphic magick library",
- ["filename"]="grph-wnd",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="java-ini",
- ["loading"]="java-ini",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="l-boolean",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="l-dir",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="l-file",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="l-function",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="l-io",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="l-lpeg",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="l-math",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="l-md5",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="l-number",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="l-os",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="l-pdfview",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="l-set",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="l-string",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="l-table",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="l-unicode",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="l-url",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="l-xml",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-def",
- ["loading"]="lang-def",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-dis",
- ["loading"]="lang-ini",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-hyp",
- ["loading"]="lang-hyp",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-ini",
- ["loading"]="lang-ini",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-lab",
- ["loading"]="lang-lab",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-hyp",
- ["loading"]="lang-hyp",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-txt",
- ["loading"]="lang-lab",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="maybe another approach is nicer",
- ["filename"]="lang-url",
- ["loading"]="lang-url",
- ["status"]="pending",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-wrd",
- ["loading"]="lang-wrd",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="more will end up here",
- ["filename"]="layo-ini",
- ["loading"]="layo-ini",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lpdf-ano",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="lpdf-res",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="lpdf-col",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="lpdf-enc",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="lpdf-epa",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="lpdf-epd",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="lpdf-fld",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="lpdf-fmt",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="lpdf-grp",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="lpdf-ini",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="lpdf-mis",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="lpdf-mov",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="lpdf-nod",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="lpdf-ren",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="lpdf-swf",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="lpdf-tag",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="lpdf-u3d",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="lpdf-wid",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="lpdf-xmp",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["comment"]="replacement code for wd/ht/dp",
- ["filename"]="luat-bwc",
- ["loading"]="luat-lib",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="luat-cbk",
- ["loading"]="luat-lib",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="luat-cnf",
- ["loading"]="luat-lib",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="maybe some code should move",
- ["filename"]="luat-cod",
- ["loading"]="luat-cod",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="luat-env",
- ["loading"]="luat-lib",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="luat-exe",
- ["loading"]="luat-lib",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="luat-fio",
- ["loading"]="luat-lib",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="luat-fmt",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="will be upgraded when we have Lua 5.2",
- ["filename"]="luat-ini",
- ["loading"]="luat-lib",
- ["status"]="pending",
- },
- {
- ["category"]="lua",
- ["comment"]="will be upgraded when we have Lua 5.2",
- ["filename"]="util-env",
- ["loading"]="luat-lib",
- ["status"]="pending",
- },
- {
- ["category"]="lua",
- ["filename"]="luat-iop",
- ["loading"]="luat-lib",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="this is likely to change some day",
- ["filename"]="luat-lua",
- ["loading"]="luat-lib",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="luat-mac",
- ["loading"]="luat-lib",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="luat-run",
- ["loading"]="luat-lib",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="related to the socket code",
- ["filename"]="luat-soc",
- ["loading"]="on demand",
- ["status"]="pending",
- },
- {
- ["category"]="lua",
- ["filename"]="luat-sta",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="luat-sto",
- ["loading"]="luat-lib",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lxml-aux",
- ["loading"]="luat-lib",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="lxml-css",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="lxml-ctx",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="lxml-dir",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="lxml-ent",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="lxml-inf",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="lxml-lpt",
- ["loading"]="luat-lib",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="lxml-mis",
- ["loading"]="luat-lib",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="lxml-sor",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="lxml-tab",
- ["loading"]="luat-lib",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="lxml-tex",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="lxml-xml",
- ["loading"]="luat-lib",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="m-chart",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="m-database",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="m-nodechart",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="m-markdown",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="m-pstricks",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="m-spreadsheet",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="m-steps",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="math-act",
- ["loading"]="math-ini",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="math-frc",
- ["loading"]="math-frc",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="could be made look nicer, but who cares",
- ["filename"]="math-dim",
- ["loading"]="math-ini",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="the code is related to math-vfu",
- ["filename"]="math-ext",
- ["loading"]="math-ini",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="math-fbk",
- ["loading"]="math-ini",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="math-frc",
- ["loading"]="math-frc",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="okay, but we might have a few more low level definers some day",
- ["filename"]="math-ini",
- ["loading"]="math-ini",
- ["status"]="pending",
- },
- {
- ["category"]="lua",
- ["filename"]="math-map",
- ["loading"]="math-ini",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="math-noa",
- ["loading"]="math-ini",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="math-ren",
- ["loading"]="math-ini",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="math-tag",
- ["loading"]="math-ini",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="math-ttv",
- ["loading"]="math-ini",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="math-vfu",
- ["loading"]="math-ini",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="this is just a first version",
- ["filename"]="meta-fun",
- ["loading"]="meta-fun",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="meta-ini",
- ["loading"]="meta-ini",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="meta-lua",
- ["loading"]="meta-lua",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="meta-fnt",
- ["loading"]="meta-fnt",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="could be done nicer nowadays but who needs it",
- ["filename"]="meta-pdf",
- ["loading"]="meta-pdf",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="this is historic code that we keep around",
- ["filename"]="meta-pdh",
- ["loading"]="never",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="meta-tex",
- ["loading"]="meta-tex",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="mlib-ctx",
- ["loading"]="mlib-ctx",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="mlib-pdf",
- ["loading"]="mlib-pdf",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="mlib-pps",
- ["loading"]="mlib-pdf",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="mlib-run",
- ["loading"]="mlib-ctx",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="this is an experiment, namespaces need to be dealt with properly",
- ["filename"]="mult-aux",
- ["loading"]="mult-aux",
- ["status"]="pending",
- },
- {
- ["category"]="lua",
- ["comment"]="this is an experiment",
- ["filename"]="mult-chk",
- ["loading"]="mult-chk",
- ["status"]="pending",
- },
- {
- ["category"]="lua",
- ["filename"]="mult-def",
- ["loading"]="mult-ini",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="used for generating editor lexing files",
- ["filename"]="mult-fun",
- ["loading"]="never",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="mult-ini",
- ["loading"]="mult-ini",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="used for generating editor lexing files",
- ["filename"]="mult-low",
- ["loading"]="never",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="all messages need to be checked",
- ["filename"]="mult-mes",
- ["loading"]="mult-ini",
- ["status"]="pending",
- },
- {
- ["category"]="lua",
- ["comment"]="used for generating editor lexing files",
- ["filename"]="mult-mps",
- ["loading"]="never",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="used for generating editor lexing files",
- ["filename"]="mult-prm",
- ["loading"]="never",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="node-acc",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="node-aux",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="node-bck",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="node-dir",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="node-ext",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="node-fin",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="node-fnt",
- ["loading"]="font-lib",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="node-ini",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="node-mig",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="node-pag",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="node-ppt",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="node-pro",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="node-ref",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="node-res",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="node-rul",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="node-ser",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="node-shp",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="node-tex",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="node-tra",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="node-snp",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="node-tsk",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="node-tst",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="node-typ",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["comment"]="will be extended when we have opened up pdf objects",
- ["filename"]="pack-obj",
- ["loading"]="pack-obj",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="pack-rul",
- ["loading"]="pack-rul",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="page-otr",
- ["loading"]="page-otr",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="page-flt",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="page-ins",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="page-lin",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="page-mix",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="page-pst",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="page-str",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="phys-dim",
- ["loading"]="phys-dim",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="regi-8859-1",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="regi-8859-10",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="regi-8859-11",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="regi-8859-13",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="regi-8859-14",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="regi-8859-15",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="regi-8859-16",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="regi-8859-2",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="regi-8859-3",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="regi-8859-4",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="regi-8859-5",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="regi-8859-6",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="regi-8859-7",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="regi-8859-8",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="regi-8859-9",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="regi-cp1250",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="regi-cp1251",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="regi-cp1252",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="regi-cp1253",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="regi-cp1254",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="regi-cp1255",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="regi-cp1256",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="regi-cp1257",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="regi-cp1258",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="just a demo file",
- ["filename"]="regi-demo",
- ["loading"]="never",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="regi-ini",
- ["loading"]="regi-ini",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="s-fonts-coverage",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="s-fonts-features",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="s-fonts-missing",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="s-fonts-shapes",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="s-fonts-system",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="s-fonts-tables",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="s-fonts-vectors",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="s-languages-counters",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="s-languages-frequencies",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="s-languages-hyphenation",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="s-languages-sorting",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="s-languages-system",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="s-math-characters",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="s-math-coverage",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="s-fonts-goodies",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="s-math-parameters",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="s-pre-71",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="s-sql-tables",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="scrn-but",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="scrn-fld",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="scrn-hlp",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="scrn-ini",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="scrn-pag",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="scrn-ref",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="scrn-wid",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["comment"]="we can speed this up",
- ["filename"]="scrp-cjk",
- ["loading"]="scrp-ini",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="we can speed this up",
- ["filename"]="scrp-eth",
- ["loading"]="scrp-ini",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="scrp-ini",
- ["loading"]="scrp-ini",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="sort-ini",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="sort-lan",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="spac-adj",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="spac-ali",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="spac-chr",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="spac-hor",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="spac-ver",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="status-mkiv",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="strc-bkm",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="strc-blk",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="strc-con",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="strc-doc",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="strc-flt",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="strc-ini",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="strc-itm",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="strc-lev",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="strc-lst",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="strc-mar",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="strc-mat",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="strc-not",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="strc-num",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="strc-pag",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="strc-ref",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="strc-reg",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="strc-rsc",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="strc-syn",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="strc-tag",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="supp-box",
- ["loading"]="supp-box",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="supp-ran",
- ["loading"]="supp-ran",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="symb-ini",
- ["loading"]="symb-ini",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="there will be more in here",
- ["filename"]="syst-aux",
- ["loading"]="syst-aux",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="do some tests with speedups (sprint)",
- ["filename"]="syst-con",
- ["loading"]="syst-con",
- ["status"]="pending",
- },
- {
- ["category"]="lua",
- ["comment"]="do some tests with speedups (less tokens)",
- ["filename"]="syst-lua",
- ["loading"]="syst-lua",
- ["status"]="pending",
- },
- {
- ["category"]="lua",
- ["filename"]="tabl-tbl",
- ["loading"]="tabl-tbl",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="work in progress",
- ["filename"]="tabl-xtb",
- ["loading"]="tabl-xtb",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="we need a well defined defintion moment",
- ["filename"]="task-ini",
- ["loading"]="task-ini",
- ["status"]="pending",
- },
- {
- ["category"]="lua",
- ["filename"]="toks-ini",
- ["loading"]="toks-ini",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="toks-scn",
- ["loading"]="toks-ini",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="must be applied in more places",
- ["filename"]="trac-ctx",
- ["loading"]="trac-ctx",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="trac-deb",
- ["loading"]="trac-deb",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="for the moment somewhat private",
- ["filename"]="trac-fil",
- ["loading"]="never",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="trac-inf",
- ["loading"]="luat-lib",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="will be redone and extended",
- ["filename"]="trac-lmx",
- ["loading"]="luat-lib",
- ["status"]="pending",
- },
- {
- ["category"]="lua",
- ["filename"]="trac-log",
- ["loading"]="luat-lib",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="trac-xml",
- ["loading"]="mtxrun",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="trac-exp",
- ["loading"]="mtxrun",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="experimental code, will be redone when lua 5.2",
- ["filename"]="trac-pro",
- ["loading"]="luat-lib",
- ["status"]="pending",
- },
- {
- ["category"]="lua",
- ["comment"]="some code can better be in util-set",
- ["filename"]="trac-set",
- ["loading"]="luat-lib",
- ["status"]="pending",
- },
- {
- ["category"]="lua",
- ["filename"]="trac-tex",
- ["loading"]="trac-tex",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="trac-tim",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="trac-vis",
- ["loading"]="trac-vis",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="trac-jus",
- ["loading"]="trac-jus",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="type-ini",
- ["loading"]="type-ini",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="typo-bld",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="typo-sus",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="typo-brk",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="typo-cap",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="typo-cln",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="typo-dig",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="typo-dir",
- ["loading"]="typo-dir",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="work in progress",
- ["filename"]="typo-dha",
- ["loading"]="typo-dir",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="typo-dua",
- ["loading"]="typo-dir",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["comment"]="work in progress",
- ["filename"]="typo-dub",
- ["loading"]="typo-dir",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="typo-ini",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="typo-tal",
- ["loading"]="typo-tal",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="typo-itc",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="typo-krn",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="typo-mar",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="typo-pag",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="typo-drp",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="typo-fln",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="typo-man",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="typo-prc",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="typo-lan",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="typo-rep",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="typo-spa",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="unic-ini",
- ["loading"]="unic-ini",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="util-deb",
- ["loading"]="luat-lib",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="util-dim",
- ["loading"]="luat-lib",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="util-fmt",
- ["loading"]="luat-lib",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="util-jsn",
- ["loading"]="m-json",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="util-lua",
- ["loading"]="luat-lib",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="util-lib",
- ["loading"]="luat-lib",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="util-mrg",
- ["loading"]="luat-lib",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="util-pck",
- ["loading"]="luat-lib",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="util-prs",
- ["loading"]="luat-lib",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="util-seq",
- ["loading"]="luat-lib",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="util-sql",
- ["loading"]="m-sql",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="util-sta",
- ["loading"]="luat-lib",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="util-sto",
- ["loading"]="luat-lib",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="util-str",
- ["loading"]="luat-lib",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="util-tab",
- ["loading"]="luat-lib",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="util-tpl",
- ["loading"]="luat-lib",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="x-asciimath",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="x-calcmath",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="x-cals",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="x-chemml",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="x-ct",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="x-ldx",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="x-mathml",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="publ-ini",
- ["loading"]="publ-ini.mkiv",
- ["status"]="pending",
- },
- {
- ["category"]="lua",
- ["filename"]="publ-aut",
- ["loading"]="publ-ini.mkiv",
- ["status"]="pending",
- },
- {
- ["category"]="lua",
- ["filename"]="publ-dat",
- ["loading"]="publ-ini.mkiv",
- ["status"]="pending",
- },
- {
- ["category"]="lua",
- ["filename"]="publ-oth",
- ["loading"]="publ-ini.mkiv",
- ["status"]="pending",
- },
- {
- ["category"]="lua",
- ["filename"]="publ-fnd",
- ["loading"]="publ-ini.mkiv",
- ["status"]="pending",
- },
- {
- ["category"]="lua",
- ["filename"]="publ-tra",
- ["loading"]="publ-ini.mkiv",
- ["status"]="pending",
- },
- {
- ["category"]="lua",
- ["filename"]="publ-usr",
- ["loading"]="publ-ini.mkiv",
- ["status"]="pending",
- },
- },
- ["main"]={
- {
- ["category"]="mkiv",
- ["filename"]="context",
- ["loading"]="parent",
- ["status"]="okay",
- },
- {
- ["category"]="lus",
- ["comment"]="stub file for context",
- ["filename"]="context",
- ["loading"]="parent",
- ["status"]="okay",
- },
- {
- ["category"]="tex",
- ["filename"]="metatex",
- ["loading"]="parent",
- ["status"]="pending",
- },
- {
- ["category"]="lus",
- ["comment"]="stub file for metatex",
- ["filename"]="metatex",
- ["loading"]="parent",
- ["status"]="pending",
- },
- {
- ["category"]="mkiv",
- ["filename"]="cont-cs",
- ["loading"]="parent",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="cont-de",
- ["loading"]="parent",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="cont-en",
- ["loading"]="parent",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="cont-fr",
- ["loading"]="parent",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="cont-gb",
- ["loading"]="parent",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="cont-it",
- ["loading"]="parent",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="cont-nl",
- ["loading"]="parent",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="cont-pe",
- ["loading"]="parent",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="cont-ro",
- ["loading"]="parent",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="we keep this around for historic reasons",
- ["filename"]="ppchtex",
- ["loading"]="never",
- ["status"]="okay",
- },
- },
- ["metafun"]={
- {
- ["category"]="mpiv",
- ["comment"]="maybe more delayed loading",
- ["filename"]="metafun",
- ["loading"]="parent",
- ["status"]="okay",
- },
- {
- ["category"]="mpiv",
- ["filename"]="mp-base",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mpiv",
- ["filename"]="mp-tool",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mpiv",
- ["filename"]="mp-mlib",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mpiv",
- ["comment"]="sort of obsolete",
- ["filename"]="mp-core",
- ["status"]="okay",
- },
- {
- ["category"]="mpiv",
- ["comment"]="maybe some nicer synonyms",
- ["filename"]="mp-page",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mpiv",
- ["filename"]="mp-butt",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mpiv",
- ["filename"]="mp-shap",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mpiv",
- ["filename"]="mp-grph",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mpiv",
- ["filename"]="mp-grid",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mpiv",
- ["comment"]="a hack anyway",
- ["filename"]="mp-form",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mpiv",
- ["filename"]="mp-figs",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mpiv",
- ["filename"]="mp-func",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mpiv",
- ["filename"]="mp-text",
- ["loading"]="on demand",
- ["status"]="todo",
- },
- {
- ["category"]="mpiv",
- ["filename"]="mp-crop",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mpiv",
- ["comment"]="follows m-chart",
- ["filename"]="mp-char",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mpiv",
- ["comment"]="follows m-steps",
- ["filename"]="mp-step",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mpiv",
- ["filename"]="mp-chem",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mpiv",
- ["comment"]="maybe some namespace changes",
- ["filename"]="mp-abck",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mpiv",
- ["comment"]="maybe some namespace changes",
- ["filename"]="mp-apos",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mpiv",
- ["comment"]="will be done when needed",
- ["filename"]="mp-asnc",
- ["loading"]="on demand",
- ["status"]="todo",
- },
- {
- ["category"]="mpiv",
- ["filename"]="mp-back",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mpiv",
- ["filename"]="mp-bare",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mpiv",
- ["filename"]="mp-cows",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mpiv",
- ["filename"]="mp-fobg",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mpiv",
- ["filename"]="mp-grap",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mpiv",
- ["filename"]="mp-idea",
- ["loading"]="on demand",
- ["status"]="unknown",
- },
- {
- ["category"]="mpiv",
- ["filename"]="mp-luas",
- ["loading"]="always",
- ["status"]="okay",
- },
- {
- ["category"]="mpiv",
- ["filename"]="mp-symb",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- },
- ["modules"]={
- {
- ["category"]="mkiv",
- ["comment"]="best use m-zint instead",
- ["filename"]="m-barcodes",
- ["loading"]="module",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="m-chart",
- ["loading"]="module",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="this is a placeholder (chemistry is built-in)",
- ["filename"]="m-chemic",
- ["loading"]="never",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="m-cweb",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="m-database",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="m-nodechart",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="add-on for mtx-context",
- ["filename"]="m-directives",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="m-educat",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="m-fields",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="m-format",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="m-graph",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="m-ipsum",
- ["loading"]="module",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="m-json",
- ["loading"]="module",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="m-layout",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["comment"]="add-on for mtx-context",
- ["filename"]="m-logcategories",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="m-markdown",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="m-mathcrap",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="m-mkii",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="m-mkivhacks",
- ["status"]="todo",
- },
- {
- ["category"]="mkvi",
- ["filename"]="m-morse",
- ["loading"]="module",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="m-narrowtt",
- ["loading"]="module",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="m-ntb-to-xtb",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="m-obsolete",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="m-oldfun",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="m-oldnum",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="m-pictex",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="m-pstricks",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["comment"]="keep an eye on changes in lua code",
- ["filename"]="m-punk",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="m-spreadsheet",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="m-steps",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="m-subsub",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="m-sql",
- ["loading"]="module",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="m-timing",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="add-on for mtx-context",
- ["filename"]="m-trackers",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="m-translate",
- ["status"]="okay",
- },
- {
- ["category"]="xsd",
- ["filename"]="x-chemml",
- ["status"]="todo",
- },
- {
- ["category"]="xsd",
- ["filename"]="x-contml",
- ["status"]="todo",
- },
- {
- ["category"]="rng",
- ["filename"]="x-corres",
- ["status"]="todo",
- },
- {
- ["category"]="dtd",
- ["filename"]="x-fig-00",
- ["status"]="todo",
- },
- {
- ["category"]="xsd",
- ["filename"]="x-fig-00",
- ["status"]="todo",
- },
- {
- ["category"]="ctx",
- ["filename"]="x-ldx",
- ["status"]="todo",
- },
- {
- ["category"]="xsd",
- ["filename"]="x-mathml",
- ["status"]="todo",
- },
- {
- ["category"]="xsl",
- ["filename"]="x-om2cml",
- ["status"]="todo",
- },
- {
- ["category"]="xsl",
- ["filename"]="x-openmath",
- ["status"]="todo",
- },
- {
- ["category"]="ctx",
- ["comment"]="runner for x-pfs-01",
- ["filename"]="x-pfsense",
- ["status"]="okay",
- },
- {
- ["category"]="xsd",
- ["filename"]="x-physml",
- ["status"]="todo",
- },
- {
- ["category"]="xsl",
- ["filename"]="x-sm2om",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="m-units",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="m-visual",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="m-zint",
- ["status"]="okay",
- },
- {
- ["category"]="tex",
- ["filename"]="s-abr-01",
- ["status"]="todo",
- },
- {
- ["category"]="tex",
- ["filename"]="s-abr-02",
- ["status"]="todo",
- },
- {
- ["category"]="tex",
- ["filename"]="s-abr-03",
- ["status"]="todo",
- },
- {
- ["category"]="tex",
- ["filename"]="s-abr-04",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="s-art-01",
- ["status"]="todo",
- },
- {
- ["category"]="tex",
- ["filename"]="s-cdr-01",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="s-def-01",
- ["status"]="todo",
- },
- {
- ["category"]="tex",
- ["filename"]="s-faq-00",
- ["status"]="todo",
- },
- {
- ["category"]="tex",
- ["filename"]="s-faq-01",
- ["status"]="todo",
- },
- {
- ["category"]="tex",
- ["filename"]="s-faq-02",
- ["status"]="todo",
- },
- {
- ["category"]="tex",
- ["filename"]="s-faq-03",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="s-fnt-10",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="s-fnt-20",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="s-fnt-21",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="s-fnt-24",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="s-fonts-coverage",
- ["loading"]="s-fonts-coverage",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="s-fonts-features",
- ["loading"]="s-fonts-features",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="s-fonts-goodies",
- ["loading"]="s-fonts-goodies",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="s-fonts-missing",
- ["loading"]="s-fonts-missing",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="s-fonts-shapes",
- ["loading"]="s-fonts-shapes",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="s-fonts-system",
- ["loading"]="s-fonts-system",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="s-fonts-tables",
- ["loading"]="s-fonts-tables",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="s-fonts-vectors",
- ["loading"]="s-fonts-vectors",
- ["status"]="okay",
- },
- {
- ["category"]="mkvi",
- ["filename"]="s-inf-01",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="s-inf-02",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="s-inf-03",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="s-inf-04",
- ["status"]="todo",
- },
- {
- ["category"]="lua",
- ["filename"]="s-languages-counters",
- ["loading"]="s-languages-counters",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="s-languages-frequencies",
- ["loading"]="s-languages-frequencies",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="s-languages-hyphenation",
- ["loading"]="s-languages-hyphenation",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="s-languages-sorting",
- ["loading"]="s-languages-sorting",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="s-languages-system",
- ["loading"]="s-languages-system",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="s-mag-01",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="s-map-10",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="s-math-characters",
- ["loading"]="s-math-characters",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="s-math-coverage",
- ["loading"]="s-math-coverage",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="s-math-extensibles",
- ["loading"]="s-math-extensibles",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="s-math-parameters",
- ["loading"]="s-math-parameters",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="s-math-repertoire",
- ["loading"]="s-math-repertoire",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="s-mod-00",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="s-mod-01",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="s-mod-02",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="s-pages-statistics",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="s-physics-units",
- ["loading"]="s-physics-units",
- ["status"]="okay",
- },
- {
- ["category"]="tex",
- ["filename"]="s-pre-00",
- ["status"]="todo",
- },
- {
- ["category"]="tex",
- ["filename"]="s-pre-01",
- ["status"]="todo",
- },
- {
- ["category"]="tex",
- ["filename"]="s-pre-02",
- ["status"]="todo",
- },
- {
- ["category"]="tex",
- ["filename"]="s-pre-03",
- ["status"]="todo",
- },
- {
- ["category"]="tex",
- ["filename"]="s-pre-04",
- ["status"]="todo",
- },
- {
- ["category"]="tex",
- ["filename"]="s-pre-05",
- ["status"]="todo",
- },
- {
- ["category"]="tex",
- ["filename"]="s-pre-06",
- ["status"]="todo",
- },
- {
- ["category"]="tex",
- ["filename"]="s-pre-07",
- ["status"]="todo",
- },
- {
- ["category"]="tex",
- ["filename"]="s-pre-08",
- ["status"]="todo",
- },
- {
- ["category"]="tex",
- ["filename"]="s-pre-09",
- ["status"]="todo",
- },
- {
- ["category"]="tex",
- ["filename"]="s-pre-10",
- ["status"]="todo",
- },
- {
- ["category"]="tex",
- ["filename"]="s-pre-11",
- ["status"]="todo",
- },
- {
- ["category"]="tex",
- ["filename"]="s-pre-12",
- ["status"]="todo",
- },
- {
- ["category"]="tex",
- ["filename"]="s-pre-13",
- ["status"]="todo",
- },
- {
- ["category"]="tex",
- ["filename"]="s-pre-14",
- ["status"]="todo",
- },
- {
- ["category"]="tex",
- ["filename"]="s-pre-15",
- ["status"]="todo",
- },
- {
- ["category"]="tex",
- ["filename"]="s-pre-16",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="s-pre-17",
- ["status"]="todo",
- },
- {
- ["category"]="tex",
- ["filename"]="s-pre-18",
- ["status"]="todo",
- },
- {
- ["category"]="tex",
- ["filename"]="s-pre-19",
- ["status"]="todo",
- },
- {
- ["category"]="tex",
- ["filename"]="s-pre-22",
- ["status"]="todo",
- },
- {
- ["category"]="tex",
- ["filename"]="s-pre-23",
- ["status"]="todo",
- },
- {
- ["category"]="tex",
- ["filename"]="s-pre-26",
- ["status"]="todo",
- },
- {
- ["category"]="tex",
- ["filename"]="s-pre-27",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="s-pre-30",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="s-present-tiles",
- ["status"]="okay",
- },
- {
- ["category"]="tex",
- ["filename"]="s-pre-50",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="s-pre-60",
- ["status"]="todo",
- },
- {
- ["category"]="tex",
- ["filename"]="s-pre-61",
- ["status"]="todo",
- },
- {
- ["category"]="tex",
- ["filename"]="s-pre-62",
- ["status"]="todo",
- },
- {
- ["category"]="tex",
- ["filename"]="s-pre-63",
- ["status"]="todo",
- },
- {
- ["category"]="tex",
- ["filename"]="s-pre-64",
- ["status"]="todo",
- },
- {
- ["category"]="tex",
- ["filename"]="s-pre-66",
- ["status"]="todo",
- },
- {
- ["category"]="tex",
- ["filename"]="s-pre-67",
- ["status"]="todo",
- },
- {
- ["category"]="tex",
- ["filename"]="s-pre-68",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="s-pre-69",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="s-pre-70",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="s-pre-71",
- ["status"]="todo",
- },
- {
- ["category"]="tex",
- ["filename"]="s-pre-93",
- ["status"]="todo",
- },
- {
- ["category"]="tex",
- ["filename"]="s-pre-96",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="s-reg-01",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="s-sql-tables",
- ["loading"]="s-sql-tables",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="x-asciimath",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="x-calcmath",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="x-cals",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="x-chemml",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="x-ct",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="x-entities",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="x-foxet",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="x-ldx",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="x-mathml",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="x-newmml",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["comment"]="pfsense xml configuration rendering",
- ["filename"]="x-pfs-01",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="x-physml",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="x-res-01",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="x-res-50",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="x-udhr",
- ["status"]="okay",
- },
- },
- ["optional"]={
- {
- ["category"]="mkiv",
- ["filename"]="bxml-apa",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="colo-run",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="always needs some work",
- ["filename"]="cont-new",
- ["loading"]="runtime",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["filename"]="font-run",
- ["loading"]="on demand",
- ["status"]="todo",
- },
- {
- ["category"]="mkiv",
- ["comment"]="this is an experimental module",
- ["filename"]="lxml-ctx",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="this is historic code that we keep around",
- ["filename"]="meta-pdh",
- ["loading"]="never",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["comment"]="this is just a helper for generating files",
- ["filename"]="mult-prm",
- ["loading"]="never",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="page-run",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="mkiv",
- ["filename"]="spac-adj",
- ["loading"]="never",
- ["status"]="obsolete",
- },
- {
- ["category"]="mkiv",
- ["comment"]="replaced by a more modern variant",
- ["filename"]="supp-vis",
- ["loading"]="never",
- ["status"]="obsolete",
- },
- {
- ["category"]="mkiv",
- ["filename"]="symb-run",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- },
- ["patterns"]={
- {
- ["category"]="lua",
- ["filename"]="lang-af",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-agr",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-ala",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-bg",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-ca",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-cs",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-cy",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-da",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-de",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-deo",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-es",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-et",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-eu",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-fi",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-fr",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-gb",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-hr",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-hu",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-is",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-it",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-la",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-lt",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-lv",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-ml",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-mn",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-nb",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-nl",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-nn",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-pl",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-pt",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-ro",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-ru",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-sk",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-sl",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-sr",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-sv",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-th",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-tk",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-tr",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-uk",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-us",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="lang-zh",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- {
- ["category"]="lua",
- ["filename"]="word-xx",
- ["loading"]="on demand",
- ["status"]="okay",
- },
- },
- ["resources"]={
- {
- ["category"]="ori",
- ["comment"]="template for a user configuration file (with suffix mkiv)",
- ["filename"]="cont-sys",
- ["loading"]="runtime",
- ["status"]="okay",
- },
- {
- ["category"]="lmx",
- ["filename"]="context-base",
- ["status"]="todo",
- },
- {
- ["category"]="lmx",
- ["filename"]="context-characters",
- ["status"]="todo",
- },
- {
- ["category"]="lmx",
- ["filename"]="context-debug",
- ["status"]="todo",
- },
- {
- ["category"]="lmx",
- ["filename"]="context-error",
- ["status"]="todo",
- },
- {
- ["category"]="lmx",
- ["filename"]="context-fonttest",
- ["status"]="todo",
- },
- {
- ["category"]="lmx",
- ["filename"]="context-help",
- ["status"]="todo",
- },
- {
- ["category"]="lmx",
- ["filename"]="context-timing",
- ["status"]="todo",
- },
- {
- ["category"]="pdf",
- ["filename"]="context-version",
- ["status"]="todo",
- },
- {
- ["category"]="png",
- ["filename"]="context-version",
- ["status"]="todo",
- },
- {
- ["category"]="css",
- ["comment"]="layout specification for debug and error pages and web services",
- ["filename"]="context",
- ["status"]="okay",
- },
- {
- ["category"]="rme",
- ["comment"]="readme file",
- ["filename"]="context",
- ["status"]="okay",
- },
- {
- ["category"]="ctx",
- ["comment"]="example of a ctx file (for mtx-context)",
- ["filename"]="core-ctx",
- ["status"]="okay",
- },
- {
- ["category"]="css",
- ["filename"]="export-example",
- ["status"]="todo",
- },
- {
- ["category"]="rng",
- ["filename"]="export-example",
- ["status"]="todo",
- },
- {
- ["category"]="tex",
- ["filename"]="export-example",
- ["status"]="todo",
- },
- {
- ["category"]="xml",
- ["comment"]="this file is auto-generated by mtx-language",
- ["filename"]="lang-all",
- ["status"]="okay",
- },
- {
- ["category"]="xml",
- ["filename"]="lpdf-pda",
- ["status"]="todo",
- },
- {
- ["category"]="xml",
- ["filename"]="lpdf-pdx",
- ["status"]="todo",
- },
- {
- ["category"]="rlx",
- ["filename"]="rlxcache",
- ["status"]="todo",
- },
- {
- ["category"]="rlx",
- ["filename"]="rlxtools",
- ["status"]="todo",
- },
- {
- ["category"]="ctx",
- ["filename"]="s-mod",
- ["status"]="todo",
- },
- {
- ["category"]="pdf",
- ["filename"]="status-files",
- ["status"]="todo",
- },
- {
- ["category"]="pdf",
- ["filename"]="status-lua",
- ["status"]="todo",
- },
- {
- ["category"]="tex",
- ["filename"]="status-mkiv",
- ["status"]="todo",
- },
- },
- ["todo"]={
- {
- ["category"]="lua",
- ["filename"]="core-run",
- ["status"]="idea",
- },
- },
-}
diff --git a/tex/context/base/mkiv/status-mkiv.tex b/tex/context/base/mkiv/status-mkiv.tex
deleted file mode 100644
index 8685c97ad..000000000
--- a/tex/context/base/mkiv/status-mkiv.tex
+++ /dev/null
@@ -1,328 +0,0 @@
-\usemodule[abr-02]
-
-\setupbodyfont
- [dejavu,9pt]
-
-\setuppapersize
- [A4,landscape]
-
-\setuplayout
- [width=middle,
- height=middle,
- backspace=.5cm,
- topspace=.5cm,
- footer=0pt,
- header=1.25cm]
-
-\setuphead
- [title]
- [style=\bfa,
- page=yes,
- after={\blank[line]}]
-
-\setuppagenumbering
- [location=]
-
-\setupheadertexts
- [\currentdate]
- [MkIV Status / Page \pagenumber]
-
-% \showmakeup
-% \showallmakeup
-
-\starttext
-
-% logs.report (immediate) versus logs.messenger (in flow)
-
-\starttitle[title=Todo]
-
-\startitemize[packed]
- \startitem currently the new namespace prefixes are not consistent but this
- will be done when we're satisfied with one scheme \stopitem
- \startitem there will be additional columns in the table, like for namespace
- so we need another round of checking then \stopitem
- \startitem the lua code will be cleaned up and upgraded as some is quite old
- and experimental \stopitem
- \startitem we need a proper dependency tree and better defined loading order \stopitem
- \startitem all dotag.. will be moved to the tags_.. namespace \stopitem
- \startitem we need to check what messages are gone (i.e.\ clean up mult-mes) \stopitem
- \startitem some commands can go from mult-def (and the xml file) \stopitem
- \startitem check for setuphandler vs simplesetuphandler \stopitem
- \startitem for the moment we will go for \type {xxxx_} namespaces that (mostly) match
- the filename but later we can replace these by longer names (via a script) so
- module writers should {\bf not} use the core commands with \type{_} in the
- name \stopitem
- \startitem the message system will be unified \stopitem
- \startitem maybe rename dowhatevertexcommand to fromluawhatevertexcommand \stopitem
- \startitem consider moving setups directly to lua end (e.g. in characterspacing, breakpoint, bitmaps etc.) \stopitem
- \startitem more local temporary \type {\temp...} will become \type {\p_...} \stopitem
- \startitem check all ctxlua calls for ctxcommand \stopitem
- \startitem rename all those \type {\current}s in strc \stopitem
- \startitem check \type {option} vs \type {options} \stopitem
- \startitem check \type {type} vs \type {kind} \stopitem
- \startitem check \type {label} vs \type {name} vs \type {tag} \stopitem
- \startitem check \type {limop}, different limops should be classes \stopitem
- \startitem too many positions in simple files (itemize etc) \stopitem
- \startitem math domains/dictionaries \stopitem
- \startitem xtables don't span vertically with multilines (yet) \stopitem
- \startitem notes in mixed columns \stopitem
- \startitem floats in mixed columns \stopitem
- \startitem check return values \type {os.execute} \stopitem
- \startitem more r, d, k in xml code \stopitem
- \startitem mathml, more in \LUA \stopitem
- \startitem style: font-size, font, color handling in \HTML\ (lxml-css) \stopitem
- \startitem a \type {\name {A.B.C DEF}} auto-nobreakspace \stopitem
- \startitem redo \CWEB\ module with \LUA \stopitem
- \startitem maybe move characters.blocks to its own file \stopitem
- \startitem more local context = context in \LUA\ files \stopitem
- \startitem check and optimize all storage.register and locals (cosmetics) \stopitem
- \startitem check all used modules in \LUA\ (and local them) \stopitem
- \startitem environment and basic lua helpers are now spread over too many files \stopitem
- \startitem isolate tracers and showers \stopitem
- \startitem check all possible usage of ctxcommand \stopitem
- \startitem there are more s-* modules, like s-fnt-41 \stopitem
- \startitem check (un)marked tables \stopitem
-\stopitemize
-
-\stoptitle
-
-\starttitle[title=To keep an eye on]
-
-\startitemize[packed]
- \startitem Currently lpeg replacements interpret the percent sign so we need to escape it. \stopitem
- \startitem Currently numbers and strings are cast in comparisons but that might change in the future. \stopitem
-\stopitemize
-
-\stoptitle
-
-\definehighlight[notabenered] [color=darkred, style=bold]
-\definehighlight[notabeneblue] [color=darkblue, style=bold]
-\definehighlight[notabeneyellow] [color=darkyellow, style=bold]
-\definehighlight[notabenemagenta][color=darkmagenta,style=bold]
-
-\startluacode
-
- local coremodules = dofile("status-mkiv.lua")
-
- local valid = table.tohash {
- "toks", "attr", "page", "buff", "font", "colo", "phys", "supp", "typo", "strc",
- "syst", "tabl", "spac", "scrn", "lang", "lxml", "mlib", "java", "pack", "math",
- "symb", "grph", "anch", "luat", "mult", "back", "node", "meta", "norm", "catc",
- "cldf", "file", "char", "core", "layo", "trac", "cont", "regi", "enco", "hand",
- "unic", "sort", "blob", "type", "scrp", "prop", "chem", "bibl", "task",
- "whatever", "mp", "s", "x", "m", "mtx",
- }
-
- local specialcategories = {
- mkvi = true,
- }
-
- local what = {
- "main", "core", "lua", "optional", "implementations", "extra", "extras", "metafun", "modules", "resources"
- }
-
- local totaltodo = 0
- local totalpending = 0
- local totalobsolete = 0
- local totalloaded = 0
-
- local function summary(nofloaded,noftodo,nofpending,nofobsolete)
-
- local nofdone = nofloaded - noftodo - nofpending - nofobsolete
-
- context.starttabulate { "|B|r|" }
- context.HL()
- context.NC() context("done") context.NC() context(nofdone) context.NC() context.NR()
- context.NC() context("todo") context.NC() context(noftodo) context.NC() context.NR()
- context.NC() context("pending") context.NC() context(nofpending) context.NC() context.NR()
- context.NC() context("obsolete") context.NC() context(nofobsolete) context.NC() context.NR()
- context.HL()
- context.NC() context("loaded") context.NC() context(nofloaded) context.NC() context.NR()
- context.HL()
- context.stoptabulate()
-
- end
-
- if coremodules then
-
- local function tabelize(loaded,what)
-
- if loaded then
-
- local noftodo = 0
- local nofpending = 0
- local nofobsolete = 0
- local nofloaded = #loaded
- local categories = { }
-
- for k, v in next, valid do
- categories[k] = { }
- end
-
- for i=1,nofloaded do
- local l = loaded[i]
- l.order = i
- local category = string.match(l.filename,"([^%-]+)%-") or "whatever"
- local c = categories[category]
- if c then
- c[#c+1] = l
- end
- end
-
- for k, loaded in table.sortedhash(categories) do
-
- local nofloaded = #loaded
-
- if nofloaded > 0 then
-
- table.sort(loaded,function(a,b) return a.filename < b.filename end) -- in place
-
- context.starttitle { title = string.format("%s: %s",what,k) }
-
- context.starttabulate { "|Tr|Tlw(3em)|Tlw(12em)|Tlw(12em)|Tlw(4em)|Tl|Tl|Tl|Tp|" }
- context.NC() context.bold("order")
- context.NC() context.bold("kind")
- context.NC() context.bold("file")
- context.NC() context.bold("loading")
- context.NC() context.bold("status")
- context.NC() context.bold("reference")
- context.NC() context.bold("manual")
- context.NC() context.bold("wiki")
- context.NC() context.bold("comment")
- context.NC() context.NR()
- context.HL()
- for i=1,nofloaded do
- local module = loaded[i]
- local status = module.status
- local category = module.category
- local filename = module.filename
- context.NC()
- context(module.order)
- context.NC()
- if specialcategories[category] then
- context.notabeneblue(category)
- else
- context(category)
- end
- context.NC()
- if #filename>20 then
- context(string.sub(filename,1,18) .. "..")
- else
- context(filename)
- end
- context.NC()
- context(module.loading)
- context.NC()
- if status == "todo" then
- context.notabenered(status)
- noftodo = noftodo + 1
- elseif status == "pending" then
- context.notabeneyellow(status)
- nofpending = nofpending + 1
- elseif status == "obsolete" then
- context.notabenemagenta(status)
- nofobsolete = nofobsolete + 1
- else
- context(status)
- end
- context.NC() context(module.reference)
- context.NC() context(module.manual)
- context.NC() context(module.wiki)
- context.NC() context(module.comment)
- context.NC() context.NR()
- end
- context.stoptabulate()
-
- context.stoptitle()
-
- end
-
- end
-
- context.starttitle { title = string.format("summary of %s modules",what) }
-
- summary(nofloaded,noftodo,nofpending,nofobsolete)
-
- context.stoptitle()
-
- totaltodo = totaltodo + noftodo
- totalpending = totalpending + nofpending
- totalobsolete = totalobsolete + nofobsolete
- totalloaded = totalloaded + nofloaded
-
- end
-
- end
-
- for i=1,#what do
- tabelize(coremodules[what[i]],what[i])
- end
-
- end
-
- -- context.starttitle { title = "Valid prefixes" }
- --
- -- for namespace, data in table.sortedhash(namespaces) do
- -- if valid[namespace] then
- -- context.type(namespace)
- -- end
- -- context.par()
- -- end
- --
- -- context.stoptitle()
-
- context.starttitle { title = string.format("summary of all",what) }
-
- summary(totalloaded,totaltodo,totalpending,totalobsolete)
-
- context.stoptitle()
-
- if io.exists("status-namespaces.lua") then
-
- context.starttitle { title = "messy namespaces" }
-
- local namespaces = dofile("status-namespaces.lua")
-
- for namespace, data in table.sortedhash(namespaces) do
- if valid[namespace] then
- else
- context(namespace)
- end
- context.par()
- end
-
- context.stoptitle()
-
- end
-
- if io.exists("status-registers.lua") then
-
- context.starttitle { title = "messy registers" }
-
- local registers = dofile("status-registers.lua")
-
- for register, data in table.sortedhash(registers) do
- context(register)
- context.par()
- for name in table.sortedhash(data) do
- context.quad()
- context.type(name)
- context.par()
- end
- context.par()
- end
-
- context.stoptitle()
-
- end
-
- context.starttitle { title = "callbacks" }
-
- commands.showcallbacks()
-
- context.stoptitle()
-
-\stopluacode
-
-
-\stoptext
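As a standalone illustration (not part of the patch), the reporting that the removed status-mkiv files provided can be approximated in a few lines of plain Lua: records follow the category/filename/loading/status schema of the deleted table, groups are formed with the same filename-prefix match that the deleted tabelize function used, and statuses are tallied as in its summary. The sample records and the helper name summarize are hypothetical.

    -- standalone sketch, runnable with any Lua interpreter
    local records = {
      { category = "mkiv", filename = "strc-doc", loading = "always",    status = "okay" },
      { category = "mkiv", filename = "strc-flt", loading = "always",    status = "todo" },
      { category = "lua",  filename = "lang-nl",  loading = "on demand", status = "okay" },
    }

    local function summarize(list)
      local categories, counts = { }, { }
      for _, record in ipairs(list) do
        -- same prefix rule as the removed generator: "strc-doc" -> "strc"
        local prefix = string.match(record.filename, "([^%-]+)%-") or "whatever"
        local group = categories[prefix] or { }
        categories[prefix] = group
        group[#group + 1] = record
        counts[record.status] = (counts[record.status] or 0) + 1
      end
      for _, group in pairs(categories) do
        table.sort(group, function(a, b) return a.filename < b.filename end)
      end
      return categories, counts
    end

    local categories, counts = summarize(records)
    for status, n in pairs(counts) do
      print(string.format("%-8s %i", status, n))
    end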
diff --git a/tex/context/base/mkiv/strc-bkm.mkiv b/tex/context/base/mkiv/strc-bkm.mkiv
index 5a8dba562..8172030b1 100644
--- a/tex/context/base/mkiv/strc-bkm.mkiv
+++ b/tex/context/base/mkiv/strc-bkm.mkiv
@@ -13,7 +13,7 @@
\writestatus{loading}{ConTeXt Structure Macros / Bookmarks}
-\registerctxluafile{strc-bkm}{1.001}
+\registerctxluafile{strc-bkm}{}
% \enabledirectives[references.bookmarks.preroll]
diff --git a/tex/context/base/mkiv/strc-blk.lua b/tex/context/base/mkiv/strc-blk.lua
index 0ababcfc0..492abc768 100644
--- a/tex/context/base/mkiv/strc-blk.lua
+++ b/tex/context/base/mkiv/strc-blk.lua
@@ -8,7 +8,7 @@ if not modules then modules = { } end modules ['strc-blk'] = {
-- this one runs on top of buffers and structure
-local type = type
+local type, next = type, next
local find, format, validstring = string.find, string.format, string.valid
local settings_to_set, settings_to_array = utilities.parsers.settings_to_set, utilities.parsers.settings_to_array
local allocate = utilities.storage.allocate
diff --git a/tex/context/base/mkiv/strc-blk.mkiv b/tex/context/base/mkiv/strc-blk.mkiv
index fe259d223..c42bb25ec 100644
--- a/tex/context/base/mkiv/strc-blk.mkiv
+++ b/tex/context/base/mkiv/strc-blk.mkiv
@@ -13,7 +13,7 @@
\writestatus{loading}{ConTeXt Structure Macros / Blockmoves}
-\registerctxluafile{strc-blk}{1.001}
+\registerctxluafile{strc-blk}{}
\unprotect
diff --git a/tex/context/base/mkiv/strc-con.mkvi b/tex/context/base/mkiv/strc-con.mkvi
index 18ce17355..57b69cc7f 100644
--- a/tex/context/base/mkiv/strc-con.mkvi
+++ b/tex/context/base/mkiv/strc-con.mkvi
@@ -13,7 +13,7 @@
\writestatus{loading}{ConTeXt Structure Macros / Constructions}
-\registerctxluafile{strc-con}{1.001}
+\registerctxluafile{strc-con}{}
% todo: check why \copy .. probably mkii leftover
%
@@ -73,6 +73,8 @@
\c!display=\v!yes,
\c!width=7\emwidth,
\c!distance=\emwidth,
+ \c!stretch=.50,
+ \c!shrink=.25,
\c!titledistance=.5\emwidth,
%c!hang=,
%c!sample=,
@@ -186,6 +188,27 @@
% \ifdefined\dotagsetconstruction \else \let\dotagsetconstruction\relax \fi
+% \startuseMPgraphic{MyFrame}
+% picture p ; numeric o ; path a, b ; pair c ;
+% p := textext.rt("\FunnyFramed{\currentconstructiontext}") ;
+% % p := textext.rt("\wrappedconstruction{\FunnyFramed{\currentconstructiontext}}") ;
+% % p := textext.rt("\FunnyFramed{\wrappedconstruction{\currentconstructiontext}}") ;
+% % p := textext.rt("\FunnyFramed{\wrappedcurrentconstruction}") ;
+% ...
+% \stopuseMPgraphic
+%
+% \defineoverlay[MyFrame][\useMPgraphic{MyFrame}]
+%
+% \defineframedtext
+% [MyText]
+% [frame=off,
+% background=MyFrame]
+%
+% \defineenumeration[Theorem]
+% [alternative=empty,
+% before=\startMyText,
+% after=\stopMyText]
+
\newtoks\everyconstruction
\unexpanded\def\currentconstructiontext
@@ -199,6 +222,24 @@
\p_strc_constructions_sample
\endstrut}
+\unexpanded\def\wrappedconstruction#1%
+ {\begingroup
+ \iftrialtypesetting \else
+ \currentconstructionsynchronize
+ \attribute\destinationattribute\currentconstructionattribute\relax
+ \fi
+ #1%
+ \endgroup}
+
+\unexpanded\def\wrappedcurrentconstruction
+ {\begingroup
+ \iftrialtypesetting \else
+ \currentconstructionsynchronize
+ \attribute\destinationattribute\currentconstructionattribute\relax
+ \fi
+ \currentconstructiontext
+ \endgroup}
+
\unexpanded\setvalue{\??constructionmainhandler\v!construction}#1%
{\iftrialtypesetting \else
\begingroup
@@ -410,8 +451,12 @@
\installcommandhandler \??constructionalternative {constructionalternative} \??constructionalternative
\setupconstructionalternative
- [\c!width=\constructionparameter\c!width,
- \c!distance=\constructionparameter\c!distance]
+ [%c!left=, % no inheritance
+ %c!right=,% no inheritance
+ \c!width=\constructionparameter\c!width,
+ \c!distance=\constructionparameter\c!distance,
+ \c!stretch=\constructionparameter\c!stretch,
+ \c!shrink=\constructionparameter\c!shrink]
\defineconstructionalternative
[\v!left]
@@ -463,7 +508,7 @@
\defineconstructionalternative
[\v!hanging]
- [%\c!width=\v!fit,
+ [%\c!width=.75\emwidth
\c!renderingsetup=\??constructionrenderings:\v!hanging]
\defineconstructionalternative
@@ -475,6 +520,15 @@
[\v!command]
[\c!renderingsetup=\??constructionrenderings:\v!command]
+\defineconstructionalternative
+ [\v!empty]
+ [\c!renderingsetup=\??constructionrenderings:\v!empty]
+
+\defineconstructionalternative
+ [\v!label]
+ [\c!renderingsetup=\??constructionrenderings:\v!label,
+ \c!width=]
+
\newbox \constructionheadbox
\newskip \leftconstructionskip
\newskip \rightconstructionskip
@@ -778,7 +832,10 @@
\flushconstructionheadbox
\ifconditional\c_strc_constructions_distance_none \else
\nobreak
- \hskip\constructionsheaddistance \s!plus .5\constructionsheaddistance \s!minus .25\constructionsheaddistance\relax
+ \hskip\constructionsheaddistance
+ \s!plus \constructionalternativeparameter\c!stretch\constructionsheaddistance
+ \s!minus\constructionalternativeparameter\c!shrink \constructionsheaddistance
+ \relax
\fi
\useconstructionstyleandcolor\c!style\c!color
\ignorespaces
@@ -822,9 +879,9 @@
\ifconditional\c_strc_constructions_distance_none \else
% we used to have just a kern, but long lines look bad then
\scratchdistance\ifdim\constructionsheaddistance=\zeropoint .75\emwidth\else\constructionsheaddistance\fi
- \hskip \scratchdistance
- \s!plus .25\scratchdistance
- \s!minus.25\scratchdistance
+ \hskip \scratchdistance
+ \s!plus \constructionalternativeparameter\c!stretch\scratchdistance
+ \s!minus\constructionalternativeparameter\c!shrink \scratchdistance
\fi
\ifhmode
\ifx\p_strc_constructions_width\v!line
@@ -848,6 +905,19 @@
\ignorespaces
\stopsetups
+\startsetups[\??constructionrenderings:\v!empty]
+ \noindent
+ \useconstructionstyleandcolor\c!style\c!color
+ \ignorespaces
+\stopsetups
+
+\startsetups[\??constructionrenderings:\v!label]
+ \dontleavehmode
+ \begingroup
+ \constructionparameter\c!headcommand{\flushconstructionheadbox}
+ \endgroup
+\stopsetups
+
% you can use \placeclosesymbol or \qed to place a symbol at the end of a
% construction
diff --git a/tex/context/base/mkiv/strc-def.mkiv b/tex/context/base/mkiv/strc-def.mkiv
index b2e86c140..8d1fa371f 100644
--- a/tex/context/base/mkiv/strc-def.mkiv
+++ b/tex/context/base/mkiv/strc-def.mkiv
@@ -12,7 +12,7 @@
\writestatus{loading}{ConTeXt Structure Macros / Definitions}
-%registerctxluafile{strc-def}{1.001}
+%registerctxluafile{strc-def}{}
\unprotect
@@ -181,6 +181,75 @@
[\v!appendix\c!label=\v!subsubsection,
\v!bodypart\c!label=\v!subsubsection] % bijlageconversie=\Character
+% extras
+
+\definesection[\s!section-8] % subsubsubsubsubsection
+\definesection[\s!section-9] % subsubsubsubsubsubsection
+\definesection[\s!section-10] % subsubsubsubsubsubsubsection
+\definesection[\s!section-11] % subsubsubsubsubsubsubsubsection
+\definesection[\s!section-12] % subsubsubsubsubsubsubsubsubsection
+
+\definehead
+ [\v!subsubsubsubsubsection]
+ [\c!section=\s!section-8,
+ \c!default=\v!subsubsubsubsection]
+
+\definehead
+ [\v!subsubsubsubsubsubsection]
+ [\c!section=\s!section-9,
+ \c!default=\v!subsubsubsubsubsection]
+
+\definehead
+ [\v!subsubsubsubsubsubsubsection]
+ [\c!section=\s!section-10,
+ \c!default=\v!subsubsubsubsubsubsection]
+
+\definehead
+ [\v!subsubsubsubsubsubsubsubsection]
+ [\c!section=\s!section-11,
+ \c!default=\v!subsubsubsubsubsubsubsection]
+
+\definehead
+ [\v!subsubsubsubsubsubsubsubsubsection]
+ [\c!section=\s!section-12,
+ \c!default=\v!subsubsubsubsubsubsubsubsection]
+
+\definehead
+ [\v!subsubsubsubsubsubject]
+ [\c!coupling=\v!subsubsubsubsubsection,
+ \c!default=\v!subsubsubsubsubsection,
+ \c!incrementnumber=\v!no]
+
+\definehead
+ [\v!subsubsubsubsubsubsubject]
+ [\c!coupling=\v!subsubsubsubsubsubsection,
+ \c!default=\v!subsubsubsubsubsubsection,
+ \c!incrementnumber=\v!no]
+
+\definehead
+ [\v!subsubsubsubsubsubsubsubject]
+ [\c!coupling=\v!subsubsubsubsubsubsubsection,
+ \c!default=\v!subsubsubsubsubsubsubsection,
+ \c!incrementnumber=\v!no]
+
+\definehead
+ [\v!subsubsubsubsubsubsubsubsubject]
+ [\c!coupling=\v!subsubsubsubsubsubsubsubsection,
+ \c!default=\v!subsubsubsubsubsubsubsubsection,
+ \c!incrementnumber=\v!no]
+
+\definehead
+ [\v!subsubsubsubsubsubsubsubsubsubject]
+ [\c!coupling=\v!subsubsubsubsubsubsubsubsubsection,
+ \c!default=\v!subsubsubsubsubsubsubsubsubsection,
+ \c!incrementnumber=\v!no]
+
+\defineprefixset
+ [\v!all]
+ [section-1,section-2,section-3,section-4,section-5,section-6,section-7,%
+ section-8,section-9,section-10,section-11,section-12]
+ []
+
% \setuphead
\setuphead
diff --git a/tex/context/base/mkiv/strc-doc.lua b/tex/context/base/mkiv/strc-doc.lua
index 57fff5a21..93d8db56b 100644
--- a/tex/context/base/mkiv/strc-doc.lua
+++ b/tex/context/base/mkiv/strc-doc.lua
@@ -129,8 +129,8 @@ sections.tobesaved = tobesaved
--
-- job.register('structures.sections.collected', tobesaved, initializer)
-sections.registered = sections.registered or allocate()
-local registered = sections.registered
+local registered = sections.registered or allocate()
+sections.registered = registered
storage.register("structures/sections/registered", registered, "structures.sections.registered")
@@ -411,11 +411,6 @@ function sections.setentry(given)
v[2](k)
end
end
--- local n = { }
--- for i=1,newdepth do
--- n[i] = numbers[i]
--- end
--- numberdata.numbers = n
numberdata.numbers = { unpack(numbers,1,newdepth) }
if not numberdata.block then
numberdata.block = getcurrentblock() -- also in references
@@ -447,8 +442,9 @@ function sections.reportstructure()
local d = status[depth]
local o = concat(ownnumbers,".",1,depth)
local n = (numbers and concat(numbers,".",1,min(depth,#numbers))) or 0
- local l = d.titledata.title or ""
- local t = (l ~= "" and l) or d.titledata.title or "[no title]"
+ local t = d.titledata.title
+ local l = t or ""
+ local t = (l ~= "" and l) or t or "[no title]"
local m = d.metadata.name
if o and not find(o,"^%.*$") then
report_structure("%s @ level %i : (%s) %s -> %s",m,depth,n,o,t)
@@ -460,17 +456,24 @@ function sections.reportstructure()
end
end
+-- function sections.setnumber(depth,n)
+-- local forced, depth, new = data.forced, depth or data.depth, tonumber(n) or 0
+-- if type(n) == "string" then
+-- if find(n,"^[%+%-]") then
+-- forced[depth] = { "add", new }
+-- else
+-- forced[depth] = { "set", new }
+-- end
+-- else
+-- forced[depth] = { "set", new }
+-- end
+-- end
+
function sections.setnumber(depth,n)
- local forced, depth, new = data.forced, depth or data.depth, tonumber(n)
- if type(n) == "string" then
- if find(n,"^[%+%-]") then
- forced[depth] = { "add", new }
- else
- forced[depth] = { "set", new }
- end
- else
- forced[depth] = { "set", new }
- end
+ data.forced[depth or data.depth] = {
+ type(n) == "string" and find(n,"^[%+%-]") and "add" or "set",
+ tonumber(n) or 0
+ }
end
function sections.numberatdepth(depth)
@@ -774,10 +777,13 @@ function sections.typesetnumber(entry,kind,...) -- kind='section','number','pref
if number then
local ownnumber = ownnumbers and ownnumbers[index] or ""
if number > 0 or (ownnumber ~= "") then
- if bb == 0 then bb = k end
+ if bb == 0 then
+ bb = k
+ end
ee = k
- else
- bb, ee = 0, 0
+ elseif criterium >= 0 then
+ bb = 0
+ ee = 0
end
else
break
@@ -1028,7 +1034,7 @@ implement { name = "namedstructureuservariable", actions = sections.userdata,
implement { name = "setstructurelevel", actions = sections.setlevel, arguments = { "string", "string" } }
implement { name = "getstructurelevel", actions = sections.getcurrentlevel, arguments = { "string" } }
-implement { name = "setstructurenumber", actions = sections.setnumber, arguments = { "integer", "string" } }
+implement { name = "setstructurenumber", actions = sections.setnumber, arguments = { "integer", "string" } } -- string as we support +-
implement { name = "getstructurenumber", actions = sections.getnumber, arguments = { "integer" } }
implement { name = "getsomestructurenumber", actions = sections.getnumber, arguments = { "integer", "string" } }
implement { name = "getfullstructurenumber", actions = sections.fullnumber, arguments = { "integer" } }
@@ -1099,6 +1105,7 @@ implement {
{ "segments" },
{ "ownnumber" },
{ "language" },
+ { "criterium" },
},
},
{ "userdata" },
diff --git a/tex/context/base/mkiv/strc-doc.mkiv b/tex/context/base/mkiv/strc-doc.mkiv
index 5f40521fa..805525487 100644
--- a/tex/context/base/mkiv/strc-doc.mkiv
+++ b/tex/context/base/mkiv/strc-doc.mkiv
@@ -13,7 +13,7 @@
\writestatus{loading}{ConTeXt Structure Macros / Document Structure}
-\registerctxluafile{strc-doc}{1.001}
+\registerctxluafile{strc-doc}{}
\unprotect
diff --git a/tex/context/base/mkiv/strc-enu.mkvi b/tex/context/base/mkiv/strc-enu.mkvi
index 8eff706bb..4680a3981 100644
--- a/tex/context/base/mkiv/strc-enu.mkvi
+++ b/tex/context/base/mkiv/strc-enu.mkvi
@@ -88,7 +88,7 @@
%\c!headcolor=,
%\c!titlecolor=,
\c!width=8\emwidth,
- \c!distance=\zeropoint,
+ %\c!distance=\zeropoint,
\c!distance=\emwidth,
\c!titledistance=.5\emwidth,
%\c!hang=,
@@ -111,7 +111,6 @@
\c!expansion=\v!no,
%\c!xmlsetup=,
%\s!catcodes=,
- %
\c!way=\v!by\v!text,
\c!prefix=\v!no,
\c!prefixconnector=.,
diff --git a/tex/context/base/mkiv/strc-flt.mkvi b/tex/context/base/mkiv/strc-flt.mkvi
index 3ad2e86fc..69881037e 100644
--- a/tex/context/base/mkiv/strc-flt.mkvi
+++ b/tex/context/base/mkiv/strc-flt.mkvi
@@ -15,7 +15,7 @@
\writestatus{loading}{ConTeXt Structure Macros / Float Numbering}
-\registerctxluafile{strc-flt}{1.001}
+\registerctxluafile{strc-flt}{}
\unprotect
@@ -97,17 +97,17 @@
\c!textcolor=,
\c!align=,
\c!number=\v!yes,
- % \c!expansion=\v!no
- % \c!prefix=\v!no,
- % \c!prefixconnector=.,
- % \c!way=\v!by\v!chapter,
- % \c!prefixsegments=2:2,
- % \c!way=\@@nrway,
- % \c!blockway=\@@nrblockway,
- % \c!sectionnumber=\@@nrsectionnumber,
- % \c!separator=\@@koseparator,
- % \c!starter=\@@kostarter,
- % \c!stopper=\@@kostopper,
+ % \c!expansion=,
+ % \c!prefix=,
+ % \c!prefixconnector=,
+ % \c!way=,
+ % \c!prefixsegments=,
+ % \c!way=,
+ % \c!blockway=,
+ % \c!sectionnumber=,
+ % \c!separator=,
+ % \c!starter=,
+ % \c!stopper=,
\c!suffixseparator=, % currently rather hard coded
\c!suffix=\floatcaptionsuffix,
\c!distance=\emwidth,
@@ -141,6 +141,9 @@
\c!spaceafter=\v!big,
\c!sidespacebefore=\rootfloatparameter\c!spacebefore,
\c!sidespaceafter=\rootfloatparameter\c!spaceafter,
+ \c!sidespaceinbetween=\rootfloatparameter\c!spacebefore,
+ \c!spacebeforeside=, % extra, not part of, can be used to add whitespace before text
+ \c!spaceafterside=, % idem
\c!sidealign=\v!normal,
\c!textmethod=\ifgridsnapping2\else0\fi, % 0=raw 1=safe (.99pg) 2=tight (-1pt) % THIS WILL CHANGE
\c!sidemethod=\ifgridsnapping2\else1\fi, % 0=raw 1=safe (.99pg) 2=tight (-1pt) % THIS WILL CHANGE
@@ -154,7 +157,7 @@
\c!outermargin=\zeropoint, % idem
\c!leftmargindistance=\zeropoint,
\c!rightmargindistance=\floatparameter\c!leftmargindistance,
- \c!step=\v!big, % the flush side float step (big=line, medium=halfline, small=quarterline, depth=halfline with normaldepth)
+ \c!step=\v!small, % the flush side float step (big, medium, small : always depth)
\c!ntop=2,
\c!nbottom=0,
\c!nlines=4, % used?
@@ -165,6 +168,7 @@
%\c!bottombefore=, % e.g. \vfill
%\c!bottomafter=,
%\c!default=, % default location
+ \c!sidethreshold=.5\strutdp, % set to "old" to check with old method
\c!numbering=\v!yes]
%D Individial settings:
@@ -341,7 +345,8 @@
\hbox{\usefloatcaptionstyleandcolor\c!headstyle\c!headcolor\thecurrentfloatnumber}%
\ifnofloatcaption \else \ifemptyfloatcaption \else
\doifelsenothing{\floatcaptionparameter\c!spaceinbetween}
- {\scratchskip\floatcaptionparameter\c!distance\relax
+ {\floatcaptionparameter\c!headseparator\relax
+ \scratchskip\floatcaptionparameter\c!distance\relax
\dotfskip\scratchskip\emergencystretch.5\scratchskip}
{\blank[\floatcaptionparameter\c!spaceinbetween]}%
\fi \fi
@@ -358,11 +363,6 @@
\endgroup
\doifsomething{\floatcaptionparameter\c!spaceafter}{\blank[\floatcaptionparameter\c!spaceafter]}}
-% \newif\iftracecaptions
-%
-% \def\settracedcaptionbox
-% {\iftracecaptions\setbox\b_strc_floats_caption\ruledhbox{\box\b_strc_floats_caption}\fi}
-
% \definefloat [figure-1] [figure]
% \definefloat [figure-2] [figure]
% \setupfloat [figure-1] [location=left,leftmargin=10mm]
@@ -409,30 +409,6 @@
\fi
\fi}
-% The tricky part of getting float related two pass data is
-% that we should fetch is early but can only save it with
-% the composed float box; this determines the order: get it
-% before saving it.
-
-% We had this:
-%
-% \definetwopasslist{\s!float\s!data} \newcounter\noffloatdata
-%
-% \let\strc_float_realpage\realpageno % used for odd/even determination, can be combined with nodelocation
-%
-% \def\strc_float_save_data % \expanded ... will change in mkiv
-% {\doglobal\increment\noffloatdata
-% \lazysavetaggedtwopassdata{\s!float\s!data}{\noffloatdata}{\noffloatpages}{\noexpand\realfolio}}% later {}{}{}{} and \getfirst...
-%
-% \def\strc_float_load_data % precedes save !
-% {\doglobal\increment\noffloatpages
-% \findtwopassdata{\s!float\s!data}{\noffloatpages}%
-% \ifconditional\twopassdatafound
-% \globallet\strc_float_realpage\twopassdata
-% \else
-% \globallet\strc_float_realpage\realpageno % \realfolio
-% \fi}
-
%D We can do this ...
%D
%D \starttyping
@@ -510,13 +486,24 @@
\let\m_strc_floats_saved_userdata\empty
\let\currentfloatcaption\currentfloat}
+\let\askedfloatmethod \empty
+\let\askedfloatoptions\empty
+
\def\strc_floats_reset_variables
{\global\emptyfloatcaptionfalse
\global\nofloatcaptionfalse
- \global\nofloatnumberfalse}
+ \global\nofloatnumberfalse
+ \global\let\askedfloatmethod \empty
+ \global\let\askedfloatoptions\empty}
% place
+\let\floatlabel \empty
+\let\floatcolumn \empty
+\let\floatrow \empty
+\let\floatlocation \empty
+\let\floatlocationmethod\empty
+
\def\strc_floats_analyze_location
{% moved here, will do more
\let\floatlabel \empty
@@ -538,11 +525,14 @@
\setupfloat[\c!spacebefore=\v!none,\c!spaceafter=\v!none]%
\to \c_floats_every_table_float
+\ifdefined\dotagregisterfloat \else \let\dotagregisterfloat\gobbletwoarguments \fi
+
\def\strc_floats_place_indeed[#location][#reference]#caption%
{\strc_floats_reset_variables
+ \xdef\askedfloatoptions{#location}%
\edef\floatlocation{#location}%
\ifx\floatlocation\empty
- \edef\floatlocation{\floatparameter\c!default}% beware of a clash between alignment locations
+ \edef\floatlocation{\floatparameter\c!default}% beware of a clash between alignment locations
\fi
\ifintable
\the\c_floats_every_table_float
@@ -731,6 +721,7 @@
\strc_floats_check_extra_actions
\strc_floats_analyze_variables_two
\strc_floats_place_packaged_boxes
+ \dotagregisterfloat\askedfloatoptions\askedfloatmethod
\dostoptagged % tricky .... needs checking
% we need to carry over the par because of side floats
\global\d_page_sides_downshift \zeropoint
@@ -743,6 +734,33 @@
% nicer is a bunch of states and one loop that sets those states
+\newdimen\d_strc_floats_margin
+\newdimen\d_strc_floats_top
+\newdimen\d_strc_floats_bottom
+
+% \def\strc_floats_calculate_skip#target#skip%
+% {\begingroup
+% \edef\askedfloatskip{\rootfloatparameter#skip}%
+% \ifx\askedfloatskip\empty
+% \global#target\zeropoint
+% \else\ifx\askedfloatskip\v!none
+% \global#target\zeropoint
+% \else
+% \setbox\scratchbox\vbox{\whitespace\blank[\askedfloatskip]}% todo: move whitespace inside blank
+% \global#target\ht\scratchbox
+% \fi\fi
+% \endgroup}
+
+\def\strc_floats_calculate_skip#target#skip%
+ {\begingroup
+ \edef\p_blank{\rootfloatparameter#skip}%
+ \ifx\p_blank\v!nowhite
+ \edef\p_blank{-\v!white}%
+ \fi
+ \prerollblank[\p_blank]%
+ \global#target\prerolledblank
+ \endgroup}
+
\def\strc_floats_analyze_variables_two
{\ifinsidecolumns
\global\setfalse\c_strc_floats_par_float
@@ -751,13 +769,37 @@
{\global\settrue \c_strc_floats_par_float}%
{\global\setfalse\c_strc_floats_par_float}%
\fi
- \global\d_page_sides_shift \zeropoint
- \global\d_page_sides_maximum \zeropoint
- \global\c_page_sides_method \floatparameter\c!sidemethod
- \global\c_page_one_float_method \floatparameter\c!textmethod
- \global\c_page_sides_align \zerocount
- \global\c_strc_floats_rotation \zerocount
- \strc_floats_calculate_skips
+ % variable initializations
+ \global\d_page_sides_shift \zeropoint
+ \global\d_page_sides_maximum \zeropoint
+ \global\c_page_sides_align \zerocount
+ \global\c_page_sides_tolerance \zerocount
+ \global\c_page_sides_skipmode \zerocount
+ \global\c_strc_floats_rotation \zerocount
+ \global\d_strc_floats_margin \rootfloatparameter\c!margin
+ \global\d_page_sides_leftshift \floatparameter \c!leftmargindistance
+ \global\d_page_sides_rightshift \floatparameter \c!rightmargindistance
+ \global\d_page_sides_topoffset \floatparameter \c!topoffset
+ \global\d_page_sides_bottomoffset\floatparameter \c!bottomoffset
+ \global\c_page_sides_method \floatparameter \c!sidemethod
+ \global\c_page_one_float_method \floatparameter \c!textmethod
+ \global\c_page_floats_n_of_top \rootfloatparameter\c!ntop
+ \global\c_page_floats_n_of_bottom\rootfloatparameter\c!nbottom
+ \ifconditional\c_strc_floats_par_float
+ \global\d_strc_floats_top \zeropoint
+ \global\d_strc_floats_bottom \zeropoint
+ \strc_floats_calculate_skip\d_page_sides_topskip \c!sidespacebefore
+ \strc_floats_calculate_skip\d_page_sides_bottomskip\c!sidespaceafter
+ \strc_floats_calculate_skip\d_page_sides_midskip \c!sidespaceinbetween
+ \strc_floats_calculate_skip\d_strc_floats_top \c!spacebeforeside
+ \strc_floats_calculate_skip\d_strc_floats_bottom \c!spaceafterside
+ \else
+ \global\d_page_sides_topskip \zeropoint
+ \global\d_page_sides_bottomskip \zeropoint
+ \strc_floats_calculate_skip\d_strc_floats_top \c!spacebefore
+ \strc_floats_calculate_skip\d_strc_floats_bottom\c!spaceafter
+ \fi
+ % keyword handling
\ifconditional\c_strc_floats_par_float
\processaction
[\floatparameter\c!sidealign]
@@ -773,14 +815,11 @@
\doifinset\v!grid \floatlocation{\global\c_page_sides_align\plusfour }%
\doifinset\v!halfline\floatlocation{\global\c_page_sides_align\plusfive }% meant for 'none'
\fi
- \doifinset\v!high\floatlocation{\global\d_page_sides_topskip \zeropoint}%
- \doifinset\v!low \floatlocation{\global\d_page_sides_bottomskip\zeropoint}%
- \doifinset\v!fit \floatlocation
- {\global\d_page_sides_topskip \zeropoint
- \global\d_page_sides_bottomskip\zeropoint
- \global\d_strc_floats_margin \zeropoint}%
- \global\advance\d_page_sides_topskip \floatparameter\c!topoffset
- \global\advance\d_page_sides_bottomskip\floatparameter\c!bottomoffset
+ \doifinset\v!high \floatlocation{\global\c_page_sides_skipmode \plusone }%
+ \doifinset\v!low \floatlocation{\global\c_page_sides_skipmode \plustwo }%
+ \doifinset\v!fit \floatlocation{\global\c_page_sides_skipmode \plusthree}%
+ \doifinset\v!tolerant \floatlocation{\global\c_page_sides_tolerance\plusone }%
+ \doifinset\v!verytolerant\floatlocation{\global\c_page_sides_tolerance\plustwo }%
\else
\processallactionsinset
[\floatlocation]%
@@ -822,14 +861,18 @@
{\setfalse\c_page_floats_center_box_global
\setfalse\c_page_floats_center_box_local}}
-\let\naturalfloatheight\!!zeropoint
-\let\naturalfloatwidth \!!zeropoint
-\let\naturalfloatdepth \!!zeropoint
+\def\naturalfloatheight{\the\naturalfloatht}
+\def\naturalfloatwidth {\the\naturalfloatwd}
+\def\naturalfloatdepth {\the\naturalfloatdp}
+
+\newdimen\naturalfloatwd
+\newdimen\naturalfloatht
+\newdimen\naturalfloatdp
\def\strc_floats_set_natural_dimensions#box%
- {\xdef\naturalfloatheight{\the\ht#box}%
- \xdef\naturalfloatwidth {\the\wd#box}%
- \xdef\naturalfloatdepth {\the\dp#box}}
+ {\global\naturalfloatwd\wd#box\relax
+ \global\naturalfloatht\ht#box\relax
+ \global\naturalfloatdp\dp#box\relax}
\def\doifelsemainfloatbody
{\ifinsidesplitfloat
@@ -1038,9 +1081,7 @@
\unexpanded\def\installfloatmovement#1#2{\setvalue{\??floatmovement#1}{#2}}
\def\strc_floats_move_down#setting%
- {\csname\??floatmovement
- \ifcsname\??floatmovement#setting\endcsname#setting\fi
- \endcsname}
+ {\begincsname\??floatmovement#setting\endcsname}
\def\strc_floats_move_down_line#sign%
{\if!!donea \else
@@ -1162,34 +1203,6 @@
\unexpanded\def\placefloats
{\page_otr_command_flush_floats}
-\newdimen\d_strc_floats_margin
-\newdimen\d_strc_floats_top
-\newdimen\d_strc_floats_bottom
-
-\def\strc_floats_calculate_skip#target#skip%
- {\edef\askedfloatskip{#skip}%
- \ifx\askedfloatskip\empty
- \global#target\zeropoint
- \else\ifx\askedfloatskip\v!none
- \global#target\zeropoint
- \else
- \setbox\scratchbox\vbox{\whitespace\blank[\askedfloatskip]}% todo: move whitespace inside blank
- \global#target\ht\scratchbox
- \fi\fi}
-
-\def\strc_floats_calculate_skips
- {\begingroup
- \strc_floats_calculate_skip\d_strc_floats_top {\rootfloatparameter\c!spacebefore }%
- \strc_floats_calculate_skip\d_strc_floats_bottom {\rootfloatparameter\c!spaceafter }%
- \strc_floats_calculate_skip\d_page_sides_topskip {\rootfloatparameter\c!sidespacebefore}%
- \strc_floats_calculate_skip\d_page_sides_bottomskip{\rootfloatparameter\c!sidespaceafter }%
- \global\d_strc_floats_margin \rootfloatparameter\c!margin
- \global\d_page_sides_leftshift \floatparameter \c!leftmargindistance
- \global\d_page_sides_rightshift \floatparameter \c!rightmargindistance
- \global\c_page_floats_n_of_top \rootfloatparameter\c!ntop
- \global\c_page_floats_n_of_bottom\rootfloatparameter\c!nbottom
- \endgroup}
-
\unexpanded\def\betweenfloatblanko % assumes that spaceafter is present
{\blank[\rootfloatparameter\c!spacebefore]} % or v!back,....
@@ -1266,7 +1279,7 @@
\hsize\floattextwidth
\ignorespaces}
-\def\strc_floats_stop_text_indeed
+\def\strc_floats_stop_text_indeed % todo
{\egroup
\doifnotinset\v!tall\floatlocation
{\floattextheight\ifdim\ht\floattext<\floatheight\floatheight\else\ht\floattext\fi}%
@@ -1385,8 +1398,11 @@
\newdimen\d_strc_float_temp_height
\newdimen\d_strc_float_temp_width
-\def\captionminwidth {15\bodyfontsize}
-\def\captionovershoot{2\emwidth}
+\newconditional\c_floats_adapt_to_caption_width
+\newconditional\c_floats_store_minimal_package
+
+\def\captionminwidth {15\bodyfontsize} % can become parameter (but what name)
+\def\captionovershoot{2\emwidth} % can become parameter (but what name)
\let\strc_floats_mark_pag_as_free\relax
@@ -1426,7 +1442,9 @@
\or
% manual
\fi
- \ifcase\c_strc_floats_rotation
+ \ifconditional\c_floats_store_minimal_package
+ % nothing
+ \else\ifcase\c_strc_floats_rotation
\doifnotinset\v!margin\floatlocation % brr, really needed! see wm
{\postcenterfloatbox\d_strc_floats_content
\strc_floats_mark_pag_as_free}%
@@ -1435,7 +1453,7 @@
\global\setbox\floatbox\vpack
{\rotate[\c!rotation=\number\c_strc_floats_rotation]{\box\floatbox}}%
\strc_floats_mark_pag_as_free
- \fi
+ \fi\fi
\egroup}
\def\strc_floats_prepare_no_caption
@@ -1544,6 +1562,10 @@
\fi
\edef\captionhsize{\the\wd\b_strc_floats_content}%
\scratchwidth\floatcaptionparameter\c!maxwidth\relax
+ \ifconditional\c_floats_adapt_to_caption_width
+ \let\captionminwidth \!!zeropoint
+ \let\captionovershoot\!!zeropoint
+ \fi
\ifdim\captionhsize>\scratchwidth
% float is wider than \hsize
\setbox\b_strc_floats_caption\vbox
@@ -1571,6 +1593,9 @@
\ifdim\captionhsize<\captionminwidth\relax
\scratchdimen\captionminwidth % float smaller than min width
\edef\captionhsize{\the\scratchdimen}%
+% \ifconditional\c_floats_adapt_to_caption_width
+% \setbox\b_strc_floats_content\hpack to \captionhsize{\hss\box\b_strc_floats_content\hss}%
+% \fi
\fi
\setbox\scratchbox\vbox % test with overshoot
{\settrialtypesetting
@@ -1821,7 +1846,7 @@
\strc_floats_align_content{\box\b_strc_floats_content}%
\fi}%
\getnoflines{\dimexpr\htdp\scratchbox-10\scaledpoint\relax}% get rid of inaccuracy
- \vbox to \noflines\lineheight{\unvbox\scratchbox}}
+ \vbox to \noflines\lineheight{\unvbox\scratchbox}} % \vpack ?
\def\strc_floats_build_box_bottom_stack_grid
{\dp\b_strc_floats_caption\strutdepth
@@ -1838,7 +1863,7 @@
\strc_floats_locate_text_float{\box\b_strc_floats_caption}%
\fi}%
\getnoflines{\dimexpr\htdp\scratchbox-10\scaledpoint\relax}% get rid of inaccuracy
- \vbox to \noflines\lineheight{\unvbox\scratchbox}}
+ \vbox to \noflines\lineheight{\unvbox\scratchbox}} % \vpack ?
\def\strc_floats_build_box_top_stack_stretch
{\dp\b_strc_floats_caption\strutdepth
@@ -1903,14 +1928,52 @@
{\global\setbox\floatbox\vbox % pack ? probably not
{\strc_floats_set_local_hsize
\forgetall
- \let\floatcaptionarrangement\s!default
- \processcommacommand[\floatcaptionparameter\c!location]\strc_floats_build_box_step
- \ifcsname\??floatbuilder\floatcaptionarrangement\endcsname
- \lastnamedcs
+ \ifconditional\c_floats_store_minimal_package
+ \strc_floats_build_box_separate_make
\else
- \strc_floats_build_box_default
+ \let\floatcaptionarrangement\s!default
+ \processcommacommand[\floatcaptionparameter\c!location]\strc_floats_build_box_step
+ \ifcsname\??floatbuilder\floatcaptionarrangement\endcsname
+ \lastnamedcs
+ \else
+ \strc_floats_build_box_default
+ \fi
\fi}}
+% special purpose: used in floatcombinations
+
+\newbox\b_strc_floats_separate_content
+\newbox\b_strc_floats_separate_caption
+
+\def\strc_floats_build_box_separate_set
+ {\settrue\c_floats_adapt_to_caption_width
+ \settrue\c_floats_store_minimal_package}
+
+\def\strc_floats_build_box_separate_make
+ {\offinterlineskip
+ \vpack to \onepoint{\box\b_strc_floats_content}\break
+ \vpack to \onepoint{\box\b_strc_floats_caption}}
+
+\def\strc_floats_build_box_separate_split#1%
+ {\setbox\scratchbox\vbox{%
+ \setbox\scratchbox\vpack{#1}%
+ \unvbox\scratchbox\relax
+ \setbox\scratchbox\lastbox
+ %\doloop{%
+ \unvbox\scratchbox
+ \setbox\scratchbox\lastbox
+ % \ifdim\ht\scratchbox=2\onepoint
+ \unvbox\scratchbox
+ \setbox\scratchbox\lastbox
+ % \exitloop
+ % \fi}%
+ \splittopskip\zeropoint
+ \global\setbox\b_strc_floats_separate_content\vsplit\scratchbox to \onepoint
+ \global\setbox\b_strc_floats_separate_caption\vsplit\scratchbox to \onepoint
+ \global\setbox\b_strc_floats_separate_content\vpack{\unvbox\b_strc_floats_separate_content\setbox0\lastbox\unvbox0}%
+ \global\setbox\b_strc_floats_separate_caption\tpack{\unvbox\b_strc_floats_separate_caption\setbox0\lastbox\unvbox0}%
+ }}
+
% \def\strc_floats_build_box_step#1%
% {\doifdefined{\??floatbuilder#1}{\def\floatcaptionarrangement{#1}\quitcommalist}}
@@ -2000,14 +2063,14 @@
\forgetall
\postponenotes
\dontcomplain
- \setbox\b_strc_floats_content\vbox{\borderedfloatbox}%
+ \setbox\b_strc_floats_content\vbox{\borderedfloatbox}% \vpack >?
%\page_backgrounds_add_local_to_box\b_strc_floats_content
\ifnofloatcaption
\global\setbox\floatbox\vpack{\box\b_strc_floats_content}%
\else
\strc_floats_check_caption_content
\strc_floats_prepare_side_caption
- \setbox\b_strc_floats_caption\hbox{\floatcaptionparameter\c!command{\box\b_strc_floats_caption}}%
+ \setbox\b_strc_floats_caption\hbox{\floatcaptionparameter\c!command{\box\b_strc_floats_caption}}% \hpack ?
\moveboxontogrid\b_strc_floats_caption{\floatcaptionparameter\c!grid}\d_strc_floats_caption_height
%\page_backgrounds_add_local_to_box\b_strc_floats_caption
\strc_floats_build_side_box
@@ -2116,13 +2179,17 @@
\fi
\strc_floats_set_local_dimensions
\global\advance\totalnoffloats\plusone
- \setbox\floatbox\hpack{\strc_float_save_data\box\floatbox}% still needed? we will do renumbering differently
+ \ifconditional\c_floats_store_minimal_package \else
+ \setbox\floatbox\hpack{\strc_float_save_data\box\floatbox}% still needed? we will do renumbering differently
+ \fi
\global\floatheight\htdp\floatbox
\global\floatwidth\wd\floatbox
- \doifnotinset\v!margin\floatlocation % this still goes wrong here
- {\setbox\floatbox\vpack
- {\parindent\zeropoint
- \box\floatbox}}%
+ \ifconditional\c_floats_store_minimal_package \else
+ \doifnotinset\v!margin\floatlocation % this still goes wrong here
+ {\setbox\floatbox\vpack
+ {\parindent\zeropoint
+ \box\floatbox}}%
+ \fi
\wd\floatbox\floatwidth
\ifdim\dimexpr\floatheight+\lineheight\relax<\textheight \else
\global\floatheight\dimexpr\textheight-\lineheight\relax
@@ -2186,6 +2253,11 @@
\ifx\forcedfloatmethod\empty \else
\let\floatmethod\forcedfloatmethod
\fi
+\let\askedfloatmethod\floatmethod
+\ifexporting \ifx\askedfloatmethod\v!here \else
+ \showmessage\m!floatblocks{15}{\askedfloatmethod,\v!here}%
+ \let\floatlocation\v!here
+\fi \fi
% [] will go
\edef\floatlocationmethod{\floatmethod,\floatlocation}%
\csname\??floatmethods\currentoutputroutine:\floatmethod\endcsname
diff --git a/tex/context/base/mkiv/strc-ini.mkvi b/tex/context/base/mkiv/strc-ini.mkvi
index ad83cbc58..56621b6e6 100644
--- a/tex/context/base/mkiv/strc-ini.mkvi
+++ b/tex/context/base/mkiv/strc-ini.mkvi
@@ -13,7 +13,7 @@
\writestatus{loading}{ConTeXt Structure Macros / Initialization & Helpers}
-\registerctxluafile{strc-ini}{1.001}
+\registerctxluafile{strc-ini}{}
\unprotect
diff --git a/tex/context/base/mkiv/strc-itm.mkvi b/tex/context/base/mkiv/strc-itm.mkvi
index a28193415..0bea62de8 100644
--- a/tex/context/base/mkiv/strc-itm.mkvi
+++ b/tex/context/base/mkiv/strc-itm.mkvi
@@ -13,7 +13,7 @@
\writestatus{loading}{ConTeXt Structure Macros / Itemgroups}
-\registerctxluafile{strc-itm}{1.001}
+\registerctxluafile{strc-itm}{}
%D As we analyze/register widths and such we could as well push and pop the
%D numbers at the \LUA\ end (which saves a few calls).
@@ -410,8 +410,14 @@
\settrue\c_strc_itemgroups_pack
\fi}
+\def\strc_itemgroups_process_set_option_unpack
+ {\ifcase\c_strc_itemgroups_nesting\else
+ \setfalse\c_strc_itemgroups_pack
+ \fi}
+
\setvalue{\??itemgroupkeyword\!!zerocount }{} % ignore 0
\setvalue{\??itemgroupkeyword\v!packed }{\strc_itemgroups_process_set_option_pack}
+\setvalue{\??itemgroupkeyword\v!unpacked }{\strc_itemgroups_process_set_option_unpack}
\setvalue{\??itemgroupkeyword\v!intro }{\settrue\c_strc_itemgroups_intro} % here? not set to false
\setvalue{\??itemgroupkeyword\v!autointro }{\settrue\c_strc_itemgroups_auto_intro}
\setvalue{\??itemgroupkeyword\v!broad }{\ifx\itemgroupfirst\empty
@@ -442,6 +448,7 @@
\setvalue{\??itemgroupkeyword\v!serried }{\edef\itemgroupfirst{-\ifx\itemgroupfirst\empty1\else\itemgroupfirst\fi}%
\letitemgroupparameter\c!factor\itemgroupfirst}
\setvalue{\??itemgroupkeyword\v!stopper }{\letitemgroupparameter\c!placestopper\v!yes} % keep {}
+\setvalue{\??itemgroupkeyword\v!nostopper }{\letitemgroupparameter\c!placestopper\v!no} % keep {}
\setvalue{\??itemgroupkeyword\v!repeat }{\settrue\c_strc_itemgroups_repeat}
\setvalue{\??itemgroupkeyword\v!norepeat }{\setfalse\c_strc_itemgroups_repeat}
\setvalue{\??itemgroupkeyword\v!reverse }{\settrue\c_strc_itemgroups_reverse}
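% A hedged usage sketch for the new keywords set up above: 'unpacked' only
% acts in nested itemgroups (see the nesting check) and 'nostopper' disables
% the number stopper; the surrounding commands are standard ConTeXt.
%
%   \startitemize[packed]
%       \startitem one \stopitem
%       \startitemize[n,unpacked,nostopper]
%           \startitem alpha \stopitem
%           \startitem beta  \stopitem
%       \stopitemize
%       \startitem two \stopitem
%   \stopitemize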
@@ -1125,19 +1132,21 @@
\strc_itemgroups_margin_symbol
\let\strc_itemgroups_margin_symbol\relax
\dostarttagged\t!itemcontent\empty
- \strut
+ \begstrut % \strut
\nobreak % else problems with intext items
\seteffectivehsize % NEW !
\hskip\d_strc_itemgroups_signal % concat
\itemgroupparameter\c!command}
\unexpanded\def\stopitemgroupitem
- {\ifconditional\c_strc_itemgroups_text
+ {\ifhmode
+ \endstrut % new per 2017-12-15
+ \fi
+ \ifconditional\c_strc_itemgroups_text
% nothing
\else
\endgraf
\fi}
-
\unexpanded\def\startitemgrouphead
{\dosingleempty\strc_itemgroups_start_head}
@@ -1212,7 +1221,9 @@
\dotagsetitem\s!symbol}
\unexpanded\def\strc_itemgroups_start_dummy
- {\strc_itemgroups_start_symbol\strut\strut} % two ?
+ {\strc_itemgroups_start_symbol
+ %\strut\strut} % two ?
+ \begstrut}
\unexpanded\def\strc_itemgroups_start_subitem
{\settrue\c_strc_itemgroups_sub
diff --git a/tex/context/base/mkiv/strc-lev.mkvi b/tex/context/base/mkiv/strc-lev.mkvi
index 6e08e7c07..b8b633c32 100644
--- a/tex/context/base/mkiv/strc-lev.mkvi
+++ b/tex/context/base/mkiv/strc-lev.mkvi
@@ -19,7 +19,7 @@
%D it as core functionality. For the moment this is an experiment that
%D Alan and I conduct so it might evolve.
-\registerctxluafile{strc-lev}{1.001}
+\registerctxluafile{strc-lev}{}
\unprotect
diff --git a/tex/context/base/mkiv/strc-lst.lua b/tex/context/base/mkiv/strc-lst.lua
index be8e07112..a235fac75 100644
--- a/tex/context/base/mkiv/strc-lst.lua
+++ b/tex/context/base/mkiv/strc-lst.lua
@@ -15,7 +15,7 @@ if not modules then modules = { } end modules ['strc-lst'] = {
--
-- move more to commands
-local tonumber, type = tonumber, type
+local tonumber, type, next = tonumber, type, next
local concat, insert, remove, sort = table.concat, table.insert, table.remove, table.sort
local lpegmatch = lpeg.match
diff --git a/tex/context/base/mkiv/strc-lst.mkvi b/tex/context/base/mkiv/strc-lst.mkvi
index 08e56a700..153d879b7 100644
--- a/tex/context/base/mkiv/strc-lst.mkvi
+++ b/tex/context/base/mkiv/strc-lst.mkvi
@@ -13,7 +13,7 @@
\writestatus{loading}{ConTeXt Structure Macros / Lists}
-\registerctxluafile{strc-lst}{1.001}
+\registerctxluafile{strc-lst}{}
% clean up in progress ...
%
@@ -144,7 +144,8 @@
\def\strc_lists_inject_nop[#dummya][#dummyb]%
{\endgroup}
-\unexpanded\def\strc_lists_inject_enhance#listindex#internal%
+% \unexpanded
+\def\strc_lists_inject_enhance#listindex#internal%
{\normalexpanded{\ctxlatecommand{enhancelist(\number#listindex)}}}
\unexpanded\def\strc_lists_inject_yes[#settings][#userdata]% can be used directly
@@ -740,7 +741,7 @@
% {\hskip.25\emwidth\relax}
\setuplistalternative
- [\c!command=\strictlistparameter\c!command,
+ [\c!command=\directlistparameter\c!command,
\c!symbol=.]
\unexpanded\def\currentlistfiller
@@ -1452,40 +1453,7 @@
\strc_lists_interaction_check_nop
\fi}
-% \def\strc_lists_interaction_check_yes
-% {\edef\p_interaction_forward{\listparameter\c!interaction}%
-% \ifcsname\??listinteractions\p_interaction_forward\endcsname
-% \expandafter\let\expandafter\p_interaction_forward\csname\??listinteractions\p_interaction_forward\endcsname
-% \strc_references_get_simple_reference{internal(\currentlistentrylocation)}%
-% \a_strc_lists_reference\currentreferenceattribute
-% \else
-% \a_strc_lists_reference\attributeunsetvalue
-% \fi
-% \ifnum\a_strc_lists_reference=\attributeunsetvalue
-% \let\strc_lists_get_reference_attribute\gobbleoneargument
-% \let\strc_lists_set_reference_attribute\gobbleoneargument
-% \let\strc_lists_set_style_color \strc_lists_set_style_color_normal
-% \else
-% \let\strc_lists_get_reference_attribute\strc_lists_get_reference_attribute_indeed
-% \let\strc_lists_set_reference_attribute\strc_lists_set_reference_attribute_indeed
-% \let\strc_lists_set_style_color \strc_lists_set_style_color_special
-% \fi
-% \edef\p_interaction_backward{\namedheadparameter\currentlist\c!interaction}% \namedheadparameter !
-% \ifx\p_interaction_backward\v!list
-% \strc_references_set_simple_reference{*\currentlistentrylocation}%
-% \a_strc_lists_destination\currentdestinationattribute
-% \else
-% \a_strc_lists_destination\attributeunsetvalue
-% \fi
-% \ifnum\a_strc_lists_destination=\attributeunsetvalue
-% \let\strc_lists_get_destination_attribute\empty
-% \let\strc_lists_set_destination_attribute\empty
-% \else
-% \let\strc_lists_get_destination_attribute\strc_lists_get_destination_attribute_indeed
-% \let\strc_lists_set_destination_attribute\strc_lists_set_destination_attribute_indeed
-% \fi}
-
-\def\strc_lists_interaction_check_yes
+\def\strc_lists_interaction_check_yes_yes
{\edef\p_interaction_forward{\listparameter\c!interaction}%
\ifcsname\??listinteractions\p_interaction_forward\endcsname
%\expandafter\let\expandafter\p_interaction_forward\csname\??listinteractions\p_interaction_forward\endcsname
@@ -1496,13 +1464,13 @@
\a_strc_lists_reference\attributeunsetvalue
\fi
\ifnum\a_strc_lists_reference=\attributeunsetvalue
- \let\strc_lists_get_reference_attribute\gobbleoneargument
- \let\strc_lists_set_reference_attribute\gobbleoneargument
- \let\strc_lists_set_style_color \strc_lists_set_style_color_normal
+ \let\strc_lists_get_reference_attribute\gobbleoneargument
+ \let\strc_lists_set_reference_attribute\gobbleoneargument
+ \let\strc_lists_set_style_color \strc_lists_set_style_color_normal
\else
- \let\strc_lists_get_reference_attribute\strc_lists_get_reference_attribute_indeed
- \let\strc_lists_set_reference_attribute\strc_lists_set_reference_attribute_indeed
- \let\strc_lists_set_style_color \strc_lists_set_style_color_special
+ \let\strc_lists_get_reference_attribute\strc_lists_get_reference_attribute_indeed
+ \let\strc_lists_set_reference_attribute\strc_lists_set_reference_attribute_indeed
+ \let\strc_lists_set_style_color \strc_lists_set_style_color_special
\fi
\edef\p_interaction_backward{\namedheadparameter\currentlist\c!interaction}% \namedheadparameter !
\ifx\p_interaction_backward\v!list
@@ -1512,13 +1480,31 @@
\a_strc_lists_destination\attributeunsetvalue
\fi
\ifnum\a_strc_lists_destination=\attributeunsetvalue
- \let\strc_lists_get_destination_attribute\empty
- \let\strc_lists_set_destination_attribute\empty
+ \let\strc_lists_get_destination_attribute\empty
+ \let\strc_lists_set_destination_attribute\empty
\else
- \let\strc_lists_get_destination_attribute\strc_lists_get_destination_attribute_indeed
- \let\strc_lists_set_destination_attribute\strc_lists_set_destination_attribute_indeed
+ \let\strc_lists_get_destination_attribute\strc_lists_get_destination_attribute_indeed
+ \let\strc_lists_set_destination_attribute\strc_lists_set_destination_attribute_indeed
\fi}
+\def\strc_lists_interaction_check_yes_nop
+ {\a_strc_lists_reference \attributeunsetvalue
+ \a_strc_lists_destination\attributeunsetvalue
+ \let\strc_lists_get_reference_attribute\gobbleoneargument
+ \let\strc_lists_set_reference_attribute\gobbleoneargument
+ \let\strc_lists_get_destination_attribute\empty
+ \let\strc_lists_set_destination_attribute\empty
+ \let\strc_lists_set_style_color\strc_lists_set_style_color_normal}
+
+\def\strc_lists_interaction_check_yes
+ {\ifx\currentlistentrylocation\empty
+ \strc_lists_interaction_check_yes_nop
+ \else\ifnum\currentlistentrylocation=\zerocount
+ \strc_lists_interaction_check_yes_nop
+ \else
+ \strc_lists_interaction_check_yes_yes
+ \fi\fi}
+
\def\strc_lists_interaction_check_nop
{\let\strc_lists_get_reference_attribute \gobbleoneargument
\let\strc_lists_set_reference_attribute \gobbleoneargument
diff --git a/tex/context/base/mkiv/strc-mar.lua b/tex/context/base/mkiv/strc-mar.lua
index 624972af4..80fcf126a 100644
--- a/tex/context/base/mkiv/strc-mar.lua
+++ b/tex/context/base/mkiv/strc-mar.lua
@@ -24,8 +24,6 @@ local setmetatableindex = table.setmetatableindex
local nuts = nodes.nuts
local tonut = nuts.tonut
-local getfield = nuts.getfield
-local setfield = nuts.setfield
local getid = nuts.getid
local getlist = nuts.getlist
local getattr = nuts.getattr
diff --git a/tex/context/base/mkiv/strc-mar.mkiv b/tex/context/base/mkiv/strc-mar.mkiv
index 4e8df5f5d..df5cae692 100644
--- a/tex/context/base/mkiv/strc-mar.mkiv
+++ b/tex/context/base/mkiv/strc-mar.mkiv
@@ -13,7 +13,7 @@
\writestatus{loading}{ConTeXt Structure Macros / Markings}
-\registerctxluafile{strc-mar}{1.001}
+\registerctxluafile{strc-mar}{}
\unprotect
diff --git a/tex/context/base/mkiv/strc-mat.mkiv b/tex/context/base/mkiv/strc-mat.mkiv
index 4308666f3..775d2aca1 100644
--- a/tex/context/base/mkiv/strc-mat.mkiv
+++ b/tex/context/base/mkiv/strc-mat.mkiv
@@ -14,7 +14,7 @@
\writestatus{loading}{ConTeXt Structure Macros / Math Numbering}
-\registerctxluafile{strc-mat}{1.001}
+\registerctxluafile{strc-mat}{}
% -- we have potential for captions
% -- this module will use the commandhandler
@@ -46,6 +46,7 @@
\c!indentnext=\v!no,
\c!alternative=\s!default,
\c!strut=\v!no,
+ \c!numberstrut=\v!yes, % \v!no \v!yes \v!always
\c!distance=2\emwidth]
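% A hedged sketch of the new numberstrut key added above (default yes); the
% value 'always' also forces the strut when the number itself is not placed,
% see \strc_formulas_place_number_noneed below.
%
%   \setupformula[numberstrut=always]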
\setupformulaframed
@@ -236,11 +237,14 @@
\global\setfalse\c_strc_formulas_inside_place_sub
\to \everyresetformulas
+\def\strc_formulas_place_number_noneed
+ {\doif{\formulaparameter\c!numberstrut}\v!always\strut}
+
\def\strc_formulas_place_numbering % place formula
{\settrue\c_strc_formulas_handle_number
\strc_formulas_check_reference\c_strc_formulas_place_number_mode\currentplaceformulareference
\ifnum\c_strc_formulas_place_number_mode=\plustwo
- \glet\strc_formulas_place_number\relax
+ \glet\strc_formulas_place_number\strc_formulas_place_number_noneed
\else
\glet\strc_formulas_place_number\strc_formulas_place_number_indeed
\fi
@@ -281,7 +285,12 @@
\begingroup
\useformulastyleandcolor\c!numberstyle\c!numbercolor
\formulaparameter\c!numbercommand
- {\strut
+ {\edef\p_strut{\formulaparameter\c!numberstrut}%
+ \ifx\p_strut\v!always
+ \strut
+ \else\ifx\p_strut\v!yes
+ \strut
+ \fi\fi
\formulaparameter\c!left
\namedtaggedlabeltexts
\t!formulalabel \v!formula
@@ -295,8 +304,14 @@
\endgroup}
\unexpanded\def\strc_formulas_place_current_number
- {\strc_formulas_handle_current_references
- \labeltexts\currentformula{\convertedcounter[\v!formula][]}}
+ {\ifx\namedformulaentry\empty
+ \strc_formulas_handle_current_references
+ \labeltexts\currentformula{\convertedcounter[\v!formula][]}%
+ \else
+ \expandafter % hm, the next one resets \namedformulaentry
+ \strc_formulas_handle_current_references
+ \namedformulaentry
+ \fi}
\def\theformuladestinationattribute#1%
{\iflocation\ifx#1\relax\else\ifx#1\empty\else
@@ -395,8 +410,10 @@
% needs checking ... too many:
\def\strc_formulas_handle_numbering_indeed
- {\strc_counters_increment\v!formula
- \doiftext\currentplaceformulasuffix{\strc_counters_setown_sub\v!formula\plustwo\currentplaceformulasuffix}%
+ {\ifx\namedformulaentry\empty
+ \strc_counters_increment\v!formula
+ \doiftext\currentplaceformulasuffix{\strc_counters_setown_sub\v!formula\plustwo\currentplaceformulasuffix}%
+ \fi
\placecurrentformulanumber}
\def\strc_formulas_handle_numbering
@@ -546,8 +563,10 @@
\newconstant\c_strc_formulas_mode % this will go away
\newconstant\c_strc_formulas_space_model
+\newconstant\c_strc_math_vertical % experiment
+
\c_strc_formulas_mode \plustwo % 0=native 1=simple (old) 2=align (new)
-\c_strc_formulas_space_model\plusthree % replaces \plusone
+\c_strc_formulas_space_model\plusthree % replaces \plusone, we might use \plusfour in the future
\newconditional\c_strc_formulas_tight
@@ -622,35 +641,82 @@
\directvspacing\p_spaceafter
\fi}
-\def\strc_math_obey_depth
- {\ifvmode\ifdim\prevdepth<\zeropoint\else\ifdim\prevdepth<\strutdp
- % maybe add a tracing option here
- \ifgridsnapping
- \directvspacing\v!depth
- \else
- \kern\dimexpr\strutdp-\prevdepth\relax
- \prevdepth\strutdp
- \fi
- \fi\fi\fi}
-
\setvalue{\??mathdisplayspacemodel\v!before:3}%
{% not ok, try \stopformula\par\startformula vs \stopformula\startformula
- \ifdim\lastskip>\zeropoint
- % bah
- \else
- \strc_math_obey_depth % somehow \fakenextstrutline doesn't work here
+ \let\m_spacebefore\empty
+ \ifvmode
+ \ifdim\lastskip>\zeropoint\else
+ \ifdim\prevdepth<\zeropoint\else
+ \ifdim\prevdepth<\strutdp
+ % maybe add a tracing option here
+ \ifgridsnapping
+ \let\m_spacebefore\v!depth
+ \else
+ \edef\m_spacebefore{\the\dimexpr\strutdp-\prevdepth\relax}%
+ \fi
+ \fi
+ \fi
+ \fi
\nointerlineskip
\fi
- \ifx\p_spacebefore\v!none
+ \ifx\m_spacebefore\empty
+ \ifx\p_spacebefore\v!none
+ % nothing
+ \else\ifx\p_spacebefore\empty
+ \directvspacing\currentvspacing
+ \else
+ \directvspacing{\p_spacebefore,\the\scratchdimen}%
+ \fi\fi
+ \else
+ \ifx\p_spacebefore\v!none
+ \directvspacing{\m_spacebefore}%
+ \else\ifx\p_spacebefore\empty
+ \directvspacing{\m_spacebefore,\currentvspacing}%
+ \else
+ \directvspacing{\m_spacebefore,\p_spacebefore}%
+ \fi\fi
+ \fi}
+
+\setvalue{\??mathdisplayspacemodel\v!after:3}%
+ {\prevdepth\strutdp % \directvspacing\v!depth
+ \ifx\p_spaceafter\v!none
% nothing
\else\ifx\p_spaceafter\empty
\directvspacing\currentvspacing
\else
- \directvspacing\p_spacebefore
+ \directvspacing\p_spaceafter
\fi\fi}
-\setvalue{\??mathdisplayspacemodel\v!after:3}%
- {\prevdepth\strutdp % \directvspacing\v!depth
+\newconditional\c_math_model_four_indeed
+
+\setvalue{\??mathdisplayspacemodel\v!before:4}%
+ {% not ok, try \stopformula\par\startformula vs \stopformula\startformula
+ \ifvmode
+ \ifinner
+ \csname\??mathdisplayspacemodel\v!before:3\endcsname
+ \else
+ \settrue\c_math_model_four_indeed
+ \forcestrutdepth
+ \nointerlineskip
+ \ifx\p_spacebefore\v!none
+ % nothing
+ \else\ifx\p_spacebefore\empty
+ \directvspacing\currentvspacing
+ \else
+ \directvspacing{\p_spacebefore,\the\scratchdimen}%
+ \fi\fi
+ \fi
+ \else
+ \csname\??mathdisplayspacemodel\v!before:3\endcsname
+ \fi}
+
+\setvalue{\??mathdisplayspacemodel\v!after:4}%
+ {\ifconditional\c_math_model_four_indeed
+ \setfalse\c_math_model_four_indeed
+ \forcestrutdepth
+ \else
+ \prevdepth\strutdp % \directvspacing\v!depth
+ \fi
\ifx\p_spaceafter\v!none
% nothing
\else\ifx\p_spaceafter\empty
@@ -659,6 +725,11 @@
\directvspacing\p_spaceafter
\fi\fi}
+\unexpanded\def\setdisplaymathspacemodel[#1]%
+ {\ifcsname\??mathdisplayspacemodel\v!before:\number#1\endcsname
+ \c_strc_formulas_space_model#1\relax
+ \fi}
+
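% A hedged usage sketch: the experimental depth driven spacing model can be
% selected with the macro defined above, or via the 'depth' formula option
% added below which maps to the same model.
%
%   \setdisplaymathspacemodel[4]
%
%   \startformula[depth] e = m c^2 \stopformula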
% \newtoks\everybeforedisplay
% \appendtoks\page_sides_check_floats_indeed\to\everybeforedisplay
@@ -730,6 +801,9 @@
{\d_strc_formulas_display_skip_left \zeropoint
\d_strc_formulas_display_skip_right\zeropoint}
+\setvalue{\??formulaoption\v!depth}%
+ {\c_strc_formulas_space_model\plusfour}
+
\setvalue{\??formulaoption\v!line}%
{\ifgridsnapping
\setformulaparameter\c!grid{\v!math:\v!line}%
@@ -750,12 +824,64 @@
\setformulaparameter\c!grid{\v!math:-\v!halfline}%
\fi}
+% when we have 1.0.6 we will use \mathpenaltiesmode
+%
+% \prebinoppenalty -100
+% \prerelpenalty -100
+
+\def\strc_math_set_split
+ {\edef\p_split{\formulaparameter\c!split}%
+ \ifx\p_split\v!yes
+ \global\c_strc_math_vertical\plusone
+ \else\ifx\p_split\v!page
+ \global\c_strc_math_vertical\plustwo
+ \else
+ \global\c_strc_math_vertical\zerocount
+ \fi\fi
+ \ifcase\c_strc_math_vertical
+ % \mathpenaltiesmode \zerocount
+ \clf_setmathpenalties\zerocount
+ \clf_resetmathhang
+ \else
+ % \mathpenaltiesmode \plusone
+ \clf_setmathpenalties\plusone
+ \edef\p_hang{\formulaparameter\c!hang}%
+ \ifx\p_hang\v!none
+ \global\setfalse\c_strc_math_indent
+ \clf_resetmathhang
+ \else
+ \global\settrue\c_strc_math_indent
+ \clf_setmathhang {%
+ method {\p_hang}%
+ distance \formulaparameter\c!distance
+ }%
+ \fi
+ \fi}
+
+\setupformula
+ [\c!split=\v!no,
+ \c!distance=\zeropoint,
+ %\c!interlinespace=1.5\lineheight,
+ \c!interlinespace=,
+ \c!hang=\v!none]
+
+% for the moment (when testing) we use a penalty 1
+
+\unexpanded\def\strc_math_align_here{\ifmmode\penalty\plusone\fi}%
+\unexpanded\def\strc_math_break_here{\ifmmode\hfill\break \fi}%
+
+\appendtoks
+ \let\alignhere\strc_math_align_here
+ \let\breakhere\strc_math_break_here
+\to \everymathematics
+
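% A hedged usage sketch of the experimental split mechanism and the
% \alignhere / \breakhere helpers made available in \everymathematics above;
% the exact breaking behaviour depends on the lua side penalties and hang
% settings, so this is only an illustration of the interface.
%
%   \setupformula[split=yes,hang=none]
%
%   \startformula
%       y \alignhere = a + b + c \breakhere
%         + d + e + f
%   \stopformula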
\unexpanded\def\strc_formulas_start_formula_indeed[#1][#2]% setting leftskip adaption is slow !
{\ifhmode
\par
\fi
\bgroup % HERE
\def\currentformula{#1}%
+ \strc_math_set_split
\dostarttaggedchained\t!formula\currentformula\??formula
\setfalse\c_strc_formulas_tight
\d_strc_formulas_display_skip_left \leftskip
diff --git a/tex/context/base/mkiv/strc-not.mkvi b/tex/context/base/mkiv/strc-not.mkvi
index db27cb5af..8952f0e9c 100644
--- a/tex/context/base/mkiv/strc-not.mkvi
+++ b/tex/context/base/mkiv/strc-not.mkvi
@@ -13,7 +13,7 @@
\writestatus{loading}{ConTeXt Structure Macros / Note Handling}
-\registerctxluafile{strc-not}{1.001}
+\registerctxluafile{strc-not}{}
\unprotect
@@ -1381,23 +1381,30 @@
\penalty\zerocount % otherwise no split in columns, maybe just always (tex just adds it to accumulated)
\fi}
-
\appendtoks
\strc_notes_set_penalties
- \forgetall
+ \forgetall % again
\strc_notes_set_bodyfont
\redoconvertfont % to undo \undo calls in in headings etc
\splittopskip\strutht % not actually needed here
\splitmaxdepth\strutdp % not actually needed here
- % brrr
-% \leftmargindistance \noteparameter\c!margindistance
-% \rightmargindistance\leftmargindistance
-% \ifnum\noteparameter\c!n=\zerocount % no ifcase new 31-07-99 ; always ?
-% \doifnotinset{\noteparameter\c!width}{\v!fit,\v!broad}\setnotehsize % ?
-% \fi
+ % not:
+% \leftmargindistance \noteparameter\c!margindistance
+% \rightmargindistance\leftmargindistance
+% \ifnum\noteparameter\c!n=\zerocount % no ifcase new 31-07-99 ; always ?
+% \doifnotinset{\noteparameter\c!width}{\v!fit,\v!broad}\setnotehsize % ?
+% \fi
+ %
\to \everyinsidenoteinsert
-\appendtoks % only needed when columns
+% maybe but better use [scope=local] here
+%
+% \appendtoks
+% \setfalse\inhibitmargindata
+% \to \everyinsidenoteinsert
+
+\appendtoks
+ % only needed when columns (could be three \set...)
\setsimplecolumnshsize[\c!distance=\noteparameter\c!columndistance,\c!n=\noteparameter\c!n,\c!width=\noteparameter\c!width]%
\to \everyinsidenoteinsert
@@ -1550,11 +1557,27 @@
\let\flushnotes\relax
+\unexpanded\def\startpostponingnotes % experimental, page-mix
+ {\ifconditional\postponingnotes\else
+ \global\settrue\postponingnotes
+ %\global\let\flushnotes\doflushnotes
+ \clf_postponenotes
+ \fi}
+
+\unexpanded\def\stoppostponingnotes % experimental, page-mix
+ {\doflushnotes}
+
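% A speculative usage sketch (the commands above are marked experimental and
% are meant for page-mix); the mixed columns wrapper is an assumption about
% how they get used, not something this patch prescribes:
%
%   \startpostponingnotes
%       \startmixedcolumns
%           some text with a note \footnote{flushed afterwards} ...
%       \stopmixedcolumns
%   \stoppostponingnotes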
\unexpanded\def\doflushnotes
{\ifconditional\postponingnotes
\begingroup
\let\flushnotes \relax
\let\postponenotes\relax
+ \ifconditional\postponednote
+ \ifhmode
+ % needed for tagging ... otherwise we get some weird node free error
+ \signalcharacter
+ \fi
+ \fi
\clf_flushpostponednotes% this also resets the states !
\global\setfalse\postponednote
\global\setfalse\postponingnotes
diff --git a/tex/context/base/mkiv/strc-num.lua b/tex/context/base/mkiv/strc-num.lua
index 98db1b42d..e1a133f4a 100644
--- a/tex/context/base/mkiv/strc-num.lua
+++ b/tex/context/base/mkiv/strc-num.lua
@@ -7,7 +7,7 @@ if not modules then modules = { } end modules ['strc-num'] = {
}
local format = string.format
-local next, type = next, type
+local next, type, tonumber = next, type, tonumber
local min, max = math.min, math.max
local insert, remove, copy = table.insert, table.remove, table.copy
local texsetcount = tex.setcount
@@ -15,6 +15,8 @@ local texsetcount = tex.setcount
-- Counters are managed here. They can have multiple levels which makes it easier to synchronize
-- them. Synchronization is sort of special anyway, as it relates to document structuring.
+local context = context
+
local allocate = utilities.storage.allocate
local setmetatableindex = table.setmetatableindex
local setmetatablecall = table.setmetatablecall
diff --git a/tex/context/base/mkiv/strc-num.mkiv b/tex/context/base/mkiv/strc-num.mkiv
index 4b222801a..be35e7671 100644
--- a/tex/context/base/mkiv/strc-num.mkiv
+++ b/tex/context/base/mkiv/strc-num.mkiv
@@ -13,7 +13,7 @@
\writestatus{loading}{ConTeXt Structure Macros / Basic Numbering}
-\registerctxluafile{strc-num}{1.001}
+\registerctxluafile{strc-num}{}
\unprotect
diff --git a/tex/context/base/mkiv/strc-pag.mkiv b/tex/context/base/mkiv/strc-pag.mkiv
index 21758d671..d56f0de54 100644
--- a/tex/context/base/mkiv/strc-pag.mkiv
+++ b/tex/context/base/mkiv/strc-pag.mkiv
@@ -13,7 +13,7 @@
\writestatus{loading}{ConTeXt Structure Macros / Pagenumbering}
-\registerctxluafile{strc-pag}{1.001}
+\registerctxluafile{strc-pag}{}
\unprotect
diff --git a/tex/context/base/mkiv/strc-ref.lua b/tex/context/base/mkiv/strc-ref.lua
index a9a7c7121..73f1ca886 100644
--- a/tex/context/base/mkiv/strc-ref.lua
+++ b/tex/context/base/mkiv/strc-ref.lua
@@ -16,7 +16,7 @@ if not modules then modules = { } end modules ['strc-ref'] = {
local format, find, gmatch, match, strip = string.format, string.find, string.gmatch, string.match, string.strip
local floor = math.floor
-local rawget, tonumber, type = rawget, tonumber, type
+local rawget, tonumber, type, next = rawget, tonumber, type, next
local lpegmatch = lpeg.match
local insert, remove, copytable = table.insert, table.remove, table.copy
local formatters = string.formatters
@@ -37,10 +37,10 @@ local check_duplicates = true
directives.register("structures.referencing.checkduplicates", function(v) check_duplicates = v end)
local report_references = logs.reporter("references")
-local report_unknown = logs.reporter("references","unknown")
local report_identifying = logs.reporter("references","identifying")
local report_importing = logs.reporter("references","importing")
local report_empty = logs.reporter("references","empty")
+local report = report_references
local variables = interfaces.variables
local v_page = variables.page
@@ -59,11 +59,7 @@ local texconditionals = tex.conditionals
local productcomponent = resolvers.jobs.productcomponent
local justacomponent = resolvers.jobs.justacomponent
-local logsnewline = logs.newline
-local logspushtarget = logs.pushtarget
-local logspoptarget = logs.poptarget
-
------ settings_to_array = utilities.parsers.settings_to_array
+local settings_to_array = utilities.parsers.settings_to_array
local settings_to_table = utilities.parsers.settings_to_array_obey_fences
local process_settings = utilities.parsers.process_stripped_settings
local unsetvalue = attributes.unsetvalue
@@ -123,6 +119,8 @@ local currentreference = nil
local txtcatcodes = catcodes.numbers.txtcatcodes -- or just use "txtcatcodes"
+local context = context
+
local ctx_pushcatcodes = context.pushcatcodes
local ctx_popcatcodes = context.popcatcodes
local ctx_dofinishreference = context.dofinishreference
@@ -1517,7 +1515,7 @@ local function identify_outer(set,var,i)
local inner = var.inner
local external = externals[outer]
if external then
- local v = identify_inner(set,var,nil,external)
+ local v = identify_inner(set,var,"",external)
if v then
v.kind = "outer with inner"
set.external = true
@@ -1526,19 +1524,24 @@ local function identify_outer(set,var,i)
end
return v
end
- local v = identify_inner(set,var,var.outer,external)
- if v then
- v.kind = "outer with inner"
- set.external = true
- if trace_identifying then
- report_identify_outer(set,v,i,"2b")
+ -- somewhat rubbish: we use outer as first step in the externals table so it makes no
+ -- sense to have it as prefix so the next could be an option
+ local external = external[""]
+ if external then
+ local v = identify_inner(set,var,var.outer,external)
+ if v then
+ v.kind = "outer with inner"
+ set.external = true
+ if trace_identifying then
+ report_identify_outer(set,v,i,"2b")
+ end
+ return v
end
- return v
end
end
local external = productdata.componentreferences[outer]
if external then
- local v = identify_inner(set,var,nil,external)
+ local v = identify_inner(set,var,"",external)
if v then
v.kind = "outer with inner"
set.external = true
@@ -1577,7 +1580,12 @@ local function identify_outer(set,var,i)
end
var.i = inner
var.f = outer
- var.r = (inner.references and inner.references.realpage) or (inner.pagedata and inner.pagedata.realpage) or 1
+ if type(inner) == "table" then
+ -- can this really happen?
+ var.r = (inner.references and inner.references.realpage) or (inner.pagedata and inner.pagedata.realpage) or 1
+ else
+ var.r = 1
+ end
if trace_identifying then
report_identify_outer(set,var,i,"2e")
end
@@ -1869,52 +1877,68 @@ implement {
}
}
-function references.reportproblems() -- might become local
+logs.registerfinalactions(function()
if nofunknowns > 0 then
statistics.register("cross referencing", function()
return format("%s identified, %s unknown",nofidentified,nofunknowns)
end)
- logspushtarget("logfile")
- logsnewline()
- report_references("start problematic references")
- logsnewline()
- for k, v in table.sortedpairs(unknowns) do
- report_unknown("%4i: %s",v,k)
+ local sortedhash = table.sortedhash
+ logs.startfilelogging(report,"missing references")
+ for k, v in table.sortedhash(unknowns) do
+ report("%4i %s",v,k)
+ end
+ logs.stopfilelogging()
+ if logs.loggingerrors() then
+ logs.starterrorlogging(report,"missing references")
+ for k, v in table.sortedhash(unknowns) do
+ report("%4i %s",v,k)
+ end
+ logs.stoperrorlogging()
end
- logsnewline()
- report_references("stop problematic references")
- logsnewline()
- logspoptarget()
end
-end
-
-luatex.registerstopactions(references.reportproblems)
+end)
-- The auto method will try to avoid named internals in a clever way which
-- can make files smaller without sacrificing external references. Some of
-- the housekeeping happens at the backend side.
local innermethod = v_auto -- only page|auto now
+local outermethod = v_auto -- only page|auto now
local defaultinnermethod = defaultinnermethod
+local defaultoutermethod = defaultoutermethod
references.innermethod = innermethod -- don't mess with this one directly
+references.outermethod = outermethod -- don't mess with this one directly
-function references.setinnermethod(m)
- if toboolean(m) or m == v_page or m == v_yes then
+function references.setlinkmethod(inner,outer)
+ if not outer and type(inner) == "string" then
+ local m = settings_to_array(inner)
+ inner = m[1]
+ outer = m[2] or v_auto
+ end
+ if toboolean(inner) or inner == v_page or inner == v_yes then
innermethod = v_page
- elseif m == v_name then
+ elseif inner == v_name then
innermethod = v_name
else
innermethod = v_auto
end
+ if toboolean(outer) or outer == v_page or outer == v_yes then
+ outermethod = v_page
+ elseif outer == v_name then
+ outermethod = v_name
+ else
+ outermethod = v_auto
+ end
references.innermethod = innermethod
- function references.setinnermethod()
- report_references("inner method is already set and frozen to %a",innermethod)
+ references.outermethod = outermethod
+ function references.setlinkmethod()
+ report_references("link method is already set and frozen: inner %a, outer %a",innermethod,outermethod)
end
end
implement {
- name = "setinnerreferencemethod",
- actions = references.setinnermethod,
+ name = "setreferencelinkmethod",
+ actions = references.setlinkmethod,
arguments = "string",
-- onlyonce = true
}
@@ -1923,8 +1947,12 @@ function references.getinnermethod()
return innermethod or defaultinnermethod
end
+function references.getoutermethod()
+ return outermethod or defaultoutermethod
+end
+
directives.register("references.linkmethod", function(v) -- page auto
- references.setinnermethod(v)
+ references.setlinkmethod(v)
end)
-- we can call setinternalreference with an already known internal or with
diff --git a/tex/context/base/mkiv/strc-ref.mkvi b/tex/context/base/mkiv/strc-ref.mkvi
index 9f2a7b91c..d0752407c 100644
--- a/tex/context/base/mkiv/strc-ref.mkvi
+++ b/tex/context/base/mkiv/strc-ref.mkvi
@@ -27,28 +27,12 @@
\writestatus{loading}{ConTeXt Structure Macros / Cross Referencing}
-\registerctxluafile{strc-rsc}{1.001}
-\registerctxluafile{strc-ref}{1.001}
-\registerctxluafile{node-ref}{1.001}
+\registerctxluafile{strc-rsc}{}
+\registerctxluafile{strc-ref}{}
+\registerctxluafile{node-ref}{}
\unprotect
-%D This module is a (partial) rewrite of core-ref.tex for \MKIV. As
-%D such it will be a moving target for a while.
-
-%D Later we will do a further cleanup and move much of the code to
-%D \LUA\ (i.e.\ better backend integration).
-
-\let\mainreference\gobblefivearguments
-
-% this will go when we got rid of the tuo file
-
-\let\currentfolioreference \!!zerocount % only used in xml-fo
-\let\resetreferences \relax
-\let\setreferences \relax
-\let\showcurrentreference \relax
-\let\setexecutecommandcheck\gobbletwoarguments
-
% todo : unknown/illegal reference no arg
% todo : +n pages check on 'samepage' (contrastcolor)
% todo : multiple text in reference
@@ -372,7 +356,7 @@
{\begingroup
\dowithnextbox
{\strc_references_set_page_only_destination_attribute{#1}%
- \hbox
+ \hpack % \hbox
\ifnum\lastdestinationattribute=\attributeunsetvalue\else attr \destinationattribute \lastdestinationattribute \fi
{\box\b_strc_destination_nodes\box\nextbox}%
\endgroup}}
@@ -495,13 +479,13 @@
{\scratchwidth \wd\nextbox
\scratchheight\ht\nextbox
\scratchdepth \dp\nextbox
- \setbox\nextbox\hbox
+ \setbox\nextbox\hbox % \hpack ?
{\framed[\c!frame=\v!off,#2]{\box\nextbox}}%
\strc_references_set_simple_reference{#1}%
- \setbox\nextbox\hbox attr \destinationattribute \currentdestinationattribute
+ \setbox\nextbox\hbox attr \destinationattribute \currentdestinationattribute % \hpack ?
{\strc_references_flush_destination_nodes
\box\nextbox}%
- \setbox\nextbox\hbox{\box\nextbox}%
+ \setbox\nextbox\hpack{\box\nextbox}%
\wd\nextbox\scratchwidth
\ht\nextbox\scratchheight
\dp\nextbox\scratchdepth
@@ -510,7 +494,7 @@
\def\strc_references_content_nop_finish#1#2%
{\strc_references_set_simple_reference{#1}%
- \hbox attr \destinationattribute \currentdestinationattribute
+ \hbox attr \destinationattribute \currentdestinationattribute % \hpack ?
{\strc_references_flush_destination_nodes
\box\nextbox}%
\egroup}
@@ -2252,27 +2236,6 @@
%D \in{figure}[match(mess)]
%D \stoptyping
-%D Tracing:
-
-\unexpanded\def\strc_references_tracer#1#2% \csleft csright
- {\hbox to \zeropoint \bgroup
- \hss
- \infofont
- \darkblue
- \ifx#1\empty\else
- \raise\strutht \hbox \s!to \zeropoint \bgroup
- \hss#1\hskip.2\emwidth
- \egroup
- \fi
- \vrule \s!height 1.5\strutht \s!depth \strutdp \s!width .1\emwidth
- \ifx#2\empty\else
- \raise\strutht \hbox \s!to \zeropoint \bgroup
- \hskip.2\emwidth#2\hss
- \egroup
- \fi
- \hss
- \egroup}%
-
\protect \endinput
% tricky:
diff --git a/tex/context/base/mkiv/strc-reg.lua b/tex/context/base/mkiv/strc-reg.lua
index 32924ad81..51771e3b8 100644
--- a/tex/context/base/mkiv/strc-reg.lua
+++ b/tex/context/base/mkiv/strc-reg.lua
@@ -6,7 +6,7 @@ if not modules then modules = { } end modules ['strc-reg'] = {
license = "see context related readme files"
}
-local next, type = next, type
+local next, type, tonumber = next, type, tonumber
local format, gmatch = string.format, string.gmatch
local equal, concat, remove = table.are_equal, table.concat, table.remove
local lpegmatch, P, C, Ct = lpeg.match, lpeg.P, lpeg.C, lpeg.Ct
@@ -387,6 +387,7 @@ local function initializer()
end
end
end
+ -- references.sortedinternals = sortedkeys(internalreferences) -- todo: when we need it more than once
end
local function finalizer()
diff --git a/tex/context/base/mkiv/strc-reg.mkiv b/tex/context/base/mkiv/strc-reg.mkiv
index 380cc9f22..04fdef9ad 100644
--- a/tex/context/base/mkiv/strc-reg.mkiv
+++ b/tex/context/base/mkiv/strc-reg.mkiv
@@ -13,7 +13,7 @@
\writestatus{loading}{ConTeXt Structure Macros / Registers}
-\registerctxluafile{strc-reg}{1.001}
+\registerctxluafile{strc-reg}{}
\unprotect
diff --git a/tex/context/base/mkiv/strc-ren.mkiv b/tex/context/base/mkiv/strc-ren.mkiv
index 132f0f115..89aa6f55a 100644
--- a/tex/context/base/mkiv/strc-ren.mkiv
+++ b/tex/context/base/mkiv/strc-ren.mkiv
@@ -151,7 +151,7 @@
\strc_rendering_stop_placement}
\unexpanded\def\strc_rendering_place_head_empty
- {\hbox \headreferenceattributes {\getheadsyncs}}
+ {\hbox\headreferenceattributes{\getheadsyncs}} % \hpack ?
%D \starttyping
%D \def\StretchedBox#1%
@@ -221,24 +221,6 @@
\let\localheadsetup \strc_rendering_initialize_spacing % historic name
\let\headsetupspacing\strc_rendering_initialize_spacing}
-\def\strc_rendering_initialize_hsize_local
- {\global\d_strc_rendering_local_leftoffset \leftskip
- \global\d_strc_rendering_local_rightoffset\rightskip
- % \forgetall
- % \leftskip \d_strc_rendering_local_leftoffset % no stretch
- % \rightskip\d_strc_rendering_local_rightoffset % no stretch
- % \setlocalhsize
- % \hsize\localhsize
- % \forgetbothskips}
- \scratchwidth\availablehsize
- \forgetall
- \hsize\scratchwidth}
-
-\def\strc_rendering_initialize_hsize_global
- {\global\d_strc_rendering_local_leftoffset \zeropoint
- \global\d_strc_rendering_local_rightoffset\zeropoint
- \forgetall}
-
\def\strc_rendering_initialize_interaction
{\resetinteractionparameter\c!style
\resetinteractionparameter\c!color
@@ -308,70 +290,33 @@
\d_strc_rendering_hang_height\zeropoint
\fi}
-% \def\strc_rendering_stop_placement
-% {\n_strc_rendering_hang_lines\zerocount
-% \ifconditional\headisdisplay
-% \strc_rendering_initialize_line_hang
-% % kind of special, we want to snap heads also according to local specs local
-% \ifgridsnapping
-% \hbox\bgroup % extra hbox will trigger global snapper on top of local
-% \edef\p_grid{\headparameter\c!grid}%
-% \ifconditional\headisdisplay
-% \ifx\p_grid\empty\else
-% \useheadstyleandcolor\c!style\c!color
-% \setupinterlinespace
-% \useheadstyleandcolor\c!textstyle\c!textcolor
-% \setupinterlinespace
-% \fi
-% \fi
-% \snaptogrid[\p_grid]\hbox
-% {\hskip\dimexpr\d_strc_rendering_local_leftoffset+\headparameter\c!margin\relax\box\b_strc_rendering_head}%
-% \egroup
-% \else
-% \hbox
-% {\hskip\dimexpr\d_strc_rendering_local_leftoffset+\headparameter\c!margin\relax\box\b_strc_rendering_head}%
-% \fi
-% \flushnotes % new, not really needed
-% \endgraf
-% \ifvmode
-% \ifnum\n_strc_rendering_hang_lines>\zerocount
-% \dorecurse\n_strc_rendering_hang_lines{\nointerlineskip\dosomebreak\nobreak\strut\endgraf}% to be checked
-% \fi
-% \nointerlineskip
-% \dosomebreak\nobreak
-% \fi
-% \getheadsyncs
-% \else
-% % somehow this goes ok even when we push in the margin probably because we gobble pars
-% % in the process of collecting index entries etc
-% \strut
-% \flushnotes % new, here since we're in par mode
-% \unhbox\b_strc_rendering_head
-% \getheadsyncs
-% \ifconditional\headissomewhere
-% % nothing special
-% \else
-% %\hskip\headnumberdistance\s!plus\headnumberdistance\s!minus.25\dimexpr\headnumberdistance\relax
-% \hskip\headtextdistance\relax
-% \strc_sectioning_inject_continuous_signal
-% \fi
-% \fi
-% \ifconditional\headisdisplay
-% \ifvmode
-% \ifgridsnapping % important, font related depth, see comment
-% \prevdepth\strutdp
-% \else
-% \prevdepth\d_strc_rendering_local_depth
-% \fi
-% \fi
-% \fi
-% \egroup
-% \egroup
-% \ifconditional\headisdisplay
-% \useindentnextparameter\headparameter
-% \else
-% \nonoindentation % recently added, was a bug
-% \fi}
+\def\strc_rendering_initialize_hsize_local
+ {\global\d_strc_rendering_local_leftoffset \leftskip
+ \global\d_strc_rendering_local_rightoffset\rightskip
+ % \forgetall
+ % \leftskip \d_strc_rendering_local_leftoffset % no stretch
+ % \rightskip\d_strc_rendering_local_rightoffset % no stretch
+ % \setlocalhsize
+ % \hsize\localhsize
+ % \forgetbothskips}
+ \scratchwidth\availablehsize
+ \forgetall
+ \hsize\scratchwidth}
+
+\def\strc_rendering_initialize_hsize_global
+ {\global\d_strc_rendering_local_leftoffset \zeropoint
+ \global\d_strc_rendering_local_rightoffset\zeropoint
+ \forgetall}
+
+% \def\strc_sectioning_stay_on_this_line
+% {\directcheckedvspacing{-\v!line,\v!samepage,\v!nowhite}%
+% \directcheckedvspacing\v!disable}
+%
+% we now use \ignoreparskip, so:
+
+\def\strc_sectioning_stay_on_this_line
+ {\directcheckedvspacing{-\v!line,\v!samepage}%
+ \directcheckedvspacing\v!disable}
\def\strc_rendering_stop_placement
{\n_strc_rendering_hang_lines\zerocount
@@ -409,7 +354,7 @@
\unhbox\b_strc_rendering_head
\getheadsyncs
\ifconditional\headissomewhere
- % nothing special
+ \strc_sectioning_stay_on_this_line % test case: alternative=margintext and \startparagraph ..
\else
%\hskip\headnumberdistance\s!plus\headnumberdistance\s!minus.25\dimexpr\headnumberdistance\relax
\hskip\headtextdistance\relax
@@ -430,7 +375,8 @@
\ifconditional\headisdisplay
\useindentnextparameter\headparameter
\else
- \nonoindentation % recently added, was a bug
+ \ignoreparskip
+ \noindentation % recently added, was a bug
\fi}
% nice testcase
diff --git a/tex/context/base/mkiv/strc-sec.mkiv b/tex/context/base/mkiv/strc-sec.mkiv
index b0771b475..4e5115a7d 100644
--- a/tex/context/base/mkiv/strc-sec.mkiv
+++ b/tex/context/base/mkiv/strc-sec.mkiv
@@ -86,7 +86,8 @@
\c!sectionset=\headparameter\c!sectionset,
\c!sectionsegments=\headparameter\c!sectionsegments,
\c!reference=\headparameter\c!reference,
- \c!referenceprefix=\headparameter\c!referenceprefix]
+ \c!referenceprefix=\headparameter\c!referenceprefix,
+ \c!criterium=\headparameter\c!criterium]
% see lists/neat-001.tex for usage of:
@@ -112,7 +113,14 @@
\globallet\currentstructurebookmark\tempstring
\endgroup}
-% so it's an experiment
+% zeros:
+%
+% \setuphead[subsection][criterium=all]
+%
+% \dorecurse{3} {
+% \chapter{Blabla} \subsection{bla 1 1} \subsection{bla 1 2}
+% \section{bla 2} \subsection{bla 2 1} \subsection{bla 2 2}
+% }
\unexpanded\def\strc_sectioning_register#1#2#3% #1=interfaced-settings, #2=optional user data (not yet supported)
{\begingroup
@@ -236,6 +244,7 @@
segments {\structureparameter\c!sectionsegments}
ownnumber {\currentstructureownnumber}
language {\currentlanguage}% for the moment, needed for bookmarks conversion
+ criterium {\structureparameter\c!criterium}
}
userdata {\detokenize{#3}}% will be converted to table at the lua end
\relax
@@ -462,9 +471,11 @@
% compatible but better
\appendtoks
- \doifelse{\headparameter\c!ownnumber}\v!yes
- {\setuevalue\currenthead{\strc_sectioning_handle_own[\currenthead]}}
- {\setuevalue\currenthead{\strc_sectioning_handle_nop[\currenthead]}}%
+ \ifx\currenthead\empty \else
+ \doifelse{\headparameter\c!ownnumber}\v!yes
+ {\setuevalue\currenthead{\strc_sectioning_handle_own[\currenthead]}}
+ {\setuevalue\currenthead{\strc_sectioning_handle_nop[\currenthead]}}%
+ \fi
\to \everysetuphead
\unexpanded\def\doredefinehead#1#2% called at lua end
@@ -585,10 +596,10 @@
% mkii compatible. Somewhat weird that it's part of the
% top level structure but it will be flattened anyway.
\let\currenthead\currentsection %
- \setheadparameter\c!textstyle {\strictheadparameter\c!style}%
- \setheadparameter\c!textcolor {\strictheadparameter\c!color}%
- \setheadparameter\c!numberstyle{\strictheadparameter\c!style}%
- \setheadparameter\c!numbercolor{\strictheadparameter\c!color}%
+ \setheadparameter\c!textstyle {\directheadparameter\c!style}%
+ \setheadparameter\c!textcolor {\directheadparameter\c!color}%
+ \setheadparameter\c!numberstyle{\directheadparameter\c!style}%
+ \setheadparameter\c!numbercolor{\directheadparameter\c!color}%
\to \everydefinesection
% head -> head
@@ -660,6 +671,7 @@
\newconditional\c_strc_sectioning_place
\newconditional\c_strc_sectioning_empty
\newconditional\c_strc_sectioning_hidden
+\newconditional\c_strc_sectioning_section
\newconditional\headshownumber % public
\newconditional\headisdisplay % public
@@ -690,22 +702,32 @@
\setvalue{\??headplace\v!yes}%
{\setfalse\c_strc_sectioning_empty
\settrue \c_strc_sectioning_place
- \setfalse\c_strc_sectioning_hidden}
+ \setfalse\c_strc_sectioning_hidden
+ \setfalse\c_strc_sectioning_section}
\setvalue{\??headplace\v!empty}%
{\settrue \c_strc_sectioning_empty
\settrue \c_strc_sectioning_place
- \setfalse\c_strc_sectioning_hidden}
+ \setfalse\c_strc_sectioning_hidden
+ \setfalse\c_strc_sectioning_section}
\setvalue{\??headplace\v!no}%
{\settrue \c_strc_sectioning_empty
\setfalse\c_strc_sectioning_place
- \setfalse\c_strc_sectioning_hidden}
+ \setfalse\c_strc_sectioning_hidden
+ \setfalse\c_strc_sectioning_section}
\setvalue{\??headplace\v!hidden}%
{\settrue \c_strc_sectioning_empty
\setfalse\c_strc_sectioning_place
- \settrue \c_strc_sectioning_hidden}
+ \settrue \c_strc_sectioning_hidden
+ \setfalse\c_strc_sectioning_section}
+
+\setvalue{\??headplace\v!section}%
+ {\settrue \c_strc_sectioning_empty
+ \setfalse\c_strc_sectioning_place
+ \settrue \c_strc_sectioning_hidden
+ \settrue \c_strc_sectioning_section}
\unexpanded\def\strc_sectioning_initialize_placement
{\expandnamespaceparameter\??headplace\headparameter\c!placehead\v!yes}
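% A hedged usage sketch for the new placehead value handled above: the head is
% not typeset, but its marking and list synchronization are still registered
% via the delayed box flushed in \setheadmarking.
%
%   \setuphead[subsubject][placehead=section]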
@@ -747,7 +769,8 @@
% so we need a 'keep track of raw set option' (or maybe a funny internal prefix)
\unexpanded\def\setheadmarking % li:: so that we can use \marking[section]{Taco needed this}
- {\normalexpanded{\setmarking[\currenthead]{li::\currentstructurelistnumber}}}
+ {\strc_sectioning_delayed_flush
+ \normalexpanded{\setmarking[\currenthead]{li::\currentstructurelistnumber}}}
\let\deepstructurenumbercommand\relax
\let\deepstructuretitlecommand \relax
@@ -852,6 +875,7 @@
\strc_sectioning_before_yes
\strc_sectioning_register{#1}{#2}{#3}% after optional \page
\strc_sectioning_report
+ \dostarttagged\t!sectioncaption\empty
\let\getheadsyncs\theheadsynchonization
\let\getheadtitle\fullheadtitle
\ifconditional\headshownumber
@@ -860,18 +884,27 @@
\else
\strc_rendering_place_head_text
\fi
+ \dostoptagged
\strc_sectioning_after_yes
\else\ifconditional\c_strc_sectioning_hidden
\strc_sectioning_register{#1}{#2}{#3}% after optional \page
\strc_sectioning_report
+ \dostarttagged\t!sectioncaption\empty
\let\getheadsyncs\theheadsynchonization
- \strc_rendering_place_head_hidden % only something when tracing
+ \ifconditional\c_strc_sectioning_section
+ \strc_rendering_place_head_section
+ \else
+ \strc_rendering_place_head_hidden % only something when tracing
+ \fi
+ \dostoptagged
\else
\strc_sectioning_before_nop % added in connection with subpage numbering / tug sheets
\strc_sectioning_register{#1}{#2}{#3}% after optional \page
\strc_sectioning_report
+ \dostarttagged\t!sectioncaption\empty
\let\getheadsyncs\theheadsynchonization
\strc_rendering_place_head_empty % just flush 'm
+ \dostoptagged
\strc_sectioning_after_nop
\fi\fi
\else
@@ -879,22 +912,32 @@
\strc_sectioning_before_yes
\strc_sectioning_register{#1}{#2}{#3}% after optional \page
\strc_sectioning_report
+ \dostarttagged\t!sectioncaption\empty
\let\getheadsyncs\theheadsynchonization
\let\getheadtitle\fullheadtitle
\strc_rendering_place_head_text
+ \dostoptagged
\strc_sectioning_after_yes
\else\ifconditional\c_strc_sectioning_hidden
\strc_sectioning_register{#1}{#2}{#3}% after optional \page
\strc_sectioning_report
\let\getheadsyncs\theheadsynchonization
- \strc_rendering_place_head_hidden % only something when tracing
+ \dostarttagged\t!sectioncaption\empty
+ \ifconditional\c_strc_sectioning_section
+ \strc_rendering_place_head_section
+ \else
+ \strc_rendering_place_head_hidden % only something when tracing
+ \fi
+ \dostoptagged
\else
% do nothing / should be vbox to 0pt
\strc_sectioning_before_nop
\strc_sectioning_register{#1}{#2}{#3}% after optional \page
\strc_sectioning_report
+ \dostarttagged\t!sectioncaption\empty
\let\getheadsyncs\theheadsynchonization
\strc_rendering_place_head_empty % just flush 'm
+ \dostoptagged
\strc_sectioning_after_nop
\fi\fi
\fi
@@ -930,13 +973,27 @@
% todo: when in the page builder we need to resolve the marking immediately
% because otherwise we have an async
+\newbox\b_sectioning_delayed
+
+\def\strc_sectioning_delayed_flush
+ {\ifvoid\b_sectioning_delayed\else
+ \smashboxed\b_sectioning_delayed
+ \fi}
+
+\unexpanded\def\strc_rendering_place_head_section % see hidden below
+ {\global\setbox\b_sectioning_delayed\hpack\bgroup
+ \setmarking[\currentheadcoupling]{li::\currentstructurelistnumber}%
+ \hpack\headreferenceattributes{}%
+ \currentstructuresynchronize
+ \egroup}
+
\unexpanded\def\strc_rendering_place_head_hidden % maybe trialtypesetting check
{\setxvalue{\??hiddenheadattr\currenthead}%
{\headreferenceattributes}% can be used when making a box
\setxvalue{\??hiddenheadsync\currenthead}%
{\noexpand\letgvalue{\??hiddenheadsync\currenthead}\relax
\noexpand\setmarking[\currentheadcoupling]{li::\currentstructurelistnumber}%
- \hbox\headreferenceattributes{}% otherwise no destination ... maybe tag ref as hidden and fall back on page reference
+ \hpack\headreferenceattributes{}% otherwise no destination ... maybe tag ref as hidden and fall back on page reference
\currentstructuresynchronize}} % and it's a node anyway
\def\synchronizehead #1{\csname\??hiddenheadsync#1\endcsname}
@@ -1031,7 +1088,7 @@
{\ifconditional\headisdisplay
\ifconditional\c_strc_sectioning_auto_break
% \vspacing[\v!samepage-\currentheadlevel]%
-\vspacing[\v!samepage]%
+ \vspacing[\v!samepage]%
\fi
\strc_sectioning_empty_correction
\headparameter\c!after
diff --git a/tex/context/base/mkiv/strc-syn.mkiv b/tex/context/base/mkiv/strc-syn.mkiv
index 72c94d069..1fb079f04 100644
--- a/tex/context/base/mkiv/strc-syn.mkiv
+++ b/tex/context/base/mkiv/strc-syn.mkiv
@@ -13,7 +13,7 @@
\writestatus{loading}{ConTeXt Structure Macros / Synonyms and Sorting}
-\registerctxluafile{strc-syn}{1.001}
+\registerctxluafile{strc-syn}{}
%D Although we could nowadays build this on top of regular lists we keep this
%D more efficient variant around. Eventually we can add some options to lists
@@ -162,9 +162,9 @@
%c!hang=,
%c!sample=,
\c!margin=\v!no,
- \c!before=\blank,
+ \c!before=\startpacked,
\c!inbetween=\blank,
- \c!after=\blank,
+ \c!after=\stoppacked,
%c!indentnext=,
%c!indenting=,
%
@@ -335,14 +335,16 @@
[\c!alternative=\v!normal]
\unexpanded\def\strc_synonyms_insert_meaning#1#2% name tag
- {\begingroup
+ {\dontleavehmode % otherwise we don't get it right at the beginning of a par
+ \begingroup
\def\currentsimplelist{#1}%
\def\currentsynonymtag{#2}%
\fastsetup{\??simplelistrenderings::\v!text}%
\endgroup}
\unexpanded\def\strc_synonyms_insert#1#2% name tag
- {\begingroup
+ {\dontleavehmode % otherwise we don't get it right at the beginning of a par
+ \begingroup
\edef\currentsimplelist{#1}%
\let \currentsynonym\currentsimplelist % for a while
\def \currentsynonymtag{#2}%
@@ -353,7 +355,7 @@
\normalexpanded{\endgroup\simplelistparameter\c!next}}
% \setupsimplelistalternative
-% [\c!command=\strictsimplelistparameter\c!command]
+% [\c!command=\directsimplelistparameter\c!command]
\definesimplelistalternative
[\v!normal]
@@ -422,8 +424,9 @@
{\strc_constructions_initialize{#1}%
\setupcurrentsimplelist[#2]%
\let\synonymentry\strc_synonym_normal
- \startpacked
- \clf_processsynonyms
+ % so we can hook tabulate into before and after
+ \normalexpanded{\simplelistparameter\c!before
+ \noexpand\clf_processsynonyms
{#1}%
{%
criterium {\simplelistparameter\c!criterium}%
@@ -431,7 +434,8 @@
method {\simplelistparameter\c!method}%
}%
\relax
- \stoppacked}%
+ \simplelistparameter\c!after}%
+ \relax}%
{}% todo: message that invalid
\endgroup}
@@ -465,8 +469,8 @@
%\c!command=, % we test for defined !
%\c!criterium=,
%\c!style=,
- %\c!before=,
- \c!after=\endgraf,
+ \c!before=\startpacked,
+ \c!after=\stoppacked,
%\c!expansion=,
\c!method=]
@@ -549,7 +553,8 @@
[\c!alternative=\v!normal]
\unexpanded\def\strc_sorting_insert#1#2% name tag
- {\begingroup
+ {\dontleavehmode % otherwise we don't get it right at the beginning of a par
+ \begingroup
% no kap currently, of .. we need to map cap onto WORD
\edef\currentsorting{#1}%
\def \currentsortingtag{#2}%
@@ -593,6 +598,7 @@
\unexpanded\def\placelistofsorts
{\dodoubleempty\strc_sorting_place_list}
+
\def\strc_sorting_place_list[#1][#2]%
{\begingroup
\edef\currentsimplelist{#1}%
@@ -604,8 +610,9 @@
\else
\let\synonymentry\strc_sorting_command
\fi
- \startpacked
- \clf_processsynonyms
+ % so we can hook tabulate into before and after
+ \normalexpanded{\simplelistparameter\c!before
+ \noexpand\clf_processsynonyms
{#1}%
{%
criterium {\simplelistparameter\c!criterium}%
@@ -613,7 +620,7 @@
method {\simplelistparameter\c!method}%
}%
\relax
- \stoppacked
+ \simplelistparameter\c!after}%
\endgroup}
\unexpanded\def\completelistofsorts
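% The before/after hooks above make it possible to wrap a placed list of
% synonyms or sorts in a user construct such as tabulate. A minimal sketch
% (the instance name and the two column preamble are only assumptions for
% illustration; the entry command must then provide the \NC ... \NR cells):
%
% \setupsynonyms
%   [abbreviation]
%   [before={\starttabulate[|l|p|]},
%    after=\stoptabulate]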
diff --git a/tex/context/base/mkiv/strc-tag.lua b/tex/context/base/mkiv/strc-tag.lua
index 9d1fec33e..1be44821e 100644
--- a/tex/context/base/mkiv/strc-tag.lua
+++ b/tex/context/base/mkiv/strc-tag.lua
@@ -60,7 +60,7 @@ tags.specifications = specifications
local p_splitter = C((1-S(">"))^1) * P(">") * C(P(1)^1)
tagpatterns.splitter = p_splitter
-local properties = allocate {
+local properties = allocate { -- todo: more "record = true" to improve formatting
document = { pdf = "Div", nature = "display" },
@@ -71,6 +71,7 @@ local properties = allocate {
highlight = { pdf = "Span", nature = "inline" },
section = { pdf = "Sect", nature = "display" },
+ sectioncaption = { pdf = "Div", nature = "display", record = true },
sectiontitle = { pdf = "H", nature = "mixed" },
sectionnumber = { pdf = "H", nature = "mixed" },
sectioncontent = { pdf = "Div", nature = "display" },
@@ -282,6 +283,10 @@ function tags.registermetadata(data)
end
end
+function tags.getmetadata()
+ return documentdata or { }
+end
+
function tags.start(tag,specification)
if not enabled then
codeinjections.enabletags()
diff --git a/tex/context/base/mkiv/strc-tag.mkiv b/tex/context/base/mkiv/strc-tag.mkiv
index 7fdfd7afa..2ee71d67c 100644
--- a/tex/context/base/mkiv/strc-tag.mkiv
+++ b/tex/context/base/mkiv/strc-tag.mkiv
@@ -16,7 +16,7 @@
\writestatus{loading}{ConTeXt Structure Macros / Tags}
-\registerctxluafile{strc-tag}{1.001}
+\registerctxluafile{strc-tag}{}
\unprotect
@@ -32,6 +32,7 @@
\def\t!highlight {highlight} % Span
\def\t!section {section} % Sect
+\def\t!sectioncaption {sectioncaption} % Div
\def\t!sectiontitle {sectiontitle} % H
\def\t!sectionnumber {sectionnumber} % H
\def\t!sectioncontent {sectioncontent} % Div
@@ -362,6 +363,9 @@
\doifelse{\taggingparameter\c!state}\v!start{\the\everyenableelements}{\the\everydisableelements}%
\to \everysetuptagging
+\unexpanded\def\forgettagging
+ {\attribute\taggedattribute\attributeunsetvalue}
+
\setuptagging
[\c!state=\v!stop,
\c!method=\v!auto]
@@ -535,4 +539,6 @@
\unexpanded\def\bpar{\dostarttagged\t!paragraph\empty\strc_tagged_saved_bpar}
\unexpanded\def\epar{\strc_tagged_saved_epar\dostoptagged}
+% \def\untagged{attr \taggedattribute\attributeunsetvalue}
+
\protect
diff --git a/tex/context/base/mkiv/supp-box.lua b/tex/context/base/mkiv/supp-box.lua
index b9bf0ccf0..664477cfc 100644
--- a/tex/context/base/mkiv/supp-box.lua
+++ b/tex/context/base/mkiv/supp-box.lua
@@ -8,10 +8,12 @@ if not modules then modules = { } end modules ['supp-box'] = {
-- this is preliminary code, use insert_before etc
-local lpegmatch = lpeg.match
-
local report_hyphenation = logs.reporter("languages","hyphenation")
+local tonumber, next, type = tonumber, next, type
+
+local lpegmatch = lpeg.match
+
local tex = tex
local context = context
local nodes = nodes
@@ -30,9 +32,10 @@ local nuts = nodes.nuts
local tonut = nuts.tonut
local tonode = nuts.tonode
-local getfield = nuts.getfield
+----- getfield = nuts.getfield
local getnext = nuts.getnext
local getprev = nuts.getprev
+local getboth = nuts.getboth
local getdisc = nuts.getdisc
local getid = nuts.getid
local getlist = nuts.getlist
@@ -42,7 +45,7 @@ local getdir = nuts.getdir
local getwidth = nuts.getwidth
local takebox = nuts.takebox
-local setfield = nuts.setfield
+----- setfield = nuts.setfield
local setlink = nuts.setlink
local setboth = nuts.setboth
local setnext = nuts.setnext
@@ -78,9 +81,8 @@ local texsetdimen = tex.setdimen
local function hyphenatedlist(head,usecolor)
local current = head and tonut(head)
while current do
- local id = getid(current)
- local next = getnext(current)
- local prev = getprev(current)
+ local id = getid(current)
+ local prev, next = getboth(current)
if id == disc_code then
local pre, post, replace = getdisc(current)
if not usecolor then
@@ -97,31 +99,14 @@ local function hyphenatedlist(head,usecolor)
flush_list(replace)
end
setdisc(current)
- setboth(current)
--- local list = setlink (
--- pre and new_penalty(10000),
--- pre,
--- current,
--- post,
--- post and new_penalty(10000)
--- )
--- local tail = find_tail(list)
--- if prev then
--- setlink(prev,list)
--- end
--- if next then
--- setlink(tail,next)
--- end
- setlink (
- prev, -- there had better be one
- pre and new_penalty(10000),
- pre,
- current,
- post,
- post and new_penalty(10000),
- next
- )
- -- flush_node(current)
+ if pre then
+ setlink(prev,new_penalty(10000),pre)
+ setlink(find_tail(pre),current)
+ end
+ if post then
+ setlink(current,new_penalty(10000),post)
+ setlink(find_tail(post),next)
+ end
elseif id == vlist_code or id == hlist_code then
hyphenatedlist(getlist(current))
end
@@ -164,8 +149,10 @@ end
implement {
name = "showhyphenatedinlist",
arguments = "integer",
- actions = function(box)
- report_hyphenation("show: %s",listtoutf(checkedlist(n),false,true))
+ actions = function(n)
+ -- we just hyphenate (as we pass a hpack) .. a bit too much casting but ...
+ local l = languages.hyphenators.handler(tonode(checkedlist(n)))
+ report_hyphenation("show: %s",listtoutf(l,false,true))
end
}
@@ -350,6 +337,7 @@ implement {
local result = new_hlist()
setlist(result,head)
setbox(target,result)
+ -- setbox(target,new_hlist(head))
end
}
diff --git a/tex/context/base/mkiv/supp-box.mkiv b/tex/context/base/mkiv/supp-box.mkiv
index 9d2817cee..fb9cbdf5d 100644
--- a/tex/context/base/mkiv/supp-box.mkiv
+++ b/tex/context/base/mkiv/supp-box.mkiv
@@ -15,7 +15,7 @@
\unprotect
-\registerctxluafile{supp-box}{1.001}
+\registerctxluafile{supp-box}{}
% This file is partially cleaned up.
@@ -79,6 +79,7 @@
\def\strutht {\ht\strutbox}
\def\strutwd {\wd\strutbox}
\def\struthtdp{\dimexpr\ht\strutbox+\dp\strutbox\relax}
+\def\strutgap {\dimexpr\ht\strutbox-\dp\strutbox\relax}
%D \macros
%D {voidbox,nextbox}
@@ -1071,8 +1072,8 @@
\unexpanded\def\doshowhyphenatednextbox
{\clf_showhyphenatedinlist\nextbox}
-\unexpanded\def\showhyphens
- {\dowithnextboxcs\doshowhyphenatednextbox\hbox}
+\unexpanded\def\showhyphens % hpack: so no processing (we hyphenate in lua)
+ {\dowithnextboxcs\doshowhyphenatednextbox\hpack}
%D The following macros are seldom used but handy for tracing.
%D
@@ -2668,7 +2669,7 @@
%D A funny (but rather stupid) one, plus a redefinition.
\unexpanded\def\removebottomthings
- {\dorecurse5{\unskip\unkern\unpenalty}}
+ {\dorecurse\plusfive{\unskip\unkern\unpenalty}}
\unexpanded\def\removelastskip % \ifvmode the plain tex one \fi
{\ifvmode\ifzeropt\lastskip\else\vskip-\lastskip\fi\fi}
@@ -2746,19 +2747,12 @@
%D And even rawer:
-\let\naturalhbox \hbox
-\let\naturalvbox \vbox
-\let\naturalvtop \normalvtop
-\let\naturalvcenter\normalvtop
-
-\ifdefined\textdir
-
- \unexpanded\def\naturalhbox {\hbox dir TLT}
- \unexpanded\def\naturalvbox {\vbox dir TLT}
- \unexpanded\def\naturalhpack{\hpack dir TLT}
- \unexpanded\def\naturalvpack{\vpack dir TLT}
-
-\fi
+ \let\naturalvtop \normalvtop
+ \let\naturalvcenter\normalvtop
+\unexpanded\def\naturalhbox {\hbox dir TLT}
+\unexpanded\def\naturalvbox {\vbox dir TLT}
+\unexpanded\def\naturalhpack {\hpack dir TLT}
+\unexpanded\def\naturalvpack {\vpack dir TLT}
%D \macros
%D {vcenter}
@@ -2973,7 +2967,12 @@
%D Who knows when this comes in handy:
-\unexpanded\def\lastlinewidth{\dimexpr\clf_lastlinelength\scaledpoint\relax}
+\unexpanded\def\lastlinewidth{\dimexpr\clf_lastlinewidth\scaledpoint\relax}
+
+%D Keep as reference:
+
+% \unexpanded\def\tightvbox{\dowithnextbox{\dp\nextbox\zeropoint\box\nextbox}\vbox}
+% \unexpanded\def\tightvtop{\dowithnextbox{\ht\nextbox\zeropoint\box\nextbox}\vtop}
\protect \endinput
diff --git a/tex/context/base/mkiv/supp-dir.mkiv b/tex/context/base/mkiv/supp-dir.mkiv
index 6251012b6..42a0aa37c 100644
--- a/tex/context/base/mkiv/supp-dir.mkiv
+++ b/tex/context/base/mkiv/supp-dir.mkiv
@@ -30,9 +30,7 @@
% \ifnum\thetextdir=0 L\else R\fi \textdir TRT \ifnum\thetextdir=0 L\else R\fi
\unexpanded\def\showdirsinmargin
- {\doifelse{\the\pardir}\!!TLT
- {\inleft{\normalexpanded{\noexpand\hbox dir TLT{\ttxx[\the\pardir,\the\textdir]}}}}
- {\inleft{\normalexpanded{\noexpand\hbox dir TLT{\ttxx[\the\pardir,\the\textdir]}}}}}
+ {\inleft{\normalexpanded{\noexpand\hbox dir TLT{\ttxx[\the\pardir,\the\textdir]}}}}
\bgroup
\catcode`L=\othercatcode \gdef\istltdir#1#2#3{\if#2L0\else1\fi}
diff --git a/tex/context/base/mkiv/supp-mat.mkiv b/tex/context/base/mkiv/supp-mat.mkiv
index b265a7c5a..176233ae5 100644
--- a/tex/context/base/mkiv/supp-mat.mkiv
+++ b/tex/context/base/mkiv/supp-mat.mkiv
@@ -314,4 +314,9 @@
% \def\startdisplay{\displaybreak\ignorespaces\startpacked}
% \def\stopdisplay {\stoppacked\displaybreak\ignorespaces}
+\let\superscript \Usuperscript
+\let\subscript \Usubscript
+\let\nosuperscript\Unosuperscript
+\let\nosubscript \Unosubscript
+
\protect \endinput
diff --git a/tex/context/base/mkiv/supp-ran.lua b/tex/context/base/mkiv/supp-ran.lua
index 8bfc09e58..1ad9293ee 100644
--- a/tex/context/base/mkiv/supp-ran.lua
+++ b/tex/context/base/mkiv/supp-ran.lua
@@ -24,7 +24,7 @@ local randomseed = math.randomseed
local round = math.round
local stack = { }
local last = 1
-local maxcount = 2^30-1 -- 1073741823
+local maxcount = 0x3FFFFFFF -- 2^30-1
math.random = function(...)
local n = random(...)
diff --git a/tex/context/base/mkiv/supp-ran.mkiv b/tex/context/base/mkiv/supp-ran.mkiv
index 5b70a075f..59707836a 100644
--- a/tex/context/base/mkiv/supp-ran.mkiv
+++ b/tex/context/base/mkiv/supp-ran.mkiv
@@ -16,7 +16,7 @@
%D This module is downward compatible in the sense that we've kept
%D the user interface (which uses intermediate variables).
-\registerctxluafile{supp-ran}{1.001}
+\registerctxluafile{supp-ran}{}
\unprotect
diff --git a/tex/context/base/mkiv/supp-vis.mkiv b/tex/context/base/mkiv/supp-vis.mkiv
index e12c4c534..23dfcb5fa 100644
--- a/tex/context/base/mkiv/supp-vis.mkiv
+++ b/tex/context/base/mkiv/supp-vis.mkiv
@@ -11,6 +11,8 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
+\endinput
+
%D This module is no longer generic \unknown\ but who cares. Maybe
%D the code will be moved out of the core (apart from ruled boxes).
@@ -708,7 +710,7 @@
\newdimen\testrulewidth \testrulewidth=\boxrulewidth
\newif\ifvisiblestretch \visiblestretchfalse
-
+
%D \macros
%D {ruledhss,
%D ruledhfil,ruledhfilneg,
@@ -892,7 +894,7 @@
\unexpanded\def\ruledvfill {\doruledvfiller\normalvfill \normalvfillneg{-12}}
\unexpanded\def\ruledvfilneg {\doruledvfiller\normalvfilneg \normalvfil {8}}
\unexpanded\def\ruledvfillneg{\doruledvfiller\normalvfillneg\normalvfill {16}}
-
+
%D \macros
%D {ruledhskip}
%D
@@ -1401,7 +1403,7 @@
\unexpanded\def\ruledvglue
{\bgroup
\afterassignment\supp_visualizers_vglue_indeed\scratchskip=}
-
+
%D \macros
%D {ruledmkern,ruledmskip}
%D
@@ -1696,8 +1698,6 @@
%D For those who want to manipulate the visual cues in detail,
%D we have grouped them.
-\newif\ifshowingcomposition % see later why we need this
-
\unexpanded\def\showfils
{\showingcompositiontrue
\let\hss \ruledhss
diff --git a/tex/context/base/mkiv/symb-emj.mkiv b/tex/context/base/mkiv/symb-emj.mkiv
index 22d8b4a07..e063b6a7a 100644
--- a/tex/context/base/mkiv/symb-emj.mkiv
+++ b/tex/context/base/mkiv/symb-emj.mkiv
@@ -13,7 +13,7 @@
\writestatus{loading}{ConTeXt Symbol Libraries / Emoji}
-\registerctxluafile{symb-emj}{1.001}
+\registerctxluafile{symb-emj}{}
\unprotect
diff --git a/tex/context/base/mkiv/symb-ini.mkiv b/tex/context/base/mkiv/symb-ini.mkiv
index 23879b43a..e4950e09d 100644
--- a/tex/context/base/mkiv/symb-ini.mkiv
+++ b/tex/context/base/mkiv/symb-ini.mkiv
@@ -17,7 +17,7 @@
\writestatus{loading}{ConTeXt Symbol Libraries / Initialization}
-\registerctxluafile{symb-ini}{1.001}
+\registerctxluafile{symb-ini}{}
\unprotect
@@ -301,7 +301,7 @@
%D
%D \showsetup{showsymbolset}
-\fetchruntimecommand \showsymbolset {\f!symbolprefix\s!run}
+\fetchruntimecommand \showsymbolset \f!symb_run
%D \macros
%D {usesymbols}
diff --git a/tex/context/base/mkiv/syst-aux.lua b/tex/context/base/mkiv/syst-aux.lua
index 34e5c4e88..06f6176b1 100644
--- a/tex/context/base/mkiv/syst-aux.lua
+++ b/tex/context/base/mkiv/syst-aux.lua
@@ -11,7 +11,7 @@ if not modules then modules = { } end modules ['syst-aux'] = {
-- utfmatch(str,"(.?)(.*)$")
-- utf.sub(str,1,1)
-local tonumber = tonumber
+local tonumber, next = tonumber, next
local utfsub = utf.sub
local P, S, R, C, Cc, Cs, Carg, lpegmatch = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.Cc, lpeg.Cs, lpeg.Carg, lpeg.match
local next = next
@@ -23,7 +23,7 @@ local formatters = string.formatters
local setcatcode = tex.setcatcode
local utf8character = lpeg.patterns.utf8character
local settings_to_array = utilities.parsers.settings_to_array
-local settings_to_set = utilities.parsers.settings_to_set
+local settings_to_set = utilities.parsers.settings_to_set
local setmacro = interfaces.setmacro
local pattern = C(utf8character^-1) * C(P(1)^0)
@@ -390,8 +390,8 @@ local function doifelsecommon(a,b)
end
return
end
- local ba = find(a,",")
- local bb = find(b,",")
+ local ba = find(a,",",1,true)
+ local bb = find(b,",",1,true)
if ba and bb then
local ha = hash[a]
local hb = hash[b]
@@ -433,8 +433,8 @@ local function doifcommon(a,b)
end
return
end
- local ba = find(a,",")
- local bb = find(b,",")
+ local ba = find(a,",",1,true)
+ local bb = find(b,",",1,true)
if ba and bb then
local ha = hash[a]
local hb = hash[b]
@@ -476,8 +476,8 @@ local function doifnotcommon(a,b)
end
return
end
- local ba = find(a,",")
- local bb = find(b,",")
+ local ba = find(a,",",1,true)
+ local bb = find(b,",",1,true)
if ba and bb then
local ha = hash[a]
local hb = hash[b]
@@ -519,7 +519,7 @@ local function doifelseinset(a,b)
end
return
end
- local bb = find(b,",")
+ local bb = find(b,",",1,true)
if bb then
if hash[b][a] then
-- if settings_to_set(b)[a] then
@@ -542,7 +542,7 @@ local function doifinset(a,b)
end
return
end
- local bb = find(b,",")
+ local bb = find(b,",",1,true)
if bb then
if hash[b][a] then
-- if settings_to_set(b)[a] then
@@ -565,7 +565,7 @@ local function doifnotinset(a,b)
end
return
end
- local bb = find(b,",")
+ local bb = find(b,",",1,true)
if bb then
if hash[b][a] then
-- if settings_to_set(b)[a] then
@@ -652,3 +652,18 @@ implement {
-- context(s)
-- end
-- }
+
+local bp = number.dimenfactors.bp
+
+interfaces.implement {
+ name = "tobigpoints",
+ actions = function(d) context("%.5F",bp * d) end,
+ arguments = "dimension",
+}
+
+interfaces.implement {
+ name = "towholebigpoints",
+ actions = function(d) context("%r",bp * d) end,
+ arguments = "dimension",
+}
+
diff --git a/tex/context/base/mkiv/syst-aux.mkiv b/tex/context/base/mkiv/syst-aux.mkiv
index eb5b3b90a..77f947753 100644
--- a/tex/context/base/mkiv/syst-aux.mkiv
+++ b/tex/context/base/mkiv/syst-aux.mkiv
@@ -16,7 +16,7 @@
%D used. After all it's not that urgent and replacing helpers is a delicate process.
%D Don't depend on it.
-\registerctxluafile{syst-aux}{1.001}
+\registerctxluafile{syst-aux}{}
% A dedicated primitive \ifvoidmacro\cs == \ifx\cs\empty is some 10% faster but
% probably not that noticeable in practice. An \ifvoidtoks might make sense but we
@@ -181,7 +181,6 @@
%D These are not needed any more now that we have wide screens (and bytes come
%D cheap).
-
\let\@EA \singleexpandafter
\let\@EAEAEA \doubleexpandafter
\let\@EAEAEAEAEAEA\tripleexpandafter
@@ -282,6 +281,15 @@
%D {\localnext} because we don't want clashes with \type
%D {\next}.
+\let\next \relax
+\let\nextnext \relax
+\let\nextnextnext \relax
+\let\nexttoken \relax
+\let\charactertoken\relax
+
+\let\m_syst_action_yes\relax
+\let\m_syst_action_nop\relax
+
\unexpanded\def\doifelsenextchar#1#2#3% #1 should not be {} !
{\let\charactertoken=#1% = needed here
\def\m_syst_action_yes{#2}%
@@ -1073,13 +1081,22 @@
\fi
\relax} % this one preserved the next {}
+% \def\syst_helpers_do_process_comma_list#1]#2%
+% {\global\advance\commalevel \plusone
+% \expandafter\def\csname\??nextcommalevel\the\commalevel\endcsname##1,%
+% {#2{##1}\syst_helpers_do_process_comma_item}%
+% \expandafter\syst_helpers_do_do_process_comma_item\gobbleoneargument#1,]\relax
+% \global\advance\commalevel \minusone }
+
\def\syst_helpers_do_process_comma_list#1]#2%
{\global\advance\commalevel \plusone
\expandafter\def\csname\??nextcommalevel\the\commalevel\endcsname##1,%
{#2{##1}\syst_helpers_do_process_comma_item}%
- \expandafter\syst_helpers_do_do_process_comma_item\gobbleoneargument#1,]\relax
+ \syst_helpers_do_do_process_comma_item_gobble#1,]\relax
\global\advance\commalevel \minusone }
+\def\syst_helpers_do_do_process_comma_item_gobble#1{\syst_helpers_do_do_process_comma_item}
+
%D One way of quitting a commalist halfway is:
\unexpanded\def\quitcommalist
@@ -3089,25 +3106,6 @@
\let\immediatemessage\clf_immediatemessage % {} mandate
-%D \macros
-%D {debuggerinfo}
-%D
-%D For debugging purposes we can enhance macros with the
-%D next alternative. Here \type{debuggerinfo} stands for both
-%D a macro accepting two arguments and a boolean (in fact a
-%D few macro's too).
-
-% \newif\ifdebuggerinfo
-%
-% \unexpanded\def\debuggerinfo#1#2%
-% {\ifdebuggerinfo
-% \writestatus{debugger}{#1:: #2}%
-% \fi}
-%
-% \ifdefined\writestatus \else \let\writestatus\normalwritestatus \fi
-
-% % % % % % % % % % % % % % % % % % % % % % % %
-
%D \macros
%D {rawgetparameters}
%D
@@ -6102,40 +6100,46 @@
%D \macros
%D {ScaledPointsToBigPoints,ScaledPointsToWholeBigPoints}
%D
-%D One characteristic of \POSTSCRIPT\ and \PDF\ is that both
-%D used big points (\TEX's bp). The next macros convert points
-%D and scaled points into big points.
+%D One characteristic of \POSTSCRIPT\ and \PDF\ is that both use big points (\TEX's
+%D bp). The next macros convert points and scaled points into big points. The magic
+%D factor $72/72.27$ can be found in most \TEX\ related books.
%D
%D \starttyping
%D \ScaledPointsToBigPoints {number} \target
%D \ScaledPointsToWholeBigPoints {number} \target
%D \stoptyping
%D
-%D The magic factor $72/72.27$ can be found in most \TEX\
-%D related books.
-% \PointsToBigPoints{10.53940pt}\test \test
-% \PointsToBigPoints{10.53941pt}\test \test
-% \PointsToBigPoints{10.53942pt}\test \test
+%D In pure \TEX:
-% \PointsToWholeBigPoints{10.53940pt}\test \test
-% \PointsToWholeBigPoints{10.53941pt}\test \test
-% \PointsToWholeBigPoints{10.53942pt}\test \test
+% \unexpanded\def\PointsToBigPoints#1#2%
+% {\edef#2{\withoutpt\the\dimexpr.996264\dimexpr#1\relax\relax}}
+%
+% \unexpanded\def\PointsToWholeBigPoints#1#2%
+% {\edef#2{\the\numexpr\dimexpr.996264\dimexpr#1\relax\relax/\maxcard\relax}}
+%
+% \unexpanded\def\ScaledPointsToBigPoints#1% % #2
+% {\PointsToBigPoints{\number#1\scaledpoint}} % {#2}
+%
+% \unexpanded\def\ScaledPointsToWholeBigPoints#1% % #2
+% {\PointsToWholeBigPoints{\number#1\scaledpoint}} % {#2}
-\unexpanded\def\PointsToBigPoints#1#2%
- {\edef#2{\withoutpt\the\dimexpr.996264\dimexpr#1\relax\relax}}
+%D This is slower but cleaner and more accurate too. The only place these are still
+%D used is in a few backend related macros.
-\unexpanded\def\PointsToWholeBigPoints#1#2%
- {\edef#2{\the\numexpr\dimexpr.996264\dimexpr#1\relax\relax/\maxcard\relax}}
+\let\tobigpoints \clf_tobigpoints
+\let\towholebigpoints\clf_towholebigpoints
-\unexpanded\def\ScaledPointsToBigPoints #1{\PointsToBigPoints {\number#1\scaledpoint}}
-\unexpanded\def\ScaledPointsToWholeBigPoints#1{\PointsToWholeBigPoints{\number#1\scaledpoint}}
+\unexpanded\def\PointsToBigPoints #1#2{\edef#2{\tobigpoints #1}} % can be avoided
+\unexpanded\def\PointsToWholeBigPoints #1#2{\edef#2{\towholebigpoints#1}} % can be avoided
+\unexpanded\def\ScaledPointsToBigPoints #1#2{\edef#2{\tobigpoints #1\scaledpoint}} % obsolete
+\unexpanded\def\ScaledPointsToWholeBigPoints#1#2{\edef#2{\towholebigpoints#1\scaledpoint}} % obsolete
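% For the record, what the new conversions deliver (a sketch; the values
% simply follow from the $72/72.27$ factor mentioned above):
%
% \PointsToBigPoints     {10pt}\test \test % 9.96264
% \PointsToWholeBigPoints{10pt}\test \test % 10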
%D \macros
%D {PointsToReal}
%D
-%D Points can be stripped from their suffix by using
-%D \type{\withoutpt}. The next macro enveloppes this macro.
+%D Points can be stripped of their suffix by using \type {\withoutpt}. The next
+%D macro envelops this macro.
%D
%D \starttyping
%D \PointsToReal {dimension} \target
@@ -7476,3 +7480,9 @@
% \appendtovaluelist{mylist}{mies}
%
% \showvalue{mylist}
+
+% \unexpanded\def\showtokenlist#1%
+% {\begingroup
+% \edef\tempstring{\the#1}%
+% \tx\ttbf\string#1: \tttf\meaning\tempstring
+% \endgroup}
diff --git a/tex/context/base/mkiv/syst-con.mkiv b/tex/context/base/mkiv/syst-con.mkiv
index 17c407819..afbdf8605 100644
--- a/tex/context/base/mkiv/syst-con.mkiv
+++ b/tex/context/base/mkiv/syst-con.mkiv
@@ -11,7 +11,7 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-\registerctxluafile{syst-con}{1.001}
+\registerctxluafile{syst-con}{}
\unprotect
diff --git a/tex/context/base/mkiv/syst-ini.mkiv b/tex/context/base/mkiv/syst-ini.mkiv
index bbc856a5e..4b5cc616b 100644
--- a/tex/context/base/mkiv/syst-ini.mkiv
+++ b/tex/context/base/mkiv/syst-ini.mkiv
@@ -344,6 +344,11 @@
\newtoks \scratchtoksfour \newtoks \scratchtoksfive \newtoks \scratchtokssix
\newbox \scratchboxfour \newbox \scratchboxfive \newbox \scratchboxsix
+%D \macros
+%D {tempstring}
+
+\let\tempstring\empty
+
%D \macros
%D {scratchwidth, scratchheight, scratchdepth, scratchoffset, scratchdistance}
%D
@@ -1011,30 +1016,30 @@
% \chardef\pdfnotrapped = 256
% \chardef\pdfnoid = 512
-\def\pdftexversion {\numexpr\pdffeedback version}
-\def\pdftexrevision {\pdffeedback revision}
-\def\pdflastlink {\numexpr\pdffeedback lastlink}
-\def\pdfretval {\numexpr\pdffeedback retval}
-\def\pdflastobj {\numexpr\pdffeedback lastobj}
-\def\pdflastannot {\numexpr\pdffeedback lastannot}
-\def\pdfxformname {\numexpr\pdffeedback xformname}
-\def\pdfcreationdate {\pdffeedback creationdate}
-\def\pdffontname {\numexpr\pdffeedback fontname}
-\def\pdffontobjnum {\numexpr\pdffeedback fontobjnum}
-\def\pdffontsize {\dimexpr\pdffeedback fontsize}
-\def\pdfpageref {\numexpr\pdffeedback pageref}
-\def\pdfcolorstackinit {\pdffeedback colorstackinit}
-
-\let\pdfxform \saveboxresource
-\let\pdflastxform \lastsavedboxresourceindex
-\let\pdfrefxform \useboxresource
-\let\pdfximage \saveimageresource
-\let\pdflastximage \lastsavedimageresourceindex
-\let\pdflastximagepages \lastsavedimageresourcepages
-\let\pdfrefximage \useimageresource
-\let\pdfsavepos \savepos
-\let\pdflastxpos \lastxpos
-\let\pdflastypos \lastypos
+\def\pdftexversion {\numexpr\pdffeedback version\relax}
+\def\pdftexrevision {\pdffeedback revision}
+\def\pdflastlink {\numexpr\pdffeedback lastlink\relax}
+\def\pdfretval {\numexpr\pdffeedback retval\relax}
+\def\pdflastobj {\numexpr\pdffeedback lastobj\relax}
+\def\pdflastannot {\numexpr\pdffeedback lastannot\relax}
+\def\pdfxformname {\numexpr\pdffeedback xformname\relax}
+\def\pdfcreationdate {\pdffeedback creationdate}
+\def\pdffontname {\numexpr\pdffeedback fontname\relax}
+\def\pdffontobjnum {\numexpr\pdffeedback fontobjnum\relax}
+\def\pdffontsize {\dimexpr\pdffeedback fontsize\relax}
+\def\pdfpageref {\numexpr\pdffeedback pageref\relax}
+\def\pdfcolorstackinit {\pdffeedback colorstackinit}
+
+\let\pdfxform \saveboxresource
+\let\pdflastxform \lastsavedboxresourceindex
+\let\pdfrefxform \useboxresource
+\let\pdfximage \saveimageresource
+\let\pdflastximage \lastsavedimageresourceindex
+\let\pdflastximagepages \lastsavedimageresourcepages
+\let\pdfrefximage \useimageresource
+\let\pdfsavepos \savepos
+\let\pdflastxpos \lastxpos
+\let\pdflastypos \lastypos
\edef\pdfcompresslevel {\pdfvariable compresslevel} \pdfcompresslevel \plusnine
\edef\pdfobjcompresslevel {\pdfvariable objcompresslevel} \pdfobjcompresslevel \plusone
@@ -1050,6 +1055,7 @@
\edef\pdfinclusionerrorlevel {\pdfvariable inclusionerrorlevel} \pdfinclusionerrorlevel \zerocount
\edef\pdfgentounicode {\pdfvariable gentounicode} \pdfgentounicode \plusone
\edef\pdfpagebox {\pdfvariable pagebox} \pdfpagebox \zerocount
+\edef\pdfmajorversion {\pdfvariable majorversion} % \pdfmajorversion \plusone
\edef\pdfminorversion {\pdfvariable minorversion} \pdfminorversion \plusseven
\edef\pdfuniqueresname {\pdfvariable uniqueresname} \pdfuniqueresname \zerocount
\edef\pdfhorigin {\pdfvariable horigin} \pdfhorigin 1in
@@ -1095,16 +1101,28 @@
\def\modulonumber#1#2{\the\numexpr#2-((((#2+(#1/2))/#1)-1)*#1)\relax}
\def\dividenumber#1#2{\the\numexpr(#2-(#1/2))/#1\relax}
-\def \texenginename {LuaTeX}
-\edef\texengineversion{\dividenumber{100}\luatexversion.\modulonumber{100}\luatexversion.\luatexrevision}
+\ifdefined\texenginename \else
+ %edef\texenginename{luatex}
+ \edef\texenginename{\directlua{tex.print(LUATEXENGINE)}}
+\fi
+
+\ifdefined\texengineversion \else
+ %edef\texengineversion{\dividenumber{100}\luatexversion.\modulonumber{100}\luatexversion.\luatexrevision}
+ \edef\texengineversion{\directlua{tex.print(LUATEXVERSION)}}
+\fi
+
+\ifdefined\texenginefunctionality \else
+ \edef\texenginefunctionality{\directlua{tex.print(LUATEXFUNCTIONALITY)}}
+\fi
%D We have no reason not to enable this:
\savingvdiscards\plusone
-%D We only can set this one via directives (system.synctex).
+%D We can only set this one via directives (system.synctex) and we only support
+%D the context variant.
-\let\synctex\undefined \newcount\synctex
+\let\synctex\undefined \newcount\synctex \let\normalsynctex\synctex
%D We get rid of the funny \TEX\ offset defaults of one inch by setting them to zero.
@@ -1156,22 +1174,26 @@
%D It makes more sense to have these here:
-\let\normalsuperscript\Usuperscript
-\let\normalsubscript \Usubscript
-\let\normalstartimath \Ustartmath
-\let\normalstopimath \Ustopmath
-\let\normalstartdmath \Ustartdisplaymath
-\let\normalstopdmath \Ustopdisplaymath
+\let\normalsuperscript \Usuperscript
+\let\normalsubscript \Usubscript
+\let\normalnosuperscript\Unosuperscript
+\let\normalnosubscript \Unosubscript
+\let\normalstartimath \Ustartmath
+\let\normalstopimath \Ustopmath
+\let\normalstartdmath \Ustartdisplaymath
+\let\normalstopdmath \Ustopdisplaymath
%D For now:
\ifdefined\protrusionboundary \else \let\protrusionboundary\boundary \fi
\ifdefined\wordboundary \else \let\wordboundary \noboundary \fi
-\ifdefined\mathrulesfam \else \newcount\mathrulesfam \fi
-\ifdefined\mathrulesmode \else \newcount\mathrulesmode \fi
-\ifdefined\mathsurroundmode \else \newcount\mathsurroundmode \fi
-\ifdefined\mathitalicsmode \else \newcount\mathitalicsmode \fi
+\ifdefined\mathrulesfam \else \newcount\mathrulesfam \fi
+\ifdefined\mathrulesmode \else \newcount\mathrulesmode \fi
+\ifdefined\mathsurroundmode \else \newcount\mathsurroundmode \fi
+\ifdefined\mathitalicsmode \else \newcount\mathitalicsmode \fi
+\ifdefined\mathdelimitersmode \else \newcount\mathdelimitersmode \fi
+\ifdefined\mathscriptboxmode \else \newcount\mathscriptboxmode \fi
\ifdefined\hyphenpenaltymode \else \newcount\hyphenpenaltymode \fi
\ifdefined\automatichyphenpenalty \else \newcount\automatichyphenpenalty \fi
@@ -1181,4 +1203,10 @@
\ifdefined\explicitdiscretionary \else \let\explicitdiscretionary \- \fi
\ifdefined\automaticdiscretionary \else \def\automaticdiscretionary{\Uchar\exhyphenchar} \fi
+\ifdefined\mathpenaltiesmode \else \newcount\mathpenaltiesmode \fi
+\ifdefined\prebinoppenalty \else \newcount\prebinoppenalty \fi
+\ifdefined\prerelpenalty \else \newcount\prerelpenalty \fi
+
+\ifdefined\breakafterdirmode \else \newcount\breakafterdirmode \fi
+
\protect \endinput
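% The freshly initialized \pdfmajorversion can be set in tandem with
% \pdfminorversion, for instance to target \PDF\ 2.0. This is only a sketch
% and only makes sense when the engine actually provides the variable:
%
% \pdfmajorversion\plustwo
% \pdfminorversion\zerocount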
diff --git a/tex/context/base/mkiv/syst-lua.lua b/tex/context/base/mkiv/syst-lua.lua
index ee3b8c327..35cd22123 100644
--- a/tex/context/base/mkiv/syst-lua.lua
+++ b/tex/context/base/mkiv/syst-lua.lua
@@ -6,8 +6,7 @@ if not modules then modules = { } end modules ['syst-lua'] = {
license = "see context related readme files"
}
-local find, match = string.find, string.match
-local tonumber = tonumber
+local find = string.find
local S, C, P, lpegmatch, lpegtsplitat = lpeg.S, lpeg.C, lpeg.P, lpeg.match, lpeg.tsplitat
commands = commands or { }
diff --git a/tex/context/base/mkiv/syst-lua.mkiv b/tex/context/base/mkiv/syst-lua.mkiv
index b5001ab62..ca5f9679f 100644
--- a/tex/context/base/mkiv/syst-lua.mkiv
+++ b/tex/context/base/mkiv/syst-lua.mkiv
@@ -11,7 +11,7 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-\registerctxluafile{syst-lua}{1.001}
+\registerctxluafile{syst-lua}{}
\unprotect
diff --git a/tex/context/base/mkiv/tabl-frm.mkiv b/tex/context/base/mkiv/tabl-frm.mkiv
index 639d6f06d..c0f4b7cd1 100644
--- a/tex/context/base/mkiv/tabl-frm.mkiv
+++ b/tex/context/base/mkiv/tabl-frm.mkiv
@@ -134,6 +134,56 @@
{\framedparameter\c!after
\endgroup}
+% \unexpanded\def\startframedrow
+% {\advance\c_tabl_framed_r\plusone
+% \c_tabl_framed_c\zerocount
+% \d_tabl_framed_h\zeropoint
+% \bgroup
+% \edef\currentframed{\number\c_tabl_framed_r}%
+% \edef\currentframed
+% {\??framedtablerow\currentframedtable
+% \ifcsname\??framedtablerow\currentframedtable:\currentframed\endcsname
+% :\currentframed
+% \else\ifcsname\??framedtablerow\currentframedtable:\v!each\endcsname
+% :\v!each
+% \fi\fi}%
+% \dosingleempty\pack_framed_start_framed_nop_indeed}
+%
+% \unexpanded\def\stopframedrow
+% {\dofastloopcs\c_tabl_framed_c\tabl_framed_flush_row
+% \stopframed
+% \nointerlineskip
+% \vskip\zeropoint\relax
+% \framedparameter\c!inbetween}
+%
+% \unexpanded\def\tabl_framed_flush_row
+% {\vpack to \d_tabl_framed_h{\flushbox\??framedtable{\number\fastloopindex}\vfill}%
+% \ifdim\d_tabl_framed_d=\zeropoint\else\kern\d_tabl_framed_d\fi}
+%
+% \unexpanded\def\startframedcell
+% {\advance\c_tabl_framed_c\plusone
+% \setbox\b_tabl_framed\hpack\bgroup
+% %\bgroup
+% \edef\currentframed{\number\c_tabl_framed_c}%
+% \edef\currentframed
+% {\??framedtablecolumn\currentframedtable
+% \ifcsname\??framedtablecolumn\currentframedtable:\currentframed\endcsname
+% :\currentframed
+% \else\ifcsname\??framedtablecolumn\currentframedtable:\v!each\endcsname
+% :\v!each
+% \fi\fi}%
+% \dosingleempty\pack_framed_start_framed_nop_indeed}
+%
+% \unexpanded\def\stopframedcell
+% {\stopframed
+% %\egroup
+% \ifdim\ht\b_tabl_framed>\d_tabl_framed_h
+% \d_tabl_framed_h\ht\b_tabl_framed
+% \fi
+% \savebox\??framedtable{\number\c_tabl_framed_c}{\box\b_tabl_framed}}
+
+% a two pass variant that deals with the height .. so no catcode changes here
+
\unexpanded\def\startframedrow
{\advance\c_tabl_framed_r\plusone
\c_tabl_framed_c\zerocount
@@ -160,6 +210,52 @@
{\vpack to \d_tabl_framed_h{\flushbox\??framedtable{\number\fastloopindex}\vfill}%
\ifdim\d_tabl_framed_d=\zeropoint\else\kern\d_tabl_framed_d\fi}
+\newcount\c_tabl_framed_pass
+
+\let\stopframedrow\relax
+
+\unexpanded\def\startframedrow#1\stopframedrow
+ {\advance\c_tabl_framed_r\plusone
+ \startframedrow_one#1\stopframedrow_one
+ \startframedrow_two#1\stopframedrow_two}
+
+\def\startframedrow_one
+ {\bgroup
+ \c_tabl_framed_pass\plusone
+ \c_tabl_framed_c\zerocount
+ \d_tabl_framed_h\zeropoint
+ \settrialtypesetting
+ \gobblesingleempty}
+
+\unexpanded\def\stopframedrow_one
+ {\normalexpanded{\egroup\d_tabl_framed_h\the\d_tabl_framed_h\relax}}
+
+\def\startframedrow_two
+ {\bgroup
+ \c_tabl_framed_c\zerocount
+ \c_tabl_framed_pass\plustwo
+ \edef\currentframed{\number\c_tabl_framed_r}%
+ \edef\currentframed
+ {\??framedtablerow\currentframedtable
+ \ifcsname\??framedtablerow\currentframedtable:\currentframed\endcsname
+ :\currentframed
+ \else\ifcsname\??framedtablerow\currentframedtable:\v!each\endcsname
+ :\v!each
+ \fi\fi}%
+ \dosingleempty\pack_framed_start_framed_nop_indeed}
+
+\unexpanded\def\stopframedrow_two
+ {\dofastloopcs\c_tabl_framed_c\tabl_framed_flush_row
+ \stopframed
+ \nointerlineskip
+ \vskip\zeropoint\relax
+ \framedparameter\c!inbetween}
+
+\unexpanded\def\tabl_framed_flush_row_two
+ {\vpack to \d_tabl_framed_h{\flushbox\??framedtable{\number\fastloopindex}\vfill}%
+ \ifdim\d_tabl_framed_d=\zeropoint\else\kern\d_tabl_framed_d\fi}
+
+
\unexpanded\def\startframedcell
{\advance\c_tabl_framed_c\plusone
\setbox\b_tabl_framed\hpack\bgroup
@@ -172,15 +268,26 @@
\else\ifcsname\??framedtablecolumn\currentframedtable:\v!each\endcsname
:\v!each
\fi\fi}%
+ \ifcase\c_tabl_framed_pass
+ \or
+ \letframedparameter\c!background\empty
+ \letframedparameter\c!frame\v!off
+ \or
+ \letframedparameter\c!height\d_tabl_framed_h
+ \fi
\dosingleempty\pack_framed_start_framed_nop_indeed}
\unexpanded\def\stopframedcell
{\stopframed
%\egroup
- \ifdim\ht\b_tabl_framed>\d_tabl_framed_h
- \d_tabl_framed_h\ht\b_tabl_framed
- \fi
- \savebox\??framedtable{\number\c_tabl_framed_c}{\box\b_tabl_framed}}
+ \ifcase\c_tabl_framed_pass
+ \or
+ \ifdim\ht\b_tabl_framed>\d_tabl_framed_h
+ \d_tabl_framed_h\ht\b_tabl_framed
+ \fi
+ \else
+ \savebox\??framedtable{\number\c_tabl_framed_c}{\box\b_tabl_framed}%
+ \fi}
\protect \endinput
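% Usage of the two pass variant is the same as before: the first pass only
% measures the row, the second pass typesets the cells at the shared height.
% A minimal sketch (the wrapper environment and its name are assumptions for
% illustration only):
%
% \startframedtable[demo]
%   \startframedrow
%     \startframedcell one \stopframedcell
%     \startframedcell two\par three \stopframedcell
%   \stopframedrow
% \stopframedtable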
diff --git a/tex/context/base/mkiv/tabl-mis.mkiv b/tex/context/base/mkiv/tabl-mis.mkiv
index 8db31b31d..b2af1993e 100644
--- a/tex/context/base/mkiv/tabl-mis.mkiv
+++ b/tex/context/base/mkiv/tabl-mis.mkiv
@@ -106,10 +106,11 @@
\installcorenamespace{paragraphs}
-\installcommandhandler \??paragraphs {paragraphs} \??paragraphs
+\installframedcommandhandler \??paragraphs {paragraphs} \??paragraphs
\setupparagraphs
[\c!n=3,
+ \c!offset=\zeropoint,
\c!before=\blank,
\c!after=\blank,
\c!distance=\emwidth,
@@ -123,6 +124,7 @@
\c!rulecolor=,
\c!style=,
\c!color=,
+ \c!frame=\v!off,
\c!top=\vss,
\c!bottom=\vfill]
@@ -163,13 +165,14 @@
\unexpanded\def\startparagraphs[#1]% quite slow
{\bgroup % (1)
\edef\currentparagraphs{#1}%
- \paragraphsparameter\c!before
+ % \paragraphsparameter\c!before
\edef\p_width{\paragraphsparameter\c!width}%
\ifx\p_width\empty
\d_typo_paragraphs_width\availablehsize
\else
\d_typo_paragraphs_width\p_width\relax
\fi
+ \advance\d_typo_paragraphs_width-2\dimexpr\paragraphsparameter\c!offset\relax
\c_typo_paragraphs_max\paragraphsparameter\c!n\relax
\d_typo_paragraphs_auto\d_typo_paragraphs_width\relax
\scratchcounter\zerocount
@@ -191,9 +194,14 @@
\parindent\zeropoint
\c_typo_paragraphs_n\zerocount
\let\\=\typo_paragraphs_next % downward compatible
- \dontleavehmode\hbox
+ \edef\p_offset{\paragraphsparameter\c!offset}%
+ \doifelsedimension\p_offset
+ {\scratchoffset\p_offset}%
+ {\scratchoffset\zeropoint}%
+ \setbox\scratchbox\hpack
\bgroup % (2)
\forgetall
+ \advance\hsize-2\scratchoffset
\let\typo_paragraphs_start_cell\typo_paragraphs_start_cell_indeed
\let\typo_paragraphs_stop_cell \typo_paragraphs_stop_cell_indeed
\typo_paragraphs_start_cell_indeed}
@@ -201,6 +209,9 @@
\unexpanded\def\stopparagraphs
{\stopparagraphscell
\egroup % (2)
+ \letparagraphsparameter\c!align\v!flushleft % normal
+ \paragraphsparameter\c!before
+ \dontleavehmode\inheritedparagraphsframed{\box\scratchbox}%
\paragraphsparameter\c!after
\egroup} % (1)
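% Because paragraphs now use the framed command handler, the usual frame
% related keys apply to the whole instance. A small sketch (the instance name
% is just an example):
%
% \defineparagraphs[sidebyside][n=2]
% \setupparagraphs [sidebyside][frame=on,offset=1ex]
%
% \startsidebyside
%   first \sidebyside second
% \stopsidebyside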
diff --git a/tex/context/base/mkiv/tabl-ntb.mkiv b/tex/context/base/mkiv/tabl-ntb.mkiv
index 276f85d31..9acf2ef36 100644
--- a/tex/context/base/mkiv/tabl-ntb.mkiv
+++ b/tex/context/base/mkiv/tabl-ntb.mkiv
@@ -331,6 +331,20 @@
\def\tabl_ntb_col_doif #1#2{\ifcsname\??naturaltablecol\m_tabl_tbl_level:\number#1:\number#2\endcsname\expandafter\firstofoneargument \else\expandafter\gobbleoneargument \fi}
\def\tabl_ntb_col_doifnot #1#2{\ifcsname\??naturaltablecol\m_tabl_tbl_level:\number#1:\number#2\endcsname\expandafter\gobbleoneargument \else\expandafter\firstofoneargument \fi}
+%D If we ever run into memory issues we can do:
+%
+% \def\tabl_ntb_let_tag#1#2#3%
+% {\ifx#3\c_tabl_ntb_none\else
+% \expandafter\let\csname\??naturaltabletag\m_tabl_tbl_level:\number#1:\number#2\endcsname#3%
+% \fi}
+%
+% \def\tabl_ntb_get_tag#1#2%
+% {\ifcsname\??naturaltabletag\m_tabl_tbl_level:\number#1:\number#2\endcsname
+% \lastnamedcs
+% \else
+% \c_tabl_ntb_none
+% \fi}
+
% not used
%
% \def\tabl_ntb_tag_state#1#2{\ifcsname\??naturaltabletag\m_tabl_tbl_level:\number#1:\number#2\endcsname\zerocount\else\plusone\fi}
@@ -383,7 +397,12 @@
\newif\iftightTBLrowspan \tightTBLrowspantrue
\newif\iftightTBLcolspan \tightTBLcolspanfalse
-\newif\iftraceTABLE \traceTABLEfalse
+\newconditional \c_tabl_ntb_trace_widths
+
+\installtextracker
+ {tables.natural.widths}
+ {\settrue \c_tabl_ntb_trace_widths}
+ {\setfalse\c_tabl_ntb_trace_widths}
% so far
@@ -1446,8 +1465,8 @@
% case: nc=maxcolumns
\else
\scratchdimen\tabl_ntb_get_hei{#1}%
- \setbox\scratchbox\hbox
- {\lower\ht\scratchbox\hbox{\raise\scratchdimen\box\scratchbox}}%
+ \setbox\scratchbox\hpack
+ {\lower\ht\scratchbox\hpack{\raise\scratchdimen\box\scratchbox}}%
\ht\scratchbox\scratchdimen
\fi
\dp\scratchbox\zeropoint
@@ -1485,19 +1504,21 @@
\let\tabl_ntb_preroll\relax
-\def\tabl_ntb_table_get_max_width#1#2%
- {#1\wd#2\relax}
+\def\tabl_ntb_table_get_max_width
+ {\scratchdimen\wd\scratchbox\relax}
-% first needs testing (in projects):
-%
-% \def\tabl_ntb_table_get_max_width#1#2%
-% {#1\zeropoint
-% \dorecurse\c_tabl_ntb_maximum_col
-% {\advance#1\tabl_ntb_get_wid\recurselevel
-% \advance#1\tabl_ntb_get_dis\recurselevel}%
-% \ifdim#1<\wd#2\relax
-% #1\wd#2\relax
-% \fi}
+% enabled per 2018-02-22
+
+\def\tabl_ntb_table_get_max_width_step
+ {\advance\scratchdimen\tabl_ntb_get_wid\fastloopindex
+ \advance\scratchdimen\tabl_ntb_get_dis\fastloopindex}
+
+\def\tabl_ntb_table_get_max_width
+ {\scratchdimen\zeropoint
+ \dofastloopcs\c_tabl_ntb_maximum_col\tabl_ntb_table_get_max_width_step
+ \ifdim\scratchdimen<\wd\scratchbox\relax
+ \scratchdimen\wd\scratchbox\relax
+ \fi}
\def\tabl_ntb_table_stop
{\forgetall % new, here see narrower-004.tex
@@ -1534,7 +1555,7 @@
\fi
\tabl_ntb_let_dis\c_tabl_ntb_maximum_col\zeropoint
%
- \tabl_ntb_table_get_max_width\scratchdimen\scratchbox
+ \tabl_ntb_table_get_max_width % \scratchdimen\scratchbox
%
\ifautoTBLspread
% experimental, stretch non fixed cells to \hsize
@@ -1656,10 +1677,10 @@
\ifmultipleTBLheads
\dorecurse\c_tabl_ntb_n_of_head_lines
{\setbox\scratchbox\vsplit\tsplitcontent to \lineheight
- \setbox\tsplithead\vbox{\unvcopy\tsplithead\unvcopy\scratchbox}}%
+ \setbox\tsplithead\vbox{\unvcopy\tsplithead\unvcopy\scratchbox}}% \vpack ?
\dorecurse\c_tabl_ntb_n_of_next_lines
{\setbox\scratchbox\vsplit\tsplitcontent to \lineheight
- \setbox\tsplitnext\vbox{\unvcopy\tsplitnext\unvcopy\scratchbox}}%
+ \setbox\tsplitnext\vbox{\unvcopy\tsplitnext\unvcopy\scratchbox}}% \vpack ?
\fi
\edef\p_spaceinbetween{\naturaltablelocalparameter\c!spaceinbetween}%
\ifx\p_spaceinbetween\empty\else
@@ -1682,7 +1703,7 @@
\def\tabl_ntb_check_widths_two{\tabl_ntb_check_widths_indeed\plusone } % 1 = real run
\def\tabl_ntb_check_widths_indeed#1%
- {\iftraceTABLE\tabl_ntb_show_widths B#1\fi
+ {\ifconditional\c_tabl_ntb_trace_widths\tabl_ntb_show_widths B#1\fi
\!!counta\zerocount
\!!dimena\dimexpr
\hsize
@@ -1715,7 +1736,7 @@
\fi
\fi
\fi}%
- \iftraceTABLE\tabl_ntb_show_widths M#1\fi
+ \ifconditional\c_tabl_ntb_trace_widths\tabl_ntb_show_widths M#1\fi
\ifcase\!!counta \else \divide\!!dimena \!!counta \fi
\dorecurse\c_tabl_ntb_maximum_col
{\scratchdimen\tabl_ntb_get_wid\recurselevel\relax
@@ -1728,7 +1749,7 @@
\tabl_ntb_set_wid\recurselevel{\the\!!dimena}%
\fi
\fi}%
- \iftraceTABLE\tabl_ntb_show_widths E#1\fi}
+ \ifconditional\c_tabl_ntb_trace_widths\tabl_ntb_show_widths E#1\fi}
\def\tabl_ntb_check_heights_one_indeed
{\!!countb\tabl_ntb_get_row\c_tabl_ntb_current_row_three\c_tabl_ntb_current_col_three\relax
diff --git a/tex/context/base/mkiv/tabl-tab.mkiv b/tex/context/base/mkiv/tabl-tab.mkiv
index 76f7f76c3..54b18a385 100644
--- a/tex/context/base/mkiv/tabl-tab.mkiv
+++ b/tex/context/base/mkiv/tabl-tab.mkiv
@@ -1605,11 +1605,11 @@
\setbox\tsplitcontent\box#1%
\ifconditional\c_tabl_table_repeat_head \ifconditional\hassometablehead
\setbox\tsplithead\vsplit\tsplitcontent to \lineheight
- \setbox\tsplithead\vbox{\unvbox\tsplithead}%
+ \setbox\tsplithead\vbox{\unvbox\tsplithead}% \vpack ?
\fi \fi
\ifconditional\c_tabl_table_repeat_tail \ifconditional\hassometabletail
\setbox\tsplittail\vsplit\tsplitcontent to \lineheight
- \setbox\tsplittail\vbox{\unvbox\tsplittail}%
+ \setbox\tsplittail\vbox{\unvbox\tsplittail}% \vpack ?
\fi \fi
\ifinsidefloat\else
\def\tsplitbeforeresult{\startbaselinecorrection}%
diff --git a/tex/context/base/mkiv/tabl-tbl.mkiv b/tex/context/base/mkiv/tabl-tbl.mkiv
index b21771009..7a0d2c8a8 100644
--- a/tex/context/base/mkiv/tabl-tbl.mkiv
+++ b/tex/context/base/mkiv/tabl-tbl.mkiv
@@ -15,7 +15,7 @@
\unprotect
-\registerctxluafile{tabl-tbl.lua}{1.001} % experiment
+\registerctxluafile{tabl-tbl}{} % experiment
%D I can probably reimplement this using a \LUATEX\ combination
%D but it does not pay of in development time. If I need something
@@ -168,6 +168,7 @@
\newcount \c_tabl_tabulate_plines_min
\newcount \c_tabl_tabulate_plines_max
\newcount \c_tabl_tabulate_max_colorcolumn
+\newcount \c_tabl_tabulate_max_vrulecolumn
\newcount \c_tabl_tabulate_repeathead
\newcount \c_tabl_tabulate_noflines
\newcount \c_tabl_tabulate_totalnoflines
@@ -216,6 +217,7 @@
\newconstant \c_tabl_tabulate_pass
\newconstant \c_tabl_tabulate_type
+\newconstant \c_tabl_tabulate_kind % 1=strong 2=equals
\newconstant \c_tabl_tabulate_splitlinemode \c_tabl_tabulate_splitlinemode\plusone
\newconstant \c_tabl_tabulate_colorspan
\newconstant \c_tabl_tabulate_localcolorspan
@@ -225,7 +227,6 @@
\let \m_tabl_tabulate_separator_factor \empty % fraction
-\newif \iftracetabulate % will become a tracker
\newtoks \everytabulatepar % where used ?
\newtoks \everytabulate % public ?
@@ -243,6 +244,7 @@
\installcorenamespace{tabulateheader}
\installcorenamespace{tabulatealigning}
\installcorenamespace{tabulatepreamble}
+\installcorenamespace{tabulatevrule}
\installcorenamespace{tabulatehead}
\installcorenamespace{tabulatefoot}
@@ -262,12 +264,6 @@
\expandafter\tabl_tabulate_initialize_boxes_step
\fi}
-% \def\tabl_tabulate_initialize_box#1% also used elsewhere
-% {\ifcsname\??tabulatebox\number#1\endcsname
-% \tabl_tabulate_initialize_box_yes#1%
-% \else
-% \tabl_tabulate_initialize_box_nop#1%
-% \fi}
\def\tabl_tabulate_initialize_box#1% also used elsewhere
{\ifcsname\??tabulatebox\number#1\endcsname
\tabl_tabulate_initialize_box_yes
@@ -275,7 +271,6 @@
\tabl_tabulate_initialize_box_nop#1%
\fi}
-%def\tabl_tabulate_initialize_box_yes#1{\global \setbox\csname\??tabulatebox\number#1\endcsname\emptybox}
\def\tabl_tabulate_initialize_box_yes {\global \setbox\lastnamedcs\emptybox}
\def\tabl_tabulate_initialize_box_nop#1{\expandafter\newbox\csname\??tabulatebox\number#1\endcsname}
@@ -312,11 +307,16 @@
\kern-\linewidth
\tabl_tabulate_break_no}
+\installtextracker
+ {tables.tabulate.breaks}
+ {\let\tabl_tabulate_break_no_tracer\tabl_tabulate_nobreak_inject_tracer}
+ {\let\tabl_tabulate_break_no_tracer\donothing}
+
+\let\tabl_tabulate_break_no_tracer\donothing
+
\def\tabl_tabulate_nobreak_inject_indeed
{\tabl_tabulate_break_no
- \iftracetabulate
- \tabl_tabulate_nobreak_inject_tracer
- \fi}
+ \tabl_tabulate_break_no_tracer}
\def\tabl_tabulate_nobreak_inject
{\tabulatenoalign{\tabl_tabulate_nobreak_inject_indeed}}
@@ -1037,6 +1037,7 @@
\appendtoks
\setuevalue{\e!start\currenttabulation}{\tabl_start_defined[\currenttabulation]}%
+ \letvalue{\e!stop\currenttabulation}\relax
\letvalue{\??tabulatehead\currenttabulation}\empty
\letvalue{\??tabulatefoot\currenttabulation}\empty
\to \everydefinetabulation
@@ -1076,7 +1077,7 @@
%\expandafter\ifx\lastnamedcs\empty
\let\tabl_tabulate_insert_foot\empty
\else
- \let\tabl_tabulate_insert_foot\tabl_tabulate_insert_head_content
+ \let\tabl_tabulate_insert_foot\tabl_tabulate_insert_foot_content
\fi
\else
\let\tabl_tabulate_insert_foot\empty
@@ -1200,7 +1201,7 @@
\unexpanded\def\tabl_tabulate_start_ignore
{\em Nested tabulate is not (yet) supported.\relax
- \expandafter\gobbleuntil\csname\e!stop\v!tabulate\endcsname}
+ \expandafter\gobbleuntil\csname\ifconditional\c_tabl_generic stoptabulate\else\e!stop\v!tabulate\fi\endcsname}
\appendtoks
\letvalue{\e!start\v!tabulate}\tabl_tabulate_start_ignore % only the main one
@@ -1341,12 +1342,18 @@
\d_tabl_tabulate_indent\dimexpr\leftskip+\hangindent\ifx\p_indenting\v!yes+\parindent\fi\relax
\global\c_tabl_tabulate_column\zerocount
\processcontent
- {\e!stop\ifx\currenttabulationparent\empty\v!tabulate\else\currenttabulationparent\fi}
+ {\ifconditional\c_tabl_generic stoptabulate\else\e!stop\ifx\currenttabulationparent\empty\v!tabulate\else\currenttabulationparent\fi\fi}
\tabl_tabulate_insert_body
\tabl_tabulate_process}
\def\tabulateEQ
- {\ifconditional\c_tabl_tabulate_firstflushed\else\tabulationparameter{EQ}\fi
+ {\ifconditional\c_tabl_tabulate_firstflushed\else
+ \dostarttaggedchained\t!ignore\empty\empty
+ \dostarttagged\t!ignore\empty
+ \tabulationparameter{EQ}%
+ \dostoptagged
+ \dostoptagged
+ \fi
\global\setfalse\c_tabl_tabulate_equal}
% The next ones will be token registers
@@ -1420,55 +1427,106 @@
\doifelsefastoptionalcheck{\tabl_tabulate_set_color_column_yes#1}{\tabl_tabulate_set_color_column_nop#1}}
\def\tabl_tabulate_set_color_column_nop
- {\tabl_tabulate_column_normal}
+ {\tabl_tabulate_column_normal\zerocount}
\def\tabl_tabulate_set_color_column_yes#1[#2]%
{\xdef\m_tabl_tabulate_color_local{#2}%
- \tabl_tabulate_column_normal#1}
+ \tabl_tabulate_column_normal\zerocount#1}
% normal columns:
-\def\tabl_tabulate_column_normal#1%
+% \def\tabl_tabulate_column_normal#1#2%
+% {\unskip
+% \aligntab
+% \ifconditional\c_tabl_tabulate_equal\tabulateequalpos\else\tabulatenormalpos\fi
+% \aligntab
+% \global\c_tabl_tabulate_kind#1%
+% \global\c_tabl_tabulate_type#2%
+% \aligntab}
+
+\def\tabl_tabulate_column_normal#1#2%
{\unskip
\aligntab
\ifconditional\c_tabl_tabulate_equal\tabulateequalpos\else\tabulatenormalpos\fi
+ \ifnum\c_tabl_tabulate_column>\c_tabl_tabulate_max_vrulecolumn\else
+ \tabl_tabulate_column_vrule_setup
+ \fi
\aligntab
- \global\c_tabl_tabulate_type#1%
+ \global\c_tabl_tabulate_kind#1%
+ \global\c_tabl_tabulate_type#2%
\aligntab}
% equal columns
-\def\tabl_tabulate_column_equal#1%
+\def\tabl_tabulate_column_equal#1#2%
{\unskip
\aligntab
\tabulateequalpos
- \aligntab\global\c_tabl_tabulate_type#1%
+ \aligntab
+ \global\c_tabl_tabulate_kind#1%
+ \global\c_tabl_tabulate_type#2%
\aligntab}
% ruled columns
-\def\tabl_tabulate_column_vruled#1%
+\def\tabl_tabulate_column_vruled_preset
+ {\global\let\m_tabl_tabulate_vrule_color_local\m_tabl_tabulate_vrule_color_default
+ \global\d_tabl_tabulate_vrulethickness_local\d_tabl_tabulate_vrulethickness_default}
+
+\def\tabl_tabulate_column_vruled#1#2%
{\unskip % 0-n
-% \ifnum\c_tabl_tabulate_column=\plusone
-% \global\c_tabl_tabulate_has_rule_spec_first\plusone
-% \else\ifnum\c_tabl_tabulate_column=\c_tabl_tabulate_nofcolumns
-% \global\c_tabl_tabulate_has_rule_spec_last\plusone
-% \fi\fi
- \global\let\m_tabl_tabulate_vrule_color_local\m_tabl_tabulate_vrule_color_default
- \global\d_tabl_tabulate_vrulethickness_local\d_tabl_tabulate_vrulethickness_default
- \doifelsefastoptionalcheck{\tabl_tabulate_column_vruled_yes#1}{\tabl_tabulate_column_vruled_nop#1}}
+ %\ifnum\c_tabl_tabulate_column=\plusone
+ % \global\c_tabl_tabulate_has_rule_spec_first\plusone
+ %\else\ifnum\c_tabl_tabulate_column=\c_tabl_tabulate_nofcolumns
+ % \global\c_tabl_tabulate_has_rule_spec_last\plusone
+ %\fi\fi
+ \ifnum\c_tabl_tabulate_column>\c_tabl_tabulate_max_vrulecolumn
+ \global\c_tabl_tabulate_max_vrulecolumn\c_tabl_tabulate_column
+ \fi
+ \doifelsefastoptionalcheck{\tabl_tabulate_column_vruled_yes#1#2}{\tabl_tabulate_column_vruled_nop#1#2}}
+
+\def\tabl_tabulate_column_vrule_setup
+ {\begincsname\??tabulatevrule\the\c_tabl_tabulate_column\endcsname}
\def\tabl_tabulate_column_vruled_nop
- {\tabl_tabulate_column_normal}
+ {\expandafter\glet\csname\??tabulatevrule\the\c_tabl_tabulate_column\endcsname\tabl_tabulate_column_vruled_preset
+ \tabl_tabulate_column_normal}
\def\tabl_tabulate_column_vruled_step#1%
{\doifelsenumber{#1}
{\global\d_tabl_tabulate_vrulethickness_local#1\d_tabl_tabulate_vrulethickness_default}
{\xdef\m_tabl_tabulate_vrule_color_local{#1}}}
-\def\tabl_tabulate_column_vruled_yes#1[#2]%
- {\rawprocesscommalist[#2]\tabl_tabulate_column_vruled_step
- \tabl_tabulate_column_normal#1}
+\def\tabl_tabulate_column_vruled_yes#1#2[#3]%
+ {\expandafter\gdef\csname\??tabulatevrule\the\c_tabl_tabulate_column\endcsname
+ {\tabl_tabulate_column_vruled_preset
+ \rawprocesscommalist[#3]\tabl_tabulate_column_vruled_step}%
+ \tabl_tabulate_column_normal#1#2}
+
+\def\tabl_tabulate_vrule_reset
+ {\ifcase\c_tabl_tabulate_max_vrulecolumn\else
+ \tabl_tabulate_vrule_reset_indeed
+ \fi}
+
+\def\tabl_tabulate_vrule_reset_indeed
+ {\dofastloopcs\c_tabl_tabulate_max_vrulecolumn\tabl_tabulate_vrule_reset_step
+ \global\c_tabl_tabulate_max_vrulecolumn\zerocount}
+
+\def\tabl_tabulate_vrule_reset_step % undefined or relax
+ {\global\expandafter\let\csname\??tabulatevrule\the\fastloopindex\endcsname\undefined}
+
+\appendtoks
+ \tabl_tabulate_vrule_reset
+\to \t_tabl_tabulate_every_after_row
+
+% sometimes more efficient:
+%
+% \def\tabl_tabulate_column_vruled_yes#1#2[#3]%
+% {\rawprocesscommalist[#3]\tabl_tabulate_column_vruled_step
+% \expandafter\xdef\csname\??tabulatevrule\the\c_tabl_tabulate_column\endcsname
+% {\global\d_tabl_tabulate_vrulethickness_local\the\d_tabl_tabulate_vrulethickness_default
+% \noexpand\xdef\noexpand\m_tabl_tabulate_vrule_color_local{\m_tabl_tabulate_vrule_color_local}}%
+% \tabl_tabulate_column_normal#1#2}
\def\tabl_tabulate_column_vruled_normal
{\vrule\s!width\d_tabl_tabulate_vrulethickness\relax}
@@ -1506,7 +1564,7 @@
% auto columns
\def\tabl_tabulate_column_inject_auto
- {\tabl_tabulate_column_normal\zerocount
+ {\tabl_tabulate_column_normal\zerocount\zerocount
\ifnum\c_tabl_tabulate_column>\c_tabl_tabulate_columns\relax
\expandafter\NR
\else
@@ -1633,7 +1691,7 @@
\ifnum\c_tabl_tabulate_column>\c_tabl_tabulate_max_colorcolumn
\global\c_tabl_tabulate_max_colorcolumn\c_tabl_tabulate_column
\fi
- \setxvalue{\??tabulatecolor\the\c_tabl_tabulate_column}{#1}%
+ \global\expandafter\xdef\csname\??tabulatecolor\the\c_tabl_tabulate_column\endcsname{#1}%
\hbox \thealignbackgroundcolorattr{#1}{}% pack ?
\endgroup}
@@ -1802,22 +1860,22 @@
% so far
-\unexpanded\def\tabl_tabulate_VL_first{\tabl_tabulate_column_vruled\zerocount}
-\unexpanded\def\tabl_tabulate_NC_first{\tabl_tabulate_column_normal\zerocount}
-\unexpanded\def\tabl_tabulate_RC_first{\tabl_tabulate_column_normal\plusone}
-\unexpanded\def\tabl_tabulate_HC_first{\tabl_tabulate_column_normal\plustwo}
-\unexpanded\def\tabl_tabulate_EQ_first{\tabl_tabulate_column_equal \zerocount}
-\unexpanded\def\tabl_tabulate_RQ_first{\tabl_tabulate_column_equal \plusone}
-\unexpanded\def\tabl_tabulate_HQ_first{\tabl_tabulate_column_equal \plustwo}
+\unexpanded\def\tabl_tabulate_VL_first{\tabl_tabulate_column_vruled\zerocount\zerocount}
+\unexpanded\def\tabl_tabulate_NC_first{\tabl_tabulate_column_normal\zerocount\zerocount}
+\unexpanded\def\tabl_tabulate_RC_first{\tabl_tabulate_column_normal\zerocount\plusone}
+\unexpanded\def\tabl_tabulate_HC_first{\tabl_tabulate_column_normal\zerocount\plustwo}
+\unexpanded\def\tabl_tabulate_EQ_first{\tabl_tabulate_column_equal \plustwo \zerocount}
+\unexpanded\def\tabl_tabulate_RQ_first{\tabl_tabulate_column_equal \zerocount\plusone}
+\unexpanded\def\tabl_tabulate_HQ_first{\tabl_tabulate_column_equal \zerocount\plustwo}
%unexpanded\def\tabl_tabulate_NG_first{\NC\tabl_tabulate_charalign}
%unexpanded\def\tabl_tabulate_NG_first{\NC}
%unexpanded\def\tabl_tabulate_NN_first{\NC\tabl_tabulate_digits} % new, undocumented, test first
%unexpanded\def\tabl_tabulate_ND_first{\NC\tabl_tabulate_digits} % same, for old times sake
-\unexpanded\def\tabl_tabulate_NG_first{\tabl_tabulate_column_normal\zerocount}
-\unexpanded\def\tabl_tabulate_NN_first{\tabl_tabulate_column_normal\zerocount\tabl_tabulate_digits} % new, undocumented, test first
-\unexpanded\def\tabl_tabulate_ND_first{\tabl_tabulate_column_normal\zerocount\tabl_tabulate_digits} % same, for old times sake
+\unexpanded\def\tabl_tabulate_NG_first{\tabl_tabulate_column_normal\zerocount\zerocount}
+\unexpanded\def\tabl_tabulate_NN_first{\tabl_tabulate_column_normal\zerocount\zerocount\tabl_tabulate_digits} % new, undocumented, test first
+\unexpanded\def\tabl_tabulate_ND_first{\tabl_tabulate_column_normal\zerocount\zerocount\tabl_tabulate_digits} % same, for old times sake
\unexpanded\def\tabl_tabulate_NR_first {\tabl_tabulate_NR_common\conditionaltrue \tabl_tabulate_check_penalties} % next row
\unexpanded\def\tabl_tabulate_NB_first {\tabl_tabulate_NR_common\conditionaltrue \tabl_tabulate_nobreak_inject } % next row no break
@@ -1830,42 +1888,78 @@
\unexpanded\def\tabl_tabulate_CM_first{\global\c_tabl_tabulate_localcolorspan\plustwo \tabl_tabulate_set_color_column\zerocount}
\unexpanded\def\tabl_tabulate_CR_first{\global\c_tabl_tabulate_localcolorspan\plusthree\tabl_tabulate_set_color_column\zerocount}
+%D Sort of special:
+%D
+%D \startbuffer
+%D \startitemize[n]
+%D \starttabulate[|||||]
+%D \NC p \NC \itemtag \NC q \NC r \NC \NR
+%D \NC p \NC \itemtag \NC q \NC r \NC \NR
+%D \NC p \NC \itemtag \NC q \NC r \NC \NR
+%D \NC p \NC \itemtag \NC q \NC r \NC \NR
+%D \stoptabulate
+%D \stopitemize
+%D
+%D \startitemize[n]
+%D \starttabulate[|||||]
+%D \NI b \NC c \NC d \NC \NR
+%D \NC a \NI c \NC d \NC \NR
+%D \NC a \NC b \NI d \NC \NR
+%D \NC a \NC b \NC c \NI \NR
+%D \stoptabulate
+%D \stopitemize
+%D \stopbuffer
+%D
+%D \typebuffer \getbuffer
+
+\let\NI\relax
+
+\unexpanded\def\tabl_tabulate_NI_first{\doifelsefastoptionalcheck\tabl_tbl_NI_yes\tabl_tbl_NI_nop}
+
+\def\tabl_tbl_NI_yes[#1]{\NC \itemtag[#1]\NC}
+\def\tabl_tbl_NI_nop {\NC \itemtag \NC}
+
%D The following shortcut is handy for tables where one needs bold headers:
%unexpanded\def\tabl_tabulate_BC_first{\NC\let\fontstyle\globalfontstyle\bf}
-\unexpanded\def\tabl_tabulate_BC_first{\tabl_tabulate_column_normal\zerocount\let\fontstyle\globalfontstyle\bf}
+
+\unexpanded\def\tabl_tabulate_BC_first
+ {\tabl_tabulate_column_normal\plusone\zerocount
+ \let\fontstyle\globalfontstyle
+ \bf}
\appendtoks
- \let\VL\tabl_tabulate_VL_first
- \let\NC\tabl_tabulate_NC_first
- \let\BC\tabl_tabulate_BC_first
- \let\RC\tabl_tabulate_RC_first
- \let\HC\tabl_tabulate_HC_first
- \let\EQ\tabl_tabulate_EQ_first
- \let\RQ\tabl_tabulate_RQ_first
- \let\HQ\tabl_tabulate_HQ_first
- \let\NG\tabl_tabulate_NG_first
- \let\NN\tabl_tabulate_NN_first
- \let\ND\tabl_tabulate_ND_first
- \let\NR\tabl_tabulate_NR_first
- \let\NB\tabl_tabulate_NB_first
- \let\CC\tabl_tabulate_CC_first
- \let\CL\tabl_tabulate_CL_first
- \let\CM\tabl_tabulate_CM_first
- \let\CR\tabl_tabulate_CR_first
+ \let\VL\tabl_tabulate_VL_first
+ \let\NC\tabl_tabulate_NC_first
+ \let\BC\tabl_tabulate_BC_first
+ \let\RC\tabl_tabulate_RC_first
+ \let\HC\tabl_tabulate_HC_first
+ \let\EQ\tabl_tabulate_EQ_first
+ \let\RQ\tabl_tabulate_RQ_first
+ \let\HQ\tabl_tabulate_HQ_first
+ \let\NG\tabl_tabulate_NG_first
+ \let\NN\tabl_tabulate_NN_first
+ \let\ND\tabl_tabulate_ND_first
+ \let\NR\tabl_tabulate_NR_first
+ \let\NB\tabl_tabulate_NB_first
+ \let\CC\tabl_tabulate_CC_first
+ \let\CL\tabl_tabulate_CL_first
+ \let\CM\tabl_tabulate_CM_first
+ \let\CR\tabl_tabulate_CR_first
+ \let\NI\tabl_tabulate_NI_first
\to \t_tabl_tabulate_initializers_first
\appendtoks
- \let\NR\tabl_tabulate_NR_second
- \let\NB\tabl_tabulate_NB_second
+ \let\NR\tabl_tabulate_NR_second
+ \let\NB\tabl_tabulate_NB_second
\to \t_tabl_tabulate_initializers_second
\appendtoks
- \let\SR\NR
- \let\FR\NR
- \let\MR\NR
- \let\LR\NR
- \let\AR\NR
+ \let\SR\NR
+ \let\FR\NR
+ \let\MR\NR
+ \let\LR\NR
+ \let\AR\NR
\to \t_tabl_tabulate_initializers_first
\unexpanded\def\tabl_tabulate_NR_common#1#2%
@@ -2589,11 +2683,21 @@
%D \stopwhatever
%D \stoptyping
-%D This is needed because we soemtimes use the english command in
+%D This is needed because we sometimes use the english command in
%D tracing macros. In fact, most detailed tracing macros that
%D are done with \LUA\ only work in the english interface anyway.
-\definetabulate[tabulate] \setuptabulate[tabulate][\c!format=\v!none] % so no \v! here
+% \definetabulate[tabulate] \setuptabulate[tabulate][\c!format=\v!none] % so no \v! here
+
+\newconditional\c_tabl_generic
+
+\unexpanded\setuvalue{starttabulate}%
+ {\bgroup % whole thing
+ \settrue\c_tabl_generic
+ \let\currenttabulationparent\empty
+ \dodoubleempty\tabl_start_regular}
+
+\letvalue{stoptabulate}\relax
%D The following helpers are just there because we also have them at the \LUA\ end:
%D
diff --git a/tex/context/base/mkiv/tabl-tsp.mkiv b/tex/context/base/mkiv/tabl-tsp.mkiv
index eadcda16c..2c4b694b3 100644
--- a/tex/context/base/mkiv/tabl-tsp.mkiv
+++ b/tex/context/base/mkiv/tabl-tsp.mkiv
@@ -188,7 +188,7 @@
\forcelocalfloats
\setuplocalfloats[\c!before=,\c!after=,\c!inbetween=]%
\splitfloatcommand{\hbox to #1{\strut}}% dummy line
- \setbox\scratchbox\vbox{\flushlocalfloats}%
+ \setbox\scratchbox\vbox{\flushlocalfloats}% \vpack ?
\getnoflines{\ht\scratchbox}%
\resetlocalfloats
\advance\noflines\minusone % compensate dummy line
@@ -307,7 +307,7 @@
{\setbox\scratchbox\vsplit\tsplitcontent to \onepoint % \lineheight
\setbox\scratchbox\vbox % \vpack
{\unvbox\scratchbox
- \setbox\scratchbox\vbox
+ \setbox\scratchbox\vbox % \vpack
{\splitdiscards
\ifnum\lastpenalty>-\plustenthousand\else
% so that \bTR[before=\page] works
@@ -328,7 +328,7 @@
\ifvoid\tsplitcontent \exitloop \fi
\else\ifconditional\c_tabl_split_head
% we only have a tablehead so far
- \setbox\tsplitresult\vbox{\unvbox\tsplitresult\unvbox\scratchbox}%
+ \setbox\tsplitresult\vbox{\unvbox\tsplitresult\unvbox\scratchbox}% \vpack
\exitloop
\else\ifconditional\c_tabl_split_full
% we have text height available, but the (one) cell is too
diff --git a/tex/context/base/mkiv/tabl-xtb.lua b/tex/context/base/mkiv/tabl-xtb.lua
index dade345fc..524ca109c 100644
--- a/tex/context/base/mkiv/tabl-xtb.lua
+++ b/tex/context/base/mkiv/tabl-xtb.lua
@@ -65,11 +65,10 @@ local tonode = nuts.tonode
local getnext = nuts.getnext
local getprev = nuts.getprev
local getlist = nuts.getlist
-local getfield = nuts.getfield
+local getwidth = nuts.getwidth
local getbox = nuts.getbox
local getwhd = nuts.getwhd
-local setfield = nuts.setfield
local setlink = nuts.setlink
local setdir = nuts.setdir
local setshift = nuts.setshift
@@ -129,7 +128,7 @@ function xtables.create(settings)
local widths = { }
local heights = { }
local depths = { }
- local spans = { }
+ -- local spans = { }
local distances = { }
local autowidths = { }
local modes = { }
@@ -144,7 +143,7 @@ function xtables.create(settings)
widths = widths,
heights = heights,
depths = depths,
- spans = spans,
+ -- spans = spans,
distances = distances,
modes = modes,
autowidths = autowidths,
@@ -251,9 +250,24 @@ function xtables.set_reflow_width()
local c = data.currentcolumn
local rows = data.rows
local row = rows[r]
+ local cold = c
while row[c].span do -- can also be previous row ones
c = c + 1
end
+ -- bah, we can have a span already
+ if c > cold then
+ local ro = row[cold]
+ local rx = ro.nx
+ local ry = ro.ny
+ if rx > 1 or ry > 1 then
+ local rn = row[c]
+ rn.nx = rx
+ rn.ny = ry
+ ro.nx = 1 -- or 0
+ ro.ny = 1 -- or 0
+ -- do we also need to set ro.span and rn.span
+ end
+ end
local tb = getbox("b_tabl_x")
local drc = row[c]
--
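The added block above covers the case where the slot a cell starts in is already claimed by a span from an earlier row: any pending nx/ny of the original slot is handed over to the first free column. A minimal standalone sketch of that relocation, using plain Lua tables instead of the real row records (field and function names here are illustrative only):

-- Minimal sketch: move a pending nx/ny span from the column where a cell
-- started (cold) to the first column that is not already claimed by a span.
local function relocatespan(row, cold)
    local c = cold
    while row[c].span do          -- skip columns claimed by earlier spans
        c = c + 1
    end
    if c > cold then
        local ro = row[cold]
        local rx, ry = ro.nx, ro.ny
        if rx > 1 or ry > 1 then  -- the original slot carried a span
            local rn = row[c]
            rn.nx, rn.ny = rx, ry -- the free column now owns the span
            ro.nx, ro.ny = 1, 1   -- the old slot becomes a simple cell
        end
    end
    return c
end

-- usage: column 1 is blocked by a span coming from the row above
local row = {
    { span = true, nx = 2, ny = 1 },
    { nx = 1, ny = 1 },
}
print(relocatespan(row, 1)) -- 2, and row[2] now carries nx = 2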
@@ -291,6 +305,8 @@ function xtables.set_reflow_width()
-- end
-- end
if drc.ny < 2 then
+ -- report_xtable("set width, old: ht=%p, dp=%p",heights[r],depths[r])
+ -- report_xtable("set width, new: ht=%p, dp=%p",height,depth)
if height > heights[r] then
heights[r] = height
end
@@ -307,11 +323,11 @@ function xtables.set_reflow_width()
local fixedcolumns = data.fixedcolumns
local fixedrows = data.fixedrows
if dimensionstate == 1 then
- if cspan > 1 then
- -- ignore width
- elseif width > fixedcolumns[c] then -- how about a span here?
- fixedcolumns[c] = width
- end
+ if cspan > 1 then
+ -- ignore width
+ elseif width > fixedcolumns[c] then -- how about a span here?
+ fixedcolumns[c] = width
+ end
elseif dimensionstate == 2 then
fixedrows[r] = height
elseif dimensionstate == 3 then
@@ -361,7 +377,7 @@ function xtables.set_reflow_width()
--
local nx, ny = drc.nx, drc.ny
if nx > 1 or ny > 1 then
- local spans = data.spans
+ -- local spans = data.spans -- not used
local self = true
for y=1,ny do
for x=1,nx do
@@ -370,9 +386,9 @@ function xtables.set_reflow_width()
else
local ry = r + y - 1
local cx = c + x - 1
- if y > 1 then
- spans[ry] = true
- end
+ -- if y > 1 then
+ -- spans[ry] = true -- not used
+ -- end
rows[ry][cx].span = true
end
end
@@ -433,13 +449,17 @@ function xtables.set_reflow_height()
--
if drc.ny < 2 then
if data.fixedrows[r] == 0 then -- and drc.dimensionstate < 2
- local heights = data.heights
- local depths = data.depths
- if height > heights[r] then
- heights[r] = height
- end
- if depth > depths[r] then
- depths[r] = depth
+ if drc.ht + drc.dp <= height + depth then -- new per 2017-12-15
+ local heights = data.heights
+ local depths = data.depths
+ -- report_xtable("set height, old: ht=%p, dp=%p",heights[r],depths[r])
+ -- report_xtable("set height, new: ht=%p, dp=%p",height,depth)
+ if height > heights[r] then
+ heights[r] = height
+ end
+ if depth > depths[r] then
+ depths[r] = depth
+ end
end
end
end
@@ -472,7 +492,7 @@ function xtables.initialize_construct()
--
local width = widths[c]
local height = heights[r]
- local depth = depths[r]
+ local depth = depths[r] -- problem: can be the depth of a one liner
--
for x=1,drc.nx-1 do
width = width + widths[c+x]
@@ -519,7 +539,6 @@ function xtables.reflow_width()
local nofrows = data.nofrows
local nofcolumns = data.nofcolumns
local rows = data.rows
--- inspect(rows)
for r=1,nofrows do
local row = rows[r]
for c=1,nofcolumns do
@@ -551,8 +570,7 @@ function xtables.reflow_width()
showwidths("stage 1",widths,autowidths)
end
local noffrozen = 0
--- here we can also check spans
- -- inspect(data.fixedcspans)
+ -- here we can also check spans
if options[v_max] then
for c=1,nofcolumns do
width = width + widths[c]
@@ -677,8 +695,6 @@ function xtables.reflow_width()
--
data.currentrow = 0
data.currentcolumn = 0
- --
--- inspect(data)
end
function xtables.reflow_height()
@@ -739,8 +755,6 @@ function xtables.reflow_height()
end
end
end
- --
--- inspect(data)
end
local function showspans(data)
@@ -770,7 +784,7 @@ function xtables.construct()
local heights = data.heights
local depths = data.depths
local widths = data.widths
- local spans = data.spans
+ -- local spans = data.spans
local distances = data.distances
local modes = data.modes
local settings = data.settings
@@ -909,7 +923,7 @@ function xtables.construct()
texsetdimen("global","d_tabl_x_final_width",0)
else
texsetcount("global","c_tabl_x_state",1)
- texsetdimen("global","d_tabl_x_final_width",getfield(body[1][1],"width"))
+ texsetdimen("global","d_tabl_x_final_width",getwidth(body[1][1]))
end
end
@@ -1172,7 +1186,6 @@ function xtables.cleanup()
-- end
-- end
-- data.result = nil
- -- inspect(data)
data = table.remove(stack)
end
diff --git a/tex/context/base/mkiv/tabl-xtb.mkvi b/tex/context/base/mkiv/tabl-xtb.mkvi
index f7d682631..dc4a30e2b 100644
--- a/tex/context/base/mkiv/tabl-xtb.mkvi
+++ b/tex/context/base/mkiv/tabl-xtb.mkvi
@@ -15,7 +15,7 @@
\writestatus{loading}{ConTeXt Table Macros / Xtreme}
-\registerctxluafile{tabl-xtb}{1.001}
+\registerctxluafile{tabl-xtb}{}
% todo:
%
@@ -140,8 +140,12 @@
\c!autostrut=\v!no,
\c!split=\v!auto, % a number will take that many lines
\c!splitoffset=\zeropoint, % extra space taken
- % \c!aligncharacter=\v!no,
- % \c!alignmentcharacter={,},
+ \c!aligncharacter=\v!no,
+ \c!alignmentcharacter={,},
+ \c!alignmentleftsample=,
+ \c!alignmentrightsample=,
+ \c!alignmentleftwidth=\zeropoint,
+ \c!alignmentrightwidth=\zeropoint,
% \c!option=, % \v!stretch {\v!stretch,\v!width}
% \c!footer=,
% \c!header=,
@@ -335,6 +339,7 @@
\clf_x_table_cleanup
\dostoptagged
\resetbuffer[\tabl_x_current_buffer]%
+ \resetcharacteralign
\egroup}
% text flow split modes
@@ -480,8 +485,50 @@
{\tabl_x_start_cell_nop
\tabl_x_stop_cell}
+% \unexpanded\def\tabl_x_begin_of_cell
+% {%\inhibitblank % already in framed
+% \everypar{\delayedbegstrut}}
+
+\def\tabl_x_setup_character_align
+ {\edef\p_left {\directxtableparameter\c!alignmentleftsample}%
+ \edef\p_right{\directxtableparameter\c!alignmentrightsample}%
+ \ifx\p_left\empty
+ \scratchdimenone\dimexpr\directxtableparameter\c!alignmentleftwidth\relax
+ \else
+ \setbox\scratchbox\hbox{\p_left}%
+ \scratchdimenone\wd\scratchbox
+ \fi
+ \ifx\p_right\empty
+ \scratchdimentwo\dimexpr\directxtableparameter\c!alignmentrightwidth\relax
+ \else
+ \setbox\scratchbox\hbox{\p_right}%
+ \scratchdimentwo\wd\scratchbox
+ \fi
+ \clf_setcharacteraligndetail
+ \clf_x_table_c
+ {\directxtableparameter\c!alignmentcharacter}%
+ \scratchdimenone
+ \scratchdimentwo
+ \relax}
+
+\newtoks\t_tabl_x_every_cell
+
+% \appendtoks
+% \inhibitblank % already in framed
+% \to \t_tabl_x_every_cell
+
+\appendtoks
+ \edef\p_characteralign{\directxtableparameter\c!aligncharacter}%
+ \ifx\p_characteralign\v!yes
+ \ifcase\clf_x_table_r\or
+ \tabl_x_setup_character_align
+ \fi
+ \signalcharacteralign\clf_x_table_c\clf_x_table_r
+ \fi
+\to \t_tabl_x_every_cell
+
\unexpanded\def\tabl_x_begin_of_cell
- {%\inhibitblank % already in framed
+ {\the\t_tabl_x_every_cell
\everypar{\delayedbegstrut}}
\unexpanded\def\tabl_x_end_of_cell
@@ -519,7 +566,7 @@
\let\tabl_x_set_hsize\relax
\unexpanded\def\tabl_x_start_cell_reflow_width_yes[#settings]%
- {\setbox\b_tabl_x\hbox\bgroup
+ {\setbox\b_tabl_x\hpack\bgroup
\ifnum\c_tabl_x_nesting>\plusone
\letxtableparameter\c!width \v!fit % overloads given width
\letxtableparameter\c!height\v!fit % overloads given height
@@ -548,7 +595,7 @@
\tabl_x_set_hsize}
\unexpanded\def\tabl_x_start_cell_reflow_width_nop
- {\setbox\b_tabl_x\hbox\bgroup
+ {\setbox\b_tabl_x\hpack\bgroup
\ifnum\c_tabl_x_nesting>\plusone
\letxtableparameter\c!width \v!fit % overloads given width
\letxtableparameter\c!height\v!fit % overloads given height
@@ -568,7 +615,7 @@
\clf_x_table_set_reflow_width}
\unexpanded\def\tabl_x_start_cell_reflow_height_yes[#settings]%
- {\setbox\b_tabl_x\hbox\bgroup
+ {\setbox\b_tabl_x\hpack\bgroup
\clf_x_table_init_reflow_height
\ifcase\c_tabl_x_skip_mode % can be sped up
\ifnum\c_tabl_x_nesting>\plusone
@@ -582,7 +629,7 @@
\fi}
\unexpanded\def\tabl_x_start_cell_reflow_height_nop
- {\setbox\b_tabl_x\hbox\bgroup
+ {\setbox\b_tabl_x\hpack\bgroup
\clf_x_table_init_reflow_height
\ifcase\c_tabl_x_skip_mode % can be sped up
\ifnum\c_tabl_x_nesting>\plusone
@@ -604,7 +651,7 @@
\unexpanded\def\tabl_x_start_cell_construct_yes[#settings]%
{\dostarttagged\t!tablecell\empty % can't we just tag the box
- \setbox\b_tabl_x\hbox\bgroup
+ \setbox\b_tabl_x\hpack\bgroup
\setupcurrentxtable[#settings]%
\letxtableparameter\c!width \d_tabl_x_width % overloads given width
\letxtableparameter\c!height\d_tabl_x_height % overloads given height
@@ -615,7 +662,7 @@
\unexpanded\def\tabl_x_start_cell_construct_nop
{\dostarttagged\t!tablecell\empty % can't we just tag the box
- \setbox\b_tabl_x\hbox\bgroup
+ \setbox\b_tabl_x\hpack\bgroup
\letxtableparameter\c!width \d_tabl_x_width % overloads given width
\letxtableparameter\c!height\d_tabl_x_height % overloads given height (commenting it ... nice option)
\clf_x_table_init_construct
diff --git a/tex/context/base/mkiv/task-ini.lua b/tex/context/base/mkiv/task-ini.lua
index d0c00f5c8..f41fb9b08 100644
--- a/tex/context/base/mkiv/task-ini.lua
+++ b/tex/context/base/mkiv/task-ini.lua
@@ -25,6 +25,8 @@ local enableaction = tasks.enableaction
local freezegroup = tasks.freezegroup
local freezecallbacks = callbacks.freeze
+appendaction("processors", "normalizers", "languages.replacements.handler") -- disabled
+
appendaction("processors", "normalizers", "typesetters.wrappers.handler") -- disabled
appendaction("processors", "normalizers", "typesetters.characters.handler") -- always on
appendaction("processors", "normalizers", "fonts.collections.process") -- disabled
@@ -38,22 +40,19 @@ appendaction("processors", "characters", "typesetters.cases.handler")
appendaction("processors", "characters", "typesetters.breakpoints.handler") -- disabled
appendaction("processors", "characters", "scripts.injectors.handler") -- disabled
-appendaction("processors", "words", "languages.replacements.handler") -- disabled
-
-appendaction("processors", "words", "languages.hyphenators.handler") -- always on
-
+------------("processors", "words", "languages.replacements.handler") -- disabled
appendaction("processors", "words", "languages.words.check") -- disabled -- might move up, no disc check needed then
-
+appendaction("processors", "words", "languages.hyphenators.handler") -- always on
appendaction("processors", "words", "typesetters.initials.handler") -- disabled -- might move up
-appendaction("processors", "words", "typesetters.firstlines.handler") -- disabled -- might move down
+appendaction("processors", "words", "typesetters.firstlines.handler") -- disabled
appendaction("processors", "fonts", "builders.paragraphs.solutions.splitters.split") -- experimental
appendaction("processors", "fonts", "nodes.handlers.characters") -- maybe todo
appendaction("processors", "fonts", "nodes.injections.handler")
appendaction("processors", "fonts", "typesetters.fontkerns.handler")
appendaction("processors", "fonts", "nodes.handlers.protectglyphs", nil, "nohead") -- maybe todo
-appendaction("processors", "fonts", "builders.kernel.ligaturing") -- always on (could be selective: if only node mode)
-appendaction("processors", "fonts", "builders.kernel.kerning") -- always on (could be selective: if only node mode)
+appendaction("processors", "fonts", "builders.kernel.ligaturing") -- not always on (could be selective: if only node mode)
+appendaction("processors", "fonts", "builders.kernel.kerning") -- not always on (could be selective: if only node mode)
appendaction("processors", "fonts", "nodes.handlers.stripping") -- disabled (might move)
------------("processors", "fonts", "typesetters.italics.handler") -- disabled (after otf/kern handling)
appendaction("processors", "fonts", "nodes.handlers.flatten")
@@ -66,13 +65,13 @@ appendaction("processors", "lists", "typesetters.digits.handler")
appendaction("processors", "lists", "typesetters.italics.handler") -- disabled (after otf/kern handling)
appendaction("processors", "lists", "languages.visualizediscretionaries") -- disabled
-appendaction ("processors", "after", "typesetters.marksuspects")
+appendaction("processors", "after", "typesetters.marksuspects")
appendaction("shipouts", "normalizers", "typesetters.showsuspects")
appendaction("shipouts", "normalizers", "typesetters.margins.finalhandler") -- disabled
------------("shipouts", "normalizers", "nodes.handlers.cleanuppage") -- disabled
appendaction("shipouts", "normalizers", "builders.paragraphs.expansion.trace") -- disabled
-appendaction("shipouts", "normalizers", "typesetters.alignments.handler")
+appendaction("shipouts", "normalizers", "typesetters.alignments.handler") -- disabled
appendaction("shipouts", "normalizers", "nodes.references.handler") -- disabled
appendaction("shipouts", "normalizers", "nodes.destinations.handler") -- disabled
appendaction("shipouts", "normalizers", "nodes.rules.handler") -- disabled
@@ -102,7 +101,8 @@ appendaction("math", "normalizers", "noads.handlers.relocate", nil, "no
appendaction("math", "normalizers", "noads.handlers.families", nil, "nohead") -- always on
appendaction("math", "normalizers", "noads.handlers.render", nil, "nohead") -- always on
-appendaction("math", "normalizers", "noads.handlers.collapse", nil, "nohead") -- * first-- always on
+appendaction("math", "normalizers", "noads.handlers.collapse", nil, "nohead") -- disabled
+appendaction("math", "normalizers", "noads.handlers.fixscripts",nil, "nohead") -- * first-- always on
appendaction("math", "normalizers", "noads.handlers.domains", nil, "nohead") -- * last -- disabled
appendaction("math", "normalizers", "noads.handlers.autofences",nil, "nohead") -- disabled
appendaction("math", "normalizers", "noads.handlers.resize", nil, "nohead") -- always on
@@ -117,6 +117,7 @@ appendaction("math", "builders", "builders.kernel.mlist_to_hlist")
------------("math", "builders", "noads.handlers.italics", nil, "nohead") -- disabled
appendaction("math", "builders", "typesetters.directions.processmath") -- disabled (has to happen pretty late)
appendaction("math", "builders", "noads.handlers.makeup", nil, "nohead") -- disabled (has to happen last)
+appendaction("math", "builders", "noads.handlers.align", nil, "nohead")
appendaction("finalizers", "lists", "typesetters.paragraphs.normalize") -- moved here
appendaction("finalizers", "lists", "typesetters.margins.localhandler") -- disabled
@@ -219,6 +220,7 @@ disableaction("contributers","nodes.handlers.textbackgrounds")
disableaction("math", "noads.handlers.showtree")
disableaction("math", "noads.handlers.tags")
disableaction("math", "noads.handlers.italics")
+disableaction("math", "noads.handlers.collapse")
disableaction("math", "noads.handlers.kernpairs")
disableaction("math", "noads.handlers.domains")
disableaction("math", "noads.handlers.classes")
diff --git a/tex/context/base/mkiv/task-ini.mkiv b/tex/context/base/mkiv/task-ini.mkiv
index 6cdbe739f..77e4fa985 100644
--- a/tex/context/base/mkiv/task-ini.mkiv
+++ b/tex/context/base/mkiv/task-ini.mkiv
@@ -17,6 +17,6 @@
 %D Maybe we will make things configurable (speed up and such).
-\registerctxluafile{task-ini}{1.001}
+\registerctxluafile{task-ini}{}
\protect \endinput
diff --git a/tex/context/base/mkiv/toks-ini.lua b/tex/context/base/mkiv/toks-ini.lua
index 0ce7b4836..8f95f6692 100644
--- a/tex/context/base/mkiv/toks-ini.lua
+++ b/tex/context/base/mkiv/toks-ini.lua
@@ -9,12 +9,14 @@ tokens = tokens or { }
local tokens = tokens
local token = token -- the built in one
+local next = next
local tonumber = tonumber
local tostring = tostring
local utfchar = utf.char
local char = string.char
local printtable = table.print
local concat = table.concat
+local format = string.format
if setinspector then
@@ -54,40 +56,34 @@ if setinspector then
end
-local scan_toks = token.scan_toks
-local scan_string = token.scan_string
-local scan_int = token.scan_int
-local scan_code = token.scan_code
-local scan_dimen = token.scan_dimen
-local scan_glue = token.scan_glue
-local scan_keyword = token.scan_keyword
-local scan_token = token.scan_token
-local scan_word = token.scan_word
-local scan_number = token.scan_number
-local scan_csname = token.scan_csname
-
-local get_next = token.get_next
-
-if not token.get_macro then
- local scantoks = tex.scantoks
- local gettoks = tex.gettoks
- function token.get_meaning(name)
- scantoks("t_get_macro",tex.ctxcatcodes,"\\"..name)
- return gettoks("t_get_macro")
- end
- function token.get_macro(name)
- scantoks("t_get_macro",tex.ctxcatcodes,"\\"..name)
- local s = gettoks("t_get_macro")
- return match(s,"^.-%->(.*)$") or s
- end
+local scan_toks = token.scan_toks
+local scan_string = token.scan_string
+local scan_int = token.scan_int
+local scan_code = token.scan_code
+local scan_dimen = token.scan_dimen
+local scan_glue = token.scan_glue
+local scan_keyword = token.scan_keyword
+local scan_keyword_cs = token.scan_keyword_cs or scan_keyword
+local scan_token = token.scan_token
+local scan_word = token.scan_word
+local scan_number = token.scan_number
+local scan_csname = token.scan_csname
+
+local get_next = token.get_next
+
+local set_macro = token.set_macro
+local get_macro = token.get_macro
+local get_meaning = token.get_meaning
+local get_cmdname = token.get_cmdname
+local set_char = token.set_char
+local create_token = token.create
+
+if not set_char then -- for a while
+ local contextsprint = context.sprint
+ local ctxcatcodes = catcodes.numbers.ctxcatcodes
+ set_char = function(n,u) contextsprint(ctxcatcodes,format("\\chardef\\%s=%s",n,u)) end
end
-local set_macro = token.set_macro
-local get_macro = token.get_macro
-local get_meaning = token.get_meaning
-local get_cmdname = token.get_cmdname
-local create_token = token.create
-
function tokens.defined(name)
return get_cmdname(create_token(name)) ~= "undefined_cs"
end
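The set_char fallback above simply prints a \chardef assignment through the ConTeXt catcode regime when the engine does not yet provide token.set_char. What it produces, with context.sprint replaced by print so the sketch runs standalone:

local format = string.format

-- stand-in for the fallback: the real code goes through context.sprint
-- with ctxcatcodes instead of writing to the terminal
local function set_char(name, unicode)
    print(format("\\chardef\\%s=%s", name, unicode))
end

set_char("mychar", 0x00E9) -- prints: \chardef\mychar=233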
@@ -101,28 +97,28 @@ end
-- end
local bits = {
- escape = 2^ 0,
- begingroup = 2^ 1,
- endgroup = 2^ 2,
- mathshift = 2^ 3,
- alignment = 2^ 4,
- endofline = 2^ 5,
- parameter = 2^ 6,
- superscript = 2^ 7,
- subscript = 2^ 8,
- ignore = 2^ 9,
- space = 2^10, -- 1024
- letter = 2^11,
- other = 2^12,
- active = 2^13,
- comment = 2^14,
- invalid = 2^15,
+ escape = 0x00000001, -- 2^00
+ begingroup = 0x00000002, -- 2^01
+ endgroup = 0x00000004, -- 2^02
+ mathshift = 0x00000008, -- 2^03
+ alignment = 0x00000010, -- 2^04
+ endofline = 0x00000020, -- 2^05
+ parameter = 0x00000040, -- 2^06
+ superscript = 0x00000080, -- 2^07
+ subscript = 0x00000100, -- 2^08
+ ignore = 0x00000200, -- 2^09
+ space = 0x00000400, -- 2^10 -- 1024
+ letter = 0x00000800, -- 2^11
+ other = 0x00001000, -- 2^12
+ active = 0x00002000, -- 2^13
+ comment = 0x00004000, -- 2^14
+ invalid = 0x00008000, -- 2^15
--
- character = 2^11 + 2^12,
- whitespace = 2^13 + 2^10, -- / needs more checking
+ character = 0x00001800, -- 2^11 + 2^12
+ whitespace = 0x00002400, -- 2^13 + 2^10 -- / needs more checking
--
- open = 2^10 + 2^1, -- space + begingroup
- close = 2^10 + 2^2, -- space + endgroup
+ open = 0x00000402, -- 2^10 + 2^01 -- space + begingroup
+ close = 0x00000404, -- 2^10 + 2^02 -- space + endgroup
}
-- for k, v in next, bits do bits[v] = k end
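The catcode bit table above is only rewritten in hexadecimal; the values themselves do not change. A quick check that the composite entries still equal the powers of two they replace:

assert(0x00001800 == 2^11 + 2^12) -- character  = letter + other
assert(0x00002400 == 2^13 + 2^10) -- whitespace = active + space
assert(0x00000402 == 2^10 + 2^1)  -- open       = space  + begingroup
assert(0x00000404 == 2^10 + 2^2)  -- close      = space  + endgroup
print("bit values unchanged")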
@@ -250,6 +246,7 @@ tokens.scanners = { -- these expand
number = scan_number,
boolean = scan_boolean,
keyword = scan_keyword,
+ keywordcs = scan_keyword_cs,
csname = scan_csname,
}
@@ -268,6 +265,7 @@ tokens.getters = { -- these don't expand
tokens.setters = {
macro = set_macro,
+ char = set_char,
count = tex.setcount,
dimen = tex.setdimen,
skip = tex.setglue,
diff --git a/tex/context/base/mkiv/toks-ini.mkiv b/tex/context/base/mkiv/toks-ini.mkiv
index aaa735207..9d3375432 100644
--- a/tex/context/base/mkiv/toks-ini.mkiv
+++ b/tex/context/base/mkiv/toks-ini.mkiv
@@ -17,9 +17,9 @@
\newtoks\t_get_macro % will go away
-\registerctxluafile{toks-ini}{1.001}
-\registerctxluafile{toks-scn}{1.001}
-\registerctxluafile{cldf-scn}{1.001}
-\registerctxluafile{cldf-stp}{1.001}
+\registerctxluafile{toks-ini}{}
+\registerctxluafile{toks-scn}{}
+\registerctxluafile{cldf-scn}{}
+\registerctxluafile{cldf-stp}{}
\protect \endinput
diff --git a/tex/context/base/mkiv/toks-map.mkiv b/tex/context/base/mkiv/toks-map.mkiv
index f1b63a68b..829daa39d 100644
--- a/tex/context/base/mkiv/toks-map.mkiv
+++ b/tex/context/base/mkiv/toks-map.mkiv
@@ -13,7 +13,7 @@
% \writestatus{loading}{ConTeXt Token Support / Remapping}
%
-% \registerctxluafile{toks-map}{1.001}
+% \registerctxluafile{toks-map}{}
%
% \unprotect
%
diff --git a/tex/context/base/mkiv/toks-scn.lua b/tex/context/base/mkiv/toks-scn.lua
index 3c41eedd8..f3781d022 100644
--- a/tex/context/base/mkiv/toks-scn.lua
+++ b/tex/context/base/mkiv/toks-scn.lua
@@ -24,6 +24,7 @@ local scanstring = scanners.string
local scaninteger = scanners.integer
local scannumber = scanners.number
local scankeyword = scanners.keyword
+local scankeywordcs = scanners.keywordcs
local scanword = scanners.word
local scancode = scanners.code
local scanboolean = scanners.boolean
@@ -117,6 +118,7 @@ local shortcuts = {
scaninteger = scaninteger,
scannumber = scannumber,
scankeyword = scankeyword,
+ scankeywordcs = scankeywordcs,
scanword = scanword,
scancode = scancode,
scanboolean = scanboolean,
@@ -163,21 +165,21 @@ tokens.converters = {
-- that I then need to check the TeX end. More pain than gain and a bit
-- risky too.
-local f_if = formatters[ " if scankeyword('%s') then data['%s'] = scan%s()"]
-local f_elseif = formatters[" elseif scankeyword('%s') then data['%s'] = scan%s()"]
+local f_if = formatters[ " if scankeywordcs('%s') then data['%s'] = scan%s()"]
+local f_elseif = formatters[" elseif scankeywordcs('%s') then data['%s'] = scan%s()"]
----- f_if = formatters[" local key = scanword() if key == '' then break elseif key == '%s' then data['%s'] = scan%s()"]
----- f_elseif = formatters[" elseif key == '%s' then data['%s'] = scan%s()"]
------ f_if_x = formatters[ " if not data['%s'] and scankeyword('%s') then data['%s'] = scan%s()"]
------ f_elseif_x = formatters[" elseif not data['%s'] and scankeyword('%s') then data['%s'] = scan%s()"]
+----- f_if_x = formatters[ " if not data['%s'] and scankeywordcs('%s') then data['%s'] = scan%s()"]
+----- f_elseif_x = formatters[" elseif not data['%s'] and scankeywordcs('%s') then data['%s'] = scan%s()"]
local f_local = formatters["local scan%s = scanners.%s"]
local f_scan = formatters["scan%s()"]
local f_shortcut = formatters["local %s = scanners.converters.%s"]
-local f_if_c = formatters[ " if scankeyword('%s') then data['%s'] = %s(scan%s())"]
-local f_elseif_c = formatters[" elseif scankeyword('%s') then data['%s'] = %s(scan%s())"]
+local f_if_c = formatters[ " if scankeywordcs('%s') then data['%s'] = %s(scan%s())"]
+local f_elseif_c = formatters[" elseif scankeywordcs('%s') then data['%s'] = %s(scan%s())"]
local f_scan_c = formatters["%s(scan%s())"]
-- see above
diff --git a/tex/context/base/mkiv/toks-tra.mkiv b/tex/context/base/mkiv/toks-tra.mkiv
index 6186402a7..04f837f1c 100644
--- a/tex/context/base/mkiv/toks-tra.mkiv
+++ b/tex/context/base/mkiv/toks-tra.mkiv
@@ -13,7 +13,7 @@
\writestatus{loading}{ConTeXt Token Support / Tracing}
-\registerctxluafile{toks-tra}{1.001}
+\registerctxluafile{toks-tra}{}
\unprotect
diff --git a/tex/context/base/mkiv/trac-ctx.lua b/tex/context/base/mkiv/trac-ctx.lua
index 493ce7936..29159d130 100644
--- a/tex/context/base/mkiv/trac-ctx.lua
+++ b/tex/context/base/mkiv/trac-ctx.lua
@@ -6,6 +6,8 @@ if not modules then modules = { } end modules ['trac-ctx'] = {
license = "see context related readme files"
}
+local next = next
+
local context = context
local implement = interfaces.implement
local register = trackers.register
diff --git a/tex/context/base/mkiv/trac-ctx.mkiv b/tex/context/base/mkiv/trac-ctx.mkiv
index 4240281a6..66b5ef1ee 100644
--- a/tex/context/base/mkiv/trac-ctx.mkiv
+++ b/tex/context/base/mkiv/trac-ctx.mkiv
@@ -13,7 +13,7 @@
\writestatus{loading}{ConTeXt Tracing Macros / TeX Trackers}
-\registerctxluafile{trac-ctx}{1.001}
+\registerctxluafile{trac-ctx}{}
\unprotect
diff --git a/tex/context/base/mkiv/trac-deb.mkiv b/tex/context/base/mkiv/trac-deb.mkiv
index 1e61a3512..b5366d88c 100644
--- a/tex/context/base/mkiv/trac-deb.mkiv
+++ b/tex/context/base/mkiv/trac-deb.mkiv
@@ -15,8 +15,8 @@
\unprotect
-%registerctxluafile{trac-lmx}{1.001}
-\registerctxluafile{trac-deb}{1.001}
+%registerctxluafile{trac-lmx}{}
+\registerctxluafile{trac-deb}{}
\unexpanded\def\breakpoint {\showdebuginfo\wait}
diff --git a/tex/context/base/mkiv/trac-fil.lua b/tex/context/base/mkiv/trac-fil.lua
index 8cc903e2a..f422c9f6b 100644
--- a/tex/context/base/mkiv/trac-fil.lua
+++ b/tex/context/base/mkiv/trac-fil.lua
@@ -6,7 +6,7 @@ if not modules then modules = { } end modules ['trac-fil'] = {
license = "see context related readme files"
}
-local rawset, tonumber, type, pcall = rawset, tonumber, type, pcall
+local rawset, tonumber, type, pcall, next = rawset, tonumber, type, pcall, next
local format, concat = string.format, table.concat
local openfile = io.open
local date = os.date
diff --git a/tex/context/base/mkiv/trac-inf.lua b/tex/context/base/mkiv/trac-inf.lua
index f66485015..5810e2059 100644
--- a/tex/context/base/mkiv/trac-inf.lua
+++ b/tex/context/base/mkiv/trac-inf.lua
@@ -84,7 +84,7 @@ local seconds = function(n) return n or 0 end
local function starttiming(instance)
local timer = timers[instance or "notimer"]
- local it = timer.timing or 0
+ local it = timer.timing
if it == 0 then
timer.starttime = ticks()
if not timer.loadtime then
@@ -116,7 +116,7 @@ end
local function elapsed(instance)
if type(instance) == "number" then
- return instance or 0
+ return instance
else
local timer = timers[instance or "notimer"]
return timer and seconds(timer.loadtime) or 0
@@ -167,8 +167,12 @@ function statistics.show()
return format("%s, type: %s, binary subtree: %s",
os.platform or "unknown",os.type or "unknown", environment.texos or "unknown")
end)
- register("luatex banner", function()
- return lower(status.banner)
+ -- register("luatex banner", function()
+ -- return lower(status.banner)
+ -- end)
+ register("used engine", function()
+ return format("%s version %s with functionality level %s, banner: %s",
+ LUATEXENGINE, LUATEXVERSION, LUATEXFUNCTIONALITY, lower(status.banner))
end)
register("control sequences", function()
return format("%s of %s + %s", status.cs_count, status.hash_size,status.hash_extra)
@@ -192,8 +196,9 @@ function statistics.show()
local hashchar = tonumber(status.luatex_hashchars)
local hashtype = status.luatex_hashtype
local mask = lua.mask or "ascii"
- return format("engine: %s, used memory: %s, hash type: %s, hash chars: min(%s,40), symbol mask: %s (%s)",
+ return format("engine: %s %s, used memory: %s, hash type: %s, hash chars: min(%i,40), symbol mask: %s (%s)",
jit and "luajit" or "lua",
+ LUAVERSION,
statistics.memused(),
hashtype or "default",
hashchar and 2^hashchar or "unknown",
diff --git a/tex/context/base/mkiv/trac-jus.lua b/tex/context/base/mkiv/trac-jus.lua
index 6d00bf19e..e7a030257 100644
--- a/tex/context/base/mkiv/trac-jus.lua
+++ b/tex/context/base/mkiv/trac-jus.lua
@@ -18,7 +18,6 @@ local nuts = nodes.nuts
local tonut = nuts.tonut
local getfield = nuts.getfield
-local setfield = nuts.setfield
local getlist = nuts.getlist
local getattr = nuts.getattr
local setattr = nuts.setattr
diff --git a/tex/context/base/mkiv/trac-jus.mkiv b/tex/context/base/mkiv/trac-jus.mkiv
index 71d88b00e..f2e93eff7 100644
--- a/tex/context/base/mkiv/trac-jus.mkiv
+++ b/tex/context/base/mkiv/trac-jus.mkiv
@@ -13,7 +13,7 @@
\writestatus{loading}{ConTeXt Tracing Macros / Justification}
-\registerctxluafile{trac-jus}{1.001}
+\registerctxluafile{trac-jus}{}
\unprotect
diff --git a/tex/context/base/mkiv/trac-lmx.lua b/tex/context/base/mkiv/trac-lmx.lua
index 4f4ea62c4..c7f949618 100644
--- a/tex/context/base/mkiv/trac-lmx.lua
+++ b/tex/context/base/mkiv/trac-lmx.lua
@@ -695,6 +695,8 @@ if arg then
if arg[1] == "--show" then if arg[2] then lmx.show (arg[2]) end
elseif arg[1] == "--convert" then if arg[2] then lmx.convert(arg[2], arg[3] or "temp.html") end
end
+else
+ return lmx
end
-- Test 1:
diff --git a/tex/context/base/mkiv/trac-log.lua b/tex/context/base/mkiv/trac-log.lua
index b6bb123cf..c7b6f6d2d 100644
--- a/tex/context/base/mkiv/trac-log.lua
+++ b/tex/context/base/mkiv/trac-log.lua
@@ -20,8 +20,8 @@ local openfile = io.open
local setmetatableindex = table.setmetatableindex
local formatters = string.formatters
-
-local texgetcount = tex and tex.getcount
+local settings_to_hash = utilities.parsers.settings_to_hash
+local sortedkeys = table.sortedkeys
-- variant is set now
@@ -575,15 +575,15 @@ logs.newline = newline
-- todo: renew (un) locks when a new one is added and wildcard
-local data, states = { }, nil
+local data = { }
+local states = nil
+local force = false
function logs.reporter(category,subcategory)
local logger = data[category]
if not logger then
- local state = false
- if states == true then
- state = true
- elseif type(states) == "table" then
+ local state = states == true
+ if not state and type(states) == "table" then
for c, _ in next, states do
if find(category,c) then
state = true
@@ -593,7 +593,7 @@ function logs.reporter(category,subcategory)
end
logger = {
reporters = { },
- state = state,
+ state = state,
}
data[category] = logger
end
@@ -601,7 +601,7 @@ function logs.reporter(category,subcategory)
if not reporter then
if subcategory then
reporter = function(...)
- if not logger.state then
+ if force or not logger.state then
subreport(category,subcategory,...)
end
end
@@ -609,7 +609,7 @@ function logs.reporter(category,subcategory)
else
local tag = category
reporter = function(...)
- if not logger.state then
+ if force or not logger.state then
report(category,...)
end
end
@@ -645,7 +645,7 @@ end
-- so far
local function setblocked(category,value) -- v.state == value == true : disable
- if category == true then
+ if category == true or category == "all" then
-- lock all
category, value = "*", true
elseif category == false then
@@ -661,7 +661,8 @@ local function setblocked(category,value) -- v.state == value == true : disable
v.state = value
end
else
- states = utilities.parsers.settings_to_hash(category,type(states)=="table" and states or nil)
+ alllocked = false
+ states = settings_to_hash(category,type(states)=="table" and states or nil)
for c in next, states do
local v = data[c]
if v then
@@ -687,7 +688,7 @@ function logs.enable(category)
end
function logs.categories()
- return table.sortedkeys(data)
+ return sortedkeys(data)
end
function logs.show()
@@ -710,7 +711,7 @@ function logs.show()
max = m
end
end
- local subcategories = concat(table.sortedkeys(reporters),", ")
+ local subcategories = concat(sortedkeys(reporters),", ")
if state == true then
state = "disabled"
elseif state == false then
@@ -746,59 +747,64 @@ end)
-- tex specific loggers (might move elsewhere)
-local report_pages = logs.reporter("pages") -- not needed but saves checking when we grep for it
-
-local real, user, sub
+if tex then
-function logs.start_page_number()
- real = texgetcount("realpageno")
- user = texgetcount("userpageno")
- sub = texgetcount("subpageno")
-end
+ local report = logs.reporter("pages") -- not needed but saves checking when we grep for it
+ local texgetcount = tex and tex.getcount
-local timing = false
-local starttime = nil
-local lasttime = nil
+ local real, user, sub
-trackers.register("pages.timing", function(v) -- only for myself (diagnostics)
- starttime = os.clock()
- timing = true
-end)
+ function logs.start_page_number()
+ real = texgetcount("realpageno")
+ user = texgetcount("userpageno")
+ sub = texgetcount("subpageno")
+ end
-function logs.stop_page_number() -- the first page can includes the initialization so we omit this in average
- if timing then
- local elapsed, average
- local stoptime = os.clock()
- if not lasttime or real < 2 then
- elapsed = stoptime
- average = stoptime
- starttime = stoptime
- else
- elapsed = stoptime - lasttime
- average = (stoptime - starttime) / (real - 1)
- end
- lasttime = stoptime
- if real <= 0 then
- report_pages("flushing page, time %0.04f / %0.04f",elapsed,average)
- elseif user <= 0 then
- report_pages("flushing realpage %s, time %0.04f / %0.04f",real,elapsed,average)
- elseif sub <= 0 then
- report_pages("flushing realpage %s, userpage %s, time %0.04f / %0.04f",real,user,elapsed,average)
- else
- report_pages("flushing realpage %s, userpage %s, subpage %s, time %0.04f / %0.04f",real,user,sub,elapsed,average)
- end
- else
- if real <= 0 then
- report_pages("flushing page")
- elseif user <= 0 then
- report_pages("flushing realpage %s",real)
- elseif sub <= 0 then
- report_pages("flushing realpage %s, userpage %s",real,user)
+ local timing = false
+ local starttime = nil
+ local lasttime = nil
+
+ trackers.register("pages.timing", function(v) -- only for myself (diagnostics)
+ starttime = os.clock() -- todo: use other timer
+ timing = true
+ end)
+
+ function logs.stop_page_number() -- the first page can include the initialization so we omit it from the average
+ if timing then
+ local elapsed, average
+ local stoptime = os.clock()
+ if not lasttime or real < 2 then
+ elapsed = stoptime
+ average = stoptime
+ starttime = stoptime
+ else
+ elapsed = stoptime - lasttime
+ average = (stoptime - starttime) / (real - 1)
+ end
+ lasttime = stoptime
+ if real <= 0 then
+ report("flushing page, time %0.04f / %0.04f",elapsed,average)
+ elseif user <= 0 then
+ report("flushing realpage %s, time %0.04f / %0.04f",real,elapsed,average)
+ elseif sub <= 0 then
+ report("flushing realpage %s, userpage %s, time %0.04f / %0.04f",real,user,elapsed,average)
+ else
+ report("flushing realpage %s, userpage %s, subpage %s, time %0.04f / %0.04f",real,user,sub,elapsed,average)
+ end
else
- report_pages("flushing realpage %s, userpage %s, subpage %s",real,user,sub)
+ if real <= 0 then
+ report("flushing page")
+ elseif user <= 0 then
+ report("flushing realpage %s",real)
+ elseif sub <= 0 then
+ report("flushing realpage %s, userpage %s",real,user)
+ else
+ report("flushing realpage %s, userpage %s, subpage %s",real,user,sub)
+ end
end
+ logs.flush()
end
- logs.flush()
+
end
-- we don't have show_open and show_close callbacks yet
@@ -1039,3 +1045,104 @@ io.stderr:setvbuf('no')
if package.helpers.report then
package.helpers.report = logs.reporter("package loader") -- when used outside mtxrun
end
+
+if tex then
+
+ local finalactions = { }
+ local fatalerrors = { }
+ local possiblefatal = { }
+ local loggingerrors = false
+
+ function logs.loggingerrors()
+ return loggingerrors
+ end
+
+ directives.register("logs.errors",function(v)
+ loggingerrors = v
+ if type(v) == "string" then
+ fatalerrors = settings_to_hash(v)
+ else
+ fatalerrors = { }
+ end
+ end)
+
+ function logs.registerfinalactions(...)
+ insert(finalactions,...) -- so we can force an order if needed
+ end
+
+ local what = nil
+ local report = nil
+ local state = nil
+ local target = nil
+
+ local function startlogging(t,r,w,s)
+ target = t
+ state = force
+ force = true
+ report = type(r) == "function" and r or logs.reporter(r)
+ what = w
+ pushtarget(target)
+ newline()
+ if s then
+ report("start %s: %s",what,s)
+ else
+ report("start %s",what)
+ end
+ if target == "logfile" then
+ newline()
+ end
+ return report
+ end
+
+ local function stoplogging()
+ if target == "logfile" then
+ newline()
+ end
+ report("stop %s",what)
+ if target == "logfile" then
+ newline()
+ end
+ poptarget()
+ force = state
+ end
+
+ function logs.startfilelogging(...)
+ return startlogging("logfile", ...)
+ end
+
+ logs.stopfilelogging = stoplogging
+
+ local done = false
+
+ function logs.starterrorlogging(r,w,...)
+ if not done then
+ pushtarget("terminal")
+ newline()
+ logs.report("error logging","start possible issues")
+ poptarget()
+ done = true
+ end
+ if fatalerrors[w] then
+ possiblefatal[w] = true
+ end
+ return startlogging("terminal",r,w,...)
+ end
+
+ logs.stoperrorlogging = stoplogging
+
+ function logs.finalactions()
+ if #finalactions > 0 then
+ for i=1,#finalactions do
+ finalactions[i]()
+ end
+ if done then
+ pushtarget("terminal")
+ newline()
+ logs.report("error logging","stop possible issues")
+ poptarget()
+ end
+ return next(possiblefatal) and sortedkeys(possiblefatal) or false
+ end
+ end
+
+end
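The new block wires run-final reporting into the log: functions registered with logs.registerfinalactions are called at the end of the run, and logs.starterrorlogging forces its messages to the terminal even for blocked categories. A minimal usage sketch, assuming a ConTeXt run (the reporter and category names used here are just examples):

local report = logs.reporter("check")

logs.registerfinalactions(function()
    -- runs once at the end of the run, in registration order
    local r = logs.starterrorlogging(report, "references")
    r("there were %i unresolved references", 3)
    logs.stoperrorlogging()
end)

-- with the directive logs.errors=references enabled, this category is also
-- flagged as possibly fatal in the list returned by logs.finalactions()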
diff --git a/tex/context/base/mkiv/trac-pro.lua b/tex/context/base/mkiv/trac-pro.lua
index 897b6a15c..cbaae7f0c 100644
--- a/tex/context/base/mkiv/trac-pro.lua
+++ b/tex/context/base/mkiv/trac-pro.lua
@@ -6,7 +6,7 @@ if not modules then modules = { } end modules ['trac-pro'] = {
license = "see context related readme files"
}
-local getmetatable, setmetatable, rawset, type = getmetatable, setmetatable, rawset, type
+local getmetatable, setmetatable, rawset, type, next = getmetatable, setmetatable, rawset, type, next
-- The protection implemented here is probably not that tight but good enough to catch
-- problems due to naive usage.
diff --git a/tex/context/base/mkiv/trac-set.lua b/tex/context/base/mkiv/trac-set.lua
index d0047650f..530915fe0 100644
--- a/tex/context/base/mkiv/trac-set.lua
+++ b/tex/context/base/mkiv/trac-set.lua
@@ -8,8 +8,8 @@ if not modules then modules = { } end modules ['trac-set'] = { -- might become u
-- maybe this should be util-set.lua
-local type, next, tostring = type, next, tostring
-local concat = table.concat
+local type, next, tostring, tonumber = type, next, tostring, tonumber
+local concat, sortedhash = table.concat, table.sortedhash
local format, find, lower, gsub, topattern = string.format, string.find, string.lower, string.gsub, string.topattern
local is_boolean = string.is_boolean
local settings_to_hash = utilities.parsers.settings_to_hash
@@ -26,6 +26,8 @@ local data = { }
-- We can initialize from the cnf file. This is sort of tricky as
-- later defined setters also need to be initialized then. If set
-- this way, we need to ensure that they are not reset later on.
+--
+-- The sorting is needed to get predictable setters in case of *.
local trace_initialize = false -- only for testing during development
@@ -36,7 +38,7 @@ function setters.initialize(filename,name,values) -- filename only for diagnosti
-- trace_initialize = true
local data = setter.data
if data then
- for key, newvalue in next, values do
+ for key, newvalue in sortedhash(values) do
local newvalue = is_boolean(newvalue,newvalue,true) -- strict
local functions = data[key]
if functions then
@@ -91,7 +93,7 @@ local function set(t,what,newvalue)
done = { }
t.done = done
end
- for w, value in next, what do
+ for w, value in sortedhash(what) do
if value == "" then
value = newvalue
elseif not value then
@@ -100,7 +102,7 @@ local function set(t,what,newvalue)
value = is_boolean(value,value,true) -- strict
end
w = topattern(w,true,true)
- for name, functions in next, data do
+ for name, functions in sortedhash(data) do
if done[name] then
-- prevent recursion due to wildcards
elseif find(name,w) then
@@ -118,7 +120,7 @@ end
local function reset(t)
local data = t.data
if not data.frozen then
- for name, functions in next, data do
+ for name, functions in sortedhash(data) do
for i=1,#functions do
functions[i](false)
end
@@ -219,13 +221,30 @@ function setters.show(t)
local name = list[k]
local functions = t.data[name]
if functions then
- local value, default, modules = functions.value, functions.default, #functions
- value = value == nil and "unset" or tostring(value)
- default = default == nil and "unset" or tostring(default)
- t.report("%-50s modules: %2i default: %-12s value: %-12s",name,modules,default,value)
+ local value = functions.value
+ local default = functions.default
+ local modules = #functions
+ if default == nil then
+ default = "unset"
+ elseif type(default) == "table" then
+ default = concat(default,"|")
+ else
+ default = tostring(default)
+ end
+ if value == nil then
+ value = "unset"
+ elseif type(value) == "table" then
+ value = concat(value,"|")
+ else
+ value = tostring(value)
+ end
+ t.report(name)
+ t.report(" modules : %i",modules)
+ t.report(" default : %s",default)
+ t.report(" value : %s",value)
+ t.report()
end
end
- t.report()
end
-- we could have used a bit of oo and the trackers:enable syntax but
diff --git a/tex/context/base/mkiv/trac-tex.lua b/tex/context/base/mkiv/trac-tex.lua
index 66cdc2c91..66ff94721 100644
--- a/tex/context/base/mkiv/trac-tex.lua
+++ b/tex/context/base/mkiv/trac-tex.lua
@@ -16,10 +16,6 @@ local undefined = create("undefined").command
function trackers.savehash()
saved = texhashtokens()
- if type(saved[1]) == "table" then
- -- LUATEXVERSION < 1.002
- saved = table.tohash(saved)
- end
return saved
end
@@ -27,10 +23,6 @@ function trackers.dumphashtofile(filename,delta)
local list = { }
local hash = texhashtokens()
local create = token.create
- if type(hash[1]) == "table" then
- -- LUATEXVERSION < 1.002
- hash = table.sortedkeys(hash)
- end
for i=1,#hash do
local name = hash[i]
if not delta or not saved[name] then
@@ -84,7 +76,7 @@ local function saveusedfilesintrees(format)
jobname = environment.jobname or "?",
version = environment.version or "?",
kind = environment.kind or "?",
- files = resolvers.instance.foundintrees
+ files = resolvers.foundintrees()
}
local filename = file.replacesuffix(environment.jobname or "context-job",'jlg')
if format == "lua" then
diff --git a/tex/context/base/mkiv/trac-tex.mkiv b/tex/context/base/mkiv/trac-tex.mkiv
index 53c95ab01..4c2db68f3 100644
--- a/tex/context/base/mkiv/trac-tex.mkiv
+++ b/tex/context/base/mkiv/trac-tex.mkiv
@@ -13,7 +13,7 @@
\writestatus{loading}{ConTeXt Tracking Macros / TeX}
-\registerctxluafile{trac-tex}{1.001}
+\registerctxluafile{trac-tex}{}
%D All tracing flags at the \TEX\ end will be redone this way so
%D that we have a similar mechanism for \TEX\ and \LUA. Also, the
diff --git a/tex/context/base/mkiv/trac-vis.lua b/tex/context/base/mkiv/trac-vis.lua
index 5d98bc24f..0e37752db 100644
--- a/tex/context/base/mkiv/trac-vis.lua
+++ b/tex/context/base/mkiv/trac-vis.lua
@@ -6,28 +6,22 @@ if not modules then modules = { } end modules ['trac-vis'] = {
license = "see context related readme files"
}
-local string, number, table = string, number, table
local node, nodes, attributes, fonts, tex = node, nodes, attributes, fonts, tex
-local type = type
+local type, tonumber, next = type, tonumber, next
local gmatch = string.gmatch
local formatters = string.formatters
-
--- This module started out in the early days of mkiv and luatex with
--- visualizing kerns related to fonts. In the process of cleaning up the
--- visual debugger code it made sense to integrate some other code that
--- I had laying around and replace the old supp-vis debugging code. As
--- only a subset of the old visual debugger makes sense it has become a
--- different implementation. Soms of the m-visual functionality will also
--- be ported. The code is rather trivial. The caching is not really needed
--- but saves upto 50% of the time needed to add visualization. Of course
--- the overall runtime is larger because of color and layer processing in
--- the backend (can be times as much) so the runtime is somewhat larger
--- with full visualization enabled. In practice this will never happen
--- unless one is demoing.
-
--- We could use pdf literals and re stream codes but it's not worth the
--- trouble because we would end up in color etc mess. Maybe one day I'll
--- make a nodeinjection variant.
+local compactfloat = number.compactfloat
+
+-- This module started out in the early days of mkiv and luatex with visualizing
+-- kerns related to fonts. In the process of cleaning up the visual debugger code it
+-- made sense to integrate some other code that I had laying around and replace the
+-- old supp-vis debugging code. As only a subset of the old visual debugger makes
+-- sense it has become a different implementation. Some of the m-visual
+-- functionality will also be ported. The code is rather trivial. The caching is not
+-- really needed but saves up to 50% of the time needed to add visualization. Of
+-- course the overall runtime is larger because of color and layer processing in the
+-- backend (can be times as much) so the runtime is somewhat larger with full
+-- visualization enabled. In practice this will never happen unless one is demoing.
-- todo: global switch (so no attributes)
-- todo: maybe also xoffset, yoffset of glyph
@@ -37,38 +31,12 @@ local formatters = string.formatters
-- todo: dir and localpar nodes
local nodecodes = nodes.nodecodes
-local disc_code = nodecodes.disc
-local kern_code = nodecodes.kern
-local glyph_code = nodecodes.glyph
-local hlist_code = nodecodes.hlist
-local vlist_code = nodecodes.vlist
-local glue_code = nodecodes.glue
-local penalty_code = nodecodes.penalty
-local whatsit_code = nodecodes.whatsit
-local user_code = nodecodes.user
-local math_code = nodecodes.math
-local gluespec_code = nodecodes.gluespec
-
-local kerncodes = nodes.kerncodes
-local font_kern_code = kerncodes.fontkern
-local user_kern_code = kerncodes.userkern
-
-local gluecodes = nodes.gluecodes
-local cleaders_code = gluecodes.cleaders
-local userskip_code = gluecodes.userskip
-local space_code = gluecodes.space
-local xspace_code = gluecodes.xspace
-local leftskip_code = gluecodes.leftskip
-local rightskip_code = gluecodes.rightskip
-
-local whatsitcodes = nodes.whatsitcodes
-local mathcodes = nodes.mathcodes
local nuts = nodes.nuts
local tonut = nuts.tonut
local tonode = nuts.tonode
-local setfield = nuts.setfield
+----- setfield = nuts.setfield
local setboth = nuts.setboth
local setlink = nuts.setlink
local setdisc = nuts.setdisc
@@ -97,6 +65,7 @@ local getkern = nuts.getkern
local getpenalty = nuts.getpenalty
local getdir = nuts.getdir
local getwidth = nuts.getwidth
+local getdepth = nuts.getdepth
local getshift = nuts.getshift
local hpack_nodes = nuts.hpack
@@ -115,6 +84,8 @@ local hpack_string = nuts.typesetters.tohpack
local texgetattribute = tex.getattribute
local texsetattribute = tex.setattribute
+local setmetatableindex = table.setmetatableindex
+
local unsetvalue = attributes.unsetvalue
local current_font = font.current
@@ -144,57 +115,61 @@ local starttiming = statistics.starttiming
local stoptiming = statistics.stoptiming
local a_visual = attributes.private("visual")
-local a_fontkern = attributes.private("fontkern")
local a_layer = attributes.private("viewerlayer")
-local hasbit = number.hasbit
-local bit = number.bit
-local setbit = number.setbit
-local clearbit = number.clearbit
+local band = bit32.band
+local bor = bit32.bor
local enableaction = nodes.tasks.enableaction
-local trace_hbox
-local trace_vbox
-local trace_vtop
-local trace_kern
-local trace_glue
-local trace_penalty
-local trace_fontkern
-local trace_strut
-local trace_whatsit
-local trace_user
-local trace_math
-local trace_italic
-local trace_discretionary
+-- local trace_hbox
+-- local trace_vbox
+-- local trace_vtop
+-- local trace_kern
+-- local trace_glue
+-- local trace_penalty
+-- local trace_fontkern
+-- local trace_strut
+-- local trace_whatsit
+-- local trace_user
+-- local trace_math
+-- local trace_italic
+-- local trace_discretionary
+-- local trace_expansion
+-- local trace_line
+-- local trace_space
local report_visualize = logs.reporter("visualize")
local modes = {
- hbox = 1,
- vbox = 2,
- vtop = 4,
- kern = 8,
- glue = 16,
- -- skip = 16,
- penalty = 32,
- fontkern = 64,
- strut = 128,
- whatsit = 256,
- glyph = 512,
- simple = 1024,
- simplehbox = 1024 + 1,
- simplevbox = 1024 + 2,
- simplevtop = 1024 + 4,
- user = 2048,
- math = 4096,
- italic = 8192,
- origin = 16384,
- discretionary = 32768,
+ hbox = 1,
+ vbox = 2,
+ vtop = 4,
+ kern = 8,
+ glue = 16,
+ -- skip = 16,
+ penalty = 32,
+ fontkern = 64,
+ strut = 128,
+ whatsit = 256,
+ glyph = 512,
+ simple = 1024,
+ simplehbox = 1024 + 1,
+ simplevbox = 1024 + 2,
+ simplevtop = 1024 + 4,
+ user = 2048,
+ math = 4096,
+ italic = 8192,
+ origin = 16384,
+ discretionary = 32768,
+ expansion = 65536,
+ line = 131072,
+ space = 262144,
+ depth = 524288,
}
local usedfont, exheight, emwidth
-local l_penalty, l_glue, l_kern, l_fontkern, l_hbox, l_vbox, l_vtop, l_strut, l_whatsit, l_glyph, l_user, l_math, l_italic, l_origin, l_discretionary
+local l_penalty, l_glue, l_kern, l_fontkern, l_hbox, l_vbox, l_vtop, l_strut, l_whatsit, l_glyph, l_user, l_math, l_italic, l_origin, l_discretionary, l_expansion, l_line, l_space, l_depth
local enabled = false
local layers = { }
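The layer flags above are plain powers of two, so swapping number.setbit for bit32.bor is a one-to-one change. A small sketch of how a specification like "glue penalty depth" collapses into one attribute value, with a trimmed copy of the modes table:

local bor    = bit32.bor      -- LuaTeX provides the bit32 library
local gmatch = string.gmatch

local modes = { glue = 16, penalty = 32, expansion = 65536, depth = 524288 }

local function tomode(spec)
    local a = 0
    for s in gmatch(spec, "[a-z]+") do
        local m = modes[s]
        if m then a = bor(a, m) end
    end
    return a
end

print(tomode("glue penalty depth")) -- 524336, i.e. 16 + 32 + 524288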
@@ -213,6 +188,8 @@ end
-- we can preset a bunch of bits
+local userrule -- bah, not yet defined: todo, delayed(nuts.rules,"userrule")
+
local function enable()
if not usedfont then
-- we use a narrow monospaced font -- infofont ?
@@ -244,10 +221,18 @@ local function enable()
l_italic = layers.italic
l_origin = layers.origin
l_discretionary = layers.discretionary
+ l_expansion = layers.expansion
+ l_line = layers.line
+ l_space = layers.space
+ l_depth = layers.depth
enableaction("shipouts","nodes.visualizers.handler")
report_visualize("enabled")
enabled = true
tex.setcount("global","c_syst_visualizers_state",1) -- so that we can optimize at the tex end
+ --
+ if not userrule then
+ userrule = nuts.rules.userrule
+ end
end
local function setvisual(n,a,what,list) -- this will become more efficient when we have the bit lib linked in
@@ -257,13 +242,13 @@ local function setvisual(n,a,what,list) -- this will become more efficient when
if not a or a == 0 or a == unsetvalue then
a = preset_makeup
else
- a = setbit(a,preset_makeup)
+ a = bor(a,preset_makeup)
end
elseif n == "boxes" then
if not a or a == 0 or a == unsetvalue then
a = preset_boxes
else
- a = setbit(a,preset_boxes)
+ a = bor(a,preset_boxes)
end
elseif n == "all" then
if what == false then
@@ -271,7 +256,7 @@ local function setvisual(n,a,what,list) -- this will become more efficient when
elseif not a or a == 0 or a == unsetvalue then
a = preset_all
else
- a = setbit(a,preset_all)
+ a = bor(a,preset_all)
end
else
for s in gmatch(n,"[a-z]+") do
@@ -281,7 +266,7 @@ local function setvisual(n,a,what,list) -- this will become more efficient when
elseif not a or a == 0 or a == unsetvalue then
a = m
else
- a = setbit(a,m)
+ a = bor(a,m)
end
end
end
@@ -297,18 +282,53 @@ function nuts.setvisual(n,mode)
setattr(n,a_visual,setvisual(mode,getattr(n,a_visual),true))
end
-function nuts.setvisuals(n,mode)
+function nuts.setvisuals(n,mode) -- currently the same
setattr(n,a_visual,setvisual(mode,getattr(n,a_visual),true,true))
end
-function nuts.applyvisuals(n,mode)
+-- fast setters
+
+do
+
+ local cached = setmetatableindex(function(t,k)
+ if k == true then
+ return texgetattribute(a_visual)
+ elseif not k then
+ t[k] = unsetvalue
+ return unsetvalue
+ else
+ local v = setvisual(k)
+ t[k] = v
+ return v
+ end
+ end)
+
+ -- local function applyvisuals(n,mode)
+ -- local a = cached[mode]
+ -- apply_to_nodes(n,function(n) setattr(n,a_visual,a) end)
+ -- end
+
local a = unsetvalue
- if mode == true then
- a = texgetattribute (a_visual)
- elseif mode then
- a = setvisual(mode)
+
+ local f = function(n) setattr(n,a_visual,a) end
+
+ local function applyvisuals(n,mode)
+ a = cached[mode]
+ apply_to_nodes(n,f)
+ end
+
+ nuts.applyvisuals = applyvisuals
+
+ function nodes.applyvisuals(n,mode)
+ applyvisuals(tonut(n),mode)
+ end
+
+ function visualizers.attribute(mode)
+ return cached[mode]
end
- apply_to_nodes(n,function(n) setattr(n,a_visual,a) end)
+
+ visualizers.attributes = cached
+
end
function nuts.copyvisual(n,m)
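applyvisuals now memoizes the computed attribute per mode string through setmetatableindex, so repeated calls with the same specification skip the parsing step. The pattern in isolation, with a self-contained stand-in for the ConTeXt helper:

-- generic memoizing index: compute a value the first time a key is asked for,
-- store it, and serve the stored value on later lookups
local function setmetatableindex(f)
    return setmetatable({ }, { __index = function(t, k)
        local v = f(t, k)
        t[k] = v
        return v
    end })
end

local calls  = 0
local cached = setmetatableindex(function(t, k)
    calls = calls + 1
    return ("parsed:%s"):format(k)
end)

print(cached["glue penalty"]) -- computed on first access
print(cached["glue penalty"]) -- served from the table
print(calls)                  -- 1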
@@ -348,6 +368,7 @@ local c_negative = "trace:r"
local c_zero = "trace:g"
local c_text = "trace:s"
local c_space = "trace:y"
+local c_space_x = "trace:m"
local c_skip_a = "trace:c"
local c_skip_b = "trace:m"
local c_glyph = "trace:o"
@@ -355,13 +376,16 @@ local c_ligature = "trace:s"
local c_white = "trace:w"
local c_math = "trace:r"
local c_origin = "trace:o"
-local c_discretionary = "trace:o"
+local c_discretionary = "trace:d"
+local c_expansion = "trace:o"
+local c_depth = "trace:o"
local c_positive_d = "trace:db"
local c_negative_d = "trace:dr"
local c_zero_d = "trace:dg"
local c_text_d = "trace:ds"
local c_space_d = "trace:dy"
+local c_space_x_d = "trace:dm"
local c_skip_a_d = "trace:dc"
local c_skip_b_d = "trace:dm"
local c_glyph_d = "trace:do"
@@ -369,7 +393,9 @@ local c_ligature_d = "trace:ds"
local c_white_d = "trace:dw"
local c_math_d = "trace:dr"
local c_origin_d = "trace:do"
-local c_discretionary_d = "trace:do"
+local c_discretionary_d = "trace:dd"
+local c_expansion_d = "trace:do"
+local c_depth_d = "trace:do"
local function sometext(str,layer,color,textcolor,lap) -- we can just paste verbatim together .. no typesteting needed
local text = hpack_string(str,usedfont)
@@ -389,7 +415,7 @@ local function sometext(str,layer,color,textcolor,lap) -- we can just paste verb
if lap then
info = new_hlist(setlink(new_kern(-width),info))
else
- info = new_hlist(info)
+ info = new_hlist(info) -- a bit overkill: double wrapped
end
if layer then
setattr(info,a_layer,layer)
@@ -397,21 +423,41 @@ local function sometext(str,layer,color,textcolor,lap) -- we can just paste verb
return info, width
end
-local caches = table.setmetatableindex("table")
+local function someblob(str,layer,color,textcolor,width)
+ local text = hpack_string(str,usedfont)
+ local size = getwidth(text)
+ local rule = new_rule(width,2*exheight,exheight/2)
+ local kern = new_kern(-width + (width-size)/2)
+ if color then
+ setcolor(rule,color)
+ end
+ if textcolor then
+ setlistcolor(getlist(text),textcolor)
+ end
+ local info = setlink(rule,kern,text)
+ setlisttransparency(info,c_zero)
+ info = hpack_nodes(info)
+ local width = getwidth(info)
+ info = new_hlist(info)
+ if layer then
+ setattr(info,a_layer,layer)
+ end
+ return info, width
+end
+
+local caches = setmetatableindex("table")
-local fontkern do
+local fontkern, italickern do
local f_cache = caches["fontkern"]
+ local i_cache = caches["italickern"]
- fontkern = function(head,current)
+ local function somekern(head,current,cache,color,layer)
local width = getkern(current)
local extra = getfield(current,"expansion_factor")
local kern = width + extra
- local info = f_cache[kern]
- -- report_visualize("fontkern: %p ex %p",width,extra)
- if info then
- -- print("hit fontkern")
- else
+ local info = cache[kern]
+ if not info then
local text = hpack_string(formatters[" %0.3f"](kern*pt_factor),usedfont)
local rule = new_rule(emwidth/fraction,6*exheight,2*exheight)
local list = getlist(text)
@@ -422,25 +468,102 @@ local fontkern do
else
setlistcolor(list,c_zero_d)
end
- setlisttransparency(list,c_text_d)
- setcolor(rule,c_text_d)
- settransparency(rule,c_text_d)
+ setlisttransparency(list,color)
+ setcolor(rule,color)
+ settransparency(rule,color)
setshift(text,-5 * exheight)
info = new_hlist(setlink(rule,text))
- setattr(info,a_layer,l_fontkern)
+ setattr(info,a_layer,layer)
f_cache[kern] = info
end
head = insert_node_before(head,current,copy_list(info))
return head, current
end
+ fontkern = function(head,current)
+ return somekern(head,current,f_cache,c_text_d,l_fontkern)
+ end
+
+ italickern = function(head,current)
+ return somekern(head,current,i_cache,c_glyph_d,l_italic)
+ end
+
+end
+
+local glyphexpansion do
+
+ local f_cache = caches["glyphexpansion"]
+
+ glyphexpansion = function(head,current)
+ local extra = getfield(current,"expansion_factor")
+ if extra ~= 0 then
+ extra = extra / 1000
+ local info = f_cache[extra]
+ if not info then
+ local text = hpack_string(compactfloat(extra,"%.1f"),usedfont)
+ local rule = new_rule(emwidth/fraction,exheight,2*exheight)
+ local list = getlist(text)
+ if extra > 0 then
+ setlistcolor(list,c_positive_d)
+ elseif extra < 0 then
+ setlistcolor(list,c_negative_d)
+ end
+ setlisttransparency(list,c_text_d)
+ setcolor(rule,c_text_d)
+ settransparency(rule,c_text_d)
+ setshift(text,1.5 * exheight)
+ info = new_hlist(setlink(rule,text))
+ setattr(info,a_layer,l_expansion)
+ f_cache[extra] = info
+ end
+ head = insert_node_before(head,current,copy_list(info))
+ return head, current
+ end
+ return head, current
+ end
+
+end
+
+local kernexpansion do
+
+ local f_cache = caches["kernexpansion"]
+
+ kernexpansion = function(head,current)
+ local extra = getfield(current,"expansion_factor")
+ if extra ~= 0 then
+ extra = extra / 1000
+ local info = f_cache[extra]
+ if not info then
+ local text = hpack_string(compactfloat(extra,"%.1f"),usedfont)
+ local rule = new_rule(emwidth/fraction,exheight,4*exheight)
+ local list = getlist(text)
+ if extra > 0 then
+ setlistcolor(list,c_positive_d)
+ elseif extra < 0 then
+ setlistcolor(list,c_negative_d)
+ end
+ setlisttransparency(list,c_text_d)
+ setcolor(rule,c_text_d)
+ settransparency(rule,c_text_d)
+ setshift(text,3.5 * exheight)
+ info = new_hlist(setlink(rule,text))
+ setattr(info,a_layer,l_expansion)
+ f_cache[extra] = info
+ end
+ head = insert_node_before(head,current,copy_list(info))
+ return head, current
+ end
+ return head, current
+ end
+
end
local whatsit do
- local w_cache = caches["whatsit"]
+ local whatsitcodes = nodes.whatsitcodes
+ local w_cache = caches["whatsit"]
- local tags = {
+ local tags = {
open = "FIC",
write = "FIW",
close = "FIC",
@@ -506,12 +629,12 @@ end
local math do
- local m_cache = {
+ local mathcodes = nodes.mathcodes
+ local m_cache = {
beginmath = caches["bmath"],
endmath = caches["emath"],
}
-
- local tags = {
+ local tags = {
beginmath = "B",
endmath = "E",
}
@@ -543,12 +666,27 @@ local math do
end
+local ruleddepth do
+
+ ruleddepth = function(current,wd,ht,dp)
+ local wd, ht, dp = getwhd(current)
+ if dp ~= 0 then
+ local rule = new_rule(wd,0,dp)
+ setcolor(rule,c_depth)
+ settransparency(rule,c_zero)
+ setattr(rule,a_layer,l_depth)
+ setlist(current,setlink(rule,new_kern(-wd),getlist(current)))
+ end
+ end
+
+end
+
local ruledbox do
local b_cache = caches["box"]
local o_cache = caches["origin"]
- table.setmetatableindex(o_cache,function(t,size)
+ setmetatableindex(o_cache,function(t,size)
local rule = new_rule(2*size,size,size)
local origin = hpack_nodes(rule)
setcolor(rule,c_origin_d)
@@ -572,30 +710,30 @@ local ruledbox do
setboth(current)
local linewidth = emwidth/fraction
local size = 2*linewidth
- local baseline, baseskip
- if dp ~= 0 and ht ~= 0 then
- if wd > 20*linewidth then
- local targetsize = wd - size
- baseline = b_cache[targetsize]
- if not baseline then
- -- due to an optimized leader color/transparency we need to set the glue node in order
- -- to trigger this mechanism
- local leader = setlink(new_glue(size),new_rule(3*size,linewidth,0),new_glue(size))
- leader = hpack_nodes(leader)
- baseline = new_glue(0,65536,0,2,0)
- setleader(baseline,leader)
- setsubtype(baseline,cleaders_code)
- setlisttransparency(baseline,c_text)
- baseline = hpack_nodes(baseline,targetsize)
- b_cache[targetsize] = baseline
- end
- baseline = copy_list(baseline)
- baseskip = new_kern(-wd+linewidth)
- else
- baseline = new_rule(wd-size,linewidth,0)
- baseskip = new_kern(-wd+size)
- end
- end
+ -- local baseline, baseskip
+ -- if dp ~= 0 and ht ~= 0 then
+ -- if wd > 20*linewidth then
+ -- local targetsize = wd - size
+ -- baseline = b_cache[targetsize]
+ -- if not baseline then
+ -- -- due to an optimized leader color/transparency we need to set the glue node in order
+ -- -- to trigger this mechanism
+ -- local leader = setlink(new_glue(size),new_rule(3*size,linewidth,0),new_glue(size))
+ -- leader = hpack_nodes(leader)
+ -- baseline = new_glue(0,65536,0,2,0)
+ -- setleader(baseline,leader)
+ -- setsubtype(baseline,cleaders_code)
+ -- setlisttransparency(baseline,c_text)
+ -- baseline = hpack_nodes(baseline,targetsize)
+ -- b_cache[targetsize] = baseline
+ -- end
+ -- baseline = copy_list(baseline)
+ -- baseskip = new_kern(-wd+linewidth)
+ -- else
+ -- baseline = new_rule(wd-size,linewidth,0)
+ -- baseskip = new_kern(-wd+size)
+ -- end
+ -- end
local this
if not simple then
this = b_cache[what]
@@ -608,16 +746,31 @@ local ruledbox do
end
end
        -- we need to trigger the right mode (else sometimes no whatsits)
+ -- local info = setlink(
+ -- this and copy_list(this) or nil,
+ -- new_rule(linewidth,ht,dp),
+ -- new_rule(wd-size,-dp+linewidth,dp),
+ -- new_rule(linewidth,ht,dp),
+ -- new_kern(-wd+linewidth),
+ -- new_rule(wd-size,ht,-ht+linewidth),
+ -- baseskip,
+ -- baseskip and baseline or nil
+ -- )
+ --
+ -- userrules:
+ --
local info = setlink(
this and copy_list(this) or nil,
- new_rule(linewidth,ht,dp),
- new_rule(wd-size,-dp+linewidth,dp),
- new_rule(linewidth,ht,dp),
- new_kern(-wd+linewidth),
- new_rule(wd-size,ht,-ht+linewidth),
- baseskip,
- baseskip and baseline or nil
+ userrule {
+ width = wd,
+ height = ht,
+ depth = dp,
+ line = linewidth,
+ type = "box",
+ dashed = 3*size,
+ }
)
+ --
setlisttransparency(info,c_text)
info = new_hlist(info)
--
@@ -665,13 +818,13 @@ local ruledbox do
if next then
setlink(info,next)
end
- if prev then
- if getid(prev) == gluespec_code then
- report_visualize("ignoring invalid prev")
- -- weird, how can this happen, an inline glue-spec, probably math
- else
+ if prev and prev > 0 then
+ -- if getid(prev) == gluespec_code then
+ -- report_visualize("ignoring invalid prev")
+ -- -- weird, how can this happen, an inline glue-spec, probably math
+ -- else
setlink(prev,info)
- end
+ -- end
end
if head == current then
return info, info
@@ -687,6 +840,10 @@ end
local ruledglyph do
+ -- see boundingbox feature .. maybe a pdf stream is more efficient, after all we
+ -- have a frozen color anyway or i need a more detailed cache .. below is a more
+ -- texie approach
+
ruledglyph = function(head,current,previous) -- wrong for vertical glyphs
local wd = getwidth(current)
-- local wd = chardata[getfont(current)][getchar(current)].width
@@ -700,22 +857,54 @@ local ruledglyph do
local prev = previous
setboth(current)
local linewidth = emwidth/(2*fraction)
- local baseline
- -- if dp ~= 0 and ht ~= 0 then
- if (dp >= 0 and ht >= 0) or (dp <= 0 and ht <= 0) then
- baseline = new_rule(wd-2*linewidth,linewidth,0)
- end
- local doublelinewidth = 2*linewidth
- -- could be a pdf rule (or a user rule now)
- local info = setlink(
- new_rule(linewidth,ht,dp),
- new_rule(wd-doublelinewidth,-dp+linewidth,dp),
- new_rule(linewidth,ht,dp),
- new_kern(-wd+linewidth),
- new_rule(wd-doublelinewidth,ht,-ht+linewidth),
- new_kern(-wd+doublelinewidth),
- baseline
+ local info
+ --
+ -- original
+ --
+ -- local baseline
+ -- if (dp >= 0 and ht >= 0) or (dp <= 0 and ht <= 0) then
+ -- baseline = new_rule(wd-2*linewidth,linewidth,0)
+ -- end
+ -- local doublelinewidth = 2*linewidth
+ -- -- could be a pdf rule (or a user rule now)
+ -- info = setlink(
+ -- new_rule(linewidth,ht,dp),
+ -- new_rule(wd-doublelinewidth,-dp+linewidth,dp),
+ -- new_rule(linewidth,ht,dp),
+ -- new_kern(-wd+linewidth),
+ -- new_rule(wd-doublelinewidth,ht,-ht+linewidth),
+ -- new_kern(-wd+doublelinewidth),
+ -- baseline
+ -- )
+ --
+ -- experiment with subtype outline
+ --
+ -- if (dp >= 0 and ht >= 0) or (dp <= 0 and ht <= 0) then
+ -- baseline = new_rule(wd,linewidth/2,0)
+ -- end
+ -- local r = new_rule(wd-linewidth,ht-linewidth/4,dp-linewidth/4)
+ -- setsubtype(r,nodes.rulecodes.outline)
+ -- setfield(r,"transform",linewidth)
+ -- info = setlink(
+ -- new_kern(linewidth/4),
+ -- r,
+ -- new_kern(-wd+linewidth/2),
+ -- baseline
+ -- )
+ --
+ -- userrules:
+ --
+ info = setlink(
+ userrule {
+ width = wd,
+ height = ht,
+ depth = dp,
+ line = linewidth,
+ type = "box",
+ },
+ new_kern(-wd)
)
+ --
local char = chardata[getfont(current)][getchar(current)]
if char and type(char.unicode) == "table" then -- hackery test
setlistcolor(info,c_ligature)
@@ -745,37 +934,49 @@ local ruledglyph do
end
end
+ function visualizers.setruledglyph(f)
+ ruledglyph = f or ruledglyph
+ end
+
end
local ruledglue do
+ local gluecodes = nodes.gluecodes
+ local cleaders_code = gluecodes.cleaders
+ local userskip_code = gluecodes.userskip
+ local space_code = gluecodes.spaceskip
+ local xspace_code = gluecodes.xspaceskip
+ local leftskip_code = gluecodes.leftskip
+ local rightskip_code = gluecodes.rightskip
+
local g_cache_v = caches["vglue"]
local g_cache_h = caches["hglue"]
local tags = {
-- userskip = "US",
- lineskip = "LS",
- baselineskip = "BS",
- parskip = "PS",
- abovedisplayskip = "DA",
- belowdisplayskip = "DB",
- abovedisplayshortskip = "SA",
- belowdisplayshortskip = "SB",
- leftskip = "LS",
- rightskip = "RS",
- topskip = "TS",
- splittopskip = "ST",
- tabskip = "AS",
- spaceskip = "SS",
- xspaceskip = "XS",
- parfillskip = "PF",
- thinmuskip = "MS",
- medmuskip = "MM",
- thickmuskip = "ML",
- leaders = "NL",
- cleaders = "CL",
- xleaders = "XL",
- gleaders = "GL",
+ [gluecodes.lineskip] = "LS",
+ [gluecodes.baselineskip] = "BS",
+ [gluecodes.parskip] = "PS",
+ [gluecodes.abovedisplayskip] = "DA",
+ [gluecodes.belowdisplayskip] = "DB",
+ [gluecodes.abovedisplayshortskip] = "SA",
+ [gluecodes.belowdisplayshortskip] = "SB",
+ [gluecodes.leftskip] = "LS",
+ [gluecodes.rightskip] = "RS",
+ [gluecodes.topskip] = "TS",
+ [gluecodes.splittopskip] = "ST",
+ [gluecodes.tabskip] = "AS",
+ [gluecodes.spaceskip] = "SP",
+ [gluecodes.xspaceskip] = "XS",
+ [gluecodes.parfillskip] = "PF",
+ [gluecodes.thinmuskip] = "MS",
+ [gluecodes.medmuskip] = "MM",
+ [gluecodes.thickmuskip] = "ML",
+ [gluecodes.leaders] = "NL",
+ [gluecodes.cleaders] = "CL",
+ [gluecodes.xleaders] = "XL",
+ [gluecodes.gleaders] = "GL",
-- true = "VS",
-- false = "HS",
}
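-- Editor's sketch (illustrative only, not part of the patch): the tags table
-- above is now keyed by numeric glue subtype codes instead of subtype names,
-- so a lookup is a plain table access on the value returned by getsubtype:
--
--   local gluecodes = nodes.gluecodes
--   print(tags[gluecodes.spaceskip])     -- "SP", numeric key, no name lookup
--   print(tags[gluecodes.parfillskip])   -- "PF"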
@@ -790,7 +991,7 @@ local ruledglue do
if info then
-- print("glue hit")
else
- if subtype == space_code or subtype == xspace_code then -- not yet all space
+ if subtype == space_code or subtype == xspace_code then
info = sometext(amount,l_glue,c_space)
elseif subtype == leftskip_code or subtype == rightskip_code then
info = sometext(amount,l_glue,c_skip_a)
@@ -815,6 +1016,55 @@ local ruledglue do
return head, getnext(current)
end
+ -- ruledspace = function(head,current,parent)
+ -- local subtype = getsubtype(current)
+ -- if subtype == space_code or subtype == xspace_code then
+ -- local width = effectiveglue(current,parent)
+ -- local amount = formatters["%s:%0.3f"](tags[subtype] or "HS",width*pt_factor)
+ -- local info = g_cache_h[amount]
+ -- if info then
+ -- -- print("space hit")
+ -- else
+ -- info = sometext(amount,l_glue,c_space)
+ -- g_cache_h[amount] = info
+ -- end
+ -- info = copy_list(info)
+ -- head, current = insert_node_before(head,current,info)
+ -- return head, getnext(current)
+ -- else
+ -- return head, current
+ -- end
+ -- end
+
+ local g_cache_s = caches["space"]
+ local g_cache_x = caches["xspace"]
+
+ ruledspace = function(head,current,parent)
+ local subtype = getsubtype(current)
+ if subtype == space_code or subtype == xspace_code then -- not yet all space
+ local width = effectiveglue(current,parent)
+ local info
+ if subtype == space_code then
+ info = g_cache_s[width]
+ if not info then
+ info = someblob("SP",l_glue,c_space,nil,width)
+ g_cache_s[width] = info
+ end
+ else
+ info = g_cache_x[width]
+ if not info then
+ info = someblob("XS",l_glue,c_space_x,nil,width)
+ g_cache_x[width] = info
+ end
+ end
+ info = copy_list(info)
+ head, current = insert_node_before(head,current,info)
+ return head, getnext(current)
+ else
+ return head, current
+ end
+ end
+
end
local ruledkern do
@@ -823,11 +1073,10 @@ local ruledkern do
local k_cache_h = caches["hkern"]
ruledkern = function(head,current,vertical)
- local kern = getkern(current)
- local info = (vertical and k_cache_v or k_cache_h)[kern]
- if info then
- -- print("kern hit")
- else
+ local kern = getkern(current)
+ local cache = vertical and k_cache_v or k_cache_h
+ local info = cache[kern]
+ if not info then
local amount = formatters["%s:%0.3f"](vertical and "VK" or "HK",kern*pt_factor)
if kern > 0 then
info = sometext(amount,l_kern,c_positive)
@@ -836,7 +1085,7 @@ local ruledkern do
else
info = sometext(amount,l_kern,c_zero)
end
- (vertical and k_cache_v or k_cache_h)[kern] = info
+ cache[kern] = info
end
info = copy_list(info)
if vertical then
@@ -850,14 +1099,12 @@ end
local ruleditalic do
- local i_cache = caches["itatalic"]
+ local i_cache = caches["italic"]
ruleditalic = function(head,current)
local kern = getkern(current)
local info = i_cache[kern]
- if info then
- -- print("kern hit")
- else
+ if not info then
local amount = formatters["%s:%0.3f"]("IC",kern*pt_factor)
if kern > 0 then
info = sometext(amount,l_kern,c_positive)
@@ -930,152 +1177,204 @@ local ruledpenalty do
end
-local function visualize(head,vertical,forced,parent)
- local trace_hbox = false
- local trace_vbox = false
- local trace_vtop = false
- local trace_kern = false
- local trace_glue = false
- local trace_penalty = false
- local trace_fontkern = false
- local trace_strut = false
- local trace_whatsit = false
- local trace_glyph = false
- local trace_simple = false
- local trace_user = false
- local trace_math = false
- local trace_italic = false
- local trace_origin = false
- local current = head
- local previous = nil
- local attr = unsetvalue
- local prev_trace_fontkern = nil
- while current do
- local id = getid(current)
- local a = forced or getattr(current,a_visual) or unsetvalue
- if a ~= attr then
- prev_trace_fontkern = trace_fontkern
- if a == unsetvalue then
- trace_hbox = false
- trace_vbox = false
- trace_vtop = false
- trace_kern = false
- trace_glue = false
- trace_penalty = false
- trace_fontkern = false
- trace_strut = false
- trace_whatsit = false
- trace_glyph = false
- trace_simple = false
- trace_user = false
- trace_math = false
- trace_italic = false
- trace_origin = false
- trace_discretionary = false
- else -- dead slow:
- trace_hbox = hasbit(a, 1)
- trace_vbox = hasbit(a, 2)
- trace_vtop = hasbit(a, 4)
- trace_kern = hasbit(a, 8)
- trace_glue = hasbit(a, 16)
- trace_penalty = hasbit(a, 32)
- trace_fontkern = hasbit(a, 64)
- trace_strut = hasbit(a, 128)
- trace_whatsit = hasbit(a, 256)
- trace_glyph = hasbit(a, 512)
- trace_simple = hasbit(a, 1024)
- trace_user = hasbit(a, 2048)
- trace_math = hasbit(a, 4096)
- trace_italic = hasbit(a, 8192)
- trace_origin = hasbit(a,16384)
- trace_discretionary = hasbit(a,32768)
- end
- attr = a
- end
- if trace_strut then
- setattr(current,a_layer,l_strut)
- elseif id == glyph_code then
- if trace_glyph then
- head, current = ruledglyph(head,current,previous)
- end
- elseif id == disc_code then
- if trace_discretionary then
- head, current = ruleddiscretionary(head,current)
- end
- local pre, post, replace = getdisc(current)
- if pre then
- pre = visualize(pre,false,a,parent)
- end
- if post then
- post = visualize(post,false,a,parent)
- end
- if replace then
- replace = visualize(replace,false,a,parent)
+do
+
+ local disc_code = nodecodes.disc
+ local kern_code = nodecodes.kern
+ local glyph_code = nodecodes.glyph
+ local glue_code = nodecodes.glue
+ local penalty_code = nodecodes.penalty
+ local whatsit_code = nodecodes.whatsit
+ local user_code = nodecodes.user
+ local math_code = nodecodes.math
+ local hlist_code = nodecodes.hlist
+ local vlist_code = nodecodes.vlist
+
+ local kerncodes = nodes.kerncodes
+ local font_kern_code = kerncodes.fontkern
+ local italic_kern_code = kerncodes.italiccorrection
+ ----- user_kern_code = kerncodes.userkern
+
+ local listcodes = nodes.listcodes
+ local line_code = listcodes.line
+
+ local function visualize(head,vertical,forced,parent)
+ local trace_hbox = false
+ local trace_vbox = false
+ local trace_vtop = false
+ local trace_kern = false
+ local trace_glue = false
+ local trace_penalty = false
+ local trace_fontkern = false
+ local trace_strut = false
+ local trace_whatsit = false
+ local trace_glyph = false
+ local trace_simple = false
+ local trace_user = false
+ local trace_math = false
+ local trace_italic = false
+ local trace_origin = false
+ local trace_discretionary = false
+ local trace_expansion = false
+ local trace_line = false
+ local trace_space = false
+ local trace_depth = false
+ local current = head
+ local previous = nil
+ local attr = unsetvalue
+ local prev_trace_fontkern = nil
+ local prev_trace_italic = nil
+ local prev_trace_expansion = nil
+ while current do
+ local id = getid(current)
+ local a = forced or getattr(current,a_visual) or unsetvalue
+ if a ~= attr then
+ prev_trace_fontkern = trace_fontkern
+ prev_trace_italic = trace_italic
+ prev_trace_expansion = trace_expansion
+ if a == unsetvalue then
+ trace_hbox = false
+ trace_vbox = false
+ trace_vtop = false
+ trace_kern = false
+ trace_glue = false
+ trace_penalty = false
+ trace_fontkern = false
+ trace_strut = false
+ trace_whatsit = false
+ trace_glyph = false
+ trace_simple = false
+ trace_user = false
+ trace_math = false
+ trace_italic = false
+ trace_origin = false
+ trace_discretionary = false
+ trace_expansion = false
+ trace_line = false
+ trace_space = false
+ trace_depth = false
+ else -- dead slow:
+ trace_hbox = band(a, 1) ~= 0
+ trace_vbox = band(a, 2) ~= 0
+ trace_vtop = band(a, 4) ~= 0
+ trace_kern = band(a, 8) ~= 0
+ trace_glue = band(a, 16) ~= 0
+ trace_penalty = band(a, 32) ~= 0
+ trace_fontkern = band(a, 64) ~= 0
+ trace_strut = band(a, 128) ~= 0
+ trace_whatsit = band(a, 256) ~= 0
+ trace_glyph = band(a, 512) ~= 0
+ trace_simple = band(a, 1024) ~= 0
+ trace_user = band(a, 2048) ~= 0
+ trace_math = band(a, 4096) ~= 0
+ trace_italic = band(a, 8192) ~= 0
+ trace_origin = band(a, 16384) ~= 0
+ trace_discretionary = band(a, 32768) ~= 0
+ trace_expansion = band(a, 65536) ~= 0
+ trace_line = band(a,131072) ~= 0
+ trace_space = band(a,262144) ~= 0
+ trace_depth = band(a,524288) ~= 0
+ end
+ attr = a
end
- setdisc(current,pre,post,replace)
- elseif id == kern_code then
- local subtype = getsubtype(current)
- -- tricky ... we don't copy the trace attribute in node-inj (yet)
- if subtype == font_kern_code or getattr(current,a_fontkern) then
- if trace_fontkern or prev_trace_fontkern then
- head, current = fontkern(head,current)
+ if trace_strut then
+ setattr(current,a_layer,l_strut)
+ elseif id == glyph_code then
+ if trace_glyph then
+ head, current = ruledglyph(head,current,previous)
end
- else -- if subtype == user_kern_code then
- if trace_italic then
- head, current = ruleditalic(head,current)
- elseif trace_kern then
- head, current = ruledkern(head,current,vertical)
+ if trace_expansion then
+ head, current = glyphexpansion(head,current)
+ end
+ elseif id == disc_code then
+ if trace_discretionary then
+ head, current = ruleddiscretionary(head,current)
+ end
+ local pre, post, replace = getdisc(current)
+ if pre then
+ pre = visualize(pre,false,a,parent)
+ end
+ if post then
+ post = visualize(post,false,a,parent)
+ end
+ if replace then
+ replace = visualize(replace,false,a,parent)
+ end
+ setdisc(current,pre,post,replace)
+ elseif id == kern_code then
+ local subtype = getsubtype(current)
+ if subtype == font_kern_code then
+ if trace_fontkern or prev_trace_fontkern then
+ head, current = fontkern(head,current)
+ end
+ if trace_expansion or prev_trace_expansion then
+ head, current = kernexpansion(head,current)
+ end
+ elseif subtype == italic_kern_code then
+ if trace_italic or prev_trace_italic then
+ head, current = italickern(head,current)
+ elseif trace_kern then
+ head, current = ruleditalic(head,current)
+ end
+ else
+ if trace_kern then
+ head, current = ruledkern(head,current,vertical)
+ end
+ end
+ elseif id == glue_code then
+ local content = getleader(current)
+ if content then
+ setleader(current,visualize(content,false,nil,parent))
+ elseif trace_glue then
+ head, current = ruledglue(head,current,vertical,parent)
+ elseif trace_space then
+ head, current = ruledspace(head,current,parent)
+ end
+ elseif id == penalty_code then
+ if trace_penalty then
+ head, current = ruledpenalty(head,current,vertical)
+ end
+ elseif id == hlist_code then
+ local content = getlist(current)
+ if content then
+ setlist(current,visualize(content,false,nil,current))
+ end
+ if trace_depth then
+ ruleddepth(current)
+ end
+ if trace_line and getsubtype(current) == line_code then
+ head, current = ruledbox(head,current,false,l_line,"L__",trace_simple,previous,trace_origin,parent)
+ elseif trace_hbox then
+ head, current = ruledbox(head,current,false,l_hbox,"H__",trace_simple,previous,trace_origin,parent)
+ end
+ elseif id == vlist_code then
+ local content = getlist(current)
+ if content then
+ setlist(current,visualize(content,true,nil,current))
+ end
+ if trace_vtop then
+ head, current = ruledbox(head,current,true,l_vtop,"_T_",trace_simple,previous,trace_origin,parent)
+ elseif trace_vbox then
+ head, current = ruledbox(head,current,true,l_vbox,"__V",trace_simple,previous,trace_origin,parent)
+ end
+ elseif id == whatsit_code then
+ if trace_whatsit then
+ head, current = whatsit(head,current)
+ end
+ elseif id == user_code then
+ if trace_user then
+ head, current = user(head,current)
+ end
+ elseif id == math_code then
+ if trace_math then
+ head, current = math(head,current)
end
end
- elseif id == glue_code then
- local content = getleader(current)
- if content then
- setleader(current,visualize(content,false,nil,parent))
- elseif trace_glue then
- head, current = ruledglue(head,current,vertical,parent)
- end
- elseif id == penalty_code then
- if trace_penalty then
- head, current = ruledpenalty(head,current,vertical)
- end
- elseif id == hlist_code then
- local content = getlist(current)
- if content then
- setlist(current,visualize(content,false,nil,current))
- end
- if trace_hbox then
- head, current = ruledbox(head,current,false,l_hbox,"H__",trace_simple,previous,trace_origin,parent)
- end
- elseif id == vlist_code then
- local content = getlist(current)
- if content then
- setlist(current,visualize(content,true,nil,current))
- end
- if trace_vtop then
- head, current = ruledbox(head,current,true,l_vtop,"_T_",trace_simple,previous,trace_origin,parent)
- elseif trace_vbox then
- head, current = ruledbox(head,current,true,l_vbox,"__V",trace_simple,previous,trace_origin,parent)
- end
- elseif id == whatsit_code then
- if trace_whatsit then
- head, current = whatsit(head,current)
- end
- elseif id == user_code then
- if trace_user then
- head, current = user(head,current)
- end
- elseif id == math_code then
- if trace_math then
- head, current = math(head,current)
- end
+ previous = current
+ current = getnext(current)
end
- previous = current
- current = getnext(current)
+ return head
end
- return head
-end
-
-do
local function cleanup()
for tag, cache in next, caches do
@@ -1103,28 +1402,31 @@ do
luatex.registerstopactions(cleanup)
-end
-
-function visualizers.box(n)
- if usedfont then
- starttiming(visualizers)
- local box = getbox(n)
- if box then
- setlist(box,visualize(getlist(box),getid(box) == vlist_code))
+ function visualizers.box(n)
+ if usedfont then
+ starttiming(visualizers)
+ local box = getbox(n)
+ if box then
+ setlist(box,visualize(getlist(box),getid(box) == vlist_code))
+ end
+ stoptiming(visualizers)
+ return head, true
+ else
+ return head, false
end
- stoptiming(visualizers)
- return head, true
- else
- return head, false
end
+
end
do
- local last = nil
- local used = nil
+ local hlist_code = nodecodes.hlist
+ local vlist_code = nodecodes.vlist
- local mark = {
+ local last = nil
+ local used = nil
+
+ local mark = {
"trace:1", "trace:2", "trace:3",
"trace:4", "trace:5", "trace:6",
"trace:7",
@@ -1164,40 +1466,44 @@ end)
-- interface
-local implement = interfaces.implement
+do
-implement {
- name = "setvisual",
- arguments = "string",
- actions = visualizers.setvisual
-}
+ local implement = interfaces.implement
-implement {
- name = "setvisuals",
- arguments = "string",
- actions = visualizers.setvisual
-}
+ implement {
+ name = "setvisual",
+ arguments = "string",
+ actions = visualizers.setvisual
+ }
-implement {
- name = "getvisual",
- arguments = "string",
- actions = { setvisual, context }
-}
+ implement {
+ name = "setvisuals",
+ arguments = "string",
+ actions = visualizers.setvisual
+ }
implement {
- name = "setvisuallayer",
- arguments = "string",
- actions = visualizers.setlayer
-}
+ name = "getvisual",
+ arguments = "string",
+ actions = { setvisual, context }
+ }
-implement {
- name = "markvisualfonts",
- arguments = "integer",
- actions = visualizers.markfonts
-}
+ implement {
+ name = "setvisuallayer",
+ arguments = "string",
+ actions = visualizers.setlayer
+ }
-implement {
- name = "setvisualfont",
- arguments = "integer",
- actions = visualizers.setfont
-}
+ implement {
+ name = "markvisualfonts",
+ arguments = "integer",
+ actions = visualizers.markfonts
+ }
+
+ implement {
+ name = "setvisualfont",
+ arguments = "integer",
+ actions = visualizers.setfont
+ }
+
+end
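-- Editor's sketch (illustrative only, not part of the patch): the visualizer now
-- tests the visual attribute with band instead of hasbit; assuming band is
-- bit32.band as in the other files touched here, a combined value decodes as:
--
--   local band = bit32.band
--   local a = 1 + 16 + 65536        -- hbox + glue + expansion requested
--   print(band(a,    16) ~= 0)      -- true  : trace_glue
--   print(band(a,  8192) ~= 0)      -- false : trace_italic
--   print(band(a, 65536) ~= 0)      -- true  : trace_expansion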
diff --git a/tex/context/base/mkiv/trac-vis.mkiv b/tex/context/base/mkiv/trac-vis.mkiv
index 894408222..a6a3fa5a2 100644
--- a/tex/context/base/mkiv/trac-vis.mkiv
+++ b/tex/context/base/mkiv/trac-vis.mkiv
@@ -1,6 +1,6 @@
%D \module
%D [ file=trac-vis, % replaces supp-vis plus some s-* modules
-%D version=2112.06.23, % 1996.10.21,
+%D version=2012.06.23, % 1996.10.21,
%D title=\CONTEXT\ Tracing Macros,
%D subtitle=Visualization,
%D author=Hans Hagen,
@@ -32,7 +32,7 @@
\writestatus{loading}{ConTeXt Tracing Macros / Visualization}
-\registerctxluafile{trac-vis}{1.001}
+\registerctxluafile{trac-vis}{optimize}
\unprotect
@@ -119,6 +119,12 @@
\unexpanded\def\showfontkerns
{\clf_setvisual{fontkern}}
+\unexpanded\def\showfontitalics
+ {\clf_setvisual{italic}}
+
+\unexpanded\def\showfontexpansion
+ {\clf_setvisual{expansion}}
+
\unexpanded\def\setvisualizerfont
{\dosingleempty\syst_visualizers_setfont}
diff --git a/tex/context/base/mkiv/type-ini.mkvi b/tex/context/base/mkiv/type-ini.mkvi
index 3969a5b95..2ac3ee207 100644
--- a/tex/context/base/mkiv/type-ini.mkvi
+++ b/tex/context/base/mkiv/type-ini.mkvi
@@ -13,7 +13,7 @@
\writestatus{loading}{ConTeXt Typescript Macros / Initialization}
-\registerctxluafile{type-ini}{1.001}
+\registerctxluafile{type-ini}{}
%D The default fontclass is empty. We could demand always using fontclasses,
%D and then make the calling macros simpler (always assume fontclass) but
@@ -258,7 +258,7 @@
\def\font_typescripts_synonym_define[#name][#synonym]%
{\ifsecondargument\setevalue{\??typescriptsynonyms#name}{#synonym}\fi}
-\def\truetypescript#name% recursove so no \lastnamedcs
+\def\truetypescript#name% recursive so no \lastnamedcs
{\ifcsname\??typescriptsynonyms#name\endcsname
%\expandafter\truetypescript\csname\??typescriptsynonyms#name\endcsname
\expandafter\truetypescript\lastnamedcs
diff --git a/tex/context/base/mkiv/typo-bld.lua b/tex/context/base/mkiv/typo-bld.lua
index 153218eef..753748a2e 100644
--- a/tex/context/base/mkiv/typo-bld.lua
+++ b/tex/context/base/mkiv/typo-bld.lua
@@ -43,6 +43,7 @@ local new_baselineskip = nodepool.baselineskip
local new_lineskip = nodepool.lineskip
local insert_node_before = nodes.insert_before
local hpack_node = nodes.hpack
+local count_nodes = nodes.countall
local starttiming = statistics.starttiming
local stoptiming = statistics.stoptiming
@@ -193,9 +194,9 @@ function builders.vpack_filter(head,groupcode,size,packtype,maxdepth,direction)
if head then
starttiming(builders)
if trace_vpacking then
- local before = nodes.count(head)
+ local before = count_nodes(head)
head, done = vboxactions(head,groupcode,size,packtype,maxdepth,direction)
- local after = nodes.count(head)
+ local after = count_nodes(head)
if done then
nodes.processors.tracer("vpack","changed",head,groupcode,before,after,true)
else
diff --git a/tex/context/base/mkiv/typo-bld.mkiv b/tex/context/base/mkiv/typo-bld.mkiv
index 621d81c52..8929fa22c 100644
--- a/tex/context/base/mkiv/typo-bld.mkiv
+++ b/tex/context/base/mkiv/typo-bld.mkiv
@@ -35,7 +35,7 @@
\unprotect
-\registerctxluafile{typo-bld}{1.001}
+\registerctxluafile{typo-bld}{}
\definesystemattribute[parbuilder][public]
diff --git a/tex/context/base/mkiv/typo-brk.lua b/tex/context/base/mkiv/typo-brk.lua
index 84eff0654..51760bbf4 100644
--- a/tex/context/base/mkiv/typo-brk.lua
+++ b/tex/context/base/mkiv/typo-brk.lua
@@ -9,7 +9,7 @@ if not modules then modules = { } end modules ['typo-brk'] = {
-- this code dates from the beginning and is kind of experimental; it
-- will be optimized and improved soon
-local next, type, tonumber = next, type, tonumber
+local next, type, tonumber, tostring = next, type, tonumber, tostring
local utfbyte, utfchar = utf.byte, utf.char
local format = string.format
@@ -31,14 +31,12 @@ local getboth = nuts.getboth
local getsubtype = nuts.getsubtype
local getfont = nuts.getfont
local getid = nuts.getid
-local getfield = nuts.getfield
----- getattr = nuts.getattr
local getattrlist = nuts.getattrlist
local takeattr = nuts.takeattr
local getlang = nuts.getlang
local isglyph = nuts.isglyph
-local setfield = nuts.setfield
local setattr = nuts.setattr
local setattrlist = nuts.setattrlist
local setlink = nuts.setlink
diff --git a/tex/context/base/mkiv/typo-brk.mkiv b/tex/context/base/mkiv/typo-brk.mkiv
index 3f9ce6f3a..10b2f8e21 100644
--- a/tex/context/base/mkiv/typo-brk.mkiv
+++ b/tex/context/base/mkiv/typo-brk.mkiv
@@ -21,7 +21,7 @@
%
% -- we might eventually stick to only method 5
-\registerctxluafile{typo-brk}{1.001}
+\registerctxluafile{typo-brk}{}
\definesystemattribute[breakpoint][public,global]
diff --git a/tex/context/base/mkiv/typo-cap.lua b/tex/context/base/mkiv/typo-cap.lua
index 6bf4669df..4dffd1c49 100644
--- a/tex/context/base/mkiv/typo-cap.lua
+++ b/tex/context/base/mkiv/typo-cap.lua
@@ -6,7 +6,7 @@ if not modules then modules = { } end modules ['typo-cap'] = {
license = "see context related readme files"
}
-local next, type = next, type
+local next, type, tonumber = next, type, tonumber
local format, insert = string.format, table.insert
local div, getrandom = math.div, utilities.randomizer.get
@@ -21,7 +21,6 @@ local nuts = nodes.nuts
local tonode = nuts.tonode
local tonut = nuts.tonut
-local getfield = nuts.getfield
local getnext = nuts.getnext
local getprev = nuts.getprev
local getid = nuts.getid
@@ -32,7 +31,6 @@ local getsubtype = nuts.getsubtype
local getchar = nuts.getchar
local getdisc = nuts.getdisc
-local setfield = nuts.setfield
local setattr = nuts.setattr
local setchar = nuts.setchar
local setfont = nuts.setfont
@@ -52,7 +50,7 @@ local kern_code = nodecodes.kern
local disc_code = nodecodes.disc
local math_code = nodecodes.math
-local kerning_code = kerncodes.kerning
+local fontkern_code = kerncodes.fontkern
local enableaction = nodes.tasks.enableaction
@@ -80,11 +78,10 @@ local a_cases = attributes.private("case")
local extract = bit32.extract
local run = 0 -- a trick to make neighbouring ranges work
-
local blocked = { }
local function set(tag,font)
- if run == 2^6 then
+ if run == 0x40 then -- 2^6
run = 1
else
run = run + 1
@@ -95,12 +92,18 @@ local function set(tag,font)
end
local function get(a)
- local font = extract(a,16,12) -- 4000
- local tag = extract(a, 8, 8) -- 250
- local run = extract(a, 0, 8) -- 50
- return tag, font, run
+ return
+ extract(a, 8, 8), -- tag
+ extract(a,16,12), -- font
+ extract(a, 0, 8) -- run
end
+-- local function get(a)
+-- return
+-- (a >> 8) & ~(-1 << 8), -- & 0x0FF -- tag
+-- (a >> 16) & ~(-1 << 12), -- & 0xFFF -- font
+-- (a >> 0) & ~(-1 << 8) -- & 0x0FF -- run
+-- end
-- print(get(set( 1, 0)))
-- print(get(set( 1, 99)))
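-- Editor's sketch (illustrative only, not part of the patch): a worked example of
-- the packing that get unpacks, under the assumption implied by the extract
-- offsets (attribute = font*0x10000 + tag*0x100 + run; the actual set code that
-- builds the value lies outside this hunk):
--
--   local extract = bit32.extract
--   local a = 3 * 0x10000 + 7 * 0x100 + 5                        -- font = 3, tag = 7, run = 5
--   print(extract(a, 8, 8), extract(a, 16, 12), extract(a, 0, 8)) --> 7   3   5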
@@ -427,7 +430,7 @@ function cases.handler(head) -- not real fast but also not used on much data
elseif id == math_code then
start = end_of_math(start)
count = 0
- elseif prev_id == kern_code and getsubtype(prev) == kerning_code then
+ elseif prev_id == kern_code and getsubtype(prev) == fontkern_code then
-- still inside a word ...normally kerns are added later
else
count = 0
@@ -535,7 +538,7 @@ end
-- elseif id == math_code then
-- start = end_of_math(start)
-- count = 0
--- elseif prev_id == kern_code and getsubtype(prev) == kerning_code then
+-- elseif prev_id == kern_code and getsubtype(prev) == fontkern_code then
-- -- still inside a word ...normally kerns are added later
-- else
-- count = 0
diff --git a/tex/context/base/mkiv/typo-cap.mkiv b/tex/context/base/mkiv/typo-cap.mkiv
index 96f3e28d6..4d1272e10 100644
--- a/tex/context/base/mkiv/typo-cap.mkiv
+++ b/tex/context/base/mkiv/typo-cap.mkiv
@@ -18,7 +18,7 @@
%D Maybe we need a more clever system: either command or style mode etc. so
%D that we can avoid the grouped mess in a simple style switch.
-\registerctxluafile{typo-cap}{1.001}
+\registerctxluafile{typo-cap}{optimize}
% \definesystemattribute[case][public] % already predefined
diff --git a/tex/context/base/mkiv/typo-chr.lua b/tex/context/base/mkiv/typo-chr.lua
index f6bcfde56..80497a492 100644
--- a/tex/context/base/mkiv/typo-chr.lua
+++ b/tex/context/base/mkiv/typo-chr.lua
@@ -88,6 +88,8 @@ if not modules then modules = { } end modules ['typo-chr'] = {
local insert, remove = table.insert, table.remove
+local context = context
+
local nodecodes = nodes.nodecodes
local glyph_code = nodecodes.glyph
local localpar_code = nodecodes.localpar
diff --git a/tex/context/base/mkiv/typo-chr.mkiv b/tex/context/base/mkiv/typo-chr.mkiv
index dc0c68664..c92c4562e 100644
--- a/tex/context/base/mkiv/typo-chr.mkiv
+++ b/tex/context/base/mkiv/typo-chr.mkiv
@@ -34,7 +34,7 @@
%D for instance when combining bits and pieces where keeping a state is complex compared
%D to cleaning up unwanted stuff.
-\registerctxluafile{typo-chr}{1.001}
+\registerctxluafile{typo-chr}{}
\definesystemattribute[marked][public]
diff --git a/tex/context/base/mkiv/typo-cln.lua b/tex/context/base/mkiv/typo-cln.lua
index 53452f838..b7187eaeb 100644
--- a/tex/context/base/mkiv/typo-cln.lua
+++ b/tex/context/base/mkiv/typo-cln.lua
@@ -10,6 +10,7 @@ if not modules then modules = { } end modules ['typo-cln'] = {
-- this case Dream Theaters' Octavium). Of course extensions will take
-- more time.
+local tonumber = tonumber
local utfbyte = utf.byte
local trace_cleaners = false trackers.register("typesetters.cleaners", function(v) trace_cleaners = v end)
@@ -36,7 +37,6 @@ local getchar = nuts.getchar
local getattr = nuts.getattr
local setattr = nuts.setattr
-local setfield = nuts.setfield
local setchar = nuts.setchar
local traverse_id = nuts.traverse_id
diff --git a/tex/context/base/mkiv/typo-cln.mkiv b/tex/context/base/mkiv/typo-cln.mkiv
index 193198dc5..efc278cfa 100644
--- a/tex/context/base/mkiv/typo-cln.mkiv
+++ b/tex/context/base/mkiv/typo-cln.mkiv
@@ -15,7 +15,7 @@
\unprotect
-\registerctxluafile{typo-cln}{1.001}
+\registerctxluafile{typo-cln}{}
\definesystemattribute[cleaner][public]
diff --git a/tex/context/base/mkiv/typo-del.mkiv b/tex/context/base/mkiv/typo-del.mkiv
index 84fe2c469..d12be6bcf 100644
--- a/tex/context/base/mkiv/typo-del.mkiv
+++ b/tex/context/base/mkiv/typo-del.mkiv
@@ -140,6 +140,18 @@
% \def\beforesubsentence{\removeunwantedspaces}
% \def\aftersubsentence {\ignorespaces}
+\newconditional\c_typo_subsentence_cleanup \settrue\c_typo_subsentence_cleanup
+
+\def\typo_subsentence_cleanup_start
+ {\ifconditional\c_typo_subsentence_cleanup
+ \expandafter\ignorespaces
+ \fi}
+
+\def\typo_subsentence_cleanup_stop
+ {\ifconditional\c_typo_subsentence_cleanup
+ \removeunwantedspaces
+ \fi}
+
\unexpanded\def\midsentence
{\dostarttagged\t!subsentencesymbol\empty
\dotagsetsubsentencesymbol\s!middle
@@ -162,10 +174,11 @@
\symbol[\ifodd\c_typo_subsentence_nesting\c!leftsentence\else\c!leftsubsentence\fi]%
\dostoptagged
\dostarttagged\t!subsentencecontent\empty
- }% \ignorespaces}
+ \typo_subsentence_cleanup_start}
\unexpanded\def\endofsubsentence % relax prevents space gobbling
- {\dostoptagged
+ {\typo_subsentence_cleanup_stop
+ \dostoptagged
\dostarttagged\t!subsentencesymbol\empty
\dotagsetsubsentencesymbol\s!right
\symbol[\ifodd\c_typo_subsentence_nesting\c!rightsentence\else\c!rightsubsentence\fi]%
@@ -184,7 +197,7 @@
\unskip
\hskip\hspaceamount\currentusedlanguage{intersentence}%
% no good, actually language dependent:
-% \ignorespaces
+ % \ignorespaces
\else
\unskip
\fi}
@@ -199,9 +212,10 @@
%D \typebuffer
%D \getbuffer
-\unexpanded\def\startsubsentence{\beginofsubsentence \prewordbreak\beginofsubsentencespacing}
-\unexpanded\def\stopsubsentence {\endofsubsentencespacing\prewordbreak\endofsubsentence}
+\unexpanded\def\startsubsentence{\beginofsubsentence\prewordbreak\beginofsubsentencespacing\typo_subsentence_cleanup_start}
+\unexpanded\def\stopsubsentence {\typo_subsentence_cleanup_stop\endofsubsentencespacing\prewordbreak\endofsubsentence}
\unexpanded\def\subsentence {\groupedcommand\startsubsentence\stopsubsentence}
+\unexpanded\def\midsubsentence {\typo_subsentence_cleanup_start\prewordbreak\midsentence\prewordbreak\typo_subsentence_cleanup_stop}
\definehspace [quotation] [\zeropoint]
\definehspace [interquotation] [.125em]
@@ -369,7 +383,7 @@
%\c!language=\v!local,
\c!repeat=\v!no]
-\def\typo_delimited_repeat_ideed
+\def\typo_delimited_repeat_indeed
{\relax\ifcase\delimitedtextlevel\else
\typo_delimited_handle_middle\c!middle
\fi}
@@ -648,6 +662,11 @@
\typo_delimited_right_symbol#1%
\egroup}
+\appendtoks
+ \localleftbox {}%
+ \localrightbox{}%
+\to \everyforgetall
+
% \starttext
% \hyphenatedword{groepsvrijstellingsverordeningen}\par
% \hyphenatedword{\quote{groepsvrijstellingsverordeningen}}\par
@@ -785,7 +804,8 @@
\dostarttaggedchained\t!delimited\currentdelimitedtext\??delimitedtext
\usedelimitedtextstyleandcolor\c!style\c!color
\typo_delimited_start_content
- \typo_delimited_attributed_e
+ \bgroup
+ \aftergroup\typo_delimited_attributed_e
\let\next=}
\def\typo_delimited_attributed_e
@@ -801,6 +821,7 @@
\usedlanguageparameter{\c!left\currentparentdelimitedtext}% was: \currentdelimitedtext
\usedelimitedtextstyleandcolor\c!style\c!color
\typo_delimited_start_content
+ \bgroup
\aftergroup\typo_delimited_fontdriven_e
\let\next=}
diff --git a/tex/context/base/mkiv/typo-dha.lua b/tex/context/base/mkiv/typo-dha.lua
index a32f72e46..af01f0f0d 100644
--- a/tex/context/base/mkiv/typo-dha.lua
+++ b/tex/context/base/mkiv/typo-dha.lua
@@ -55,13 +55,11 @@ local getchar = nuts.getchar
local getid = nuts.getid
local getsubtype = nuts.getsubtype
local getlist = nuts.getlist
-local getfield = nuts.getfield
local getattr = nuts.getattr
local getprop = nuts.getprop
local getdir = nuts.getdir
local isglyph = nuts.isglyph -- or ischar
-local setfield = nuts.setfield
local setprop = nuts.setprop
local setchar = nuts.setchar
@@ -86,8 +84,6 @@ local parfillskip_code = skipcodes.parfillskip
local new_textdir = nodepool.textdir
-local hasbit = number.hasbit
-local formatters = string.formatters
local insert = table.insert
local fonthashes = fonts.hashes
diff --git a/tex/context/base/mkiv/typo-dig.lua b/tex/context/base/mkiv/typo-dig.lua
index 3d60131c7..61e96c6b6 100644
--- a/tex/context/base/mkiv/typo-dig.lua
+++ b/tex/context/base/mkiv/typo-dig.lua
@@ -9,7 +9,7 @@ if not modules then modules = { } end modules ['typo-dig'] = {
-- we might consider doing this after the otf pass because now osf do not work
-- out well in node mode.
-local next, type = next, type
+local next, type, tonumber = next, type, tonumber
local format, insert = string.format, table.insert
local round, div = math.round, math.div
@@ -29,7 +29,6 @@ local getfont = nuts.getfont
local getchar = nuts.getchar
local getid = nuts.getid
local getwidth = nuts.getwidth
-local getfield = nuts.getfield
local takeattr = nuts.takeattr
local setlink = nuts.setlink
diff --git a/tex/context/base/mkiv/typo-dig.mkiv b/tex/context/base/mkiv/typo-dig.mkiv
index aa610b3c9..0f957fbc1 100644
--- a/tex/context/base/mkiv/typo-dig.mkiv
+++ b/tex/context/base/mkiv/typo-dig.mkiv
@@ -15,7 +15,7 @@
\unprotect
-\registerctxluafile{typo-dig}{1.001}
+\registerctxluafile{typo-dig}{}
\definesystemattribute[digits][public]
diff --git a/tex/context/base/mkiv/typo-dir.lua b/tex/context/base/mkiv/typo-dir.lua
index 5ecf77a1f..7fbf5f6d3 100644
--- a/tex/context/base/mkiv/typo-dir.lua
+++ b/tex/context/base/mkiv/typo-dir.lua
@@ -28,7 +28,6 @@ if not modules then modules = { } end modules ['typo-dir'] = {
local next, type = next, type
local format, insert, sub, find, match = string.format, table.insert, string.sub, string.find, string.match
-local formatters = string.formatters
local nodes, node = nodes, node
@@ -36,14 +35,19 @@ local trace_textdirections = false trackers.register("typesetters.directions.t
local trace_mathdirections = false trackers.register("typesetters.directions.math", function(v) trace_mathdirections = v end)
local trace_directions = false trackers.register("typesetters.directions", function(v) trace_textdirections = v trace_mathdirections = v end)
+local one_too = false directives.register("typesetters.directions.onetoo", function(v) one_too = v end)
+
local report_textdirections = logs.reporter("typesetting","text directions")
----- report_mathdirections = logs.reporter("typesetting","math directions")
-local hasbit = number.hasbit
+local band = bit32.band
local texsetattribute = tex.setattribute
local unsetvalue = attributes.unsetvalue
+local getnext = nodes.getnext
+local getattr = nodes.getattr
+
local enableaction = nodes.tasks.enableaction
local tracers = nodes.tracers
local setcolor = tracers.colors.set
@@ -62,9 +66,9 @@ local v_local = variables["local"]
local v_on = variables.on
local v_yes = variables.yes
-local m_enabled = 2^6 -- 64
-local m_global = 2^7
-local m_fences = 2^8
+local m_enabled = 0x00000040 -- 2^6 64
+local m_global = 0x00000080 -- 2^7
+local m_fences = 0x00000100 -- 2^8
local handlers = { }
local methods = { }
@@ -107,11 +111,11 @@ local function tomode(specification)
end
local function getglobal(a)
- return a and a > 0 and hasbit(a,m_global)
+ return a and a > 0 and band(a,m_global) ~= 0
end
local function getfences(a)
- return a and a > 0 and hasbit(a,m_fences)
+ return a and a > 0 and band(a,m_fences) ~= 0
end
local function getmethod(a)
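-- Editor's sketch (illustrative only, not part of the patch): how the mode bits
-- defined above are combined and then tested with band (band = bit32.band as
-- declared earlier in this file):
--
--   local band      = bit32.band
--   local m_enabled = 0x00000040
--   local m_global  = 0x00000080
--   local m_fences  = 0x00000100
--   local mode      = m_enabled + m_fences
--   print(band(mode, m_global) ~= 0)   -- false : global not requested
--   print(band(mode, m_fences) ~= 0)   -- true  : fences requested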
@@ -155,11 +159,18 @@ local enabled = false
local starttiming = statistics.starttiming
local stoptiming = statistics.stoptiming
-function directions.handler(head) -- ,_,_,_,direction) -- nodes not nuts | 5th arg is direction
- if not head.next then
+-- If we have hbox{!} then the hbox determines the direction but we can consider
+-- a fast analysis, not that it matters much because there's nothing to swap in
+-- the list unless one glyph becomes multiple (can that really happen?).
+--
+-- \enabledirectives[typesetters.directions.onetoo]
+
+function directions.handler(head,_,_,_,direction)
+ local only_one = not getnext(head)
+ if only_one and not one_too then
return head, false
end
- local attr = head[a_directions]
+ local attr = getattr(head,a_directions)
if not attr or attr == 0 then
return head, false
end
@@ -169,7 +180,7 @@ function directions.handler(head) -- ,_,_,_,direction) -- nodes not nuts | 5th a
return head, false
end
starttiming(directions)
- local head, done = handler(head)
+ local head, done = handler(head,direction,only_one)
stoptiming(directions)
return head, done
end
diff --git a/tex/context/base/mkiv/typo-dir.mkiv b/tex/context/base/mkiv/typo-dir.mkiv
index c5fd6fa41..d92c93793 100644
--- a/tex/context/base/mkiv/typo-dir.mkiv
+++ b/tex/context/base/mkiv/typo-dir.mkiv
@@ -18,11 +18,15 @@
\unprotect
-\registerctxluafile{typo-dir}{1.001}
-\registerctxluafile{typo-dha}{1.001}
-\registerctxluafile{typo-dua}{1.001}
-\registerctxluafile{typo-dub}{1.001}
-\registerctxluafile{typo-duc}{1.001}
+\registerctxluafile{typo-dir}{}
+\registerctxluafile{typo-dha}{}
+\registerctxluafile{typo-dua}{}
+\registerctxluafile{typo-dub}{}
+\doifelsefileexists{typo-duc-new.lua} {
+ \registerctxluafile{typo-duc-new}{}
+} {
+ \registerctxluafile{typo-duc}{}
+}
\definesystemattribute[directions][public]
@@ -81,14 +85,20 @@
\fi
\to \everysetupdirections
+\appendtoks
+ \edef\p_option{\directionsparameter\c!break}% name can change
+ \breakafterdirmode\ifx\p_option\v!both\plusone\else\zerocount\fi
+\to \everysetupdirections
+
% bidi: local=obey grouping, global=ignore grouping (unicode has no grouping)
\setupdirections % maybe start/stop
[\c!bidi=\v!off,
\c!method=\v!default,
+ \c!break=\v!both, % experimental value, maybe \v!no will be default (bad name too)
\c!fences=\v!yes]
-\unexpanded\edef\bidilre{\normalUchar"202A} % maybe \edef's
+\unexpanded\edef\bidilre{\normalUchar"202A}
\unexpanded\edef\bidirle{\normalUchar"202B}
\unexpanded\edef\bidipop{\normalUchar"202C}
\unexpanded\edef\bidilro{\normalUchar"202D}
@@ -131,7 +141,7 @@
end
\stopluacode
-\def\biditest#1#2#3% font text raw
+\unexpanded\def\biditest#1#2#3% font text raw
{\dontleavehmode\hbox
{\framed[offset=overlay]{\tttf#2}\quad
\enabletrackers[typesetters.directions]%
diff --git a/tex/context/base/mkiv/typo-drp.lua b/tex/context/base/mkiv/typo-drp.lua
index e27ad75f3..1e142280f 100644
--- a/tex/context/base/mkiv/typo-drp.lua
+++ b/tex/context/base/mkiv/typo-drp.lua
@@ -36,11 +36,9 @@ local getprev = nuts.getprev
local getchar = nuts.getchar
local getid = nuts.getid
local getsubtype = nuts.getsubtype
-local getfield = nuts.getfield
local getattr = nuts.getattr
local getwhd = nuts.getwhd
-local setfield = nuts.setfield
local setattr = nuts.setattr
local setlink = nuts.setlink
local setprev = nuts.setprev
diff --git a/tex/context/base/mkiv/typo-drp.mkiv b/tex/context/base/mkiv/typo-drp.mkiv
index 2520c3bfd..371ea38d6 100644
--- a/tex/context/base/mkiv/typo-drp.mkiv
+++ b/tex/context/base/mkiv/typo-drp.mkiv
@@ -17,7 +17,7 @@
\unprotect
-\registerctxluafile{typo-drp}{1.001}
+\registerctxluafile{typo-drp}{}
\definesystemattribute[initial][public]
diff --git a/tex/context/base/mkiv/typo-dua.lua b/tex/context/base/mkiv/typo-dua.lua
index c2f3c2763..1e48dfb91 100644
--- a/tex/context/base/mkiv/typo-dua.lua
+++ b/tex/context/base/mkiv/typo-dua.lua
@@ -57,7 +57,8 @@ if not modules then modules = { } end modules ['typo-dua'] = {
-- todo: combine some sweeps
--
-- This one will get frozen (or if needed in sync with basic t-bidi) and I will explore more options
--- in typo-dub.lua. There I might also be able to improve performance a bit.
+-- in typo-dub.lua. There I might also be able to improve performance a bit. Derived and improved
+-- versions will also be sped up.
local insert, remove, unpack, concat = table.insert, table.remove, table.unpack, table.concat
local utfchar = utf.char
@@ -75,11 +76,9 @@ local getid = nuts.getid
local getsubtype = nuts.getsubtype
local getlist = nuts.getlist
local getchar = nuts.getchar
-local getfield = nuts.getfield
local getprop = nuts.getprop
local getdir = nuts.getdir
-local setfield = nuts.setfield
local setprop = nuts.setprop
local setchar = nuts.setchar
local setdir = nuts.setdir
@@ -680,6 +679,8 @@ local function resolve_levels(list,size,baselevel)
end
end
+-- This is not ok but we keep it as-is:
+
local function insert_dir_points(list,size)
-- L2, but no actual reversion is done, we simply annotate where
-- begindir/endddir node will be inserted.
diff --git a/tex/context/base/mkiv/typo-dub.lua b/tex/context/base/mkiv/typo-dub.lua
index eea743c6d..d0747ae6c 100644
--- a/tex/context/base/mkiv/typo-dub.lua
+++ b/tex/context/base/mkiv/typo-dub.lua
@@ -64,11 +64,9 @@ local getsubtype = nuts.getsubtype
local getlist = nuts.getlist
local getchar = nuts.getchar
local getattr = nuts.getattr
-local getfield = nuts.getfield
local getprop = nuts.getprop
local getdir = nuts.getdir
-local setfield = nuts.setfield
local setprop = nuts.setprop
local setchar = nuts.setchar
local setdir = nuts.setdir
@@ -242,7 +240,7 @@ end
-- local space = { char = 0x0020, direction = "ws", original = "ws" }
-- local lre = { char = 0x202A, direction = "lre", original = "lre" }
--- local lre = { char = 0x202B, direction = "rle", original = "rle" }
+-- local rle = { char = 0x202B, direction = "rle", original = "rle" }
-- local pdf = { char = 0x202C, direction = "pdf", original = "pdf" }
-- local object = { char = 0xFFFC, direction = "on", original = "on" }
--
@@ -561,28 +559,30 @@ local function resolve_weak(list,size,start,limit,orderbefore,orderafter)
end
else -- probably more efficient
local runner = start + 2
- local before = list[start]
- local entry = list[start + 1]
- local after = list[runner]
- while after do
- local direction = entry.direction
- if direction == "es" then
- if before.direction == "en" and after.direction == "en" then
- entry.direction = "en"
- end
- elseif direction == "cs" then
- local prevdirection = before.direction
- if prevdirection == "en" then
- if after.direction == "en" then
+ if runner <= limit then
+ local before = list[start]
+ local entry = list[start + 1]
+ local after = list[runner]
+ while after do
+ local direction = entry.direction
+ if direction == "es" then
+ if before.direction == "en" and after.direction == "en" then
entry.direction = "en"
end
- elseif prevdirection == "an" and after.direction == "an" then
- entry.direction = "an"
+ elseif direction == "cs" then
+ local prevdirection = before.direction
+ if prevdirection == "en" then
+ if after.direction == "en" then
+ entry.direction = "en"
+ end
+ elseif prevdirection == "an" and after.direction == "an" then
+ entry.direction = "an"
+ end
end
+ before = current
+ current = after
+ after = list[runner]
end
- before = current
- current = after
- after = list[runner]
end
end
-- W5
@@ -814,11 +814,57 @@ local function resolve_levels(list,size,baselevel,analyze_fences)
end
end
+-- local function insert_dir_points(list,size)
+-- -- L2, but no actual reversion is done, we simply annotate where
+-- -- begindir/endddir node will be inserted.
+-- local maxlevel = 0
+-- local finaldir = false
+-- for i=1,size do
+-- local level = list[i].level
+-- if level > maxlevel then
+-- maxlevel = level
+-- end
+-- end
+-- for level=0,maxlevel do
+-- local started = false
+-- local begindir = nil
+-- local enddir = nil
+-- if level % 2 == 1 then
+-- begindir = "+TRT"
+-- enddir = "-TRT"
+-- else
+-- begindir = "+TLT"
+-- enddir = "-TLT"
+-- end
+-- for i=1,size do
+-- local entry = list[i]
+-- if entry.level >= level then
+-- if not started then
+-- entry.begindir = begindir
+-- started = true
+-- end
+-- else
+-- if started then
+-- list[i-1].enddir = enddir
+-- started = false
+-- end
+-- end
+-- end
+-- -- make sure to close the run at end of line
+-- if started then
+-- finaldir = enddir
+-- end
+-- end
+-- if finaldir then
+-- list[size].enddir = finaldir
+-- end
+-- end
+
local function insert_dir_points(list,size)
-- L2, but no actual reversion is done, we simply annotate where
-- begindir/endddir node will be inserted.
local maxlevel = 0
- local finaldir = false
+ local toggle = true
for i=1,size do
local level = list[i].level
if level > maxlevel then
@@ -826,15 +872,18 @@ local function insert_dir_points(list,size)
end
end
for level=0,maxlevel do
- local started = false
- local begindir = nil
- local enddir = nil
- if level % 2 == 1 then
- begindir = "+TRT"
- enddir = "-TRT"
- else
+ local started -- = false
+ local begindir -- = nil
+ local enddir -- = nil
+ local prev -- = nil
+ if toggle then
begindir = "+TLT"
enddir = "-TLT"
+ toggle = false
+ else
+ begindir = "+TRT"
+ enddir = "-TRT"
+ toggle = true
end
for i=1,size do
local entry = list[i]
@@ -845,18 +894,36 @@ local function insert_dir_points(list,size)
end
else
if started then
- list[i-1].enddir = enddir
- started = false
+ prev.enddir = enddir
+ started = false
end
end
- end
- -- make sure to close the run at end of line
- if started then
- finaldir = enddir
+ prev = entry
end
end
- if finaldir then
- list[size].enddir = finaldir
+ -- make sure to close the run at end of line
+ local last = list[size]
+ if not last.enddir then
+ local s = { }
+ local n = 0
+ for i=1,size do
+ local entry = list[i]
+ local e = entry.enddir
+ local b = entry.begindir
+ if e then
+ n = n - 1
+ end
+ if b then
+ n = n + 1
+ s[n] = b
+ end
+ end
+ if n > 0 then
+ if trace_list and n > 1 then
+ report_directions("unbalanced list")
+ end
+ last.enddir = s[n] == "+TRT" and "-TRT" or "-TLT"
+ end
end
end
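-- Editor's sketch (illustrative only, not part of the patch): a standalone rerun
-- of the new end-of-line closing logic above, for a three-entry run in which only
-- one begindir was emitted and no enddir ever followed:
--
--   local list, size = { { begindir = "+TRT" }, { }, { } }, 3
--   local s, n = { }, 0
--   for i = 1, size do
--       local entry = list[i]
--       if entry.enddir then n = n - 1 end
--       if entry.begindir then n = n + 1 ; s[n] = entry.begindir end
--   end
--   if n > 0 then
--       list[size].enddir = s[n] == "+TRT" and "-TRT" or "-TLT"
--   end
--   print(list[size].enddir)   --> -TRT, the run is closed at end of line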
diff --git a/tex/context/base/mkiv/typo-duc.lua b/tex/context/base/mkiv/typo-duc.lua
index 7fd49e54e..520740190 100644
--- a/tex/context/base/mkiv/typo-duc.lua
+++ b/tex/context/base/mkiv/typo-duc.lua
@@ -4,16 +4,18 @@ if not modules then modules = { } end modules ['typo-duc'] = {
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
license = "see context related readme files",
- comment = "Unicode bidi (sort of) variant b",
+ comment = "Unicode bidi (sort of) variant c",
}
--- This is a follow up on typo-uba which itself is a follow up on t-bidi by Khaled Hosny which
+-- Will be replaced by typo-duc-new.lua!
+
+-- This is a follow up on typo-uda which itself is a follow up on t-bidi by Khaled Hosny which
-- in turn is based on minibidi.c from Arabeyes. This is a further optimization, as well as
-- an update on some recent unicode bidi developments. There is (and will) also be more control
-- added. As a consequence this module is somewhat slower than its precursor which itself is
-- slower than the one-pass bidi handler. This is also a playground and I might add some plugin
-- support. However, in the meantime performance got a bit better and this third variant is again
--- some 10% faster than the two variant.
+-- some 10% faster than the second variant.
-- todo (cf html):
--
@@ -62,20 +64,18 @@ local tonode = nuts.tonode
local getnext = nuts.getnext
local getid = nuts.getid
local getsubtype = nuts.getsubtype
-local getchar = nuts.getchar
local getlist = nuts.getlist
+local getchar = nuts.getchar
local getattr = nuts.getattr
-local getfield = nuts.getfield
local getprop = nuts.getprop
local getdir = nuts.getdir
-local setfield = nuts.setfield
local setprop = nuts.setprop
local setchar = nuts.setchar
local setdir = nuts.setdir
local setattrlist = nuts.setattrlist
-local properties = nodes.properties
+local properties = nodes.properties.data
local remove_node = nuts.remove
local insert_node_after = nuts.insert_after
@@ -107,11 +107,11 @@ local a_directions = attributes.private('directions')
local remove_controls = true directives.register("typesetters.directions.removecontrols",function(v) remove_controls = v end)
----- analyze_fences = true directives.register("typesetters.directions.analyzefences", function(v) analyze_fences = v end)
-local trace_directions = false trackers .register("typesetters.directions.two", function(v) trace_directions = v end)
-local trace_details = false trackers .register("typesetters.directions.two.details", function(v) trace_details = v end)
-local trace_list = false trackers .register("typesetters.directions.two.list", function(v) trace_list = v end)
+local trace_directions = false trackers.register("typesetters.directions.three", function(v) trace_directions = v end)
+local trace_details = false trackers.register("typesetters.directions.three.details", function(v) trace_details = v end)
+local trace_list = false trackers.register("typesetters.directions.three.list", function(v) trace_list = v end)
-local report_directions = logs.reporter("typesetting","directions two")
+local report_directions = logs.reporter("typesetting","directions three")
-- strong (old):
--
@@ -211,27 +211,28 @@ end
local function show_done(list,size)
local joiner = utfchar(0x200C)
local result = { }
+ local format = formatters["<%s>"]
for i=1,size do
local entry = list[i]
local character = entry.char
local begindir = entry.begindir
local enddir = entry.enddir
if begindir then
- result[#result+1] = formatters["<%s>"](begindir)
+ result[#result+1] = format(begindir)
end
if entry.remove then
-- continue
elseif character == 0xFFFC then
- result[#result+1] = formatters["<%s>"]("?")
+ result[#result+1] = format("?")
elseif character == 0x0020 then
- result[#result+1] = formatters["<%s>"](" ")
+ result[#result+1] = format(" ")
elseif character >= 0x202A and character <= 0x202C then
- result[#result+1] = formatters["<%s>"](entry.original)
+ result[#result+1] = format(entry.original)
else
result[#result+1] = utfchar(character)
end
if enddir then
- result[#result+1] = formatters["<%s>"](enddir)
+ result[#result+1] = format(enddir)
end
end
return concat(result,joiner)
@@ -246,26 +247,28 @@ end
local mt_space = { __index = { char = 0x0020, direction = "ws", original = "ws", level = 0 } }
local mt_lre = { __index = { char = 0x202A, direction = "lre", original = "lre", level = 0 } }
-local mt_lre = { __index = { char = 0x202B, direction = "rle", original = "rle", level = 0 } }
+local mt_rle = { __index = { char = 0x202B, direction = "rle", original = "rle", level = 0 } }
local mt_pdf = { __index = { char = 0x202C, direction = "pdf", original = "pdf", level = 0 } }
local mt_object = { __index = { char = 0xFFFC, direction = "on", original = "on", level = 0 } }
+local stack = table.setmetatableindex("table") -- shared
+local list = { } -- shared
+
local function build_list(head) -- todo: store node pointer ... saves loop
-- P1
local current = head
- local list = { }
local size = 0
while current do
size = size + 1
local id = getid(current)
local p = properties[current]
- local t
if p and p.directions then
local skip = 0
local last = id
current = getnext(current)
while current do
local id = getid(current)
+ local p = properties[current]
if p and p.directions then
skip = skip + 1
last = id
@@ -275,35 +278,29 @@ local function build_list(head) -- todo: store node pointer ... saves loop
end
end
if id == last then -- the start id
- t = { skip = skip, id = id }
+ list[size] = setmetatable({ skip = skip, id = id },mt_object)
else
- t = { skip = skip, id = id, last = last }
+ list[size] = setmetatable({ skip = skip, id = id, last = last },mt_object)
end
- setmetatable(t,mt_object)
elseif id == glyph_code then
- local chr = getchar(current)
- local dir = directiondata[chr]
- t = { char = chr, direction = dir, original = dir, level = 0 }
- current = getnext(current)
+ local chr = getchar(current)
+ local dir = directiondata[chr]
+ list[size] = { char = chr, direction = dir, original = dir, level = 0 }
+ current = getnext(current)
-- if not list[dir] then list[dir] = true end -- not faster when we check for usage
elseif id == glue_code then -- and how about kern
- t = { }
- setmetatable(t,mt_space)
- current = getnext(current)
+ list[size] = setmetatable({ },mt_space)
+ current = getnext(current)
elseif id == dir_code then
local dir = getdir(current)
if dir == "+TLT" then
- t = { }
- setmetatable(t,mt_lre)
+ list[size] = setmetatable({ },mt_lre)
elseif dir == "+TRT" then
- t = { }
- setmetatable(t,mt_rle)
+ list[size] = setmetatable({ },mt_rle)
elseif dir == "-TLT" or dir == "-TRT" then
- t = { }
- setmetatable(t,mt_pdf)
+ list[size] = setmetatable({ },mt_pdf)
else
- t = { id = id }
- setmetatable(t,mt_object)
+ list[size] = setmetatable({ id = id },mt_object)
end
current = getnext(current)
elseif id == math_code then
@@ -315,9 +312,8 @@ local function build_list(head) -- todo: store node pointer ... saves loop
end
skip = skip + 1
current = getnext(current)
- t = { id = id, skip = skip }
- setmetatable(t,mt_object)
- else
+ list[size] = setmetatable({ id = id, skip = skip },mt_object)
+ else -- disc_code: we assume that these are the same as the surrounding
local skip = 0
local last = id
current = getnext(current)
@@ -331,16 +327,12 @@ local function build_list(head) -- todo: store node pointer ... saves loop
break
end
end
- if skip == 0 then
- t = { id = id }
- elseif id == last then -- the start id
- t = { id = id, skip = skip }
+ if id == last then -- the start id
+ list[size] = setmetatable({ id = id, skip = skip },mt_object)
else
- t = { id = id, skip = skip, last = last }
+ list[size] = setmetatable({ id = id, skip = skip, last = last },mt_object)
end
- setmetatable(t,mt_object)
end
- list[size] = t
end
return list, size
end
@@ -361,8 +353,7 @@ end
local function resolve_fences(list,size,start,limit)
-- N0: funny effects, not always better, so it's an option
- local stack = { }
- local top = 0
+ local nofstack = 0
for i=start,limit do
local entry = list[i]
if entry.direction == "on" then
@@ -373,15 +364,18 @@ local function resolve_fences(list,size,start,limit)
entry.mirror = mirror
entry.class = class
if class == "open" then
- top = top + 1
- stack[top] = { mirror, i, false }
- elseif top == 0 then
+ nofstack = nofstack + 1
+ local stacktop = stack[nofstack]
+ stacktop[1] = mirror
+ stacktop[2] = i
+ stacktop[3] = false -- not used
+ elseif nofstack == 0 then
-- skip
elseif class == "close" then
- while top > 0 do
- local s = stack[top]
- if s[1] == char then
- local open = s[2]
+ while nofstack > 0 do
+ local stacktop = stack[nofstack]
+ if stacktop[1] == char then
+ local open = stacktop[2]
local close = i
list[open ].paired = close
list[close].paired = open
@@ -389,7 +383,7 @@ local function resolve_fences(list,size,start,limit)
else
-- do we mirror or not
end
- top = top - 1
+ nofstack = nofstack - 1
end
end
end
@@ -412,27 +406,26 @@ end
-- the action
-local function get_baselevel(head,list,size) -- todo: skip if first is object (or pass head and test for localpar)
- local id = getid(head)
- if id == localpar_code then
- if getdir(head) == "TRT" then
+local function get_baselevel(head,list,size,direction)
+ if not direction and getid(head) == localpar_code then
+ direction = getdir(head)
+ end
+ if direction == "TRT" then
+ return 1, "TRT", true
+ elseif direction == "TLT" then
+ return 0, "TLT", true
+ end
+ -- P2, P3:
+ for i=1,size do
+ local entry = list[i]
+ local direction = entry.direction
+ if direction == "r" or direction == "al" then -- and an ?
return 1, "TRT", true
- else
+ elseif direction == "l" then
return 0, "TLT", true
end
- else
- -- P2, P3
- for i=1,size do
- local entry = list[i]
- local direction = entry.direction
- if direction == "r" or direction == "al" then -- and an ?
- return 1, "TRT", true
- elseif direction == "l" then
- return 0, "TLT", true
- end
- end
- return 0, "TLT", false
end
+ return 0, "TLT", false
end
local function resolve_explicit(list,size,baselevel)
@@ -440,7 +433,6 @@ local function resolve_explicit(list,size,baselevel)
-- X1
local level = baselevel
local override = "on"
- local stack = { }
local nofstack = 0
for i=1,size do
local entry = list[i]
@@ -449,7 +441,9 @@ local function resolve_explicit(list,size,baselevel)
if direction == "rle" then
if nofstack < maximum_stack then
nofstack = nofstack + 1
- stack[nofstack] = { level, override }
+ local stacktop = stack[nofstack]
+ stacktop[1] = level
+ stacktop[2] = override
level = level + (level % 2 == 1 and 2 or 1) -- least_greater_odd(level)
override = "on"
entry.level = level
@@ -462,7 +456,9 @@ local function resolve_explicit(list,size,baselevel)
elseif direction == "lre" then
if nofstack < maximum_stack then
nofstack = nofstack + 1
- stack[nofstack] = { level, override }
+ local stacktop = stack[nofstack]
+ stacktop[1] = level
+ stacktop[2] = override
level = level + (level % 2 == 1 and 1 or 2) -- least_greater_even(level)
override = "on"
entry.level = level
@@ -475,7 +471,9 @@ local function resolve_explicit(list,size,baselevel)
elseif direction == "rlo" then
if nofstack < maximum_stack then
nofstack = nofstack + 1
- stack[nofstack] = { level, override }
+ local stacktop = stack[nofstack]
+ stacktop[1] = level
+ stacktop[2] = override
level = level + (level % 2 == 1 and 2 or 1) -- least_greater_odd(level)
override = "r"
entry.level = level
@@ -488,7 +486,9 @@ local function resolve_explicit(list,size,baselevel)
elseif direction == "lro" then
if nofstack < maximum_stack then
nofstack = nofstack + 1
- stack[nofstack] = { level, override }
+ local stacktop = stack[nofstack]
+ stacktop[1] = level
+ stacktop[2] = override
level = level + (level % 2 == 1 and 1 or 2) -- least_greater_even(level)
override = "l"
entry.level = level
@@ -501,9 +501,9 @@ local function resolve_explicit(list,size,baselevel)
elseif direction == "pdf" then
if nofstack < maximum_stack then
local stacktop = stack[nofstack]
- nofstack = nofstack - 1
level = stacktop[1]
override = stacktop[2]
+ nofstack = nofstack - 1
entry.level = level
entry.direction = "bn"
entry.remove = true
@@ -591,29 +591,31 @@ local function resolve_weak(list,size,start,limit,orderbefore,orderafter)
end
else -- only more efficient when we have es/cs
local runner = start + 2
- local before = list[start]
- local entry = list[start + 1]
- local after = list[runner]
- while after do
- local direction = entry.direction
- if direction == "es" then
- if before.direction == "en" and after.direction == "en" then
- entry.direction = "en"
- end
- elseif direction == "cs" then
- local prevdirection = before.direction
- if prevdirection == "en" then
- if after.direction == "en" then
+ if runner <= limit then
+ local before = list[start]
+ local entry = list[start + 1]
+ local after = list[runner]
+ while after do
+ local direction = entry.direction
+ if direction == "es" then
+ if before.direction == "en" and after.direction == "en" then
entry.direction = "en"
end
- elseif prevdirection == "an" and after.direction == "an" then
- entry.direction = "an"
+ elseif direction == "cs" then
+ local prevdirection = before.direction
+ if prevdirection == "en" then
+ if after.direction == "en" then
+ entry.direction = "en"
+ end
+ elseif prevdirection == "an" and after.direction == "an" then
+ entry.direction = "an"
+ end
end
+ before = entry
+ entry = after
+ runner = runner + 1
+ after = list[runner]
end
- before = current
- current = after
- after = list[runner]
- runner = runner + 1
end
end
-- end
@@ -850,11 +852,67 @@ local function resolve_levels(list,size,baselevel,analyze_fences)
end
end
+-- local function insert_dir_points(list,size)
+-- -- L2, but no actual reversion is done, we simply annotate where
+-- -- begindir/enddir node will be inserted.
+-- local maxlevel = 0
+-- local finaldir = false
+-- local toggle = true
+-- for i=1,size do
+-- local level = list[i].level
+-- if level > maxlevel then
+-- maxlevel = level
+-- end
+-- end
+-- for level=0,maxlevel do
+-- local started -- = false
+-- local begindir -- = nil
+-- local enddir -- = nil
+-- local prev -- = nil
+-- if toggle then
+-- begindir = "+TLT"
+-- enddir = "-TLT"
+-- toggle = false
+-- else
+-- begindir = "+TRT"
+-- enddir = "-TRT"
+-- toggle = true
+-- end
+-- for i=1,size do
+-- local entry = list[i]
+-- if entry.level >= level then
+-- if not started then
+-- entry.begindir = begindir
+-- started = true
+-- end
+-- else
+-- if started then
+-- prev.enddir = enddir
+-- started = false
+-- end
+-- end
+-- prev = entry
+-- end
+-- -- make sure to close the run at end of line
+-- if started then
+-- finaldir = enddir
+-- end
+-- end
+-- if finaldir then
+-- list[size].enddir = finaldir
+-- end
+-- for i=1,size do
+-- print("<",i,list[i].level,list[i].begindir,list[i].enddir)
+-- end
+-- end
+
+local stack = { }
+
local function insert_dir_points(list,size)
-- L2, but no actual reversion is done, we simply annotate where
 -- begindir/enddir node will be inserted.
local maxlevel = 0
- local finaldir = false
+ local toggle = true
for i=1,size do
local level = list[i].level
if level > maxlevel then
@@ -862,15 +920,18 @@ local function insert_dir_points(list,size)
end
end
for level=0,maxlevel do
- local started = false
- local begindir = nil
- local enddir = nil
- if level % 2 == 1 then
- begindir = "+TRT"
- enddir = "-TRT"
- else
+ local started -- = false
+ local begindir -- = nil
+ local enddir -- = nil
+ local prev -- = nil
+ if toggle then
begindir = "+TLT"
enddir = "-TLT"
+ toggle = false
+ else
+ begindir = "+TRT"
+ enddir = "-TRT"
+ toggle = true
end
for i=1,size do
local entry = list[i]
@@ -881,18 +942,35 @@ local function insert_dir_points(list,size)
end
else
if started then
- list[i-1].enddir = enddir
- started = false
+ prev.enddir = enddir
+ started = false
end
end
- end
- -- make sure to close the run at end of line
- if started then
- finaldir = enddir
+ prev = entry
end
end
- if finaldir then
- list[size].enddir = finaldir
+ -- make sure to close the run at end of line
+ local last = list[size]
+ if not last.enddir then
+ local n = 0
+ for i=1,size do
+ local entry = list[i]
+ local e = entry.enddir
+ local b = entry.begindir
+ if e then
+ n = n - 1
+ end
+ if b then
+ n = n + 1
+ stack[n] = b
+ end
+ end
+ if n > 0 then
+ if trace_list and n > 1 then
+ report_directions("unbalanced list")
+ end
+ last.enddir = stack[n] == "+TRT" and "-TRT" or "-TLT"
+ end
end
end
@@ -993,18 +1071,22 @@ local function apply_to_list(list,size,head,pardir)
return head, done
end
-local function process(head)
+-- If needed we can optimize for only_one. There is no need to do anything
+-- when it's not a glyph. Otherwise we only need to check mirror and apply
+-- directions when it's different from the surrounding. Paragraphs always
+-- have more than one node. Actually, we only enter this function when we
+-- do have a glyph!
+
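+-- A sketch of such a fast path (not wired in; "mirrordata" stands for whatever
+-- mirror table this module uses and is an assumption here):
+--
+-- local function process_one(head,glyph,direction)
+--     if direction == "TRT" then
+--         local mirror = mirrordata[getchar(glyph)]
+--         if mirror then
+--             setchar(glyph,mirror)
+--             return head, true
+--         end
+--     end
+--     return head, false
+-- end
+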
+local function process(head,direction,only_one)
head = tonut(head)
-- for the moment a whole paragraph property
local attr = getattr(head,a_directions)
local analyze_fences = getfences(attr)
--
local list, size = build_list(head)
- local baselevel, pardir, dirfound = get_baselevel(head,list,size) -- we always have an inline dir node in context
- if not dirfound and trace_details then
- report_directions("no initial direction found, gambling")
- end
+ local baselevel, pardir, dirfound = get_baselevel(head,list,size,direction) -- we always have an inline dir node in context
if trace_details then
+ report_directions("analyze: direction %a, baselevel %a",dirfound and pardir or "unknown",baselevel or 1)
report_directions("before : %s",show_list(list,size,"original"))
end
resolve_explicit(list,size,baselevel)
@@ -1014,7 +1096,7 @@ local function process(head)
report_directions("after : %s",show_list(list,size,"direction"))
report_directions("result : %s",show_done(list,size))
end
- head, done = apply_to_list(list,size,head,pardir)
+ local head, done = apply_to_list(list,size,head,pardir)
return tonode(head), done
end
diff --git a/tex/context/base/mkiv/typo-fkr.mkiv b/tex/context/base/mkiv/typo-fkr.mkiv
index 684d831bc..7444db511 100644
--- a/tex/context/base/mkiv/typo-fkr.mkiv
+++ b/tex/context/base/mkiv/typo-fkr.mkiv
@@ -13,7 +13,7 @@
\writestatus{loading}{ConTeXt Typesetting Macros / Additional Font Kerning}
-\registerctxluafile{typo-fkr}{1.001}
+\registerctxluafile{typo-fkr}{}
\definesystemattribute[extrafontkern][public]
diff --git a/tex/context/base/mkiv/typo-fln.lua b/tex/context/base/mkiv/typo-fln.lua
index cef77cea1..4fb82ce44 100644
--- a/tex/context/base/mkiv/typo-fln.lua
+++ b/tex/context/base/mkiv/typo-fln.lua
@@ -38,8 +38,8 @@ local getprev = nuts.getprev
local getboth = nuts.getboth
local setboth = nuts.setboth
local getid = nuts.getid
-local getfield = nuts.getfield
-local setfield = nuts.setfield
+local getsubtype = nuts.getsubtype
+local getwidth = nuts.getwidth
local getlist = nuts.getlist
local setlist = nuts.setlist
local getattr = nuts.getattr
@@ -54,14 +54,19 @@ local nodecodes = nodes.nodecodes
local glyph_code = nodecodes.glyph
local disc_code = nodecodes.disc
local kern_code = nodecodes.kern
+local glue_code = nodecodes.glue
+
+local spaceskip_code = nodes.gluecodes.spaceskip
local traverse_id = nuts.traverse_id
local flush_node_list = nuts.flush_list
local flush_node = nuts.flush_node
local copy_node_list = nuts.copy_list
+local insert_node_before = nuts.insert_before
local insert_node_after = nuts.insert_after
local remove_node = nuts.remove
local list_dimensions = nuts.dimensions
+local hpack_node_list = nuts.hpack
local nodepool = nuts.pool
local newpenalty = nodepool.penalty
@@ -115,7 +120,7 @@ implement {
}
actions[v_line] = function(head,setting)
- -- local attribute = fonts.specifiers.contextnumber(setting.feature) -- was experimental
+ -- local attribute = fonts.specifiers.contextnumber(setting.feature) -- was experimental
local dynamic = setting.dynamic
local font = setting.font
local noflines = setting.n or 1
@@ -166,11 +171,28 @@ actions[v_line] = function(head,setting)
hsize = hsize - hangindent
end
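+
+ -- A local override of list_dimensions: we measure a processed copy (character
+ -- handling and injections applied) so that the measured width matches what will
+ -- actually be typeset; the temporary copy is packed and flushed afterwards.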
+ local function list_dimensions(list,start)
+ local temp = copy_node_list(list,start)
+ temp = tonode(temp)
+ temp = nodes.handlers.characters(temp)
+ temp = nodes.injections.handler(temp)
+ -- temp = typesetters.fontkerns.handler(temp) -- maybe when enabled
+ -- nodes.handlers.protectglyphs(temp) -- not needed as we discard
+ -- temp = typesetters.spacings.handler(temp) -- maybe when enabled
+ -- temp = typesetters.kerns.handler(temp) -- maybe when enabled
+ temp = tonut(temp)
+ temp = hpack_node_list(temp)
+ local width = getwidth(temp)
+ flush_node_list(temp)
+ return width
+ end
+
local function try(extra)
local width = list_dimensions(list,start)
if extra then
width = width + list_dimensions(extra)
end
+ -- report_firstlines("line length: %p, progression: %p, text: %s",hsize,width,nodes.listtoutf(list,nil,nil,start))
if width > hsize then
list = prev
return true
@@ -188,7 +210,10 @@ actions[v_line] = function(head,setting)
elseif id == disc_code then
-- this could be an option
n = n + 1
- if try(getfield(start,"pre")) then
+ local pre, post, replace = getdisc(start)
+ if pre and try(pre) then
+ break
+ elseif replace and try(replace) then
break
end
elseif id == kern_code then -- todo: fontkern
@@ -204,6 +229,9 @@ actions[v_line] = function(head,setting)
linebreaks[i] = n
end
end
+
+ flush_node_list(temp)
+
local start = head
local n = 0
@@ -225,6 +253,7 @@ actions[v_line] = function(head,setting)
local linebreak = linebreaks[i]
while start and n < nofchars do
local id = getid(start)
+ local ok = false
if id == glyph_code then
n = n + 1
update(start)
@@ -269,6 +298,8 @@ actions[v_line] = function(head,setting)
end
setdisc(disc,pre,post,replace)
flush_node(disc)
+ elseif id == glue_code then
+ head = insert_node_before(head,start,newpenalty(10000)) -- nobreak
end
if linebreak == n then
if trace_firstlines then
@@ -282,7 +313,7 @@ actions[v_line] = function(head,setting)
start = getnext(start)
end
end
- flush_node_list(temp)
+
return head, true
end
diff --git a/tex/context/base/mkiv/typo-fln.mkiv b/tex/context/base/mkiv/typo-fln.mkiv
index e883c57d2..37348be29 100644
--- a/tex/context/base/mkiv/typo-fln.mkiv
+++ b/tex/context/base/mkiv/typo-fln.mkiv
@@ -50,7 +50,7 @@
\unprotect
-\registerctxluafile{typo-fln}{1.001}
+\registerctxluafile{typo-fln}{}
\definesystemattribute[firstline][public]
diff --git a/tex/context/base/mkiv/typo-ini.mkiv b/tex/context/base/mkiv/typo-ini.mkiv
index 1c2d6617a..3e0790af0 100644
--- a/tex/context/base/mkiv/typo-ini.mkiv
+++ b/tex/context/base/mkiv/typo-ini.mkiv
@@ -20,7 +20,7 @@
\writestatus{loading}{ConTeXt Typographic Macros / Initialization}
-\registerctxluafile{typo-ini}{1.001}
+\registerctxluafile{typo-ini}{}
\unprotect
diff --git a/tex/context/base/mkiv/typo-inj.mkiv b/tex/context/base/mkiv/typo-inj.mkiv
index 59f35378f..3b77379b3 100644
--- a/tex/context/base/mkiv/typo-inj.mkiv
+++ b/tex/context/base/mkiv/typo-inj.mkiv
@@ -24,7 +24,7 @@
\unprotect
-\registerctxluafile{typo-inj}{1.001}
+\registerctxluafile{typo-inj}{}
% todo: no need in trialmode
diff --git a/tex/context/base/mkiv/typo-itc.lua b/tex/context/base/mkiv/typo-itc.lua
index 312832d5b..328bf1406 100644
--- a/tex/context/base/mkiv/typo-itc.lua
+++ b/tex/context/base/mkiv/typo-itc.lua
@@ -6,6 +6,7 @@ if not modules then modules = { } end modules ['typo-itc'] = {
license = "see context related readme files"
}
+local tonumber = tonumber
local trace_italics = false trackers.register("typesetters.italics", function(v) trace_italics = v end)
@@ -31,7 +32,6 @@ local nodepool = nuts.pool
local tonode = nuts.tonode
local tonut = nuts.tonut
-local getfield = nuts.getfield
local getprev = nuts.getprev
local getnext = nuts.getnext
local getid = nuts.getid
@@ -42,7 +42,6 @@ local getattr = nuts.getattr
local setattr = nuts.setattr
local getattrlist = nuts.getattrlist
local setattrlist = nuts.setattrlist
-local setfield = nuts.setfield
local setdisc = nuts.setdisc
local isglyph = nuts.isglyph
local setkern = nuts.setkern
@@ -249,9 +248,9 @@ local function domath(head,current, done)
a = a + 100
end
if trace_italics then
- report_italics("adding italic between math %C and non punctuation %C",getchar(glyph),char)
+ report_italics("%s italic %p between math %C and non punctuation %C","adding",a,getchar(glyph),char)
end
- insert_node_after(head,glyph,new_correction_kern(a))
+ insert_node_after(head,glyph,correction_kern(a,glyph))
done = true
end
end
@@ -263,11 +262,12 @@ local function domath(head,current, done)
end
local function mathhandler(head)
- local current = tonut(head)
+ local nuthead = tonut(head)
+ local current = nuthead
local done = false
while current do
if getid(current) == math_code then
- current, done = domath(head,current,done)
+ current, done = domath(nuthead,current,done)
end
current = getnext(current)
end
@@ -415,7 +415,6 @@ local function texthandler(head)
lastattr = attr
replacechar = char
replacehead = replace
- replace = current
updated = true
end
end
@@ -439,7 +438,7 @@ local function texthandler(head)
if attr and attr > 0 then
local cd = data[char]
if not cd then
- -- this really can happen
+ -- this really can happen
-- postitalic = 0
else
postitalic = cd.italic
@@ -452,8 +451,7 @@ local function texthandler(head)
lastattr = attr
postchar = char
posthead = post
- post = current
- updated = true
+ updated = true
end
end
end
diff --git a/tex/context/base/mkiv/typo-itc.mkvi b/tex/context/base/mkiv/typo-itc.mkvi
index b0d2673cd..5aeb67239 100644
--- a/tex/context/base/mkiv/typo-itc.mkvi
+++ b/tex/context/base/mkiv/typo-itc.mkvi
@@ -22,7 +22,7 @@
%D \setupitaliccorrection[global,always]
%D \stoptyping
-\registerctxluafile{typo-itc}{1.001}
+\registerctxluafile{typo-itc}{}
\definesystemattribute[italics] [public]
\definesystemattribute[mathitalics][public]
diff --git a/tex/context/base/mkiv/typo-krn.lua b/tex/context/base/mkiv/typo-krn.lua
index 24a91d6b6..71d9736a4 100644
--- a/tex/context/base/mkiv/typo-krn.lua
+++ b/tex/context/base/mkiv/typo-krn.lua
@@ -8,6 +8,8 @@ if not modules then modules = { } end modules ['typo-krn'] = {
-- glue is still somewhat suboptimal
-- components: better split on tounicode
+--
+-- maybe ignore when properties[n].injections.cursivex (or mark)
local next, type, tonumber = next, type, tonumber
@@ -31,7 +33,6 @@ local insert_node_after = nuts.insert_after
local end_of_math = nuts.end_of_math
local use_components = nuts.use_components
-local getfield = nuts.getfield
local getnext = nuts.getnext
local getprev = nuts.getprev
local getid = nuts.getid
@@ -79,7 +80,7 @@ local user_list_code = listcodes.unknown
local discretionary_code = disccodes.discretionary
local automatic_code = disccodes.automatic
-local kerning_code = kerncodes.kerning
+local fontkern_code = kerncodes.fontkern
local userkern_code = kerncodes.userkern
local userskip_code = skipcodes.userskip
local spaceskip_code = skipcodes.spaceskip
@@ -103,8 +104,8 @@ local v_auto = interfaces.variables.auto
typesetters = typesetters or { }
local typesetters = typesetters
-typesetters.kerns = typesetters.kerns or { }
-local kerns = typesetters.kerns
+local kerns = typesetters.kerns or { }
+typesetters.kerns = kerns
local report = logs.reporter("kerns")
local trace_ligatures = false trackers.register("typesetters.kerns.ligatures", function(v) trace_ligatures = v end)
@@ -113,7 +114,6 @@ local trace_ligatures_d = false trackers.register("typesetters.kerns.ligatures
kerns.mapping = kerns.mapping or { }
kerns.factors = kerns.factors or { }
local a_kerns = attributes.private("kern")
-local a_fontkern = attributes.private('fontkern')
local contextsetups = fonts.specifiers.contextsetups
@@ -223,7 +223,7 @@ end
local function inject_begin(boundary,prev,keeptogether,krn,ok) -- prev is a glyph
local char, id = isglyph(boundary)
if id == kern_code then
- if getsubtype(boundary) == kerning_code or getattr(boundary,a_fontkern) then
+ if getsubtype(boundary) == fontkern_code then
local inject = true
if keeptogether then
local next = getnext(boundary)
@@ -259,7 +259,7 @@ local function inject_end(boundary,next,keeptogether,krn,ok)
local tail = find_node_tail(boundary)
local char, id = getid(tail)
if id == kern_code then
- if getsubtype(tail) == kerning_code or getattr(tail,a_fontkern) then
+ if getsubtype(tail) == fontkern_code then
local inject = true
if keeptogether then
local prev = getprev(tail)
@@ -309,7 +309,7 @@ local function process_list(head,keeptogether,krn,font,okay)
if mark[char] then
-- skip
elseif pid == kern_code then
- if getsubtype(prev) == kerning_code or getattr(prev,a_fontkern) then
+ if getsubtype(prev) == fontkern_code then
local inject = true
if keeptogether then
local prevprev = getprev(prev)
@@ -408,7 +408,7 @@ function kerns.handler(head)
elseif mark[char] then
-- skip
elseif previd == kern_code then
- if getsubtype(prev) == kerning_code or getattr(prev,a_fontkern) then
+ if getsubtype(prev) == fontkern_code then
local inject = true
if keeptogether then
if previd == glyph_code and keeptogether(prev,start) then
@@ -446,24 +446,24 @@ function kerns.handler(head)
elseif id == disc_code then
local prev, next, pglyph, nglyph -- delayed till needed
local subtype = getsubtype(start)
- if subtype == automatic_code then
- -- this is kind of special, as we have already injected the
- -- previous kern
- local prev = getprev(start)
- local pglyph = prev and getid(prev) == glyph_code
- languages.expand(start,pglyph and prev)
- -- we can have a different start now
- elseif subtype ~= discretionary_code then
- prev = getprev(start)
- pglyph = prev and getid(prev) == glyph_code
- languages.expand(start,pglyph and prev)
- end
+ -- if subtype == automatic_code then
+ -- -- this is kind of special, as we have already injected the
+ -- -- previous kern
+ -- local prev = getprev(start)
+ -- local pglyph = prev and getid(prev) == glyph_code
+ -- languages.expand(start,pglyph and prev)
+ -- -- we can have a different start now
+ -- elseif subtype ~= discretionary_code then
+ -- prev = getprev(start)
+ -- pglyph = prev and getid(prev) == glyph_code
+ -- languages.expand(start,pglyph and prev)
+ -- end
local pre, post, replace = getdisc(start)
local indeed = false
if pre then
local okay = false
if not prev then
- prev = prev or getprev(start)
+ prev = getprev(start)
pglyph = prev and getid(prev) == glyph_code
end
if pglyph then
@@ -491,7 +491,7 @@ function kerns.handler(head)
if replace then
local okay = false
if not prev then
- prev = prev or getprev(start)
+ prev = getprev(start)
pglyph = prev and getid(prev) == glyph_code
end
if pglyph then
@@ -518,7 +518,7 @@ function kerns.handler(head)
end
bound = false
elseif id == kern_code then
- bound = getsubtype(start) == kerning_code or getattr(start,a_fontkern)
+ bound = getsubtype(start) == fontkern_code
prev = start
previd = id
elseif id == glue_code then
@@ -564,7 +564,7 @@ function kerns.handler(head)
start = getnext(start)
end
elseif id == kern_code then
- bound = getsubtype(start) == kerning_code or getattr(start,a_fontkern)
+ bound = getsubtype(start) == fontkern_code
prev = start
previd = id
start = getnext(start)
diff --git a/tex/context/base/mkiv/typo-krn.mkiv b/tex/context/base/mkiv/typo-krn.mkiv
index 6d6126542..fedbce4d1 100644
--- a/tex/context/base/mkiv/typo-krn.mkiv
+++ b/tex/context/base/mkiv/typo-krn.mkiv
@@ -15,7 +15,7 @@
\unprotect
-\registerctxluafile{typo-krn}{1.001}
+\registerctxluafile{typo-krn}{}
\definesystemattribute[kern][public]
diff --git a/tex/context/base/mkiv/typo-lan.mkiv b/tex/context/base/mkiv/typo-lan.mkiv
index 6bc052b99..fed88789a 100644
--- a/tex/context/base/mkiv/typo-lan.mkiv
+++ b/tex/context/base/mkiv/typo-lan.mkiv
@@ -15,7 +15,7 @@
\unprotect
-\registerctxluafile{typo-lan}{1.001}
+\registerctxluafile{typo-lan}{}
%D \macros
%D {averagecharwidth, charwidthlanguage}
diff --git a/tex/context/base/mkiv/typo-lin.lua b/tex/context/base/mkiv/typo-lin.lua
index d702bcb8c..ebf748a82 100644
--- a/tex/context/base/mkiv/typo-lin.lua
+++ b/tex/context/base/mkiv/typo-lin.lua
@@ -87,8 +87,6 @@ local getid = nuts.getid
local getnext = nuts.getnext
local getprev = nuts.getprev
local getboth = nuts.getboth
-local getfield = nuts.getfield
-local setfield = nuts.setfield
local setlink = nuts.setlink
local setkern = nuts.setkern
local getkern = nuts.getkern
diff --git a/tex/context/base/mkiv/typo-lin.mkiv b/tex/context/base/mkiv/typo-lin.mkiv
index eba08b4c8..a05f770fe 100644
--- a/tex/context/base/mkiv/typo-lin.mkiv
+++ b/tex/context/base/mkiv/typo-lin.mkiv
@@ -15,7 +15,7 @@
%D Beware: the mvl callback is also called when a line is added to a vbox.
-\registerctxluafile{typo-lin}{1.001}
+\registerctxluafile{typo-lin}{}
\unprotect
diff --git a/tex/context/base/mkiv/typo-mar.lua b/tex/context/base/mkiv/typo-mar.lua
index a5d607cd7..bc9c408c1 100644
--- a/tex/context/base/mkiv/typo-mar.lua
+++ b/tex/context/base/mkiv/typo-mar.lua
@@ -13,18 +13,21 @@ if not modules then modules = { } end modules ['typo-mar'] = {
local format, validstring = string.format, string.valid
local insert, remove, sortedkeys, fastcopy = table.insert, table.remove, table.sortedkeys, table.fastcopy
-local setmetatable, next = setmetatable, next
+local setmetatable, next, tonumber = setmetatable, next, tonumber
local formatters = string.formatters
local toboolean = toboolean
local settings_to_hash = utilities.parsers.settings_to_hash
-local attributes, nodes, node, variables = attributes, nodes, node, variables
+local attributes = attributes
+local nodes = nodes
+local variables = variables
+local context = context
-local trace_margindata = false trackers.register("typesetters.margindata", function(v) trace_margindata = v end)
-local trace_marginstack = false trackers.register("typesetters.margindata.stack", function(v) trace_marginstack = v end)
-local trace_margingroup = false trackers.register("typesetters.margindata.group", function(v) trace_margingroup = v end)
+local trace_margindata = false trackers.register("typesetters.margindata", function(v) trace_margindata = v end)
+local trace_marginstack = false trackers.register("typesetters.margindata.stack", function(v) trace_marginstack = v end)
+local trace_margingroup = false trackers.register("typesetters.margindata.group", function(v) trace_margingroup = v end)
-local report_margindata = logs.reporter("margindata")
+local report_margindata = logs.reporter("margindata")
local tasks = nodes.tasks
local prependaction = tasks.prependaction
@@ -66,7 +69,6 @@ local traverse_id = nuts.traverse_id
local flush_node_list = nuts.flush_list
local getfield = nuts.getfield
-local setfield = nuts.setfield
local getnext = nuts.getnext
local getprev = nuts.getprev
local getid = nuts.getid
@@ -165,7 +167,7 @@ setmetatable(displaystore,mt2)
local defaults = {
__index = {
location = v_left,
- align = v_normal,
+ align = v_normal, -- not used
method = "",
name = "",
threshold = 0, -- .25ex
diff --git a/tex/context/base/mkiv/typo-mar.mkiv b/tex/context/base/mkiv/typo-mar.mkiv
index 462cc633e..f265f173c 100644
--- a/tex/context/base/mkiv/typo-mar.mkiv
+++ b/tex/context/base/mkiv/typo-mar.mkiv
@@ -36,7 +36,7 @@
%D need an extra pass to get inner and outer alignments in sync with
%D the pagebuilder.
-\registerctxluafile{typo-mar}{1.001}
+\registerctxluafile{typo-mar}{}
%definesystemattribute[margindata] % only at the lua end
diff --git a/tex/context/base/mkiv/typo-ovl.lua b/tex/context/base/mkiv/typo-ovl.lua
new file mode 100644
index 000000000..09cf5e129
--- /dev/null
+++ b/tex/context/base/mkiv/typo-ovl.lua
@@ -0,0 +1,183 @@
+if not modules then modules = { } end modules ['typo-ovl'] = {
+ version = 1.001,
+ comment = "companion to typo-ovl.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- This is dubious code. If you need it, your source is probably bad. We only used
+-- it when we had to mark bad content, but when cleaning up some project code I decided
+-- that it is easier to maintain in the distribution than in a project style. After all,
+-- we have hardly any private code. For convenience I hooked it into the existing
+-- replacement module (as it used the same code anyway). I did some cleanup.
+
+local next = next
+
+local context = context
+
+local nuts = nodes.nuts
+local tonut = nodes.tonut
+local tonode = nodes.tonode
+
+local nodecodes = nodes.nodecodes
+local glyph_code = nodecodes.glyph
+local disc_code = nodecodes.disc
+
+local getnext = nuts.getnext
+local getid = nuts.getid
+local getdisc = nuts.getdisc
+local getattr = nuts.getattr
+local setattr = nuts.setattr
+local getattrlist = nuts.getattrlist
+local setattrlist = nuts.setattrlist
+local getfield = nuts.getfield
+local setfont = nuts.setfont
+
+local unsetvalue = attributes.unsetvalue
+local prvattribute = attributes.private
+
+local texgetbox = tex.getbox
+local currentfont = font.current
+
+local a_overloads = attributes.private("overloads")
+local n_overloads = 0
+local t_overloads = { }
+
+local overloaded = { }
+
+local function markasoverload(a)
+ local n = prvattribute(a)
+ if n then
+ overloaded[n] = a
+ end
+end
+
+attributes.markasoverload = markasoverload
+
+markasoverload("color")
+markasoverload("colormodel")
+markasoverload("transparency")
+markasoverload("case")
+markasoverload("negative")
+markasoverload("effect")
+markasoverload("ruled")
+markasoverload("shifted")
+markasoverload("kernchars")
+markasoverload("kern")
+markasoverload("noligature")
+markasoverload("viewerlayer")
+
+local function tooverloads(n)
+ local current = tonut(n)
+ local a = getattrlist(current)
+ local s = { }
+ while a do
+ local n = getfield(a,"number")
+ local o = overloaded[n]
+ if o then
+ local v = getfield(a,"value")
+ if v ~= unsetvalue then
+ s[n] = v
+ -- print(o,n,v)
+ end
+ end
+ a = getnext(a)
+ end
+ return s
+end
+
+attributes.tooverloads = tooverloads
+
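+-- Apply a registered set of overloads (attribute values and optionally a font) to
+-- the glyphs between start and stop, including the glyphs inside discretionaries.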
+function attributes.applyoverloads(specification,start,stop)
+ local current = tonut(start)
+ local processor = specification.processor
+ local overloads = specification.processor or getattr(current,a_overloads)
+ if overloads and overloads ~= unsetvalue then
+ overloads = t_overloads[overloads]
+ if not overloads then
+ return
+ end
+ else
+ return
+ end
+
+ local last = stop and tonut(stop)
+ local oldlist = nil
+ local newlist = nil
+ local newfont = overloads.font
+
+ local function apply(n)
+ local a = getattrlist(n)
+ if a == oldlist then
+ setattrlist(n,newlist)
+ else
+ oldlist = getattrlist(n)
+ for k, v in next, overloads do
+ setattr(n,k,v)
+ end
+ newlist = n -- getattrlist(n)
+ end
+ if newfont then
+ setfont(n,newfont)
+ end
+ end
+
+ while current do
+ local id = getid(current)
+ if id == glyph_code then
+ apply(current)
+ elseif id == disc_code then
+ apply(current)
+ local pre, post, replace = getdisc(current)
+ if pre then
+ while pre do
+ if getid(pre) == glyph_code then
+ apply(pre)
+ end
+ pre = getnext(pre)
+ end
+ end
+ if post then
+ while post do
+ if getid(post) == glyph_code then
+ apply(post)
+ end
+ post = getnext(post)
+ end
+ end
+ if replace then
+ while replace do
+ if getid(replace) == glyph_code then
+ apply(replace)
+ end
+ replace = getnext(replace)
+ end
+ end
+ end
+ if current == last then
+ break
+ end
+ current = getnext(current)
+ end
+end
+
+-- we assume the same highlight so we're global
+
+interfaces.implement {
+ name = "overloadsattribute",
+ arguments = { "string", "integer", "integer" },
+ actions = function(name,font,box)
+ local samplebox = texgetbox(box)
+ local sample = samplebox and samplebox.list
+ local overloads = sample and tooverloads(sample)
+ if overloads then
+ overloads.font = font > 0 and font or false
+ n_overloads = n_overloads + 1
+ t_overloads[n_overloads] = overloads
+ t_overloads[name] = overloads
+ context(n_overloads)
+ else
+ context(unsetvalue)
+ end
+ end
+}
diff --git a/tex/context/base/mkiv/typo-ovl.mkiv b/tex/context/base/mkiv/typo-ovl.mkiv
new file mode 100644
index 000000000..025ae8a8f
--- /dev/null
+++ b/tex/context/base/mkiv/typo-ovl.mkiv
@@ -0,0 +1,115 @@
+%D \module
+%D [ file=typo-ovl,
+%D version=2016.02.03, % about that time (maybe earlier)
+%D title=\CONTEXT\ Typesetting Macros,
+%D subtitle=Overloads,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\writestatus{loading}{ConTeXt Typesetting Macros / Overloads}
+
+\unprotect
+
+%D See \LUA\ file for comments. I'll probably never use this code outside the
+%D project where it had some experimental use. (Occasionally we do some spell
+%D checking and suspicious word usage marking.) So there is not much documentation
+%D here. Keep in mind that overloading only makes sense for content that gets
+%D replaced as otherwise normal attributes make more sense. Using this otherwise
+%D is weird but one never knows what users come up with.
+
+\registerctxluafile{typo-ovl}{}
+
+\definesystemattribute[overloads][public,global]
+
+\installcorenamespace{overloads}
+
+%D Watch how we abuse highlights.
+
+\newcount\c_typo_overload_font_id
+
+\unexpanded\def\typo_overloads_define#1%
+ {\begingroup
+ % we pack so we avoid interference (and for sure don't process!)
+ \setbox\scratchbox\hpack\bgroup
+ \global\c_typo_overload_font_id\fontid\font
+ \resetallattributes % so we only change what gets set
+ \highlight[#1]%
+ \signalcharacter
+ \global\c_typo_overload_font_id
+ \ifnum\c_typo_overload_font_id=\fontid\font
+ \zerocount
+ \else
+ \fontid\font
+ \fi
+ \egroup
+ \global\expandafter\chardef\csname\??overloads#1\endcsname
+ \clf_overloadsattribute{#1}\c_typo_overload_font_id\scratchbox
+ \endgroup}
+
+\unexpanded\def\setoverloads[#1]%
+ {\ifcsname\??overloads#1\endcsname\else
+ \typo_overloads_define{#1}%
+ \fi
+ \attribute\overloadsattribute\csname\??overloads#1\endcsname}
+
+\unexpanded\def\registeroverloads[#1]%
+ {\ifcsname\??overloads#1\endcsname\else
+ \typo_overloads_define{#1}%
+ \fi}
+
+\unexpanded\def\resetoverloads
+ {\overloadsattribute\attributeunsetvalue}
+
+%D We hook this into the replacement module, probably the only place where it makes
+%D sense. Here is an adapted version of an example:
+%D
+%D \starttyping
+%D \replaceword[basics][new] [old]
+%D \replaceword[basics][now] [hl3->never]
+%D \replaceword[basics][never][now]
+%D \replaceword[basics][heck] []
+%D
+%D \definehighlight[hl1][style={\setbar[underbar]\red\setcharactercasing[WORD]\bf}]
+%D \definehighlight[hl2][style={\setbar[overbar]\blue\setcharactercasing[Words]}]
+%D \definehighlight[hl3][style={\setbar[overstrike]\green\bs}]
+%D
+%D \registeroverloads[hl1]
+%D \registeroverloads[hl2]
+%D \registeroverloads[hl3]
+%D
+%D \start \setreplacements[basics][hl1] What the heck, it's now or never, isn't it new? \stop \par
+%D \start \setreplacements[basics][hl2] What the heck, it's now or never, isn't it new? \stop \par
+%D \start \setreplacements[basics][hl1] What the heck, it's now or never, isn't it new? \stop \par
+%D \start \setreplacements[basics] What the heck, it's now or never, isn't it new? \stop \par
+%D \stoptyping
+%D
+%D We used it to mark synonyms that should not be corrected by the editor. Sort of the
+%D reverse of not having a word in a vetted wordlist. A bit out of place in a typesetting
+%D system. If really needed we can support multiple words separated by spaces but I didn't
+%D need it. And \unknown\ fonts are supported but at fixed size!
+
+\ifdefined\setreplacements
+
+ \unexpanded\def\setreplacements
+ {\dodoubleempty\languages_replacements_set}
+
+ \unexpanded\def\languages_replacements_set[#1][#2]%
+ {\clf_setreplacements{#1}%
+ \ifsecondargument
+ \setoverloads[#2]%
+ \else
+ \resetoverloads
+ \fi}
+
+\else
+
+ % something weird
+
+\fi
+
+\protect
diff --git a/tex/context/base/mkiv/typo-pag.lua b/tex/context/base/mkiv/typo-pag.lua
index d6f71c8cc..b5759a097 100644
--- a/tex/context/base/mkiv/typo-pag.lua
+++ b/tex/context/base/mkiv/typo-pag.lua
@@ -28,8 +28,6 @@ local a_keeptogether = attributes.private("keeptogether")
local nuts = nodes.nuts
local tonut = nuts.tonut
-local getfield = nuts.getfield
-local setfield = nuts.setfield
local getnext = nuts.getnext
local getprev = nuts.getprev
local getid = nuts.getid
diff --git a/tex/context/base/mkiv/typo-pag.mkiv b/tex/context/base/mkiv/typo-pag.mkiv
index 30d2b51b7..07443eb16 100644
--- a/tex/context/base/mkiv/typo-pag.mkiv
+++ b/tex/context/base/mkiv/typo-pag.mkiv
@@ -15,6 +15,6 @@
\unprotect
-\registerctxluafile{typo-pag}{1.001}
+\registerctxluafile{typo-pag}{}
\protect \endinput
diff --git a/tex/context/base/mkiv/typo-par.mkiv b/tex/context/base/mkiv/typo-par.mkiv
index 8572f31b8..3db0ffa45 100644
--- a/tex/context/base/mkiv/typo-par.mkiv
+++ b/tex/context/base/mkiv/typo-par.mkiv
@@ -23,7 +23,7 @@
\unprotect
-\registerctxluafile{node-ltp}{1.001}
-\registerctxluafile{trac-par}{1.001}
+\registerctxluafile{node-ltp}{}
+\registerctxluafile{trac-par}{}
\protect \endinput
diff --git a/tex/context/base/mkiv/typo-pnc.lua b/tex/context/base/mkiv/typo-pnc.lua
new file mode 100644
index 000000000..1ed8d9940
--- /dev/null
+++ b/tex/context/base/mkiv/typo-pnc.lua
@@ -0,0 +1,169 @@
+if not modules then modules = { } end modules ['typo-pnc'] = {
+ version = 1.001,
+ comment = "companion to typo-pnc.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local nodes = nodes
+local fonts = fonts
+
+local prependaction = nodes.tasks.prependaction
+local enableaction = nodes.tasks.enableaction
+
+local nuts = nodes.nuts
+local tonut = nodes.tonut
+
+local nodecodes = nodes.nodecodes
+local gluecodes = nodes.gluecodes
+local glyph_code = nodecodes.glyph
+local glue_code = nodecodes.glue
+local spaceskip_code = gluecodes.spaceskip
+
+local new_kern = nuts.pool.kern
+local insert_after = nuts.insert_after
+local traverse_id = nuts.traverse_id
+
+local getchar = nuts.getchar
+local getfont = nuts.getfont
+local getboth = nuts.getboth
+local getnext = nuts.getnext
+local getattr = nuts.getattr
+local getid = nuts.getid
+local getsubtype = nuts.getsubtype
+local getwidth = nuts.getwidth
+local setwidth = nuts.setwidth
+
+local parameters = fonts.hashes.parameters
+local categories = characters.categories
+
+local texsetattribute = tex.setattribute
+local unsetvalue = attributes.unsetvalue
+
+local period = 0x2E
+local factor = 0.5
+
+-- alternative: tex.getlccode and tex.getuccode
+
+typesetters = typesetters or { }
+local typesetters = typesetters
+
+local periodkerns = typesetters.periodkerns or { }
+typesetters.periodkerns = periodkerns
+
+local report = logs.reporter("period kerns")
+local trace = false
+
+trackers.register("typesetters.periodkerns",function(v) trace = v end)
+
+periodkerns.mapping = periodkerns.mapping or { }
+periodkerns.factors = periodkerns.factors or { }
+local a_periodkern = attributes.private("periodkern")
+
+storage.register("typesetters/periodkerns/mapping", periodkerns.mapping, "typesetters.periodkerns.mapping")
+storage.register("typesetters/periodkerns/factors", periodkerns.factors, "typesetters.periodkerns.factors")
+
+local mapping = periodkerns.mapping
+local factors = periodkerns.factors
+
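+-- The handler looks for the pattern <letter><period><letter><period>, as in "e.g.",
+-- and, for a nonzero factor, inserts a kern of factor times the font space after the
+-- first period; a following spaceskip that is wider than a plain space is reset to
+-- the font space unless the next word starts with an uppercase letter.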
+function periodkerns.handler(head)
+ local done = false
+ local hnut = tonut(head)
+ for current in traverse_id(glyph_code,hnut) do
+ if getchar(current) == period then
+ local a = getattr(current,a_periodkern)
+ if a then
+ local factor = mapping[a]
+ if factor then
+ local prev, next = getboth(current)
+ if prev and next and getid(prev) == glyph_code and getid(next) == glyph_code then
+ local pchar = getchar(prev)
+ local pcode = categories[getchar(prev)]
+ if pcode == "lu" or pcode == "ll" then
+ local nchar = getchar(next)
+ local ncode = categories[getchar(next)]
+ if ncode == "lu" or ncode == "ll" then
+ local next2 = getnext(next)
+ if next2 and getid(next2) == glyph_code and getchar(next2) == period then
+ -- A.B.
+ local fontspace, inserted
+ if factor ~= 0 then
+ fontspace = parameters[getfont(current)].space -- can be sped up
+ inserted = factor * fontspace
+ insert_after(hnut,current,new_kern(inserted))
+ if trace then
+ report("inserting space at %C . [%p] %C .",pchar,inserted,nchar)
+ end
+ done = true
+ end
+ local next3 = getnext(next2)
+ if next3 and getid(next3) == glue_code and getsubtype(next3) == spaceskip_code then
+ local width = getwidth(next3)
+ local space = fontspace or parameters[getfont(current)].space -- can be sped up
+ if width > space then -- space + extraspace
+ local next4 = getnext(next3)
+ if next4 and getid(next4) == glyph_code then
+ local fchar = getchar(next4)
+ if categories[fchar] ~= "lu" then
+ -- A.B.X
+ if trace then
+ if inserted then
+ report("reverting space at %C . [%p] %C . [%p->%p] %C",pchar,inserted,nchar,width,space,fchar)
+ else
+ report("reverting space at %C . %C . [%p->%p] %C",pchar,nchar,width,space,fchar)
+ end
+ end
+ setwidth(next3,space)
+ done = true
+ else
+ if trace then
+ if inserted then
+ report("keeping space at %C . [%p] %C . [%p] %C",pchar,inserted,nchar,width,fchar)
+ else
+ report("keeping space at %C . %C . [%p] %C",pchar,nchar,width,fchar)
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ return head, done
+end
+
+local enabled = false
+
+function periodkerns.set(factor)
+ factor = tonumber(factor) or 0
+ if not enabled then
+ prependaction("processors","normalizers","typesetters.periodkerns.handler")
+ enableaction("processors","typesetters.periodkerns.handler")
+ enabled = true
+ end
+ local a = factors[factor]
+ if not a then
+ a = #mapping + 1
+ factors[factor], mapping[a] = a, factor
+ end
+ factor = a
+ texsetattribute(a_periodkern,factor)
+ return factor
+end
+
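+-- The factor reaches us as a string from the \setperiodkerning macro; calling
+-- typesetters.periodkerns.set("0.5") directly from Lua has the same effect.
+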
+-- interface
+
+interfaces.implement {
+ name = "setperiodkerning",
+ actions = periodkerns.set,
+ arguments = "string"
+}
+
+
diff --git a/tex/context/base/mkiv/typo-pnc.mkiv b/tex/context/base/mkiv/typo-pnc.mkiv
new file mode 100644
index 000000000..39d0c85f4
--- /dev/null
+++ b/tex/context/base/mkiv/typo-pnc.mkiv
@@ -0,0 +1,55 @@
+%D \module
+%D [ file=typo-pnc,
+%D version=2017.06.23, % experiment in project
+%D title=\CONTEXT\ Typesetting Macros,
+%D subtitle=Punctuation,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\writestatus{loading}{ConTeXt Typesetting Macros / Punctuation}
+
+\unprotect
+
+% In manuals we often have "foo e.g. bar" where we then have to compensate spaces
+% with "foo e.g.\ bar" so let's see if we can automate that and at the same time
+% inject spaces between the snippets. It gets boring writing this kind of code.
+
+\registerctxluafile{typo-pnc}{}
+
+\definesystemattribute[periodkern][public,global]
+
+\installcorenamespace{periodkerning}
+
+\installcommandhandler \??periodkerning {periodkerning} \??periodkerning
+
+\unexpanded\def\setperiodkerning[#1]%
+ {\edef\currentperiodkerning{#1}%
+ \ifx\currentperiodkerning\s!reset
+ \resetperiodkerning
+ \else
+ \typo_periodkerning_set
+ \fi}
+
+\def\typo_periodkerning_set
+ {\clf_setperiodkerning{\periodkerningparameter\c!factor}}
+
+\unexpanded\def\resetperiodkerning % fast one
+ {\attribute\periodkernattribute\attributeunsetvalue}
+
+\appendtoks
+ \resetperiodkerning
+\to \everyresettypesetting
+
+\defineperiodkerning [zerospaceperiods] [\c!factor=0]
+\defineperiodkerning [smallspaceperiods] [\c!factor=.25]
+\defineperiodkerning [halfspaceperiods] [\c!factor=.5]
+
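+% A minimal usage sketch (illustrative only, not part of the original code):
+%
+% \setperiodkerning[halfspaceperiods]
+% Manuals are full of e.g. this and i.e. that, so the periods get compensated.
+% \resetperiodkerning
+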
+% If we ever need a command, see typo-krn.mkiv but it doesn't make much sense
+% to have it.
+
+\protect
diff --git a/tex/context/base/mkiv/typo-prc.lua b/tex/context/base/mkiv/typo-prc.lua
index cde66df00..2704149db 100644
--- a/tex/context/base/mkiv/typo-prc.lua
+++ b/tex/context/base/mkiv/typo-prc.lua
@@ -41,9 +41,9 @@ local becomes = P('->')
local processor = (1-becomes)^1
local splitter = C(processor) * becomes * Cs(patterns.argument + patterns.content)
-function processors.split(str)
+function processors.split(str,nocheck)
local p, s = lpegmatch(splitter,str)
- if registered[p] then
+ if p and (nocheck or registered[p]) then
return p, s
else
return false, str
diff --git a/tex/context/base/mkiv/typo-prc.mkvi b/tex/context/base/mkiv/typo-prc.mkvi
index 49a165696..859c33a65 100644
--- a/tex/context/base/mkiv/typo-prc.mkvi
+++ b/tex/context/base/mkiv/typo-prc.mkvi
@@ -16,8 +16,8 @@
%D For the moment manipulators are loaded here too, as they're in the same
%D category as processors. This might change. (They are used in publications.)
-\registerctxluafile{typo-prc}{1.001}
-\registerctxluafile{typo-man}{1.001}
+\registerctxluafile{typo-prc}{}
+\registerctxluafile{typo-man}{}
\unprotect
diff --git a/tex/context/base/mkiv/typo-rep.lua b/tex/context/base/mkiv/typo-rep.lua
index 5266aa103..b6aae0cae 100644
--- a/tex/context/base/mkiv/typo-rep.lua
+++ b/tex/context/base/mkiv/typo-rep.lua
@@ -51,11 +51,9 @@ nodes.stripping = nodes.stripping or { } local stripping = nodes.stripping
stripping.glyphs = stripping.glyphs or { } local glyphs = stripping.glyphs
local function initialize()
- for k,v in next, chardata do
- if v.category == "cf" and v.visible ~= "yes" then
- if not glyphs[k] then
- glyphs[k] = true
- end
+ for k, v in next, chardata do
+ if v.category == "cf" and not v.visible and not glyphs[k] then
+ glyphs[k] = true
end
end
initialize = nil
diff --git a/tex/context/base/mkiv/typo-rep.mkiv b/tex/context/base/mkiv/typo-rep.mkiv
index c1146997e..df8408999 100644
--- a/tex/context/base/mkiv/typo-rep.mkiv
+++ b/tex/context/base/mkiv/typo-rep.mkiv
@@ -33,7 +33,7 @@
\unprotect
-\registerctxluafile{typo-rep}{1.001}
+\registerctxluafile{typo-rep}{}
\definesystemattribute[stripping][public]
diff --git a/tex/context/base/mkiv/typo-rub.lua b/tex/context/base/mkiv/typo-rub.lua
index 9621a6218..8c41a5611 100644
--- a/tex/context/base/mkiv/typo-rub.lua
+++ b/tex/context/base/mkiv/typo-rub.lua
@@ -39,8 +39,6 @@ local getid = nuts.getid
local getsubtype = nuts.getsubtype
local getattr = nuts.getattr
local setattr = nuts.setattr
-local getfield = nuts.getfield
-local setfield = nuts.setfield
local getnext = nuts.getnext
local setnext = nuts.setnext
local getprev = nuts.getprev
@@ -300,9 +298,6 @@ local function whatever(current)
else
local left = new_kern(delta/2)
local right = new_kern(delta/2)
--- setlink(left,start)
--- setlink(stop,right)
--- setlink(text,left)
setlink(text,left,start)
setlink(stop,right)
end
@@ -354,17 +349,11 @@ local function whatever(current)
elseif align == v_flushright then
local left = new_kern(-delta)
local right = new_kern(delta)
--- setlink(left,text)
--- setlink(text,right)
--- setlink(right,start)
- setlink(left,text,right,start)
- setlist(current,left)
+ setlink(left,text,right,start)
+ setlist(current,left)
else
local left = new_kern(-delta/2)
local right = new_kern(delta/2)
--- setlink(left,text)
--- setlink(text,right)
--- setlink(right,start)
setlink(left,text,right,start)
setlist(current,left)
end
diff --git a/tex/context/base/mkiv/typo-rub.mkiv b/tex/context/base/mkiv/typo-rub.mkiv
index 7b996089b..d51c53aa4 100644
--- a/tex/context/base/mkiv/typo-rub.mkiv
+++ b/tex/context/base/mkiv/typo-rub.mkiv
@@ -19,7 +19,7 @@
\unprotect
-\registerctxluafile{typo-rub}{1.001}
+\registerctxluafile{typo-rub}{}
\definesystemattribute[ruby][public]
diff --git a/tex/context/base/mkiv/typo-scr.mkiv b/tex/context/base/mkiv/typo-scr.mkiv
index 7b8d62dfb..d4881b80a 100644
--- a/tex/context/base/mkiv/typo-scr.mkiv
+++ b/tex/context/base/mkiv/typo-scr.mkiv
@@ -146,9 +146,9 @@
\setbox\plussix \hbox{\typo_scripts_lowhigh_low_high\raise\c!up \t!sup{#3}}%
\doif{#1}{\v!left}
{\ifdim\wd\plusfour<\wd\plussix
- \setbox\plusfour\hbox to \wd\plussix {\hss\box\plusfour}%
+ \setbox\plusfour\hpack to \wd\plussix {\hss\box\plusfour}%
\else
- \setbox\plussix \hbox to \wd\plusfour{\hss\box\plussix }%
+ \setbox\plussix \hpack to \wd\plusfour{\hss\box\plussix }%
\fi}%
\ifdim\wd\plusfour<\wd\plussix
\wd\plusfour\zeropoint
@@ -252,9 +252,9 @@
\fi
\fi
\relax
- \setbox\plustwo \hbox to \scratchdimen{\hss\lower\lowmidhighparameter\c!down\box\plustwo \hss}%
- \setbox\plusfour\hbox to \scratchdimen{\hss \box\plusfour\hss}%
- \setbox\plussix \hbox to \scratchdimen{\hss\raise\lowmidhighparameter\c!up \box\plussix \hss}%
+ \setbox\plustwo \hpack to \scratchdimen{\hss\lower\lowmidhighparameter\c!down\box\plustwo \hss}%
+ \setbox\plusfour\hpack to \scratchdimen{\hss \box\plusfour\hss}%
+ \setbox\plussix \hpack to \scratchdimen{\hss\raise\lowmidhighparameter\c!up \box\plussix \hss}%
\wd\plustwo \zeropoint
\wd\plusfour\zeropoint
\box\plusfour
diff --git a/tex/context/base/mkiv/typo-spa.mkiv b/tex/context/base/mkiv/typo-spa.mkiv
index 2e3e71bf3..fc73ef021 100644
--- a/tex/context/base/mkiv/typo-spa.mkiv
+++ b/tex/context/base/mkiv/typo-spa.mkiv
@@ -15,7 +15,7 @@
\unprotect
-\registerctxluafile{typo-spa}{1.001}
+\registerctxluafile{typo-spa}{}
\definesystemattribute[spacing][public]
diff --git a/tex/context/base/mkiv/typo-sus.lua b/tex/context/base/mkiv/typo-sus.lua
index f728993f6..2d3037bdc 100644
--- a/tex/context/base/mkiv/typo-sus.lua
+++ b/tex/context/base/mkiv/typo-sus.lua
@@ -6,6 +6,8 @@ if not modules then modules = { } end modules ['typo-sus'] = {
license = "see context related readme files"
}
+local next = next
+
local punctuation = {
po = true,
}
@@ -44,7 +46,6 @@ local tonode = nodes.tonode
local getid = nuts.getid
local getprev = nuts.getprev
local getnext = nuts.getnext
-local getfield = nuts.getfield
local getattr = nuts.getattr
local getfont = nuts.getfont
local getlist = nuts.getlist
diff --git a/tex/context/base/mkiv/typo-sus.mkiv b/tex/context/base/mkiv/typo-sus.mkiv
index fe44e6327..508580e85 100644
--- a/tex/context/base/mkiv/typo-sus.mkiv
+++ b/tex/context/base/mkiv/typo-sus.mkiv
@@ -16,7 +16,7 @@
%D This is a rather special module, mostly needed by ourselves for
%D projects where copy||editing is not that precise.
-\registerctxluafile{typo-sus}{1.001}
+\registerctxluafile{typo-sus}{}
\unexpanded\def\showsuspects{\enabletrackers[typesetters.suspects]}
diff --git a/tex/context/base/mkiv/typo-tal.lua b/tex/context/base/mkiv/typo-tal.lua
index 67380f24b..870d006cc 100644
--- a/tex/context/base/mkiv/typo-tal.lua
+++ b/tex/context/base/mkiv/typo-tal.lua
@@ -14,7 +14,7 @@ if not modules then modules = { } end modules ['typo-tal'] = {
-- We can speed up by saving the current fontcharacters[font] + lastfont.
-local next, type = next, type
+local next, type, tonumber = next, type, tonumber
local div = math.div
local utfbyte = utf.byte
@@ -38,6 +38,7 @@ local tonode = nuts.tonode
local getnext = nuts.getnext
local getprev = nuts.getprev
+local getboth = nuts.getboth
local getid = nuts.getid
local getfont = nuts.getfont
local getchar = nuts.getchar
@@ -100,7 +101,7 @@ local validsigns = {
-- If needed we can have more modes which then also means a faster simple handler
-- for non numbers.
-local function setcharacteralign(column,separator)
+local function setcharacteralign(column,separator,before,after)
if not enabled then
enableaction("processors","typesetters.characteralign.handler")
enabled = true
@@ -123,11 +124,14 @@ local function setcharacteralign(column,separator)
separator = comma
method = v_number
end
+ local before = tonumber(before) or 0
+ local after = tonumber(after) or 0
dataset = {
separator = separator,
list = { },
- maxafter = 0,
- maxbefore = 0,
+ maxbefore = before,
+ maxafter = after,
+ predefined = before > 0 or after > 0,
collected = false,
method = method,
separators = validseparators,
@@ -152,6 +156,12 @@ implement {
arguments = { "integer", "string" }
}
+implement {
+ name = "setcharacteraligndetail",
+ actions = setcharacteralign,
+ arguments = { "integer", "string", "dimension", "dimension" }
+}
+
implement {
name = "resetcharacteralign",
actions = resetcharacteralign
@@ -257,10 +267,9 @@ function characteralign.handler(originalhead,where)
elseif (b_start or a_start) and id == glue_code then
-- maybe only in number mode
-- somewhat inefficient
- local next = getnext(current)
- local prev = getprev(current)
+ local prev, next = getboth(current)
if next and prev and getid(next) == glyph_code and getid(prev) == glyph_code then -- too much checking
- local width = fontcharacters[getfont(b_start)][separator or period].width
+ local width = fontcharacters[getfont(b_start or a_start)][separator or period].width
setglue(current,width)
setattr(current,a_character,punctuationspace)
if a_start then
@@ -310,8 +319,25 @@ function characteralign.handler(originalhead,where)
current = getnext(current)
end
end
- local entry = list[row]
- if entry then
+ local predefined = dataset.predefined
+ local before, after
+ if predefined then
+ before = b_start and list_dimensions(b_start,getnext(b_stop)) or 0
+ after = a_start and list_dimensions(a_start,getnext(a_stop)) or 0
+ else
+ local entry = list[row]
+ if entry then
+ before = entry.before or 0
+ after = entry.after or 0
+ else
+ before = b_start and list_dimensions(b_start,getnext(b_stop)) or 0
+ after = a_start and list_dimensions(a_start,getnext(a_stop)) or 0
+ list[row] = {
+ before = before,
+ after = after,
+ }
+ return tonode(head), true
+ end
if not dataset.collected then
-- print("[maxbefore] [maxafter]")
local maxbefore = 0
@@ -330,61 +356,53 @@ function characteralign.handler(originalhead,where)
dataset.maxafter = maxafter
dataset.collected = true
end
- local maxbefore = dataset.maxbefore
- local maxafter = dataset.maxafter
- local before = entry.before or 0
- local after = entry.after or 0
- local new_kern = trace_split and traced_kern or new_kern
- if b_start then
- if before < maxbefore then
- head = insert_node_before(head,b_start,new_kern(maxbefore-before))
- end
- if not c then
- -- print("[before]")
- if dataset.hasseparator then
- local width = fontcharacters[getfont(b_stop)][separator].width
- insert_node_after(head,b_stop,new_kern(maxafter+width))
- end
- elseif a_start then
- -- print("[before] [separator] [after]")
- if after < maxafter then
- insert_node_after(head,a_stop,new_kern(maxafter-after))
- end
- else
- -- print("[before] [separator]")
- if maxafter > 0 then
- insert_node_after(head,c,new_kern(maxafter))
- end
+ end
+ local maxbefore = dataset.maxbefore
+ local maxafter = dataset.maxafter
+ local new_kern = trace_split and traced_kern or new_kern
+ if b_start then
+ if before < maxbefore then
+ head = insert_node_before(head,b_start,new_kern(maxbefore-before))
+ end
+ if not c then
+ -- print("[before]")
+ if dataset.hasseparator then
+ local width = fontcharacters[getfont(b_start)][separator].width
+ insert_node_after(head,b_stop,new_kern(maxafter+width))
end
elseif a_start then
- if c then
- -- print("[separator] [after]")
- if maxbefore > 0 then
- head = insert_node_before(head,c,new_kern(maxbefore))
- end
- else
- -- print("[after]")
- local width = fontcharacters[getfont(b_stop)][separator].width
- head = insert_node_before(head,a_start,new_kern(maxbefore+width))
- end
+ -- print("[before] [separator] [after]")
if after < maxafter then
insert_node_after(head,a_stop,new_kern(maxafter-after))
end
- elseif c then
- -- print("[separator]")
- if maxbefore > 0 then
- head = insert_node_before(head,c,new_kern(maxbefore))
- end
+ else
+ -- print("[before] [separator]")
if maxafter > 0 then
insert_node_after(head,c,new_kern(maxafter))
end
end
- else
- entry = {
- before = b_start and list_dimensions(b_start,getnext(b_stop)) or 0,
- after = a_start and list_dimensions(a_start,getnext(a_stop)) or 0,
- }
- list[row] = entry
+ elseif a_start then
+ if c then
+ -- print("[separator] [after]")
+ if maxbefore > 0 then
+ head = insert_node_before(head,c,new_kern(maxbefore))
+ end
+ else
+ -- print("[after]")
+ local width = fontcharacters[getfont(b_stop)][separator].width
+ head = insert_node_before(head,a_start,new_kern(maxbefore+width))
+ end
+ if after < maxafter then
+ insert_node_after(head,a_stop,new_kern(maxafter-after))
+ end
+ elseif c then
+ -- print("[separator]")
+ if maxbefore > 0 then
+ head = insert_node_before(head,c,new_kern(maxbefore))
+ end
+ if maxafter > 0 then
+ insert_node_after(head,c,new_kern(maxafter))
+ end
end
return tonode(head), true
end
diff --git a/tex/context/base/mkiv/typo-tal.mkiv b/tex/context/base/mkiv/typo-tal.mkiv
index 570f1a1f5..7170578d1 100644
--- a/tex/context/base/mkiv/typo-tal.mkiv
+++ b/tex/context/base/mkiv/typo-tal.mkiv
@@ -22,7 +22,7 @@
\unprotect
-\registerctxluafile{typo-tal}{1.001}
+\registerctxluafile{typo-tal}{}
\definesystemattribute[characteralign][public]
@@ -56,10 +56,11 @@
%D
%D \typebuffer \blank \getbuffer \blank
-\unexpanded\def\signalcharacteralign#1#2{\attribute\characteralignattribute\numexpr#1*\maxcardminusone+#2\relax} % 0xFFFF
-\unexpanded\def\setcharacteralign #1#2{\clf_setcharacteralign#1{#2}}
-\unexpanded\def\resetcharacteralign {\clf_resetcharacteralign}
-\unexpanded\def\nocharacteralign {\attribute\characteralignattribute\attributeunsetvalue}
+\unexpanded\def\signalcharacteralign #1#2{\attribute\characteralignattribute\numexpr#1*\maxcardminusone+#2\relax} % 0xFFFF
+\unexpanded\def\setcharacteralign #1#2{\clf_setcharacteralign#1{#2}}
+\unexpanded\def\resetcharacteralign {\clf_resetcharacteralign}
+\unexpanded\def\nocharacteralign {\attribute\characteralignattribute\attributeunsetvalue}
+\unexpanded\def\setcharacteraligndetail#1#2#3#4{\clf_setcharacteraligndetail#1{#2}#3#4\relax}
%D Mostly downward compatible:
%D
@@ -73,18 +74,54 @@
%D \stopbuffer
%D
%D \typebuffer \blank \getbuffer \blank
-
+%D
+%D \startbuffer
+%D \startcharacteralign[leftsample=123.456,rightsample=00,character={,}]
+%D \checkcharacteralign{123.456,78}\par
+%D \checkcharacteralign {456}\par
+%D \checkcharacteralign {23.456}\par
+%D \checkcharacteralign {78,9}\par
+%D \checkcharacteralign {78}\par
+%D \stopcharacteralign
+%D \stopbuffer
+%D
+%D \typebuffer \blank \getbuffer \blank
+%D
+%D \startbuffer
+%D \startcharacteralign[leftwidth=123.456,rightwidth=00,character={,}]
+%D \checkcharacteralign{123.456,78}\par
+%D \checkcharacteralign {456}\par
+%D \checkcharacteralign {23.456}\par
+%D \checkcharacteralign {78,9}\par
+%D \checkcharacteralign {78}\par
+%D \stopcharacteralign
+%D \stopbuffer
+%D
+%D \typebuffer \blank \getbuffer \blank
+%D
%D We have (currently) two modes: \type {text} and \type {number}. The handler tries
%D to determine the mode automatically. When using periods and commas as separators
%D the \type {number} mode is chosen. If you use for instance a \type {-} as
%D separator, \type {text} is chosen, but you can enforce \type {number} with \type
-%D {number->-} (as with other mechanisms, the arrow indicates a methot to apply).
+%D {number->-} (as with other mechanisms, the arrow indicates a method to apply).
%D
%D One can use \type {\nocharacteralign} to disable this mechanism, for instance in
%D a table cell.
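%D
%D A minimal sketch, not part of the original examples, that enforces number
%D mode with a dash as separator:
%D
%D \startbuffer
%D \startcharacteralign[number->-]
%D \checkcharacteralign{10-3}\par
%D \checkcharacteralign {2-45}\par
%D \stopcharacteralign
%D \stopbuffer
%D
%D \typebuffer \blank \getbuffer \blank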
\def\alignmentcharacter{,}
+\installcorenamespace{characteralign}
+
+\installparameterhandler\??characteralign {characteralign}
+\installsetuphandler \??characteralign {characteralign}
+
+\setupcharacteralign
+ [\c!leftwidth =\zeropoint,
+ \c!rightwidth =\zeropoint,
+ \c!leftsample =,
+ \c!rightsample=,
+ \c!character =\alignmentcharacter]
+
\unexpanded\def\typo_charalign_pass_one
{\advance\scratchcounter\plusone
\setbox\scratchbox\typo_charalign_pass}
@@ -99,13 +136,71 @@
\unexpanded\def\startcharacteralign
{\dosingleempty\typo_charalign_start}
-\def\typo_charalign_start[#1]#2\stopcharacteralign
+\def\typo_charalign_start[#1]%
+ {\doifelseassignment{#1}\typo_charalign_start_one\typo_charalign_start_two{#1}}
+
+\def\typo_charalign_start_one#1#2\stopcharacteralign
+ {\bgroup
+ % for now no instances
+ \setupcurrentcharacteralign[#1]%
+ \edef\p_left {\characteralignparameter\c!leftsample}%
+ \edef\p_right{\characteralignparameter\c!rightsample}%
+ \ifx\p_left\empty
+ \scratchdimenone\dimexpr\characteralignparameter\c!leftwidth\relax
+ \else
+ \setbox\scratchbox\hbox{\p_left}%
+ \scratchdimenone\wd\scratchbox
+ \fi
+ \ifx\p_right\empty
+ \scratchdimentwo\dimexpr\characteralignparameter\c!rightwidth\relax
+ \else
+ \setbox\scratchbox\hbox{\p_right}%
+ \scratchdimentwo\wd\scratchbox
+ \fi
+ \ifzeropt\scratchdimenone
+ \ifzeropt\scratchdimentwo
+ \donefalse
+ \else
+ \donetrue
+ \fi
+ \else
+ \donetrue
+ \fi
+ \edef\alignmentcharacter{\characteralignparameter\c!character}%
+ \ifdone
+ \clf_setcharacteraligndetail
+ \plusone
+ \alignmentcharacter
+ \scratchdimenone
+ \scratchdimentwo
+ \else
+ \clf_setcharacteralign
+ \plusone
+ \alignmentcharacter
+ \begingroup
+ \scratchcounter\zerocount
+ \let\checkcharacteralign\typo_charalign_pass_one
+ \settrialtypesetting
+ #2\relax
+ \endgroup
+ \fi
+ \begingroup
+ \scratchcounter\zerocount
+ \let\checkcharacteralign\typo_charalign_pass_two
+ #2\relax
+ \endgroup
+ \resetcharacteralign
+ \egroup}
+
+\def\typo_charalign_start_two#1#2\stopcharacteralign
{\bgroup
\edef\m_temp{#1}%
\ifx\m_temp\empty \else
\let\alignmentcharacter\m_temp
\fi
- \setcharacteralign\plusone\alignmentcharacter
+ \clf_setcharacteralign
+ \plusone
+ \alignmentcharacter
\begingroup
\scratchcounter\zerocount
\let\checkcharacteralign\typo_charalign_pass_one
diff --git a/tex/context/base/mkiv/typo-txt.mkvi b/tex/context/base/mkiv/typo-txt.mkvi
index 7562fe70c..e7d16fd00 100644
--- a/tex/context/base/mkiv/typo-txt.mkvi
+++ b/tex/context/base/mkiv/typo-txt.mkvi
@@ -17,7 +17,7 @@
\unprotect
-% registerctxluafile{typo-txt}{1.001}
+% registerctxluafile{typo-txt}{}
%D \macros
%D {normalizefontheight,normalizefontwidth,normalizedfontsize}
diff --git a/tex/context/base/mkiv/typo-wrp.mkiv b/tex/context/base/mkiv/typo-wrp.mkiv
index 081349050..0e010515b 100644
--- a/tex/context/base/mkiv/typo-wrp.mkiv
+++ b/tex/context/base/mkiv/typo-wrp.mkiv
@@ -15,7 +15,7 @@
\unprotect
-\registerctxluafile{typo-wrp}{1.001}
+\registerctxluafile{typo-wrp}{}
%D This definition has moved from page-lin.tex to spac-hor.tex (due to
%D visualization added in august 2003) and now to here (november 2014)
diff --git a/tex/context/base/mkiv/unic-ini.lua b/tex/context/base/mkiv/unic-ini.lua
index 132c92efa..07e3a64c3 100644
--- a/tex/context/base/mkiv/unic-ini.lua
+++ b/tex/context/base/mkiv/unic-ini.lua
@@ -6,6 +6,7 @@ if not modules then modules = { } end modules ['unic-ini'] = {
license = "see context related readme files"
}
+local context = context
local utfchar = utf.char
-- Beware, initializing unicodechar happens at first usage and takes
diff --git a/tex/context/base/mkiv/unic-ini.mkiv b/tex/context/base/mkiv/unic-ini.mkiv
index 13ad4bdb9..684e3aa04 100644
--- a/tex/context/base/mkiv/unic-ini.mkiv
+++ b/tex/context/base/mkiv/unic-ini.mkiv
@@ -13,7 +13,7 @@
\writestatus{loading}{ConTeXt Unicode Support / Initialization}
-\registerctxluafile{unic-ini}{1.001}
+\registerctxluafile{unic-ini}{}
\unprotect
@@ -29,7 +29,7 @@
\def\unicodechar#1{\clf_unicodechar{#1}}
\unexpanded\def\unknownchar
- {\dontleavehmode\hbox{\vrule\s!width.5\emwidth\s!height\exheight\s!depth\zeropoint}}
+ {\dontleavehmode\hpack{\vrule\s!width.5\emwidth\s!height\exheight\s!depth\zeropoint}}
\ifdefined\zwnbsp\else \let\zwnbsp\relax \fi % zerowidthnonbreakablespace
diff --git a/tex/context/base/mkiv/util-deb.lua b/tex/context/base/mkiv/util-deb.lua
index 57e015386..b8db0c583 100644
--- a/tex/context/base/mkiv/util-deb.lua
+++ b/tex/context/base/mkiv/util-deb.lua
@@ -145,7 +145,8 @@ function debugger.showstats(printer,threshold)
local functions = 0
local dataset = { }
local length = 0
- local wholetime = 0
+ local realtime = 0
+ local totaltime = 0
local threshold = threshold or 0
for name, sources in next, names do
for source, lines in next, sources do
@@ -162,8 +163,9 @@ function debugger.showstats(printer,threshold)
if real < 0 then
real = 0
end
- wholetime = wholetime + real
+ realtime = realtime + real
end
+ totaltime = totaltime + total
if line < 0 then
line = 0
end
@@ -201,23 +203,22 @@ function debugger.showstats(printer,threshold)
if length > 50 then
length = 50
end
- local fmt = string.formatters["%4.9k %4.9k %3.3k %8i %-" .. length .. "s %4i %s"]
+ local fmt = string.formatters["%4.9k s %3.3k %% %4.9k s %3.3k %% %8i # %-" .. length .. "s %4i %s"]
for i=1,#dataset do
- local data = dataset[i]
- local real = data[1]
- local total = data[2]
- local count = data[3]
- local name = data[4]
- local source = data[5]
- local line = data[6]
- local percent = real / wholetime
+ local data = dataset[i]
+ local real = data[1]
+ local total = data[2]
+ local count = data[3]
+ local name = data[4]
+ local source = data[5]
+ local line = data[6]
calls = calls + count
functions = functions + 1
name = gsub(name,"%s+"," ")
if #name > length then
name = sub(name,1,length)
end
- printer(fmt(seconds(total),seconds(real),percent,count,name,line,source))
+ printer(fmt(seconds(total),100*total/totaltime,seconds(real),100*real/realtime,count,name,line,source))
end
printer("")
printer(format("functions : %i", functions))
diff --git a/tex/context/base/mkiv/util-dim.lua b/tex/context/base/mkiv/util-dim.lua
index 2bdb870e7..fd9351a28 100644
--- a/tex/context/base/mkiv/util-dim.lua
+++ b/tex/context/base/mkiv/util-dim.lua
@@ -429,7 +429,7 @@ end
stringtodimen = string.todimen -- local variable defined earlier
function number.toscaled(d)
- return format("%0.5f",d/2^16)
+ return format("%0.5f",d/0x10000) -- 2^16
end
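+
+-- for example: number.toscaled(65536) --> "1.00000" (65536 scaled points, one TeX point)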
--[[ldx--
diff --git a/tex/context/base/mkiv/util-env.lua b/tex/context/base/mkiv/util-env.lua
index 0b832e72e..064bd513a 100644
--- a/tex/context/base/mkiv/util-env.lua
+++ b/tex/context/base/mkiv/util-env.lua
@@ -17,56 +17,58 @@ local environment = environment
-- locales are a useless feature in and even dangerous for luatex
-os.setlocale(nil,nil) -- setlocale("all","C")
+local setlocale = os.setlocale
-function os.setlocale()
- -- no way you can mess with it
-end
+setlocale(nil,nil) -- setlocale("all","C")
--- do
---
--- local setlocale = os.setlocale
---
--- function os.resetlocale()
--- setlocale(nil,nil)
--- end
---
--- function os.pushlocale(l,...)
--- insert(stack, {
--- collate = setlocale(nil,"collate"),
--- ctype = setlocale(nil,"ctype"),
--- monetary = setlocale(nil,"monetary"),
--- numeric = setlocale(nil,"numeric"),
--- time = setlocale(nil,"time"),
--- })
--- if l then
--- setlocale(l,...)
--- else
--- setlocale(status.lc_collate ,"collate"),
--- setlocale(status.lc_ctype ,"ctype"),
--- setlocale(status.lc_monetary,"monetary"),
--- setlocale(status.lc_numeric ,"numeric"),
--- setlocale(status.lc_time ,"time"),
--- end
--- end
+-- function os.resetlocale()
+-- setlocale(nil,nil)
+-- end
--
--- function os.poplocale(...)
--- local l = remove(stack)
--- if l then
--- setlocale(unpack(l))
--- else
--- resetlocale()
--- end
+-- function os.pushlocale(l,...)
+-- insert(stack, {
+-- collate = setlocale(nil,"collate"),
+-- ctype = setlocale(nil,"ctype"),
+-- monetary = setlocale(nil,"monetary"),
+-- numeric = setlocale(nil,"numeric"),
+-- time = setlocale(nil,"time"),
+-- })
+-- if l then
+-- setlocale(l,...)
+-- else
+-- setlocale(status.lc_collate ,"collate"),
+-- setlocale(status.lc_ctype ,"ctype"),
+-- setlocale(status.lc_monetary,"monetary"),
+-- setlocale(status.lc_numeric ,"numeric"),
+-- setlocale(status.lc_time ,"time"),
-- end
+-- end
--
--- function os.setlocale()
--- -- no way you can mess with it, use push/pop
+-- function os.poplocale()
+-- local l = remove(stack)
+-- if l then
+-- setlocale(unpack(l))
+-- else
+-- resetlocale()
-- end
---
--- setlocale(nil,nil) -- setlocale("all","C")
---
-- end
+local report = logs.reporter("system")
+
+function os.setlocale(a,b)
+ if a or b then
+ if report then
+ report()
+ report("You're messing with os.setlocale in a supposedly locale neutral environment. From")
+ report("now on you are on your own and without support. Crashes or unexpected side effects")
+ report("can happen but don't bother the luatex and context developer team with it.")
+ report()
+ report = nil
+ end
+ setlocale(a,b)
+ end
+end
+
-- dirty tricks (we will replace the texlua call by luatex --luaonly)
local validengines = allocate {
diff --git a/tex/context/base/mkiv/util-evo-imp-server.lua b/tex/context/base/mkiv/util-evo-imp-server.lua
new file mode 100644
index 000000000..f4dd5b3f7
--- /dev/null
+++ b/tex/context/base/mkiv/util-evo-imp-server.lua
@@ -0,0 +1,127 @@
+if not modules then modules = { } end modules ['util-imp-evohome-server'] = {
+ version = 1.002,
+ comment = "simple server for simple evohome extensions",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE",
+ license = "see context related readme files"
+}
+
+local P, C, patterns, lpegmatch = lpeg.P, lpeg.C, lpeg.patterns, lpeg.match
+local urlhashed, urlquery, urlunescapeget = url.hashed, url.query, url.unescapeget
+local ioflush = io.flush
+
+local newline = patterns.newline
+local spacer = patterns.spacer
+local whitespace = patterns.whitespace
+local method = P("GET")
+ + P("POST")
+local identify = (1-method)^0
+ * C(method)
+ * spacer^1
+ * C((1-spacer)^1)
+ * spacer^1
+ * P("HTTP/")
+ * (1-whitespace)^0
+ * C(P(1)^0)
+
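+-- A rough illustration of what the pattern extracts (the request text is made
+-- up): matching
+--
+--   lpegmatch(identify,"GET /?category=room&action=office HTTP/1.1\r\nHost: x")
+--
+-- yields "GET", "/?category=room&action=office" and the rest of the request,
+-- which is all the simple server below needs.
+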
+do
+
+ local loaded = package.loaded
+
+ if not loaded.socket then loaded.socket = loaded["socket.core"] end
+ if not loaded.mime then loaded.mime = loaded["mime.core"] end
+
+end
+
+local evohome = require("util-evo")
+ require("trac-lmx")
+
+local report = logs.reporter("evohome","server")
+local convert = lmx.convert
+
+function evohome.server(specification)
+
+ local filename = specification.filename
+
+ if not filename then
+ report("unable to run server, no filename given")
+ return
+ end
+
+ local step, process, presets = evohome.actions.poller(filename)
+
+ if not (step and process and presets) then
+ report("unable to run server, invalid presets")
+ return
+ end
+
+ local template = presets.files.template
+
+ if not template then
+ report("unable to run server, no template given")
+ return
+ end
+
+ local port = specification.port or (presets.server and presets.server.port) or 8068
+ local host = specification.host or (presets.server and presets.server.host) or "*"
+
+ package.extraluapath(presets.filepath)
+
+ local socket = socket or require("socket")
+ local copas = copas or require("copas")
+
+ local function copashttp(skt)
+ local client = copas.wrap(skt)
+ local request, e = client:receive()
+ if not e then
+ local method, fullurl, body = lpegmatch(identify,request)
+ if method ~= "" and fullurl ~= "" then
+ local fullurl = urlunescapeget(fullurl)
+ local hashed = urlhashed(fullurl)
+ process(hashed.queries or { })
+ ioflush()
+ end
+ -- todo: split off css and use that instead of general one, now too much
+ local content = convert(presets.results and presets.results.template or template,false,presets)
+ if not content then
+ report("error in converting template")
+ content = "error in template"
+ end
+ client:send("HTTP/1.1 200 OK\r\n")
+ client:send("Connection: close\r\n")
+ client:send("Content-Length: " .. #content .. "\r\n")
+ client:send("Content-Type: text/html\r\n")
+ client:send("Location: " .. host .. "\r\n")
+ client:send("Cache-Control: no-cache, no-store, must-revalidate, max-age=0\r\n")
+ client:send("\r\n")
+ client:send(content)
+ client:send("\r\n")
+ client:close()
+ end
+ end
+
+ local function copaspoll()
+ while step do
+ local delay = step()
+ if type(delay) == "number" then
+ copas.sleep(delay or 0)
+ end
+ end
+ end
+
+ local server = socket.bind(host,port)
+
+ if server then
+ report("server started at %s:%s",host,port)
+ ioflush()
+ copas.addserver(server,copashttp)
+ copas.addthread(copaspoll)
+ copas.loop()
+ else
+ report("unable to start server at %s:%s",host,port)
+ os.exit()
+ end
+
+end
+
+return evohome
diff --git a/tex/context/base/mkiv/util-evo.lua b/tex/context/base/mkiv/util-evo.lua
new file mode 100644
index 000000000..7f0b59ac4
--- /dev/null
+++ b/tex/context/base/mkiv/util-evo.lua
@@ -0,0 +1,1002 @@
+if not modules then modules = { } end modules ['util-evo'] = {
+ version = 1.002,
+ comment = "library for fetching data from an evohome device",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE",
+ license = "see context related readme files"
+}
+
+-- When I needed a new boiler for heating I decided to replace a partial
+-- (experimental) zwave few-zone solution by the honeywell evohome system that can
+-- drive opentherm. I admit that I was not that satisfied beforehand with the fact
+-- that one has to go via some outside portal to communicate with the box but let's
+-- hope that this will change (I will experiment with the additional usb interface
+-- later). Anyway, apart from integrating it into my home automation setup so that I
+-- can add control based on someone present in a zone, I wanted to be able to render
+-- statistics. So that's why we have a module in ConTeXt for doing that. It's also
+-- an example of Lua and abusing LuaTeX for something not related to typesetting.
+--
+-- As with other scripts, it assumes that mtxrun is used so that we have the usual
+-- Lua libraries present.
+--
+-- The code is not that complex but figuring out the right request takes a bit of
+-- searching the web. There is an api specification at:
+--
+-- https://developer.honeywell.com/api-methods?field_smart_method_tags_tid=All
+--
+-- Details like the application id can be found in several places. There are snippets
+-- of (often partial or old) code on the web but still one needs to experiment and
+-- combine information. We assume unique zone names and ids across gateways; I only
+-- have one installed anyway.
+--
+-- The original application was to just get the right information for generating
+-- statistics but in the meantime I also use this code to add additional functionality
+-- to the system, for instance switching between rooms (office, living room, attic) and
+-- absence for one or more rooms.
+
+-- todo: %path% in filenames
+
+require("util-jsn")
+
+local next, type, setmetatable, rawset, rawget = next, type, setmetatable, rawset, rawget
+local json = utilities.json
+local formatters = string.formatters
+local floor, div = math.floor, math.div
+local resultof, ostime, osdate, ossleep = os.resultof, os.time, os.date, os.sleep
+local jsontolua, jsontostring = json.tolua, json.tostring
+local savetable, loadtable, sortedkeys = table.save, table.load, table.sortedkeys
+local setmetatableindex, setmetatablenewindex = table.setmetatableindex, table.setmetatablenewindex
+local replacer = utilities.templates.replacer
+local lower = string.lower -- no utf support yet (encoding needs checking in evohome)
+
+local applicationid = "b013aa26-9724-4dbd-8897-048b9aada249"
+----- applicationid = "91db1612-73fd-4500-91b2-e63b069b185c"
+
+local report = logs.reporter("evohome")
+local trace = false
+
+trackers.register("evohome.trace",function(v) trace = v end) -- not yet used
+
+local defaultpresets = {
+ interval = 30 * 60,
+ files = {
+ everything = "evohome-everything.lua",
+ history = "evohome-history.lua",
+ latest = "evohome-latest.lua",
+ schedules = "evohome-schedules.lua",
+ actions = "evohome-actions.lua",
+ template = "evohome.lmx",
+ },
+ credentials = {
+ -- username = "unset",
+ -- password = "unset",
+ -- accesstoken = "unset",
+ -- userid = "unset",
+ },
+}
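+
+-- A presets file is just a Lua table that gets loaded with table.load; a minimal,
+-- entirely made up example (tokens and ids get added at runtime):
+--
+--   return {
+--       credentials = { username = "someone@example.com", password = "secret" },
+--       gateways    = { { macaddress = "00AABBCCDDEE" } },
+--       files       = { template = "evohome.lmx" },
+--   }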
+
+local validzonetypes = {
+ ZoneTemperatureControl = true,
+ RadiatorZone = true,
+ ZoneValves = true,
+}
+
+local function validfile(presets,filename)
+ if lfs.isfile(filename) then
+ -- we're okay
+ return filename
+ end
+ if file.pathpart(filename) ~= "" then
+ -- can be a file that has to be created
+ return filename
+ end
+ local presetsname = presets.filename
+ if not presetsname then
+ -- hope for the best
+ return filename
+ end
+ -- we now have the full path
+ return file.join(file.pathpart(presetsname),filename)
+end
+
+local function validpresets(presets)
+ if type(presets) ~= "table" then
+ report("invalid presets, no table")
+ return
+ end
+ local credentials = presets.credentials
+ if not credentials then
+ report("invalid presets, no credentials")
+ return
+ end
+ local gateways = presets.gateways
+ if not gateways then
+ report("invalid presets, no gateways")
+ return
+ end
+ local files = presets.files
+ if not files then
+ report("invalid presets, no files")
+ return
+ end
+ for k, v in next, files do
+ files[k] = validfile(presets,v) or v
+ end
+ local data = presets.data
+ if not data then
+ data = { }
+ presets.data = data
+ end
+ local g = data.gateways
+ if not g then
+ local g = { }
+ data.gateways = g
+ for i=1,#gateways do
+ local gi = gateways[i]
+ g[gi.macaddress] = gi
+ end
+ end
+ local zones = data.zones
+ if not zones then
+ zones = { }
+ data.zones = zones
+ setmetatablenewindex(zones,function(t,k,v) rawset(t,lower(k),v) end)
+ setmetatableindex (zones,function(t,k) return rawget(t,lower(k)) end)
+ end
+ local states = data.states
+ if not states then
+ states = { }
+ data.states = states
+ setmetatablenewindex(states,function(t,k,v) rawset(t,lower(k),v) end)
+ setmetatableindex (states,function(t,k) return rawget(t,lower(k)) end)
+ end
+ setmetatableindex(presets,defaultpresets)
+ setmetatableindex(credentials,defaultpresets.credentials)
+ setmetatableindex(files,defaultpresets.files)
+ return presets
+end
+
+local function loadedtable(filename)
+ if type(filename) == "string" then
+ for i=1,10 do
+ local t = loadtable(filename)
+ if t then
+ return t
+ else
+ ossleep(1/4)
+ end
+ end
+ end
+ return { }
+end
+
+local function loadpresets(filename)
+ local presets = loadtable(filename)
+ if presets then
+ presets.filename = filename
+ presets.filepath = file.expandname(file.pathpart(filename))
+ -- package.extraluapath(presets.filepath) -- better do that elsewhere and once
+ end
+ return presets
+end
+
+local function loadhistory(filename)
+ if type(filename) == "table" and validpresets(filename) then
+ filename = filename.files and filename.files.history
+ end
+ return loadedtable(filename)
+end
+
+local function loadeverything(filename)
+ if type(filename) == "table" and validpresets(filename) then
+ filename = filename.files and filename.files.everything
+ end
+ return loadedtable(filename)
+end
+
+local function result(t,fmt,a,b,c)
+ if t then
+ report(fmt,a or "done",b or "done",c or "done","done")
+ return t
+ else
+ report(fmt,a or "failed",b or "failed",c or "failed","failed")
+ end
+end
+
+local f = replacer (
+ [[curl ]] ..
+ [[--silent --insecure ]] ..
+ [[-X POST ]] ..
+ [[-H "Authorization: Basic YjAxM2FhMjYtOTcyNC00ZGJkLTg4OTctMDQ4YjlhYWRhMjQ5OnRlc3Q=" ]] ..
+ [[-H "Accept: application/json, application/xml, text/json, text/x-json, text/javascript, text/xml" ]] ..
+ [[-d "Content-Type=application/x-www-form-urlencoded; charset=utf-8" ]] ..
+ [[-d "Host=rs.alarmnet.com/" ]] ..
+ [[-d "Cache-Control=no-store no-cache" ]] ..
+ [[-d "Pragma=no-cache" ]] ..
+ [[-d "grant_type=password" ]] ..
+ [[-d "scope=EMEA-V1-Basic EMEA-V1-Anonymous EMEA-V1-Get-Current-User-Account" ]] ..
+ [[-d "Username=%username%" ]] ..
+ [[-d "Password=%password%" ]] ..
+ [[-d "Connection=Keep-Alive" ]] ..
+ [["https://tccna.honeywell.com/Auth/OAuth/Token"]]
+)
+
+local function getaccesstoken(presets)
+ if validpresets(presets) then
+ local c = presets.credentials
+ local s = c and f {
+ username = c.username,
+ password = c.password,
+ applicationid = applicationid,
+ }
+ local r = s and resultof(s)
+ local t = r and jsontolua(r)
+ return result(t,"getting access token %a")
+ end
+ return result(false,"getting access token %a")
+end
+
+local f = replacer (
+ [[curl ]] ..
+ [[--silent --insecure ]] ..
+ [[-H "Authorization: bearer %accesstoken%" ]] ..
+ [[-H "Accept: application/json, application/xml, text/json, text/x-json, text/javascript, text/xml" ]] ..
+ [[-H "applicationId: %applicationid%" ]] ..
+ [["https://tccna.honeywell.com/WebAPI/emea/api/v1/userAccount"]]
+)
+
+local function getuserinfo(presets)
+ if validpresets(presets) then
+ local c = presets.credentials
+ local s = c and f {
+ accesstoken = c.accesstoken,
+ applicationid = c.applicationid,
+ }
+ local r = s and resultof(s)
+ local t = r and jsontolua(r)
+ return result(t,"getting user info for %a")
+ end
+ return result(false,"getting user info for %a")
+end
+
+local f = replacer (
+ [[curl ]] ..
+ [[--silent --insecure ]] ..
+ [[-H "Authorization: bearer %accesstoken%" ]] ..
+ [[-H "Accept: application/json, application/xml, text/json, text/x-json, text/javascript, text/xml" ]] ..
+ [[-H "applicationId: %applicationid%" ]] ..
+ [["https://tccna.honeywell.com/WebAPI/emea/api/v1/location/installationInfo?userId=%userid%&includeTemperatureControlSystems=True"]]
+)
+
+local function getlocationinfo(presets)
+ if validpresets(presets) then
+ local c = presets.credentials
+ local s = c and f {
+ accesstoken = c.accesstoken,
+ applicationid = applicationid,
+ userid = c.userid,
+ }
+ local r = s and resultof(s)
+ local t = r and jsontolua(r)
+ return result(t,"getting location info for %a")
+ end
+ return result(false,"getting location info for %a")
+end
+
+local f = replacer (
+ [[curl ]] ..
+ [[--silent --insecure ]] ..
+ [[-H "Authorization: bearer %accesstoken%" ]] ..
+ [[-H "Accept: application/json, application/xml, text/json, text/x-json, text/javascript, text/xml" ]] ..
+ [[-H "applicationId: %applicationid%" ]] ..
+ [["https://tccna.honeywell.com/WebAPI/emea/api/v1/temperatureZone/%zoneid%/schedule"]]
+)
+
+local function getschedule(presets,zonename)
+ if validpresets(presets) then
+ local zoneid = presets.data.zones[zonename].zoneId
+ if zoneid then
+ local c = presets.credentials
+ local s = c and f {
+ accesstoken = c.accesstoken,
+ applicationid = applicationid,
+ zoneid = zoneid,
+ }
+ local r = s and resultof(s)
+ local t = r and jsontolua(r)
+ return result(t,"getting schedule for zone %a, %s",zonename or "?")
+ end
+ end
+ return result(false,"getting schedule for zone %a, %s",zonename or "?")
+end
+
+local f = replacer (
+ [[curl ]] ..
+ [[--silent --insecure ]] ..
+ [[-H "Authorization: bearer %accesstoken%" ]] ..
+ [[-H "Accept: application/json, application/xml, text/json, text/x-json, text/javascript, text/xml" ]] ..
+ [[-H "applicationId: %applicationid%" ]] ..
+ [["https://tccna.honeywell.com/WebAPI/emea/api/v1/location/%locationid%/status?includeTemperatureControlSystems=True" ]]
+)
+
+local function getstatus(presets,locationid,locationname)
+ if locationid and validpresets(presets) then
+ local c = presets.credentials
+ local s = c and f {
+ accesstoken = c.accesstoken,
+ applicationid = applicationid,
+ locationid = locationid,
+ }
+ local r = s and resultof(s)
+ local t = r and jsontolua(r)
+ return result(t and t.gateways and t,"getting status for location %a, %s",locationname or "?")
+ end
+ return result(false,"getting status for location %a, %s",locationname or "?")
+end
+
+local function validated(presets)
+ if validpresets(presets) then
+ local data = getlocationinfo(presets)
+ if data and type(data) == "table" and data[1] and data[1].locationInfo then
+ return true
+ else
+ local data = getaccesstoken(presets)
+ if data then
+ presets.credentials.accesstoken = data.access_token
+ local data = getuserinfo(presets)
+ if data then
+ presets.credentials.userid = data.userId
+ return true
+ end
+ end
+ end
+ end
+end
+
+local function findzone(presets,name)
+ if not presets then
+ return
+ end
+ local data = presets.data
+ if not data then
+ return
+ end
+ local usedzones = data.zones
+ return usedzones and usedzones[name]
+end
+
+local function gettargets(zone) -- maybe also for a day
+ local schedule = zone.schedule
+ local min = false
+ local max = false
+ if schedule then
+ local schedules = schedule.dailySchedules
+ if schedules then
+ for i=1,#schedules do
+ local switchpoints = schedules[i].switchpoints
+ for i=1,#switchpoints do
+ local m = switchpoints[i].temperature
+ if not min or m < min then
+ min = m
+ end
+ if not max or m > max then
+ max = m
+ end
+ end
+ end
+ else
+            report("zone %a has no schedule",zone.name)
+ end
+ end
+ return min, max
+end
+
+local function updatezone(presets,name,zone)
+ if not zone then
+ zone = findzone(presets,name)
+ end
+ if zone then
+ local oldtarget = presets.data.states[name]
+ local min = zone.heatSetpointCapabilities.minHeatSetpoint or 5
+ local max = zone.heatSetpointCapabilities.maxHeatSetpoint or 12
+ local mintarget, maxtarget = gettargets(zone)
+ -- todo: maybe get these from presets
+ if mintarget == false then
+ if min < 5 then
+ mintarget = 5
+ -- report("zone %a, min target limited to %a",name,mintarget)
+ else
+ mintarget = min
+ end
+ end
+ if maxtarget == false then
+ if max > 18.5 then
+ maxtarget = 18.5
+ -- report("zone %a, max target limited to %a",name,maxtarget)
+ else
+ maxtarget = max
+ end
+ end
+ local current = zone.temperatureStatus.temperature or 0
+ local target = zone.heatSetpointStatus.targetTemperature
+ local mode = zone.heatSetpointStatus.setpointMode
+ local state = (mode == "FollowSchedule" and "schedule" ) or
+ (mode == "PermanentOverride" and target <= mintarget and "permanent") or
+ (mode == "TemporaryOverride" and target <= mintarget and "off" ) or
+ (mode == "TemporaryOverride" and target >= maxtarget and "on" ) or
+ ( "unknown" )
+ local t = {
+ name = zone.name,
+ id = zone.zoneId,
+ schedule = zone.schedule,
+ mode = mode,
+ current = current,
+ target = target,
+ min = min,
+ max = max,
+ state = state,
+ lowest = mintarget,
+ highest = maxtarget,
+ }
+ -- report("zone %a, current %a, target %a",name,current,target)
+ presets.data.states[name] = t
+ return t
+ end
+end
+
+
+local function geteverything(presets,noschedules)
+ if validated(presets) then
+ local data = getlocationinfo(presets)
+ if data then
+ local usedgateways = presets.data.gateways
+ local usedzones = presets.data.zones
+ for i=1,#data do
+ local gateways = data[i].gateways
+ local locationinfo = data[i].locationInfo
+ local locationid = locationinfo and locationinfo.locationId
+ if gateways and locationid then
+ local status = getstatus(presets,locationid,locationinfo.name)
+ if status then
+ for i=1,#gateways do
+ local gatewaystatus = status.gateways[i]
+ local gatewayinfo = gateways[i]
+ local gatewaysystems = gatewayinfo.temperatureControlSystems
+ local info = gatewayinfo.gatewayInfo
+ local statussystems = gatewaystatus.temperatureControlSystems
+ if gatewaysystems and statussystems and info then
+ local mac = info.mac
+ if usedgateways[mac] then
+ report("%s gateway with mac address %a","using",mac)
+ for j=1,#gatewaysystems do
+ local gatewayzones = gatewaysystems[j].zones
+ local zonestatus = statussystems[j].zones
+ if gatewayzones and zonestatus then
+ for k=1,#gatewayzones do
+ local zonestatus = zonestatus[k]
+ local gatewayzone = gatewayzones[k]
+ if zonestatus and gatewayzone then
+ local zonename = zonestatus.name
+ local zoneid = zonestatus.zoneId
+ if validzonetypes[gatewayzone.zoneType] and zonename == gatewayzone.name then
+ gatewayzone.heatSetpointStatus = zonestatus.heatSetpointStatus
+ gatewayzone.temperatureStatus = zonestatus.temperatureStatus
+ local zonestatus = usedzones[zonename] -- findzone(states,zonename)
+ local schedule = zonestatus and zonestatus.schedule
+ usedzones[zonename] = gatewayzone
+ if schedule and noschedules then
+ gatewayzone.schedule = schedule
+ else
+ gatewayzone.schedule = getschedule(presets,zonename)
+ end
+ updatezone(presets,zonename,gatewayzone)
+ end
+ end
+ end
+ end
+ end
+ else
+ report("%s gateway with mac address %a","skipping",mac)
+ end
+ end
+ end
+ end
+ end
+ end
+ savetable(presets.files.everything,data)
+ return result(data,"getting everything, %s")
+ end
+ end
+ return result(false,"getting everything, %s")
+end
+
+local function gettemperatures(presets)
+ if validated(presets) then
+ local data = loadeverything(presets)
+ if not data or not next(data) then
+ data = geteverything(presets)
+ end
+ if data then
+ local updated = false
+ for i=1,#data do
+ local gateways = data[i].gateways
+ local locationinfo = data[i].locationInfo
+ local locationid = locationinfo.locationId
+ if gateways then
+ local status = getstatus(presets,locationid,locationinfo.name)
+ if status then
+ for i=1,#gateways do
+ local g = status.gateways[i]
+ local gateway = gateways[i]
+ local systems = gateway.temperatureControlSystems
+ if systems then
+ local s = g.temperatureControlSystems
+ for i=1,#systems do
+ local zones = systems[i].zones
+ if zones then
+ local z = s[i].zones
+                            for i=1,#zones do
+                                local zone = zones[i]
+                                if validzonetypes[zone.zoneType] then
+ local z = z[i]
+ if z.name == zone.name then
+ zone.temperatureStatus = z.temperatureStatus
+ updated = true
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ if updated then
+ data.time = ostime()
+ savetable(presets.files.latest,data)
+ end
+ return result(data,"getting temperatures, %s")
+ end
+ end
+ return result(false,"getting temperatures, %s")
+end
+
+local function setmoment(target,time,data)
+ if not time then
+ time = ostime()
+ end
+ local t = osdate("*t",time )
+ local c_year, c_month, c_day, c_hour, c_minute = t.year, t.month, t.day, t.hour, t.min
+ --
+ local years = target.years if not years then years = { } target.years = years end
+ local d_year = years[c_year] if not d_year then d_year = { } years[c_year] = d_year end
+ local months = d_year.months if not months then months = { } d_year.months = months end
+ local d_month = months[c_month] if not d_month then d_month = { } months[c_month] = d_month end
+ local days = d_month.days if not days then days = { } d_month.days = days end
+ local d_day = days[c_day] if not d_day then d_day = { } days[c_day] = d_day end
+ local hours = d_day.hours if not hours then hours = { } d_day.hours = hours end
+ local d_hour = hours[c_hour] if not d_hour then d_hour = { } hours[c_hour] = d_hour end
+ --
+ c_minute = div(c_minute,15) + 1
+ --
+ local d_last = d_hour[c_minute]
+ if d_last then
+ for k, v in next, data do
+ local d = d_last[k]
+ if d then
+ data[k] = (d + v) / 2
+ end
+ end
+ end
+ d_hour[c_minute] = data
+ --
+ target.lasttime = {
+ year = c_year,
+ month = c_month,
+ day = c_day,
+ hour = c_hour,
+ minute = c_minute,
+ }
+end
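+
+-- The history table that setmoment builds is nested by time; an assumed
+-- snapshot with made up values (the last index is the quarter of the hour):
+--
+--   { years = { [2017] = { months = { [6] = { days = { [26] = { hours = {
+--         [14] = { [3] = { Voorkamer = 21.5 } } } } } } } } },
+--     lasttime = { year = 2017, month = 6, day = 26, hour = 14, minute = 3 } }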
+
+local function loadtemperatures(presets)
+ if validpresets(presets) then
+ local status = loadeverything(presets)
+ if status then
+ local usedgateways = presets.data.gateways
+ for i=1,#status do
+ local gateways = status[i].gateways
+ if gateways then
+ for i=1,#gateways do
+ local gatewayinfo = gateways[i]
+ local systems = gatewayinfo.temperatureControlSystems
+ local info = gatewayinfo.gatewayInfo
+ if systems and info and usedgateways[info.mac] then
+ for i=1,#systems do
+ local zones = systems[i].zones
+ if zones then
+ local summary = { time = status.time }
+ for i=1,#zones do
+ local zone = zones[i]
+ if validzonetypes[zone.zoneType] then
+ summary[#summary+1] = updatezone(presets,zone.name,zone)
+ end
+ end
+ return result(summary,"loading temperatures, %s")
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ return result(false,"loading temperatures, %s")
+end
+
+local function updatetemperatures(presets)
+ if validpresets(presets) then
+ local everythingname = presets.files.everything
+ local historyname = presets.files.history
+ if everythingname and historyname then
+ gettemperatures(presets,everythingname)
+ local t = loadtemperatures(presets)
+ if t then
+ local data = { }
+ for i=1,#t do
+ local ti = t[i]
+ data[ti.name] = ti.current
+ end
+ local history = loadhistory(historyname) or { }
+ setmoment(history,ostime(),data)
+ savetable(historyname,history)
+ return result(t,"updating temperatures, %s")
+ end
+ end
+ end
+ return result(false,"updating temperatures, %s")
+end
+
+local function getzonestate(presets,name)
+ return validpresets(presets) and presets.data.states[name]
+end
+
+local f = replacer (
+ [[curl ]] ..
+ [[--silent --insecure ]] ..
+ [[-X PUT ]] ..
+ [[-H "Authorization: bearer %accesstoken%" ]] ..
+ [[-H "Accept: application/json, application/xml, text/json, text/x-json, text/javascript, text/xml" ]] ..
+ [[-H "applicationId: %applicationid%" ]] ..
+ [[-H "Content-Type: application/json" ]] ..
+ [[-d "%[settings]%" ]] ..
+ [["https://tccna.honeywell.com/WebAPI/emea/api/v1/temperatureZone/%zoneid%/heatSetpoint"]]
+)
+
+local function untilmidnight()
+ local t = osdate("*t")
+ t.hour = 23
+ t.min = 59
+ t.sec = 59
+ return osdate("%Y-%m-%dT%H:%M:%SZ",ostime(t))
+end
+
+local followschedule = {
+ -- HeatSetpointValue = 0,
+ SetpointMode = "FollowSchedule",
+}
+
+local function setzonestate(presets,name,temperature,permanent)
+ local zone = findzone(presets,name)
+ if zone then
+ local m = followschedule
+ if type(temperature) == "number" and temperature > 0 then
+ if permanent then
+ m = {
+ HeatSetpointValue = temperature,
+ SetpointMode = "PermanentOverride",
+ }
+ else
+ m = {
+ HeatSetpointValue = temperature,
+ SetpointMode = "TemporaryOverride",
+ TimeUntil = untilmidnight(),
+ }
+ end
+ end
+ local s = f {
+ accesstoken = presets.credentials.accesstoken,
+ applicationid = applicationid,
+ zoneid = zone.zoneId,
+ settings = jsontostring(m),
+ }
+ local r = s and resultof(s)
+ local t = r and jsontolua(r)
+-- inspect(r)
+-- inspect(t)
+ return result(t,"setting state of zone %a, %s",name)
+ end
+ return result(false,"setting state of zone %a, %s",name)
+end
+
+local function resetzonestate(presets,name)
+ setzonestate(presets,name)
+end
+
+--
+
+local function update(presets,noschedules)
+ local everything = geteverything(presets,noschedules)
+ if everything then
+ presets.data.everything = everything
+ return presets
+ end
+end
+
+local function initialize(filename)
+ local presets = loadpresets(filename)
+ if presets then
+ return update(presets)
+ end
+end
+
+local function off(presets,name)
+ local zone = presets and getzonestate(presets,name)
+ if zone then
+ setzonestate(presets,name,zone.lowest)
+ end
+end
+
+local function on(presets,name)
+ local zone = presets and getzonestate(presets,name)
+ if zone then
+ setzonestate(presets,name,zone.highest)
+ end
+end
+
+local function schedule(presets,name)
+ local zone = presets and getzonestate(presets,name)
+ if zone then
+ resetzonestate(presets,name)
+ end
+end
+
+local function permanent(presets,name)
+ local zone = presets and getzonestate(presets,name)
+ if zone then
+ setzonestate(presets,name,zone.lowest,true)
+ end
+end
+
+-- tasks
+
+local function settask(presets,when,tag,action)
+ if when == "tomorrow" then
+ local list = presets.scheduled
+ if not list then
+ list = loadtable(presets.files.schedules) or { }
+ presets.scheduled = list
+ end
+ if action then
+ list[tag] = {
+ time = ostime() + 24*60*60,
+ done = false,
+ category = category,
+ action = action,
+ }
+ else
+ list[tag] = nil
+ end
+ savetable(presets.files.schedules,list)
+ end
+end
+
+local function gettask(presets,when,tag)
+ if when == "tomorrow" then
+ local list = presets.scheduled
+ if not list then
+ list = loadtable(presets.files.schedules) or { }
+ presets.scheduled = list
+ end
+ return list[tag]
+ end
+end
+
+local function resettask(presets,when,tag)
+ settask(presets,when,tag)
+end
+
+local function checktasks(presets)
+ local list = presets.scheduled
+ if not list then
+ list = loadtable(presets.files.schedules) or { }
+ presets.scheduled = list
+ end
+ if list then
+ local t = osdate("*t")
+ local q = { }
+ for k, v in next, list do
+ local d = osdate("*t",v.time)
+ if not v.done and d.year == t.year and d.month == t.month and d.day == t.day then
+ local a = v.action
+ if type(a) == "function" then
+ a()
+ end
+ v.done = true
+ end
+ if d.year <= t.year and d.month <= t.month and d.day < t.day then
+ q[k] = true
+ end
+ end
+ if next(q) then
+ for k, v in next, q do
+                list[k] = nil
+ end
+ savetable(presets.files.schedules,list)
+ end
+ return list
+ end
+end
+
+-- predefined tasks
+
+local function settomorrow(presets,tag,action)
+ settask(presets,"tomorrow",tag,action)
+end
+
+local function resettomorrow(presets,tag)
+ settask(presets,"tomorrow",tag)
+end
+
+local function tomorrowset(presets,tag)
+ return gettask(presets,"tomorrow",tag) and true or false
+end
+
+--
+
+local evohome
+
+local function poller(presets)
+ --
+ if type(presets) ~= "string" then
+ report("invalid presets file")
+ os.exit()
+ end
+ report("loading presets from %a",presets)
+ local presets = loadpresets(presets)
+ if not validpresets(presets) then
+ report("invalid presets, aborting")
+ os.exit()
+ end
+ --
+ local actions = presets.files.actions
+ if type(actions) ~= "string" then
+ report("invalid actions file")
+ os.exit()
+ end
+ report("loading actions from %a",actions)
+ local actions = loadtable(actions)
+ if type(actions) ~= "table" then
+ report("invalid actions, aborting")
+ os.exit()
+ end
+ actions = actions.actions
+ if type(actions) ~= "table" then
+ report("invalid actions file, no actions subtable")
+ os.exit()
+ end
+ --
+ report("updating device status")
+ update(presets)
+ --
+ presets.report = report
+ presets.evohome = evohome
+ presets.results = { }
+ --
+ function presets.getstate(name)
+ return getzonestate(presets,name)
+ end
+ function presets.tomorrowset(name)
+ return tomorrowset(presets,name)
+ end
+ --
+ local template = actions.template or presets.files.template
+ --
+ local process = function(t)
+ local category = t.category
+ local action = t.action
+ if category and action then
+ local c = actions[category]
+ if c then
+ local a = c[action]
+ if type(a) == "function" then
+ report("category %a, action %a, executing",category,action)
+ presets.results.template = template -- can be overloaded by action
+ a(presets)
+ update(presets,true)
+ else
+ report("category %a, action %a, invalid action, known: %, t",category,action,sortedkeys(c))
+ end
+ else
+ report("category %a, action %a, invalid category, known categories: %, t",category,action,sortedkeys(actions))
+ end
+ else
+ -- logs.report("invalid category and action")
+ end
+ end
+ --
+ local delay = presets.delay or 10
+ local interval = 15 * 60 -- 15 minutes
+ local interval = 60 * 60 -- 60 minutes
+ local refresh = 5 * 60
+ local passed = 0
+ local step = function()
+ if passed > interval then
+ report("refreshing states, every %i seconds",interval)
+ -- todo: update stepwise as this also updates the schedules that we don't really
+ -- change often and definitely not in the middle of the night, so maybe just
+ -- update 9:00 12:00 15:00 18:00 21:00
+ update(presets)
+ passed = 0
+ else
+ passed = passed + delay
+ end
+ checktasks(presets)
+ return delay
+ end
+ --
+ presets.refreshtime = refresh
+ --
+ return step, process, presets
+end
+
+--
+
+evohome = {
+ helpers = {
+ getaccesstoken = getaccesstoken, -- presets
+ getuserinfo = getuserinfo, -- presets
+ getlocationinfo = getlocationinfo, -- presets
+ getschedule = getschedule, -- presets, name
+ --
+ geteverything = geteverything, -- presets, noschedules
+ gettemperatures = gettemperatures, -- presets
+ getzonestate = getzonestate, -- presets, name
+ setzonestate = setzonestate, -- presets, name, temperature
+ resetzonestate = resetzonestate, -- presets, name
+ getzonedata = findzone, -- presets, name
+ --
+ loadpresets = loadpresets, -- filename
+ loadhistory = loadhistory, -- presets | filename
+ loadeverything = loadeverything, -- presets | filename
+ loadtemperatures = loadtemperatures, -- presets | filename
+ --
+ updatetemperatures = updatetemperatures, -- presets
+ },
+ actions= {
+ initialize = initialize, -- filename
+ update = update, -- presets
+ --
+ off = off, -- presets, name
+ on = on, -- presets, name
+ schedule = schedule, -- presets, name
+ permanent = permanent, -- presets, name
+ --
+ settomorrow = settomorrow, -- presets, tag, function
+ resettomorrow = resettomorrow, -- presets, tag
+ tomorrowset = tomorrowset, -- presets, tag
+ --
+ poller = poller, -- presets
+ }
+}
+
+if utilities then
+ utilities.evohome = evohome
+end
+
+-- local presets = evohome.helpers.loadpresets("c:/data/develop/domotica/code/evohome-presets.lua")
+-- evohome.helpers.setzonestate(presets,"Voorkamer",22)
+-- evohome.helpers.setzonestate(presets,"Voorkamer")
+
+return evohome
+
diff --git a/tex/context/base/mkiv/util-fil.lua b/tex/context/base/mkiv/util-fil.lua
index 01bcd571e..0e8ed4e57 100644
--- a/tex/context/base/mkiv/util-fil.lua
+++ b/tex/context/base/mkiv/util-fil.lua
@@ -6,10 +6,8 @@ if not modules then modules = { } end modules ['util-fil'] = {
license = "see context related readme files"
}
-local byte = string.byte
-local char = string.char
-local extract = bit32 and bit32.extract
-local floor = math.floor
+local byte = string.byte
+local char = string.char
-- Here are a few helpers (the starting point were old ones I used for parsing
-- flac files). In Lua 5.3 we can probably do this better. Some code will move
@@ -35,7 +33,10 @@ function files.close(f)
end
function files.size(f)
- return f:seek("end")
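+ -- remember the current position, measure the size at the end, then restore the position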
+ local current = f:seek()
+ local size = f:seek("end")
+ f:seek("set",current)
+ return size
end
files.getsize = files.size
@@ -233,13 +234,13 @@ function files.readfixed4(f)
end
end
-if extract then
+-- (real) ((n<<16)>>(16+14)) + ((n&0x3fff)/16384.0))
+
+if bit32 then
local extract = bit32.extract
local band = bit32.band
- -- (real) ((n<<16)>>(16+14)) + ((n&0x3fff)/16384.0))
-
function files.read2dot14(f)
local a, b = byte(f:read(2),1,2)
if a >= 0x80 then
@@ -263,20 +264,37 @@ end
-- writers (kind of slow)
-function files.writecardinal2(f,n)
- local a = char(n % 256)
- n = floor(n/256)
- local b = char(n % 256)
- f:write(b,a)
+if bit32 then
+
+ local rshift = bit32.rshift
+
+ function files.writecardinal2(f,n)
+ local a = char(n % 256)
+ n = rshift(n,8)
+ local b = char(n % 256)
+ f:write(b,a)
+ end
+
+else
+
+ local floor = math.floor
+
+ function files.writecardinal2(f,n)
+ local a = char(n % 256)
+ n = floor(n/256)
+ local b = char(n % 256)
+ f:write(b,a)
+ end
+
end
function files.writecardinal4(f,n)
local a = char(n % 256)
- n = floor(n/256)
+ n = rshift(n,8)
local b = char(n % 256)
- n = floor(n/256)
+ n = rshift(n,8)
local c = char(n % 256)
- n = floor(n/256)
+ n = rshift(n,8)
local d = char(n % 256)
f:write(d,c,b,a)
end
@@ -299,8 +317,8 @@ if fio and fio.readcardinal1 then
files.readinteger2 = fio.readinteger2
files.readinteger3 = fio.readinteger3
files.readinteger4 = fio.readinteger4
- -- files.readfixed2 = fio.readfixed2 -- needs recent luatex
- -- files.readfixed4 = fio.readfixed4 -- needs recent luatex
+ files.readfixed2 = fio.readfixed2
+ files.readfixed4 = fio.readfixed4
files.read2dot14 = fio.read2dot14
files.setposition = fio.setposition
files.getposition = fio.getposition
diff --git a/tex/context/base/mkiv/util-jsn.lua b/tex/context/base/mkiv/util-jsn.lua
index e835c07d6..e5f83e06c 100644
--- a/tex/context/base/mkiv/util-jsn.lua
+++ b/tex/context/base/mkiv/util-jsn.lua
@@ -21,7 +21,7 @@ local format = string.format
local utfchar = utf.char
local concat = table.concat
-local tonumber, tostring, rawset, type = tonumber, tostring, rawset, type
+local tonumber, tostring, rawset, type, next = tonumber, tostring, rawset, type, next
local json = utilities.json or { }
utilities.json = json
@@ -158,4 +158,11 @@ end
-- inspect(json.tostring(true))
+function json.load(filename)
+ local data = io.loaddata(filename)
+ if data then
+ return lpegmatch(jsonconverter,data)
+ end
+end
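+
+-- A hedged usage sketch (the filename is made up):
+--
+--   local settings = utilities.json.load("evohome-settings.json")
+--   if settings then inspect(settings) end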
+
return json
diff --git a/tex/context/base/mkiv/util-lib.lua b/tex/context/base/mkiv/util-lib.lua
index e7b6e4875..714cfd4c7 100644
--- a/tex/context/base/mkiv/util-lib.lua
+++ b/tex/context/base/mkiv/util-lib.lua
@@ -80,6 +80,7 @@ local pathpart = file.pathpart
local nameonly = file.nameonly
local joinfile = file.join
local removesuffix = file.removesuffix
+local addsuffix = file.addsuffix
local findfile = resolvers.findfile
local findfiles = resolvers.findfiles
local expandpaths = resolvers.expandedpathlistfromvariable
@@ -104,14 +105,22 @@ local function locate(required,version,trace,report,action)
local required_path = pathpart(required_full)
local required_base = nameonly(required_full)
if qualifiedpath(required) then
- if isfile(required) then
+ -- also check with suffix
+ if isfile(addsuffix(required,os.libsuffix)) then
+ if trace then
+ report("qualified name %a found",required)
+ end
found_library = required
+ else
+ if trace then
+ report("qualified name %a not found",required)
+ end
end
else
-- initialize a few variables
local required_name = required_base .. "." .. os.libsuffix
local version = type(version) == "string" and version ~= "" and version or false
- local engine = environment.ownmain or false
+ local engine = "luatex" -- environment.ownmain or false
--
if trace and not done then
local list = expandpaths("lib") -- fresh, no reuse
@@ -177,10 +186,12 @@ local function locate(required,version,trace,report,action)
report("checking lib paths")
end
package.extralibpath(environment.ownpath)
- local paths = package.libpaths()
+ local paths = package.libpaths()
+ local pattern = "/[^/]+%." .. os.libsuffix .. "$"
for i=1,#paths do
- local found = check(lfs.isfile)
- if found and (not checkpattern or find(found,checkpattern)) then
+ required_path = gsub(paths[i],pattern,"")
+ local found = check(lfs.isfound)
+ if type(found) == "string" and (not checkpattern or find(found,checkpattern)) then
return found
end
end
@@ -211,18 +222,20 @@ local function locate(required,version,trace,report,action)
if trace then
report("found: %a",found_library)
end
- local message, result = action(found_library,required_base)
+ local result, message = action(found_library,required_base)
if result then
library = result
else
library = false
- report("load error: message %a, library %a",tostring(message),found_library or "no library")
+ report("load error: message %a, library %a",tostring(message or "unknown"),found_library or "no library")
end
end
- if not library then
- report("unknown: %a",required)
- elseif trace then
- report("stored: %a",required)
+ if trace then
+ if not library then
+ report("unknown library: %a",required)
+ else
+ report("stored library: %a",required)
+ end
end
return library
end
@@ -254,13 +267,12 @@ do
local libtype = type(library)
if libtype == "function" then
library = library()
- message = true
else
report_swiglib("load error: %a returns %a, message %a, library %a",opener,libtype,(string.gsub(message or "no message","[%s]+$","")),found_library or "no library")
library = false
end
popdir()
- return message, library
+ return library
end)
loadedlibs[required] = library or false
end
@@ -332,22 +344,50 @@ We use the same lookup logic for ffi loading.
local trace_ffilib = false
local savedffiload = ffi.load
+ -- local pushlibpath = package.pushlibpath
+ -- local poplibpath = package.poplibpath
+
+ -- ffi.savedload = savedffiload
+
trackers.register("resolvers.ffilib", function(v) trace_ffilib = v end)
+ -- pushlibpath(pathpart(name))
+ -- local message, library = pcall(savedffiload,nameonly(name))
+ -- poplibpath()
+
+ local loaded = { }
+
local function locateindeed(name)
- local message, library = pcall(savedffiload,removesuffix(name))
- if type(library) == "userdata" then
- return library
- else
- return false
+ name = removesuffix(name)
+ local l = loaded[name]
+ if l == nil then
+ local message, library = pcall(savedffiload,name)
+ if type(message) == "userdata" then
+ l = message
+ elseif type(library) == "userdata" then
+ l = library
+ else
+ l = false
+ end
+ loaded[name] = l
+ elseif trace_ffilib then
+ report_ffilib("reusing already loaded %a",name)
end
+ return l
end
- function ffilib(required,version)
- if version == "system" then
+ function ffilib(name,version)
+ name = removesuffix(name)
+ local l = loaded[name]
+ if l ~= nil then
+ if trace_ffilib then
+ report_ffilib("reusing already loaded %a",name)
+ end
+ return l
+ elseif version == "system" then
return locateindeed(name)
else
- return locate(required,version,trace_ffilib,report_ffilib,locateindeed)
+ return locate(name,version,trace_ffilib,report_ffilib,locateindeed)
end
end
@@ -355,10 +395,12 @@ We use the same lookup logic for ffi loading.
local library = ffilib(name)
if type(library) == "userdata" then
return library
- else
+ end
+ if trace_ffilib then
report_ffilib("trying to load %a using normal loader",name)
- return savedffiload(name)
end
+ -- so here we don't store
+ return savedffiload(name)
end
end
diff --git a/tex/context/base/mkiv/util-lua.lua b/tex/context/base/mkiv/util-lua.lua
index b3346006c..bd74d0843 100644
--- a/tex/context/base/mkiv/util-lua.lua
+++ b/tex/context/base/mkiv/util-lua.lua
@@ -10,16 +10,17 @@ if not modules then modules = { } end modules ['util-lua'] = {
-- we will remove the 5.1 code some day soon
local rep, sub, byte, dump, format = string.rep, string.sub, string.byte, string.dump, string.format
-local load, loadfile, type = load, loadfile, type
+local load, loadfile, type, collectgarbage = load, loadfile, type, collectgarbage
utilities = utilities or {}
utilities.lua = utilities.lua or { }
local luautilities = utilities.lua
local report_lua = logs.reporter("system","lua")
+local report_mem = logs.reporter("system","lua memory")
local tracestripping = false
-local forcestupidcompile = true -- use internal bytecode compiler
+local tracememory = false
luautilities.stripcode = true -- support stripping when asked for
luautilities.alwaysstripcode = false -- saves 1 meg on 7 meg compressed format file (2012.08.12)
luautilities.nofstrippedchunks = 0
@@ -71,12 +72,21 @@ end
-- quite subtle ... doing this wrong incidentally can give more bytes
-function luautilities.loadedluacode(fullname,forcestrip,name)
+function luautilities.loadedluacode(fullname,forcestrip,name,macros)
-- quite subtle ... doing this wrong incidentally can give more bytes
name = name or fullname
local code, message
- if environment.loadpreprocessedfile then
- code, message = environment.loadpreprocessedfile(fullname)
+ if macros then
+ macros = lua.macros
+ end
+ if macros and macros.enabled then
+ -- local c = io.loaddata(fullname) -- not yet available
+ local f = io.open(fullname,"rb") local c = f:read("*a") f:close()
+ local n = c and macros.resolvestring("--[["..fullname.."]] "..c)
+ if n and #n ~= #c then
+ report_lua("preprocessed file %a: %i => %i bytes",fullname,#c,#n)
+ end
+ code, message = load(n or c)
else
code, message = loadfile(fullname)
end
@@ -103,7 +113,7 @@ function luautilities.loadedluacode(fullname,forcestrip,name)
end
end
-function luautilities.strippedloadstring(code,forcestrip,name) -- not executed
+function luautilities.strippedloadstring(code,name,forcestrip) -- not executed
local code, message = load(code)
if not code then
report_lua("loading of file %a failed:\n\t%s",name,message or "no message")
@@ -116,6 +126,14 @@ function luautilities.strippedloadstring(code,forcestrip,name) -- not executed
end
end
+function luautilities.loadstring(code,name) -- not executed
+ local code, message = load(code)
+ if not code then
+ report_lua("loading of file %a failed:\n\t%s",name,message or "no message")
+ end
+ return code, 0
+end
+
function luautilities.compile(luafile,lucfile,cleanup,strip,fallback) -- defaults: cleanup=false strip=true
report_lua("compiling %a into %a",luafile,lucfile)
os.remove(lucfile)
@@ -176,3 +194,24 @@ setmetatable(finalizers, {
function luautilities.registerfinalizer(f)
finalizers[#finalizers+1] = f
end
+
+function luautilities.checkmemory(previous,threshold,trace) -- threshold in MB
+ local current = collectgarbage("count")
+ if previous then
+ local checked = (threshold or 64)*1024
+ local delta = current - previous
+ if current - previous > checked then
+ collectgarbage("collect")
+ local afterwards = collectgarbage("count")
+ if trace or tracememory then
+ report_mem("previous %i MB, current %i MB, delta %i MB, threshold %i MB, afterwards %i MB",
+ previous/1024,current/1024,delta/1024,threshold or 64,afterwards)
+ end
+ return afterwards
+ elseif trace or tracememory then
+ report_mem("previous %i MB, current %i MB, delta %i MB, threshold %i MB",
+ previous/1024,current/1024,delta/1024,threshold or 64)
+ end
+ end
+ return current
+end
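
The new `checkmemory` helper only forces a garbage collection when the Lua heap grew by more than `threshold` megabytes since the `previous` measurement, and it returns the value to feed back in next time. A minimal calling sketch; the work loop is hypothetical:

    -- sketch: periodic memory check in a long running loop (jobs/process are made up)
    local checkmemory = utilities.lua.checkmemory

    local memory = collectgarbage("count") -- kilobytes, the unit checkmemory works with
    for i=1,#jobs do
        process(jobs[i])                   -- hypothetical expensive step
        memory = checkmemory(memory,100)   -- collect only when more than 100 MB piled up
    end
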
diff --git a/tex/context/base/mkiv/util-prs.lua b/tex/context/base/mkiv/util-prs.lua
index 650a7ead6..48d59a9f3 100644
--- a/tex/context/base/mkiv/util-prs.lua
+++ b/tex/context/base/mkiv/util-prs.lua
@@ -566,14 +566,16 @@ function parsers.rfc4180splitter(specification)
local field = escaped + non_escaped + Cc("")
local record = Ct(field * (separator * field)^1)
local headerline = record * Cp()
- local wholeblob = Ct((newline^(specification.strict and -1 or 1) * record)^0)
+ local morerecords = (newline^(specification.strict and -1 or 1) * record)^0
+ local headeryes = Ct(morerecords)
+ local headernop = Ct(record * morerecords)
return function(data,getheader)
if getheader then
local header, position = lpegmatch(headerline,data)
- local data = lpegmatch(wholeblob,data,position)
+ local data = lpegmatch(headeryes,data,position)
return data, header
else
- return lpegmatch(wholeblob,data)
+ return lpegmatch(headernop,data)
end
end
end
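
The split into `headeryes`/`headernop` means the generated splitter now also captures the first record when no header is requested. A small sketch; the `separator` and `quote` fields are assumed from the unshown top of this function:

    -- sketch: csv splitting with and without a header line (field names assumed)
    local split = utilities.parsers.rfc4180splitter { separator = ",", quote = '"' }

    local csv = 'name,count\n"foo",1\n"bar",2'
    local rows, header = split(csv,true) -- header = { "name", "count" }, rows = { {"foo","1"}, {"bar","2"} }
    local all          = split(csv)      -- without a header the first record is now included in the data
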
@@ -604,10 +606,10 @@ local cardinal = lpegpatterns.cardinal / tonumber
local spacers = lpegpatterns.spacer^0
local endofstring = lpegpatterns.endofstring
-local stepper = spacers * ( C(cardinal) * ( spacers * S(":-") * spacers * ( C(cardinal) + Cc(true) ) + Cc(false) )
+local stepper = spacers * ( cardinal * ( spacers * S(":-") * spacers * ( cardinal + Cc(true) ) + Cc(false) )
* Carg(1) * Carg(2) / ranger * S(", ")^0 )^1
-local stepper = spacers * ( C(cardinal) * ( spacers * S(":-") * spacers * ( C(cardinal) + (P("*") + endofstring) * Cc(true) ) + Cc(false) )
+local stepper = spacers * ( cardinal * ( spacers * S(":-") * spacers * ( cardinal + (P("*") + endofstring) * Cc(true) ) + Cc(false) )
* Carg(1) * Carg(2) / ranger * S(", ")^0 )^1 * endofstring -- we're sort of strict (could do without endofstring)
function parsers.stepper(str,n,action)
diff --git a/tex/context/base/mkiv/util-sbx.lua b/tex/context/base/mkiv/util-sbx.lua
index 66a650875..57c576870 100644
--- a/tex/context/base/mkiv/util-sbx.lua
+++ b/tex/context/base/mkiv/util-sbx.lua
@@ -28,6 +28,7 @@ local concat = string.concat
local unquoted = string.unquoted
local optionalquoted = string.optionalquoted
local basename = file.basename
+local nameonly = file.nameonly
local sandbox = sandbox
local validroots = { }
@@ -122,9 +123,9 @@ local function registerlibrary(name)
return
end
if validlibraries == true then
- validlibraries = { [name] = true }
+ validlibraries = { [nameonly(name)] = true }
else
- validlibraries[name] = true
+ validlibraries[nameonly(name)] = true
end
elseif name == true then
validlibraries = { }
@@ -461,7 +462,7 @@ function sandbox.getrunner(name)
end
local function suspicious(str)
- return (find(str,"[/\\]") or find(command,"%.%.")) and true or false
+ return (find(str,"[/\\]") or find(command,"..",1,true)) and true or false
end
local function binaryrunner(action,command,...)
@@ -562,9 +563,9 @@ if FFISUPPORTED and ffi then
end
end
- local load = ffi.load
+ local ffiload = ffi.load
- if load then
+ if ffiload then
local reported = { }
@@ -573,10 +574,10 @@ if FFISUPPORTED and ffi then
-- all blocked
elseif validlibraries == true then
-- all permitted
- return load(name,...)
- elseif validlibraries[name] then
+ return ffiload(name,...)
+ elseif validlibraries[nameonly(name)] then
-- 'name' permitted
- return load(name,...)
+ return ffiload(name,...)
else
-- 'name' not permitted
end
diff --git a/tex/context/base/mkiv/util-sci.lua b/tex/context/base/mkiv/util-sci.lua
index e028d2f95..fe28635a3 100644
--- a/tex/context/base/mkiv/util-sci.lua
+++ b/tex/context/base/mkiv/util-sci.lua
@@ -50,6 +50,9 @@ local function loadscitelexer()
if not lexer then
lexer = require("scite-context-lexer")
require("scite-context-theme") -- uses lexer
+ if lexer then
+ lexer.context.disablewordcheck()
+ end
end
return lexer
end
@@ -122,7 +125,7 @@ end
local function exportwhites()
return setmetatableindex(function(t,k)
- local v = find(k,"white") and true or false
+ local v = find(k,"white",1,true) and true or false
t[k] = v
return v
end)
diff --git a/tex/context/base/mkiv/util-seq.lua b/tex/context/base/mkiv/util-seq.lua
index 5836f5eca..d302ff276 100644
--- a/tex/context/base/mkiv/util-seq.lua
+++ b/tex/context/base/mkiv/util-seq.lua
@@ -361,6 +361,6 @@ function sequencers.nodeprocessor(t,nofarguments) -- todo: handle 'kind' in plug
end
end
local processor = #calls > 0 and formatters[nostate and template_yes_nostate or template_yes_state](concat(vars,"\n"),args,concat(calls,"\n")) or template_nop
--- print(processor)
+ -- print(processor)
return processor
end
diff --git a/tex/context/base/mkiv/util-sha.lua b/tex/context/base/mkiv/util-sha.lua
new file mode 100644
index 000000000..3e786a834
--- /dev/null
+++ b/tex/context/base/mkiv/util-sha.lua
@@ -0,0 +1,326 @@
+if not modules then modules = { } end modules ['util-sha'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files",
+ comment2 = "derived from Wikipedia and Lua support websites",
+ comment3 = "due to bit operators this code only works in lua(tex) 5.3",
+}
+
+-- This doesn't work in luajittex ... maybe some day it will have bit operators too.
+-- I'm not really in the mood for making this module engine aware (by compiling the
+-- functions depending on the engine) because I probably won't use luajittex in
+-- cases where I need this.
+--
+-- Hm, it actually makes a case for the macro subsystem but we then also need to
+-- make an unpack/pack replacement ... too boring.
+--
+-- This code is derived from:
+--
+-- http://lua-users.org/wiki/SecureHashAlgorithmBw
+--
+-- which in turn was a 5.3 variant of a 5.2 implementation by Roberto but it also
+-- looks like a more or less direct translation of:
+--
+-- https://en.wikipedia.org/wiki/SHA-2
+--
+-- I optimized the code a bit and added 512 support. For an explanation see the
+-- mentioned websites. We don't do chunks here as we only need it for hashing
+-- relatively small blobs (and even an image is not that large).
+--
+-- On short strings 256 seems faster than 512, while on a megabyte blob 512 beats
+-- 256 (with 64 bit internals).
+
+local packstring, unpackstring = string.pack, string.unpack
+local unpack, setmetatable = unpack, setmetatable
+
+local constants256 = {
+ 0x428a2f98, 0x71374491, 0xb5c0fbcf, 0xe9b5dba5, 0x3956c25b, 0x59f111f1, 0x923f82a4, 0xab1c5ed5,
+ 0xd807aa98, 0x12835b01, 0x243185be, 0x550c7dc3, 0x72be5d74, 0x80deb1fe, 0x9bdc06a7, 0xc19bf174,
+ 0xe49b69c1, 0xefbe4786, 0x0fc19dc6, 0x240ca1cc, 0x2de92c6f, 0x4a7484aa, 0x5cb0a9dc, 0x76f988da,
+ 0x983e5152, 0xa831c66d, 0xb00327c8, 0xbf597fc7, 0xc6e00bf3, 0xd5a79147, 0x06ca6351, 0x14292967,
+ 0x27b70a85, 0x2e1b2138, 0x4d2c6dfc, 0x53380d13, 0x650a7354, 0x766a0abb, 0x81c2c92e, 0x92722c85,
+ 0xa2bfe8a1, 0xa81a664b, 0xc24b8b70, 0xc76c51a3, 0xd192e819, 0xd6990624, 0xf40e3585, 0x106aa070,
+ 0x19a4c116, 0x1e376c08, 0x2748774c, 0x34b0bcb5, 0x391c0cb3, 0x4ed8aa4a, 0x5b9cca4f, 0x682e6ff3,
+ 0x748f82ee, 0x78a5636f, 0x84c87814, 0x8cc70208, 0x90befffa, 0xa4506ceb, 0xbef9a3f7, 0xc67178f2,
+}
+
+local constants512 = {
+ 0x428a2f98d728ae22, 0x7137449123ef65cd, 0xb5c0fbcfec4d3b2f, 0xe9b5dba58189dbbc, 0x3956c25bf348b538,
+ 0x59f111f1b605d019, 0x923f82a4af194f9b, 0xab1c5ed5da6d8118, 0xd807aa98a3030242, 0x12835b0145706fbe,
+ 0x243185be4ee4b28c, 0x550c7dc3d5ffb4e2, 0x72be5d74f27b896f, 0x80deb1fe3b1696b1, 0x9bdc06a725c71235,
+ 0xc19bf174cf692694, 0xe49b69c19ef14ad2, 0xefbe4786384f25e3, 0x0fc19dc68b8cd5b5, 0x240ca1cc77ac9c65,
+ 0x2de92c6f592b0275, 0x4a7484aa6ea6e483, 0x5cb0a9dcbd41fbd4, 0x76f988da831153b5, 0x983e5152ee66dfab,
+ 0xa831c66d2db43210, 0xb00327c898fb213f, 0xbf597fc7beef0ee4, 0xc6e00bf33da88fc2, 0xd5a79147930aa725,
+ 0x06ca6351e003826f, 0x142929670a0e6e70, 0x27b70a8546d22ffc, 0x2e1b21385c26c926, 0x4d2c6dfc5ac42aed,
+ 0x53380d139d95b3df, 0x650a73548baf63de, 0x766a0abb3c77b2a8, 0x81c2c92e47edaee6, 0x92722c851482353b,
+ 0xa2bfe8a14cf10364, 0xa81a664bbc423001, 0xc24b8b70d0f89791, 0xc76c51a30654be30, 0xd192e819d6ef5218,
+ 0xd69906245565a910, 0xf40e35855771202a, 0x106aa07032bbd1b8, 0x19a4c116b8d2d0c8, 0x1e376c085141ab53,
+ 0x2748774cdf8eeb99, 0x34b0bcb5e19b48a8, 0x391c0cb3c5c95a63, 0x4ed8aa4ae3418acb, 0x5b9cca4f7763e373,
+ 0x682e6ff3d6b2b8a3, 0x748f82ee5defb2fc, 0x78a5636f43172f60, 0x84c87814a1f0ab72, 0x8cc702081a6439ec,
+ 0x90befffa23631e28, 0xa4506cebde82bde9, 0xbef9a3f7b2c67915, 0xc67178f2e372532b, 0xca273eceea26619c,
+ 0xd186b8c721c0c207, 0xeada7dd6cde0eb1e, 0xf57d4f7fee6ed178, 0x06f067aa72176fba, 0x0a637dc5a2c898a6,
+ 0x113f9804bef90dae, 0x1b710b35131c471b, 0x28db77f523047d84, 0x32caab7b40c72493, 0x3c9ebe0a15c9bebc,
+ 0x431d67c49c100d4c, 0x4cc5d4becb3e42b6, 0x597f299cfc657e2a, 0x5fcb6fab3ad6faec, 0x6c44198c4a475817,
+}
+
+-- Not really needed, but more in tune with md5. In fact, as we use the mtxlib
+-- helpers I might as well assume more.
+
+local tohex, toHEX
+
+if lpeg then local lpegpatterns = lpeg.patterns if lpegpatterns then
+
+ local lpegmatch = lpeg.match
+ local bytestohex = lpegpatterns.bytestohex
+ local bytestoHEX = lpegpatterns.bytestoHEX
+
+ tohex = function(s) return lpegmatch(bytestohex,s) end
+ toHEX = function(s) return lpegmatch(bytestoHEX,s) end
+
+end end
+
+if not tohex then
+
+ local format, byte, gsub = string.format, string.byte, string.gsub
+
+ tohex = function(s) return (gsub(s,".",function(c) return format("%02x",byte(c)) end)) end
+ toHEX = function(s) return (gsub(s,".",function(c) return format("%02X",byte(c)) end)) end
+
+end
+
+local prepare = { }
+
+if utilities and utilities.strings then
+
+ local r = utilities.strings.newrepeater("\0")
+
+ prepare[256] = function(str,len)
+ return str .. "\128" .. r[-(1 + 8 + len) % 64] .. packstring(">I8", 8 * len)
+ end
+ prepare[512] = function(str,len)
+ return str .. "\128" .. r[-(1 + 16 + len) % 128] .. packstring(">I16", 8 * len)
+ end
+
+else
+
+ local rep = string.rep
+
+ prepare[256] = function(str,len)
+ return str .. "\128" .. rep("\0",-(1 + 8 + len) % 64) .. packstring(">I8", 8 * len)
+ end
+ prepare[512] = function(str,len)
+ return str .. "\128" .. rep("\0",-(1 + 16 + len) % 128) .. packstring(">I16", 8 * len)
+ end
+
+end
+
+prepare[224] = prepare[256]
+prepare[384] = prepare[512]
+
+local initialize = {
+ [224] = function(hash)
+ hash[1] = 0xc1059ed8 hash[2] = 0x367cd507
+ hash[3] = 0x3070dd17 hash[4] = 0xf70e5939
+ hash[5] = 0xffc00b31 hash[6] = 0x68581511
+ hash[7] = 0x64f98fa7 hash[8] = 0xbefa4fa4
+ return hash
+ end,
+ [256] = function(hash)
+ hash[1] = 0x6a09e667 hash[2] = 0xbb67ae85
+ hash[3] = 0x3c6ef372 hash[4] = 0xa54ff53a
+ hash[5] = 0x510e527f hash[6] = 0x9b05688c
+ hash[7] = 0x1f83d9ab hash[8] = 0x5be0cd19
+ return hash
+ end,
+ [384] = function(hash)
+ hash[1] = 0xcbbb9d5dc1059ed8 hash[2] = 0x629a292a367cd507
+ hash[3] = 0x9159015a3070dd17 hash[4] = 0x152fecd8f70e5939
+ hash[5] = 0x67332667ffc00b31 hash[6] = 0x8eb44a8768581511
+ hash[7] = 0xdb0c2e0d64f98fa7 hash[8] = 0x47b5481dbefa4fa4
+ return hash
+ end,
+ [512] = function(hash)
+ hash[1] = 0x6a09e667f3bcc908 hash[2] = 0xbb67ae8584caa73b
+ hash[3] = 0x3c6ef372fe94f82b hash[4] = 0xa54ff53a5f1d36f1
+ hash[5] = 0x510e527fade682d1 hash[6] = 0x9b05688c2b3e6c1f
+ hash[7] = 0x1f83d9abfb41bd6b hash[8] = 0x5be0cd19137e2179
+ return hash
+ end,
+}
+
+local digest = { }
+local list = { } -- some 5% faster
+
+digest[256] = function(str,i,hash)
+
+ for i=1,#str,64 do
+
+ -- local w = { unpackstring(">I4I4I4I4I4I4I4I4I4I4I4I4I4I4I4I4",str,i) }
+
+ list[ 1], list[ 2], list[ 3], list[ 4], list[ 5], list[ 6], list[ 7], list[ 8],
+ list[ 9], list[10], list[11], list[12], list[13], list[14], list[15], list[16] =
+ unpackstring(">I4I4I4I4I4I4I4I4I4I4I4I4I4I4I4I4",str,i)
+
+ for j=17,64 do
+ local v0 = list[j - 15]
+ local s0 = ((v0 >> 7) | (v0 << 25)) -- rrotate(v, 7)
+ ~ ((v0 >> 18) | (v0 << 14)) -- rrotate(v, 18)
+ ~ (v0 >> 3)
+ local v1 = list[j - 2]
+ local s1 = ((v1 >> 17) | (v1 << 15)) -- rrotate(v, 17)
+ ~ ((v1 >> 19) | (v1 << 13)) -- rrotate(v, 19)
+ ~ (v1 >> 10)
+ list[j] = (list[j - 16] + s0 + list[j - 7] + s1)
+ & 0xffffffff
+ end
+
+ local a, b, c, d, e, f, g, h = -- unpack(hash)
+ hash[1], hash[2], hash[3], hash[4], hash[5], hash[6], hash[7], hash[8]
+
+ for i=1,64 do
+ local s0 = ((a >> 2) | (a << 30)) -- rrotate(a, 2)
+ ~ ((a >> 13) | (a << 19)) -- rrotate(a, 13)
+ ~ ((a >> 22) | (a << 10)) -- rrotate(a, 22)
+ local maj = (a & b) ~ (a & c) ~ (b & c)
+ local t2 = s0 + maj
+ local s1 = ((e >> 6) | (e << 26)) -- rrotate(e, 6)
+ ~ ((e >> 11) | (e << 21)) -- rrotate(e, 11)
+ ~ ((e >> 25) | (e << 7)) -- rrotate(e, 25)
+ local ch = (e & f)
+ ~ (~e & g)
+ local t1 = h + s1 + ch + constants256[i] + list[i]
+ h = g
+ g = f
+ f = e
+ e = (d + t1) & 0xffffffff
+ d = c
+ c = b
+ b = a
+ a = (t1 + t2) & 0xffffffff
+ end
+
+ hash[1] = (hash[1] + a) & 0xffffffff
+ hash[2] = (hash[2] + b) & 0xffffffff
+ hash[3] = (hash[3] + c) & 0xffffffff
+ hash[4] = (hash[4] + d) & 0xffffffff
+ hash[5] = (hash[5] + e) & 0xffffffff
+ hash[6] = (hash[6] + f) & 0xffffffff
+ hash[7] = (hash[7] + g) & 0xffffffff
+ hash[8] = (hash[8] + h) & 0xffffffff
+
+ end
+end
+
+digest[512] = function(str,i,hash)
+
+ for i=1,#str,128 do
+
+ -- local w = { unpackstring(">I8I8I8I8I8I8I8I8I8I8I8I8I8I8I8I8",str,i) }
+
+ list[ 1], list[ 2], list[ 3], list[ 4], list[ 5], list[ 6], list[ 7], list[ 8],
+ list[ 9], list[10], list[11], list[12], list[13], list[14], list[15], list[16] =
+ unpackstring(">I8I8I8I8I8I8I8I8I8I8I8I8I8I8I8I8",str,i)
+
+ for j=17,80 do
+ local v0 = list[j - 15]
+ local s0 = ((v0 >> 1) | (v0 << 63)) -- rrotate(v, 1)
+ ~ ((v0 >> 8) | (v0 << 56)) -- rrotate(v, 8)
+ ~ (v0 >> 7)
+ local v1 = list[j - 2]
+ local s1 = ((v1 >> 19) | (v1 << 45)) -- rrotate(v, 19)
+ ~ ((v1 >> 61) | (v1 << 3)) -- rrotate(v, 61)
+ ~ (v1 >> 6)
+ list[j] = (list[j - 16] + s0 + list[j - 7] + s1)
+ -- & 0xffffffffffffffff
+ end
+
+ local a, b, c, d, e, f, g, h = -- unpack(hash)
+ hash[1], hash[2], hash[3], hash[4], hash[5], hash[6], hash[7], hash[8]
+
+ for i=1,80 do
+ local s0 = ((a >> 28) | (a << 36)) -- rrotate(a, 28)
+ ~ ((a >> 34) | (a << 30)) -- rrotate(a, 34)
+ ~ ((a >> 39) | (a << 25)) -- rrotate(a, 39)
+ local maj = (a & b) ~ (a & c) ~ (b & c)
+ local t2 = s0 + maj
+ local s1 = ((e >> 14) | (e << 50)) -- rrotate(e, 14)
+ ~ ((e >> 18) | (e << 46)) -- rrotate(e, 18)
+ ~ ((e >> 41) | (e << 23)) -- rrotate(e, 41)
+ local ch = (e & f)
+ ~ (~e & g)
+ local t1 = h + s1 + ch + constants512[i] + list[i]
+ h = g
+ g = f
+ f = e
+ e = (d + t1) -- & 0xffffffffffffffff
+ d = c
+ c = b
+ b = a
+ a = (t1 + t2) -- & 0xffffffffffffffff
+ end
+
+ hash[1] = (hash[1] + a) -- & 0xffffffffffffffff
+ hash[2] = (hash[2] + b) -- & 0xffffffffffffffff
+ hash[3] = (hash[3] + c) -- & 0xffffffffffffffff
+ hash[4] = (hash[4] + d) -- & 0xffffffffffffffff
+ hash[5] = (hash[5] + e) -- & 0xffffffffffffffff
+ hash[6] = (hash[6] + f) -- & 0xffffffffffffffff
+ hash[7] = (hash[7] + g) -- & 0xffffffffffffffff
+ hash[8] = (hash[8] + h) -- & 0xffffffffffffffff
+
+ end
+end
+
+digest[224] = digest[256]
+digest[384] = digest[512]
+
+local finalize = {
+ [224] = function(hash,tohex) return tohex(packstring(">I4I4I4I4I4I4I4", unpack(hash))) end, -- # 56
+ [256] = function(hash,tohex) return tohex(packstring(">I4I4I4I4I4I4I4I4",unpack(hash))) end, -- # 64
+ [384] = function(hash,tohex) return tohex(packstring(">I8I8I8I8I8I8", unpack(hash))) end, -- # 96
+ [512] = function(hash,tohex) return tohex(packstring(">I8I8I8I8I8I8I8I8",unpack(hash))) end, -- # 128
+}
+
+local hash = { }
+
+local function hashed(str,method,tohex)
+ local s = prepare[method](str,#str)
+ local h = initialize[method](hash)
+ digest[method](s,i,h)
+ return finalize[method](h,tohex)
+end
+
+local sha2 = {
+ hash224 = function(str) return hashed(str,224,tohex) end,
+ hash256 = function(str) return hashed(str,256,tohex) end,
+ hash384 = function(str) return hashed(str,384,tohex) end,
+ hash512 = function(str) return hashed(str,512,tohex) end,
+ HASH224 = function(str) return hashed(str,224,toHEX) end,
+ HASH256 = function(str) return hashed(str,256,toHEX) end,
+ HASH384 = function(str) return hashed(str,384,toHEX) end,
+ HASH512 = function(str) return hashed(str,512,toHEX) end,
+}
+
+-- local setmetatableindex = table.setmetatableindex
+--
+-- if setmetatableindex then
+-- sha2.hashed = setmetatableindex(function(t,k)
+-- local v = digest[k] and function(str) return hashed(str,k,tohex) end or false
+-- t[k] = v
+-- return v
+-- end)
+-- sha2.HASHED = setmetatableindex(function(t,k)
+-- local v = digest[k] and function(str) return hashed(str,k,toHEX) end or false
+-- t[k] = v
+-- return v
+-- end)
+-- end
+
+if utilities then
+ utilities.sha2 = sha2
+end
+
+return sha2
+
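
The module exports one function per digest size, in lowercase (`hash*`) and uppercase (`HASH*`) hex flavours. A quick sanity check against the 'test' vectors quoted later in the util-sql-users patch (a sketch; it needs the Lua 5.3 bit operators):

    -- sketch: hashing "test" with the new helpers (lua(tex) 5.3 only)
    local sha2 = require("util-sha")

    print(sha2.HASH256("test")) -- 9F86D081884C7D659A2FEAA0C55AD015A3BF4F1B2B0B822CD15D6C15B0F00A08
    print(sha2.HASH512("test")) -- EE26B0DD4AF7E749AA1A8EE3C10AE9923F618980772E473F8819A5D4940E0DB27AC185F8A0E1D5F84F88BC887FD67B143732C304CC5FA9AD8E6F57F50028A8FF
    print(sha2.hash256("test") == sha2.HASH256("test"):lower()) -- true, hash* gives lowercase hex
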
diff --git a/tex/context/base/mkiv/util-sql-imp-ffi.lua b/tex/context/base/mkiv/util-sql-imp-ffi.lua
new file mode 100644
index 000000000..07c4752cd
--- /dev/null
+++ b/tex/context/base/mkiv/util-sql-imp-ffi.lua
@@ -0,0 +1,569 @@
+if not modules then modules = { } end modules ['util-sql-imp-ffi'] = {
+ version = 1.001,
+ comment = "companion to util-sql.lua",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- I looked at luajit-mysql to see how the ffi mapping was done but it didn't work
+-- out that well (at least not on windows), although I did get the picture. As I have
+-- somewhat different demands I simplified / redid the ffi bit and just took the
+-- swiglib variant and adapted that.
+
+local tonumber = tonumber
+local concat = table.concat
+local format, byte = string.format, string.byte
+local lpegmatch = lpeg.match
+local setmetatable, type = setmetatable, type
+local sleep = os.sleep
+
+local trace_sql = false trackers.register("sql.trace", function(v) trace_sql = v end)
+local trace_queries = false trackers.register("sql.queries",function(v) trace_queries = v end)
+local report_state = logs.reporter("sql","ffi")
+
+if not utilities.sql then
+ require("util-sql")
+end
+
+ffi.cdef [[
+
+ /*
+ This is as lean and mean as possible. After all we just need a connection and
+ a query. The rest is handled already in the Lua code elsewhere.
+ */
+
+ typedef void MYSQL_instance;
+ typedef void MYSQL_result;
+ typedef char **MYSQL_row;
+ typedef unsigned int MYSQL_offset;
+
+ typedef struct st_mysql_field {
+ char *name;
+ char *org_name;
+ char *table;
+ char *org_table;
+ char *db;
+ char *catalog;
+ char *def;
+ unsigned long length;
+ unsigned long max_length;
+ unsigned int name_length;
+ unsigned int org_name_length;
+ unsigned int table_length;
+ unsigned int org_table_length;
+ unsigned int db_length;
+ unsigned int catalog_length;
+ unsigned int def_length;
+ unsigned int flags;
+ unsigned int decimals;
+ unsigned int charsetnr;
+ int type;
+ void *extension;
+ } MYSQL_field;
+
+ void free(void*ptr);
+ void * malloc(size_t size);
+
+ MYSQL_instance * mysql_init (
+ MYSQL_instance *mysql
+ );
+
+ MYSQL_instance * mysql_real_connect (
+ MYSQL_instance *mysql,
+ const char *host,
+ const char *user,
+ const char *passwd,
+ const char *db,
+ unsigned int port,
+ const char *unix_socket,
+ unsigned long clientflag
+ );
+
+ unsigned int mysql_errno (
+ MYSQL_instance *mysql
+ );
+
+ const char *mysql_error (
+ MYSQL_instance *mysql
+ );
+
+ /* int mysql_query (
+ MYSQL_instance *mysql,
+ const char *q
+ ); */
+
+ int mysql_real_query (
+ MYSQL_instance *mysql,
+ const char *q,
+ unsigned long length
+ );
+
+ MYSQL_result * mysql_store_result (
+ MYSQL_instance *mysql
+ );
+
+ void mysql_free_result (
+ MYSQL_result *result
+ );
+
+ unsigned long long mysql_num_rows (
+ MYSQL_result *res
+ );
+
+ MYSQL_row mysql_fetch_row (
+ MYSQL_result *result
+ );
+
+ unsigned int mysql_num_fields (
+ MYSQL_result *res
+ );
+
+ /* MYSQL_field *mysql_fetch_field (
+ MYSQL_result *result
+ ); */
+
+ MYSQL_field * mysql_fetch_fields (
+ MYSQL_result *res
+ );
+
+ MYSQL_offset mysql_field_seek(
+ MYSQL_result *result,
+ MYSQL_offset offset
+ );
+
+ void mysql_close(
+ MYSQL_instance *sock
+ );
+
+ /* unsigned long * mysql_fetch_lengths(
+ MYSQL_result *result
+ ); */
+
+]]
+
+local sql = utilities.sql
+----- mysql = ffi.load(os.name == "windows" and "libmysql" or "libmysqlclient")
+----- mysql = ffilib(os.name == "windows" and "libmysql" or "libmysqlclient")
+local mysql = ffilib(os.name == "windows" and "libmysql" or "libmysql")
+
+if not mysql then
+ report_state("unable to load library")
+end
+
+local nofretries = 5
+local retrydelay = 1
+
+local cache = { }
+local helpers = sql.helpers
+local methods = sql.methods
+local validspecification = helpers.validspecification
+local querysplitter = helpers.querysplitter
+local dataprepared = helpers.preparetemplate
+local serialize = sql.serialize
+local deserialize = sql.deserialize
+
+local mysql_initialize = mysql.mysql_init
+
+local mysql_open_connection = mysql.mysql_real_connect
+local mysql_execute_query = mysql.mysql_real_query
+local mysql_close_connection = mysql.mysql_close
+
+local mysql_field_seek = mysql.mysql_field_seek
+local mysql_num_fields = mysql.mysql_num_fields
+local mysql_fetch_fields = mysql.mysql_fetch_fields
+----- mysql_fetch_field = mysql.mysql_fetch_field
+local mysql_num_rows = mysql.mysql_num_rows
+local mysql_fetch_row = mysql.mysql_fetch_row
+----- mysql_fetch_lengths = mysql.mysql_fetch_lengths
+local mysql_init = mysql.mysql_init
+local mysql_store_result = mysql.mysql_store_result
+local mysql_free_result = mysql.mysql_free_result
+
+local mysql_error_message = mysql.mysql_error
+
+local NULL = ffi.cast("MYSQL_result *",0)
+
+local ffi_tostring = ffi.string
+local ffi_gc = ffi.gc
+
+----- mysqldata = ffi.cast("MYSQL_instance*",mysql.malloc(1024*1024))
+local instance = mysql.mysql_init(nil) -- (mysqldata)
+
+local mysql_constant_false = false
+local mysql_constant_true = true
+
+local function finish(t)
+ local r = t._result_
+ if r then
+ ffi_gc(r,mysql_free_result)
+ end
+end
+
+local function getcolnames(t)
+ return t.names
+end
+
+local function getcoltypes(t)
+ return t.types
+end
+
+local function numrows(t)
+ return tonumber(t.nofrows)
+end
+
+local function list(t)
+ local result = t._result_
+ if result then
+ local row = mysql_fetch_row(result)
+ -- local len = mysql_fetch_lengths(result)
+ local result = { }
+ for i=1,t.noffields do
+ result[i] = ffi_tostring(row[i-1])
+ end
+ return result
+ end
+end
+
+local function hash(t)
+ local result = t._result_
+ local fields = t.names
+ if result then
+ local row = mysql_fetch_row(result)
+ -- local len = mysql_fetch_lengths(result)
+ local result = { }
+ for i=1,t.noffields do
+ result[fields[i]] = ffi_tostring(row[i-1])
+ end
+ return result
+ end
+end
+
+local function wholelist(t)
+ return fetch_all_rows(t._result_)
+end
+
+local mt = { __index = {
+ -- regular
+ finish = finish,
+ list = list,
+ hash = hash,
+ wholelist = wholelist,
+ -- compatibility
+ numrows = numrows,
+ getcolnames = getcolnames,
+ getcoltypes = getcoltypes,
+ -- fallback
+ _result_ = nil,
+ names = { },
+ types = { },
+ noffields = 0,
+ nofrows = 0,
+ }
+}
+
+local nt = setmetatable({},mt)
+
+-- session
+
+local function close(t)
+ mysql_close_connection(t._connection_)
+end
+
+local function execute(t,query)
+ if query and query ~= "" then
+ local connection = t._connection_
+ local result = mysql_execute_query(connection,query,#query)
+ if result == 0 then
+ local result = mysql_store_result(connection)
+ if result ~= NULL then
+ mysql_field_seek(result,0)
+ local nofrows = tonumber(mysql_num_rows(result) or 0)
+ local noffields = tonumber(mysql_num_fields(result))
+ local names = { }
+ local types = { }
+ local fields = mysql_fetch_fields(result)
+ for i=1,noffields do
+ local field = fields[i-1]
+ names[i] = ffi_tostring(field.name)
+ types[i] = tonumber(field.type) -- todo
+ end
+ local t = {
+ _result_ = result,
+ names = names,
+ types = types,
+ noffields = noffields,
+ nofrows = nofrows,
+ }
+ return setmetatable(t,mt)
+ else
+ return nt
+ end
+ end
+ end
+ return false
+end
+
+local mt = { __index = {
+ close = close,
+ execute = execute,
+ }
+}
+
+local function open(t,database,username,password,host,port)
+ local connection = mysql_open_connection(
+ t._session_,
+ host or "localhost",
+ username or "",
+ password or "",
+ database or "",
+ port or 0,
+ NULL,
+ 0
+ )
+ if connection ~= NULL then
+ local t = {
+ _connection_ = connection,
+ }
+ return setmetatable(t,mt)
+ end
+end
+
+local function message(t)
+ return mysql_error_message(t._session_)
+end
+
+local function close(t)
+ -- dummy, as we have a global session
+end
+
+local mt = {
+ __index = {
+ connect = open,
+ close = close,
+ message = message,
+ }
+}
+
+local function initialize()
+ local session = {
+ _session_ = mysql_initialize(instance) -- maybe share, single thread anyway
+ }
+ return setmetatable(session,mt)
+end
+
+-- -- -- --
+
+local function connect(session,specification)
+ return session:connect(
+ specification.database or "",
+ specification.username or "",
+ specification.password or "",
+ specification.host or "",
+ specification.port
+ )
+end
+
+local function error_in_connection(specification,action)
+ report_state("error in connection: [%s] %s@%s to %s:%s",
+ action or "unknown",
+ specification.database or "no database",
+ specification.username or "no username",
+ specification.host or "no host",
+ specification.port or "no port"
+ )
+end
+
+local function datafetched(specification,query,converter)
+ if not query or query == "" then
+ report_state("no valid query")
+ return { }, { }
+ end
+ local id = specification.id
+ local session, connection
+ if id then
+ local c = cache[id]
+ if c then
+ session = c.session
+ connection = c.connection
+ end
+ if not connection then
+ session = initialize()
+ connection = connect(session,specification)
+ if not connection then
+ for i=1,nofretries do
+ sleep(retrydelay)
+ report_state("retrying to connect: [%s.%s] %s@%s to %s:%s",
+ id,i,
+ specification.database or "no database",
+ specification.username or "no username",
+ specification.host or "no host",
+ specification.port or "no port"
+ )
+ connection = connect(session,specification)
+ if connection then
+ break
+ end
+ end
+ end
+ if connection then
+ cache[id] = { session = session, connection = connection }
+ end
+ end
+ else
+ session = initialize()
+ connection = connect(session,specification)
+ if not connection then
+ for i=1,nofretries do
+ sleep(retrydelay)
+ report_state("retrying to connect: [%s] %s@%s to %s:%s",
+ i,
+ specification.database or "no database",
+ specification.username or "no username",
+ specification.host or "no host",
+ specification.port or "no port"
+ )
+ connection = connect(session,specification)
+ if connection then
+ break
+ end
+ end
+ end
+ end
+ if not connection then
+ report_state("error in connection: %s@%s to %s:%s",
+ specification.database or "no database",
+ specification.username or "no username",
+ specification.host or "no host",
+ specification.port or "no port"
+ )
+ return { }, { }
+ end
+ query = lpegmatch(querysplitter,query)
+ local result, message, okay
+ for i=1,#query do
+ local q = query[i]
+ local r, m = connection:execute(q)
+ if m then
+ report_state("error in query, stage: %s",string.collapsespaces(q or "?"))
+ message = message and format("%s\n%s",message,m) or m
+ end
+ if type(r) == "table" then
+ result = r
+ okay = true
+ elseif not m then
+ okay = true
+ end
+ end
+ local data, keys
+ if result then
+ if converter then
+ data = converter.ffi(result)
+ else
+ keys = result.names
+ data = { }
+ for i=1,result.nofrows do
+ data[i] = result:hash()
+ end
+ end
+ result:finish() -- result:close()
+ elseif message then
+ report_state("message %s",message)
+ end
+ if not keys then
+ keys = { }
+ end
+ if not data then
+ data = { }
+ end
+ if not id then
+ connection:close()
+ session:close()
+ end
+ return data, keys
+end
+
+local function execute(specification)
+ if trace_sql then
+ report_state("executing library")
+ end
+ if not validspecification(specification) then
+ report_state("error in specification")
+ return
+ end
+ local query = dataprepared(specification)
+ if not query then
+ report_state("error in preparation")
+ return
+ end
+ local data, keys = datafetched(specification,query,specification.converter)
+ if not data then
+ report_state("error in fetching")
+ return
+ end
+ local one = data[1]
+ if one then
+ setmetatable(data,{ __index = one } )
+ end
+ return data, keys
+end
+
+local wraptemplate = [[
+----- mysql = ffi.load(os.name == "windows" and "libmysql" or "libmysqlclient")
+local mysql = ffi.load(os.name == "windows" and "libmysql" or "libmysql")
+
+local mysql_fetch_row = mysql.mysql_fetch_row
+local ffi_tostring = ffi.string
+
+local converters = utilities.sql.converters
+local deserialize = utilities.sql.deserialize
+
+local tostring = tostring
+local tonumber = tonumber
+local booleanstring = string.booleanstring
+
+local NULL = ffi.cast("MYSQL_result *",0)
+
+%s
+
+return function(result)
+ if not result then
+ return { }
+ end
+ local nofrows = result.nofrows or 0
+ if nofrows == 0 then
+ return { }
+ end
+ local noffields = result.noffields or 0
+ local _result_ = result._result_
+ local target = { } -- no %s needed here
+ for i=1,nofrows do
+ local cells = { }
+ local row = mysql_fetch_row(_result_)
+ for j=1,noffields do
+ local s = row[j-1]
+ if s == NULL then
+ cells[j] = ""
+ else
+ cells[j] = ffi_tostring(s)
+ end
+ end
+ target[%s] = {
+ %s
+ }
+ end
+ result:finish() -- result:close()
+ return target
+end
+]]
+
+local celltemplate = "cells[%s]"
+
+methods.ffi = {
+ runner = function() end, -- never called
+ execute = execute,
+ initialize = initialize, -- returns session
+ usesfiles = false,
+ wraptemplate = wraptemplate,
+ celltemplate = celltemplate,
+}
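
The result tables produced by `execute` carry the column metadata plus the methods installed via the metatable (`hash`, `list`, `numrows`, `finish`). A rough sketch of driving the binding directly; credentials and table/column names are placeholders:

    -- sketch: walking a result set with the ffi binding defined above (placeholder values)
    local session    = utilities.sql.methods.ffi.initialize()
    local connection = session:connect("test","someuser","somepassword","localhost",3306)
    if connection then
        local result = connection:execute("SELECT `name` FROM `users`")
        if result and result.nofrows > 0 then
            for i=1,result:numrows() do
                local row = result:hash() -- fetches the next row as a field-name indexed table
                print(row.name)
            end
            result:finish()               -- hands the MYSQL_result to ffi.gc for freeing
        end
        connection:close()
    end
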
diff --git a/tex/context/base/mkiv/util-sql-imp-library.lua b/tex/context/base/mkiv/util-sql-imp-library.lua
index e16853612..a2b692e45 100644
--- a/tex/context/base/mkiv/util-sql-imp-library.lua
+++ b/tex/context/base/mkiv/util-sql-imp-library.lua
@@ -12,7 +12,7 @@ if not modules then modules = { } end modules ['util-sql-imp-library'] = {
-- we couldn't figure it out (some issue with adapting the table that is passed as first
-- argument in the fetch routine). Apart from this it looks like the mysql binding has some
-- efficiency issues (like creating a keys and types table for each row) but that could be
--- optimized. Anyhow, fecthing results can be done as follows:
+-- optimized. Anyhow, fetching results can be done as follows:
-- local function collect_1(r)
-- local t = { }
diff --git a/tex/context/base/mkiv/util-sql-imp-sqlite.lua b/tex/context/base/mkiv/util-sql-imp-sqlite.lua
index 1a960c1c3..04d5ced3a 100644
--- a/tex/context/base/mkiv/util-sql-imp-sqlite.lua
+++ b/tex/context/base/mkiv/util-sql-imp-sqlite.lua
@@ -6,16 +6,9 @@ if not modules then modules = { } end modules ['util-sql-imp-sqlite'] = {
license = "see context related readme files"
}
-local next = next
+local next, tonumber = next, tonumber
-local sql = require("util-sql")
------ sql = utilities.sql
-local sqlite = require("swiglib.sqlite.core")
-local swighelpers = require("swiglib.helpers.core")
-
--- sql.sqlite = sqlite -- maybe in the module itself
-
--- inspect(table.sortedkeys(sqlite))
+local sql = utilities.sql or require("util-sql")
local trace_sql = false trackers.register("sql.trace", function(v) trace_sql = v end)
local trace_queries = false trackers.register("sql.queries",function(v) trace_queries = v end)
@@ -25,26 +18,95 @@ local helpers = sql.helpers
local methods = sql.methods
local validspecification = helpers.validspecification
local preparetemplate = helpers.preparetemplate
-local splitdata = helpers.splitdata
-local serialize = sql.serialize
-local deserialize = sql.deserialize
-local getserver = sql.getserver
local setmetatable = setmetatable
local formatters = string.formatters
-local get_list_item = sqlite.char_p_array_getitem
-local is_okay = sqlite.SQLITE_OK
-local execute_query = sqlite.sqlite3_exec_lua_callback
-local error_message = sqlite.sqlite3_errmsg
+----- sqlite = require("swiglib.sqlite.core")
+----- swighelpers = require("swiglib.helpers.core")
+-----
+----- get_list_item = sqlite.char_p_array_getitem
+----- is_okay = sqlite.SQLITE_OK
+----- execute_query = sqlite.sqlite3_exec_lua_callback
+----- error_message = sqlite.sqlite3_errmsg
+-----
+----- new_db = sqlite.new_sqlite3_p_array
+----- open_db = sqlite.sqlite3_open
+----- get_db = sqlite.sqlite3_p_array_getitem
+----- close_db = sqlite.sqlite3_close
+----- dispose_db = sqlite.delete_sqlite3_p_array
+
+local ffi = require("ffi")
+
+ffi.cdef [[
+
+ typedef struct sqlite3 sqlite3;
+
+ int sqlite3_initialize (
+ void
+ ) ;
+
+ int sqlite3_open (
+ const char *filename,
+ sqlite3 **ppDb
+ ) ;
+
+ int sqlite3_close (
+ sqlite3 *
+ ) ;
+
+ int sqlite3_exec (
+ sqlite3*,
+ const char *sql,
+ int (*callback)(void*,int,char**,char**),
+ void *,
+ char **errmsg
+ ) ;
+
+ const char *sqlite3_errmsg (
+ sqlite3*
+ );
+]]
+
+local ffi_tostring = ffi.string
+
+----- sqlite = ffi.load("sqlite3")
+local sqlite = ffilib("sqlite3")
+
+sqlite.sqlite3_initialize();
+
+local c_errmsg = sqlite.sqlite3_errmsg
+local c_open = sqlite.sqlite3_open
+local c_close = sqlite.sqlite3_close
+local c_exec = sqlite.sqlite3_exec
+
+local is_okay = 0
+local open_db = c_open
+local close_db = c_close
+local execute_query = c_exec
+
+local function error_message(db)
+ return ffi_tostring(c_errmsg(db))
+end
-local new_db = sqlite.new_sqlite3_p_array
-local open_db = sqlite.sqlite3_open
-local get_db = sqlite.sqlite3_p_array_getitem
-local close_db = sqlite.sqlite3_close
-local dispose_db = sqlite.delete_sqlite3_p_array
+local function new_db(n)
+ return ffi.new("sqlite3*["..n.."]")
+end
-local cache = { }
+local function dispose_db(db)
+end
+
+local function get_db(db,n)
+ return db[n]
+end
+
+-- local function execute_query(dbh,query,callback)
+-- local c = ffi.cast("int (*callback)(void*,int,char**,char**)",callback)
+-- c_exec(dbh,query,c,nil,nil)
+-- c:free()
+-- end
+
+local cache = { }
setmetatable(cache, {
__gc = function(t)
@@ -135,30 +197,31 @@ local function execute(specification)
query = preamble .. query -- only needed in open
end
if converter then
- converter = converter.sqlite
+ local convert = converter.sqlite
+ local column = { }
callback = function(data,nofcolumns,values,fields)
- local column = { }
- for i=0,nofcolumns-1 do
- column[i+1] = get_list_item(values,i)
+ for i=1,nofcolumns do
+ -- column[i] = get_list_item(values,i-1)
+ column[i] = ffi_tostring(values[i-1])
end
- nofrows = nofrows + 1
- result[nofrows] = converter(column)
+ nofrows = nofrows + 1
+ result[nofrows] = convert(column)
return is_okay
end
- --
- -- callback = converter.sqlite
else
+ local column = { }
callback = function(data,nofcolumns,values,fields)
- local column = { }
for i=0,nofcolumns-1 do
local field
if keysdone then
field = keys[i+1]
else
- field = get_list_item(fields,i)
+ -- field = get_list_item(fields,i)
+ field = ffi_tostring(fields[i])
keys[i+1] = field
end
- column[field] = get_list_item(values,i)
+ -- column[field] = get_list_item(values,i)
+ column[field] = ffi_tostring(values[i])
end
nofrows = nofrows + 1
keysdone = true
@@ -201,34 +264,6 @@ end
local celltemplate = "cells[%s]"
--- todo: how to deal with result ... pass via temp global .. bah .. or
--- also pass the execute here ... not now
---
--- local wraptemplate = [[
--- local converters = utilities.sql.converters
--- local deserialize = utilities.sql.deserialize
---
--- local tostring = tostring
--- local tonumber = tonumber
--- local booleanstring = string.booleanstring
---
--- local get_list_item = utilities.sql.sqlite.char_p_array_getitem
--- local is_okay = utilities.sql.sqlite.SQLITE_OK
---
--- %s
---
--- return function(data,nofcolumns,values,fields)
--- -- no %s (data) needed
--- -- no %s (i) needed
--- local cells = { }
--- for i=0,nofcolumns-1 do
--- cells[i+1] = get_list_item(values,i)
--- end
--- result[#result+1] = { %s }
--- return is_okay
--- end
--- ]]
-
methods.sqlite = {
execute = execute,
usesfiles = false,
diff --git a/tex/context/base/mkiv/util-sql-loggers.lua b/tex/context/base/mkiv/util-sql-loggers.lua
index 4473f8971..b69e397d2 100644
--- a/tex/context/base/mkiv/util-sql-loggers.lua
+++ b/tex/context/base/mkiv/util-sql-loggers.lua
@@ -53,16 +53,15 @@ loggers.totype = totype
loggers.fromtype = fromtype
local template = [[
- CREATE TABLE IF NOT EXISTS %basename% (
- `id` int(11) NOT NULL AUTO_INCREMENT,
- `time` int(11) NOT NULL,
- `type` int(11) NOT NULL,
- `action` varchar(15) NOT NULL,
- `data` longtext,
- PRIMARY KEY (`id`),
- UNIQUE KEY `id_unique_key` (`id`)
- )
- DEFAULT CHARSET = utf8 ;
+CREATE TABLE IF NOT EXISTS %basename% (
+ `id` int(11) NOT NULL AUTO_INCREMENT,
+ `time` int(11) NOT NULL,
+ `type` int(11) NOT NULL,
+ `action` varchar(15) NOT NULL,
+ `data` longtext,
+ PRIMARY KEY (`id`),
+ UNIQUE KEY `id_unique_key` (`id`)
+) DEFAULT CHARSET = utf8 ;
]]
local sqlite_template = [[
@@ -78,6 +77,7 @@ local sqlite_template = [[
function loggers.createdb(presets,datatable)
local db = checkeddb(presets,datatable)
+
db.execute {
template = db.usedmethod == "sqlite" and sqlite_template or template,
variables = {
diff --git a/tex/context/base/mkiv/util-sql-logins.lua b/tex/context/base/mkiv/util-sql-logins.lua
new file mode 100644
index 000000000..dcb48fb35
--- /dev/null
+++ b/tex/context/base/mkiv/util-sql-logins.lua
@@ -0,0 +1,305 @@
+if not modules then modules = { } end modules ['util-sql-logins'] = {
+ version = 1.001,
+ comment = "companion to lmx-*",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+if not utilities.sql then require("util-sql") end
+
+local sql = utilities.sql
+local sqlexecute = sql.execute
+local sqlmakeconverter = sql.makeconverter
+
+local format = string.format
+local ostime = os.time
+local formatter = string.formatter
+
+local trace_logins = true
+local report_logins = logs.reporter("sql","logins")
+
+local logins = sql.logins or { }
+sql.logins = logins
+
+logins.maxnoflogins = logins.maxnoflogins or 10
+logins.cooldowntime = logins.cooldowntime or 10 * 60
+logins.purgetime = logins.purgetime or 1 * 60 * 60
+logins.autopurge = true
+
+local function checkeddb(presets,datatable)
+ return sql.usedatabase(presets,datatable or presets.datatable or "logins")
+end
+
+logins.usedb = checkeddb
+
+local template = [[
+ CREATE TABLE IF NOT EXISTS %basename% (
+ `id` int(11) NOT NULL AUTO_INCREMENT,
+ `name` varchar(50) NOT NULL,
+ `time` int(11) DEFAULT '0',
+ `n` int(11) DEFAULT '0',
+ `state` int(11) DEFAULT '0',
+
+ PRIMARY KEY (`id`),
+ UNIQUE KEY `id_unique_index` (`id`),
+ UNIQUE KEY `name_unique_key` (`name`)
+ ) DEFAULT CHARSET = utf8 ;
+]]
+
+local sqlite_template = [[
+ CREATE TABLE IF NOT EXISTS %basename% (
+ `id` INTEGER NOT NULL AUTO_INCREMENT,
+ `name` TEXT NOT NULL,
+ `time` INTEGER DEFAULT '0',
+ `n` INTEGER DEFAULT '0',
+ `state` INTEGER DEFAULT '0'
+ ) ;
+]]
+
+function logins.createdb(presets,datatable)
+
+ local db = checkeddb(presets,datatable)
+
+ local data, keys = db.execute {
+ template = db.usedmethod == "sqlite" and sqlite_template or template,
+ variables = {
+ basename = db.basename,
+ },
+ }
+
+ report_logins("datatable %a created in %a",db.name,db.base)
+
+ return db
+
+end
+
+local template = [[
+ DROP TABLE IF EXISTS %basename% ;
+]]
+
+function logins.deletedb(presets,datatable)
+
+ local db = checkeddb(presets,datatable)
+
+ local data, keys = db.execute {
+ template = template,
+ variables = {
+ basename = db.basename,
+ },
+ }
+
+ report_logins("datatable %a removed in %a",db.name,db.base)
+
+end
+
+local states = {
+ [0] = "unset",
+ [1] = "known",
+ [2] = "unknown",
+}
+
+local converter_fetch, fields_fetch = sqlmakeconverter {
+ { name = "id", type = "number" },
+ { name = "name", type = "string" },
+ { name = "time", type = "number" },
+ { name = "n", type = "number" },
+ { name = "state", type = "number" }, -- faster than mapping
+}
+
+local template_fetch = format( [[
+ SELECT
+ %s
+ FROM
+ `logins`
+ WHERE
+ `name` = '%%[name]%%'
+]], fields_fetch )
+
+local template_insert = [[
+ INSERT INTO `logins`
+ ( `name`, `state`, `time`, `n`)
+ VALUES
+ ('%[name]%', %state%, %time%, %n%)
+]]
+
+local template_update = [[
+ UPDATE
+ `logins`
+ SET
+ `state` = %state%,
+ `time` = %time%,
+ `n` = %n%
+ WHERE
+ `name` = '%[name]%'
+]]
+
+local template_delete = [[
+ DELETE FROM
+ `logins`
+ WHERE
+ `name` = '%[name]%'
+]]
+
+local template_purge = [[
+ DELETE FROM
+ `logins`
+ WHERE
+ `time` < '%time%'
+]]
+
+-- todo: auto cleanup (when new attempt)
+
+local cache = { } setmetatable(cache, { __mode = 'v' })
+
+-- local function usercreate(presets)
+-- sqlexecute {
+-- template = template_create,
+-- presets = presets,
+-- }
+-- end
+
+function logins.userunknown(db,name)
+ local d = {
+ name = name,
+ state = 2,
+ time = ostime(),
+ n = 0,
+ }
+ db.execute {
+ template = template_update,
+ variables = d,
+ }
+ cache[name] = d
+ report_logins("user %a is registered as unknown",name)
+end
+
+function logins.userknown(db,name)
+ local d = {
+ name = name,
+ state = 1,
+ time = ostime(),
+ n = 0,
+ }
+ db.execute {
+ template = template_update,
+ variables = d,
+ }
+ cache[name] = d
+ report_logins("user %a is registered as known",name)
+end
+
+function logins.userreset(db,name)
+ db.execute {
+ template = template_delete,
+ }
+ cache[name] = nil
+ report_logins("user %a is reset",name)
+end
+
+local function userpurge(db,delay)
+ db.execute {
+ template = template_purge,
+ variables = {
+ time = ostime() - (delay or logins.purgetime),
+ }
+ }
+ cache = { }
+ report_logins("users are purged")
+end
+
+logins.userpurge = userpurge
+
+local function verdict(okay,...)
+ if not trace_logins then
+ -- no tracing
+ elseif okay then
+ report_logins("%s, granted",formatter(...))
+ else
+ report_logins("%s, blocked",formatter(...))
+ end
+ return okay
+end
+
+local lasttime = 0
+
+function logins.userpermitted(db,name)
+ local currenttime = ostime()
+ if logins.autopurge and (lasttime == 0 or (currenttime - lasttime > logins.purgetime)) then
+ report_logins("automatic purge triggered")
+ userpurge(db)
+ lasttime = currenttime
+ end
+ local data = cache[name]
+ if data then
+ report_logins("user %a is cached",name)
+ else
+ report_logins("user %a is fetched",name)
+ data = db.execute {
+ template = template_fetch,
+ converter = converter_fetch,
+ variables = {
+ name = name,
+ }
+ }
+ end
+ if not data or not data.name then
+ local d = {
+ name = name,
+ state = 0,
+ time = currenttime,
+ n = 1,
+ }
+ db.execute {
+ template = template_insert,
+ variables = d,
+ }
+ cache[name] = d
+ return verdict(true,"creating new entry for %a",name)
+ end
+ cache[name] = data[1]
+ local state = data.state
+ if state == 2 then -- unknown
+ return verdict(false,"user %a has state %a",name,states[state])
+ end
+ local n = data.n
+ local m = logins.maxnoflogins
+ if n > m then
+ local deltatime = currenttime - data.time
+ local cooldowntime = logins.cooldowntime
+ if deltatime < cooldowntime then
+ return verdict(false,"user %a is blocked for %s seconds out of %s",name,cooldowntime-deltatime,cooldowntime)
+ else
+ n = 0
+ end
+ end
+ if n == 0 then
+ local d = {
+ name = name,
+ state = 0,
+ time = currenttime,
+ n = 1,
+ }
+ db.execute {
+ template = template_update,
+ variables = d,
+ }
+ cache[name] = d
+ return verdict(true,"user %a gets a first chance",name)
+ else
+ local d = {
+ name = name,
+ state = 0,
+ time = currenttime,
+ n = n + 1,
+ }
+ db.execute {
+ template = template_update,
+ variables = d,
+ }
+ cache[name] = d
+ return verdict(true,"user %a gets a new chance, %s attempts out of %s done",name,n,m)
+ end
+end
+
+return logins
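
Taken together the module behaves like a small per-user rate limiter: `userpermitted` creates or refreshes the record and enforces the cooldown, while `userknown`/`userunknown` store the outcome of the actual lookup. A sketch of one possible flow; the presets and the helper functions are placeholders:

    -- sketch: one possible flow around the login tracker (presets and helpers are made up)
    local logins = require("util-sql-logins")

    local db = logins.createdb { method = "sqlite", database = "test" }

    local function attempt(name,password)
        if not logins.userpermitted(db,name) then
            return false                     -- blocked name or still inside the cooldown window
        end
        if not userexists(name) then         -- hypothetical lookup in the real user table
            logins.userunknown(db,name)      -- state 2: this name is refused from now on
            return false
        end
        if checkpassword(name,password) then -- hypothetical credential check
            logins.userknown(db,name)        -- state 1: attempt counter reset
            return true
        end
        return false                         -- wrong password: the counter set by userpermitted keeps ticking
    end
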
diff --git a/tex/context/base/mkiv/util-sql-sessions.lua b/tex/context/base/mkiv/util-sql-sessions.lua
index 76bb91962..17cf66142 100644
--- a/tex/context/base/mkiv/util-sql-sessions.lua
+++ b/tex/context/base/mkiv/util-sql-sessions.lua
@@ -48,8 +48,16 @@ local template =[[
`created` int(11) NOT NULL,
`accessed` int(11) NOT NULL,
UNIQUE KEY `token_unique_key` (`token`)
- )
- DEFAULT CHARSET = utf8 ;
+ ) DEFAULT CHARSET = utf8 ;
+]]
+
+local sqlite_template =[[
+ CREATE TABLE IF NOT EXISTS %basename% (
+ `token` TEXT NOT NULL,
+ `data` TEXT NOT NULL,
+ `created` INTEGER DEFAULT '0',
+ `accessed` INTEGER DEFAULT '0'
+ ) ;
]]
function sessions.createdb(presets,datatable)
@@ -57,7 +65,7 @@ function sessions.createdb(presets,datatable)
local db = checkeddb(presets,datatable)
db.execute {
- template = template,
+ template = db.usedmethod == "sqlite" and sqlite_template or template,
variables = {
basename = db.basename,
},
diff --git a/tex/context/base/mkiv/util-sql-tickets.lua b/tex/context/base/mkiv/util-sql-tickets.lua
index f392c0b91..3258fb186 100644
--- a/tex/context/base/mkiv/util-sql-tickets.lua
+++ b/tex/context/base/mkiv/util-sql-tickets.lua
@@ -18,6 +18,8 @@ local ostime, uuid, osfulltime = os.time, os.uuid, os.fulltime
local random = math.random
local concat = table.concat
+if not utilities.sql then require("util-sql") end
+
local sql = utilities.sql
local tickets = { }
sql.tickets = tickets
@@ -27,7 +29,6 @@ local report = logs.reporter("sql","tickets")
local serialize = sql.serialize
local deserialize = sql.deserialize
-local execute = sql.execute
tickets.newtoken = sql.tokens.new
@@ -66,7 +67,7 @@ end
tickets.usedb = checkeddb
-local template =[[
+local template = [[
CREATE TABLE IF NOT EXISTS %basename% (
`id` int(11) NOT NULL AUTO_INCREMENT,
`token` varchar(50) NOT NULL,
@@ -82,14 +83,30 @@ local template =[[
PRIMARY KEY (`id`),
UNIQUE INDEX `id_unique_index` (`id` ASC),
KEY `token_unique_key` (`token`)
- )
- DEFAULT CHARSET = utf8 ;
+ ) DEFAULT CHARSET = utf8 ;
+]]
+
+local sqlite_template = [[
+ CREATE TABLE IF NOT EXISTS %basename% (
+ `id` INTEGER PRIMARY KEY AUTOINCREMENT,
+ `token` TEXT NOT NULL,
+ `subtoken` INTEGER DEFAULT '0',
+ `created` INTEGER DEFAULT '0',
+ `accessed` INTEGER DEFAULT '0',
+ `category` INTEGER DEFAULT '0',
+ `status` INTEGER DEFAULT '0',
+ `usertoken` TEXT NOT NULL,
+ `data` TEXT NOT NULL,
+ `comment` TEXT NOT NULL
+ ) ;
]]
function tickets.createdb(presets,datatable)
+
local db = checkeddb(presets,datatable)
+
local data, keys = db.execute {
- template = template,
+ template = db.usedmethod == "sqlite" and sqlite_template or template,
variables = {
basename = db.basename,
},
diff --git a/tex/context/base/mkiv/util-sql-users.lua b/tex/context/base/mkiv/util-sql-users.lua
index a1f433946..7204fb310 100644
--- a/tex/context/base/mkiv/util-sql-users.lua
+++ b/tex/context/base/mkiv/util-sql-users.lua
@@ -10,47 +10,77 @@ if not modules then modules = { } end modules ['util-sql-users'] = {
-- because it's easier to distribute this way. Eventually it will be documented
-- and the related scripts will show up as well.
--- local sql = sql or (utilities and utilities.sql) or require("util-sql")
--- local md5 = md5 or require("md5")
-
local sql = utilities.sql
-local format, upper, find, gsub, topattern = string.format, string.upper, string.find, string.gsub, string.topattern
-local sumhexa = md5.sumhexa
+local find, topattern = string.find, string.topattern
+local sumHEXA = md5.sumHEXA
local toboolean = string.toboolean
+local lpegmatch = lpeg.match
-local sql = utilities.sql
+local sql = require("util-sql") -- utilities.sql
local users = { }
sql.users = users
local trace_sql = false trackers.register("sql.users.trace", function(v) trace_sql = v end)
local report = logs.reporter("sql","users")
-local function encryptpassword(str)
+local split = lpeg.splitat(":")
+local valid = nil
+local hash = function(s) return "MD5:" .. sumHEXA(s) end
+
+if LUAVERSION >= 5.3 then
+
+ local sha2 = require("util-sha")
+
+ local HASH224 = sha2.HASH224
+ local HASH256 = sha2.HASH256
+ local HASH384 = sha2.HASH384
+ local HASH512 = sha2.HASH512
+
+ valid = {
+ MD5 = hash,
+ SHA224 = function(s) return "SHA224:" .. HASH224(s) end,
+ SHA256 = function(s) return "SHA256:" .. HASH256(s) end,
+ SHA384 = function(s) return "SHA384:" .. HASH384(s) end,
+ SHA512 = function(s) return "SHA512:" .. HASH512(s) end,
+ }
+
+else
+
+ valid = {
+ MD5 = hash,
+ SHA224 = hash,
+ SHA256 = hash,
+ SHA384 = hash,
+ SHA512 = hash,
+ }
+
+end
+
+local function encryptpassword(str,how)
if not str or str == "" then
return ""
- elseif find(str,"^MD5:") then
+ end
+ local prefix, rest = lpegmatch(split,str)
+ if prefix and rest and valid[prefix] then
return str
- else
- return upper(format("MD5:%s",sumhexa(str)))
end
+ return (how and valid[how] or valid.MD5)(str)
end
local function cleanuppassword(str)
- return (gsub(str,"^MD5:",""))
+ local prefix, rest = lpegmatch(split,str)
+ if prefix and rest and valid[prefix] then
+ return rest
+ end
+ return str
end
local function samepasswords(one,two)
if not one or not two then
return false
end
- if not find(one,"^MD5:") then
- one = encryptpassword(one)
- end
- if not find(two,"^MD5:") then
- two = encryptpassword(two)
- end
- return one == two
+ return encryptpassword(one) == encryptpassword(two)
end
local function validaddress(address,addresses)
@@ -64,7 +94,6 @@ local function validaddress(address,addresses)
end
end
-
users.encryptpassword = encryptpassword
users.cleanuppassword = cleanuppassword
users.samepasswords = samepasswords
@@ -103,13 +132,23 @@ users.groupnumbers = groupnumbers
-- password 'test':
--
-- INSERT insert into users (`name`,`password`,`group`,`enabled`) values ('...','MD5:098F6BCD4621D373CADE4E832627B4F6',1,1) ;
+--
+-- MD5:098F6BCD4621D373CADE4E832627B4F6
+-- SHA224:90A3ED9E32B2AAF4C61C410EB925426119E1A9DC53D4286ADE99A809
+-- SHA256:9F86D081884C7D659A2FEAA0C55AD015A3BF4F1B2B0B822CD15D6C15B0F00A08
+-- SHA384:768412320F7B0AA5812FCE428DC4706B3CAE50E02A64CAA16A782249BFE8EFC4B7EF1CCB126255D196047DFEDF17A0A9
+-- SHA512:EE26B0DD4AF7E749AA1A8EE3C10AE9923F618980772E473F8819A5D4940E0DB27AC185F8A0E1D5F84F88BC887FD67B143732C304CC5FA9AD8E6F57F50028A8FF
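
With these prefixes `encryptpassword` leaves already-hashed values alone and hashes fresh ones with the requested scheme (falling back to MD5, also on Lua < 5.3), while `samepasswords` compares the normalized forms. A sketch using the 'test' values listed above:

    -- sketch: the password helpers from this file, values match the table above
    local users = utilities.sql.users

    print(users.encryptpassword("test"))                                 -- MD5:098F6BCD4621D373CADE4E832627B4F6
    print(users.encryptpassword("test","SHA256"))                        -- SHA256:9F86D081... on lua 5.3, MD5 otherwise
    print(users.encryptpassword("MD5:098F6BCD4621D373CADE4E832627B4F6")) -- already prefixed, returned as is
    print(users.samepasswords("test","MD5:098F6BCD4621D373CADE4E832627B4F6")) -- true
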
-local template =[[
+-- old values (a name can have utf and a password a long hash):
+--
+-- name 80, fullname 80, password 50
+
+local template = [[
CREATE TABLE `users` (
`id` int(11) NOT NULL AUTO_INCREMENT,
- `name` varchar(80) NOT NULL,
- `fullname` varchar(80) NOT NULL,
- `password` varchar(50) DEFAULT NULL,
+ `name` varchar(100) NOT NULL,
+ `fullname` varchar(100) NOT NULL,
+ `password` varchar(200) DEFAULT NULL,
`group` int(11) NOT NULL,
`enabled` int(11) DEFAULT '1',
`email` varchar(80) DEFAULT NULL,
@@ -121,6 +160,21 @@ local template =[[
) DEFAULT CHARSET = utf8 ;
]]
+local sqlite_template = [[
+ CREATE TABLE `users` (
+ `id` INTEGER PRIMARY KEY AUTOINCREMENT,
+ `name` TEXT NOT NULL,
+ `fullname` TEXT NOT NULL,
+ `password` TEXT DEFAULT NULL,
+ `group` INTEGER NOT NULL,
+ `enabled` INTEGER DEFAULT '1',
+ `email` TEXT DEFAULT NULL,
+ `address` TEXT DEFAULT NULL,
+ `theme` TEXT DEFAULT NULL,
+ `data` TEXT DEFAULT NULL
+ ) ;
+]]
+
local converter, fields = sql.makeconverter {
{ name = "id", type = "number" },
{ name = "name", type = "string" },
@@ -139,7 +193,7 @@ function users.createdb(presets,datatable)
local db = checkeddb(presets,datatable)
db.execute {
- template = template,
+ template = db.usedmethod == "sqlite" and sqlite_template or template,
variables = {
basename = db.basename,
},
diff --git a/tex/context/base/mkiv/util-sql.lua b/tex/context/base/mkiv/util-sql.lua
index 09056c048..d01a2267c 100644
--- a/tex/context/base/mkiv/util-sql.lua
+++ b/tex/context/base/mkiv/util-sql.lua
@@ -103,17 +103,21 @@ local defaults = { __index =
variables = { },
username = "default",
password = "default",
- host = "localhost",
+ host = "localhost", -- 127.0.0.1 is sometimes more reliable
port = 3306,
database = "default",
},
}
setmetatableindex(sql.methods,function(t,k)
- report_state("start loading method %a",k)
- require("util-sql-imp-"..k)
- report_state("loading method %a done",k)
- return rawget(t,k)
+ if type(k) == "string" then
+ report_state("start loading method %a",k)
+ require("util-sql-imp-"..k)
+ report_state("loading method %a done",k)
+ return rawget(t,k)
+ else
+ report_state("invalid method %a",tostring(k))
+ end
end)
-- converters
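
The guarded `__index` above still loads implementation modules on demand for string keys and now just reports for anything else. Sketch:

    -- sketch: sql methods are require'd lazily on first access
    local sql = utilities.sql
    local m   = sql.methods.sqlite  -- triggers require("util-sql-imp-sqlite") once, then comes from the table
    local bad = sql.methods[true]   -- no require attempt, only an "invalid method" report (returns nil)
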
@@ -285,7 +289,13 @@ local currentmethod
local currentserver
local function firstexecute(...)
- local execute = methods[currentmethod].execute
+ local method = methods[currentmethod]
+ if not method then
+ report_state("invalid sql method")
+ sql.execute = function() end
+ return nil
+ end
+ local execute = method.execute
sql.execute = execute
return execute(...)
end
@@ -326,6 +336,10 @@ function sql.usedatabase(presets,datatable)
if not method then
usedmethod = sql.methods.client
method = usedmethod and sqlmethods[usedmethod]
+ if not method then
+ report_state("invalid method")
+ return
+ end
end
local base = presets.database or "test"
local basename = format("`%s`.`%s`",base,name)
diff --git a/tex/context/base/mkiv/util-sta.lua b/tex/context/base/mkiv/util-sta.lua
index 27ab5a624..d140cacdc 100644
--- a/tex/context/base/mkiv/util-sta.lua
+++ b/tex/context/base/mkiv/util-sta.lua
@@ -154,7 +154,7 @@ function stacker.new(name)
local function resolve_step(ti) -- keep track of changes outside function !
-- todo: optimize for n=1 etc
local result = nil
- local noftop = #top
+ local noftop = top and #top or 0
if ti > 0 then
local current = list[ti]
if current then
@@ -289,24 +289,24 @@ end
--
-- local concat = table.concat
--
--- local pdfliteral = nodes.pool.pdfliteral
+-- local pdfpageliteral = nodes.pool.pdfpageliteral
--
-- function demostacker.start(s,t,first,last)
-- local n = whatever[t[last]]
-- -- s.report("start: %s",n)
--- return pdfliteral(n)
+-- return pdfpageliteral(n)
-- end
--
-- function demostacker.stop(s,t,first,last)
-- local n = whatever[false]
-- -- s.report("stop: %s",n)
--- return pdfliteral(n)
+-- return pdfpageliteral(n)
-- end
--
-- function demostacker.change(s,t1,first1,last1,t2,first2,last2)
-- local n = whatever[t2[last2]]
-- -- s.report("change: %s",n)
--- return pdfliteral(n)
+-- return pdfpageliteral(n)
-- end
--
-- demostacker.mode = "switch"
@@ -325,7 +325,7 @@ end
-- r[#r+1] = whatever[t[i]]
-- end
-- -- s.report("start: %s",concat(r," "))
--- return pdfliteral(concat(r," "))
+-- return pdfpageliteral(concat(r," "))
-- end
--
-- function demostacker.stop(s,t,first,last)
@@ -334,7 +334,7 @@ end
-- r[#r+1] = whatever[false]
-- end
-- -- s.report("stop: %s",concat(r," "))
--- return pdfliteral(concat(r," "))
+-- return pdfpageliteral(concat(r," "))
-- end
--
-- function demostacker.change(s,t1,first1,last1,t2,first2,last2)
@@ -346,7 +346,7 @@ end
-- r[#r+1] = whatever[t2[i]]
-- end
-- -- s.report("change: %s",concat(r," "))
--- return pdfliteral(concat(r," "))
+-- return pdfpageliteral(concat(r," "))
-- end
--
-- demostacker.mode = "stack"
diff --git a/tex/context/base/mkiv/util-sto.lua b/tex/context/base/mkiv/util-sto.lua
index d21267d7a..5b6915eaf 100644
--- a/tex/context/base/mkiv/util-sto.lua
+++ b/tex/context/base/mkiv/util-sto.lua
@@ -6,7 +6,7 @@ if not modules then modules = { } end modules ['util-sto'] = {
license = "see context related readme files"
}
-local setmetatable, getmetatable, type = setmetatable, getmetatable, type
+local setmetatable, getmetatable, rawset, type = setmetatable, getmetatable, rawset, type
utilities = utilities or { }
utilities.storage = utilities.storage or { }
@@ -158,6 +158,29 @@ function table.setmetatablecall(t,f)
return t
end
+-- the manual is somewhat fuzzy about this but suggests that one can best
+-- set all fields before assigning a metatable
+
+function table.setmetatableindices(t,f,n,c)
+ if type(t) ~= "table" then
+ f, t = t, { }
+ end
+ local m = getmetatable(t)
+ local i = f_index[f] or f
+ if m then
+ m.__index = i
+ m.__newindex = n
+ m.__call = c
+ else
+ setmetatable(t,{
+ __index = i,
+ __newindex = n,
+ __call = c,
+ })
+ end
+ return t
+end
+
function table.setmetatablekey(t,key,value)
local m = getmetatable(t)
if not m then
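
As an aside, a minimal usage sketch of the new table.setmetatableindices helper (illustrative only, not part of the patch; the table and key names are made up). It installs __index, __newindex and __call in one go, and creates the table itself when only functions are passed:

    -- demo: lazily compute missing keys, cache them, and make the table callable
    local defaults = table.setmetatableindices({ },
        function(t,k) local v = k .. "-default" t[k] = v return v end, -- __index
        function(t,k,v) rawset(t,k,v) end,                             -- __newindex
        function(t,k) return t[k] end                                  -- __call
    )
    print(defaults.width)     -- "width-default", computed once and cached
    print(defaults("height")) -- "height-default", via __call
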
@@ -172,3 +195,75 @@ function table.getmetatablekey(t,key,value)
local m = getmetatable(t)
return m and m[key]
end
+
+-- Problem: we have no __next (which is ok as it would probably slow down lua) so
+-- we cannot loop over the keys.
+
+-- local parametersets = table.autokeys()
+--
+-- parametersets.foo.bar = function(t,k) return "OEPS" end
+-- parametersets.foo.foo = "SPEO"
+-- parametersets.crap = { a = "a", b = table.autokey { function() return "b" end } }
+--
+-- print(parametersets.foo.bar)
+-- print(parametersets.foo.foo)
+-- print(parametersets.crap.b)
+-- print(parametersets.crap.b[1])
+
+-- function table.autotables(t)
+-- local t = t or { }
+-- local m = getmetatable(t)
+-- if not m then
+-- m = { }
+-- setmetatable(t,m)
+-- end
+-- m.__newindex = function(t,k,p)
+-- local v = { }
+-- local m = {
+-- __index = function(t,k)
+-- local v = p[k]
+-- if type(v) == "function" then
+-- return v(t,k) -- so we can have multiple arguments
+-- else
+-- return v
+-- end
+-- end,
+-- __newindex = function(t,k,v)
+-- p[k] = v
+-- end,
+-- __len = function(t)
+-- return #p
+-- end,
+-- }
+-- setmetatable(v,m)
+-- rawset(t,k,v)
+-- return v
+-- end
+-- m.__index = function(t,k)
+-- local v = { }
+-- t[k] = v -- calls newindex
+-- return v
+-- end
+-- return t
+-- end
+--
+-- function table.autokeys(p)
+-- local t = { }
+-- setmetatable(t, {
+-- __newindex = function(t,k,v)
+-- p[k] = v
+-- end,
+-- __index = function(t,k)
+-- local v = p[k]
+-- if type(v) == "function" then
+-- return v(t,k) -- so we can have multiple arguments
+-- else
+-- return v
+-- end
+-- end,
+-- __len = function(t)
+-- return #p
+-- end,
+-- })
+-- return t
+-- end
diff --git a/tex/context/base/mkiv/util-str.lua b/tex/context/base/mkiv/util-str.lua
index cebbc6be2..9da0c6a2f 100644
--- a/tex/context/base/mkiv/util-str.lua
+++ b/tex/context/base/mkiv/util-str.lua
@@ -12,17 +12,18 @@ local strings = utilities.strings
local format, gsub, rep, sub, find = string.format, string.gsub, string.rep, string.sub, string.find
local load, dump = load, string.dump
-local tonumber, type, tostring = tonumber, type, tostring
+local tonumber, type, tostring, next = tonumber, type, tostring, next
local unpack, concat = table.unpack, table.concat
local P, V, C, S, R, Ct, Cs, Cp, Carg, Cc = lpeg.P, lpeg.V, lpeg.C, lpeg.S, lpeg.R, lpeg.Ct, lpeg.Cs, lpeg.Cp, lpeg.Carg, lpeg.Cc
local patterns, lpegmatch = lpeg.patterns, lpeg.match
-local utfchar, utfbyte = utf.char, utf.byte
+local utfchar, utfbyte, utflen = utf.char, utf.byte, utf.len
+
----- loadstripped = utilities.lua.loadstripped
----- setmetatableindex = table.setmetatableindex
local loadstripped = nil
-if _LUAVERSION < 5.2 then
+if LUAVERSION < 5.2 then
loadstripped = function(str,shortcuts)
return load(str)
@@ -145,6 +146,18 @@ function strings.tabtospace(str,tab)
return lpegmatch(pattern,str,1,tab or 7)
end
+function string.utfpadding(s,n)
+ if not n or n == 0 then
+ return ""
+ end
+ local l = utflen(s)
+ if n > 0 then
+ return nspaces[n-l]
+ else
+ return nspaces[-n-l]
+ end
+end
+
-- local t = {
-- "1234567123456712345671234567",
-- "\tb\tc",
@@ -281,41 +294,48 @@ end
--
-- More info can be found in cld-mkiv.pdf so here I stick to a simple list.
--
--- integer %...i number
--- integer %...d number
--- unsigned %...u number
--- character %...c number
--- hexadecimal %...x number
--- HEXADECIMAL %...X number
--- octal %...o number
--- string %...s string number
--- float %...f number
--- checked float %...F number
--- exponential %...e number
--- exponential %...E number
--- autofloat %...g number
--- autofloat %...G number
--- utf character %...c number
--- force tostring %...S any
--- force tostring %Q any
--- force tonumber %N number (strip leading zeros)
--- signed number %I number
--- rounded number %r number
--- 0xhexadecimal %...h character number
--- 0xHEXADECIMAL %...H character number
--- U+hexadecimal %...u character number
--- U+HEXADECIMAL %...U character number
--- points %p number (scaled points)
--- basepoints %b number (scaled points)
--- table concat %...t table
--- table concat %{.}t table
--- serialize %...T sequenced (no nested tables)
--- serialize %{.}T sequenced (no nested tables)
--- boolean (logic) %l boolean
--- BOOLEAN %L boolean
--- whitespace %...w
--- automatic %...a 'whatever' (string, table, ...)
--- automatic %...A "whatever" (string, table, ...)
+-- integer %...i number
+-- integer %...d number
+-- unsigned %...u number -- not used
+-- character %...c number
+-- hexadecimal %...x number
+-- HEXADECIMAL %...X number
+-- octal %...o number
+-- string %...s string number
+-- float %...f number
+-- checked float %...F number
+-- exponential %...e number
+-- exponential %...E number
+-- stripped e %...j number
+-- stripped E %...J number
+-- autofloat %...g number
+-- autofloat %...G number
+-- utf character %...c number
+-- force tostring %...S any
+-- force tostring %Q any
+-- force tonumber %N number (strip leading zeros)
+-- signed number %I number
+-- rounded number %r number
+-- 0xhexadecimal %...h character number
+-- 0xHEXADECIMAL %...H character number
+-- U+hexadecimal %...u character number
+-- U+HEXADECIMAL %...U character number
+-- points %p number (scaled points)
+-- basepoints %b number (scaled points)
+-- table concat %...t table
+-- table concat %{.}t table
+-- serialize %...T sequenced (no nested tables)
+-- serialize %{.}T sequenced (no nested tables)
+-- boolean (logic) %l boolean
+-- BOOLEAN %L boolean
+-- whitespace %...w number
+-- whitespace %...W (fixed)
+-- automatic %...a 'whatever' (string, table, ...)
+-- automatic %...A "whatever" (string, table, ...)
+-- zap %...z skip
+-- comma/period real %...m
+-- period/comma real %...M
+-- formatted float %...k n.m
local n = 0
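
To make the list above concrete, a small usage sketch via string.formatters (illustrative only; the rendered output is indicative and may differ per version):

    local formatters = string.formatters

    local f = formatters["%s uses %i glyphs, shift %p, ok: %l"]
    print(f("font-one", 234, 65536, true)) -- e.g. "font-one uses 234 glyphs, shift 1pt, ok: true"
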
@@ -423,6 +443,27 @@ end
-- print(number.formatted(12345678,true))
-- print(number.formatted(1234.56,"!","?"))
+local p = Cs(
+ P("-")^0
+ * (P("0")^1/"")^0
+ * (1-P("."))^0
+ * (P(".") * P("0")^1 * P(-1)/"" + P(".")^0)
+ * P(1-P("0")^1*P(-1))^0
+ )
+
+function number.compactfloat(n,fmt)
+ if n == 0 then
+ return "0"
+ elseif n == 1 then
+ return "1"
+ end
+ n = lpegmatch(p,format(fmt or "%0.3f",n))
+ if n == "." or n == "" or n == "-" then
+ return "0"
+ end
+ return n
+end
+
local zero = P("0")^1 / ""
local plus = P("+") / ""
local minus = P("-")
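
The new number.compactfloat strips redundant zeros from a formatted float; a sketch of the intended behaviour as I read the pattern (outputs indicative):

    print(number.compactfloat(0))              -- "0"     (early exit)
    print(number.compactfloat(10.0))           -- "10"    (from "10.000")
    print(number.compactfloat(0.5))            -- ".5"    (leading and trailing zeros stripped)
    print(number.compactfloat(12.25,"%0.4f"))  -- "12.25" (from "12.2500")
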
@@ -483,7 +524,7 @@ return function(%s) return %s end
local preamble, environment = "", { }
-if _LUAVERSION < 5.2 then
+if LUAVERSION < 5.2 then
preamble = [[
local lpeg=lpeg
@@ -499,6 +540,7 @@ local utfchar=utf.char
local utfbyte=utf.byte
local lpegmatch=lpeg.match
local nspaces=string.nspaces
+local utfpadding=string.utfpadding
local tracedchar=string.tracedchar
local autosingle=string.autosingle
local autodouble=string.autodouble
@@ -525,6 +567,7 @@ else
utfbyte = utf.byte,
lpegmatch = lpeg.match,
nspaces = string.nspaces,
+ utfpadding = string.utfpadding,
tracedchar = string.tracedchar,
autosingle = string.autosingle,
autodouble = string.autodouble,
@@ -576,9 +619,36 @@ local format_S = function(f) -- can be optimized
end
end
+local format_right = function(f)
+ n = n + 1
+ f = tonumber(f)
+ if not f or f == 0 then
+ return format("(a%s or '')",n)
+ elseif f > 0 then
+ return format("utfpadding(a%s,%i)..a%s",n,f,n)
+ else
+ return format("a%s..utfpadding(a%s,%i)",n,n,f)
+ end
+end
+
+local format_left = function(f)
+ n = n + 1
+ f = tonumber(f)
+ if not f or f == 0 then
+ return format("(a%s or '')",n)
+ end
+ if f < 0 then
+ return format("utfpadding(a%s,%i)..a%s",n,-f,n)
+ else
+ return format("a%s..utfpadding(a%s,%i)",n,n,-f)
+ end
+end
+
local format_q = function()
n = n + 1
- return format("(a%s and format('%%q',a%s) or '')",n,n) -- goodie: nil check (maybe separate lpeg, not faster)
+ -- lua 5.3 has a different q than lua 5.2 (which does a tostring on numbers)
+ -- return format("(a%s ~= nil and format('%%q',a%s) or '')",n,n)
+ return format("(a%s ~= nil and format('%%q',tostring(a%s)) or '')",n,n)
end
local format_Q = function() -- can be optimized
@@ -907,6 +977,9 @@ local builder = Cs { "start",
+ V("m") + V("M") -- new (formatted number)
+ V("z") -- new
--
+ + V(">") -- left padding
+ + V("<") -- right padding
+ --
-- + V("?") -- ignored, probably messed up %
)
+ V("*")
@@ -929,7 +1002,7 @@ local builder = Cs { "start",
["o"] = (prefix_any * P("o")) / format_o, -- %o => regular %o (octal)
--
["S"] = (prefix_any * P("S")) / format_S, -- %S => %s (tostring)
- ["Q"] = (prefix_any * P("Q")) / format_S, -- %Q => %q (tostring)
+ ["Q"] = (prefix_any * P("Q")) / format_Q, -- %Q => %q (tostring)
["N"] = (prefix_any * P("N")) / format_N, -- %N => tonumber (strips leading zeros)
["k"] = (prefix_sub * P("k")) / format_k, -- %k => like f but with n.m
["c"] = (prefix_any * P("c")) / format_c, -- %c => utf character (extension to regular)
@@ -957,11 +1030,14 @@ local builder = Cs { "start",
["m"] = (prefix_tab * P("m")) / format_m, -- %m => xxx.xxx.xxx,xx (optional prefix instead of .)
["M"] = (prefix_tab * P("M")) / format_M, -- %M => xxx,xxx,xxx.xx (optional prefix instead of ,)
--
- ["z"] = (prefix_any * P("z")) / format_z, -- %M => xxx,xxx,xxx.xx (optional prefix instead of ,)
+ ["z"] = (prefix_any * P("z")) / format_z, -- %z => skip n arguments
--
["a"] = (prefix_any * P("a")) / format_a, -- %a => '...' (forces tostring)
["A"] = (prefix_any * P("A")) / format_A, -- %A => "..." (forces tostring)
--
+ ["<"] = (prefix_any * P("<")) / format_left,
+ [">"] = (prefix_any * P(">")) / format_right,
+ --
["*"] = Cs(((1-P("%"))^1 + P("%%")/"%%")^1) / format_rest, -- rest (including %%)
["?"] = Cs(((1-P("%"))^1 )^1) / format_rest, -- rest (including %%)
--
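
A hedged sketch of the new %> and %< padding directives, as I read the format_right and format_left handlers above (the field width goes in the prefix and the alignment is utf-aware):

    local rightaligned = string.formatters["[%12>]"]
    local leftaligned  = string.formatters["[%12<]"]
    print(rightaligned("čaj")) -- "[         čaj]"  right aligned in a 12 wide field
    print(leftaligned ("čaj")) -- "[čaj         ]"  left aligned in a 12 wide field
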
@@ -986,6 +1062,14 @@ local direct = Cs (
* P(-1) / [[local format = string.format return function(str) return format("%0",str) end]]
)
+-- local direct = Cs (
+-- P("%")
+-- * (S("+- .") + R("09"))^0
+-- * S("sqidfgGeExXo")
+-- * (1-P("%"))^0
+-- * P(-1) / [[local format = string.format return function(str) return format([==[%0]==],str) end]]
+-- )
+
local function make(t,str)
local f
local p
@@ -1058,7 +1142,7 @@ strings.formatters = { }
-- _connector_ is an experiment
-if _LUAVERSION < 5.2 then
+if LUAVERSION < 5.2 then
function strings.formatters.new(noconcat)
local t = { _type_ = "formatter", _connector_ = noconcat and "," or "..", _extensions_ = { }, _preamble_ = preamble, _environment_ = { } }
@@ -1118,7 +1202,7 @@ patterns.luaquoted = Cs(Cc('"') * ((1-S('"\n'))^1 + P('"')/'\\"' + P('\n')/'\\n"
-- escaping by lpeg is faster for strings without quotes, slower on a string with quotes, but
-- faster again when other q-escapables are found (the ones we don't need to escape)
-if _LUAVERSION < 5.2 then
+if LUAVERSION < 5.2 then
add(formatters,"xml",[[lpegmatch(xmlescape,%s)]],"local xmlescape = lpeg.patterns.xmlescape")
add(formatters,"tex",[[lpegmatch(texescape,%s)]],"local texescape = lpeg.patterns.texescape")
@@ -1183,3 +1267,21 @@ local pattern = Cs((newline / (os.newline or "\r") + 1)^0)
function string.replacenewlines(str)
return lpegmatch(pattern,str)
end
+
+--
+
+function strings.newcollector()
+ local result, r = { }, 0
+ return
+ function(fmt,str,...) -- write
+ r = r + 1
+ result[r] = str == nil and fmt or formatters[fmt](str,...)
+ end,
+ function(connector) -- flush
+ if result then
+ local str = concat(result,connector)
+ result, r = { }, 0
+ return str
+ end
+ end
+end
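
For completeness, a small usage sketch of strings.newcollector (illustrative only): the first returned function collects lines, optionally through the formatters, and the second concatenates and resets:

    local write, flush = utilities.strings.newcollector()

    write("just a plain line")    -- stored as-is when no extra arguments are given
    write("%s = %s","answer",42)  -- run through string.formatters
    print(flush("\n"))            -- "just a plain line\nanswer = 42"
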
diff --git a/tex/context/base/mkiv/util-tab.lua b/tex/context/base/mkiv/util-tab.lua
index fb2702228..ebe51eb1a 100644
--- a/tex/context/base/mkiv/util-tab.lua
+++ b/tex/context/base/mkiv/util-tab.lua
@@ -478,11 +478,11 @@ end
-- inspect(table.drop({ { a=2 }, {a=3} }))
-- inspect(table.drop({ { a=2 }, {a=3} },true))
-function table.autokey(t,k)
- local v = { }
- t[k] = v
- return v
-end
+-- function table.autokey(t,k) -- replaced
+-- local v = { }
+-- t[k] = v
+-- return v
+-- end
local selfmapper = { __index = function(t,k) t[k] = k return k end }
@@ -616,6 +616,10 @@ local is_simple_table = table.is_simple_table
-- return nil
-- end
+-- In order to overcome the luajit (65K constant) limitation I tried a split approach,
+-- i.e. outputting the first level tables as locals, but that failed with large cjk
+-- fonts too, so I removed that ... just use luatex instead.
+
local function serialize(root,name,specification)
if type(specification) == "table" then
diff --git a/tex/context/base/mkiv/util-tpl.lua b/tex/context/base/mkiv/util-tpl.lua
index 468dd429c..5e75a151b 100644
--- a/tex/context/base/mkiv/util-tpl.lua
+++ b/tex/context/base/mkiv/util-tpl.lua
@@ -16,7 +16,7 @@ local templates = utilities.templates
local trace_template = false trackers.register("templates.trace",function(v) trace_template = v end)
local report_template = logs.reporter("template")
-local tostring = tostring
+local tostring, next = tostring, next
local format, sub, byte = string.format, string.sub, string.byte
local P, C, R, Cs, Cc, Carg, lpegmatch, lpegpatterns = lpeg.P, lpeg.C, lpeg.R, lpeg.Cs, lpeg.Cc, lpeg.Carg, lpeg.match, lpeg.patterns
diff --git a/tex/context/base/mkiv/util-you.lua b/tex/context/base/mkiv/util-you.lua
index 79a0e83e7..32a7e07d4 100644
--- a/tex/context/base/mkiv/util-you.lua
+++ b/tex/context/base/mkiv/util-you.lua
@@ -30,22 +30,43 @@ utilities.youless = youless
local lpegmatch = lpeg.match
local formatters = string.formatters
+local sortedhash = table.sortedhash
+
+local tonumber, type, next = tonumber, type, next
+
+local round, div = math.round, math.div
+local osdate, ostime = os.date, os.time
+
+local report = logs.reporter("youless")
+local trace = false
-- dofile("http.lua")
local http = socket.http
-local f_normal = formatters["http://%s/V?%s=%i&f=j"]
-local f_password = formatters["http://%s/L?w=%s"]
+-- f=j : json
+
+local f_password = formatters["http://%s/L?w=%s"]
-local function fetch(url,password,what,i)
- local url = f_normal(url,what,i)
- local data, h = http.request(url)
- local result = data and utilities.json.tolua(data)
- return result
+local f_fetchers = {
+ electricity = formatters["http://%s/V?%s=%i&f=j"],
+ gas = formatters["http://%s/W?%s=%i&f=j"],
+ pulse = formatters["http://%s/Z?%s=%i&f=j"],
+}
+
+local function fetch(url,password,what,i,category)
+ local fetcher = f_fetchers[category or "electricity"]
+ if not fetcher then
+ report("invalid fetcher %a",category)
+ else
+ local url = fetcher(url,what,i)
+ local data, h = http.request(url)
+ local result = data and utilities.json.tolua(data)
+ return result
+ end
end
--- "123" " 1,234"
+-- "123" " 23" " 1,234"
local tovalue = lpeg.Cs((lpeg.R("09") + lpeg.P(1)/"")^1) / tonumber
@@ -58,48 +79,99 @@ local totime = (lpeg.C(4) / tonumber) * lpeg.P("-")
* (lpeg.C(2) / tonumber) * lpeg.P(":")
* (lpeg.C(2) / tonumber)
-local function get(url,password,what,i,data,average,variant)
+local function collapsed(data,dirty)
+ for list, parent in next, dirty do
+ local t, n = { }, { }
+ for k, v in next, list do
+ local d = div(k,10) * 10
+ t[d] = (t[d] or 0) + v
+ n[d] = (n[d] or 0) + 1
+ end
+ for k, v in next, t do
+ t[k] = round(t[k]/n[k])
+ end
+ parent[1][parent[2]] = t
+ end
+ return data
+end
+
+local function get(url,password,what,step,data,option,category)
if not data then
data = { }
end
+ local dirty = { }
while true do
- local d = fetch(url,password,what,i)
- if d and next(d) then
+ local d = fetch(url,password,what,step,category)
+ local v = d and d.val
+ if v and #v > 0 then
local c_year, c_month, c_day, c_hour, c_minute, c_seconds = lpegmatch(totime,d.tm)
if c_year and c_seconds then
local delta = tonumber(d.dt)
- local tnum = os.time { year = c_year, month = c_month, day = c_day, hour = c_hour, minute = c_minute }
- local v = d.val
+ local tnum = ostime {
+ year = c_year,
+ month = c_month,
+ day = c_day,
+ hour = c_hour,
+ min = c_minute,
+ sec = c_seconds,
+ }
for i=1,#v do
- local newvalue = lpegmatch(tovalue,v[i])
- if newvalue then
- local t = tnum + (i-1)*delta
- local current = os.date("%Y-%m-%dT%H:%M:%S",t)
- local c_year, c_month, c_day, c_hour, c_minute, c_seconds = lpegmatch(totime,current)
- if c_year and c_seconds then
- local years = data.years if not years then years = { } data.years = years end
- local d_year = years[c_year] if not d_year then d_year = { } years[c_year] = d_year end
- local months = d_year.months if not months then months = { } d_year.months = months end
- local d_month = months[c_month] if not d_month then d_month = { } months[c_month] = d_month end
- local days = d_month.days if not days then days = { } d_month.days = days end
- local d_day = days[c_day] if not d_day then d_day = { } days[c_day] = d_day end
- if average then
- d_day.average = newvalue
- else
- local hours = d_day.hours if not hours then hours = { } d_day.hours = hours end
- local d_hour = hours[c_hour] if not d_hour then d_hour = { } hours[c_hour] = d_hour end
- d_hour[c_minute] = newvalue
+ local vi = v[i]
+ if vi ~= "*" then
+ local newvalue = lpegmatch(tovalue,vi)
+ if newvalue then
+ local t = tnum + (i-1)*delta
+ -- local current = osdate("%Y-%m-%dT%H:%M:%S",t)
+ -- local c_year, c_month, c_day, c_hour, c_minute, c_seconds = lpegmatch(totime,current)
+ local c = osdate("*t",tnum + (i-1)*delta)
+ local c_year = c.year
+ local c_month = c.month
+ local c_day = c.day
+ local c_hour = c.hour
+ local c_minute = c.min
+ local c_seconds = c.sec
+ if c_year and c_seconds then
+ local years = data.years if not years then years = { } data.years = years end
+ local d_year = years[c_year] if not d_year then d_year = { } years[c_year] = d_year end
+ local months = d_year.months if not months then months = { } d_year.months = months end
+ local d_month = months[c_month] if not d_month then d_month = { } months[c_month] = d_month end
+ local days = d_month.days if not days then days = { } d_month.days = days end
+ local d_day = days[c_day] if not d_day then d_day = { } days[c_day] = d_day end
+ if option == "average" or option == "total" then
+ if trace then
+ local oldvalue = d_day[option]
+ if oldvalue and oldvalue ~= newvalue then
+ report("category %s, step %i, time %s: old %s %s updated to %s",category,step,osdate("%Y-%m-%dT%H:%M:%S",t),option,oldvalue,newvalue)
+ end
+ end
+ d_day[option] = newvalue
+ elseif option == "value" then
+ local hours = d_day.hours if not hours then hours = { } d_day.hours = hours end
+ local d_hour = hours[c_hour] if not d_hour then d_hour = { } hours[c_hour] = d_hour end
+ if trace then
+ local oldvalue = d_hour[c_minute]
+ if oldvalue and oldvalue ~= newvalue then
+ report("category %s, step %i, time %s: old %s %s updated to %s",category,step,osdate("%Y-%m-%dT%H:%M:%S",t),"value",oldvalue,newvalue)
+ end
+ end
+ d_hour[c_minute] = newvalue
+ if not dirty[d_hour] then
+ dirty[d_hour] = { hours, c_hour }
+ end
+ else
+ -- can't happen
+ end
end
end
end
end
end
else
- return data
+ return collapsed(data,dirty)
end
- i = i + 1
+ step = step + 1
end
- return data
+ return collapsed(data,dirty)
end
-- day of month (kwh)
@@ -132,6 +204,7 @@ function youless.collect(specification)
local detail = specification.detail or false
local nobackup = specification.nobackup or false
local password = specification.password or ""
+ local oldstuff = false
if host == "" then
return
end
@@ -140,13 +213,36 @@ function youless.collect(specification)
else
data = table.load(filename) or data
end
- if variant == "kwh" then
- get(host,password,"m",1,data,true)
- elseif variant == "watt" then
- get(host,password,"d",0,data,true)
- get(host,password,"w",1,data)
+ if variant == "electricity" then
+ get(host,password,"m",1,data,"total","electricity")
+ if oldstuff then
+ get(host,password,"d",1,data,"average","electricity")
+ end
+ get(host,password,"w",1,data,"value","electricity")
if detail then
- get(host,password,"h",1,data) -- todo: get this for calculating the precise max
+ get(host,password,"h",1,data,"value","electricity") -- todo: get this for calculating the precise max
+ end
+ elseif variant == "pulse" then
+ -- It looks like the 'd' option returns the wrong values or at least not the same sort
+ -- as the other ones, so we calculate the means ourselves. And 'w' is not consistent with
+ -- that either, so ...
+ get(host,password,"m",1,data,"total","pulse")
+ if oldstuff then
+ get(host,password,"d",1,data,"average","pulse")
+ end
+ detail = true
+ get(host,password,"w",1,data,"value","pulse")
+ if detail then
+ get(host,password,"h",1,data,"value","pulse")
+ end
+ elseif variant == "gas" then
+ get(host,password,"m",1,data,"total","gas")
+ if oldstuff then
+ get(host,password,"d",1,data,"average","gas")
+ end
+ get(host,password,"w",1,data,"value","gas")
+ if detail then
+ get(host,password,"h",1,data,"value","gas")
end
else
return
@@ -164,15 +260,21 @@ function youless.collect(specification)
if type(check) == "table" then
local keepname = file.replacesuffix(filename,"old")
os.remove(keepname)
- if not lfs.isfile(keepname) then
+ if lfs.isfile(keepname) then
+ report("error in removing %a",keepname)
+ else
os.rename(filename,keepname)
os.rename(tempname,filename)
end
+ else
+ report("error in saving %a",tempname)
end
else
local keepname = file.join(path,formatters["%s-%s"](os.date("%Y-%m-%d-%H-%M-%S",os.time()),base))
os.rename(filename,keepname)
- if not lfs.isfile(filename) then
+ if lfs.isfile(filename) then
+ report("error in renaming %a",filename)
+ else
table.save(filename,data)
end
end
@@ -181,76 +283,111 @@ end
-- local data = youless.collect {
-- host = "192.168.2.50",
--- variant = "watt",
--- filename = "youless-watt.lua"
+-- variant = "electricity",
+-- category = "electricity",
+-- filename = "youless-electricity.lua"
-- }
-
+--
-- inspect(data)
-- local data = youless.collect {
--- host = "192.168.2.50",
--- variant = "kwh",
--- filename = "youless-kwh.lua"
+-- host = "192.168.2.50",
+-- variant = "pulse",
+-- category = "electricity",
+-- filename = "youless-pulse.lua"
-- }
+--
+-- inspect(data)
+-- local data = youless.collect {
+-- host = "192.168.2.50",
+-- variant = "gas",
+-- category = "gas",
+-- filename = "youless-gas.lua"
+-- }
+--
-- inspect(data)
+-- We remain compatible so we stick to electricity and not unit fields.
+
function youless.analyze(data)
- if data and data.variant == "watt" and data.years then
- for y, year in next, data.years do
- local a_year, n_year, m_year = 0, 0, 0
- if year.months then
- for m, month in next, year.months do
- local a_month, n_month = 0, 0
- if month.days then
- for d, day in next, month.days do
- local a_day, n_day = 0, 0
- if day.hours then
- for h, hour in next, day.hours do
- local a_hour, n_hour, m_hour = 0, 0, 0
- for k, v in next, hour do
- if type(k) == "number" then
- a_hour = a_hour + v
- n_hour = n_hour + 1
- if v > m_hour then
- m_hour = v
- end
+ if type(data) == "string" then
+ data = table.load(data)
+ end
+ if type(data) ~= "table" then
+ return false, "no data"
+ end
+ if not data.years then
+ return false, "no years"
+ end
+ local variant = data.variant
+ local unit, maxunit
+ if variant == "electricity" or variant == "watt" then
+ unit = "watt"
+ maxunit = "maxwatt"
+ elseif variant == "gas" then
+ unit = "liters"
+ maxunit = "maxliters"
+ elseif variant == "pulse" then
+ unit = "watt"
+ maxunit = "maxwatt"
+ else
+ return false, "invalid variant"
+ end
+ for y, year in next, data.years do
+ local a_year, n_year, m_year = 0, 0, 0
+ if year.months then
+ for m, month in next, year.months do
+ local a_month, n_month = 0, 0
+ if month.days then
+ for d, day in next, month.days do
+ local a_day, n_day = 0, 0
+ if day.hours then
+ for h, hour in next, day.hours do
+ local a_hour, n_hour, m_hour = 0, 0, 0
+ for k, v in next, hour do
+ if type(k) == "number" then
+ a_hour = a_hour + v
+ n_hour = n_hour + 1
+ if v > m_hour then
+ m_hour = v
end
end
- n_day = n_day + n_hour
- a_day = a_day + a_hour
- hour.maxwatt = m_hour
- hour.watt = a_hour / n_hour
- if m_hour > m_year then
- m_year = m_hour
- end
end
- end
- if n_day > 0 then
- a_month = a_month + a_day
- n_month = n_month + n_day
- day.watt = a_day / n_day
- else
- day.watt = 0
+ n_day = n_day + n_hour
+ a_day = a_day + a_hour
+ hour[maxunit] = m_hour
+ hour[unit] = a_hour / n_hour
+ if m_hour > m_year then
+ m_year = m_hour
+ end
end
end
- end
- if n_month > 0 then
- a_year = a_year + a_month
- n_year = n_year + n_month
- month.watt = a_month / n_month
- else
- month.watt = 0
+ if n_day > 0 then
+ a_month = a_month + a_day
+ n_month = n_month + n_day
+ day[unit] = a_day / n_day
+ else
+ day[unit] = 0
+ end
end
end
- end
- if n_year > 0 then
- year.watt = a_year / n_year
- year.maxwatt = m_year
- else
- year.watt = 0
- year.maxwatt = 0
+ if n_month > 0 then
+ a_year = a_year + a_month
+ n_year = n_year + n_month
+ month[unit] = a_month / n_month
+ else
+ month[unit] = 0
+ end
end
end
+ if n_year > 0 then
+ year[unit] = a_year / n_year
+ year[maxunit] = m_year
+ else
+ year[unit] = 0
+ year[maxunit] = 0
+ end
end
+ return data
end
diff --git a/tex/context/fonts/mkiv/bonum-math.lfg b/tex/context/fonts/mkiv/bonum-math.lfg
index 00576aaef..8dfa63405 100644
--- a/tex/context/fonts/mkiv/bonum-math.lfg
+++ b/tex/context/fonts/mkiv/bonum-math.lfg
@@ -8,8 +8,13 @@ return {
author = "Hans Hagen",
copyright = "ConTeXt development team",
mathematics = {
+ dimensions = {
+ default = {
+ [0x1D453] = { xoffset = "llx" },-- 𝑓
+ },
+ },
kerns = {
- [0x1D449] = kern_200, --
+ [0x1D449] = kern_200, -- 𝑉
[0x1D44A] = kern_100, -- 𝑊
},
alternates = {
diff --git a/tex/context/fonts/mkiv/cambria-math.lfg b/tex/context/fonts/mkiv/cambria-math.lfg
index ae875e64d..c18854d18 100644
--- a/tex/context/fonts/mkiv/cambria-math.lfg
+++ b/tex/context/fonts/mkiv/cambria-math.lfg
@@ -50,5 +50,20 @@ return {
-- [0x1D44F] = 1000, -- 𝑎𝑏 demo
-- }
-- },
+ -- kerns = {
+ -- [0x1D453] = {
+ -- force = true,
+ -- topright = {
+ -- {
+ -- kern = 1000,
+ -- },
+ -- },
+ -- bottomright = {
+ -- {
+ -- kern = 1000,
+ -- },
+ -- },
+ -- },
+ -- },
},
}
diff --git a/tex/context/fonts/mkiv/lm-math.lfg b/tex/context/fonts/mkiv/lm-math.lfg
index b8c996979..a0fd2c21c 100644
--- a/tex/context/fonts/mkiv/lm-math.lfg
+++ b/tex/context/fonts/mkiv/lm-math.lfg
@@ -30,9 +30,9 @@
-- end
-- end
--
--- patches.register("after","prepare glyphs","^lmroman", patch)
--- patches.register("after","prepare glyphs","^lmsans", patch)
--- patches.register("after","prepare glyphs","^lmtypewriter",patch)
+-- patches.register("before","check extra features","^lmroman", patch)
+-- patches.register("before","check extra features","^lmsans", patch)
+-- patches.register("before","check extra features","^lmtypewriter",patch)
-- rm-lmr5 : LMMathRoman5-Regular
-- rm-lmbx5 : LMMathRoman5-Bold ]
diff --git a/tex/context/fonts/mkiv/lm.lfg b/tex/context/fonts/mkiv/lm.lfg
index aebedd01b..ec37a2975 100644
--- a/tex/context/fonts/mkiv/lm.lfg
+++ b/tex/context/fonts/mkiv/lm.lfg
@@ -34,6 +34,8 @@ return {
height = 960,
depth = 40,
},
+ -- [0xFE932] = { xoffset = 50, width = 290 }, -- used prime
+ -- [0x2032] = { xoffset = 50, width = 290 }, -- prime
},
signs = {
-- set dimensions
diff --git a/tex/context/fonts/mkiv/lucida-opentype-math.lfg b/tex/context/fonts/mkiv/lucida-opentype-math.lfg
index 29206da1a..7596f139a 100644
--- a/tex/context/fonts/mkiv/lucida-opentype-math.lfg
+++ b/tex/context/fonts/mkiv/lucida-opentype-math.lfg
@@ -1,5 +1,16 @@
----- kern_250 = { bottomright = { { kern = -250 } }, force = true }
+-- RadicalVerticalGap = 50
+-- RadicalDisplayStyleVerticalGap = 175
+
+-- RadicalVerticalGap = 166
+-- RadicalDisplayStyleVerticalGap = 345
+
+local function FixRadicalDisplayStyleVerticalGap(value,target,original)
+ local o = original.mathparameters.RadicalVerticalGap -- 50
+ return 2 * o * target.parameters.factor
+end
+
return {
name = "lucida-opentype-math",
version = "1.00",
@@ -7,6 +18,9 @@ return {
author = "Hans Hagen",
copyright = "ConTeXt development team",
mathematics = {
+ parameters = {
+ RadicalDisplayStyleVerticalGap = FixRadicalDisplayStyleVerticalGap,
+ },
alternates = {
italic = { feature = 'ss01', value = 1, comment = "Mathematical Alternative Lowercase Italic" },
arrow = { feature = 'ss02', value = 1, comment = "Mathematical Alternative Smaller Arrows" },
@@ -21,7 +35,16 @@ return {
-- },
dimensions = {
default = { -- experimental values
- [0x2044] = { xoffset = 275, width = 600 },
+ [0x02044] = { xoffset = 275, width = 600 },
+ -- primes
+ [0xFE932] = { width = 200 },
+ [0xFE933] = { width = 500 },
+ [0xFE934] = { width = 800 },
+ [0xFE935] = { width = 1100 },
+ -- reverse primes
+ [0xFE935] = { width = 200 },
+ [0xFE936] = { width = 500 },
+ [0xFE937] = { width = 800 },
},
},
},
diff --git a/tex/context/fonts/mkiv/pagella-math.lfg b/tex/context/fonts/mkiv/pagella-math.lfg
index 40d50383b..c85ff3f6b 100644
--- a/tex/context/fonts/mkiv/pagella-math.lfg
+++ b/tex/context/fonts/mkiv/pagella-math.lfg
@@ -1,6 +1,8 @@
local kern_200 = { bottomright = { { kern = -200 } } }
local kern_100 = { bottomright = { { kern = -100 } } }
+-- Beware of updates !
+
return {
name = "pagella-math",
version = "1.00",
@@ -8,8 +10,14 @@ return {
author = "Hans Hagen",
copyright = "ConTeXt development team",
mathematics = {
+ dimensions = {
+ default = {
+ -- [0x1D453] = { xoffset = 162, width = 278 + 162 },-- 𝑓
+ [0x1D453] = { xoffset = "llx" },-- 𝑓
+ },
+ },
kerns = {
- [0x1D449] = kern_200, --
+ [0x1D449] = kern_200, -- 𝑉
[0x1D44A] = kern_100, -- 𝑊
},
alternates = {
diff --git a/tex/context/fonts/mkiv/px-math.lfg b/tex/context/fonts/mkiv/px-math.lfg
index 14f71dad3..08d208b3f 100644
--- a/tex/context/fonts/mkiv/px-math.lfg
+++ b/tex/context/fonts/mkiv/px-math.lfg
@@ -10,8 +10,9 @@ return {
},
virtuals = {
["px-math"] = {
- { name = "texgyre-pagella-math-regular.otf", features = "virtualmath", main = true },
- { name = "texgyrepagella-regular.otf", features = "virtualmath", vector = "tex-mr-missing" } ,
+-- { name = "texgyre-pagella-math-regular.otf", features = "virtualmath", main = true },
+-- { name = "texgyrepagella-regular.otf", features = "virtualmath", vector = "tex-mr-missing" } ,
+ { name = "texgyrepagella-regular.otf", features = "virtualmath" } ,
{ name = "rpxr.tfm", vector = "tex-mr" } ,
{ name = "rpxmi.tfm", vector = "tex-mi", skewchar=0x7F },
{ name = "rpxpplri.tfm", vector = "tex-it", skewchar=0x7F },
diff --git a/tex/context/fonts/mkiv/stix-two-math.lfg b/tex/context/fonts/mkiv/stix-two-math.lfg
index ded97f92e..8d9c9c71a 100644
--- a/tex/context/fonts/mkiv/stix-two-math.lfg
+++ b/tex/context/fonts/mkiv/stix-two-math.lfg
@@ -1,3 +1,34 @@
+-- Bah, I really hate these patches, especially because one needs to make
+-- sure that they are still valid when the font gets updated. So, let's
+-- do it at runtime (not in the cached copy) and issue a warning every run.
+-- As we cannot rely on version numbers (if we have more patches) we
+-- check for values instead.
+--
+-- This font also has inconsistent italics in smaller sizes, which we can
+-- fix in a more general way, but I'm not sure if we want that.
+
+local function fix_italic(target,original,name,value,factor)
+ local m = target.parameters.mathsize
+ if m then
+ local u = type(name) == "number" and name or original.resources.unicodes[name]
+ if u then
+ local c = target.characters[u]
+ if c then
+ local i = c.italic
+ if i then
+ local d = original.descriptions[u]
+ if d and d.math.italic == value then
+ if m then
+ logs.report("patching font","fixing italic correction of %U at math size %i by %0.3f",u,m,factor)
+ c.italic = factor * i
+ end
+ end
+ end
+ end
+ end
+ end
+end
+
return {
name = "stix-two-math",
version = "1.00",
@@ -21,6 +52,14 @@ return {
-- todo = { feature = 'ss14', value = 1, comment = "" },
circled = { feature = 'ss16', value = 1, comment = "Mathematical Alternative Circled Operators" },
},
+ tweaks = {
+ aftercopying = {
+ function(target,original)
+ fix_italic(target,original,"uni222B.updsp",80,3.5)
+ fix_italic(target,original,"uni222B.up", 80,2.5)
+ end,
+ },
+ },
},
}
diff --git a/tex/context/fonts/mkiv/termes-math.lfg b/tex/context/fonts/mkiv/termes-math.lfg
index 557216cb1..1501fd536 100644
--- a/tex/context/fonts/mkiv/termes-math.lfg
+++ b/tex/context/fonts/mkiv/termes-math.lfg
@@ -8,8 +8,13 @@ return {
author = "Hans Hagen",
copyright = "ConTeXt development team",
mathematics = {
+ dimensions = {
+ default = {
+ [0x1D453] = { xoffset = "llx" },-- 𝑓
+ },
+ },
kerns = {
- [0x1D449] = kern_200, --
+ [0x1D449] = kern_200, -- 𝑉
[0x1D44A] = kern_100, -- 𝑊
},
alternates = {
diff --git a/tex/context/fonts/mkiv/type-imp-dejavu.mkiv b/tex/context/fonts/mkiv/type-imp-dejavu.mkiv
index 710aada5d..582d8a764 100644
--- a/tex/context/fonts/mkiv/type-imp-dejavu.mkiv
+++ b/tex/context/fonts/mkiv/type-imp-dejavu.mkiv
@@ -17,26 +17,26 @@
\starttypescript [\s!serif] [dejavu] [\s!name]
\setups[\s!font:\s!fallback:\s!serif]
- \definefontsynonym [\s!Serif] [\s!name:dejavuserif] [\s!features=\s!default]
- \definefontsynonym [\s!SerifBold] [\s!name:dejavuserifbold] [\s!features=\s!default]
- \definefontsynonym [\s!SerifItalic] [\s!name:dejavuserifitalic] [\s!features=\s!default]
- \definefontsynonym [\s!SerifBoldItalic] [\s!name:dejavuserifbolditalic] [\s!features=\s!default]
+ \definefontsynonym [\s!Serif] [\s!name:dejavuserif] [\s!features=\s!default,\s!fallbacks=\s!Serif]
+ \definefontsynonym [\s!SerifBold] [\s!name:dejavuserifbold] [\s!features=\s!default,\s!fallbacks=\s!SerifBold]
+ \definefontsynonym [\s!SerifItalic] [\s!name:dejavuserifitalic] [\s!features=\s!default,\s!fallbacks=\s!SerifItalic]
+ \definefontsynonym [\s!SerifBoldItalic] [\s!name:dejavuserifbolditalic] [\s!features=\s!default,\s!fallbacks=\s!SerifBoldItalic]
\stoptypescript
\starttypescript [\s!sans] [dejavu] [\s!name]
\setups[\s!font:\s!fallback:\s!sans]
- \definefontsynonym [\s!Sans] [\s!name:dejavusans] [\s!features=\s!default]
- \definefontsynonym [\s!SansBold] [\s!name:dejavusansbold] [\s!features=\s!default]
- \definefontsynonym [\s!SansItalic] [\s!name:dejavusansoblique] [\s!features=\s!default]
- \definefontsynonym [\s!SansBoldItalic] [\s!name:dejavusansboldoblique] [\s!features=\s!default]
+ \definefontsynonym [\s!Sans] [\s!name:dejavusans] [\s!features=\s!default,\s!fallbacks=\s!Sans]
+ \definefontsynonym [\s!SansBold] [\s!name:dejavusansbold] [\s!features=\s!default,\s!fallbacks=\s!SansBold]
+ \definefontsynonym [\s!SansItalic] [\s!name:dejavusansoblique] [\s!features=\s!default,\s!fallbacks=\s!SansItalic]
+ \definefontsynonym [\s!SansBoldItalic] [\s!name:dejavusansboldoblique] [\s!features=\s!default,\s!fallbacks=\s!SansBoldItalic]
\stoptypescript
\starttypescript [\s!mono] [dejavu] [\s!name]
\setups[\s!font:\s!fallback:\s!mono]
- \definefontsynonym [\s!Mono] [\s!name:dejavusansmono] [\s!features=\s!none]
- \definefontsynonym [\s!MonoBold] [\s!name:dejavusansmonobold] [\s!features=\s!none]
- \definefontsynonym [\s!MonoItalic] [\s!name:dejavusansmonooblique] [\s!features=\s!none]
- \definefontsynonym [\s!MonoBoldItalic] [\s!name:dejavusansmonoboldoblique] [\s!features=\s!none]
+ \definefontsynonym [\s!Mono] [\s!name:dejavusansmono] [\s!features=\s!none,\s!fallbacks=\s!Mono]
+ \definefontsynonym [\s!MonoBold] [\s!name:dejavusansmonobold] [\s!features=\s!none,\s!fallbacks=\s!MonoBold]
+ \definefontsynonym [\s!MonoItalic] [\s!name:dejavusansmonooblique] [\s!features=\s!none,\s!fallbacks=\s!MonoItalic]
+ \definefontsynonym [\s!MonoBoldItalic] [\s!name:dejavusansmonoboldoblique] [\s!features=\s!none,\s!fallbacks=\s!MonoBoldItalic]
\stoptypescript
\starttypescript [\s!math][dejavu][\s!name]
diff --git a/tex/context/fonts/mkiv/type-imp-ebgaramond.mkiv b/tex/context/fonts/mkiv/type-imp-ebgaramond.mkiv
index cd474242f..bba07f41d 100644
--- a/tex/context/fonts/mkiv/type-imp-ebgaramond.mkiv
+++ b/tex/context/fonts/mkiv/type-imp-ebgaramond.mkiv
@@ -40,8 +40,8 @@
\starttypescript[ebgaramond]
\definetypeface [ebgaramond] [rm] [serif] [ebgaramond] [default] [designsize=auto]
- \definetypeface [ebgaramond] [tt] [mono] [dejavu] [default]
- \definetypeface [ebgaramond] [mm] [math] [bonum] [default]
+ \definetypeface [ebgaramond] [tt] [mono] [dejavu] [default] [rscale=0.8] % rather arbitrary but seldom mixed anyway
+ \definetypeface [ebgaramond] [mm] [math] [bonum] [default] [rscale=0.8] % rather arbitrary but seldom mixed anyway
\stoptypescript
\stoptypescriptcollection
diff --git a/tex/context/fonts/mkiv/type-imp-gofonts.mkiv b/tex/context/fonts/mkiv/type-imp-gofonts.mkiv
new file mode 100644
index 000000000..632021674
--- /dev/null
+++ b/tex/context/fonts/mkiv/type-imp-gofonts.mkiv
@@ -0,0 +1,47 @@
+%D \module
+%D [ file=type-imp-gofonts,
+%D version=2017.06.13,
+%D title=\CONTEXT\ Typescript Macros,
+%D subtitle=Go fonts,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\starttypescriptcollection[gofonts]
+
+ \starttypescript [\s!serif] [gofonts] [\s!name]
+ \setups[\s!font:\s!fallback:\s!serif]
+ \definefontsynonym [\s!Serif] [\s!name:goregular] [\s!features=\s!default]
+ \definefontsynonym [\s!SerifBold] [\s!name:gobold] [\s!features=\s!default]
+ \definefontsynonym [\s!SerifItalic] [\s!name:goitalic] [\s!features=\s!default]
+ \definefontsynonym [\s!SerifBoldItalic] [\s!name:gobolditalic] [\s!features=\s!default]
+ \stoptypescript
+
+ \starttypescript [\s!sans] [gofonts] [\s!name]
+ \setups[\s!font:\s!fallback:\s!sans]
+ \definefontsynonym [\s!Sans] [\s!name:goregular] [\s!features=\s!default]
+ \definefontsynonym [\s!SansBold] [\s!name:gobold] [\s!features=\s!default]
+ \definefontsynonym [\s!SansItalic] [\s!name:goitalic] [\s!features=\s!default]
+ \definefontsynonym [\s!SansBoldItalic] [\s!name:gobolditalic] [\s!features=\s!default]
+ \stoptypescript
+
+ \starttypescript [\s!mono] [gofonts] [\s!name]
+ \setups[\s!font:\s!fallback:\s!mono]
+ \definefontsynonym [\s!Mono] [\s!name:gomono] [\s!features=\s!none]
+ \definefontsynonym [\s!MonoBold] [\s!name:gomonobold] [\s!features=\s!none]
+ \definefontsynonym [\s!MonoItalic] [\s!name:gomoitalic] [\s!features=\s!none]
+ \definefontsynonym [\s!MonoBoldItalic] [\s!name:gomobolditalic] [\s!features=\s!none]
+ \stoptypescript
+
+ \starttypescript[gofonts]
+ \definetypeface [gofonts] [\s!rm] [\s!serif] [gofonts] [\s!default]
+ \definetypeface [gofonts] [\s!ss] [\s!sans] [gofonts] [\s!default]
+ \definetypeface [gofonts] [\s!tt] [\s!mono] [gofonts] [\s!default]
+ \definetypeface [gofonts] [\s!mm] [\s!math] [gofonts] [\s!default]
+ \stoptypescript
+
+\stoptypescriptcollection
diff --git a/tex/context/fonts/mkiv/type-imp-libertine.mkiv b/tex/context/fonts/mkiv/type-imp-libertine.mkiv
index 4620995b9..4fac1a6aa 100644
--- a/tex/context/fonts/mkiv/type-imp-libertine.mkiv
+++ b/tex/context/fonts/mkiv/type-imp-libertine.mkiv
@@ -47,12 +47,13 @@
\starttypescript [\s!sans] [biolinum] [\s!name]
\setups[\s!font:\s!fallback:\s!sans]
- \definefontsynonym [\s!Sans] [Biolinum-Regular] [\s!features=\s!default]
- \definefontsynonym [\s!SansBold] [Biolinum-Bold] [\s!features=\s!default]
- \definefontsynonym [\s!SansItalic] [Biolinum-Italic] [\s!features=\s!default]
- \definefontsynonym [\s!SansSlanted] [Biolinum-Slanted] [\s!features=\s!default]
- \definefontsynonym [\s!SansBoldItalic] [Biolinum-BoldSlanted] [\s!features=\s!default]
- \definefontsynonym [SansCaps] [Biolinum-Regular] [\s!features=\s!smallcaps]
+ \definefontsynonym [\s!Sans] [Biolinum-Regular] [\s!features=\s!default]
+ \definefontsynonym [\s!SansBold] [Biolinum-Bold] [\s!features=\s!default]
+ \definefontsynonym [\s!SansItalic] [Biolinum-Italic] [\s!features=\s!default]
+ \definefontsynonym [\s!SansSlanted] [Biolinum-Slanted] [\s!features=\s!default]
+ \definefontsynonym [\s!SansBoldItalic] [Biolinum-BoldSlanted] [\s!features=\s!default]
+ \definefontsynonym [\s!SansBoldSlanted] [Biolinum-BoldSlanted] [\s!features=\s!default]
+ \definefontsynonym [SansCaps] [Biolinum-Regular] [\s!features=\s!smallcaps]
\stoptypescript
\starttypescript [libertine]
@@ -60,6 +61,7 @@
\definetypeface [libertine] [\s!ss] [\s!sans] [biolinum] [\s!default]
\definetypeface [libertine] [\s!tt] [\s!mono] [default] [\s!default]
%definetypeface [libertine] [\s!mm] [\s!math] [times] [\s!default]
+ \definetypeface [libertine] [\s!mm] [\s!math] [default] [\s!default]
\quittypescriptscanning
\stoptypescript
diff --git a/tex/context/fonts/mkiv/type-imp-mscore.mkiv b/tex/context/fonts/mkiv/type-imp-mscore.mkiv
index 971a03de7..d4aeec10d 100644
--- a/tex/context/fonts/mkiv/type-imp-mscore.mkiv
+++ b/tex/context/fonts/mkiv/type-imp-mscore.mkiv
@@ -56,17 +56,17 @@
\stoptypescript
\starttypescript[mscore]
- \definetypeface [mscore] [\s!rm] [\s!serif] [mscoretimes] [\s!default]
- \definetypeface [mscore] [\s!ss] [\s!sans] [mscorearial] [\s!default] [\s!rscale=0.860]
- \definetypeface [mscore] [\s!tt] [\s!mono] [mscorecourier] [\s!default] [\s!rscale=1.065]
- \definetypeface [mscore] [\s!mm] [\s!math] [times] [\s!default] [\s!rscale=1.020]
+ \definetypeface [\typescriptone] [\s!rm] [\s!serif] [mscoretimes] [\s!default]
+ \definetypeface [\typescriptone] [\s!ss] [\s!sans] [mscorearial] [\s!default] [\s!rscale=0.860]
+ \definetypeface [\typescriptone] [\s!tt] [\s!mono] [mscorecourier] [\s!default] [\s!rscale=1.065]
+ \definetypeface [\typescriptone] [\s!mm] [\s!math] [times] [\s!default] [\s!rscale=1.020]
\stoptypescript
- \starttypescript[mscorenarrow]
- \definetypeface [mscorenarrow] [\s!rm] [\s!serif] [mscoretimes] [\s!default]
- \definetypeface [mscorenarrow] [\s!ss] [\s!sans] [mscorearialnarrow] [\s!default] [\s!rscale=0.860]
- \definetypeface [mscorenarrow] [\s!tt] [\s!mono] [mscorecourier] [\s!default] [\s!rscale=1.065]
- \definetypeface [mscorenarrow] [\s!mm] [\s!math] [times] [\s!default] [\s!rscale=1.020]
+ \starttypescript[mscorenarrow,mscore-narrow]
+ \definetypeface [\typescriptone] [\s!rm] [\s!serif] [mscoretimes] [\s!default]
+ \definetypeface [\typescriptone] [\s!ss] [\s!sans] [mscorearialnarrow] [\s!default] [\s!rscale=0.860]
+ \definetypeface [\typescriptone] [\s!tt] [\s!mono] [mscorecourier] [\s!default] [\s!rscale=1.065]
+ \definetypeface [\typescriptone] [\s!mm] [\s!math] [times] [\s!default] [\s!rscale=1.020]
\stoptypescript
% \starttypescript[mscoress]
@@ -77,3 +77,53 @@
% \stoptypescript
\stoptypescriptcollection
+
+% http://archive1.village.virginia.edu/spw4s/fonts/
+
+\starttypescriptcollection[microsoft-chinese-old]
+
+ % \starttypescript [\s!serif] [mschinese,mschinese-light]
+ % \definefontsynonym [STSong] [\s!file:stsong.ttf] [\s!features=chinese]
+ % \definefontsynonym [STKaiti] [\s!file:stkaiti.ttf] [\s!features=chinese]
+ % \definefontsynonym [STZHongsong] [\s!file:stzhongs.ttf] [\s!features=chinese]
+ % \definefontsynonym [STFangsong] [\s!file:stfangso.ttf] [\s!features=chinese]
+ % \stoptypescript
+
+ % \starttypescript [\s!sans] [mschinese]
+ % \definefontsynonym [STXIHei] [\s!file:stxihei.ttf] [\s!features=chinese]
+ % \stoptypescript
+
+ \starttypescript [\s!serif] [mschinese]
+ \setups[\s!font:\s!fallback:\s!serif]
+ \definefontsynonym [\s!Serif] [\s!file:stsong.ttf] [\s!features=chinese]
+ \definefontsynonym [\s!SerifItalic] [\s!file:stkaiti.ttf] [\s!features=chinese]
+ \definefontsynonym [\s!SerifBold] [\s!file:stzhongs.ttf] [\s!features=chinese]
+ \stoptypescript
+
+ \starttypescript [\s!serif] [mschinese-light]
+ \setups[\s!font:\s!fallback:\s!serif]
+ \definefontsynonym [\s!Serif] [\s!file:stfangso.ttf] [\s!features=chinese]
+ \definefontsynonym [\s!SerifItalic] [\s!file:stkaiti.ttf] [\s!features=chinese]
+ \definefontsynonym [\s!SerifBold] [\s!file:stzhongs.ttf] [\s!features=chinese]
+ \stoptypescript
+
+ \starttypescript [\s!sans] [mschinese]
+ \setups[\s!font:\s!fallback:\s!sans]
+ \definefontsynonym [\s!Sans] [\s!file:stxihei.ttf] [\s!features=chinese]
+ \stoptypescript
+
+ \starttypescript[mschinese]
+ \definetypeface [\typescriptone] [\s!rm] [\s!serif] [mschinese] [\s!default]
+ \definetypeface [\typescriptone] [\s!ss] [\s!sans] [mschinese] [\s!default]
+ \definetypeface [\typescriptone] [\s!mm] [\s!math] [cambria]
+ \definetypeface [\typescriptone] [\s!tt] [\s!mono] [mscorecourier]
+ \stoptypescript
+
+ \starttypescript[mschineselight,mschinese-light]
+ \definetypeface [\typescriptone] [\s!rm] [\s!serif] [mschinese-light] [\s!default]
+ \definetypeface [\typescriptone] [\s!ss] [\s!sans] [mschinese] [\s!default]
+ \definetypeface [\typescriptone] [\s!mm] [\s!math] [cambria]
+ \definetypeface [\typescriptone] [\s!tt] [\s!mono] [mscorecourier]
+ \stoptypescript
+
+\stoptypescriptcollection
diff --git a/tex/context/fonts/mkiv/type-imp-texgyre.mkiv b/tex/context/fonts/mkiv/type-imp-texgyre.mkiv
index a7c2d06be..2bec4c2a8 100644
--- a/tex/context/fonts/mkiv/type-imp-texgyre.mkiv
+++ b/tex/context/fonts/mkiv/type-imp-texgyre.mkiv
@@ -14,6 +14,10 @@
%D There are some weird ones that are never used .. all these Caps ... we can better
%D split them into pagella-caps etc.
+\definefontfeature
+ [mathcollapseitalics]
+ [collapseitalics=yes]
+
\starttypescriptcollection[texgyre]
\definetypescriptprefix [f:pagella] [pagella]
diff --git a/tex/context/fonts/mkiv/type-imp-xits.mkiv b/tex/context/fonts/mkiv/type-imp-xits.mkiv
index 145ddc7a8..d3504bdbb 100644
--- a/tex/context/fonts/mkiv/type-imp-xits.mkiv
+++ b/tex/context/fonts/mkiv/type-imp-xits.mkiv
@@ -23,12 +23,12 @@
\starttypescript [\s!math] [xits,xitsbidi] [\s!name]
\loadfontgoodies[xits-math]
- \definefontsynonym[\s!MathRoman ][\s!file:xits-math.otf] [\s!features={\s!math\mathsizesuffix,mathextra},\s!goodies=xits-math]
- \definefontsynonym[\s!MathRoman L2R][\s!file:xits-math.otf] [\s!features={\s!math\mathsizesuffix-l2r,mathextra},\s!goodies=xits-math]
- \definefontsynonym[\s!MathRoman R2L][\s!file:xits-math.otf] [\s!features={\s!math\mathsizesuffix-r2l,mathextra},\s!goodies=xits-math]
- \definefontsynonym[\s!MathRomanBold ][\s!file:xits-mathbold.otf][\s!features={\s!math\mathsizesuffix,mathextra},\s!goodies=xits-math]
- \definefontsynonym[\s!MathRomanBold L2R][\s!file:xits-mathbold.otf][\s!features={\s!math\mathsizesuffix-l2r,mathextra},\s!goodies=xits-math]
- \definefontsynonym[\s!MathRomanBold R2L][\s!file:xits-mathbold.otf][\s!features={\s!math\mathsizesuffix-r2l,mathextra},\s!goodies=xits-math]
+ \definefontsynonym[\s!MathRoman ][\s!file:xitsmath-regular.otf] [\s!features={\s!math\mathsizesuffix,mathextra},\s!goodies=xits-math]
+ \definefontsynonym[\s!MathRoman L2R][\s!file:xitsmath-regular.otf] [\s!features={\s!math\mathsizesuffix-l2r,mathextra},\s!goodies=xits-math]
+ \definefontsynonym[\s!MathRoman R2L][\s!file:xitsmath-regular.otf] [\s!features={\s!math\mathsizesuffix-r2l,mathextra},\s!goodies=xits-math]
+ \definefontsynonym[\s!MathRomanBold ][\s!file:xitsmath-bold.otf] [\s!features={\s!math\mathsizesuffix,mathextra},\s!goodies=xits-math]
+ \definefontsynonym[\s!MathRomanBold L2R][\s!file:xitsmath-bold.otf] [\s!features={\s!math\mathsizesuffix-l2r,mathextra},\s!goodies=xits-math]
+ \definefontsynonym[\s!MathRomanBold R2L][\s!file:xitsmath-bold.otf] [\s!features={\s!math\mathsizesuffix-r2l,mathextra},\s!goodies=xits-math]
\stoptypescript
\starttypescript [\s!serif] [xits] [\s!name]
diff --git a/tex/context/fonts/mkiv/unifraktur.lfg b/tex/context/fonts/mkiv/unifraktur.lfg
index 32ffed928..4bae0be7e 100644
--- a/tex/context/fonts/mkiv/unifraktur.lfg
+++ b/tex/context/fonts/mkiv/unifraktur.lfg
@@ -1,6 +1,6 @@
-- moved to treatments.lfg
--
--- fonts.handlers.otf.enhancers.patches.register("after","check metadata","unifraktur*", function(data,filename)
+-- fonts.handlers.otf.enhancers.patches.register("before","check extra features","unifraktur*", function(data,filename)
-- data.metadata.pfminfo.os2_xheight = nil
-- end)
diff --git a/tex/context/fonts/mkiv/xits-math.lfg b/tex/context/fonts/mkiv/xits-math.lfg
index 372224940..b37ab1277 100644
--- a/tex/context/fonts/mkiv/xits-math.lfg
+++ b/tex/context/fonts/mkiv/xits-math.lfg
@@ -23,6 +23,16 @@ return {
comment = "Goodies that complement xits (by Khaled Hosny).",
author = "Hans Hagen",
copyright = "ConTeXt development team",
+ filenames = {
+ ["xitsmath-regular.otf"] = {
+ "xitsmath-regular.otf",
+ "xits-math.otf",
+ },
+ ["xitsmath-bold.otf"] = {
+ "xitsmath-bold.otf",
+ "xits-mathbold.otf",
+ },
+ },
mathematics = {
-- italics = {
-- ["xits-math"] = italics,
diff --git a/tex/context/interface/mkii/keys-cs.xml b/tex/context/interface/mkii/keys-cs.xml
index 9ce2a779a..521393c17 100644
--- a/tex/context/interface/mkii/keys-cs.xml
+++ b/tex/context/interface/mkii/keys-cs.xml
[hunk bodies lost in extraction: the added XML interface-key entries were stripped as markup; only empty +/- markers remained]
diff --git a/tex/context/interface/mkii/keys-de.xml b/tex/context/interface/mkii/keys-de.xml
index 404f8da89..f399f128a 100644
--- a/tex/context/interface/mkii/keys-de.xml
+++ b/tex/context/interface/mkii/keys-de.xml
[hunk bodies lost in extraction: the added XML interface-key entries were stripped as markup; only empty +/- markers remained]
diff --git a/tex/context/interface/mkii/keys-en.xml b/tex/context/interface/mkii/keys-en.xml
index a1c935db8..dccff3a98 100644
--- a/tex/context/interface/mkii/keys-en.xml
+++ b/tex/context/interface/mkii/keys-en.xml
[hunk bodies lost in extraction: the added XML interface-key entries were stripped as markup; only empty +/- markers remained]
diff --git a/tex/context/interface/mkii/keys-fr.xml b/tex/context/interface/mkii/keys-fr.xml
index db6e35ac6..ab256770c 100644
--- a/tex/context/interface/mkii/keys-fr.xml
+++ b/tex/context/interface/mkii/keys-fr.xml
[hunk bodies lost in extraction: the added XML interface-key entries were stripped as markup; only empty +/- markers remained]
diff --git a/tex/context/interface/mkii/keys-it.xml b/tex/context/interface/mkii/keys-it.xml
index 86d6868b4..da7970619 100644
--- a/tex/context/interface/mkii/keys-it.xml
+++ b/tex/context/interface/mkii/keys-it.xml
[hunk bodies lost in extraction: the added XML interface-key entries were stripped as markup; only empty +/- markers remained]
diff --git a/tex/context/interface/mkii/keys-nl.xml b/tex/context/interface/mkii/keys-nl.xml
index 1c6077d1e..212685d44 100644
--- a/tex/context/interface/mkii/keys-nl.xml
+++ b/tex/context/interface/mkii/keys-nl.xml
[hunk bodies lost in extraction: the added and changed XML interface-key entries were stripped as markup; only empty +/- markers remained]
diff --git a/tex/context/interface/mkii/keys-pe.xml b/tex/context/interface/mkii/keys-pe.xml
index 91f778c5e..e587e1d6e 100644
--- a/tex/context/interface/mkii/keys-pe.xml
+++ b/tex/context/interface/mkii/keys-pe.xml
[hunk bodies lost in extraction: the added XML interface-key entries were stripped as markup; only empty +/- markers remained]
diff --git a/tex/context/interface/mkii/keys-ro.xml b/tex/context/interface/mkii/keys-ro.xml
index a4566c4b4..c0308086c 100644
--- a/tex/context/interface/mkii/keys-ro.xml
+++ b/tex/context/interface/mkii/keys-ro.xml
[hunk bodies lost in extraction: the added XML interface-key entries were stripped as markup; only empty +/- markers remained]
diff --git a/tex/context/interface/mkiv/context-en.xml b/tex/context/interface/mkiv/context-en.xml
index e8353dbdc..4cfb3c7fe 100644
--- a/tex/context/interface/mkiv/context-en.xml
+++ b/tex/context/interface/mkiv/context-en.xml
[hunk bodies lost in extraction: the XML command-interface definitions were stripped as markup, leaving only +/- markers and hunk headers; per the original headers the first hunk alone grows the file from 8 to 587 lines, followed by a long series of smaller replacements and additions]
+
-
+
@@ -3026,47 +3733,47 @@
-
+
-
+
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
-
+
@@ -3076,7 +3783,7 @@
-
+
@@ -3088,9 +3795,9 @@
-
-
-
+
+
+
@@ -3099,21 +3806,21 @@
-
+
-
+
-
+
@@ -3135,7 +3842,7 @@
-
+
@@ -3143,10 +3850,10 @@
-
+
-
+
@@ -3154,13 +3861,13 @@
-
-
+
+
-
-
-
+
+
+
@@ -3169,12 +3876,12 @@
-
+
-
+
@@ -3187,14 +3894,21 @@
-
+
-
+
+
+
+
+
+
+
+
@@ -3283,7 +3997,7 @@
-
+
@@ -3293,7 +4007,7 @@
-
+
@@ -3301,7 +4015,7 @@
-
+
@@ -3311,21 +4025,21 @@
-
+
-
+
-
+
@@ -3335,7 +4049,7 @@
-
+
@@ -3348,28 +4062,28 @@
-
+
-
+
-
+
-
+
@@ -3384,9 +4098,9 @@
-
-
-
+
+
+
@@ -3396,7 +4110,7 @@
-
+
@@ -3405,14 +4119,14 @@
-
+
-
+
@@ -3422,7 +4136,7 @@
-
+
@@ -3483,7 +4197,7 @@
-
+
@@ -3493,7 +4207,7 @@
-
+
@@ -3506,7 +4220,7 @@
-
+
@@ -3560,7 +4274,7 @@
-
+
@@ -3571,28 +4285,28 @@
-
+
-
+
-
+
-
+
@@ -3602,7 +4316,7 @@
-
+
@@ -3625,7 +4339,7 @@
-
+
@@ -3633,7 +4347,7 @@
-
+
@@ -3642,10 +4356,10 @@
-
-
-
-
+
+
+
+
@@ -3659,7 +4373,7 @@
-
+
@@ -3724,7 +4438,7 @@
-
+
@@ -3734,7 +4448,7 @@
-
+
@@ -3744,7 +4458,7 @@
-
+
@@ -3754,7 +4468,7 @@
-
+
@@ -3764,7 +4478,7 @@
-
+
@@ -3774,7 +4488,7 @@
-
+
@@ -3787,7 +4501,7 @@
-
+
@@ -3828,7 +4542,7 @@
-
+
@@ -3846,7 +4560,7 @@
-
+
@@ -3856,7 +4570,7 @@
-
+
@@ -3888,7 +4602,7 @@
-
+
@@ -3898,14 +4612,14 @@
-
+
-
+
@@ -3913,7 +4627,7 @@
-
+
@@ -3921,7 +4635,7 @@
-
+
@@ -3929,7 +4643,7 @@
-
+
@@ -3937,7 +4651,7 @@
-
+
@@ -3945,7 +4659,7 @@
-
+
@@ -3953,71 +4667,71 @@
-
+
-
+
-
+
-
+
-
+
-
+
-
-
+
+
-
+
-
+
-
+
@@ -4065,15 +4779,15 @@
-
+
-
-
+
+
@@ -4085,7 +4799,7 @@
-
+
@@ -4095,22 +4809,22 @@
-
+
-
-
+
+
-
+
@@ -4123,8 +4837,8 @@
-
-
+
+
@@ -4143,7 +4857,7 @@
-
+
@@ -4156,7 +4870,7 @@
-
+
@@ -4169,7 +4883,7 @@
-
+
@@ -4179,77 +4893,77 @@
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
@@ -4262,7 +4976,7 @@
-
+
@@ -4275,7 +4989,7 @@
-
+
@@ -4285,7 +4999,7 @@
-
+
@@ -4295,35 +5009,35 @@
-
+
-
+
-
+
-
+
-
+
@@ -4337,14 +5051,14 @@
-
+
-
+
@@ -4358,7 +5072,7 @@
-
+
@@ -4367,7 +5081,7 @@
-
+
@@ -4416,14 +5130,14 @@
-
+
-
+
@@ -4433,14 +5147,14 @@
-
+
-
+
@@ -4458,7 +5172,7 @@
-
+
@@ -4467,7 +5181,7 @@
-
+
@@ -4480,7 +5194,7 @@
-
+
@@ -4540,7 +5254,7 @@
-
+
@@ -4550,7 +5264,7 @@
-
+
@@ -4560,7 +5274,7 @@
-
+
@@ -4568,7 +5282,7 @@
-
+
@@ -4577,7 +5291,7 @@
-
+
@@ -4585,7 +5299,7 @@
-
+
@@ -4593,7 +5307,7 @@
-
+
@@ -4601,7 +5315,7 @@
-
+
@@ -4609,7 +5323,7 @@
-
+
@@ -4617,7 +5331,7 @@
-
+
@@ -4626,7 +5340,7 @@
-
+
@@ -4634,7 +5348,7 @@
-
+
@@ -4642,7 +5356,7 @@
-
+
@@ -4650,7 +5364,7 @@
-
+
@@ -4658,7 +5372,7 @@
-
+
@@ -4667,7 +5381,7 @@
-
+
@@ -4676,7 +5390,7 @@
-
+
@@ -4685,7 +5399,7 @@
-
+
@@ -4694,7 +5408,7 @@
-
+
@@ -4703,7 +5417,7 @@
-
+
@@ -4711,7 +5425,7 @@
-
+
@@ -4719,7 +5433,7 @@
-
+
@@ -4727,7 +5441,7 @@
-
+
@@ -4735,7 +5449,7 @@
-
+
@@ -4743,7 +5457,7 @@
-
+
@@ -4751,7 +5465,7 @@
-
+
@@ -4767,28 +5481,28 @@
-
+
-
+
-
+
-
+
@@ -4826,8 +5540,8 @@
-
-
+
+
@@ -4835,7 +5549,7 @@
-
+
@@ -4849,7 +5563,7 @@
-
+
@@ -4863,7 +5577,7 @@
-
+
@@ -4874,7 +5588,7 @@
-
+
@@ -4885,7 +5599,7 @@
-
+
@@ -4893,7 +5607,7 @@
-
+
@@ -4905,7 +5619,7 @@
-
+
@@ -4913,7 +5627,7 @@
-
+
@@ -4922,7 +5636,7 @@
-
+
@@ -4931,7 +5645,7 @@
-
+
@@ -4942,7 +5656,7 @@
-
+
@@ -4955,7 +5669,7 @@
-
+
@@ -5030,12 +5744,18 @@
+
+
+
+
+
+
-
+
-
+
@@ -5052,9 +5772,9 @@
-
+
-
+
@@ -5068,341 +5788,318 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
@@ -5415,7 +6112,7 @@
-
+
@@ -5428,7 +6125,7 @@
-
+
@@ -5441,7 +6138,7 @@
-
+
@@ -5454,7 +6151,7 @@
-
+
@@ -5464,7 +6161,7 @@
-
+
@@ -5474,7 +6171,7 @@
-
+
@@ -5517,7 +6214,7 @@
-
+
@@ -5535,14 +6232,14 @@
-
+
-
+
@@ -5551,7 +6248,7 @@
-
+
@@ -5564,7 +6261,7 @@
-
+
@@ -5580,7 +6277,7 @@
-
+
@@ -5593,7 +6290,7 @@
-
+
@@ -5606,7 +6303,7 @@
-
+
@@ -5683,7 +6380,7 @@
-
+
@@ -5693,7 +6390,7 @@
-
+
@@ -5706,7 +6403,7 @@
-
+
@@ -5719,7 +6416,7 @@
-
+
@@ -5735,7 +6432,7 @@
-
+
@@ -5751,7 +6448,7 @@
-
+
@@ -5765,91 +6462,91 @@
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
@@ -5858,7 +6555,7 @@
-
+
@@ -5871,7 +6568,7 @@
-
+
@@ -5976,7 +6673,7 @@
-
+
@@ -5989,7 +6686,7 @@
-
+
@@ -6002,7 +6699,7 @@
-
+
@@ -6012,7 +6709,7 @@
-
+
@@ -6022,7 +6719,7 @@
-
+
@@ -6035,7 +6732,7 @@
-
+
@@ -6048,7 +6745,7 @@
-
+
@@ -6061,7 +6758,7 @@
-
+
@@ -6071,7 +6768,7 @@
-
+
@@ -6081,7 +6778,7 @@
-
+
@@ -6091,7 +6788,7 @@
-
+
@@ -6101,7 +6798,7 @@
-
+
@@ -6111,7 +6808,7 @@
-
+
@@ -6121,7 +6818,7 @@
-
+
@@ -6131,7 +6828,7 @@
-
+
@@ -6141,7 +6838,7 @@
-
+
@@ -6151,77 +6848,77 @@
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
@@ -6231,7 +6928,7 @@
-
+
@@ -6241,7 +6938,7 @@
-
+
@@ -6251,7 +6948,7 @@
-
+
@@ -6261,7 +6958,7 @@
-
+
@@ -6271,7 +6968,7 @@
-
+
@@ -6281,7 +6978,7 @@
-
+
@@ -6297,7 +6994,7 @@
-
+
@@ -6311,7 +7008,7 @@
-
+
@@ -6324,7 +7021,7 @@
-
+
@@ -6337,7 +7034,7 @@
-
+
@@ -6347,7 +7044,7 @@
-
+
@@ -6357,7 +7054,7 @@
-
+
@@ -6371,7 +7068,7 @@
-
+
@@ -6387,7 +7084,7 @@
-
+
@@ -6403,7 +7100,7 @@
-
+
@@ -6419,7 +7116,7 @@
-
+
@@ -6432,7 +7129,7 @@
-
+
@@ -6445,7 +7142,7 @@
-
+
@@ -6460,7 +7157,7 @@
-
+
@@ -6477,7 +7174,7 @@
-
+
@@ -6486,7 +7183,7 @@
-
+
@@ -6497,7 +7194,7 @@
-
+
@@ -6510,7 +7207,7 @@
-
+
@@ -6597,9 +7294,9 @@
-
+
-
+
@@ -6615,9 +7312,9 @@
-
+
-
+
@@ -6635,7 +7332,7 @@
-
+
@@ -6646,7 +7343,7 @@
-
+
@@ -6659,7 +7356,7 @@
-
+
@@ -6670,7 +7367,7 @@
-
+
@@ -6683,113 +7380,30 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
-
+
-
+
-
+
+
-
+
@@ -6802,7 +7416,7 @@
-
+
@@ -6875,6 +7489,12 @@
+
+
+
+
+
+
@@ -6893,6 +7513,11 @@
+
+
+
+
+
@@ -6919,7 +7544,7 @@
-
+
@@ -6940,7 +7565,7 @@
-
+
@@ -6953,7 +7578,7 @@
-
+
@@ -6972,7 +7597,7 @@
-
+
@@ -6980,18 +7605,18 @@
-
-
+
+
-
+
-
+
@@ -7012,7 +7637,7 @@
-
+
@@ -7033,7 +7658,7 @@
-
+
@@ -7049,7 +7674,7 @@
-
+
@@ -7071,7 +7696,7 @@
-
+
@@ -7089,19 +7714,23 @@
+
+
+
+
-
+
-
-
-
+
+
+
@@ -7109,7 +7738,7 @@
-
+
@@ -7119,10 +7748,10 @@
-
-
-
-
+
+
+
+
@@ -7130,7 +7759,7 @@
-
+
@@ -7138,29 +7767,29 @@
-
+
-
+
-
+
-
-
+
+
@@ -7168,7 +7797,7 @@
-
+
@@ -7176,29 +7805,29 @@
-
+
-
+
-
+
-
-
+
+
@@ -7206,7 +7835,7 @@
-
+
@@ -7214,29 +7843,29 @@
-
+
-
+
-
+
-
-
+
+
@@ -7244,7 +7873,7 @@
-
+
@@ -7252,36 +7881,36 @@
-
+
-
+
-
+
-
-
+
+
-
+
@@ -7308,14 +7937,21 @@
-
+
-
+
+
+
+
+
+
+
+
@@ -7324,7 +7960,7 @@
-
+
@@ -7333,21 +7969,21 @@
-
+
-
+
-
+
@@ -7357,7 +7993,7 @@
-
+
@@ -7365,7 +8001,7 @@
-
+
@@ -7375,14 +8011,14 @@
-
+
-
+
@@ -7391,7 +8027,7 @@
-
+
@@ -7404,7 +8040,7 @@
-
+
@@ -7431,19 +8067,19 @@
-
+
-
+
-
+
@@ -7453,7 +8089,7 @@
-
+
@@ -7466,7 +8102,7 @@
-
+
@@ -7586,6 +8222,12 @@
+
+
+
+
+
+
@@ -7604,6 +8246,11 @@
+
+
+
+
+
@@ -7631,7 +8278,17 @@
-
+
+
+
+
+
+
+
+
+
+
+
@@ -7652,7 +8309,7 @@
-
+
@@ -7665,7 +8322,7 @@
-
+
@@ -7684,7 +8341,7 @@
-
+
@@ -7701,7 +8358,7 @@
-
+
@@ -7751,14 +8408,14 @@
-
+
-
+
@@ -7771,7 +8428,7 @@
-
+
@@ -7886,7 +8543,7 @@
-
+
@@ -7899,7 +8556,7 @@
-
+
@@ -7912,7 +8569,7 @@
-
+
@@ -7925,7 +8582,7 @@
-
+
@@ -7938,7 +8595,7 @@
-
+
@@ -7948,7 +8605,7 @@
-
+
@@ -7958,7 +8615,7 @@
-
+
@@ -7971,7 +8628,7 @@
-
+
@@ -7984,7 +8641,7 @@
-
+
@@ -8009,7 +8666,7 @@
-
+
@@ -8022,7 +8679,7 @@
-
+
@@ -8038,7 +8695,7 @@
-
+
@@ -8048,7 +8705,7 @@
-
+
@@ -8056,7 +8713,7 @@
-
+
@@ -8066,7 +8723,7 @@
-
+
@@ -8076,7 +8733,7 @@
-
+
@@ -8087,7 +8744,7 @@
-
+
@@ -8116,7 +8773,7 @@
-
+
@@ -8139,7 +8796,7 @@
-
+
@@ -8159,7 +8816,7 @@
-
+
@@ -8169,7 +8826,7 @@
-
+
@@ -8182,7 +8839,7 @@
-
+
@@ -8197,7 +8854,7 @@
-
+
@@ -8210,7 +8867,7 @@
-
+
@@ -8219,7 +8876,7 @@
-
+
@@ -8228,7 +8885,7 @@
-
+
@@ -8240,7 +8897,7 @@
-
+
@@ -8249,21 +8906,21 @@
-
+
-
+
-
+
@@ -8273,7 +8930,7 @@
-
+
@@ -8283,7 +8940,7 @@
-
+
@@ -8296,7 +8953,7 @@
-
+
@@ -8309,24 +8966,24 @@
-
-
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
-
+
@@ -8339,7 +8996,7 @@
-
+
@@ -8352,7 +9009,7 @@
-
+
@@ -8365,7 +9022,7 @@
-
+
@@ -8378,7 +9035,7 @@
-
+
@@ -8391,7 +9048,7 @@
-
+
@@ -8404,7 +9061,7 @@
-
+
@@ -8417,7 +9074,7 @@
-
+
@@ -8430,21 +9087,21 @@
-
+
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
@@ -8457,7 +9114,7 @@
-
+
@@ -8470,45 +9127,45 @@
-
+
-
+
-
+
-
+
-
-
-
+
+
+
-
-
+
+
@@ -8518,91 +9175,98 @@
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
+
+
+
+
+
+
+
-
+
@@ -8615,7 +9279,7 @@
-
+
@@ -8628,7 +9292,7 @@
-
+
@@ -8641,7 +9305,7 @@
-
+
@@ -8657,7 +9321,7 @@
-
+
@@ -8673,7 +9337,7 @@
-
+
@@ -8686,14 +9350,14 @@
-
+
-
+
@@ -8706,7 +9370,7 @@
-
+
@@ -8719,7 +9383,7 @@
-
+
@@ -8732,7 +9396,7 @@
-
+
@@ -8745,14 +9409,14 @@
-
+
-
+
@@ -8762,7 +9426,7 @@
-
+
@@ -8772,7 +9436,7 @@
-
+
@@ -8781,7 +9445,7 @@
-
+
@@ -8794,7 +9458,7 @@
-
+
@@ -8861,7 +9525,7 @@
-
+
@@ -8873,7 +9537,7 @@
-
+
@@ -8886,7 +9550,7 @@
-
+
@@ -8899,7 +9563,7 @@
-
+
@@ -8955,7 +9619,7 @@
-
+
@@ -8965,7 +9629,7 @@
-
+
@@ -8976,7 +9640,7 @@
-
+
@@ -8986,7 +9650,7 @@
-
+
@@ -9013,7 +9677,7 @@
-
+
@@ -9022,7 +9686,7 @@
-
+
@@ -9061,7 +9725,7 @@
-
+
@@ -9074,7 +9738,7 @@
-
+
@@ -9089,7 +9753,7 @@
-
+
@@ -9102,7 +9766,7 @@
-
+
@@ -9126,14 +9790,14 @@
-
+
-
+
@@ -9143,7 +9807,7 @@
-
+
@@ -9156,7 +9820,7 @@
-
+
@@ -9177,14 +9841,14 @@
-
+
-
+
@@ -9194,9 +9858,9 @@
-
+
-
+
@@ -9208,28 +9872,14 @@
-
-
-
-
-
-
-
-
+
-
-
-
-
-
-
-
-
+
@@ -9238,12 +9888,13 @@
-
+
+
@@ -9258,7 +9909,7 @@
-
+
@@ -9278,7 +9929,7 @@
-
+
@@ -9319,7 +9970,7 @@
-
+
@@ -9385,6 +10036,26 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
@@ -9412,6 +10083,18 @@
+
+
+
+
+
+
+
+
+
+
+
+
@@ -9438,7 +10121,17 @@
-
+
+
+
+
+
+
+
+
+
+
+
@@ -9473,6 +10166,8 @@
+
+
@@ -9497,6 +10192,9 @@
+
+
+
@@ -9552,7 +10250,7 @@
-
+
@@ -9562,90 +10260,22 @@
-
-
-
-
-
+
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
-
+
+
+
-
-
-
-
-
-
-
-
+
+
+
+
+
-
-
-
@@ -9698,10 +10328,88 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
@@ -9712,7 +10420,7 @@
-
+
@@ -9783,10 +10491,10 @@
-
-
+
+
@@ -9806,9 +10514,95 @@
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
-
+
@@ -9864,10 +10658,10 @@
-
-
+
+
@@ -9876,37 +10670,58 @@
+
+
+
+
+
+
+
-
+
-
+
+
+
+
+
+
+
+
-
+
-
+
+
+
+
+
+
+
+
-
+
-
+
@@ -9916,7 +10731,7 @@
-
+
@@ -9926,7 +10741,7 @@
-
+
@@ -9936,7 +10751,7 @@
-
+
@@ -9946,7 +10761,7 @@
-
+
@@ -9956,7 +10771,7 @@
-
+
@@ -9966,7 +10781,7 @@
-
+
@@ -9981,20 +10796,20 @@
-
-
-
-
-
-
+
+
+
+
+
+
-
-
+
+
@@ -10016,7 +10831,7 @@
-
+
@@ -10027,14 +10842,14 @@
-
+
-
+
@@ -10044,7 +10859,7 @@
-
+
@@ -10101,10 +10916,10 @@
-
-
+
+
@@ -10117,7 +10932,7 @@
-
+
@@ -10184,7 +10999,7 @@
-
+
@@ -10218,7 +11033,7 @@
-
+
@@ -10301,7 +11116,7 @@
-
+
@@ -10335,7 +11150,7 @@
-
+
@@ -10345,7 +11160,7 @@
-
+
@@ -10357,7 +11172,7 @@
-
+
@@ -10386,10 +11201,16 @@
+
+
+
+
+
+
-
+
@@ -10399,15 +11220,20 @@
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
@@ -10503,6 +11329,10 @@
+
+
+
+
@@ -10583,10 +11413,14 @@
+
+
+
+
-
+
@@ -10596,7 +11430,7 @@
-
+
@@ -10606,72 +11440,72 @@
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
-
-
+
+
+
-
+
@@ -10688,12 +11522,12 @@
-
+
-
+
@@ -10710,12 +11544,12 @@
-
+
-
+
@@ -10728,7 +11562,7 @@
-
+
@@ -10741,35 +11575,35 @@
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
@@ -10777,7 +11611,7 @@
-
+
@@ -10787,7 +11621,7 @@
-
+
@@ -10812,7 +11646,7 @@
-
+
@@ -10837,14 +11671,14 @@
-
+
-
+
@@ -10852,7 +11686,7 @@
-
+
@@ -10865,7 +11699,7 @@
-
+
@@ -10878,14 +11712,14 @@
-
+
-
+
@@ -10895,37 +11729,37 @@
-
+
-
+
-
-
+
+
-
+
-
-
+
+
@@ -10933,14 +11767,14 @@
-
+
-
+
@@ -10950,7 +11784,7 @@
-
+
@@ -10960,21 +11794,21 @@
-
+
-
+
-
+
@@ -10987,7 +11821,7 @@
-
+
@@ -11000,7 +11834,7 @@
-
+
@@ -11010,7 +11844,7 @@
-
+
@@ -11079,29 +11913,29 @@
-
+
-
-
+
+
-
+
-
-
-
+
+
+
@@ -11161,7 +11995,7 @@
-
+
@@ -11183,7 +12017,7 @@
-
+
@@ -11193,7 +12027,7 @@
-
+
@@ -11225,14 +12059,14 @@
-
+
-
+
@@ -11244,40 +12078,48 @@
-
+
-
+
-
+
-
+
-
+
-
+
+
+
+
+
+
+
+
+
-
+
@@ -11299,12 +12141,12 @@
-
+
-
+
@@ -11314,11 +12156,11 @@
-
-
-
-
-
+
+
+
+
+
@@ -11328,28 +12170,28 @@
-
+
-
+
-
+
-
+
@@ -11359,7 +12201,7 @@
-
+
@@ -11386,88 +12228,87 @@
-
+
-
+
-
+
-
-
+
+
-
+
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
@@ -11480,7 +12321,7 @@
-
+
@@ -11511,22 +12352,22 @@
-
+
-
-
+
+
-
+
@@ -11539,16 +12380,26 @@
-
+
-
-
-
+
+
+
+
+
+
+
+
+
+
+
+
+
@@ -11561,7 +12412,7 @@
-
+
@@ -11580,14 +12431,14 @@
-
+
-
+
-
+
-
+
@@ -11595,7 +12446,7 @@
-
+
@@ -11603,14 +12454,14 @@
-
+
-
+
@@ -11621,7 +12472,7 @@
-
+
@@ -11631,7 +12482,7 @@
-
+
@@ -11658,14 +12509,14 @@
-
+
-
+
@@ -11675,7 +12526,7 @@
-
+
@@ -11685,28 +12536,28 @@
-
+
-
+
-
+
-
+
@@ -11716,7 +12567,7 @@
-
+
@@ -11726,7 +12577,7 @@
-
+
@@ -11737,7 +12588,7 @@
-
+
@@ -11748,34 +12599,50 @@
-
+
-
+
+
+
+
+
+
+
+
+
-
+
-
+
+
+
+
+
+
+
+
+
-
+
-
+
@@ -11791,7 +12658,7 @@
-
+
@@ -11807,15 +12674,15 @@
-
+
-
-
+
+
@@ -11828,7 +12695,7 @@
-
+
@@ -11841,7 +12708,7 @@
-
+
@@ -11858,23 +12725,23 @@
-
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
-
+
@@ -11882,7 +12749,7 @@
-
+
@@ -11907,15 +12774,15 @@
-
+
-
-
+
+
@@ -11925,7 +12792,7 @@
-
+
@@ -11944,7 +12811,7 @@
-
+
@@ -11954,14 +12821,14 @@
-
+
-
+
@@ -12013,7 +12880,7 @@
-
+
@@ -12037,7 +12904,7 @@
-
+
@@ -12058,16 +12925,23 @@
-
+
-
-
+
+
+
+
+
+
+
+
+
-
+
@@ -12084,7 +12958,7 @@
-
+
@@ -12097,7 +12971,7 @@
-
+
@@ -12116,6 +12990,11 @@
+
+
+
+
+
@@ -12153,6 +13032,11 @@
+
+
+
+
+
@@ -12181,11 +13065,41 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
-
+
+
+
+
+
+
+
+
+
+
+
@@ -12198,7 +13112,7 @@
-
+
@@ -12212,9 +13126,19 @@
-
+
+
+
+
+
+
+
+
+
+
+
-
+
@@ -12223,6 +13147,11 @@
+
+
+
+
+
@@ -12233,62 +13162,50 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+
-
+
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
-
+
@@ -12301,20 +13218,25 @@
-
+
+
+
+
+
+
-
-
+
+
@@ -12323,7 +13245,7 @@
-
+
@@ -12332,7 +13254,7 @@
-
+
@@ -12344,7 +13266,7 @@
-
+
@@ -12353,7 +13275,7 @@
-
+
@@ -12370,7 +13292,7 @@
-
+
@@ -12379,7 +13301,7 @@
-
+
@@ -12391,7 +13313,7 @@
-
+
@@ -12400,17 +13322,20 @@
-
+
-
+
+
+
+
-
+
@@ -12423,7 +13348,7 @@
-
+
@@ -12475,12 +13400,33 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
-
+
-
+
@@ -12497,73 +13443,19 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
-
+
-
-
-
-
-
-
-
-
-
-
-
-
-
+
@@ -12585,7 +13477,7 @@
-
+
@@ -12598,7 +13490,7 @@
-
+
@@ -12794,21 +13686,21 @@
-
+
-
+
-
+
@@ -12816,9 +13708,9 @@
-
+
-
+
@@ -12831,7 +13723,7 @@
-
+
@@ -12839,7 +13731,7 @@
-
+
@@ -12849,7 +13741,7 @@
-
+
@@ -12860,7 +13752,7 @@
-
+
@@ -12871,7 +13763,7 @@
-
+
@@ -12879,7 +13771,7 @@
-
+
@@ -12890,7 +13782,7 @@
-
+
@@ -12901,15 +13793,7 @@
-
-
-
-
-
-
-
-
-
+
@@ -12917,7 +13801,7 @@
-
+
@@ -12930,7 +13814,7 @@
-
+
@@ -12940,9 +13824,9 @@
-
+
-
+
@@ -12956,31 +13840,7 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
@@ -12993,7 +13853,7 @@
-
+
@@ -13003,7 +13863,7 @@
-
+
@@ -13016,7 +13876,7 @@
-
+
@@ -13059,9 +13919,19 @@
-
+
+
+
+
+
+
+
+
+
+
+
-
+
@@ -13073,9 +13943,9 @@
-
+
-
+
@@ -13092,28 +13962,7 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
@@ -13126,7 +13975,7 @@
-
+
@@ -13158,7 +14007,7 @@
-
+
@@ -13166,7 +14015,7 @@
-
+
@@ -13176,7 +14025,7 @@
-
+
@@ -13186,7 +14035,7 @@
-
+
@@ -13196,7 +14045,7 @@
-
+
@@ -13207,8 +14056,94 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
-
+
@@ -13221,7 +14156,7 @@
-
+
@@ -13373,9 +14308,6 @@
-
-
-
@@ -13431,7 +14363,7 @@
-
+
@@ -13447,7 +14379,7 @@
-
+
@@ -13460,7 +14392,7 @@
-
+
@@ -13470,16 +14402,16 @@
-
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
@@ -13489,7 +14421,7 @@
-
+
@@ -13502,7 +14434,7 @@
-
+
@@ -13515,7 +14447,7 @@
-
+
@@ -13528,7 +14460,7 @@
-
+
@@ -13536,42 +14468,42 @@
-
+
-
+
-
+
-
+
-
+
-
+
@@ -13581,35 +14513,35 @@
-
+
-
+
-
+
-
+
-
+
@@ -13619,14 +14551,14 @@
-
+
-
+
@@ -13652,7 +14584,7 @@
-
+
@@ -13675,7 +14607,7 @@
-
+
@@ -13686,7 +14618,7 @@
-
+
@@ -13699,7 +14631,7 @@
-
+
@@ -13714,7 +14646,7 @@
-
+
@@ -13730,7 +14662,7 @@
-
+
@@ -13748,7 +14680,7 @@
-
+
@@ -13766,7 +14698,7 @@
-
+
@@ -13803,7 +14735,7 @@
-
+
@@ -13828,13 +14760,18 @@
+
+
+
+
+
-
+
@@ -13843,7 +14780,7 @@
-
+
@@ -13868,12 +14805,17 @@
+
+
+
+
+
-
+
@@ -13881,7 +14823,7 @@
-
+
@@ -13906,24 +14848,29 @@
+
+
+
+
+
-
+
-
+
-
-
+
+
@@ -13980,7 +14927,7 @@
-
+
@@ -13989,7 +14936,7 @@
-
+
@@ -14002,7 +14949,7 @@
-
+
@@ -14012,9 +14959,9 @@
-
+
-
+
@@ -14031,9 +14978,9 @@
-
+
-
+
@@ -14044,7 +14991,7 @@
-
+
@@ -14054,7 +15001,7 @@
-
+
@@ -14064,8 +15011,8 @@
-
-
+
+
@@ -14075,7 +15022,7 @@
-
+
@@ -14088,7 +15035,7 @@
-
+
@@ -14097,7 +15044,7 @@
-
+
@@ -14110,7 +15057,7 @@
-
+
@@ -14130,7 +15077,7 @@
-
+
@@ -14140,7 +15087,7 @@
-
+
@@ -14150,14 +15097,14 @@
-
+
-
+
@@ -14169,7 +15116,7 @@
-
+
@@ -14182,7 +15129,7 @@
-
+
@@ -14208,22 +15155,22 @@
-
-
-
-
-
-
+
+
+
+
+
+
-
-
-
-
+
+
+
+
@@ -14232,30 +15179,30 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
-
-
-
+
+
+
@@ -14265,7 +15212,7 @@
-
+
@@ -14275,7 +15222,7 @@
-
+
@@ -14285,7 +15232,7 @@
-
+
@@ -14295,7 +15242,7 @@
-
+
@@ -14305,7 +15252,7 @@
-
+
@@ -14314,7 +15261,7 @@
-
+
@@ -14329,7 +15276,7 @@
-
+
@@ -14342,7 +15289,7 @@
-
+
@@ -14397,7 +15344,7 @@
-
+
@@ -14405,8 +15352,8 @@
-
-
+
+
@@ -14416,7 +15363,7 @@
-
+
@@ -14426,7 +15373,7 @@
-
+
@@ -14436,7 +15383,7 @@
-
+
@@ -14446,7 +15393,7 @@
-
+
@@ -14456,14 +15403,14 @@
-
+
-
+
@@ -14504,7 +15451,7 @@
-
+
@@ -14512,76 +15459,76 @@
-
-
+
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
@@ -14590,7 +15537,7 @@
-
+
@@ -14600,7 +15547,7 @@
-
+
@@ -14610,7 +15557,7 @@
-
+
@@ -14655,7 +15602,7 @@
-
+
@@ -14665,10 +15612,10 @@
-
+
-
+
@@ -14694,7 +15641,7 @@
-
+
@@ -14718,16 +15665,16 @@
-
-
+
+
-
-
-
-
+
+
+
+
@@ -14736,16 +15683,16 @@
-
+
-
-
+
+
-
+
@@ -14758,7 +15705,7 @@
-
+
@@ -14804,12 +15751,12 @@
-
+
-
+
@@ -14818,7 +15765,7 @@
-
+
@@ -14831,7 +15778,7 @@
-
+
@@ -14882,14 +15829,14 @@
-
+
-
+
@@ -14901,42 +15848,42 @@
-
+
-
+
-
+
-
+
-
+
-
+
@@ -14949,7 +15896,7 @@
-
+
@@ -14958,7 +15905,7 @@
-
+
@@ -14971,7 +15918,7 @@
-
+
@@ -14987,7 +15934,7 @@
-
+
@@ -15000,7 +15947,7 @@
-
+
@@ -15015,7 +15962,7 @@
-
+
@@ -15028,7 +15975,7 @@
-
+
@@ -15126,14 +16073,14 @@
-
+
-
+
@@ -15141,7 +16088,7 @@
-
+
@@ -15149,7 +16096,7 @@
-
+
@@ -15159,7 +16106,7 @@
-
+
@@ -15171,7 +16118,7 @@
-
+
@@ -15184,7 +16131,7 @@
-
+
@@ -15247,7 +16194,7 @@
-
+
@@ -15257,7 +16204,7 @@
-
+
@@ -15271,7 +16218,7 @@
-
+
@@ -15284,7 +16231,7 @@
-
+
@@ -15395,28 +16342,28 @@
-
+
-
+
-
+
-
+
@@ -15426,7 +16373,7 @@
-
+
@@ -15442,7 +16389,7 @@
-
+
@@ -15455,7 +16402,7 @@
-
+
@@ -15467,7 +16414,7 @@
-
+
@@ -15508,10 +16455,15 @@
+
+
+
+
+
@@ -15519,7 +16471,7 @@
-
+
@@ -15528,10 +16480,10 @@
-
-
-
-
+
+
+
+
@@ -15539,7 +16491,7 @@
-
+
@@ -15549,7 +16501,7 @@
-
+
@@ -15559,7 +16511,7 @@
-
+
@@ -15569,7 +16521,7 @@
-
+
@@ -15579,28 +16531,28 @@
-
+
-
+
-
+
-
+
@@ -15615,7 +16567,7 @@
-
+
@@ -15626,8 +16578,8 @@
-
-
+
+
@@ -15638,7 +16590,7 @@
-
+
@@ -15655,7 +16607,7 @@
-
+
@@ -15672,7 +16624,7 @@
-
+
@@ -15686,7 +16638,7 @@
-
+
@@ -15696,28 +16648,28 @@
-
+
-
+
-
+
-
+
@@ -15731,7 +16683,7 @@
-
+
@@ -15741,7 +16693,7 @@
-
+
@@ -15751,7 +16703,7 @@
-
+
@@ -15761,7 +16713,7 @@
-
+
@@ -15773,7 +16725,7 @@
-
+
@@ -15786,7 +16738,7 @@
-
+
@@ -15826,7 +16778,7 @@
-
+
@@ -15841,14 +16793,14 @@
-
+
-
+
@@ -15858,7 +16810,7 @@
-
+
@@ -15873,14 +16825,14 @@
-
+
-
+
@@ -15890,7 +16842,7 @@
-
+
@@ -15905,21 +16857,21 @@
-
+
-
+
-
+
@@ -15934,21 +16886,21 @@
-
+
-
+
-
+
@@ -15961,7 +16913,7 @@
-
+
@@ -16001,7 +16953,7 @@
-
+
@@ -16016,14 +16968,14 @@
-
+
-
+
@@ -16033,7 +16985,7 @@
-
+
@@ -16048,14 +17000,14 @@
-
+
-
+
@@ -16065,7 +17017,7 @@
-
+
@@ -16080,21 +17032,21 @@
-
+
-
+
-
+
@@ -16109,21 +17061,21 @@
-
+
-
+
-
+
@@ -16132,9 +17084,9 @@
-
-
-
+
+
+
@@ -16146,7 +17098,7 @@
-
+
@@ -16159,7 +17111,7 @@
-
+
@@ -16190,6 +17142,7 @@
+
@@ -16361,11 +17314,14 @@
+
+
+
-
+
@@ -16564,11 +17520,14 @@
+
+
+
-
+
@@ -16579,7 +17538,7 @@
-
+
@@ -16600,7 +17559,7 @@
-
+
@@ -16616,7 +17575,7 @@
-
+
@@ -16629,29 +17588,29 @@
-
+
-
-
+
+
-
+
-
+
@@ -16661,7 +17620,7 @@
-
+
@@ -16671,7 +17630,7 @@
-
+
@@ -16683,7 +17642,7 @@
-
+
@@ -16693,7 +17652,7 @@
-
+
@@ -16706,7 +17665,7 @@
-
+
@@ -16766,7 +17725,7 @@
-
+
@@ -16778,7 +17737,7 @@
-
+
@@ -16791,7 +17750,7 @@
-
+
@@ -16805,7 +17764,7 @@
-
+
@@ -16813,7 +17772,7 @@
-
+
@@ -16821,7 +17780,7 @@
-
+
@@ -16833,7 +17792,7 @@
-
+
@@ -16846,7 +17805,7 @@
-
+
@@ -16872,7 +17831,7 @@
-
+
@@ -16880,8 +17839,8 @@
-
-
+
+
@@ -16894,7 +17853,7 @@
-
+
@@ -16904,7 +17863,7 @@
-
+
@@ -16914,7 +17873,7 @@
-
+
@@ -16924,7 +17883,7 @@
-
+
@@ -16933,7 +17892,7 @@
-
+
@@ -16946,7 +17905,7 @@
-
+
@@ -17067,6 +18026,12 @@
+
+
+
+
+
+
@@ -17117,7 +18082,7 @@
-
+
@@ -17130,7 +18095,7 @@
-
+
@@ -17146,7 +18111,7 @@
-
+
@@ -17156,7 +18121,7 @@
-
+
@@ -17172,9 +18137,19 @@
-
+
+
+
+
+
+
+
+
+
+
+
-
+
@@ -17190,9 +18165,19 @@
-
+
+
+
+
+
+
+
+
+
+
+
-
+
@@ -17208,9 +18193,19 @@
-
+
+
+
+
+
+
+
+
+
+
+
-
+
@@ -17223,16 +18218,36 @@
-
+
+
+
+
+
+
+
+
+
+
+
-
+
-
+
+
+
+
+
+
+
+
+
+
+
-
+
@@ -17242,9 +18257,19 @@
-
+
+
+
+
+
+
+
+
+
+
+
-
+
@@ -17257,9 +18282,13 @@
-
+
+
+
+
+
-
+
@@ -17270,9 +18299,13 @@
-
+
+
+
+
+
-
+
@@ -17283,11 +18316,15 @@
-
+
+
+
+
+
-
+
@@ -17297,7 +18334,7 @@
-
+
@@ -17307,7 +18344,7 @@
-
+
@@ -17349,6 +18386,12 @@
+
+
+
+
+
+
@@ -17410,7 +18453,7 @@
-
+
@@ -17423,7 +18466,7 @@
-
+
@@ -17436,7 +18479,7 @@
-
+
@@ -17449,7 +18492,7 @@
-
+
@@ -17462,40 +18505,40 @@
-
+
-
+
-
+
-
+
-
+
-
-
-
+
+
+
@@ -17507,7 +18550,7 @@
-
+
@@ -17524,7 +18567,7 @@
-
+
@@ -17538,7 +18581,7 @@
-
+
@@ -17555,7 +18598,7 @@
-
+
@@ -17569,7 +18612,7 @@
-
+
@@ -17581,7 +18624,7 @@
-
+
@@ -17609,7 +18652,7 @@
-
+
@@ -17622,7 +18665,7 @@
-
+
@@ -17752,42 +18795,42 @@
-
+
-
+
-
+
-
+
-
+
-
+
@@ -17804,7 +18847,7 @@
-
+
@@ -17814,7 +18857,7 @@
-
+
@@ -17824,7 +18867,7 @@
-
+
@@ -17837,7 +18880,7 @@
-
+
@@ -17850,9 +18893,9 @@
-
-
-
+
+
+
@@ -17866,7 +18909,7 @@
-
+
@@ -17879,7 +18922,7 @@
-
+
@@ -17892,7 +18935,7 @@
-
+
@@ -18090,7 +19133,7 @@
-
+
@@ -18098,7 +19141,7 @@
-
+
@@ -18111,7 +19154,7 @@
-
+
@@ -18124,7 +19167,7 @@
-
+
@@ -18140,14 +19183,14 @@
-
+
-
+
@@ -18157,7 +19200,7 @@
-
+
@@ -18167,7 +19210,7 @@
-
+
@@ -18180,7 +19223,7 @@
-
+
@@ -18193,7 +19236,7 @@
-
+
@@ -18203,7 +19246,7 @@
-
+
@@ -18216,7 +19259,7 @@
-
+
@@ -18234,7 +19277,7 @@
-
+
@@ -18251,7 +19294,7 @@
-
+
@@ -18268,11 +19311,31 @@
-
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
-
+
@@ -18285,7 +19348,7 @@
-
+
@@ -18362,7 +19425,7 @@
-
+
@@ -18386,7 +19449,7 @@
-
+
@@ -18397,28 +19460,28 @@
-
+
-
+
-
+
-
+
@@ -18431,7 +19494,7 @@
-
+
@@ -18444,7 +19507,7 @@
-
+
@@ -18454,7 +19517,7 @@
-
+
@@ -18463,34 +19526,34 @@
-
-
+
+
-
+
-
+
-
+
-
+
@@ -18503,7 +19566,7 @@
-
+
@@ -18553,7 +19616,7 @@
-
+
@@ -18562,9 +19625,11 @@
-
+
+
+
-
+
@@ -18573,7 +19638,7 @@
-
+
@@ -18665,24 +19730,24 @@
-
+
-
-
-
-
+
+
+
+
-
+
@@ -18691,7 +19756,7 @@
-
+
@@ -18701,10 +19766,10 @@
-
+
-
+
@@ -18717,7 +19782,7 @@
-
+
@@ -18978,7 +20043,7 @@
-
+
@@ -18991,7 +20056,7 @@
-
+
@@ -19004,7 +20069,7 @@
-
+
@@ -19017,7 +20082,7 @@
-
+
@@ -19032,7 +20097,7 @@
-
+
@@ -19048,7 +20113,7 @@
-
+
@@ -19058,7 +20123,7 @@
-
+
@@ -19068,7 +20133,7 @@
-
+
@@ -19078,7 +20143,7 @@
-
+
@@ -19091,7 +20156,7 @@
-
+
@@ -19101,7 +20166,7 @@
-
+
@@ -19111,7 +20176,7 @@
-
+
@@ -19121,9 +20186,11 @@
-
+
+
+
-
+
@@ -19133,9 +20200,11 @@
-
+
+
+
-
+
@@ -19145,23 +20214,25 @@
-
+
+
+
-
+
-
+
-
+
@@ -19171,7 +20242,7 @@
-
+
@@ -19181,7 +20252,7 @@
-
+
@@ -19191,7 +20262,7 @@
-
+
@@ -19201,7 +20272,7 @@
-
+
@@ -19214,7 +20285,7 @@
-
+
@@ -19229,7 +20300,7 @@
-
+
@@ -19242,7 +20313,7 @@
-
+
@@ -19275,7 +20346,7 @@
-
+
@@ -19285,14 +20356,14 @@
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
@@ -19302,8 +20373,8 @@
-
-
+
+
@@ -19312,58 +20383,58 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
-
+
@@ -19376,7 +20447,7 @@
-
+
@@ -19398,21 +20469,23 @@
-
+
-
+
+
+
-
+
-
+
@@ -19425,7 +20498,7 @@
-
+
@@ -19447,21 +20520,23 @@
-
+
-
+
+
+
-
+
-
+
@@ -19474,7 +20549,7 @@
-
+
@@ -19499,7 +20574,7 @@
-
+
@@ -19512,7 +20587,7 @@
-
+
@@ -19522,7 +20597,7 @@
-
+
@@ -19532,7 +20607,7 @@
-
+
@@ -19545,7 +20620,7 @@
-
+
@@ -19567,7 +20642,7 @@
-
+
@@ -19577,14 +20652,14 @@
-
+
-
+
@@ -19593,127 +20668,127 @@
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
-
+
+
@@ -19723,32 +20798,32 @@
-
+
-
-
-
-
-
+
+
+
+
+
-
+
-
+
@@ -19756,52 +20831,52 @@
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
@@ -19814,7 +20889,7 @@
-
+
@@ -19906,7 +20981,8 @@
-
+
+
@@ -19914,7 +20990,7 @@
-
+
@@ -19924,7 +21000,7 @@
-
+
@@ -19934,11 +21010,16 @@
-
+
+
+
+
+
+
-
+
@@ -19951,7 +21032,7 @@
-
+
@@ -20004,7 +21085,7 @@
-
+
@@ -20013,7 +21094,7 @@
-
+
@@ -20026,7 +21107,7 @@
-
+
@@ -20121,7 +21202,7 @@
-
+
@@ -20131,7 +21212,7 @@
-
+
@@ -20147,9 +21228,27 @@
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
-
+
@@ -20168,14 +21267,14 @@
-
+
-
+
@@ -20185,14 +21284,14 @@
-
+
-
+
@@ -20204,7 +21303,7 @@
-
+
@@ -20217,7 +21316,7 @@
-
+
@@ -20240,7 +21339,7 @@
-
+
@@ -20250,7 +21349,7 @@
-
+
@@ -20260,14 +21359,14 @@
-
+
-
+
@@ -20280,7 +21379,7 @@
-
+
@@ -20293,7 +21392,7 @@
-
+
@@ -20306,7 +21405,7 @@
-
+
@@ -20335,7 +21434,7 @@
-
+
@@ -20360,21 +21459,21 @@
-
+
-
+
-
+
@@ -20399,7 +21498,7 @@
-
+
@@ -20428,7 +21527,7 @@
-
+
@@ -20457,7 +21556,7 @@
-
+
@@ -20468,7 +21567,7 @@
-
+
@@ -20481,7 +21580,7 @@
-
+
@@ -20494,7 +21593,7 @@
-
+
@@ -20546,10 +21645,14 @@
-
+
+
+
+
+
@@ -20586,16 +21689,19 @@
+
+
+
-
+
-
+
@@ -20603,7 +21709,7 @@
-
+
@@ -20611,7 +21717,7 @@
-
+
@@ -20619,7 +21725,7 @@
-
+
@@ -20650,7 +21756,7 @@
-
+
@@ -20663,58 +21769,51 @@
-
-
-
-
-
-
-
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
@@ -20733,7 +21832,7 @@
-
+
@@ -20750,7 +21849,7 @@
-
+
@@ -20767,7 +21866,7 @@
-
+
@@ -20784,18 +21883,18 @@
-
-
+
+
-
+
-
+
@@ -20805,7 +21904,7 @@
-
+
@@ -20815,7 +21914,7 @@
-
+
@@ -20825,7 +21924,7 @@
-
+
@@ -20835,7 +21934,7 @@
-
+
@@ -20845,7 +21944,7 @@
-
+
@@ -20855,7 +21954,7 @@
-
+
@@ -20865,7 +21964,7 @@
-
+
@@ -20875,7 +21974,7 @@
-
+
@@ -20885,7 +21984,7 @@
-
+
@@ -20895,7 +21994,7 @@
-
+
@@ -20905,7 +22004,7 @@
-
+
@@ -20915,7 +22014,7 @@
-
+
@@ -20925,7 +22024,7 @@
-
+
@@ -20935,82 +22034,95 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
-
+
-
+
-
-
-
-
+
+
+
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
-
+
@@ -21023,7 +22135,7 @@
-
+
@@ -21038,16 +22150,32 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
-
+
@@ -21056,11 +22184,14 @@
-
+
+
+
+
-
+
@@ -21073,7 +22204,7 @@
-
+
@@ -21102,7 +22233,7 @@
-
+
@@ -21111,11 +22242,14 @@
-
+
+
+
+
-
+
@@ -21128,7 +22262,7 @@
-
+
@@ -21165,15 +22299,21 @@
+
+
+
-
-
+
+
+
+
+
-
+
@@ -21181,7 +22321,7 @@
-
+
@@ -21189,165 +22329,165 @@
-
-
+
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
@@ -21360,7 +22500,7 @@
-
+
@@ -21390,6 +22530,9 @@
+
+
+
@@ -21401,7 +22544,7 @@
-
+
@@ -21410,25 +22553,28 @@
-
+
+
+
+
-
+
-
-
+
+
-
-
+
+
@@ -21438,7 +22584,7 @@
-
+
@@ -21448,9 +22594,21 @@
+
+
+
+
+
+
+
+
+
+
+
+
-
+
@@ -21463,7 +22621,7 @@
-
+
@@ -21484,7 +22642,7 @@
-
+
@@ -21494,7 +22652,7 @@
-
+
@@ -21507,7 +22665,7 @@
-
+
@@ -21530,7 +22688,7 @@
-
+
@@ -21541,7 +22699,7 @@
-
+
@@ -21551,7 +22709,7 @@
-
+
@@ -21564,7 +22722,7 @@
-
+
@@ -21638,1007 +22796,46 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
-
+
-
-
-
-
-
+
+
+
+
+
-
-
-
-
-
-
+
+
-
-
+
+
-
+
+
+
+
@@ -22647,296 +22844,318 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
-
+
-
-
-
-
-
+
+
+
-
-
-
-
-
-
+
+
-
-
+
+
-
+
+
+
+
-
-
-
-
+
-
+
-
-
+
+
+
+
-
-
+
+
-
-
-
-
-
-
+
+
-
+
-
-
+
+
+
-
-
-
-
-
+
-
-
-
-
-
+
-
-
+
+
+
-
-
-
-
-
-
+
+
-
-
-
-
-
-
+
+
-
+
+
+
+
-
+
-
-
+
+
+
+
-
-
-
-
-
-
+
+
-
-
-
-
-
-
+
+
-
+
-
-
+
+
+
-
-
-
-
-
+
-
-
-
-
-
+
-
-
+
+
+
-
-
-
-
-
-
+
+
-
-
-
-
-
-
+
+
-
-
-
-
-
-
+
+
-
+
+
+
+
-
-
-
-
+
+
+
+
+
+
+
+
+
+
-
+
-
-
+
+
+
+
-
-
-
-
-
-
+
+
-
-
-
-
-
-
+
+
-
-
-
-
-
-
+
+
-
+
-
-
+
+
+
-
-
-
-
-
+
-
-
-
-
-
-
-
-
-
+
-
-
+
+
-
-
-
-
-
-
+
+
-
-
-
-
-
-
+
+
-
+
+
+
+
-
-
-
-
+
+
+
-
+
-
-
+
+
-
-
-
-
-
-
+
+
-
-
+
+
-
+
+
+
+
@@ -22945,71 +23164,62 @@
+
+
+
-
+
-
-
-
-
-
+
+
-
-
-
-
-
-
+
+
-
+
-
+
+
+
+
-
-
+
+
-
+
-
-
-
-
-
+
-
+
-
+
-
-
+
+
-
+
-
-
-
-
-
+
-
+
-
-
-
-
+
+
+
+
@@ -23022,33 +23232,33 @@
-
+
-
-
-
-
+
+
+
+
-
+
-
+
-
-
+
+
@@ -23058,7 +23268,7 @@
-
+
@@ -23069,7 +23279,7 @@
-
+
@@ -23082,7 +23292,7 @@
-
+
@@ -23120,50 +23330,50 @@
-
+
-
+
-
+
-
-
-
-
+
+
+
+
-
+
-
+
-
+
@@ -23175,7 +23385,7 @@
-
+
@@ -23187,7 +23397,7 @@
-
+
@@ -23199,7 +23409,7 @@
-
+
@@ -23209,21 +23419,21 @@
-
+
-
+
-
+
@@ -23233,7 +23443,7 @@
-
+
@@ -23243,7 +23453,7 @@
-
+
@@ -23255,7 +23465,7 @@
-
+
@@ -23265,7 +23475,7 @@
-
+
@@ -23277,7 +23487,7 @@
-
+
@@ -23287,7 +23497,7 @@
-
+
@@ -23299,7 +23509,7 @@
-
+
@@ -23309,7 +23519,7 @@
-
+
@@ -23321,7 +23531,7 @@
-
+
@@ -23331,7 +23541,7 @@
-
+
@@ -23343,21 +23553,21 @@
-
+
-
+
-
+
@@ -23366,22 +23576,22 @@
-
-
+
+
-
+
-
+
@@ -23389,14 +23599,14 @@
-
+
-
+
@@ -23406,14 +23616,14 @@
-
+
-
+
@@ -23423,28 +23633,28 @@
-
+
-
+
-
+
-
+
@@ -23457,7 +23667,7 @@
-
+
@@ -23470,14 +23680,14 @@
-
+
-
+
@@ -23487,7 +23697,7 @@
-
+
@@ -23497,7 +23707,7 @@
-
+
@@ -23512,7 +23722,7 @@
-
+
@@ -23525,7 +23735,7 @@
-
+
@@ -23608,7 +23818,7 @@
-
+
@@ -23618,7 +23828,7 @@
-
+
@@ -23627,95 +23837,98 @@
-
+
+
+
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
@@ -23727,14 +23940,14 @@
-
+
-
+
@@ -23747,7 +23960,7 @@
-
+
@@ -23760,7 +23973,7 @@
-
+
@@ -23770,7 +23983,7 @@
-
+
@@ -23780,21 +23993,21 @@
-
+
-
+
-
+
@@ -23807,7 +24020,7 @@
-
+
@@ -23820,7 +24033,7 @@
-
+
@@ -23833,7 +24046,7 @@
-
+
@@ -23846,21 +24059,21 @@
-
+
-
+
-
+
@@ -23869,28 +24082,28 @@
-
+
-
+
-
+
-
+
@@ -23899,7 +24112,7 @@
-
+
@@ -23918,7 +24131,7 @@
-
+
@@ -23937,21 +24150,21 @@
-
+
-
+
-
+
@@ -23967,7 +24180,7 @@
-
+
@@ -23977,15 +24190,15 @@
-
+
-
-
+
+
@@ -23993,14 +24206,14 @@
-
+
-
+
@@ -24009,7 +24222,7 @@
-
+
@@ -24022,7 +24235,7 @@
-
+
@@ -24055,7 +24268,7 @@
-
+
@@ -24065,10 +24278,11 @@
+
-
+
@@ -24081,10 +24295,11 @@
+
-
+
@@ -24094,7 +24309,7 @@
-
+
@@ -24107,10 +24322,11 @@
+
-
+
@@ -24123,12 +24339,13 @@
+
-
+
@@ -24248,14 +24465,14 @@
-
+
-
+
@@ -24283,77 +24500,77 @@
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
@@ -24363,49 +24580,49 @@
-
+
-
+
-
+
-
+
-
+
-
+
-
+
@@ -24414,7 +24631,7 @@
-
+
@@ -24538,6 +24755,12 @@
+
+
+
+
+
+
@@ -24588,7 +24811,7 @@
-
+
@@ -24598,7 +24821,7 @@
-
+
@@ -24618,9 +24841,12 @@
-
+
+
+
+
-
+
@@ -24629,9 +24855,12 @@
-
+
+
+
+
-
+
@@ -24643,9 +24872,12 @@
-
+
+
+
+
-
+
@@ -24665,9 +24897,12 @@
-
+
+
+
+
-
+
@@ -24680,7 +24915,7 @@
-
+
@@ -24813,11 +25048,17 @@
+
+
+
+
+
+
-
+
@@ -24827,7 +25068,7 @@
-
+
@@ -24840,7 +25081,7 @@
-
+
@@ -24861,7 +25102,7 @@
-
+
@@ -24874,7 +25115,7 @@
-
+
@@ -24895,7 +25136,7 @@
-
+
@@ -24905,14 +25146,14 @@
-
+
-
+
@@ -24922,9 +25163,9 @@
-
-
-
+
+
+
@@ -24937,7 +25178,7 @@
-
+
@@ -24950,7 +25191,7 @@
-
+
@@ -24960,7 +25201,7 @@
-
+
@@ -24970,7 +25211,7 @@
-
+
@@ -24980,7 +25221,7 @@
-
+
@@ -24990,7 +25231,7 @@
-
+
@@ -25008,22 +25249,22 @@
-
+
-
-
+
+
-
+
@@ -25036,7 +25277,7 @@
-
+
@@ -25061,7 +25302,7 @@
-
+
@@ -25070,9 +25311,11 @@
-
+
+
+
-
+
@@ -25085,9 +25328,11 @@
-
+
+
+
-
+
@@ -25097,9 +25342,11 @@
-
+
+
+
-
+
@@ -25109,7 +25356,7 @@
-
+
@@ -25118,7 +25365,7 @@
-
+
@@ -25130,11 +25377,11 @@
-
-
-
-
-
+
+
+
+
+
@@ -25146,7 +25393,7 @@
-
+
@@ -25158,7 +25405,7 @@
-
+
@@ -25170,7 +25417,7 @@
-
+
@@ -25182,7 +25429,7 @@
-
+
@@ -25194,7 +25441,7 @@
-
+
@@ -25206,7 +25453,7 @@
-
+
@@ -25216,7 +25463,7 @@
-
+
@@ -25226,9 +25473,9 @@
-
-
-
+
+
+
@@ -25244,7 +25491,7 @@
-
+
@@ -25260,7 +25507,7 @@
-
+
@@ -25276,7 +25523,7 @@
-
+
@@ -25294,7 +25541,7 @@
-
+
@@ -25345,14 +25592,14 @@
-
+
-
+
@@ -25361,7 +25608,7 @@
-
+
@@ -25371,7 +25618,7 @@
-
+
@@ -25390,10 +25637,10 @@
-
+
-
+
@@ -25418,7 +25665,7 @@
-
+
@@ -25436,7 +25683,7 @@
-
+
@@ -25447,7 +25694,7 @@
-
+
@@ -25471,7 +25718,7 @@
-
+
@@ -25488,7 +25735,7 @@
-
+
@@ -25501,7 +25748,7 @@
-
+
@@ -25514,7 +25761,7 @@
-
+
@@ -25526,22 +25773,22 @@
-
+
-
+
-
-
+
+
@@ -25551,7 +25798,7 @@
-
+
@@ -25564,14 +25811,14 @@
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
@@ -25584,7 +25831,7 @@
-
+
@@ -25599,7 +25846,7 @@
-
+
@@ -25633,7 +25880,7 @@
-
+
@@ -25665,14 +25912,14 @@
-
+
-
+
@@ -25689,7 +25936,7 @@
-
+
@@ -25704,14 +25951,14 @@
-
+
-
+
@@ -25724,7 +25971,7 @@
-
+
@@ -25754,7 +26001,7 @@
-
+
@@ -25764,7 +26011,7 @@
-
+
@@ -25774,7 +26021,7 @@
-
+
@@ -25784,7 +26031,7 @@
-
+
@@ -25794,7 +26041,7 @@
-
+
@@ -25806,7 +26053,7 @@
-
+
@@ -25836,32 +26083,42 @@
-
+
-
+
-
+
+
+
+
+
+
-
+
-
+
+
+
+
+
+
-
+
@@ -25874,7 +26131,7 @@
-
+
@@ -25916,7 +26173,7 @@
-
+
@@ -25926,7 +26183,7 @@
-
+
@@ -25936,7 +26193,7 @@
-
+
@@ -25952,7 +26209,7 @@
-
+
@@ -25965,7 +26222,7 @@
-
+
@@ -25991,7 +26248,7 @@
-
+
@@ -26001,14 +26258,14 @@
-
+
-
+
@@ -26021,7 +26278,7 @@
-
+
@@ -26061,7 +26318,7 @@
-
+
@@ -26071,7 +26328,7 @@
-
+
@@ -26087,7 +26344,7 @@
-
+
@@ -26105,7 +26362,7 @@
-
+
@@ -26118,7 +26375,7 @@
-
+
@@ -26154,7 +26411,7 @@
-
+
@@ -26166,7 +26423,7 @@
-
+
@@ -26178,7 +26435,7 @@
-
+
@@ -26191,7 +26448,7 @@
-
+
@@ -26205,7 +26462,7 @@
-
+
@@ -26228,7 +26485,7 @@
-
+
@@ -26239,14 +26496,14 @@
-
+
-
+
@@ -26263,24 +26520,24 @@
-
+
-
+
-
-
-
-
+
+
+
+
@@ -26333,44 +26590,44 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
-
+
@@ -26391,7 +26648,7 @@
-
+
@@ -26415,7 +26672,7 @@
-
+
@@ -26437,7 +26694,7 @@
-
+
@@ -26504,7 +26761,7 @@
-
+
@@ -26553,7 +26810,7 @@
-
+
@@ -26567,7 +26824,7 @@
-
+
@@ -26583,7 +26840,7 @@
-
+
@@ -26596,7 +26853,7 @@
-
+
@@ -26609,7 +26866,7 @@
-
+
@@ -26620,7 +26877,7 @@
-
+
@@ -26631,7 +26888,7 @@
-
+
@@ -26643,8 +26900,33 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
-
+
@@ -26657,7 +26939,7 @@
-
+
@@ -26715,7 +26997,7 @@
-
+
@@ -26729,7 +27011,7 @@
-
+
@@ -26739,7 +27021,7 @@
-
+
@@ -26749,9 +27031,11 @@
-
+
+
+
-
+
@@ -26765,9 +27049,11 @@
-
+
+
+
-
+
@@ -26777,15 +27063,17 @@
-
+
+
+
-
+
-
+
@@ -26793,7 +27081,7 @@
-
+
@@ -26829,7 +27117,7 @@
-
+
@@ -26842,7 +27130,7 @@
-
+
@@ -26868,7 +27156,7 @@
-
+
@@ -26953,14 +27241,14 @@
-
+
-
+
@@ -26972,7 +27260,7 @@
-
+
@@ -26985,7 +27273,7 @@
-
+
@@ -27007,7 +27295,7 @@
-
+
@@ -27017,7 +27305,7 @@
-
+
@@ -27027,8 +27315,8 @@
-
-
+
+
@@ -27043,7 +27331,7 @@
-
+
@@ -27067,7 +27355,7 @@
-
+
@@ -27080,7 +27368,7 @@
-
+
@@ -27144,7 +27432,7 @@
-
+
@@ -27204,34 +27492,34 @@
-
+
-
-
+
+
-
+
-
+
-
+
-
+
@@ -27241,7 +27529,7 @@
-
+
@@ -27265,7 +27553,7 @@
-
+
@@ -27278,7 +27566,7 @@
-
+
@@ -27295,7 +27583,7 @@
-
+
@@ -27312,7 +27600,7 @@
-
+
@@ -27323,21 +27611,21 @@
-
+
-
+
-
+
@@ -27347,7 +27635,7 @@
-
+
@@ -27359,7 +27647,7 @@
-
+
@@ -27369,7 +27657,7 @@
-
+
@@ -27379,7 +27667,7 @@
-
+
@@ -27389,7 +27677,7 @@
-
+
@@ -27399,7 +27687,7 @@
-
+
@@ -27409,7 +27697,7 @@
-
+
@@ -27419,7 +27707,7 @@
-
+
@@ -27429,7 +27717,7 @@
-
+
@@ -27439,7 +27727,7 @@
-
+
@@ -27449,7 +27737,7 @@
-
+
@@ -27459,31 +27747,31 @@
-
-
+
+
-
+
-
-
-
+
+
+
-
+
@@ -27491,7 +27779,7 @@
-
+
@@ -27499,9 +27787,9 @@
-
-
-
+
+
+
@@ -27514,12 +27802,12 @@
-
+
-
+
@@ -27530,17 +27818,17 @@
-
+
-
+
-
+
@@ -27559,39 +27847,39 @@
-
+
-
+
-
+
-
+
-
+
-
+
-
+
@@ -27608,7 +27896,7 @@
-
+
@@ -27621,7 +27909,7 @@
-
+
@@ -27658,7 +27946,7 @@
-
+
@@ -27669,7 +27957,7 @@
-
+
@@ -27682,7 +27970,7 @@
-
+
@@ -27695,7 +27983,7 @@
-
+
@@ -27708,7 +27996,7 @@
-
+
@@ -27721,28 +28009,28 @@
-
+
-
+
-
+
-
+
@@ -27752,14 +28040,14 @@
-
+
-
+
@@ -27769,7 +28057,7 @@
-
+
@@ -27781,14 +28069,14 @@
-
+
-
+
@@ -27803,7 +28091,7 @@
-
+
@@ -27821,7 +28109,7 @@
-
+
@@ -27831,7 +28119,7 @@
-
+
@@ -27841,7 +28129,7 @@
-
+
@@ -27851,196 +28139,196 @@
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
@@ -28051,7 +28339,7 @@
-
+
@@ -28062,7 +28350,7 @@
-
+
@@ -28070,7 +28358,7 @@
-
+
@@ -28078,21 +28366,21 @@
-
+
-
+
-
+
@@ -28108,7 +28396,7 @@
-
+
@@ -28117,14 +28405,14 @@
-
+
-
+
@@ -28143,7 +28431,7 @@
-
+
@@ -28155,7 +28443,7 @@
-
+
@@ -28165,7 +28453,7 @@
-
+
@@ -28178,7 +28466,7 @@
-
+
@@ -28191,7 +28479,7 @@
-
+
@@ -28204,7 +28492,7 @@
-
+
@@ -28220,7 +28508,7 @@
-
+
@@ -28236,14 +28524,14 @@
-
+
-
+
@@ -28251,7 +28539,7 @@
-
+
@@ -28259,24 +28547,24 @@
-
+
-
+
-
-
-
-
+
+
+
+
@@ -28292,7 +28580,7 @@
-
+
@@ -28308,7 +28596,7 @@
-
+
@@ -28321,7 +28609,7 @@
-
+
@@ -28334,7 +28622,7 @@
-
+
@@ -28347,7 +28635,7 @@
-
+
@@ -28360,7 +28648,7 @@
-
+
@@ -28370,7 +28658,7 @@
-
+
@@ -28382,7 +28670,7 @@
-
+
@@ -28416,28 +28704,28 @@
-
+
-
+
-
+
-
+
@@ -28446,7 +28734,7 @@
-
+
@@ -28459,7 +28747,7 @@
-
+
@@ -28522,7 +28810,7 @@
-
+
@@ -28532,7 +28820,7 @@
-
+
@@ -28543,7 +28831,7 @@
-
+
@@ -28552,7 +28840,7 @@
-
+
@@ -28565,7 +28853,7 @@
-
+
@@ -28594,7 +28882,7 @@
-
+
@@ -28604,7 +28892,7 @@
-
+
@@ -28617,7 +28905,7 @@
-
+
@@ -28646,7 +28934,7 @@
-
+
@@ -28659,13 +28947,13 @@
-
-
+
+
-
+
@@ -28682,7 +28970,7 @@
-
+
@@ -28701,28 +28989,28 @@
-
+
-
+
-
+
-
+
@@ -28735,7 +29023,7 @@
-
+
@@ -28782,6 +29070,9 @@
+
+
+
@@ -28823,21 +29114,21 @@
-
+
-
+
-
+
@@ -28850,7 +29141,7 @@
-
+
@@ -28862,7 +29153,7 @@
-
+
@@ -28875,7 +29166,7 @@
-
+
@@ -28914,7 +29205,7 @@
-
+
@@ -28927,7 +29218,7 @@
-
+
@@ -28951,6 +29242,9 @@
+
+
+
@@ -29002,7 +29296,7 @@
-
+
@@ -29017,7 +29311,7 @@
-
+
@@ -29038,7 +29332,7 @@
-
+
@@ -29048,7 +29342,7 @@
-
+
@@ -29058,7 +29352,7 @@
-
+
@@ -29068,7 +29362,7 @@
-
+
@@ -29078,7 +29372,7 @@
-
+
@@ -29088,7 +29382,7 @@
-
+
@@ -29130,7 +29424,7 @@
-
+
@@ -29174,7 +29468,7 @@
-
+
@@ -29198,14 +29492,14 @@
-
+
-
+
@@ -29247,7 +29541,7 @@
-
+
@@ -29291,7 +29585,7 @@
-
+
@@ -29315,7 +29609,7 @@
-
+
@@ -29357,7 +29651,7 @@
-
+
@@ -29401,7 +29695,7 @@
-
+
@@ -29425,7 +29719,7 @@
-
+
@@ -29467,7 +29761,7 @@
-
+
@@ -29511,7 +29805,7 @@
-
+
@@ -29535,14 +29829,14 @@
-
+
-
+
@@ -29584,7 +29878,7 @@
-
+
@@ -29628,7 +29922,7 @@
-
+
@@ -29652,7 +29946,7 @@
-
+
@@ -29694,7 +29988,7 @@
-
+
@@ -29738,7 +30032,7 @@
-
+
@@ -29762,7 +30056,7 @@
-
+
@@ -29804,7 +30098,7 @@
-
+
@@ -29848,7 +30142,7 @@
-
+
@@ -29872,7 +30166,7 @@
-
+
@@ -29914,7 +30208,7 @@
-
+
@@ -29958,7 +30252,7 @@
-
+
@@ -29982,7 +30276,7 @@
-
+
@@ -30024,7 +30318,7 @@
-
+
@@ -30068,7 +30362,7 @@
-
+
@@ -30092,7 +30386,7 @@
-
+
@@ -30134,7 +30428,7 @@
-
+
@@ -30178,7 +30472,7 @@
-
+
@@ -30202,21 +30496,138 @@
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
-
+
+
+
+
+
+
+
+
@@ -30258,7 +30669,7 @@
-
+
@@ -30302,7 +30713,7 @@
-
+
@@ -30326,159 +30737,56 @@
-
+
-
+
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
-
+
-
+
-
+
-
+
-
+
@@ -30516,21 +30824,21 @@
-
+
-
+
-
+
@@ -30567,7 +30875,7 @@
-
+
@@ -30604,7 +30912,7 @@
-
+
@@ -30641,7 +30949,7 @@
-
+
@@ -30678,7 +30986,7 @@
-
+
@@ -30715,7 +31023,7 @@
-
+
@@ -30752,7 +31060,7 @@
-
+
@@ -30789,7 +31097,7 @@
-
+
@@ -30826,7 +31134,7 @@
-
+
@@ -30837,7 +31145,7 @@
-
+
@@ -30880,7 +31188,7 @@
-
+
@@ -30920,7 +31228,7 @@
-
+
@@ -30960,14 +31268,14 @@
-
+
-
+
@@ -31014,7 +31322,7 @@
-
+
@@ -31057,7 +31365,7 @@
-
+
@@ -31073,7 +31381,7 @@
-
+
@@ -31120,7 +31428,7 @@
-
+
@@ -31163,7 +31471,7 @@
-
+
@@ -31179,14 +31487,14 @@
-
+
-
+
@@ -31199,7 +31507,7 @@
-
+
@@ -31212,7 +31520,7 @@
-
+
@@ -31222,7 +31530,7 @@
-
+
@@ -31237,8 +31545,8 @@
-
-
+
+
@@ -31246,7 +31554,7 @@
-
+
@@ -31254,7 +31562,7 @@
-
+
@@ -31262,7 +31570,7 @@
-
+
@@ -31270,7 +31578,7 @@
-
+
@@ -31278,7 +31586,7 @@
-
+
@@ -31293,7 +31601,7 @@
-
+
@@ -31308,7 +31616,7 @@
-
+
@@ -31322,7 +31630,7 @@
-
+
@@ -31330,21 +31638,21 @@
-
+
-
+
-
+
@@ -31354,21 +31662,21 @@
-
+
-
+
-
+
@@ -31413,14 +31721,14 @@
-
+
-
+
@@ -31465,14 +31773,14 @@
-
+
-
+
@@ -31517,21 +31825,21 @@
-
+
-
+
-
+
@@ -31543,14 +31851,14 @@
-
+
-
+
@@ -31559,7 +31867,7 @@
-
+
@@ -31570,7 +31878,7 @@
-
+
@@ -31581,7 +31889,7 @@
-
+
@@ -31592,7 +31900,7 @@
-
+
@@ -31603,21 +31911,21 @@
-
+
-
+
-
-
-
+
+
+
@@ -31627,10 +31935,10 @@
-
+
-
+
@@ -31680,7 +31988,7 @@
-
+
@@ -31690,14 +31998,14 @@
-
+
-
+
@@ -31707,7 +32015,7 @@
-
+
@@ -31719,12 +32027,12 @@
-
+
-
+
@@ -31733,7 +32041,7 @@
-
+
@@ -31741,7 +32049,7 @@
-
+
@@ -31749,7 +32057,7 @@
-
+
@@ -31760,7 +32068,7 @@
-
+
@@ -31770,14 +32078,14 @@
-
+
-
+
@@ -31787,7 +32095,7 @@
-
+
@@ -31800,7 +32108,7 @@
-
+
@@ -31813,7 +32121,7 @@
-
+
@@ -31822,7 +32130,7 @@
-
+
@@ -31831,7 +32139,7 @@
-
+
@@ -31840,7 +32148,7 @@
-
+
@@ -31849,8 +32157,8 @@
-
-
+
+
@@ -31863,7 +32171,7 @@
-
+
@@ -31876,28 +32184,28 @@
-
+
-
+
-
+
-
+
@@ -31909,7 +32217,7 @@
-
+
@@ -31921,7 +32229,7 @@
-
+
@@ -31936,7 +32244,7 @@
-
+
@@ -31951,7 +32259,7 @@
-
+
@@ -31964,7 +32272,7 @@
-
+
@@ -31989,7 +32297,7 @@
-
+
@@ -32011,21 +32319,21 @@
-
+
-
+
-
+
@@ -32038,7 +32346,7 @@
-
+
@@ -32082,7 +32390,7 @@
-
+
@@ -32098,7 +32406,7 @@
-
+
@@ -32106,14 +32414,14 @@
-
+
-
+
@@ -32121,7 +32429,7 @@
-
+
@@ -32137,7 +32445,7 @@
-
+
@@ -32153,7 +32461,7 @@
-
+
@@ -32166,7 +32474,7 @@
-
+
@@ -32179,7 +32487,7 @@
-
+
@@ -32192,7 +32500,7 @@
-
+
@@ -32205,7 +32513,7 @@
-
+
@@ -32218,7 +32526,7 @@
-
+
@@ -32231,7 +32539,7 @@
-
+
@@ -32244,14 +32552,14 @@
-
+
-
+
@@ -32264,7 +32572,7 @@
-
+
@@ -32277,14 +32585,14 @@
-
+
-
+
@@ -32294,7 +32602,7 @@
-
+
@@ -32304,7 +32612,7 @@
-
+
@@ -32380,9 +32688,9 @@
-
-
-
+
+
+
@@ -32450,7 +32758,7 @@
-
+
@@ -32459,7 +32767,7 @@
-
+
@@ -32472,7 +32780,7 @@
-
+
@@ -32521,6 +32829,7 @@
+
@@ -32677,7 +32986,7 @@
-
+
@@ -32880,7 +33189,7 @@
-
+
@@ -32893,7 +33202,7 @@
-
+
@@ -32904,7 +33213,7 @@
-
+
@@ -32915,9 +33224,11 @@
-
+
+
+
-
+
@@ -32931,9 +33242,11 @@
-
+
+
+
-
+
@@ -32949,9 +33262,11 @@
-
+
+
+
-
+
@@ -32961,9 +33276,11 @@
-
+
+
+
-
+
@@ -32973,9 +33290,11 @@
-
+
+
+
-
+
@@ -32988,9 +33307,11 @@
-
+
+
+
-
+
@@ -33008,7 +33329,7 @@
-
+
@@ -33018,7 +33339,7 @@
-
+
@@ -33064,7 +33385,7 @@
-
+
@@ -33092,7 +33413,7 @@
-
+
@@ -33138,7 +33459,7 @@
-
+
@@ -33187,7 +33508,7 @@
-
+
@@ -33197,7 +33518,7 @@
-
+
@@ -33249,8 +33570,8 @@
-
-
+
+
@@ -33260,7 +33581,7 @@
-
+
@@ -33270,7 +33591,7 @@
-
+
@@ -33279,7 +33600,7 @@
-
+
@@ -33296,7 +33617,7 @@
-
+
@@ -33308,7 +33629,7 @@
-
+
@@ -33321,7 +33642,7 @@
-
+
@@ -33337,7 +33658,7 @@
-
+
@@ -33349,7 +33670,7 @@
-
+
@@ -33362,7 +33683,7 @@
-
+
@@ -33370,15 +33691,15 @@
-
+
-
+
-
+
@@ -33401,7 +33722,7 @@
-
+
@@ -33409,7 +33730,7 @@
-
+
@@ -33420,7 +33741,7 @@
-
+
@@ -33433,7 +33754,7 @@
-
+
@@ -33513,7 +33834,7 @@
-
+
@@ -33524,7 +33845,7 @@
-
+
@@ -33532,7 +33853,7 @@
-
+
@@ -33545,7 +33866,7 @@
-
+
@@ -33558,7 +33879,7 @@
-
+
@@ -33581,7 +33902,7 @@
-
+
@@ -33591,7 +33912,7 @@
-
+
@@ -33604,8 +33925,8 @@
-
-
+
+
@@ -33618,15 +33939,23 @@
-
+
-
+
+
+
+
+
+
+
+
+
-
+
@@ -33639,7 +33968,7 @@
-
+
@@ -33751,6 +34080,7 @@
+
@@ -33844,6 +34174,11 @@
+
+
+
+
+
@@ -33884,9 +34219,6 @@
-
-
-
@@ -33923,10 +34255,18 @@
+
+
+
+
+
+
+
+
-
+
@@ -33936,7 +34276,7 @@
-
+
@@ -33949,6 +34289,7 @@
+
@@ -33963,7 +34304,7 @@
-
+
@@ -33979,6 +34320,7 @@
+
@@ -33993,7 +34335,7 @@
-
+
@@ -34020,6 +34362,7 @@
+
@@ -34034,7 +34377,7 @@
-
+
@@ -34066,6 +34409,7 @@
+
@@ -34080,7 +34424,7 @@
-
+
@@ -34112,28 +34456,28 @@
-
+
-
+
-
+
-
+
@@ -34143,22 +34487,22 @@
-
+
-
-
+
+
-
+
@@ -34172,7 +34516,7 @@
-
+
@@ -34186,7 +34530,7 @@
-
+
@@ -34200,30 +34544,30 @@
-
+
-
-
-
+
+
+
-
+
-
+
@@ -34233,7 +34577,7 @@
-
+
@@ -34243,7 +34587,7 @@
-
+
@@ -34253,7 +34597,7 @@
-
+
@@ -34285,7 +34629,7 @@
-
+
@@ -34317,7 +34661,7 @@
-
+
@@ -34330,7 +34674,7 @@
-
+
@@ -34347,8 +34691,8 @@
-
-
+
+
@@ -34356,7 +34700,7 @@
-
+
@@ -34364,18 +34708,18 @@
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
-
+
@@ -34388,7 +34732,7 @@
-
+
@@ -34410,7 +34754,7 @@
-
+
@@ -34422,7 +34766,7 @@
-
+
@@ -34435,7 +34779,7 @@
-
+
@@ -34447,9 +34791,14 @@
-
+
+
+
+
+
+
-
+
@@ -34458,7 +34807,7 @@
-
+
@@ -34468,7 +34817,7 @@
-
+
@@ -34483,7 +34832,7 @@
-
+
@@ -34495,7 +34844,7 @@
-
+
@@ -34509,84 +34858,84 @@
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
@@ -34594,7 +34943,7 @@
-
+
@@ -34602,7 +34951,7 @@
-
+
@@ -34610,7 +34959,7 @@
-
+
@@ -34618,26 +34967,26 @@
-
+
-
+
-
+
-
+
@@ -34650,7 +34999,7 @@
-
+
@@ -34663,7 +35012,7 @@
-
+
@@ -34673,7 +35022,7 @@
-
+
@@ -34683,7 +35032,7 @@
-
+
@@ -34693,14 +35042,14 @@
-
+
-
+
@@ -34710,14 +35059,14 @@
-
+
-
+
@@ -34727,14 +35076,14 @@
-
+
-
+
@@ -34744,14 +35093,14 @@
-
+
-
+
@@ -34761,7 +35110,7 @@
-
+
@@ -34770,7 +35119,7 @@
-
+
@@ -34783,7 +35132,7 @@
-
+
@@ -34826,16 +35175,19 @@
-
+
-
+
+
+
+
-
+
@@ -34844,7 +35196,7 @@
-
+
@@ -34857,7 +35209,7 @@
-
+
@@ -34890,7 +35242,7 @@
-
+
@@ -34900,7 +35252,7 @@
-
+
@@ -34910,7 +35262,7 @@
-
+
@@ -34920,14 +35272,14 @@
-
+
-
+
@@ -34936,7 +35288,7 @@
-
+
@@ -34944,7 +35296,7 @@
-
+
@@ -34952,7 +35304,7 @@
-
+
@@ -34960,7 +35312,7 @@
-
+
@@ -34968,27 +35320,27 @@
-
+
-
+
-
+
-
+
-
+
@@ -34996,7 +35348,7 @@
-
+
@@ -35004,7 +35356,7 @@
-
+
@@ -35018,7 +35370,7 @@
-
+
@@ -35032,7 +35384,7 @@
-
+
@@ -35120,7 +35472,7 @@
-
+
@@ -35130,9 +35482,11 @@
-
+
+
+
-
+
@@ -35147,7 +35501,7 @@
-
+
@@ -35157,9 +35511,11 @@
-
+
+
+
-
+
@@ -35169,9 +35525,11 @@
-
+
+
+
-
+
@@ -35181,14 +35539,14 @@
-
+
-
+
@@ -35198,7 +35556,7 @@
-
+
@@ -35208,7 +35566,7 @@
-
+
@@ -35220,7 +35578,7 @@
-
+
@@ -35229,7 +35587,7 @@
-
+
@@ -35239,7 +35597,7 @@
-
+
@@ -35248,7 +35606,7 @@
-
+
@@ -35266,7 +35624,7 @@
-
+
@@ -35278,7 +35636,7 @@
-
+
@@ -35291,7 +35649,7 @@
-
+
@@ -35325,12 +35683,12 @@
-
+
-
+
@@ -35340,7 +35698,7 @@
-
+
@@ -35350,8 +35708,8 @@
-
-
+
+
@@ -35363,64 +35721,64 @@
-
+
-
-
+
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
@@ -35429,70 +35787,70 @@
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
@@ -35501,11 +35859,12 @@
-
-
-
-
-
+
+
+
+
+
+
@@ -35519,7 +35878,7 @@
-
+
@@ -35533,24 +35892,29 @@
-
-
-
+
+
+
+
+
+
+
+
-
-
-
-
-
-
+
+
+
+
+
+
-
+
@@ -35563,7 +35927,7 @@
-
+
@@ -35579,7 +35943,7 @@
-
+
@@ -35595,7 +35959,7 @@
-
+
@@ -35611,7 +35975,7 @@
-
+
@@ -35624,7 +35988,7 @@
-
+
@@ -35637,7 +36001,7 @@
-
+
@@ -35647,14 +36011,14 @@
-
+
-
+
@@ -35664,7 +36028,7 @@
-
+
@@ -35678,7 +36042,7 @@
-
+
@@ -35689,7 +36053,7 @@
-
+
@@ -35702,7 +36066,7 @@
-
+
@@ -35715,29 +36079,29 @@
-
+
-
+
-
-
+
+
-
+
@@ -35745,8 +36109,46 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
-
+
@@ -35762,7 +36164,7 @@
-
+
@@ -35850,7 +36252,7 @@
-
+
@@ -35863,9 +36265,11 @@
-
+
+
+
-
+
@@ -35883,7 +36287,7 @@
-
+
@@ -35893,9 +36297,11 @@
-
+
+
+
-
+
@@ -35905,9 +36311,11 @@
-
+
+
+
-
+
@@ -35917,7 +36325,7 @@
-
+
@@ -35927,7 +36335,7 @@
-
+
@@ -35937,21 +36345,21 @@
-
+
-
+
-
+
@@ -35961,7 +36369,7 @@
-
+
@@ -35971,7 +36379,7 @@
-
+
@@ -35980,31 +36388,31 @@
-
+
-
-
+
+
-
+
-
+
-
+
@@ -36012,7 +36420,7 @@
-
+
@@ -36021,7 +36429,7 @@
-
+
@@ -36031,7 +36439,7 @@
-
+
@@ -36042,7 +36450,7 @@
-
+
@@ -36054,7 +36462,7 @@
-
+
@@ -36067,7 +36475,7 @@
-
+
@@ -36081,14 +36489,14 @@
-
+
-
+
@@ -36098,7 +36506,7 @@
-
+
@@ -36111,7 +36519,7 @@
-
+
@@ -36127,7 +36535,7 @@
-
+
@@ -36146,8 +36554,8 @@
-
-
+
+
@@ -36160,7 +36568,7 @@
-
+
@@ -36173,7 +36581,7 @@
-
+
@@ -36183,7 +36591,7 @@
-
+
@@ -36193,19 +36601,19 @@
-
+
-
+
-
+
@@ -36215,7 +36623,7 @@
-
+
@@ -36225,19 +36633,19 @@
-
+
-
+
-
+
@@ -36247,7 +36655,7 @@
-
+
@@ -36257,7 +36665,7 @@
-
+
@@ -36267,7 +36675,7 @@
-
+
@@ -36277,25 +36685,25 @@
-
+
-
+
-
+
-
+
@@ -36303,7 +36711,7 @@
-
+
@@ -36311,7 +36719,7 @@
-
+
@@ -36319,7 +36727,7 @@
-
+
@@ -36327,14 +36735,14 @@
-
+
-
+
@@ -36342,7 +36750,7 @@
-
+
@@ -36350,28 +36758,28 @@
-
+
-
+
-
+
-
+
@@ -36379,7 +36787,7 @@
-
+
@@ -36387,7 +36795,7 @@
-
+
@@ -36395,7 +36803,7 @@
-
+
@@ -36403,14 +36811,14 @@
-
+
-
+
@@ -36423,7 +36831,7 @@
-
+
@@ -36436,7 +36844,7 @@
-
+
@@ -36449,7 +36857,7 @@
-
+
@@ -36462,7 +36870,7 @@
-
+
@@ -36472,7 +36880,7 @@
-
+
@@ -36482,28 +36890,28 @@
-
+
-
+
-
+
-
+
@@ -36516,7 +36924,7 @@
-
+
@@ -36529,7 +36937,7 @@
-
+
@@ -36538,7 +36946,7 @@
-
+
@@ -36547,7 +36955,7 @@
-
+
@@ -36559,7 +36967,7 @@
-
+
@@ -36570,7 +36978,7 @@
-
+
@@ -36581,7 +36989,7 @@
-
+
@@ -36589,7 +36997,7 @@
-
+
@@ -36597,11 +37005,13 @@
-
+
-
+
+
+
-
+
@@ -36611,11 +37021,13 @@
-
+
-
+
+
+
-
+
@@ -36625,35 +37037,39 @@
-
+
-
+
+
+
-
+
-
+
-
+
+
+
-
+
-
+
-
+
-
+
@@ -36663,13 +37079,13 @@
-
+
-
+
-
+
@@ -36679,33 +37095,33 @@
-
+
-
+
-
+
-
+
-
+
-
+
-
+
@@ -36713,9 +37129,9 @@
-
-
-
+
+
+
@@ -36723,7 +37139,7 @@
-
+
@@ -36731,22 +37147,22 @@
-
+
-
+
-
-
+
+
@@ -36754,7 +37170,7 @@
-
+
@@ -36762,7 +37178,7 @@
-
+
@@ -36770,17 +37186,17 @@
-
-
-
-
+
+
+
+
-
+
@@ -36796,7 +37212,7 @@
-
+
@@ -36812,21 +37228,21 @@
-
+
-
+
-
+
@@ -36842,7 +37258,7 @@
-
+
@@ -36858,7 +37274,7 @@
-
+
@@ -36871,7 +37287,7 @@
-
+
@@ -36884,7 +37300,7 @@
-
+
@@ -36900,7 +37316,7 @@
-
+
@@ -36916,7 +37332,7 @@
-
+
@@ -36929,7 +37345,7 @@
-
+
@@ -36942,7 +37358,7 @@
-
+
@@ -36952,7 +37368,7 @@
-
+
@@ -36962,7 +37378,7 @@
-
+
@@ -36974,7 +37390,7 @@
-
+
@@ -36986,7 +37402,7 @@
-
+
@@ -36994,7 +37410,7 @@
-
+
@@ -37002,7 +37418,7 @@
-
+
@@ -37010,11 +37426,13 @@
-
+
-
+
+
+
-
+
@@ -37024,11 +37442,13 @@
-
+
-
+
+
+
-
+
@@ -37038,18 +37458,20 @@
-
+
-
+
+
+
-
+
-
+
@@ -37057,8 +37479,8 @@
-
-
+
+
@@ -37070,7 +37492,7 @@
-
+
@@ -37082,7 +37504,7 @@
-
+
@@ -37094,7 +37516,7 @@
-
+
@@ -37106,7 +37528,7 @@
-
+
@@ -37118,7 +37540,7 @@
-
+
@@ -37130,7 +37552,7 @@
-
+
@@ -37142,7 +37564,7 @@
-
+
@@ -37154,7 +37576,7 @@
-
+
@@ -37166,7 +37588,7 @@
-
+
@@ -37178,7 +37600,7 @@
-
+
@@ -37188,7 +37610,7 @@
-
+
@@ -37198,7 +37620,7 @@
-
+
@@ -37207,7 +37629,7 @@
-
+
@@ -37216,9 +37638,9 @@
-
-
-
+
+
+
@@ -37230,7 +37652,7 @@
-
+
@@ -37243,14 +37665,14 @@
-
+
-
+
@@ -37262,22 +37684,22 @@
-
-
+
+
-
+
-
+
@@ -37287,7 +37709,7 @@
-
+
@@ -37297,7 +37719,7 @@
-
+
@@ -37310,7 +37732,7 @@
-
+
@@ -37318,7 +37740,7 @@
-
+
@@ -37329,7 +37751,7 @@
-
+
@@ -37343,7 +37765,7 @@
-
+
@@ -37360,7 +37782,7 @@
-
+
@@ -37380,7 +37802,7 @@
-
+
@@ -37403,7 +37825,7 @@
-
+
@@ -37429,7 +37851,7 @@
-
+
@@ -37437,7 +37859,7 @@
-
+
@@ -37448,7 +37870,7 @@
-
+
@@ -37462,7 +37884,7 @@
-
+
@@ -37479,7 +37901,7 @@
-
+
@@ -37499,7 +37921,7 @@
-
+
@@ -37522,7 +37944,7 @@
-
+
@@ -37548,24 +37970,24 @@
-
+
-
+
-
+
-
+