author     Context Git Mirror Bot <phg42.2a@gmail.com>    2014-09-25 19:15:03 +0200
committer  Context Git Mirror Bot <phg42.2a@gmail.com>    2014-09-25 19:15:03 +0200
commit     f7ecbf1b2c879f004c9276d5cec634814d78b576 (patch)
tree       1b240cb2be3e9e3699741ffb7aae5bb88f8ff372 /tex
parent     6f8440fd75c76e40620c2110ac445ab92635460c (diff)
download   context-f7ecbf1b2c879f004c9276d5cec634814d78b576.tar.gz
2014-09-25 19:00:00
Diffstat (limited to 'tex')
-rw-r--r--  tex/context/base/back-exp.lua                        781
-rw-r--r--  tex/context/base/back-exp.mkiv                         6
-rw-r--r--  tex/context/base/cont-new.mkiv                         2
-rw-r--r--  tex/context/base/context-version.pdf                 bin  4385 -> 4390 bytes
-rw-r--r--  tex/context/base/context.mkiv                          3
-rw-r--r--  tex/context/base/data-res.lua                          3
-rw-r--r--  tex/context/base/data-sch.lua                         14
-rw-r--r--  tex/context/base/export-example.css                   24
-rw-r--r--  tex/context/base/grph-inc.lua                        103
-rw-r--r--  tex/context/base/lang-hyp.lua                        663
-rw-r--r--  tex/context/base/lang-hyp.mkiv                       109
-rw-r--r--  tex/context/base/math-fbk.lua                         42
-rw-r--r--  tex/context/base/math-ini.lua                         46
-rw-r--r--  tex/context/base/math-ini.mkiv                         7
-rw-r--r--  tex/context/base/math-int.mkiv                         2
-rw-r--r--  tex/context/base/math-stc.mkvi                       277
-rw-r--r--  tex/context/base/math-tag.lua                        546
-rw-r--r--  tex/context/base/mult-def.mkiv                         6
-rw-r--r--  tex/context/base/node-ltp.lua                          6
-rw-r--r--  tex/context/base/spac-ver.lua                         27
-rw-r--r--  tex/context/base/spac-ver.mkiv                         3
-rw-r--r--  tex/context/base/status-files.pdf                    bin  24768 -> 24752 bytes
-rw-r--r--  tex/context/base/status-lua.pdf                      bin  324988 -> 325239 bytes
-rw-r--r--  tex/context/base/status-mkiv.lua                      12
-rw-r--r--  tex/context/base/strc-tag.lua                         10
-rw-r--r--  tex/context/base/strc-tag.mkiv                         4
-rw-r--r--  tex/context/base/typo-mar.lua                          9
-rw-r--r--  tex/context/base/x-math-svg.lua                      162
-rw-r--r--  tex/context/base/x-mathml.mkiv                       266
-rw-r--r--  tex/generic/context/luatex/luatex-fonts-merged.lua     2
30 files changed, 2499 insertions, 636 deletions
diff --git a/tex/context/base/back-exp.lua b/tex/context/base/back-exp.lua
index 74640ad08..e64b7b77c 100644
--- a/tex/context/base/back-exp.lua
+++ b/tex/context/base/back-exp.lua
@@ -28,6 +28,10 @@ if not modules then modules = { } end modules ['back-exp'] = {
-- todo: move critical formatters out of functions
-- todo: delay loading (apart from basic tag stuff)
+-- problem : too many local variables
+
+-- check setting __i__
+
local next, type, tonumber = next, type, tonumber
local format, concat, sub, gsub = string.format, table.concat, string.sub, string.gsub
local validstring = string.valid
@@ -42,6 +46,7 @@ local replacetemplate = utilities.templates.replace
local trace_export = false trackers.register ("export.trace", function(v) trace_export = v end)
local trace_spacing = false trackers.register ("export.trace.spacing", function(v) trace_spacing = v end)
+
local less_state = false directives.register("export.lessstate", function(v) less_state = v end)
local show_comment = true directives.register("export.comment", function(v) show_comment = v end)
@@ -233,30 +238,35 @@ local namespaced = {
}
local namespaces = {
- msubsup = "m",
- msub = "m",
- msup = "m",
- mn = "m",
- mi = "m",
- ms = "m",
- mo = "m",
- mtext = "m",
- mrow = "m",
- mfrac = "m",
- mroot = "m",
- msqrt = "m",
- munderover = "m",
- munder = "m",
- mover = "m",
- merror = "m",
- math = "m",
- mrow = "m",
- mtable = "m",
- mtr = "m",
- mtd = "m",
- mfenced = "m",
- maction = "m",
- mspace = "m",
+ msubsup = "m",
+ msub = "m",
+ msup = "m",
+ mn = "m",
+ mi = "m",
+ ms = "m",
+ mo = "m",
+ mtext = "m",
+ mrow = "m",
+ mfrac = "m",
+ mroot = "m",
+ msqrt = "m",
+ munderover = "m",
+ munder = "m",
+ mover = "m",
+ merror = "m",
+ math = "m",
+ mrow = "m",
+ mtable = "m",
+ mtr = "m",
+ mtd = "m",
+ mfenced = "m",
+ maction = "m",
+ mspace = "m",
+ -- only when testing
+ mstacker = "m",
+ mstackertop = "m",
+ mstackermid = "m",
+ mstackerbot = "m",
}
setmetatableindex(namespaced, function(t,k)
@@ -356,8 +366,8 @@ local styletemplate = [[
color : %color% ;
}]]
- function wrapups.allusedstyles(xmlfile)
- local result = { formatters["/* %s for file %s */"]("styles",xmlfile) }
+ function wrapups.allusedstyles(basename)
+ local result = { formatters["/* %s for file %s */"]("styles",basename) }
--
local bodyfont = finetuning.bodyfont
local width = finetuning.width
@@ -439,11 +449,10 @@ local imagetemplate = [[
end
end
- local f_images = formatters["/* %s for file %s */"]
local collected = { }
- function wrapups.allusedimages(xmlfile)
- local result = { f_images("images",xmlfile) }
+ function wrapups.allusedimages(basename)
+ local result = { formatters["/* %s for file %s */"]("images",basename) }
for element, details in sortedhash(usedimages) do
for detail, data in sortedhash(details) do
local name = data.name
@@ -1053,8 +1062,44 @@ do
element = "mtext",
data = { content = "" },
nature = "inline",
+ comment = "dummy nucleus"
}
+ local function accentchar(d)
+ for i=1,3 do
+ d = d.data
+ if not d then
+ return
+ end
+ d = d[1]
+ if not d then
+ return
+ end
+ local tg = d.tg
+ if tg == "mover" then
+ local p = properties[d.fulltag]
+ local t = p.top
+ if t then
+ d = d.data[1]
+ local d1 = d.data[1]
+ d1.content = utfchar(t)
+ d.data = { d1 }
+ return d
+ end
+ elseif tg == "munder" then
+ local p = properties[d.fulltag]
+ local b = p.bottom
+ if b then
+ d = d.data[1]
+ local d1 = d.data[1]
+ d1.content = utfchar(b)
+ d.data = { d1 }
+ return d
+ end
+ end
+ end
+ end
+
local function checkmath(root) -- we can provide utf.toentities as an option
local data = root.data
if data then
@@ -1093,33 +1138,13 @@ do
-- data[1] = dummy_nucleus
-- end
elseif roottg == "mfenced" then
- local new, n = { }, 0
- local attributes = { }
- root.attributes = attributes
- for i=1,ndata do
- local di = data[i]
- if not di then
- -- weird
- elseif di.content then
- n = n + 1
- new[n] = di
- else
- local tg = di.tg
- if tg == "mleft" then
- attributes.left = tostring(di.data[1].data[1].content)
- elseif tg == "mmiddle" then
- attributes.middle = tostring(di.data[1].data[1].content)
- elseif tg == "mright" then
- attributes.right = tostring(di.data[1].data[1].content)
- else
- n = n + 1
- di.__i__ = n
- new[n] = di
- end
- end
- end
- root.data = new
- ndata = n
+ local p = properties[root.fulltag]
+ local l, m, r = p.left, p.middle, p.right
+ root.attributes = {
+ left = l and utfchar(l),
+ middle = m and utfchar(m),
+ right = r and utfchar(r),
+ }
end
if ndata == 0 then
return
@@ -1156,9 +1181,122 @@ do
di.skip = "comment"
checkmath(di)
i = i + 1
- elseif tg == "mover" or tg == "munder" or tg == "munderover" then
+ elseif tg == "mover" then
+ if detail == "accent" then
+ local p = properties[di.fulltag]
+ local t = p.top
+ local d = di.data
+ -- todo: accent = "false" (for scripts like limits)
+ di.attributes = {
+ accent = "true",
+ }
+ -- todo: p.topfixed
+ di.detail = nil
+ if t then
+ -- mover
+ d[1].data[1].content = utfchar(t)
+ di.data = { d[2], d[1] }
+ end
+ else
+ -- can't happen
+ end
+ checkmath(di)
+ i = i + 1
+ elseif tg == "munder" then
if detail == "accent" then
- di.attributes = { accent = "true" }
+ local p = properties[di.fulltag]
+ local b = p.bottom
+ local d = di.data
+ -- todo: accent = "false" (for scripts like limits)
+ di.attributes = {
+ accent = "true",
+ }
+ -- todo: p.bottomfixed
+ di.detail = nil
+ if b then
+ -- munder
+ d[2].data[1].content = utfchar(b)
+ end
+ else
+ -- can't happen
+ end
+ checkmath(di)
+ i = i + 1
+ elseif tg == "munderover" then
+ if detail == "accent" then
+ local p = properties[di.fulltag]
+ local t = p.top
+ local b = p.bottom
+ local d = di.data
+ -- todo: accent = "false" (for scripts like limits)
+ -- todo: accentunder = "false" (for scripts like limits)
+ di.attributes = {
+ accent = "true",
+ accentunder = "true",
+ }
+ -- todo: p.topfixed
+ -- todo: p.bottomfixed
+ di.detail = nil
+ if t and b then
+ -- munderover
+ d[1].data[1].content = utfchar(t)
+ d[3].data[1].content = utfchar(b)
+ di.data = { d[2], d[3], d[1] }
+ else
+ -- can't happen
+ end
+ else
+ -- can't happen
+ end
+ checkmath(di)
+ i = i + 1
+ elseif tg == "mstacker" then
+ local d = di.data
+ local d1 = d[1]
+ local d2 = d[2]
+ local d3 = d[3]
+ local t1 = d1 and d1.tg
+ local t2 = d2 and d2.tg
+ local t3 = d3 and d3.tg
+ local m = nil -- d1.data[1]
+ local t = nil
+ local b = nil
+ -- only accent when top / bot have stretch
+ if t1 == "mstackermid" then
+ m = accentchar(d1) -- or m
+ if t2 == "mstackertop" then
+ if t3 == "mstackerbot" then
+ t = accentchar(d2)
+ b = accentchar(d3)
+ di.element = "munderover"
+ di.data = { m or d1.data[1], b or d3.data[1], t or d2.data[1] }
+ else
+ t = accentchar(d2)
+ di.element = "mover"
+ di.data = { m or d1.data[1], t or d2.data[1] }
+ end
+ elseif t2 == "mstackerbot" then
+ if t3 == "mstackertop" then
+ b = accentchar(d2)
+ t = accentchar(d3)
+ di.element = "munderover"
+ di.data = { m or d1.data[1], b or d2.data[1], t or d3.data[1] }
+ else
+ b = accentchar(d2)
+ di.element = "munder"
+ di.data = { m or d1.data[1], b or d2.data[1] }
+ end
+ else
+ -- can't happen
+ end
+ else
+ -- can't happen
+ end
+ if t or b then
+ di.attributes = {
+ accent = t and "true" or nil,
+ accentunder = b and "true" or nil,
+ }
di.detail = nil
end
checkmath(di)
@@ -1173,7 +1311,34 @@ do
elseif tg == "break" then
di.skip = "comment"
i = i + 1
- elseif tg == "mrow" and detail then
+ elseif tg == "mtext" then
+ -- this is only needed for unboxed mtexts ... all kind of special
+ -- tex border cases and optimizations ... trial and error
+ local data = di.data
+ if #data > 1 then
+ for i=1,#data do
+ local di = data[i]
+ local content = di.content
+ if content then
+ data[i] = {
+ element = "mtext",
+ nature = "inline",
+ data = { di },
+ n = 0,
+ }
+ elseif di.tg == "math" then
+ local di = di.data[1]
+ data[i] = di
+ checkmath(di)
+ end
+ end
+ di.element = "mrow"
+ -- di.tg = "mrow"
+ -- di.nature = "inline"
+ end
+ checkmath(di)
+ i = i + 1
+ elseif tg == "mrow" and detail then -- hm, falls through
di.detail = nil
checkmath(di)
di = {
@@ -1272,21 +1437,47 @@ do
local ndata = #data
local n = 0
for i=1,ndata do
- local di = data[i]
- if di and not di.content then
- di = stripmath(di)
+ local d = data[i]
+ if d and not d.content then
+ d = stripmath(d)
end
- if di then
- local content = di.content
+ if d then
+ local content = d.content
if not content then
n = n + 1
- di.__i__ = n
- data[n] = di
+ d.__i__ = n
+ data[n] = d
elseif content == " " or content == "" then
- -- skip
+ if d.tg == "mspace" then
+ -- we append or prepend a space to a preceding or following mtext
+ local parent = d.__p__
+ local index = d.__i__ -- == i
+ local data = parent.data
+ if index > 1 then
+ local d = data[index-1]
+ if d.tg == "mtext" then
+ local dd = d.data
+ local dn = dd[#dd]
+ local dc = dn.content
+ if dc then
+ dn.content = dc .. content
+ end
+ end
+ elseif index < ndata then
+ local d = data[index+1]
+ if d.tg == "mtext" then
+ local dd = d.data
+ local dn = dd[1]
+ local dc = dn.content
+ if dc then
+ dn.content = content .. dc
+ end
+ end
+ end
+ end
else
n = n + 1
- data[n] = di
+ data[n] = d
end
end
end
@@ -1296,7 +1487,16 @@ do
if #data > 0 then
return di
end
+-- end
end
+ -- could be integrated but is messy then
+-- while roottg == "mrow" and #data == 1 do
+-- data = data[1]
+-- for k, v in next, data do
+-- root[k] = v
+-- end
+-- roottg = data.tg
+-- end
end
end
@@ -1795,46 +1995,50 @@ do
local di = data[i]
if not di then -- hm, di can be string
-- whatever
- elseif di.content then
- -- already has breaks
- local content = lpegmatch(p_entity,di.content)
- if i == nofdata and sub(content,-1) == "\n" then -- move check
- -- can be an end of line in par but can also be the last line
- if trace_spacing then
- result[#result+1] = f_spacing(di.parnumber or 0,sub(content,1,-2))
+ else
+ local content = di.content
+-- also optimize for content == "" : trace that first
+ if content then
+ -- already has breaks
+ local content = lpegmatch(p_entity,content)
+ if i == nofdata and sub(content,-1) == "\n" then -- move check
+ -- can be an end of line in par but can also be the last line
+ if trace_spacing then
+ result[#result+1] = f_spacing(di.parnumber or 0,sub(content,1,-2))
+ else
+ result[#result+1] = sub(content,1,-2)
+ end
+ result[#result+1] = " "
else
- result[#result+1] = sub(content,1,-2)
+ if trace_spacing then
+ result[#result+1] = f_spacing(di.parnumber or 0,content)
+ else
+ result[#result+1] = content
+ end
end
- result[#result+1] = " "
- else
- if trace_spacing then
- result[#result+1] = f_spacing(di.parnumber or 0,content)
+ elseif not di.collapsed then -- ignore collapsed data (is appended, reconstructed par)
+ local element = di.element
+ if not element then
+ -- skip
+ elseif element == "break" then -- or element == "pagebreak"
+ emptytag(result,element,nature,di)
+ elseif element == "" or di.skip == "ignore" then
+ -- skip
else
- result[#result+1] = content
- end
- end
- elseif not di.collapsed then -- ignore collapsed data (is appended, reconstructed par)
- local element = di.element
- if not element then
- -- skip
- elseif element == "break" then -- or element == "pagebreak"
- emptytag(result,element,nature,di)
- elseif element == "" or di.skip == "ignore" then
- -- skip
- else
- if di.before then
- flushtree(result,di.before,nature)
- end
- local natu = di.nature
- local skip = di.skip
- if di.breaknode then
- emptytag(result,"break","display",di)
- end
- begintag(result,element,natu,di,skip)
- flushtree(result,di.data,natu)
- endtag(result,element,natu,di,skip)
- if di.after then
- flushtree(result,di.after,nature)
+ if di.before then
+ flushtree(result,di.before,nature)
+ end
+ local natu = di.nature
+ local skip = di.skip
+ if di.breaknode then
+ emptytag(result,"break","display",di)
+ end
+ begintag(result,element,natu,di,skip)
+ flushtree(result,di.data,natu)
+ endtag(result,element,natu,di,skip)
+ if di.after then
+ flushtree(result,di.after,nature)
+ end
end
end
end
@@ -2222,12 +2426,13 @@ end
-- whatsit_code localpar_code
-local function collectresults(head,list) -- is last used (we also have currentattribute)
+local function collectresults(head,list,pat,pap) -- is last used (we also have currentattribute)
local p
for n in traverse_nodes(head) do
local id = getid(n) -- 14: image, 8: literal (mp)
if id == glyph_code then
local at = getattr(n,a_tagged)
+or pat
if not at then
-- we need to tag the pagebody stuff as being valid skippable
--
@@ -2236,7 +2441,7 @@ local function collectresults(head,list) -- is last used (we also have currentat
-- we could add tonunicodes for ligatures (todo)
local components = getfield(n,"components")
if components then -- we lose data
- collectresults(components,nil)
+ collectresults(components,nil,at) -- this assumes that components have the same attribute as the glyph ... we should be more tolerant (see math)
else
local c = getchar(n)
if last ~= at then
@@ -2244,6 +2449,7 @@ local function collectresults(head,list) -- is last used (we also have currentat
pushcontent()
currentnesting = tl
currentparagraph = getattr(n,a_taggedpar)
+or pap
currentattribute = at
last = at
pushentry(currentnesting)
@@ -2262,6 +2468,7 @@ local function collectresults(head,list) -- is last used (we also have currentat
-- information unless we inject a special node (but even then we can run into nesting
-- issues)
local ap = getattr(n,a_taggedpar)
+or pap
if ap ~= currentparagraph then
pushcontent(currentparagraph,ap)
pushentry(currentnesting)
@@ -2338,6 +2545,7 @@ local function collectresults(head,list) -- is last used (we also have currentat
-- skip this one ... already converted special character (node-acc)
elseif ca then
local a = getattr(n,a_tagged)
+or pat
if a then
local c = specialspaces[ca]
if last ~= a then
@@ -2348,12 +2556,14 @@ local function collectresults(head,list) -- is last used (we also have currentat
pushcontent()
currentnesting = tl
currentparagraph = getattr(n,a_taggedpar)
+or pap
currentattribute = a
last = a
pushentry(currentnesting)
-- no reference check (see above)
elseif last then
local ap = getattr(n,a_taggedpar)
+or pap
if ap ~= currentparagraph then
pushcontent(currentparagraph,ap)
pushentry(currentnesting)
@@ -2376,9 +2586,11 @@ local function collectresults(head,list) -- is last used (we also have currentat
else
local subtype = getsubtype(n)
if subtype == userskip_code then
- if getfield(getfield(n,"spec"),"width") > threshold then
+ local spec = getfield(n,"spec")
+ if getfield(spec,"width") > threshold then
if last and not somespace[currentcontent[nofcurrentcontent]] then
local a = getattr(n,a_tagged)
+or pat
if a == last then
if trace_export then
report_export("%w<!-- injecting spacing 5a -->",currentdepth)
@@ -2406,6 +2618,7 @@ local function collectresults(head,list) -- is last used (we also have currentat
elseif subtype == spaceskip_code or subtype == xspaceskip_code then
if not somespace[currentcontent[nofcurrentcontent]] then
local a = getattr(n,a_tagged)
+or pat
if a == last then
if trace_export then
report_export("%w<!-- injecting spacing 7 (stay in element) -->",currentdepth)
@@ -2435,6 +2648,7 @@ local function collectresults(head,list) -- is last used (we also have currentat
end
elseif not somespace[r] then
local a = getattr(n,a_tagged)
+or pat
if a == last then
if trace_export then
report_export("%w<!-- injecting spacing 1 (end of line, stay in element) -->",currentdepth)
@@ -2465,6 +2679,7 @@ local function collectresults(head,list) -- is last used (we also have currentat
local ai = getattr(n,a_image)
if ai then
local at = getattr(n,a_tagged)
+or pat
if nofcurrentcontent > 0 then
pushcontent()
pushentry(currentnesting) -- ??
@@ -2479,7 +2694,9 @@ local function collectresults(head,list) -- is last used (we also have currentat
-- we need to determine an end-of-line
local list = getlist(n)
if list then
- collectresults(list,n)
+local at = getattr(n,a_tagged)
+or pat
+ collectresults(list,n,at)
end
end
elseif id == kern_code then
@@ -2492,6 +2709,7 @@ local function collectresults(head,list) -- is last used (we also have currentat
if kern > limit then
if last and not somespace[currentcontent[nofcurrentcontent]] then
local a = getattr(n,a_tagged)
+or pat
if a == last then
if not somespace[currentcontent[nofcurrentcontent]] then
if trace_export then
@@ -2531,6 +2749,19 @@ function nodes.handlers.export(head) -- hooks into the page builder
end
-- continueexport()
restart = true
+
+-- local function f(head,depth,pat)
+-- for n in node.traverse(head) do
+-- local a = n[a_tagged] or pat
+-- local t = taglist[a]
+-- print(depth,n,a,t and table.concat(t," "))
+-- if n.id == hlist_code or n.id == vlist_code and n.list then
+-- f(n.list,depth+1,a)
+-- end
+-- end
+-- end
+-- f(head,1)
+
collectresults(tonut(head))
if trace_export then
report_export("%w<!-- stop flushing page -->",currentdepth)
@@ -2578,31 +2809,35 @@ local f_cssheadlink = formatters [ [[
<link type="text/css" rel="stylesheet" href="%s"/>
]] ]
- local function allusedstylesheets(xmlfile,cssfiles,files)
+ local function allusedstylesheets(cssfiles,files,path)
+ local done = { }
local result = { }
local extras = { }
for i=1,#cssfiles do
local cssfile = cssfiles[i]
- if type(cssfile) ~= "string" or cssfile == v_yes or cssfile == "" or cssfile == xmlfile then
- cssfile = file.replacesuffix(xmlfile,"css")
- else
- cssfile = file.addsuffix(cssfile,"css")
+ if type(cssfile) ~= "string" then
+ -- error
+ elseif cssfile == "export-example.css" then
+ -- ignore
+ elseif not done[cssfile] then
+ cssfile = file.join(path,cssfile)
+ report_export("adding css reference '%s'",cssfile)
+ files[#files+1] = cssfile
+ result[#result+1] = f_csspreamble(cssfile)
+ extras[#extras+1] = f_cssheadlink(cssfile)
+ done[cssfile] = true
end
- files[#files+1] = cssfile
- report_export("adding css reference '%s'",cssfile)
- result[#result+1] = f_csspreamble(cssfile)
- extras[#extras+1] = f_cssheadlink(cssfile)
end
return concat(result), concat(extras)
end
local f_e_template = [[
-%element% {
+%element%, div.%element% {
display: %display% ;
}]]
local f_d_template = [[
-%element%[detail=%detail%], div.detail-%detail% {
+%element%[detail=%detail%], div.%element%.detail-%detail% {
display: %display% ;
}]]
@@ -2640,23 +2875,27 @@ local htmltemplate = [[
mixed = "inline",
}
- local function allusedelements(xmlfile)
- local result = { formatters["/* %s for file %s */"]("template",xmlfile) }
+ local function allusedelements(basename)
+ local result = { formatters["/* %s for file %s */"]("template",basename) }
for element, details in sortedhash(used) do
- result[#result+1] = f_category(element)
- for detail, nature in sortedhash(details) do
- local display = displaymapping[nature or "display"] or "block"
- if detail == "" then
- result[#result+1] = replacetemplate(f_e_template, {
- element = element,
- display = display,
- })
- else
- result[#result+1] = replacetemplate(f_d_template, {
- element = element,
- detail = detail,
- display = display,
- })
+ if namespaces[element] then
+ -- skip math
+ else
+ result[#result+1] = f_category(element)
+ for detail, nature in sortedhash(details) do
+ local display = displaymapping[nature or "display"] or "block"
+ if detail == "" then
+ result[#result+1] = replacetemplate(f_e_template, {
+ element = element,
+ display = display,
+ })
+ else
+ result[#result+1] = replacetemplate(f_d_template, {
+ element = element,
+ detail = detail,
+ display = display,
+ })
+ end
end
end
end
@@ -2771,8 +3010,11 @@ local htmltemplate = [[
--
}
- local addclicks = true
- local f_onclick = formatters[ [[location.href='%s']] ]
+ local addclicks = true
+ local f_onclick = formatters[ [[location.href='%s']] ]
+
+ local p_cleanid = lpeg.replacer { [":"] = "-" }
+ local p_cleanhref = lpeg.Cs(lpeg.P("#") * p_cleanid)
local function remap(specification,source,target)
local comment = nil -- share comments
@@ -2781,8 +3023,10 @@ local htmltemplate = [[
local tg = c.tg
local ns = c.ns
if ns == "m" then
+if false then
c.ns = ""
c.at["xmlns:m"] = nil
+end
-- elseif tg == "a" then
-- c.ns = ""
else
@@ -2813,7 +3057,9 @@ local htmltemplate = [[
local href = at.href
local class = concat(class," ")
if id then
+ id = lpegmatch(p_cleanid, id) or id
if href then
+ href = lpegmatch(p_cleanhref,href) or href
c.at = {
class = class,
id = id,
@@ -2828,6 +3074,7 @@ local htmltemplate = [[
end
else
if href then
+ href = lpegmatch(p_cleanhref,href) or href
c.at = {
class = class,
href = href,
@@ -2845,10 +3092,7 @@ local htmltemplate = [[
end
end
- local cssfile, xhtmlfile = nil, nil
-
- directives.register("backend.export.css", function(v) cssfile = v end)
- directives.register("backend.export.xhtml", function(v) xhtmlfile = v end)
+ local cssfile = nil directives.register("backend.export.css", function(v) cssfile = v end)
local function stopexport(v)
starttiming(treehash)
@@ -2856,11 +3100,7 @@ local htmltemplate = [[
finishexport()
--
report_export("")
- if xhtmlfile then
- report_export("exporting xml, xhtml and html files")
- else
- report_export("exporting xml file")
- end
+ report_export("exporting xml, xhtml and html files")
report_export("")
--
wrapups.collapsetree(tree)
@@ -2874,101 +3114,190 @@ local htmltemplate = [[
if type(v) ~= "string" or v == v_yes or v == "" then
v = tex.jobname
end
- local basename = file.basename(v)
- local xmlfile = file.addsuffix(basename,"export")
- --
- local imagefilename = file.addsuffix(file.removesuffix(xmlfile) .. "-images","css")
- local stylefilename = file.addsuffix(file.removesuffix(xmlfile) .. "-styles","css")
- local templatefilename = file.replacesuffix(xmlfile,"template")
- local specificationfilename = file.replacesuffix(xmlfile,"specification")
+
+ -- we use a dedicated subpath:
--
- if xhtml and not cssfile then
- cssfile = true
- end
- local cssfiles = { }
- if cssfile then
- if cssfile == true then
- cssfiles = { "export-example.css" }
+ -- ./jobname-export
+ -- ./jobname-export/images
+ -- ./jobname-export/styles
+ -- ./jobname-export/jobname-export.xml
+ -- ./jobname-export/jobname-export.xhtml
+ -- ./jobname-export/jobname-export.html
+ -- ./jobname-export/jobname-specification.lua
+ -- ./jobname-export/styles/jobname-defaults.css
+ -- ./jobname-export/styles/jobname-styles.css
+ -- ./jobname-export/styles/jobname-images.css
+ -- ./jobname-export/styles/jobname-templates.css
+
+ local basename = file.basename(v)
+ local corename = file.removesuffix(basename)
+ local basepath = basename .. "-export"
+ local imagepath = file.join(basepath,"images")
+ local stylepath = file.join(basepath,"styles")
+
+ local function validpath(what,pathname)
+ if lfs.isdir(pathname) then
+ report_export("using exiting %s path %a",what,pathname)
+ return pathname
+ end
+ lfs.mkdir(pathname)
+ if lfs.isdir(pathname) then
+ report_export("using cretated %s path %a",what,basepath)
+ return pathname
else
- cssfiles = settings_to_array(cssfile or "")
+ report_export("unable to create %s path %a",what,basepath)
+ return false
end
- insert(cssfiles,1,imagefilename)
- insert(cssfiles,1,stylefilename)
end
- cssfiles = table.unique(cssfiles)
+
+ if not (validpath("export",basepath) and validpath("images",imagepath) and validpath("styles",stylepath)) then
+ return
+ end
+
+ -- we're now on the dedicated export subpath so file names can't clash
+
+ local xmlfilebase = file.addsuffix(basename .. "-raw","xml" )
+ local xhtmlfilebase = file.addsuffix(basename .. "-tag","xhtml")
+ local htmlfilebase = file.addsuffix(basename .. "-div","xhtml")
+ local specificationfilebase = file.addsuffix(basename .. "-pub","lua" )
+
+ local xmlfilename = file.join(basepath, xmlfilebase )
+ local xhtmlfilename = file.join(basepath, xhtmlfilebase )
+ local htmlfilename = file.join(basepath, htmlfilebase )
+ local specificationfilename = file.join(basepath, specificationfilebase)
--
- local result = allcontent(tree) -- also does some housekeeping and data collecting
+ local defaultfilebase = file.addsuffix(basename .. "-defaults", "css")
+ local imagefilebase = file.addsuffix(basename .. "-images", "css")
+ local stylefilebase = file.addsuffix(basename .. "-styles", "css")
+ local templatefilebase = file.addsuffix(basename .. "-templates","css")
--
+ local defaultfilename = file.join(stylepath,defaultfilebase )
+ local imagefilename = file.join(stylepath,imagefilebase )
+ local stylefilename = file.join(stylepath,stylefilebase )
+ local templatefilename = file.join(stylepath,templatefilebase)
+
+ -- we keep track of all used files
+
local files = {
}
- local x_styles, h_styles = allusedstylesheets(xmlfile,cssfiles,files)
+
+ -- we always load the defaults and optionally extra css files; we also copy the example
+ -- css file so that we always have the latest version
+
+ local cssfiles = {
+ defaultfilebase,
+ imagefilebase,
+ stylefilebase,
+ }
+
+ local examplefilename = resolvers.find_file("export-example.css")
+ if examplefilename then
+ file.copy(examplefilename,defaultfilename)
+ end
+
+ if type(cssfile) == "string" then
+ local list = table.unique(settings_to_array(cssfile))
+ for i=1,#list do
+ local source = file.addsuffix(list[i],"css")
+ local target = source
+ cssfiles[#cssfiles+1] = target
+ -- todo: warning if no file yet
+ end
+ end
+
+ local x_styles, h_styles = allusedstylesheets(cssfiles,files,"styles")
+
+ -- at this point we're ready for the content; the collector also does some
+ -- housekeeping and data collecting; at this point we still have an xml
+ -- representation that uses verbose element names and carries information in
+ -- attributes
+
+ local result = allcontent(tree)
+
local results = concat {
wholepreamble(true),
x_styles, -- adds to files
result,
}
- --
- files = table.unique(files)
- --
- report_export("saving xml data in %a",xmlfile)
- io.savedata(xmlfile,results)
- --
+
+ cssfiles = table.unique(cssfiles)
+
+ -- we're now ready for saving the result in the xml file
+
+ report_export("saving xml data in %a",xmlfilename)
+ io.savedata(xmlfilename,results)
+
report_export("saving css image definitions in %a",imagefilename)
- io.savedata(imagefilename,wrapups.allusedimages(xmlfile))
- --
+ io.savedata(imagefilename,wrapups.allusedimages(basename))
+
report_export("saving css style definitions in %a",stylefilename)
- io.savedata(stylefilename,wrapups.allusedstyles(xmlfile))
- --
+ io.savedata(stylefilename,wrapups.allusedstyles(basename))
+
report_export("saving css template in %a",templatefilename)
- io.savedata(templatefilename,allusedelements(xmlfile))
- --
- local xmltree = nil
- if xhtmlfile then
- -- basic
- if type(v) ~= "string" or xhtmlfile == true or xhtmlfile == v_yes or xhtmlfile == "" or xhtmlfile == xmlfile then
- xhtmlfile = file.replacesuffix(xmlfile,"xhtml")
- else
- xhtmlfile = file.addsuffix(xhtmlfile,"xhtml")
- end
- files[#files+1] = xhtmlfile
- report_export("saving xhtml variant in %a",xhtmlfile)
- xmltree = cleanxhtmltree(xml.convert(results))
- xml.save(xmltree,xhtmlfile)
- -- looking at identity is somewhat redundant as we also inherit from interaction
- -- at the tex end
- local identity = interactions.general.getidentity()
- local specification = {
- name = file.removesuffix(v),
- identifier = os.uuid(),
- images = wrapups.uniqueusedimages(),
- imagefile = imagefilename,
- stylefile = stylefilename,
- root = xhtmlfile,
- files = files,
- language = languagenames[texgetcount("mainlanguagenumber")],
- title = validstring(finetuning.title) or validstring(identity.title),
- subtitle = validstring(finetuning.subtitle) or validstring(identity.subtitle),
- author = validstring(finetuning.author) or validstring(identity.author),
- firstpage = validstring(finetuning.firstpage),
- lastpage = validstring(finetuning.lastpage),
- }
- report_export("saving specification in %a",specificationfilename,specificationfilename)
- io.savedata(specificationfilename,table.serialize(specification,true))
- -- bonus
- local resultfile = file.replacesuffix(xmlfile,"html")
- report_export("saving div based alternative in %a",resultfile)
- remap(specification,xmltree)
- local variables = {
- style = h_styles,
- body = xml.tostring(xml.first(xmltree,"/div")),
- preamble = wholepreamble(false),
- title = specification.title,
- }
- io.savedata(resultfile,replacetemplate(htmltemplate,variables,"xml"))
- report_export("")
- report_export([[create epub with: mtxrun --script epub --make "%s"]],file.nameonly(resultfile))
- report_export("")
- end
+ io.savedata(templatefilename,allusedelements(basename))
+
+ -- additionally we save an xhtml file; for that we load the file as xml tree
+
+ report_export("saving xhtml variant in %a",xhtmlfilename)
+
+ local xmltree = cleanxhtmltree(xml.convert(results))
+
+ xml.save(xmltree,xhtmlfilename)
+
+ -- now we save a specification file that can be used for generating an epub file
+
+ -- looking at identity is somewhat redundant as we also inherit from interaction
+ -- at the tex end
+
+ local identity = interactions.general.getidentity()
+
+ local specification = {
+ name = file.removesuffix(v),
+ identifier = os.uuid(),
+ images = wrapups.uniqueusedimages(),
+ imagefile = file.join("styles",imagefilebase),
+ imagepath = "images",
+ stylepath = "styles",
+ xmlfiles = { xmlfilebase },
+ xhtmlfiles = { xhtmlfilebase },
+ htmlfiles = { htmlfilebase },
+ styles = cssfiles,
+ htmlroot = htmlfilebase,
+ language = languagenames[texgetcount("mainlanguagenumber")],
+ title = validstring(finetuning.title) or validstring(identity.title),
+ subtitle = validstring(finetuning.subtitle) or validstring(identity.subtitle),
+ author = validstring(finetuning.author) or validstring(identity.author),
+ firstpage = validstring(finetuning.firstpage),
+ lastpage = validstring(finetuning.lastpage),
+ }
+
+ report_export("saving specification in %a",specificationfilename,specificationfilename)
+
+ io.savedata(specificationfilename,table.serialize(specification,true))
+
+ -- the html export for epub is different in the sense that it uses div's instead of
+ -- specific tags
+
+ report_export("saving div based alternative in %a",htmlfilename)
+
+ remap(specification,xmltree)
+
+ local variables = {
+ style = h_styles,
+ body = xml.tostring(xml.first(xmltree,"/div")),
+ preamble = wholepreamble(false),
+ title = specification.title,
+ }
+
+ io.savedata(htmlfilename,replacetemplate(htmltemplate,variables,"xml"))
+
+ -- finally we report how an epub file can be made (using the specification)
+
+ report_export("")
+ report_export('create epub with: mtxrun --script epub --make "%s" [--purge --rename --svgmath]',file.nameonly(basename))
+ report_export("")
+
stoptiming(treehash)
end
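
For illustration (not part of the patch): the specification that stopexport saves is a plain serialized Lua table, so a later step, such as the epub packager behind the reported "mtxrun --script epub --make" command, can simply load it back. The jobname "test", and therefore the path "test-export/test-pub.lua", is an assumption here; table.serialize with true as second argument should write a loadable "return { ... }" chunk.

-- minimal sketch, assuming the jobname is "test"

local specification = dofile("test-export/test-pub.lua")

print(specification.htmlroot)        -- e.g. "test-div.xhtml"
print(specification.language)        -- main document language

for i=1,#specification.styles do
    print(specification.styles[i])   -- the css files collected during the export
end
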
diff --git a/tex/context/base/back-exp.mkiv b/tex/context/base/back-exp.mkiv
index bda056fac..7a9824555 100644
--- a/tex/context/base/back-exp.mkiv
+++ b/tex/context/base/back-exp.mkiv
@@ -164,10 +164,11 @@
% \c!lastpage=, % imagename
\c!alternative=, % html, div
\c!properties=\v!no, % no: ignore, yes: as attribute, otherwise: use as prefix
- \c!hyphen=\v!no]
+ \c!hyphen=\v!no,
+ \c!svgstyle=]
\setupbackend
- [css=export-example.css]
+ [css=] % ?
\def\dosynchronizeexport
{\let\currentexport\empty
@@ -182,6 +183,7 @@
author = \!!bs\exportparameter\c!author\!!es,
firstpage = "\exportparameter\c!firstpage",
lastpage = "\exportparameter\c!lastpage",
+ svgstyle = "\exportparameter\c!svgstyle",
}}}
\appendtoks
diff --git a/tex/context/base/cont-new.mkiv b/tex/context/base/cont-new.mkiv
index 37a9ead0f..a9ae0d52a 100644
--- a/tex/context/base/cont-new.mkiv
+++ b/tex/context/base/cont-new.mkiv
@@ -11,7 +11,7 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-\newcontextversion{2014.09.18 11:17}
+\newcontextversion{2014.09.25 18:58}
%D This file is loaded at runtime, thereby providing an excellent place for
%D hacks, patches, extensions and new features.
diff --git a/tex/context/base/context-version.pdf b/tex/context/base/context-version.pdf
index e0c719446..3cdfd0e80 100644
--- a/tex/context/base/context-version.pdf
+++ b/tex/context/base/context-version.pdf
Binary files differ
diff --git a/tex/context/base/context.mkiv b/tex/context/base/context.mkiv
index f060a710b..617c2423f 100644
--- a/tex/context/base/context.mkiv
+++ b/tex/context/base/context.mkiv
@@ -28,7 +28,7 @@
%D up and the dependencies are more consistent.
\edef\contextformat {\jobname}
-\edef\contextversion{2014.09.18 11:17}
+\edef\contextversion{2014.09.25 18:58}
\edef\contextkind {beta}
%D For those who want to use this:
@@ -184,6 +184,7 @@
\loadmarkfile{hand-ini}
\loadmarkfile{lang-ini}
+\loadmarkfile{lang-hyp}
\loadmarkfile{lang-lab}
\loadmarkfile{unic-ini}
diff --git a/tex/context/base/data-res.lua b/tex/context/base/data-res.lua
index 3dd16c23b..13d7627d2 100644
--- a/tex/context/base/data-res.lua
+++ b/tex/context/base/data-res.lua
@@ -1459,6 +1459,9 @@ end
-- -- -- end of main file search routing -- -- --
local function findfiles(filename,filetype,allresults)
+ if not filename or filename == "" then
+ return { }
+ end
local result, status = collect_instance_files(filename,filetype or "",allresults)
if not result or #result == 0 then
local lowered = lower(filename)
diff --git a/tex/context/base/data-sch.lua b/tex/context/base/data-sch.lua
index adc774489..1e1077b03 100644
--- a/tex/context/base/data-sch.lua
+++ b/tex/context/base/data-sch.lua
@@ -31,8 +31,18 @@ function cleaners.none(specification)
return specification.original
end
-function cleaners.strip(specification)
- return (gsub(specification.original,"[^%a%d%.]+","-")) -- so we keep periods
+-- function cleaners.strip(specification)
+-- -- todo: only keep suffix periods, so after the last
+-- return (gsub(specification.original,"[^%a%d%.]+","-")) -- so we keep periods
+-- end
+
+function cleaners.strip(specification) -- keep suffixes
+ local path, name = file.splitbase(specification.original)
+ if path == "" then
+ return (gsub(name,"[^%a%d%.]+","-"))
+ else
+ return (gsub((gsub(path,"%.","-") .. "-" .. name),"[^%a%d%.]+","-"))
+ end
end
function cleaners.md5(specification)
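
For illustration (not part of the patch): the effect of the new "keep suffixes" strip cleaner on a remote name. The splitbase helper below is a simplified stand-in for file.splitbase and the url is made up; only the substitution logic is taken from cleaners.strip above.

local gsub, match = string.gsub, string.match

local function splitbase(name) -- simplified stand-in for file.splitbase
    local path, base = match(name,"^(.+)/([^/]+)$")
    if path then
        return path, base
    end
    return "", name
end

local function strip(original) -- same substitutions as cleaners.strip above
    local path, name = splitbase(original)
    if path == "" then
        return (gsub(name,"[^%a%d%.]+","-"))
    else
        return (gsub((gsub(path,"%.","-") .. "-" .. name),"[^%a%d%.]+","-"))
    end
end

print(strip("http://www.pragma-ade.com/show-gra.pdf"))
-- http-www-pragma-ade-com-show-gra.pdf : the suffix period is kept
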
diff --git a/tex/context/base/export-example.css b/tex/context/base/export-example.css
index f78014a5d..2962a790f 100644
--- a/tex/context/base/export-example.css
+++ b/tex/context/base/export-example.css
@@ -687,7 +687,19 @@ link, div.link {
/* margintextblock : inline */
/* margintext : inline */
-/* math : inline */
+margintext, div.margintext {
+ display : block ;
+ font-weight : bold ;
+ margin-top : 1em ;
+ margin-bottom : 1em ;
+}
+
+margintext:before, div.margintext:before {
+ content : "\25B6\00A0\00A0" ;
+ color : rgb(40%,40%,40%) ;
+}
+
+/* math : inline | display */
/* mn : mixed */
/* mi : mixed */
/* mo : mixed */
@@ -710,6 +722,16 @@ link, div.link {
/* mtr : display */
/* mtd : display */
+div.math-inline {
+ display : inline ;
+ vertical-align : 0 ; /* this will be set directly */
+}
+
+div.math-display {
+ display : block ;
+ margin : 1ex 0ex 1em 3em ;
+}
+
/* quantity : inline */
/* unit : inline */
/* number : inline */
diff --git a/tex/context/base/grph-inc.lua b/tex/context/base/grph-inc.lua
index d3b13a680..f83c759b3 100644
--- a/tex/context/base/grph-inc.lua
+++ b/tex/context/base/grph-inc.lua
@@ -1362,6 +1362,11 @@ end
local function runprogram(binary,argument,variables)
-- os.which remembers found programs
+-- if not variables and type(binary) == "table" and binary.command then
+-- variables = argument
+-- argument = binary.argument
+-- binary = binary.command
+-- end
local found = nil
if type(binary) == "table" then
for i=1,#binary do
@@ -1403,6 +1408,8 @@ local epsconverter = converters.eps or { }
converters.eps = epsconverter
converters.ps = epsconverter
+-- todo: colorspace
+
local epstopdf = {
resolutions = {
[v_low] = "screen",
@@ -1483,22 +1490,22 @@ epsconverter.default = epsconverter.pdf
local pdfconverter = converters.pdf or { }
converters.pdf = pdfconverter
-programs.pdftoeps = {
- command = "pdftops",
- argument = [[-eps "%oldname%" "%newname%]],
-}
-
-pdfconverter.stripped = function(oldname,newname)
- local pdftoeps = programs.pdftoeps -- can be changed
- local epstopdf = programs.epstopdf -- can be changed
- local presets = epstopdf.resolutions[resolution or ""] or epstopdf.resolutions.high
- local tmpname = newname .. ".tmp"
- runprogram(pdftoeps.command, pdftoeps.argument, { oldname = oldname, newname = tmpname, presets = presets })
- runprogram(epstopdf.command, epstopdf.argument, { oldname = tmpname, newname = newname, presets = presets })
- os.remove(tmpname)
-end
-
-figures.registersuffix("stripped","pdf")
+-- programs.pdftoeps = {
+-- command = "pdftops",
+-- argument = [[-eps "%oldname%" "%newname%"]],
+-- }
+--
+-- pdfconverter.stripped = function(oldname,newname)
+-- local pdftoeps = programs.pdftoeps -- can be changed
+-- local epstopdf = programs.epstopdf -- can be changed
+-- local presets = epstopdf.resolutions[resolution or ""] or epstopdf.resolutions.high
+-- local tmpname = newname .. ".tmp"
+-- runprogram(pdftoeps.command, pdftoeps.argument, { oldname = oldname, newname = tmpname, presets = presets })
+-- runprogram(epstopdf.command, epstopdf.argument, { oldname = tmpname, newname = newname, presets = presets })
+-- os.remove(tmpname)
+-- end
+--
+-- figures.registersuffix("stripped","pdf")
-- -- -- svg -- -- --
@@ -1575,6 +1582,70 @@ bmpconverter.default = converter
-- todo: lowres
+-- cmyk conversion
+
+local rgbprofile = "srgb.icc"
+local cmykprofile = "isocoated_v2_eci.icc"
+
+directives.register("graphics.conversion.rgbprofile", function(v) rgbprofile = type(v) == "string" and v or rgbprofile end)
+directives.register("graphics.conversion.cmykprofile",function(v) cmykprofile = type(v) == "string" and v or cmykprofile end)
+
+local function profiles()
+ if not lfs.isfile(rgbprofile) then
+ local found = resolvers.findfile(rgbprofile)
+ if found and found ~= "" then
+ rgbprofile = found
+ else
+ report_figures("unknown profile %a",rgbprofile)
+ end
+ end
+ if not lfs.isfile(cmykprofile) then
+ local found = resolvers.findfile(cmykprofile)
+ if found and found ~= "" then
+ cmykprofile = found
+ else
+ report_figures("unknown profile %a",cmykprofile)
+ end
+ end
+ return rgbprofile, cmykprofile
+end
+
+programs.pngtocmykpdf = {
+ command = "gm",
+ argument = [[convert -strip +profile "*" -profile "%rgbprofile%" -profile "%cmykprofile%" -colorspace cmyk -strip -sampling-factor 1x1 "%oldname%" "%newname%"]],
+}
+
+programs.jpgtocmykpdf = {
+ command = "gm",
+ argument = [[convert -strip +profile "*" -profile "%rgbprofile%" -profile "%cmykprofile%" -colorspace cmyk -strip -sampling-factor 1x1 -compress JPEG "%oldname%" "%newname%"]],
+}
+
+figures.converters.png = {
+ ["cmyk.pdf"] = function(oldname,newname,resolution)
+ local rgbprofile, cmykprofile = profiles()
+ runprogram(programs.pngtocmykpdf.command, programs.pngtocmykpdf.argument, {
+-- new: runprogram(programs.pngtocmykpdf, {
+ rgbprofile = rgbprofile,
+ cmykprofile = cmykprofile,
+ oldname = oldname,
+ newname = newname,
+ } )
+ end,
+}
+
+figures.converters.jpg = {
+ ["cmyk.pdf"] = function(oldname,newname,resolution)
+ local rgbprofile, cmykprofile = profiles()
+ runprogram(programs.jpgtocmykpdf.command, programs.jpgtocmykpdf.argument, {
+-- new: runprogram(programs.jpgtocmykpdf, {
+ rgbprofile = rgbprofile,
+ cmykprofile = cmykprofile,
+ oldname = oldname,
+ newname = newname,
+ } )
+ end,
+}
+
-- -- -- bases -- -- --
local bases = allocate()
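
For illustration (not part of the patch): the cmyk converters above hand a table of variables to runprogram, which fills the %key% placeholders in the argument template before calling the binary. The helper below is only a rough sketch of that substitution, not the actual runprogram implementation, and the file names are made up.

local function replacekeys(template,variables)
    return (string.gsub(template,"%%(%w+)%%",function(key)
        return variables[key] or ""
    end))
end

-- abridged version of the pngtocmykpdf argument above

local argument = [[convert -strip +profile "*" -profile "%rgbprofile%" -profile "%cmykprofile%" -colorspace cmyk "%oldname%" "%newname%"]]

print("gm " .. replacekeys(argument, {
    rgbprofile  = "srgb.icc",
    cmykprofile = "isocoated_v2_eci.icc",
    oldname     = "cow.png",
    newname     = "cow-cmyk.pdf",
}))
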
diff --git a/tex/context/base/lang-hyp.lua b/tex/context/base/lang-hyp.lua
new file mode 100644
index 000000000..3b5eac9ba
--- /dev/null
+++ b/tex/context/base/lang-hyp.lua
@@ -0,0 +1,663 @@
+if not modules then modules = { } end modules ['lang-hyp'] = {
+ version = 1.001,
+ comment = "companion to lang-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- In an automated workflow, hyphenation of long titles can be somewhat problematic,
+-- especially when demands conflict. For that reason I played a bit with a Lua based
+-- variant of the traditional hyphenation machinery. This mechanism has been extended
+-- several times in projects, of which a good description can be found in TUGboat,
+-- Volume 27 (2006), No. 2 — Proceedings of EuroTEX2006: Automatic non-standard
+-- hyphenation in OpenOffice.org by László Németh.
+--
+-- Being the result of two days of experimenting, the following implementation is probably
+-- not completely okay yet. If there is demand I might add some more features and plugs.
+-- The performance is quite okay but can probably be improved a bit, although this is not
+-- the most critical code.
+--
+-- . a l g o r i t h m .
+-- 4l1g4
+-- l g o3
+-- 1g o
+-- 2i t h
+-- 4h1m
+-- ---------------------
+-- 4 1 4 3 2 0 4 1
+-- a l-g o-r i t h-m
+
+-- . a s s z o n n y a l .
+-- s1s z/sz=sz,1,3
+-- n1n y/ny=ny,1,3
+-- -----------------------
+-- 0 1 0 0 0 1 0 0 0/sz=sz,2,3,ny=ny,6,3
+-- a s-s z o n-n y a l/sz=sz,2,3,ny=ny,6,3
+--
+-- ab1cd/ef=gh,2,2 : acd - efd (pattern/replacement,start,length)
+
+local type, rawset, tonumber = type, rawset, tonumber
+
+local P, R, S, Cg, Cf, Ct, Cc, C, Carg, Cs = lpeg.P, lpeg.R, lpeg.S, lpeg.Cg, lpeg.Cf, lpeg.Ct, lpeg.Cc, lpeg.C, lpeg.Carg, lpeg.Cs
+local lpegmatch = lpeg.match
+
+local concat = table.concat
+
+local utfchar = utf.char
+local utfbyte = utf.byte
+
+if not characters then
+ require("char-ini")
+end
+
+local setmetatableindex = table.setmetatableindex
+
+local languages = languages or { }
+local hyphenators = languages.hyphenators or { }
+languages.hyphenators = hyphenators
+local traditional = hyphenators.traditional or { }
+hyphenators.traditional = traditional
+
+local dictionaries = setmetatableindex(function(t,k)
+ local v = {
+ patterns = { },
+ hyphenated = { },
+ specials = { },
+ }
+ t[k] = v
+ return v
+end)
+
+local digit = R("09")
+local character = lpeg.patterns.utf8character - P("/")
+local splitpattern_k = Cs((digit/"" + character)^1)
+local splitpattern_v = Ct(((digit/tonumber + Cc(0)) * character)^1 * (digit/tonumber)^0)
+local splitpattern_v =
+ Ct(((digit/tonumber + Cc(0)) * character)^1 * (digit/tonumber)^0) *
+ (P("/") * Cf ( Ct("") *
+ Cg ( Cc("before") * C((1-lpeg.P("="))^1) * P("=") )
+ * Cg ( Cc("after") * C((1-lpeg.P(","))^1) * P(",") )
+ * Cg ( Cc("start") * ((1-lpeg.P(","))^1/tonumber) * P(",") )
+ * Cg ( Cc("length") * ((1-lpeg.P(-1) )^1/tonumber) )
+ , rawset))^-1
+
+local function register(patterns,specials,str,specification)
+ local k = lpegmatch(splitpattern_k,str)
+ local v1, v2 = lpegmatch(splitpattern_v,str)
+ patterns[k] = v1
+ if specification then
+ specials[k] = specification
+ elseif v2 then
+ specials[k] = v2
+ end
+end
+
+local word = ((Carg(1) * Carg(2) * C((1 - P(" "))^1)) / register + 1)^1
+local split = Ct(C(character)^1)
+
+function traditional.loadpatterns(language,filename)
+ local specification = require(filename)
+ local dictionary = dictionaries[language]
+ if specification then
+ local patterns = specification.patterns
+ if patterns then
+ lpegmatch(word,patterns.data,1,dictionary.patterns,dictionary.specials)
+ end
+ end
+ return dictionary
+end
+
+local lcchars = characters.lcchars
+local uccodes = characters.uccodes
+local nofwords = 0
+local nofhashed = 0
+
+local function hyphenate(dictionary,word)
+ nofwords = nofwords + 1
+ local hyphenated = dictionary.hyphenated
+ local isstring = type(word) == "string"
+ local done
+ if isstring then
+ done = hyphenated[word]
+ else
+ done = hyphenated[concat(word)]
+ end
+ if done ~= nil then
+ return done
+ else
+ done = false
+ end
+ local specials = dictionary.specials
+ local patterns = dictionary.patterns
+ local s = isstring and lpegmatch(split,word) or word
+ local l = #s
+ local w = { }
+ for i=1,l do
+ local si = s[i]
+ w[i] = lcchars[si] or si
+ end
+ local spec
+ for i=1,l do
+ for j=i,l do
+ local c = concat(w,"",i,j)
+ local m = patterns[c]
+ if m then
+ local s = specials[c]
+ if not done then
+ done = { }
+ spec = { }
+ for i=1,l do
+ done[i] = 0
+ end
+ end
+ for k=1,#m do
+ local new = m[k]
+ if not new then
+ break
+ elseif new > 0 then
+ local pos = i + k - 1
+ local old = done[pos]
+ if not old then
+ -- break ?
+ elseif new > old then
+ done[pos] = new
+ if s then
+ local b = i + s.start - 1
+ local e = b + s.length - 1
+ if pos >= b and pos <= e then
+ spec[pos] = s
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ if done then
+ local okay = false
+ for i=1,#done do
+ if done[i] % 2 == 1 then
+ done[i] = spec[i] or true
+ okay = true
+ else
+ done[i] = false
+ end
+ end
+ if not okay then
+ done = false
+ end
+ end
+ hyphenated[isstring and word or concat(word)] = done
+ nofhashed = nofhashed + 1
+ return done
+end
+
+local f_detail_1 = string.formatters["{%s}{%s}{}"]
+local f_detail_2 = string.formatters["{%s%s}{%s%s}{%s}"]
+
+function traditional.injecthyphens(dictionary,word,specification)
+ local h = hyphenate(dictionary,word)
+ if not h then
+ return word
+ end
+ local w = lpegmatch(split,word)
+ local r = { }
+ local l = #h
+ local n = 0
+ local i = 1
+ local leftmin = specification.lefthyphenmin or 2
+ local rightmin = l - (specification.righthyphenmin or 2) + 1
+ local leftchar = specification.lefthyphenchar
+ local rightchar = specification.righthyphenchar
+ while i <= l do
+ if i > leftmin and i < rightmin then
+ local hi = h[i]
+ if not hi then
+ n = n + 1
+ r[n] = w[i]
+ i = i + 1
+ elseif hi == true then
+ n = n + 1
+ r[n] = f_detail_1(rightchar,leftchar)
+ n = n + 1
+ r[n] = w[i]
+ i = i + 1
+ else
+ local b = i - hi.start
+ local e = b + hi.length - 1
+ n = b
+ r[n] = f_detail_2(hi.before,rightchar,leftchar,hi.after,concat(w,"",b,e))
+ if e + 1 == i then
+ i = i + 1
+ else
+ i = e + 1
+ end
+ end
+ else
+ n = n + 1
+ r[n] = w[i]
+ i = i + 1
+ end
+ end
+ return concat(r)
+end
+
+function traditional.registerpattern(language,str,specification)
+ local dictionary = dictionaries[language]
+ register(dictionary.patterns,dictionary.specials,str,specification)
+end
+
+-- todo: unicodes or utfhash ?
+
+if context then
+
+ local nodecodes = nodes.nodecodes
+ local glyph_code = nodecodes.glyph
+ local math_code = nodecodes.math
+
+ local nuts = nodes.nuts
+ local tonut = nodes.tonut
+ local nodepool = nuts.pool
+
+ local new_disc = nodepool.disc
+
+ local setfield = nuts.setfield
+ local getfield = nuts.getfield
+ local getchar = nuts.getchar
+ local getid = nuts.getid
+ local getnext = nuts.getnext
+ local getprev = nuts.getprev
+ local insert_before = nuts.insert_before
+ local insert_after = nuts.insert_after
+ local copy_node = nuts.copy
+ local remove_node = nuts.remove
+ local end_of_math = nuts.end_of_math
+ local node_tail = nuts.tail
+
+ function traditional.loadpatterns(language)
+ return dictionaries[language]
+ end
+
+ statistics.register("hyphenation",function()
+ if nofwords > 0 then
+ return string.format("%s words hyphenated, %s unique",nofwords,nofhashed)
+ end
+ end)
+
+ setmetatableindex(dictionaries,function(t,k) -- we use an independent data structure
+ local specification = languages.getdata(k)
+ local dictionary = {
+ patterns = { },
+ hyphenated = { },
+ specials = { },
+ instance = 0,
+ characters = { },
+ unicodes = { },
+ }
+ if specification then
+ local resources = specification.resources
+ if resources then
+ local patterns = resources.patterns
+ if patterns then
+ local data = patterns.data
+ if data then
+ -- regular patterns
+ lpegmatch(word,data,1,dictionary.patterns,dictionary.specials)
+ end
+ local extra = patterns.extra
+ if extra then
+ -- special patterns
+ lpegmatch(word,extra,1,dictionary.patterns,dictionary.specials)
+ end
+ end
+ local usedchars = lpegmatch(split,patterns.characters)
+ local characters = { }
+ local unicodes = { }
+ for i=1,#usedchars do
+ local char = usedchars[i]
+ local code = utfbyte(char)
+ local upper = uccodes[code]
+ characters[char] = code
+ unicodes [code] = char
+ unicodes [upper] = utfchar(upper)
+ end
+ dictionary.characters = characters
+ dictionary.unicodes = unicodes
+ setmetatableindex(characters,function(t,k) local v = utfbyte(k) t[k] = v return v end) -- can be non standard
+ -- setmetatableindex(unicodes, function(t,k) local v = utfchar(k) t[k] = v return v end)
+ end
+ t[specification.number] = dictionary
+ dictionary.instance = specification.instance -- needed for hyphenchars
+ end
+ t[k] = dictionary
+ return dictionary
+ end)
+
+ local function flush(head,start,stop,dictionary,w,h,lefthyphenchar,righthyphenchar,characters,lefthyphenmin,righthyphenmin)
+ local r = { }
+ local l = #h
+ local n = 0
+ local i = 1
+ local left = lefthyphenmin
+ local right = l - righthyphenmin + 1
+ while i <= l do
+ if i > left and i < right then
+ local hi = h[i]
+ if not hi then
+ n = n + 1
+ r[n] = w[i]
+ i = i + 1
+ elseif hi == true then
+ n = n + 1
+ r[n] = true
+ n = n + 1
+ r[n] = w[i]
+ i = i + 1
+ else
+ local b = i - hi.start -- + 1 - 1
+ local e = b + hi.length - 1
+ n = b
+ r[n] = { hi.before, hi.after, concat(w,"",b,e) }
+ i = e + 1
+ end
+ else
+ n = n + 1
+ r[n] = w[i]
+ i = i + 1
+ end
+ end
+
+ local function serialize(s,lefthyphenchar,righthyphenchar)
+ if not s then
+ return
+ elseif s == true then
+ local n = copy_node(stop)
+ setfield(n,"char",lefthyphenchar or righthyphenchar)
+ return n
+ end
+ local h = nil
+ local c = nil
+ if lefthyphenchar then
+ h = copy_node(stop)
+ setfield(h,"char",lefthyphenchar)
+ c = h
+ end
+ if #s == 1 then
+ local n = copy_node(stop)
+ setfield(n,"char",characters[s])
+ if not h then
+ h = n
+ else
+ insert_after(c,c,n)
+ end
+ c = n
+ else
+ local t = lpegmatch(split,s)
+ for i=1,#t do
+ local n = copy_node(stop)
+ setfield(n,"char",characters[t[i]])
+ if not h then
+ h = n
+ else
+ insert_after(c,c,n)
+ end
+ c = n
+ end
+ end
+ if righthyphenchar then
+ local n = copy_node(stop)
+ insert_after(c,c,n)
+ setfield(n,"char",righthyphenchar)
+ end
+ return h
+ end
+
+ -- no grow
+
+ local current = start
+ local size = #r
+ for i=1,size do
+ local ri = r[i]
+ if ri == true then
+ local n = new_disc()
+ if righthyphenchar then
+ setfield(n,"pre",serialize(true,righthyphenchar))
+ end
+ if lefthyphenchar then
+ setfield(n,"post",serialize(true,lefthyphenchar))
+ end
+ insert_before(head,current,n)
+ elseif type(ri) == "table" then
+ local n = new_disc()
+ local pre, post, replace = ri[1], ri[2], ri[3]
+ if pre then
+ setfield(n,"pre",serialize(pre,false,righthyphenchar))
+ end
+ if post then
+ setfield(n,"post",serialize(post,lefthyphenchar,false))
+ end
+ if replace then
+ setfield(n,"replace",serialize(replace))
+ end
+ insert_before(head,current,n)
+ else
+ setfield(current,"char",characters[ri])
+ if i < size then
+ current = getnext(current)
+ end
+ end
+ end
+ if current ~= stop then
+ local current = getnext(current)
+ local last = getnext(stop)
+ while current ~= last do
+ head, current = remove_node(head,current,true)
+ end
+ end
+ end
+
+ -- simple cases: no special .. only inject
+
+ local prehyphenchar = lang.prehyphenchar
+ local posthyphenchar = lang.posthyphenchar
+
+ local lccodes = characters.lccodes
+
+ -- An experimental feature:
+ --
+ -- \setupalign[verytolerant,flushleft]
+ -- \setuplayout[width=140pt] \showframe
+ -- longword longword long word longword longwordword \par
+ -- \enabledirectives[hyphenators.rightwordsmin=1]
+ -- longword longword long word longword longwordword \par
+ -- \disabledirectives[hyphenators.rightwordsmin]
+ --
+ -- An alternative is of course to pack the words in an hbox.
+
+ local rightwordsmin = 0 -- todo: parproperties (each par has a number anyway)
+
+ function traditional.hyphenate(head)
+ local first = tonut(head)
+ local current = first
+ local dictionary = nil
+ local instance = nil
+ local characters = nil
+ local unicodes = nil
+ local language = nil
+ local start = nil
+ local stop = nil
+ local word = nil -- maybe reuse and pass size
+ local size = 0
+ local leftchar = false
+ local rightchar = false -- utfbyte("-")
+ local leftmin = 0
+ local rightmin = 0
+ local lastone = nil
+
+ if rightwordsmin > 0 then
+ lastone = node_tail(first)
+ local inword = false
+ while lastone and rightwordsmin > 0 do
+ local id = getid(lastone)
+ if id == glyph_code then
+ inword = true
+ elseif inword then
+ inword = false
+ rightwordsmin = rightwordsmin - 1
+ end
+ lastone = getprev(lastone)
+ end
+ end
+
+ while current ~= lastone do
+ local id = getid(current)
+ if id == glyph_code then
+ -- currently no lc/uc code support
+ local code = getchar(current)
+ local lang = getfield(current,"lang")
+ if lang ~= language then
+ if dictionary then
+ if leftmin + rightmin < #word then
+ local done = hyphenate(dictionary,word)
+ if done then
+ flush(first,start,stop,dictionary,word,done,leftchar,rightchar,characters,leftmin,rightmin)
+ end
+ end
+ end
+ language = lang
+ dictionary = dictionaries[language]
+ instance = dictionary.instance
+ characters = dictionary.characters
+ unicodes = dictionary.unicodes
+ leftchar = instance and posthyphenchar(instance)
+ rightchar = instance and prehyphenchar (instance)
+ leftmin = getfield(current,"left")
+ rightmin = getfield(current,"right")
+ if not leftchar or leftchar < 0 then
+ leftchar = false
+ end
+ if not rightchar or rightchar < 0 then
+ rightchar = false
+ end
+ local char = unicodes[code]
+ if char then
+ word = { char }
+ size = 1
+ start = current
+ end
+ elseif word then
+ local char = unicodes[code]
+ if char then
+ size = size + 1
+ word[size] = char
+ elseif dictionary then
+ if leftmin + rightmin < #word then
+ local done = hyphenate(dictionary,word)
+ if done then
+ flush(first,start,stop,dictionary,word,done,leftchar,rightchar,characters,leftmin,rightmin)
+ end
+ end
+ word = nil
+ end
+ else
+ local char = unicodes[code]
+ if char then
+ word = { char }
+ size = 1
+ start = current
+ -- leftmin = getfield(current,"left") -- can be an option
+ -- rightmin = getfield(current,"right") -- can be an option
+ end
+ end
+ stop = current
+ current = getnext(current)
+ elseif word then
+ if dictionary then
+ if leftmin + rightmin < #word then
+ local done = hyphenate(dictionary,word)
+ current = getnext(current)
+ if done then
+ flush(first,start,stop,dictionary,word,done,leftchar,rightchar,characters,leftmin,rightmin)
+ end
+ else
+ current = getnext(current) -- hm
+ end
+ else
+ current = getnext(current)
+ end
+ word = nil
+ elseif id == math_code then
+ current = getnext(end_of_math(current))
+ else
+ current = getnext(current)
+ end
+ end
+ return head, true
+ end
+
+ local texmethod = "builders.kernel.hyphenation"
+ local oldmethod = texmethod
+ local newmethod = texmethod
+
+ -- local newmethod = "languages.hyphenators.traditional.hyphenate"
+ --
+ -- nodes.tasks.prependaction("processors","words",newmethod)
+ -- nodes.tasks.disableaction("processors",oldmethod)
+ --
+ -- nodes.tasks.replaceaction("processors","words",oldmethod,newmethod)
+
+ -- \enabledirectives[hyphenators.method=traditional]
+ -- \enabledirectives[hyphenators.method=builtin]
+
+ directives.register("hyphenators.method",function(v)
+ if type(v) == "string" then
+ local valid = languages.hyphenators[v]
+ if valid and valid.hyphenate then
+ newmethod = "languages.hyphenators." .. v .. ".hyphenate"
+ else
+ newmethod = texmethod
+ end
+ else
+ newmethod = texmethod
+ end
+ if oldmethod ~= newmethod then
+ nodes.tasks.replaceaction("processors","words",oldmethod,newmethod)
+ end
+ oldmethod = newmethod
+ end)
+
+ -- experimental feature
+
+ directives.register("hyphenators.rightwordsmin",function(v)
+ rightwordsmin = tonumber(v) or 0
+ end)
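+
+ -- from the tex end this is driven with for instance
+ -- \enabledirectives[hyphenators.rightwordsmin=1] (see lang-hyp.mkiv), which keeps
+ -- the last word of a paragraph free of hyphenation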
+
+else
+
+ -- traditional.loadpatterns("nl","lang-nl")
+ -- traditional.loadpatterns("de","lang-de")
+
+ traditional.registerpattern("nl","e1ë", { start = 1, length = 2, before = "e", after = "e" } )
+ traditional.registerpattern("nl","oo1ë", { start = 2, length = 3, before = "o", after = "e" } )
+ traditional.registerpattern("de","qqxc9xkqq",{ start = 3, length = 4, before = "ab", after = "cd" } )
+
+ local specification = {
+ lefthyphenmin = 2,
+ righthyphenmin = 2,
+ lefthyphenchar = "<",
+ righthyphenchar = ">",
+ }
+
+ print("reëel", traditional.injecthyphens(dictionaries.nl,"reëel", specification),"r{e>}{<e}{eë}el")
+ print("reeëel", traditional.injecthyphens(dictionaries.nl,"reeëel", specification),"re{e>}{<e}{eë}el")
+ print("rooëel", traditional.injecthyphens(dictionaries.nl,"rooëel", specification),"r{o>}{<e}{ooë}el")
+
+ print( "qxcxkq", traditional.injecthyphens(dictionaries.de, "qxcxkq", specification),"")
+ print( "qqxcxkqq", traditional.injecthyphens(dictionaries.de, "qqxcxkqq", specification),"")
+ print( "qqqxcxkqqq", traditional.injecthyphens(dictionaries.de, "qqqxcxkqqq", specification),"")
+ print("qqqqxcxkqqqq",traditional.injecthyphens(dictionaries.de,"qqqqxcxkqqqq",specification),"")
+
+end
+
diff --git a/tex/context/base/lang-hyp.mkiv b/tex/context/base/lang-hyp.mkiv
new file mode 100644
index 000000000..0cd5a72ca
--- /dev/null
+++ b/tex/context/base/lang-hyp.mkiv
@@ -0,0 +1,109 @@
+%D \module
+%D [ file=lang-hyp,
+%D version=2014.08.10,
+%D title=\CONTEXT\ Language Macros,
+%D subtitle=Experimental Patterns,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+%D This is an experimental module. We often have to deal with titles
+%D that have conflicting demands:
+%D
+%D \startitemize
+%D \startitem They go into a dedicated space (often a graphic). \stopitem
+%D \startitem The words cannot be hyphenated. \stopitem
+%D \startitem But as an escape they can get hyphenated. \stopitem
+%D \startitem In that case we want at least an extra word on the last line. \stopitem
+%D \stopitemize
+%D
+%D These and maybe more cases can be dealt with using dedicated hyphenation
+%D mechanisms. At the same time we want to experiment with more extensive patterns
+%D as discussed in {\em TUGboat, Volume 27 (2006), No. 2—Proceedings of EuroTEX2006}.
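+%D
+%D As an illustration, such an extensive pattern carries explicit replacement texts
+%D for both sides of the break point. The pattern below is the one also used in the
+%D test code of \type {lang-hyp.lua}; it permits breaking \quote {reëel} as
+%D \quote {re-eel}:
+%D
+%D \starttyping
+%D languages.hyphenators.traditional.registerpattern("nl","e1ë",
+%D     { start = 1, length = 2, before = "e", after = "e" })
+%D \stoptyping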
+
+% lua: 5.341 5.354
+% tex: 5.174 5.262
+
+\writestatus{loading}{ConTeXt Language Macros / Initialization}
+
+\registerctxluafile{lang-hyp}{1.001}
+
+%D This command can change! At some point we will keep the setting with the
+%D paragraph and then the \type {\par} can go.
+
+\unexpanded\def\atleastoneword#1%
+ {\begingroup
+ \enabledirectives[hyphenators.method=traditional]%
+ \enabledirectives[hyphenators.rightwordsmin=1]%
+ \lefthyphenmin \plusfour
+ \righthyphenmin\plusfour
+ #1\par
+ \disabledirectives[hyphenators.rightwordsmin]%
+ \enabledirectives[hyphenators.method]%
+ \endgroup}
+
+\endinput
+
+% \starttext
+%
+% \enabledirectives[hyphenators.method=traditional]
+%
+% % \dorecurse{1000}{\input tufte \par}
+%
+% \setupalign[verytolerant,flushleft]
+% \setuplayout[width=140pt] \showframe
+%
+% longword longword long word longword longwordword \blank
+%
+% \enabledirectives[hyphenators.rightwordsmin=1]
+%
+% longword longword long word longword longwordword\blank
+%
+% \disabledirectives[hyphenators.rightwordsmin]
+%
+% longword longword long word longword longwordword\blank
+%
+% \atleastoneword{longword longword long word longword longwordword}
+%
+% \enabledirectives[hyphenators.method=traditional]
+%
+% \stoptext
+
+% \startluacode
+% -- e1ë/e=e reëel re-eel
+% -- a1atje./a=t,1,3 omaatje oma-tje
+% -- schif1f/ff=f,5,2 Schiffahrt Schiff-fahrt
+%
+% languages.hyphenators.traditional.registerpattern("en","a1b", { start = 1, length = 2, before = "CD", after = "EF" } )
+% languages.hyphenators.traditional.registerpattern("en","e1ë", { start = 1, length = 2, before = "e", after = "e" } )
+% languages.hyphenators.traditional.registerpattern("en","oo1ë", { start = 2, length = 2, before = "o", after = "e" } )
+% languages.hyphenators.traditional.registerpattern("en","qqxc9xkqq",{ start = 3, length = 4, before = "ab", after = "cd" } ) -- replacement start length
+%
+% -- print("reëel", injecthyphens(dictionaries.nl,"reëel", 2,2))
+% -- print("reeëel", injecthyphens(dictionaries.nl,"reeëel", 2,2))
+% -- print("rooëel", injecthyphens(dictionaries.nl,"rooëel", 2,2))
+% -- print( "QXcXkQ", injecthyphens(dictionaries.de, "QXcXkQ", 2,2))
+% -- print( "QQXcXkQQ", injecthyphens(dictionaries.de, "QQXcXkQQ", 2,2))
+% -- print( "QQQXcXkQQQ", injecthyphens(dictionaries.de, "QQQXcXkQQQ", 2,2))
+% -- print("QQQQXcXkQQQQ",injecthyphens(dictionaries.de,"QQQQXcXkQQQQ",2,2))
+% --
+% -- print( "QQXcXkQQ QQXcXkQQ", injecthyphens(dictionaries.de, "QQXcXkQQ QQXcXkQQ", 2,2))
+% \stopluacode
+%
+% \starttext
+%
+% \blank
+%
+% xreëel rooëel \par xxabxx xxxabxxx \par
+%
+% \hsize1mm \lefthyphenmin2 \righthyphenmin2
+%
+% \blank Capacity \blank capacity \blank xyabxy \blank xreëel \blank rooëel \blank
+%
+% xy\discretionary{CD}{EF}{ab}xy % xxacceedxxx
+%
+% \stoptext
diff --git a/tex/context/base/math-fbk.lua b/tex/context/base/math-fbk.lua
index 70a8ae8d6..63a0e9f88 100644
--- a/tex/context/base/math-fbk.lua
+++ b/tex/context/base/math-fbk.lua
@@ -330,7 +330,7 @@ end
-- we could move the defs from math-act here
-local function accent_to_extensible(target,newchr,original,oldchr,height,depth,swap,offset)
+local function accent_to_extensible(target,newchr,original,oldchr,height,depth,swap,offset,unicode)
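+ -- the extra 'unicode' argument is used below to give the constructed virtual glyph
+ -- a proper tounicode mapping so that it resolves to the intended character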
local characters = target.characters
local olddata = characters[oldchr]
-- brrr ... pagella has only next
@@ -346,10 +346,11 @@ local function accent_to_extensible(target,newchr,original,oldchr,height,depth,s
end
local correction = swap and { "down", (olddata.height or 0) - height } or { "down", olddata.height + (offset or 0)}
local newdata = {
- commands = { correction, { "slot", 1, oldchr } },
- width = olddata.width,
- height = height,
- depth = depth,
+ commands = { correction, { "slot", 1, oldchr } },
+ width = olddata.width,
+ height = height,
+ depth = depth,
+ tounicode = tounicode16(unicode),
}
local glyphdata = newdata
local nextglyph = olddata.next
@@ -400,6 +401,9 @@ local function accent_to_extensible(target,newchr,original,oldchr,height,depth,s
end
return glyphdata, true
else
+-- if not olddata.tounicode then
+-- olddata.tounicode = tounicode16(unicode),
+-- end
return olddata, false
end
end
@@ -415,7 +419,7 @@ virtualcharacters[0x203E] = function(data) -- could be FE33E instead
height = target.parameters.xheight/4
depth = height
end
- return accent_to_extensible(target,0x203E,data.original,0x0305,height,depth)
+ return accent_to_extensible(target,0x203E,data.original,0x0305,height,depth,nil,nil,0x203E)
end
virtualcharacters[0xFE33E] = virtualcharacters[0x203E] -- convenient
@@ -426,37 +430,37 @@ local function smashed(data,unicode,swap,private)
local original = data.original
local chardata = target.characters[unicode]
if chardata and chardata.height > target.parameters.xheight then
- return accent_to_extensible(target,private,original,unicode,0,0,swap)
+ return accent_to_extensible(target,private,original,unicode,0,0,swap,nil,unicode)
else
return original.characters[unicode]
end
end
-addextra(0xFE3DE, { description="EXTENSIBLE OF 0x03DE", unicodeslot=0xFE3DE, mathextensible = "r", mathstretch = "h" } )
-addextra(0xFE3DC, { description="EXTENSIBLE OF 0x03DC", unicodeslot=0xFE3DC, mathextensible = "r", mathstretch = "h" } )
-addextra(0xFE3B4, { description="EXTENSIBLE OF 0x03B4", unicodeslot=0xFE3B4, mathextensible = "r", mathstretch = "h" } )
+addextra(0xFE3DE, { description="EXTENSIBLE OF 0x03DE", unicodeslot=0xFE3DE, mathextensible = "r", mathstretch = "h", mathclass = "topaccent" } )
+addextra(0xFE3DC, { description="EXTENSIBLE OF 0x03DC", unicodeslot=0xFE3DC, mathextensible = "r", mathstretch = "h", mathclass = "topaccent" } )
+addextra(0xFE3B4, { description="EXTENSIBLE OF 0x03B4", unicodeslot=0xFE3B4, mathextensible = "r", mathstretch = "h", mathclass = "topaccent" } )
virtualcharacters[0xFE3DE] = function(data) return smashed(data,0x23DE,0x23DF,0xFE3DE) end
virtualcharacters[0xFE3DC] = function(data) return smashed(data,0x23DC,0x23DD,0xFE3DC) end
virtualcharacters[0xFE3B4] = function(data) return smashed(data,0x23B4,0x23B5,0xFE3B4) end
-addextra(0xFE3DF, { description="EXTENSIBLE OF 0x03DF", unicodeslot=0xFE3DF, mathextensible = "r", mathstretch = "h" } )
-addextra(0xFE3DD, { description="EXTENSIBLE OF 0x03DD", unicodeslot=0xFE3DD, mathextensible = "r", mathstretch = "h" } )
-addextra(0xFE3B5, { description="EXTENSIBLE OF 0x03B5", unicodeslot=0xFE3B5, mathextensible = "r", mathstretch = "h" } )
+addextra(0xFE3DF, { description="EXTENSIBLE OF 0x03DF", unicodeslot=0xFE3DF, mathextensible = "r", mathstretch = "h", mathclass = "botaccent" } )
+addextra(0xFE3DD, { description="EXTENSIBLE OF 0x03DD", unicodeslot=0xFE3DD, mathextensible = "r", mathstretch = "h", mathclass = "botaccent" } )
+addextra(0xFE3B5, { description="EXTENSIBLE OF 0x03B5", unicodeslot=0xFE3B5, mathextensible = "r", mathstretch = "h", mathclass = "botaccent" } )
-virtualcharacters[0xFE3DF] = function(data) return data.target.characters[0x23DF] end
-virtualcharacters[0xFE3DD] = function(data) return data.target.characters[0x23DD] end
-virtualcharacters[0xFE3B5] = function(data) return data.target.characters[0x23B5] end
+virtualcharacters[0xFE3DF] = function(data) local c = data.target.characters[0x23DF] if c then c.tounicode = tounicode16(0x23DF) return c end end
+virtualcharacters[0xFE3DD] = function(data) local c = data.target.characters[0x23DD] if c then c.tounicode = tounicode16(0x23DD) return c end end
+virtualcharacters[0xFE3B5] = function(data) local c = data.target.characters[0x23B5] if c then c.tounicode = tounicode16(0x23B5) return c end end
-- todo: add some more .. numbers might change
-addextra(0xFE302, { description="EXTENSIBLE OF 0x0302", unicodeslot=0xFE302, mathstretch = "h" } )
-addextra(0xFE303, { description="EXTENSIBLE OF 0x0303", unicodeslot=0xFE303, mathstretch = "h" } )
+addextra(0xFE302, { description="EXTENSIBLE OF 0x0302", unicodeslot=0xFE302, mathstretch = "h", mathclass = "topaccent" } )
+addextra(0xFE303, { description="EXTENSIBLE OF 0x0303", unicodeslot=0xFE303, mathstretch = "h", mathclass = "topaccent" } )
local function smashed(data,unicode,private)
local target = data.target
local height = target.parameters.xheight / 2
- local c, done = accent_to_extensible(target,private,data.original,unicode,height,0,nil,-height)
+ local c, done = accent_to_extensible(target,private,data.original,unicode,height,0,nil,-height,unicode)
if done then
c.top_accent = nil -- or maybe also all the others
end
diff --git a/tex/context/base/math-ini.lua b/tex/context/base/math-ini.lua
index 9772ce538..4cfa02e4e 100644
--- a/tex/context/base/math-ini.lua
+++ b/tex/context/base/math-ini.lua
@@ -24,6 +24,7 @@ local commands = commands
local context_sprint = context.sprint
----- context_fprint = context.fprint -- a bit inefficient
+local ctx_doifelse = commands.doifelse
local trace_defining = false trackers.register("math.defining", function(v) trace_defining = v end)
@@ -464,7 +465,7 @@ end
local function utfmathfiller(chr, default)
local cd = somechar[chr]
- local cmd = cd and (cd.mathfiller or cd.mathname)
+ local cmd = cd and cd.mathfiller -- or cd.mathname
return cmd or default or ""
end
@@ -481,15 +482,46 @@ function commands.utfmathstretch(...) context(utfmathstretch(...)) end
function commands.utfmathcommand(...) context(utfmathcommand(...)) end
function commands.utfmathfiller (...) context(utfmathfiller (...)) end
-function commands.doifelseutfmathaccent(chr,asked)
- commands.doifelse(utfmathaccent(chr,nil,asked))
+function commands.utfmathcommandabove(asked)
+ local c = utfmathcommand(asked,nil,"topaccent","over" )
+ if c ~= "" then
+ context(c)
+ end
+end
+
+function commands.utfmathcommandbelow (asked)
+ local c = utfmathcommand(asked,nil,"botaccent","under")
+ if c ~= "" then
+ context(c)
+ end
end
-function commands.utfmathcommandabove(asked) context(utfmathcommand(asked,nil,"topaccent","over" )) end
-function commands.utfmathcommandbelow(asked) context(utfmathcommand(asked,nil,"botaccent","under")) end
+function commands.utfmathcommandfiller(asked)
+ local c = utfmathfiller(asked,nil)
+ if c ~= "" then
+ context(c)
+ end
+end
-function commands.doifelseutfmathabove(chr) commands.doifelse(utfmathaccent(chr,nil,"topaccent","over" )) end
-function commands.doifelseutfmathbelow(chr) commands.doifelse(utfmathaccent(chr,nil,"botaccent","under")) end
+function commands.doifelseutfmathabove(chr)
+ local c = utfmathaccent(chr,nil,"topaccent","over")
+ ctx_doifelse(c and c ~= "")
+end
+
+function commands.doifelseutfmathbelow(chr)
+ local c = utfmathaccent(chr,nil,"botaccent","under")
+ ctx_doifelse(c and c ~= "")
+end
+
+function commands.doifelseutfmathaccent(chr,asked)
+ local c = utfmathaccent(chr,nil,asked)
+ ctx_doifelse(c and c ~= "")
+end
+
+function commands.doifelseutfmathfiller(chr)
+ local c = utfmathfiller(chr,nil)
+ ctx_doifelse(c and c ~= "")
+end
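+
+-- the command variants above only print something when a command is actually known;
+-- the doifelse variants feed the outcome of the test back to tex via ctx_doifelse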
-- helpers
--
diff --git a/tex/context/base/math-ini.mkiv b/tex/context/base/math-ini.mkiv
index a2f481df2..a7b2a924c 100644
--- a/tex/context/base/math-ini.mkiv
+++ b/tex/context/base/math-ini.mkiv
@@ -301,8 +301,9 @@
\def\utfmathclassfiltered #1#2{\ctxcommand{utfmathclass (\!!bs#1\!!es,nil,"#2")}}
\def\utfmathcommandfiltered#1#2{\ctxcommand{utfmathcommand(\!!bs#1\!!es,nil,"#2")}}
-\def\utfmathcommandabove#1{\ctxcommand{utfmathcommandabove(\!!bs#1\!!es)}}
-\def\utfmathcommandbelow#1{\ctxcommand{utfmathcommandbelow(\!!bs#1\!!es)}}
+\def\utfmathcommandabove #1{\ctxcommand{utfmathcommandabove (\!!bs#1\!!es)}}
+\def\utfmathcommandbelow #1{\ctxcommand{utfmathcommandbelow (\!!bs#1\!!es)}}
+\def\utfmathcommandfiller#1{\ctxcommand{utfmathcommandfiller(\!!bs#1\!!es)}}
\unexpanded\def\doifelseutfmathaccent #1{\ctxcommand{doifelseutfmathaccent(\!!bs#1\!!es)}}
\unexpanded\def\doifelseutfmathaccentfiltered#1#2{\ctxcommand{doifelseutfmathaccent(\!!bs#1\!!es,"#2")}}
@@ -310,6 +311,8 @@
\unexpanded\def\doifelseutfmathabove #1{\ctxcommand{doifelseutfmathabove(\!!bs#1\!!es)}}
\unexpanded\def\doifelseutfmathbelow #1{\ctxcommand{doifelseutfmathbelow(\!!bs#1\!!es)}}
+\unexpanded\def\doifelseutfmathfiller #1{\ctxcommand{doifelseutfmathfiller(\!!bs#1\!!es)}}
+
%D Not used that much:
\installcorenamespace{mathcodecommand}
diff --git a/tex/context/base/math-int.mkiv b/tex/context/base/math-int.mkiv
index 6b480961b..6b65738ff 100644
--- a/tex/context/base/math-int.mkiv
+++ b/tex/context/base/math-int.mkiv
@@ -13,6 +13,8 @@
\writestatus{loading}{ConTeXt Math Macros / Integrals}
+% todo: int and sum etc can be stackers
+
\unprotect
%D \startbuffer
diff --git a/tex/context/base/math-stc.mkvi b/tex/context/base/math-stc.mkvi
index 140d0244b..ca39287c5 100644
--- a/tex/context/base/math-stc.mkvi
+++ b/tex/context/base/math-stc.mkvi
@@ -16,6 +16,8 @@
\unprotect
+%D WARNING: If the code here changes, the export needs to be checked!
+
%D At some point the \MKII\ arrow mechanism has been converted to \MKIV, but we kept
%D most of the logic. We now have a more generic variant dealing with extensibles.
%D There are a few demands that we need to meet:
@@ -78,7 +80,7 @@
{\mathstylehbox{\usemathstackerscolorparameter\c!color
\Umathaccent\fam\zerocount\scratchunicode{\hskip\hsize}}}
-% these delimiters are a unuseable as theu don't center for small arguments:
+% these delimiters are unusable as they don't center for small arguments:
%
% $\Umathaccent 0 0 "2190{x}$ \par $\Umathaccent 0 0 "27F8{x}$\par
% $\Udelimiterunder 0 "2190{x}$ \par $\Udelimiterunder 0 "27F8{x}$\par
@@ -121,6 +123,18 @@
\def\math_stackers_skip_indeed#amount%
{\filledhboxk{\unsetteststrut\strut\hskip#amount}} % \dontshowstruts
+\let\math_stackers_start_tagged_mid\relax
+\let\math_stackers_start_tagged_top\relax
+\let\math_stackers_start_tagged_bot\relax
+\let\math_stackers_stop_tagged \relax
+
+\appendtoks
+ \def\math_stackers_start_tagged_mid{\dostarttagged\t!mathstackermid\empty\hbox\bgroup}%
+ \def\math_stackers_start_tagged_top{\dostarttagged\t!mathstackertop\empty\hbox\bgroup}%
+ \def\math_stackers_start_tagged_bot{\dostarttagged\t!mathstackerbot\empty\hbox\bgroup}%
+ \def\math_stackers_stop_tagged {\egroup\dostoptagged}%
+\to \everysetuptagging
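+
+% until tagging is set up these four wrappers remain \relax, so normal (untagged)
+% runs get no extra overhead here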
+
%D We define a full featured command handler.
\installcorenamespace {mathstackers}
@@ -139,6 +153,7 @@
\c!mpoffset=.25\exheight,
\c!voffset=.25\exheight,
\c!hoffset=.5\emwidth,
+ \c!distance=\mathstackersparameter\c!voffset, % distance between symbol and base (can be different from voffset)
\c!minheight=\exheight,
\c!mindepth=\zeropoint,
\c!minwidth=\emwidth,
@@ -264,6 +279,7 @@
{\begingroup
\edef\currentmathstackers{#category}%
\mathstackersparameter\c!left\relax
+ \dostarttagged\t!mathstacker\currentmathstackers
\ifmmode\math_class_by_parameter\mathstackersparameter\else\dontleavehmode\fi
{\edef\p_offset {\mathstackersparameter\c!offset}%
\edef\p_location {\mathstackersparameter\c!location}%
@@ -311,7 +327,11 @@
\fi
\scratchwidth\wd
\ifdim\wd\scratchboxone>\wd\scratchboxtwo
- \scratchboxone
+ \ifdim\wd\scratchboxone>\wd\scratchboxthree
+ \scratchboxone
+ \else
+ \scratchboxthree
+ \fi
\else\ifdim\wd\scratchboxtwo>\wd\scratchboxthree
\scratchboxtwo
\else
@@ -327,7 +347,9 @@
\advance\scratchwidth2\scratchhoffset
%
\ifcase#method\relax
+ \dostarttagged\t!mathstackermid\empty
\setbox\scratchboxthree\csname\??mathstackersalternative\p_alternative\endcsname
+ \dostoptagged
\fi
%
\ifdim\wd\scratchboxone<\scratchwidth
@@ -371,10 +393,13 @@
%
\math_stackers_normalize_three
%
+ \math_stackers_start_tagged_mid
\math_stackers_middle\bgroup
\box\scratchboxthree
\egroup
+ \math_stackers_stop_tagged
%
+ \math_stackers_start_tagged_top
\ifdim\htdp\scratchboxone>\zeropoint
\scratchoffset\scratchvoffset
\kern-\scratchwidth
@@ -383,7 +408,9 @@
\box\scratchboxone
\egroup
\fi
+ \math_stackers_stop_tagged
%
+ \math_stackers_start_tagged_bot
\ifdim\htdp\scratchboxtwo>\zeropoint
\scratchoffset\scratchvoffset
\kern-\scratchwidth
@@ -391,7 +418,9 @@
\lower\dimexpr\ht\scratchboxtwo+\scratchdepth+\scratchoffset+\scratchbottomoffset\relax
\box\scratchboxtwo
\egroup
- \fi}%
+ \fi
+ \math_stackers_stop_tagged}%
+ \dostoptagged
\mathstackersparameter\c!right\relax
\endgroup}
@@ -453,6 +482,7 @@
{\begingroup
\edef\currentmathstackers{#category}%
\mathstackersparameter\c!left\relax
+ \dostarttagged\t!mathstacker\currentmathstackers
\ifmmode\math_class_by_parameter\mathstackersparameter\else\dontleavehmode\fi
{\edef\currentmathstackers{#category}%
\edef\m_math_stackers_text_middle {#text}%
@@ -480,37 +510,51 @@
\fi
\advance\scratchwidth2\scratchhoffset
%
- \setbox\scratchboxtwo \csname\??mathstackersalternative\p_alternative\endcsname
- \setbox\scratchboxthree\hbox to \scratchwidth{\hss\box\scratchboxthree\hss}%
+ \setbox\scratchboxtwo\csname\??mathstackersalternative\p_alternative\endcsname
+ \setbox\scratchboxthree\hbox
+ to \scratchwidth{\hss\box\scratchboxthree\hss}%
%
\math_stackers_normalize_three
%
+ \math_stackers_start_tagged_mid
\math_stackers_middle\bgroup
\box\scratchboxthree
\egroup
+ \math_stackers_stop_tagged
%
\ifdim\htdp\scratchboxtwo>\zeropoint
\kern-\scratchwidth
+ \math_stackers_start_tagged_top
\ifcase#top\else
\math_stackers_top\bgroup
- % \raise\dimexpr\scratchheight+\scratchtopoffset\relax
- \raise\dimexpr\scratchheight+\mathstackersparameter\c!voffset\relax
+ \raise\dimexpr
+ \scratchheight
+ +\dp\scratchboxtwo % new
+ +\mathstackersparameter\c!distance % was \c!voffset
+ \relax
\box\scratchboxtwo
\egroup
\fi
+ \math_stackers_stop_tagged
\scratchunicode#codeextra\relax
+ \math_stackers_start_tagged_bot
\ifcase\scratchunicode\else
\kern-\scratchwidth
\setbox\scratchboxtwo\csname\??mathstackersalternative\p_alternative\endcsname
\fi
\ifcase#bottom\else
\math_stackers_bottom\bgroup
- % \lower\dimexpr\scratchdepth+\ht\scratchboxtwo+\scratchbottomoffset\relax
- \lower\dimexpr\scratchdepth+\ht\scratchboxtwo+\mathstackersparameter\c!voffset\relax
+ \lower\dimexpr
+ \scratchdepth
+ +\ht\scratchboxtwo
+ +\mathstackersparameter\c!distance % was \c!voffset
+ \relax
\box\scratchboxtwo
\egroup
\fi
+ \math_stackers_stop_tagged
\fi}%
+ \dostoptagged
\mathstackersparameter\c!right\relax
\edef\p_limits{\mathstackersparameter\c!mathlimits}%
\ifx\p_limits\v!yes
@@ -551,14 +595,176 @@
\def\math_stackers_handle_over[#category]%
{\math_stackers_direct_double\plusone\zerocount{\iffirstargument#category\else\v!top \fi}} % will be defined later on
-\def\math_stackers_handle_under[#category]#codepoint#bottomtext%
+\def\math_stackers_handle_under[#category]%
{\math_stackers_direct_double\zerocount\plusone{\iffirstargument#category\else\v!bottom\fi}} % will be defined later on
-\def\math_stackers_handle_double[#category]#codepoint#bottomtext%
+\def\math_stackers_handle_double[#category]%
{\math_stackers_direct_double\plusone\plusone {\iffirstargument#category\else\v!bottom\fi}} % will be defined later on
\def\math_stackers_direct_double#top#bottom#category#codepoint#text%
- {\math_stackers_make_double#top#bottom{#category}{#codepoint}{#text}%
+ {\math_stackers_make_double#top#bottom{#category}{#codepoint}{0}{#text}%
+ \endgroup}
+
+%D A relative new one is a combination of accents and text (as needed in mathml):
+
+\unexpanded\def\math_stackers_make_double_text#where#category#codepoint#text#extra%
+ {\begingroup
+ \edef\currentmathstackers{#category}%
+ \mathstackersparameter\c!left\relax
+ \dostarttagged\t!mathstacker\currentmathstackers
+ \ifmmode\math_class_by_parameter\mathstackersparameter\else\dontleavehmode\fi
+ {\edef\currentmathstackers{#category}%
+ %
+ \edef\p_offset {\mathstackersparameter\c!offset}%
+ \edef\p_location {\mathstackersparameter\c!location}%
+ \edef\p_strut {\mathstackersparameter\c!strut}%
+ \edef\p_alternative{\mathstackersparameter\c!alternative}%
+ %
+ \scratchleftoffset \zeropoint
+ \scratchrightoffset\zeropoint
+ %
+ \edef\m_math_stackers_text_middle{#text}%
+ \math_stackers_check_unicode{#codepoint}%
+ \scratchunicode#codepoint\relax
+ %
+ \ifx\math_stackers_middle\empty
+ \setbox\scratchboxthree\emptyhbox
+ \else
+ \setmathtextbox\scratchboxthree\hbox{\math_stackers_middletext}%
+ \fi
+ %
+ \ifcase#where\relax
+ \edef\m_math_stackers_text_top{#extra}%
+ \ifx\math_stackers_top\empty
+ \setbox\scratchboxone\emptyhbox
+ \else
+ \setmathsmalltextbox\scratchboxone\hbox{\math_stackers_toptext}%
+ \fi
+ \else
+ \edef\m_math_stackers_text_bottom{#extra}%
+ \ifx\math_stackers_bottom\empty
+ \setbox\scratchboxone\emptyhbox
+ \else
+ \setmathsmalltextbox\scratchboxone\hbox{\math_stackers_bottomtext}%
+ \fi
+ \fi
+ %
+ \scratchwidth\wd
+ \ifdim\wd\scratchboxone>\wd\scratchboxthree
+ \scratchboxone
+ \else
+ \scratchboxthree
+ \fi
+ \relax
+ \scratchdimen\mathstackersparameter\c!minwidth\relax
+ \ifdim\scratchwidth<\scratchdimen
+ \scratchwidth\scratchdimen
+ \fi
+ \advance\scratchwidth2\scratchhoffset
+ %
+ \ifdim\wd\scratchboxone<\scratchwidth
+ \setbox\scratchboxone\hbox to \scratchwidth{\hss\unhbox\scratchboxone\hss}%
+ \fi
+ \ifdim\wd\scratchboxthree<\scratchwidth
+ \setbox\scratchboxthree\hbox to \scratchwidth{\hss\unhbox\scratchboxthree\hss}%
+ \fi
+ %
+ \math_stackers_normalize_three
+ %
+ \math_stackers_start_tagged_mid
+ \math_stackers_middle\bgroup
+ \box\scratchboxthree
+ \egroup
+ \math_stackers_stop_tagged
+ %
+ \kern-\scratchwidth
+ \ifcase#where\relax
+ \setbox\scratchboxtwo\csname\??mathstackersalternative\p_alternative\endcsname
+ %
+ \math_stackers_start_tagged_top
+ \math_stackers_top\bgroup
+ \raise\dimexpr
+ \scratchheight
+ +\dp\scratchboxone
+ +\mathstackersparameter\c!voffset
+ \relax
+ \box\scratchboxone % toptext
+ \egroup
+ \math_stackers_stop_tagged
+ \kern-\scratchwidth
+ \math_stackers_start_tagged_bot
+ \math_stackers_bottom\bgroup
+ \lower\dimexpr
+ \scratchdepth
+ +\ht\scratchboxtwo
+ +\mathstackersparameter\c!distance
+ \relax
+ \box\scratchboxtwo % accent
+ \egroup
+ \math_stackers_stop_tagged
+ \else
+ \setbox\scratchboxtwo\csname\??mathstackersalternative\p_alternative\endcsname
+ %
+ \math_stackers_start_tagged_top
+ \math_stackers_top\bgroup
+ \raise\dimexpr
+ \scratchheight
+ +\dp\scratchboxtwo % new
+ +\mathstackersparameter\c!distance
+ \relax
+ \box\scratchboxtwo % accent
+ \egroup
+ \math_stackers_stop_tagged
+ \kern-\scratchwidth
+ \math_stackers_start_tagged_bot
+ \math_stackers_bottom\bgroup
+ \lower\dimexpr
+ \scratchdepth
+ +\ht\scratchboxone
+ +\mathstackersparameter\c!voffset
+ \relax
+ \box\scratchboxone % bottext
+ \egroup
+ \math_stackers_stop_tagged
+ \fi
+ }%
+ \dostoptagged
+ \mathstackersparameter\c!right\relax
+ \edef\p_limits{\mathstackersparameter\c!mathlimits}%
+ \ifx\p_limits\v!yes
+ \expandafter\endgroup\expandafter\limits
+ \else
+ \expandafter\endgroup
+ \fi}
+
+\unexpanded\def\definemathovertextextensible {\dotripleempty\math_extensibles_define_over_text }
+\unexpanded\def\definemathundertextextensible{\dotripleempty\math_extensibles_define_under_text}
+
+\def\math_extensibles_define_over_text[#1][#2][#3]%
+ {\ifthirdargument
+ \setuevalue{#2}{\math_stackers_make_double_text\plusone {#1}{\number#3}}%
+ \else
+ \setuevalue{#1}{\math_stackers_make_double_text\plusone \noexpand\currentmathstackers{\number#2}}%
+ \fi}
+
+\def\math_extensibles_define_under_text[#1][#2][#3]%
+ {\ifthirdargument
+ \setuevalue{#2}{\math_stackers_make_double_text\zerocount{#1}{\number#3}}%
+ \else
+ \setuevalue{#1}{\math_stackers_make_double_text\zerocount\noexpand\currentmathstackers{\number#2}}%
+ \fi}
+
+\unexpanded\def\mathovertext {\begingroup\dosingleempty\math_stackers_handle_over_text }
+\unexpanded\def\mathundertext{\begingroup\dosingleempty\math_stackers_handle_under_text }
+
+\def\math_stackers_handle_over_text[#category]%
+ {\math_stackers_direct_double_text\plusone {\iffirstargument#category\else\v!top \fi}} % will be defined later on
+
+\def\math_stackers_handle_under_text[#category]%
+ {\math_stackers_direct_double_text\zerocount{\iffirstargument#category\else\v!bottom\fi}} % will be defined later on
+
+\def\math_stackers_direct_double_text#where#category#codepoint#text#extra%
+ {\math_stackers_make_double_text#where{#category}{#codepoint}{#text}{#extra}%
\endgroup}
%D Here is a bonus macro that takes three texts. It can be used to get consistent
@@ -654,11 +860,23 @@
[\v!both]
\definemathstackers
- [vfenced]
+ [\v!vfenced]
[\v!both]
[\c!mathclass=\s!ord,
\c!mathlimits=\v!yes]
+% these are needed for mathml:
+
+% \setupmathstackers
+% [\v!both]
+% [\c!hoffset=1pt,
+% \c!voffset=1pt]
+
+\definemathstackers
+ [\v!bothtext]
+ [\v!both]
+ [\c!strut=\v!yes]
+
% These are compatibility definitions, math only.
% todo: top= bottom= middle= is nicer (compare math-fen)
@@ -761,6 +979,15 @@
\definemathextensible [\v!mathematics] [mrightleftharpoons] ["21CC]
\definemathextensible [\v!mathematics] [mtriplerel] ["2261]
+\definemathextensible [\v!mathematics] [eleftarrowfill] ["2190] % ["27F5]
+\definemathextensible [\v!mathematics] [erightarrowfill] ["2192] % ["27F6]
+\definemathextensible [\v!mathematics] [eleftrightarrowfill] ["27F7]
+\definemathextensible [\v!mathematics] [etwoheadrightarrowfill] ["27F9]
+\definemathextensible [\v!mathematics] [eleftharpoondownfill] ["21BD]
+\definemathextensible [\v!mathematics] [eleftharpoonupfill] ["21BC]
+\definemathextensible [\v!mathematics] [erightharpoondownfill] ["21C1]
+\definemathextensible [\v!mathematics] [erightharpoonupfill] ["21C0]
+
\definemathextensible [\v!text] [trel] ["002D]
\definemathextensible [\v!text] [tequal] ["003D]
\definemathextensible [\v!text] [tmapsto] ["21A6]
@@ -819,23 +1046,39 @@
% alternatively we can move the original to FE*
\definemathoverextensible [vfenced] [overbar] ["FE33E] % ["203E]
-\definemathunderextensible [vfenced] [underbar] ["FE33F] % ["203E]
+\definemathunderextensible [vfenced] [underbar] ["FE33F] % ["203E]
\definemathdoubleextensible [vfenced] [doublebar] ["FE33E] ["FE33F]
\definemathoverextensible [vfenced] [overbrace] ["FE3DE] % ["023DE]
-\definemathunderextensible [vfenced] [underbrace] ["FE3DF] % ["023DF]
+\definemathunderextensible [vfenced] [underbrace] ["FE3DF] % ["023DF]
\definemathdoubleextensible [vfenced] [doublebrace] ["FE3DE] ["FE3DF]
\definemathoverextensible [vfenced] [overparent] ["FE3DC] % ["023DC]
-\definemathunderextensible [vfenced] [underparent] ["FE3DD] % ["023DD]
+\definemathunderextensible [vfenced] [underparent] ["FE3DD] % ["023DD]
\definemathdoubleextensible [vfenced] [doubleparent] ["FE3DC] ["FE3DD]
\definemathoverextensible [vfenced] [overbracket] ["FE3B4] % ["023B4]
-\definemathunderextensible [vfenced] [underbracket] ["FE3B5] % ["023B5]
+\definemathunderextensible [vfenced] [underbracket] ["FE3B5] % ["023B5]
\definemathdoubleextensible [vfenced] [doublebracket] ["FE3B4] ["FE3B5]
% \unexpanded\def\mathopwithlimits#1#2{\mathop{#1{#2}}\limits}
+%D For mathml:
+
+\definemathdoubleextensible [both] [overbarunderbar] ["FE33E] ["FE33F]
+\definemathdoubleextensible [both] [overbraceunderbrace] ["FE3DE] ["FE3DF]
+\definemathdoubleextensible [both] [overparentunderparent] ["FE3DC] ["FE3DD]
+\definemathdoubleextensible [both] [overbracketunderbracket] ["FE3B4] ["FE3B5]
+
+\definemathovertextextensible [bothtext] [overbartext] ["FE33E]
+\definemathundertextextensible [bothtext] [underbartext] ["FE33F]
+\definemathovertextextensible [bothtext] [overbracetext] ["FE3DE]
+\definemathundertextextensible [bothtext] [underbracetext] ["FE3DF]
+\definemathovertextextensible [bothtext] [overparenttext] ["FE3DC]
+\definemathundertextextensible [bothtext] [underparenttext] ["FE3DD]
+\definemathovertextextensible [bothtext] [overbrackettext] ["FE3B4]
+\definemathundertextextensible [bothtext] [underbrackettext] ["FE3B5]
+
%D Some bonus ones (for the moment here):
\definemathstackers
diff --git a/tex/context/base/math-tag.lua b/tex/context/base/math-tag.lua
index 77c182942..638c4629c 100644
--- a/tex/context/base/math-tag.lua
+++ b/tex/context/base/math-tag.lua
@@ -6,6 +6,8 @@ if not modules then modules = { } end modules ['math-tag'] = {
license = "see context related readme files"
}
+-- todo: have a local list with local tags that then get appended
+
-- use lpeg matchers
local find, match = string.find, string.match
@@ -22,6 +24,7 @@ local getid = nuts.getid
local getchar = nuts.getchar
local getlist = nuts.getlist
local getfield = nuts.getfield
+local getsubtype = nuts.getsubtype
local getattr = nuts.getattr
local setattr = nuts.setattr
@@ -43,15 +46,30 @@ local math_style_code = nodecodes.style -- attr style
local math_choice_code = nodecodes.choice -- attr display text script scriptscript
local math_fence_code = nodecodes.fence -- attr subtype
+local accentcodes = nodes.accentcodes
+
+local math_fixed_top = accentcodes.fixedtop
+local math_fixed_bottom = accentcodes.fixedbottom
+local math_fixed_both = accentcodes.fixedboth
+
+local kerncodes = nodes.kerncodes
+
+local fontkern_code = kerncodes.fontkern
+
local hlist_code = nodecodes.hlist
local vlist_code = nodecodes.vlist
local glyph_code = nodecodes.glyph
+local disc_code = nodecodes.disc
local glue_code = nodecodes.glue
+local kern_code = nodecodes.kern
+local math_code = nodecodes.math
local a_tagged = attributes.private('tagged')
+local a_taggedpar = attributes.private('taggedpar')
local a_exportstatus = attributes.private('exportstatus')
local a_mathcategory = attributes.private('mathcategory')
local a_mathmode = attributes.private('mathmode')
+local a_fontkern = attributes.private('fontkern')
local tags = structures.tags
@@ -67,6 +85,12 @@ local mathcodes = mathematics.codes
local ordinary_code = mathcodes.ordinary
local variable_code = mathcodes.variable
+local fromunicode16 = fonts.mappings.fromunicode16
+local font_of_family = node.family_font
+local fontcharacters = fonts.hashes.characters
+
+local report_tags = logs.reporter("structure","tags")
+
local process
local function processsubsup(start)
@@ -104,253 +128,363 @@ end
-- todo: variants -> original
local actionstack = { }
+local fencesstack = { }
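+
+-- fencesstack collects, per fence group, the small_char of the left, middle and
+-- right delimiters; they end up as properties of the enclosing mfenced element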
+
+local P, S, C, Cc, lpegmatch = lpeg.P, lpeg.S, lpeg.C, lpeg.Cc, lpeg.match
+local splittag = C(P(1-S(":-"))^1) * (P(":") * C((1-P("-"))^1) + Cc(""))
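+
+-- a fulltag looks like "tag-n" or "tag:detail-n"; made-up examples, just to show
+-- how it splits:
+--
+--   lpegmatch(splittag,"maction:toggle-4") -- "maction", "toggle"
+--   lpegmatch(splittag,"mtext-8")          -- "mtext", ""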
+
+-- glyph nodes and such can happen in under and over stuff
+
+local detail_accent = { detail = "accent" }
+
+local function getunicode(n) -- instead of getchar
+ local char = getchar(n)
+ local font = font_of_family(getfield(n,"fam")) -- font_of_family
+ local data = fontcharacters[font][char]
+ local unic = data.tounicode
+ return unic and fromunicode16(unic) or char
+end
process = function(start) -- we cannot use the processor as we have no finalizers (yet)
+ local mtexttag = nil
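+ -- runs of glyph and disc nodes (plus the font kerns between them) are wrapped in
+ -- one shared "mtext" element; mtexttag holds that element while it is open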
while start do
local id = getid(start)
- if id == math_char_code then
- local char = getchar(start)
- -- check for code
- local a = getattr(start,a_mathcategory)
- if a then
- a = { detail = a }
+ if id == glyph_code or id == disc_code then
+ if not mtexttag then
+ mtexttag = start_tagged("mtext")
end
- local code = getmathcode(char)
- if code then
- code = code[1]
+ setattr(start,a_tagged,mtexttag)
+ elseif mtexttag and id == kern_code and (getsubtype(start) == fontkern_code or getattr(start,a_fontkern)) then
+ setattr(start,a_tagged,mtexttag)
+ else
+ if mtexttag then
+ stop_tagged()
+ mtexttag = nil
end
- local tag
- if code == ordinary_code or code == variable_code then
- local ch = chardata[char]
- local mc = ch and ch.mathclass
- if mc == "number" then
- tag = "mn"
- elseif mc == "variable" or not mc then -- variable is default
- tag = "mi"
+ if id == math_char_code then
+ local char = getchar(start)
+ -- check for code
+ local a = getattr(start,a_mathcategory)
+ if a then
+ a = { detail = a }
+ end
+ local code = getmathcode(char)
+ if code then
+ code = code[1]
+ end
+ local tag
+ if code == ordinary_code or code == variable_code then
+ local ch = chardata[char]
+ local mc = ch and ch.mathclass
+ if mc == "number" then
+ tag = "mn"
+ elseif mc == "variable" or not mc then -- variable is default
+ tag = "mi"
+ else
+ tag = "mo"
+ end
else
tag = "mo"
end
- else
- tag = "mo"
- end
- setattr(start,a_tagged,start_tagged(tag,a))
- stop_tagged()
- break -- okay?
- elseif id == math_textchar_code then
- -- check for code
- local a = getattr(start,a_mathcategory)
- if a then
- setattr(start,a_tagged,start_tagged("ms",{ detail = a }))
- else
- setattr(start,a_tagged,start_tagged("ms"))
- end
- stop_tagged()
- break
- elseif id == math_delim_code then
- -- check for code
- setattr(start,a_tagged,start_tagged("mo"))
- stop_tagged()
- break
- elseif id == math_style_code then
- -- has a next
- elseif id == math_noad_code then
- processsubsup(start)
- elseif id == math_box_code or id == hlist_code or id == vlist_code then
- -- keep an eye on math_box_code and see what ends up in there
- local attr = getattr(start,a_tagged)
- local last = attr and taglist[attr]
- if last and find(last[#last],"formulacaption[:%-]") then
- -- leave alone, will nicely move to the outer level
- else
- local text = start_tagged("mtext")
- setattr(start,a_tagged,text)
- local list = getfield(start,"list")
- if not list then
- -- empty list
- elseif not attr then
- -- box comes from strange place
- set_attributes(list,a_tagged,text)
+ setattr(start,a_tagged,start_tagged(tag,a))
+ stop_tagged()
+ break -- okay?
+ elseif id == math_textchar_code then -- or id == glyph_code
+ -- check for code
+ local a = getattr(start,a_mathcategory)
+ if a then
+ setattr(start,a_tagged,start_tagged("ms",{ detail = a })) -- mtext
else
- -- Beware, the first node in list is the actual list so we definitely
- -- need to nest. This approach is a hack, maybe I'll make a proper
- -- nesting feature to deal with this at another level. Here we just
- -- fake structure by enforcing the inner one.
- local tagdata = taglist[attr]
- local common = #tagdata + 1
- local function runner(list) -- quite inefficient
- local cache = { } -- we can have nested unboxed mess so best local to runner
- for n in traverse_nodes(list) do
- local id = getid(n)
- local aa = getattr(n,a_tagged)
- if aa then
- local ac = cache[aa]
- if not ac then
- local tagdata = taglist[aa]
- local extra = #tagdata
- if common <= extra then
- for i=common,extra do
- ac = restart_tagged(tagdata[i]) -- can be made faster
- end
- for i=common,extra do
- stop_tagged() -- can be made faster
+ setattr(start,a_tagged,start_tagged("ms")) -- mtext
+ end
+ stop_tagged()
+ break
+ elseif id == math_delim_code then
+ -- check for code
+ setattr(start,a_tagged,start_tagged("mo"))
+ stop_tagged()
+ break
+ elseif id == math_style_code then
+ -- has a next
+ elseif id == math_noad_code then
+ processsubsup(start)
+ elseif id == math_box_code or id == hlist_code or id == vlist_code then
+ -- keep an eye on math_box_code and see what ends up in there
+ local attr = getattr(start,a_tagged)
+ local last = attr and taglist[attr]
+ local tag, detail
+ if last then
+ local fulltag = last[#last]
+ tag, detail = lpegmatch(splittag,fulltag)
+ end
+ if tag == "formulacaption" then
+ -- skip
+ elseif tag == "mstacker" then
+ local list = getfield(start,"list")
+ if list then
+ process(list)
+ end
+ else
+ if tag ~= "mstackertop" and tag ~= "mstackermid" and tag ~= "mstackerbot" then
+ tag = "mtext"
+ end
+ local text = start_tagged(tag)
+ setattr(start,a_tagged,text)
+ local list = getfield(start,"list")
+ if not list then
+ -- empty list
+ elseif not attr then
+ -- box comes from strange place
+ set_attributes(list,a_tagged,text) -- only the first node ?
+ else
+ -- Beware, the first node in list is the actual list so we definitely
+ -- need to nest. This approach is a hack, maybe I'll make a proper
+ -- nesting feature to deal with this at another level. Here we just
+ -- fake structure by enforcing the inner one.
+ --
+ -- todo: have a local list with local tags that then get appended
+ --
+ local tagdata = taglist[attr] or { }
+ local common = #tagdata + 1
+ local function runner(list,depth) -- quite inefficient
+ local cache = { } -- we can have nested unboxed mess so best local to runner
+ local keep = nil
+ -- local keep = { } -- win case we might need to move keep outside
+ for n in traverse_nodes(list) do
+ local id = getid(n)
+ local mth = id == math_code and getsubtype(n)
+ if mth == 0 then
+ -- insert(keep,text)
+ keep = text
+ text = start_tagged("mrow")
+ common = common + 1
+ end
+ local aa = getattr(n,a_tagged)
+ if aa then
+ local ac = cache[aa]
+ if not ac then
+ local tagdata = taglist[aa]
+ local extra = #tagdata
+ if common <= extra then
+ for i=common,extra do
+ ac = restart_tagged(tagdata[i]) -- can be made faster
+ end
+ for i=common,extra do
+ stop_tagged() -- can be made faster
+ end
+ else
+ ac = text
end
- else
- ac = text
+ cache[aa] = ac
end
- cache[aa] = ac
+ setattr(n,a_tagged,ac)
+ else
+ setattr(n,a_tagged,text)
+ end
+
+ if id == hlist_code or id == vlist_code then
+ runner(getlist(n),depth+1)
+ elseif id == glyph_code then
+ runner(getfield(n,"components"),depth+1) -- this should not be needed
+ elseif id == disc_code then
+ runner(getfield(n,"pre"),depth+1) -- idem
+ runner(getfield(n,"post"),depth+1) -- idem
+ runner(getfield(n,"replace"),depth+1) -- idem
+ end
+ if mth == 1 then
+ stop_tagged()
+ -- text = remove(keep)
+ text = keep
+ common = common - 1
end
- setattr(n,a_tagged,ac)
- else
- setattr(n,a_tagged,text)
- end
- if id == hlist_code or id == vlist_code then
- runner(getlist(n))
end
end
+ runner(list,0)
end
- runner(list)
+ stop_tagged()
end
- stop_tagged()
- end
- elseif id == math_sub_code then
- local list = getfield(start,"list")
- if list then
- local attr = getattr(start,a_tagged)
- local last = attr and taglist[attr]
- local action = last and match(last[#last],"maction:(.-)%-")
- if action and action ~= "" then
- if actionstack[#actionstack] == action then
+ elseif id == math_sub_code then -- normally a hbox
+ local list = getfield(start,"list")
+ if list then
+ local attr = getattr(start,a_tagged)
+ local last = attr and taglist[attr]
+ if last then
+ local fulltag = last[#last]
+ local tag, detail = lpegmatch(splittag,fulltag)
+ if tag == "maction" then
+ if detail == "" then
+ setattr(start,a_tagged,start_tagged("mrow"))
+ process(list)
+ stop_tagged()
+ elseif actionstack[#actionstack] == detail then
+ setattr(start,a_tagged,start_tagged("mrow"))
+ process(list)
+ stop_tagged()
+ else
+ insert(actionstack,detail)
+ setattr(start,a_tagged,start_tagged("mrow",{ detail = detail }))
+ process(list)
+ stop_tagged()
+ remove(actionstack)
+ end
+ elseif tag == "mstacker" then -- or tag == "mstackertop" or tag == "mstackermid" or tag == "mstackerbot" then
+ setattr(start,a_tagged,start_tagged(tag))
+ process(list)
+ stop_tagged()
+ else
+ setattr(start,a_tagged,start_tagged("mrow"))
+ process(list)
+ stop_tagged()
+ end
+ else -- never happens, we're always document
setattr(start,a_tagged,start_tagged("mrow"))
process(list)
stop_tagged()
- else
- insert(actionstack,action)
- setattr(start,a_tagged,start_tagged("mrow",{ detail = action }))
- process(list)
- stop_tagged()
- remove(actionstack)
end
- else
- setattr(start,a_tagged,start_tagged("mrow"))
- process(list)
- stop_tagged()
end
- end
- elseif id == math_fraction_code then
- local num = getfield(start,"num")
- local denom = getfield(start,"denom")
- local left = getfield(start,"left")
- local right = getfield(start,"right")
- if left then
- setattr(left,a_tagged,start_tagged("mo"))
- process(left)
- stop_tagged()
- end
- setattr(start,a_tagged,start_tagged("mfrac"))
- process(num)
- process(denom)
- stop_tagged()
- if right then
- setattr(right,a_tagged,start_tagged("mo"))
- process(right)
+ elseif id == math_fraction_code then
+ local num = getfield(start,"num")
+ local denom = getfield(start,"denom")
+ local left = getfield(start,"left")
+ local right = getfield(start,"right")
+ if left then
+ setattr(left,a_tagged,start_tagged("mo"))
+ process(left)
+ stop_tagged()
+ end
+ setattr(start,a_tagged,start_tagged("mfrac"))
+ process(num)
+ process(denom)
stop_tagged()
- end
- elseif id == math_choice_code then
- local display = getfield(start,"display")
- local text = getfield(start,"text")
- local script = getfield(start,"script")
- local scriptscript = getfield(start,"scriptscript")
- if display then
- process(display)
- end
- if text then
- process(text)
- end
- if script then
- process(script)
- end
- if scriptscript then
- process(scriptscript)
- end
- elseif id == math_fence_code then
- local delim = getfield(start,"delim")
- local subtype = getfield(start,"subtype")
- -- setattr(start,a_tagged,start_tagged("mfenced")) -- needs checking
- if subtype == 1 then
- -- left
- if delim then
- setattr(start,a_tagged,start_tagged("mleft"))
- process(delim)
+ if right then
+ setattr(right,a_tagged,start_tagged("mo"))
+ process(right)
stop_tagged()
end
- elseif subtype == 2 then
- -- middle
- if delim then
- setattr(start,a_tagged,start_tagged("mmiddle"))
- process(delim)
+ elseif id == math_choice_code then
+ local display = getfield(start,"display")
+ local text = getfield(start,"text")
+ local script = getfield(start,"script")
+ local scriptscript = getfield(start,"scriptscript")
+ if display then
+ process(display)
+ end
+ if text then
+ process(text)
+ end
+ if script then
+ process(script)
+ end
+ if scriptscript then
+ process(scriptscript)
+ end
+ elseif id == math_fence_code then
+ local delim = getfield(start,"delim")
+ local subtype = getfield(start,"subtype")
+ if subtype == 1 then
+ -- left
+ local properties = { }
+ insert(fencesstack,properties)
+ setattr(start,a_tagged,start_tagged("mfenced",nil,properties)) -- needs checking
+ if delim then
+ start_tagged("ignore")
+ properties.left = getfield(delim,"small_char")
+ process(delim)
+ stop_tagged()
+ end
+ elseif subtype == 2 then
+ -- middle
+ if delim then
+ start_tagged("ignore")
+ fencesstack[#fencesstack].middle = getfield(delim,"small_char")
+ process(delim)
+ stop_tagged()
+ end
+ elseif subtype == 3 then
+ local properties = remove(fencesstack)
+ if not properties then
+ report_tags("missing right fence")
+ properties = { }
+ end
+ if delim then
+ start_tagged("ignore")
+ properties.right = getfield(delim,"small_char")
+ process(delim)
+ stop_tagged()
+ end
stop_tagged()
+ else
+ -- can't happen
end
- elseif subtype == 3 then
- if delim then
- setattr(start,a_tagged,start_tagged("mright"))
- process(delim)
+ elseif id == math_radical_code then
+ local left = getfield(start,"left")
+ local degree = getfield(start,"degree")
+ if left then
+ start_tagged("ignore")
+ process(left) -- root symbol, ignored
stop_tagged()
end
- else
- -- can't happen
- end
- -- stop_tagged()
- elseif id == math_radical_code then
- local left = getfield(start,"left")
- local degree = getfield(start,"degree")
- if left then
- start_tagged("ignore")
- process(left) -- root symbol, ignored
- stop_tagged()
- end
- if degree then -- not good enough, can be empty mlist
- setattr(start,a_tagged,start_tagged("mroot"))
- processsubsup(start)
- process(degree)
- stop_tagged()
- else
- setattr(start,a_tagged,start_tagged("msqrt"))
- processsubsup(start)
- stop_tagged()
- end
- elseif id == math_accent_code then
- local accent = getfield(start,"accent")
- local bot_accent = getfield(start,"bot_accent")
- if bot_accent then
- if accent then
- setattr(start,a_tagged,start_tagged("munderover",{ detail = "accent" }))
+ if degree then -- not good enough, can be empty mlist
+ setattr(start,a_tagged,start_tagged("mroot"))
processsubsup(start)
- process(bot_accent)
- process(accent)
+ process(degree)
stop_tagged()
else
- setattr(start,a_tagged,start_tagged("munder",{ detail = "accent" }))
+ setattr(start,a_tagged,start_tagged("msqrt"))
processsubsup(start)
- process(bot_accent)
stop_tagged()
end
- elseif accent then
- setattr(start,a_tagged,start_tagged("mover",{ detail = "accent" }))
- processsubsup(start)
- process(accent)
+ elseif id == math_accent_code then
+ local accent = getfield(start,"accent")
+ local bot_accent = getfield(start,"bot_accent")
+ local subtype = getsubtype(start)
+ if bot_accent then
+ if accent then
+ setattr(start,a_tagged,start_tagged("munderover", detail_accent, {
+ top = getunicode(accent),
+ bottom = getunicode(bot_accent),
+ topfixed = subtype == math_fixed_top or subtype == math_fixed_both,
+ bottomfixed = subtype == math_fixed_bottom or subtype == math_fixed_both,
+ }))
+ processsubsup(start)
+ process(bot_accent)
+ process(accent)
+ stop_tagged()
+ else
+ setattr(start,a_tagged,start_tagged("munder", detail_accent, {
+ bottom = getunicode(bot_accent),
+ bottomfixed = subtype == math_fixed_bottom or subtype == math_fixed_both,
+ }))
+ processsubsup(start)
+ process(bot_accent)
+ stop_tagged()
+ end
+ elseif accent then
+ setattr(start,a_tagged,start_tagged("mover", detail_accent, {
+ top = getunicode(accent),
+ topfixed = subtype == math_fixed_top or subtype == math_fixed_both,
+ }))
+ processsubsup(start)
+ process(accent)
+ stop_tagged()
+ else
+ processsubsup(start)
+ end
+ elseif id == glue_code then
+ -- local spec = getfield(start,"spec")
+ -- setattr(start,a_tagged,start_tagged("mspace",nil,spec and { width = getfield(spec,"width") }))
+ setattr(start,a_tagged,start_tagged("mspace"))
stop_tagged()
else
- processsubsup(start)
+ setattr(start,a_tagged,start_tagged("merror", { detail = nodecodes[i] }))
+ stop_tagged()
end
- elseif id == glue_code then
- setattr(start,a_tagged,start_tagged("mspace"))
- stop_tagged()
- else
- setattr(start,a_tagged,start_tagged("merror", { detail = nodecodes[i] }))
- stop_tagged()
end
start = getnext(start)
end
+ if mtexttag then
+ stop_tagged()
+ end
end
function noads.handlers.tags(head,style,penalties)
diff --git a/tex/context/base/mult-def.mkiv b/tex/context/base/mult-def.mkiv
index a45f4e440..5761e6cfb 100644
--- a/tex/context/base/mult-def.mkiv
+++ b/tex/context/base/mult-def.mkiv
@@ -34,6 +34,8 @@
% start todo:
+\def\c!svgstyle {svgstyle}
+
\def\c!nextleft {nextleft}
\def\c!nextright {nextright}
\def\c!nextleftquotation {nextleftquotation}
@@ -92,6 +94,10 @@
\def\v!mixed {mixed}
\def\v!centerlast {centerlast}
\def\v!long {long}
+\def\v!box {box}
+
+\def\v!vfenced {vfenced}
+\def\v!bothtext {bothtext}
\def\s!lcgreek {lcgreek}
\def\s!ucgreek {ucgreek}
diff --git a/tex/context/base/node-ltp.lua b/tex/context/base/node-ltp.lua
index 6ad5de140..5a826cc0d 100644
--- a/tex/context/base/node-ltp.lua
+++ b/tex/context/base/node-ltp.lua
@@ -2547,10 +2547,10 @@ function diagnostics.feasible_break(par, current, r, b, pi, d, artificial_demeri
par.font_in_short_display = short_display("log",getnext(printed_node),par.font_in_short_display)
else
local save_link = getnext(current)
- setfield(cur_p,"next",nil)
+ setfield(current,"next",nil)
write_nl("log","")
par.font_in_short_display = short_display("log",getnext(printed_node),par.font_in_short_display)
- setfield(cur_p,"next",save_link)
+ setfield(current,"next",save_link)
end
par.printed_node = current
end
@@ -3145,7 +3145,7 @@ local function hpack(head,width,method,direction,firstline,line) -- fast version
end
diagnostics.overfull_hbox(hlist,line,-delta)
end
- elseif order == 0 and hlist.list and last_badness > tex.hbadness then
+ elseif order == 0 and getlist(hlist) and last_badness > tex.hbadness then
diagnostics.bad_hbox(hlist,line,last_badness)
end
end
diff --git a/tex/context/base/spac-ver.lua b/tex/context/base/spac-ver.lua
index 51d8f674e..52c1e4845 100644
--- a/tex/context/base/spac-ver.lua
+++ b/tex/context/base/spac-ver.lua
@@ -134,11 +134,13 @@ local default = {
strut = true,
hfraction = 1,
dfraction = 1,
+ bfraction = 0.25,
}
local fractions = {
minheight = "hfraction", maxheight = "hfraction",
mindepth = "dfraction", maxdepth = "dfraction",
+ box = "bfraction",
top = "tlines", bottom = "blines",
}
@@ -332,12 +334,33 @@ local function snap_hlist(where,current,method,height,depth) -- method.strut is
end
local h = height or getfield(current,"height")
local d = depth or getfield(current,"depth")
- local hr, dr, ch, cd = method.hfraction or 1, method.dfraction or 1, h, d
+ local hr, dr, ch, cd, br = method.hfraction or 1, method.dfraction or 1, h, d, method.bfraction or 0
local tlines, blines = method.tlines or 1, method.blines or 1
local done, plusht, plusdp = false, snapht, snapdp
local snaphtdp = snapht + snapdp
- if method.none then
+ if method.box then
+ local br = 1 - br
+ if br < 0 then
+ br = 0
+ elseif br > 1 then
+ br = 1
+ end
+ local n = ceiled((h+d-br*snapht-br*snapdp)/snaphtdp)
+ local x = n * snaphtdp - h - d
+ plusht = h + x / 2
+ plusdp = d + x / 2
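+ -- a quick trace with made-up values: snapht=10pt, snapdp=2pt (so snaphtdp=12pt),
+ -- the default bfraction of .25 (br becomes .75) and a box with h=8pt, d=2pt gives
+ -- n = ceiled((10-9)/12) = 1 and x = 12-10 = 2pt, so 1pt is added to both height
+ -- and depth and the box ends up centered on a single grid line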
+ elseif method.max then
+ local n = ceiled((h+d)/snaphtdp)
+ local x = n * snaphtdp - h - d
+ plusht = h + x / 2
+ plusdp = d + x / 2
+ elseif method.min then
+ local n = floored((h+d)/snaphtdp)
+ local x = n * snaphtdp - h - d
+ plusht = h + x / 2
+ plusdp = d + x / 2
+ elseif method.none then
plusht, plusdp = 0, 0
if t then
t[#t+1] = "none: plusht 0pt plusdp 0pt"
diff --git a/tex/context/base/spac-ver.mkiv b/tex/context/base/spac-ver.mkiv
index 409dd985c..7257b4ef9 100644
--- a/tex/context/base/spac-ver.mkiv
+++ b/tex/context/base/spac-ver.mkiv
@@ -1412,6 +1412,9 @@
\definegridsnapping[\v!none] [\v!none]
\definegridsnapping[\v!line] [\v!line]
\definegridsnapping[\v!strut] [\v!strut]
+\definegridsnapping[\v!box] [\v!box] % centers a box rounded upwards (box:.5 -> tolerance)
+\definegridsnapping[\v!min] [\v!min] % centers a box rounded downwards
+\definegridsnapping[\v!max] [\v!max] % centers a box rounded upwards
\definegridsnapping[\v!max] [\v!maxdepth,\v!maxheight,\v!strut]
\definegridsnapping[\v!min] [\v!mindepth,\v!minheight,\v!strut]
diff --git a/tex/context/base/status-files.pdf b/tex/context/base/status-files.pdf
index 9dc680772..d80f65499 100644
--- a/tex/context/base/status-files.pdf
+++ b/tex/context/base/status-files.pdf
Binary files differ
diff --git a/tex/context/base/status-lua.pdf b/tex/context/base/status-lua.pdf
index 3936e2e7c..8c8e0b9b0 100644
--- a/tex/context/base/status-lua.pdf
+++ b/tex/context/base/status-lua.pdf
Binary files differ
diff --git a/tex/context/base/status-mkiv.lua b/tex/context/base/status-mkiv.lua
index 05bfd7c93..157aa0e7e 100644
--- a/tex/context/base/status-mkiv.lua
+++ b/tex/context/base/status-mkiv.lua
@@ -542,6 +542,12 @@ return {
},
{
category = "mkiv",
+ filename = "lang-hyp",
+ loading = "always",
+ status = "okay",
+ },
+ {
+ category = "mkiv",
filename = "unic-ini",
loading = "always",
status = "okay",
@@ -3502,6 +3508,12 @@ return {
},
{
category = "lua",
+ filename = "lang-hyp",
+ loading = "lang-hyp",
+ status = "okay",
+ },
+ {
+ category = "lua",
filename = "lang-txt",
loading = "lang-lab",
status = "okay",
diff --git a/tex/context/base/strc-tag.lua b/tex/context/base/strc-tag.lua
index f51c9d5d0..ad6117728 100644
--- a/tex/context/base/strc-tag.lua
+++ b/tex/context/base/strc-tag.lua
@@ -160,6 +160,11 @@ local properties = allocate {
mfenced = { pdf = "Span", nature = "display" },
maction = { pdf = "Span", nature = "display" },
+ mstacker = { pdf = "Span", nature = "display" }, -- these are only internally used
+ mstackertop = { pdf = "Span", nature = "display" }, -- these are only internally used
+ mstackerbot = { pdf = "Span", nature = "display" }, -- these are only internally used
+ mstackermid = { pdf = "Span", nature = "display" }, -- these are only internally used
+
mtable = { pdf = "Table", nature = "display" }, -- might change
mtr = { pdf = "TR", nature = "display" }, -- might change
mtd = { pdf = "TD", nature = "display" }, -- might change
@@ -241,7 +246,7 @@ end
local nstack = 0
-function tags.start(tag,specification)
+function tags.start(tag,specification,props)
local label, detail, user
if specification then
label = specification.label
@@ -282,6 +287,9 @@ function tags.start(tag,specification)
tagmetadata[completetag] = metadata
metadata = nil
end
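+ -- 'props' (the new third argument) is stored as the properties of this tag; the
+ -- math tagger uses it to pass along for instance fence and accent characters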
+ if props then
+ properties[completetag] = props
+ end
texattribute[a_tagged] = t
return t
end
diff --git a/tex/context/base/strc-tag.mkiv b/tex/context/base/strc-tag.mkiv
index 39dba8259..9e850c85e 100644
--- a/tex/context/base/strc-tag.mkiv
+++ b/tex/context/base/strc-tag.mkiv
@@ -87,6 +87,10 @@
\def\t!mathtablerow {mtr} % TR
\def\t!mathtablecell {mtd} % TD
\def\t!mathaction {maction} %
+\def\t!mathstacker {mstacker}
+\def\t!mathstackertop {mstackertop}
+\def\t!mathstackermid {mstackermid}
+\def\t!mathstackerbot {mstackerbot}
\def\t!list {list} % TOC
\def\t!listitem {listitem} % TOCI
diff --git a/tex/context/base/typo-mar.lua b/tex/context/base/typo-mar.lua
index 5096e0042..8ec499ee4 100644
--- a/tex/context/base/typo-mar.lua
+++ b/tex/context/base/typo-mar.lua
@@ -165,9 +165,6 @@ local localpar_code = whatsitcodes.localpar
local nodepool = nuts.pool
local new_kern = nodepool.kern
-local new_glue = nodepool.glue
-local new_penalty = nodepool.penalty
-local new_stretch = nodepool.stretch
local new_usernumber = nodepool.usernumber
local new_latelua = nodepool.latelua
@@ -892,9 +889,9 @@ function margins.finalhandler(head)
-- if trace_margindata then
-- report_margindata("flushing stage two, instore: %s, delayed: %s",nofstored,nofdelayed)
-- end
-head = tonut(head)
-local head, done = finalhandler(head)
-head = tonode(head)
+ head = tonut(head)
+ local head, done = finalhandler(head)
+ head = tonode(head)
return head, done
else
return head, false
diff --git a/tex/context/base/x-math-svg.lua b/tex/context/base/x-math-svg.lua
new file mode 100644
index 000000000..b96c2c63e
--- /dev/null
+++ b/tex/context/base/x-math-svg.lua
@@ -0,0 +1,162 @@
+if not modules then modules = { } end modules ['x-math-svg'] = {
+ version = 1.001,
+ comment = "companion to x-math-svg.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local tostring, type, next = tostring, type, next
+local lpegmatch, P, Cs = lpeg.match, lpeg.P, lpeg.Cs
+
+local xmlfirst = xml.first
+local xmlconvert = xml.convert
+local xmlload = xml.load
+local xmlsave = xml.save
+local xmlcollected = xml.collected
+local xmldelete = xml.delete
+
+local loadtable = table.load
+local savetable = table.save
+
+local replacesuffix = file.replacesuffix
+local addsuffix = file.addsuffix
+local removefile = os.remove
+local isfile = lfs.isfile
+
+local formatters = string.formatters
+
+moduledata = moduledata or table.setmetatableindex("table")
+local svgmath = moduledata.svgmath -- autodefined
+
+local namedata = { }
+local pagedata = { }
+
+local statusname = "x-math-svg-status.lua"
+local pdfname = "x-math-svg.pdf"
+
+local pdftosvg = os.which("mudraw")
+
+local f_make_tex = formatters[ [[context --global kpse:x-math-svg.mkvi --inputfile="%s" --svgstyle="%s" --batch --noconsole --once --purgeall]] ]
+local f_make_svg = formatters[ [[mudraw -o "math-%%d.svg" "%s" 1-9999]] ]
+
+local f_inline = formatters[ [[<div class='math-inline' style='vertical-align:%p'></div>]] ]
+local f_display = formatters[ [[<div class='math-display'></div>]] ]
+
+local f_math_tmp = formatters[ [[math-%i]] ]
+
+function svgmath.process(filename)
+ if not filename then
+ -- no filename given
+ return
+ elseif not isfile(filename) then
+ -- invalid filename
+ return
+ end
+ local index = 0
+ local page = 0
+ local blobs = { }
+ local root = xmlload(filename)
+ for mth in xmlcollected(root,"math") do
+ index = index + 1
+ local blob = tostring(mth)
+ if blobs[blob] then
+ context.ReuseSVGMath(index,blobs[blob])
+ else
+ page = page + 1
+ buffers.assign(f_math_tmp(page),blob)
+ context.MakeSVGMath(index,page,mth.at.display)
+ blobs[blob] = page
+ end
+ end
+ context(function()
+ savetable(statusname, {
+ pagedata = pagedata,
+ namedata = namedata,
+ })
+ end)
+end
+
+function svgmath.register(index,page,specification)
+ if specification then
+ pagedata[page] = specification
+ end
+ namedata[index] = page
+end
+
+function svgmath.convert(filename,svgstyle)
+ if not filename then
+ -- no filename given
+ return false, "no filename"
+ elseif not isfile(filename) then
+ -- invalid filename
+ return false, "invalid filename"
+ elseif not pdftosvg then
+ return false, "mudraw is not installed"
+ end
+
+ os.execute(f_make_tex(filename,svgstyle))
+
+ local data = loadtable(statusname)
+ if not data then
+ -- invalid tex run
+ return false, "invalid tex run"
+ elseif not next(data) then
+        return false, "no conversion needed"
+ end
+
+ local pagedata = data.pagedata
+ local namedata = data.namedata
+
+ os.execute(f_make_svg(pdfname))
+
+ local root = xmlload(filename)
+ local index = 0
+ local done = { }
+ local unique = 0
+
+ local between = (1-P("<"))^1/""
+ local strip = Cs((
+ (P("<text") * ((1-P("</text>"))^1) * P("</text>")) * between^0 / "" +
+ P(">") * between +
+ P(1)
+ )^1)
+
+ for mth in xmlcollected(root,"m:math") do
+ index = index + 1
+ local page = namedata[index]
+ if done[page] then
+ mth.__p__.dt[mth.ni] = done[page]
+ else
+ local info = pagedata[page]
+ local depth = info.depth
+ local mode = info.mode
+ local svgname = addsuffix(f_math_tmp(page),"svg")
+ local action = mode == "inline" and f_inline or f_display
+ local x_div = xmlfirst(xmlconvert(action(-depth)),"/div")
+ local svgdata = io.loaddata(svgname)
+ if not svgdata or svgdata == "" then
+ print("error in:",svgname,tostring(mth))
+ else
+ -- svgdata = string.gsub(svgdata,">%s<","")
+ svgdata = lpegmatch(strip,svgdata)
+ local x_svg = xmlfirst(xmlconvert(svgdata),"/svg")
+ -- xmldelete(x_svg,"text")
+ x_div.dt = { x_svg }
+ mth.__p__.dt[mth.ni] = x_div -- use helper
+ end
+ done[page] = x_div
+ unique = unique + 1
+ end
+ end
+
+-- for k, v in next, data do
+-- removefile(addsuffix(k,"svg"))
+-- end
+-- removefile(statusname)
+-- removefile(pdfname)
+
+ xmlsave(root,filename)
+
+ return true, index, unique
+end
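
For reference, the round trip this new module implements: svgmath.process runs during a
ConTeXt pass over the exported file and collects every math blob (identical blobs reuse a
page), svgmath.register stores the page/depth/mode data, and svgmath.convert later reruns
ConTeXt, calls mudraw on the resulting pdf and splices the generated svg back into the
m:math elements. A minimal driver sketch, assuming it runs inside an mtxrun/ConTeXt Lua
instance where the module and its xml helpers are already loaded; the file and style names
are made up:

    -- hypothetical driver around the module above (sketch only)
    local svgmath = moduledata.svgmath

    local ok, a, b = svgmath.convert("myexport.xhtml", "mathsvgstyle")
    if ok then
        -- a = number of math elements seen, b = number of unique svg snippets
        print(string.format("converted %i math elements (%i unique)", a, b))
    else
        -- a = reason, e.g. "mudraw is not installed"
        print("conversion skipped: " .. a)
    end
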
diff --git a/tex/context/base/x-mathml.mkiv b/tex/context/base/x-mathml.mkiv
index 14cf583d1..093304e7d 100644
--- a/tex/context/base/x-mathml.mkiv
+++ b/tex/context/base/x-mathml.mkiv
@@ -1,4 +1,4 @@
-%D \modul
+%D \module
%D [ file=x-mathml,
%D version=2008.05.29,
%D title=\CONTEXT\ XML Modules,
@@ -23,6 +23,10 @@
%
% todo: more will be moved to lua (less hassle)
% todo: move left/right to the lua end
+%
+% this implementation looks like a hack ... this is because we deal with all the weird cases
+% we ran into, including abuse that was supposed to render ok (even if it didn't in other
+% renderers) .. it was simply expected to work that way.
\writestatus{loading}{ConTeXt XML Macros / MathML Renderer}
@@ -2231,6 +2235,8 @@
}
\stopxmlsetups
+% helpers
+
\unexpanded\def\mmlexecuteifdefined#1%
{\ifx#1\empty
\expandafter\secondoftwoarguments
@@ -2241,142 +2247,156 @@
\fi\fi
{\csname#1\endcsname}}
-% todo: combine topaccent/over/bottomaccent/under check
-
-\definemathextensible [\v!mathematics] [mml:overleftarrow] ["2190] % ["27F5]
-\definemathextensible [\v!mathematics] [mml:overrightarrow] ["2192] % ["27F6]
-\definemathextensible [\v!mathematics] [mml:overleftrightarrow] ["27F7]
-\definemathextensible [\v!mathematics] [mml:overtwoheadrightarrow] ["27F9]
-\definemathextensible [\v!mathematics] [mml:overleftharpoondown] ["21BD]
-\definemathextensible [\v!mathematics] [mml:overleftharpoonup] ["21BC]
-\definemathextensible [\v!mathematics] [mml:overrightharpoondown] ["21C1]
-\definemathextensible [\v!mathematics] [mml:overrightharpoonup] ["21C0]
-
-\definemathextensible [\v!mathematics] [mml:underleftarrow] ["2190] % ["27F5]
-\definemathextensible [\v!mathematics] [mml:underrightarrow] ["2192] % ["27F6]
-\definemathextensible [\v!mathematics] [mml:underleftrightarrow] ["27F7]
-\definemathextensible [\v!mathematics] [mml:undertwoheadrightarrow] ["27F9]
-\definemathextensible [\v!mathematics] [mml:underleftharpoondown] ["21BD]
-\definemathextensible [\v!mathematics] [mml:underleftharpoonup] ["21BC]
-\definemathextensible [\v!mathematics] [mml:underrightharpoondown] ["21C1]
-\definemathextensible [\v!mathematics] [mml:underrightharpoonup] ["21C0]
-
-\definemathtriplet [\v!mathematics] [mmlovertriplet]
-\definemathtriplet [\v!mathematics] [mmlundertriplet]
-\definemathtriplet [\v!mathematics] [mmldoubletriplet]
-
-% alternative:
-%
-% \definemathextensible [\v!mathematics] [mml:\utfchar{0x2190}] ["2190] % ["27F5]
-% \definemathextensible [\v!mathematics] [mml:\utfchar{0x2192}] ["2192] % ["27F6]
-% \definemathextensible [\v!mathematics] [mml:\utfchar{0x27F5}] ["2190] % ["27F5]
-% \definemathextensible [\v!mathematics] [mml:\utfchar{0x27F6}] ["2192] % ["27F6]
-% \definemathextensible [\v!mathematics] [mml:\utfchar{0x27F7}] ["27F7]
-% \definemathextensible [\v!mathematics] [mml:\utfchar{0x27F9}] ["27F9]
-% \definemathextensible [\v!mathematics] [mml:\utfchar{0x21BD}] ["21BD]
-% \definemathextensible [\v!mathematics] [mml:\utfchar{0x21BC}] ["21BC]
-% \definemathextensible [\v!mathematics] [mml:\utfchar{0x21C1}] ["21C1]
-% \definemathextensible [\v!mathematics] [mml:\utfchar{0x21C0}] ["21C0]
-
-\unexpanded\def\mmloverof#1{\mmlexecuteifdefined\mmlovercommand\relax{\mmlunexpandedfirst {#1}}\relax}
-\unexpanded\def\mmloveros#1{\mmlexecuteifdefined\mmlovercommand {\mmlunexpandedsecond{#1}}\relax}
-\unexpanded\def\mmloverbf#1{\mmlexecuteifdefined\mmlbasecommand {\mmlunexpandedfirst {#1}}\relax}
-\unexpanded\def\mmloverbs#1{\mmlexecuteifdefined\mmlbasecommand\relax{\mmlunexpandedsecond{#1}}\relax}
+\def\mmlextensible#1{\ctxmodulemathml{extensible(\!!bs#1\!!es)}}
-\startxmlsetups mml:mover
- \edef\mmlovertoken{\mmlextensible{\xmlraw{#1}{/mml:*[2]}}}% /text()
- \doifelseutfmathabove\mmlovertoken {
- \edef\mmlovercommand{\utfmathcommandabove\mmlovertoken}
- \mmloverof{#1}
- } {
- \edef\mmlbasetoken{\xmlraw{#1}{/mml:*[1]}}% /text()
- \doifelseutfmathabove\mmlbasetoken {
- \edef\mmlbasecommand{mml:\utfmathcommandabove\mmlbasetoken}
- \mmloverbs{#1}
- } {
- \edef\mmlbasecommand{\utfmathfiller\mmlbasetoken}
- \edef\mmlovercommand{\utfmathfiller\mmlovertoken}
- \mmlundertriplet{\mmloverbf{#1}}{\mmloveros{#1}}{}%\relax
- }
- }
- % \limits % spoils spacing
-\stopxmlsetups
+\definemathtriplet [\v!mathematics] [mmlovertriplet] % or will we use a special instance
+\definemathtriplet [\v!mathematics] [mmlundertriplet] % or will we use a special instance
+\definemathtriplet [\v!mathematics] [mmldoubletriplet] % or will we use a special instance
-% alternative:
-%
-% \startxmlsetups mml:mover
-% \edef\mmlovertoken{\xmlraw{#1}{/mml:*[2]}}% /text()
-% \doifelseutfmathabove\mmlovertoken {
-% \edef\mmlovercommand{\utfmathcommandabove\mmlovertoken}
-% \mmloverof{#1}
-% } {
-% \edef\mmlbasetoken{\xmlraw{#1}{/mml:*[1]/text()}}
-% \ifcsname mml:\mmlbasetoken\endcsname
-% \csname mml:\mmlbasetoken\endcsname{\mmlunexpandedsecond{#1}}\relax
-% \else
-% \edef\mmlbasecommand{\utfmathfiller\mmlbasetoken}
-% \edef\mmlovercommand{\utfmathfiller\mmlovertoken}
-% \mmlovertriplet{\mmloveros{#1}}{\mmloverbf{#1}}\relax
-% \fi
-% }
-% % \limits % spoils spacing
-% \stopxmlsetups
+% common to munder/mover/munderover
-% do this in lua
+\starttexdefinition unexpanded mmlfencedfirst #1
+ \math_fences_checked_start
+ \mmlunexpandedfirst{#1}
+ \math_fences_checked_stop
+\stoptexdefinition
+\starttexdefinition unexpanded mmlfencedsecond #1
+ \math_fences_checked_start
+ \mmlunexpandedsecond{#1}
+ \math_fences_checked_stop
+\stoptexdefinition
+\starttexdefinition unexpanded mmlfencedthird #1
+ \math_fences_checked_start
+ \mmlunexpandedthird{#1}
+ \math_fences_checked_stop
+\stoptexdefinition
-\def\mmlextensible#1{\ctxmodulemathml{extensible(\!!bs#1\!!es)}}
+% mover
-% \unexpanded\def\mmlunderuf#1{\mmlexecuteifdefined\mmlundercommand\relax {\mmlunexpandedfirst {#1}}\relax}
-% \unexpanded\def\mmlunderus#1{\mmlexecuteifdefined\mmlundercommand {\mmlunexpandedsecond{#1}}\relax}
-% \unexpanded\def\mmlunderbf#1{\mmlexecuteifdefined\mmlbasecommand {\mmlunexpandedfirst {#1}}\relax}
-% %unexpanded\def\mmlunderbs#1{\mmlexecuteifdefined\mmlbasecommand \relax{}{\mmlunexpandedsecond{#1}}\relax}
-% \unexpanded\def\mmlunderbs#1{\mmlexecuteifdefined\mmlbasecommand \relax {\mmlunexpandedsecond{#1}}\relax}
+\starttexdefinition unexpanded mmloverabove #1
+ \edef\mmlovercommand{\utfmathfiller\mmlovertoken}
+ \mmlexecuteifdefined\mmlovercommand {\mmlfencedsecond{#1}} \relax
+\stoptexdefinition
+\starttexdefinition unexpanded mmloverbase #1
+ \edef\mmlbasecommand{\utfmathfiller\mmlbasetoken}
+ \mmlexecuteifdefined\mmlbasecommand {\mmlfencedfirst{#1}}
+ \relax
+\stoptexdefinition
+\starttexdefinition unexpanded mmloverbasefiller #1
+ \edef\mmlbasecommand{e\utfmathcommandfiller\mmlbasetoken}
+ \mmlexecuteifdefined\mmlbasecommand \relax {\mmlfencedsecond{#1}} {}
+\stoptexdefinition
+\starttexdefinition unexpanded mmloveraccent #1
+ \edef\mmlovercommand{\utfmathcommandabove\mmlovertoken}
+ \mmlexecuteifdefined\mmlovercommand \relax {\mmlfencedfirst{#1}}
+\stoptexdefinition
+\starttexdefinition unexpanded mmlovertext #1
+ \mmlovertriplet {\mmloverbase{#1}} {\mmloverabove{#1}} {}
+\stoptexdefinition
+\starttexdefinition unexpanded mmloveraccentchecker #1
+ \edef\mmlovertoken{\mmlextensible{\xmlraw{#1}{/mml:*[2]}}}% /text()
+ \doifelseutfmathabove\mmlovertoken \mmloveraccent \mmlovertext {#1}
+\stoptexdefinition
-% \MMLhack
+\startxmlsetups mml:mover
+ \edef\mmlbasetoken{\mmlextensible{\xmlraw{#1}{/mml:*[1]}}}% /text()
+ \doifelseutfmathfiller\mmlbasetoken \mmloverbasefiller \mmloveraccentchecker {#1}
+\stopxmlsetups
-\unexpanded\def\mmlunderuf#1{\mmlexecuteifdefined\mmlundercommand\relax {\math_fences_checked_start\mmlunexpandedfirst {#1}\math_fences_checked_stop}\relax}
-\unexpanded\def\mmlunderus#1{\mmlexecuteifdefined\mmlundercommand {\math_fences_checked_start\mmlunexpandedsecond{#1}\math_fences_checked_stop}\relax}
-\unexpanded\def\mmlunderbf#1{\mmlexecuteifdefined\mmlbasecommand {\math_fences_checked_start\mmlunexpandedfirst {#1}\math_fences_checked_stop}\relax}
-%unexpanded\def\mmlunderbs#1{\mmlexecuteifdefined\mmlbasecommand \relax{}{\math_fences_checked_start\mmlunexpandedsecond{#1}\math_fences_checked_stop}\relax}
-\unexpanded\def\mmlunderbs#1{\mmlexecuteifdefined\mmlbasecommand \relax {\math_fences_checked_start\mmlunexpandedsecond{#1}\math_fences_checked_stop}\relax}
+% munder
-\startxmlsetups mml:munder
+\starttexdefinition unexpanded mmlunderbelow #1
+ \edef\mmlundercommand{\utfmathfiller\mmlundertoken}
+ \mmlexecuteifdefined\mmlundercommand {\mmlfencedsecond{#1}} \relax
+\stoptexdefinition
+\starttexdefinition unexpanded mmlunderbase #1
+ \edef\mmlbasecommand{\utfmathfiller\mmlbasetoken}
+ \mmlexecuteifdefined\mmlbasecommand {\mmlfencedfirst{#1}}
+ \relax
+\stoptexdefinition
+\starttexdefinition unexpanded mmlunderbasefiller #1
+ \edef\mmlbasecommand{e\utfmathcommandfiller\mmlbasetoken}%
+ \mmlexecuteifdefined\mmlbasecommand \relax {} {\mmlfencedsecond{#1}}
+\stoptexdefinition
+\starttexdefinition unexpanded mmlunderaccent #1
+ \edef\mmlundercommand{\utfmathcommandbelow\mmlundertoken}
+ \mmlexecuteifdefined\mmlundercommand \relax {\mmlfencedfirst{#1}}
+\stoptexdefinition
+\starttexdefinition unexpanded mmlundertext #1
+ \mmlundertriplet {\mmlunderbase{#1}} {} {\mmlunderbelow{#1}}
+\stoptexdefinition
+\starttexdefinition unexpanded mmlunderaccentchecker #1
\edef\mmlundertoken{\mmlextensible{\xmlraw{#1}{/mml:*[2]}}}% /text()
- \doifelseutfmathbelow\mmlundertoken {%
- \edef\mmlundercommand{\utfmathcommandbelow\mmlundertoken}
- \mmlunderuf{#1}
- } {
- \edef\mmlbasetoken{\xmlraw{#1}{/mml:*[1]}}% /text()
- \doifelseutfmathbelow\mmlbasetoken {
- \edef\mmlbasecommand{mml:\utfmathcommandbelow\mmlbasetoken}
- \mmlunderbs{#1}
- } {
- \edef\mmlbasecommand {\utfmathfiller\mmlbasetoken}
- \edef\mmlundercommand{\utfmathfiller\mmlundertoken}
- \mmlundertriplet{\mmlunderbf{#1}}{}{\mmlunderus{#1}}%\relax
- }
- }
- % \limits % spoils spacing
+ \doifelseutfmathbelow\mmlundertoken \mmlunderaccent \mmlundertext {#1}
+\stoptexdefinition
+
+\startxmlsetups mml:munder
+ \edef\mmlbasetoken{\mmlextensible{\xmlraw{#1}{/mml:*[1]}}}% /text()
+ \doifelseutfmathfiller\mmlbasetoken \mmlunderbasefiller \mmlunderaccentchecker {#1}
\stopxmlsetups
-\unexpanded\def\mmlunderoverst#1{\mmlexecuteifdefined\mmlbasecommand \relax{\mmlunexpandedsecond{#1}}{\mmlunexpandedthird{#1}}\relax}
-\unexpanded\def\mmlunderoverbf#1{\mmlexecuteifdefined\mmlbasecommand {\mmlunexpandedfirst {#1}}\relax}
-\unexpanded\def\mmlunderoverus#1{\mmlexecuteifdefined\mmlundercommand {\mmlunexpandedsecond{#1}}\relax}
-\unexpanded\def\mmlunderoverot#1{\mmlexecuteifdefined\mmlovercommand {\mmlunexpandedthird {#1}}\relax}
+% munderover
-\startxmlsetups mml:munderover
- \edef\mmlbasetoken{\xmlraw{#1}{/mml:*[1]}}% /text()
- \doifelseutfmathbelow\mmlbasetoken {
- \edef\mmlbasecommand{mml:\utfmathcommandbelow\mmlbasetoken}
- \mmlunderoverst{#1}
+\starttexdefinition unexpanded mmlunderoveraccentcheckerUO #1
+ \edef\mmlundercommand{\utfmathcommandbelow\mmlundertoken}
+ \edef\mmlovercommand {\utfmathcommandabove\mmlovertoken}
+ \edef\mmlbasecommand {\mmlovercommand\mmlundercommand}
+ \ifcsname\mmlbasecommand\endcsname
+ \csname\mmlbasecommand\endcsname {\mmlfencedfirst{#1}}
+ \else\ifcsname\mmlundercommand\endcsname
+ \ifcsname\mmlovercommand\endcsname
+ \csname\mmlovercommand\endcsname {\csname\mmlundercommand\endcsname{\mmlfencedfirst{#1}}}
+ \else
+ \mmldoubletriplet {\csname\mmlundercommand\endcsname{\mmlfencedfirst{#1}}} {\mmlfencedthird{#1}\mmlfencedthird{#1}} {}
+ \fi
+ \else\ifcsname\mmlovercommand\endcsname
+ \mmldoubletriplet {\csname\mmlovercommand\endcsname{\mmlfencedfirst{#1}}} {} {\mmlfencedsecond{#1}}
+ \else
+ \mmlunderoveraccentcheckerTT {#1}
+ \fi\fi\fi
+\stoptexdefinition
+\starttexdefinition unexpanded mmlunderoveraccentcheckerUT #1
+ \edef\mmlundercommand{\utfmathcommandbelow\mmlundertoken}
+ \edef\mmlbasecommand {\mmlundercommand text}
+ \ifcsname\mmlbasecommand\endcsname
+ \csname\mmlbasecommand\endcsname {\mmlfencedfirst{#1}} {\mmlfencedthird{#1}}
+ \else\ifcsname\mmlundercommand\endcsname
+ \mmldoubletriplet {\csname\mmlundercommand\endcsname{\mmlfencedfirst{#1}}} {\mmlfencedthird{#1}} {}
+ \else
+ \mmlunderoveraccentcheckerTT {#1}
+ \fi\fi
+\stoptexdefinition
+\starttexdefinition unexpanded mmlunderoveraccentcheckerOT #1
+ \edef\mmlovercommand{\utfmathcommandabove\mmlovertoken}
+ \edef\mmlbasecommand{\mmlovercommand text}
+ \ifcsname\mmlbasecommand\endcsname
+ \csname\mmlbasecommand\endcsname {\mmlfencedfirst{#1}} {\mmlfencedsecond{#1}}
+ \else\ifcsname\mmlovercommand\endcsname
+ \mmldoubletriplet {\csname\mmlovercommand\endcsname{\mmlfencedfirst{#1}}} {} {\mmlfencedsecond{#1}}
+ \else
+ \mmlunderoveraccentcheckerTT {#1}
+ \fi\fi
+\stoptexdefinition
+\starttexdefinition unexpanded mmlunderoveraccentcheckerTT #1
+ \mmldoubletriplet {\mmlfencedfirst{#1}} {\mmlfencedthird{#1}} {\mmlfencedsecond{#1}} \relax
+\stoptexdefinition
+\starttexdefinition unexpanded mmlunderoveraccentchecker #1
+ \edef\mmlundertoken{\mmlextensible{\xmlraw{#1}{/mml:*[2]}}}% /text()
+ \edef\mmlovertoken {\mmlextensible{\xmlraw{#1}{/mml:*[3]}}}% /text()
+ \doifelseutfmathbelow\mmlundertoken {
+ \doifelseutfmathabove\mmlovertoken \mmlunderoveraccentcheckerUO \mmlunderoveraccentcheckerUT {#1}
} {
- \edef\mmlundertoken {\xmlraw{#1}{/mml:*[2]}}% /text()
- \edef\mmlovertoken {\xmlraw{#1}{/mml:*[3]}}% /text()
- \edef\mmlbasecommand {\utfmathfiller\mmlbasetoken}
- \edef\mmlundercommand{\utfmathfiller\mmlundertoken}
- \edef\mmlovercommand {\utfmathfiller\mmlovertoken}
- \mmldoubletriplet{\mmlunderoverbf{#1}}{\mmlunderoverot{#1}}{\mmlunderoverus{#1}}\relax
+ \doifelseutfmathabove\mmlovertoken \mmlunderoveraccentcheckerOT \mmlunderoveraccentcheckerTT {#1}
}
+\stoptexdefinition
+\starttexdefinition unexpanded mmlunderoverbasefiller #1
+ \edef\mmlbasecommand{e\utfmathcommandfiller\mmlbasetoken}%
+ \mmlexecuteifdefined\mmlbasecommand \relax {\mmlfencedthird{#1}} {\mmlfencedsecond{#1}}
+\stoptexdefinition
+
+\startxmlsetups mml:munderover
+ \edef\mmlbasetoken{\mmlextensible{\xmlraw{#1}{/mml:*[1]}}}% /text()
+ \doifelseutfmathfiller\mmlbasetoken \mmlunderoverbasefiller \mmlunderoveraccentchecker {#1}
\stopxmlsetups
% tables (mml:mtable, mml:mtr, mml:mlabledtr, mml:mtd)
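
The rewritten mover, munder and munderover setups above all follow the same dispatch order:
a filler base goes to the basefiller variant, otherwise the under and over children are
tested for known accents, and the triplet stackers are the fallback. A small runnable Lua
model of the munderover case, where the boolean arguments stand in for the utfmath
filler/accent tests (descriptive only, not actual ConTeXt code):

    -- returns the name of the setup that would handle the element
    local function munderoverdispatch(basefiller, underaccent, overaccent)
        if basefiller then
            return "mmlunderoverbasefiller"
        elseif underaccent and overaccent then
            return "mmlunderoveraccentcheckerUO"
        elseif underaccent then
            return "mmlunderoveraccentcheckerUT" -- under accent, over kept as text
        elseif overaccent then
            return "mmlunderoveraccentcheckerOT" -- over accent, under kept as text
        else
            return "mmlunderoveraccentcheckerTT" -- plain double triplet
        end
    end

    print(munderoverdispatch(false, true, false)) -- mmlunderoveraccentcheckerUT
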
diff --git a/tex/generic/context/luatex/luatex-fonts-merged.lua b/tex/generic/context/luatex/luatex-fonts-merged.lua
index d58fa5f1c..22149730b 100644
--- a/tex/generic/context/luatex/luatex-fonts-merged.lua
+++ b/tex/generic/context/luatex/luatex-fonts-merged.lua
@@ -1,6 +1,6 @@
-- merged file : luatex-fonts-merged.lua
-- parent file : luatex-fonts.lua
--- merge date : 09/18/14 11:17:09
+-- merge date : 09/25/14 18:58:26
do -- begin closure to overcome local limits and interference